blob_id | language | repo_name | path | src_encoding | length_bytes | score | int_score | detected_licenses | license_type | text | download_success
---|---|---|---|---|---|---|---|---|---|---|---
3b4098ea5d388889edb494a8954cfe0fee1c3d09 | Rust | Girish21/rust-sandbox | /simple_io/src/main.rs | UTF-8 | 727 | 3.46875 | 3 | [] | no_license |
use std::io::{self, Write};
fn main() {
let quit: &str = "f";
let mut sum = 0;
let sum = loop {
let mut input = String::new();
print!("enter a number ('f' to quit): ");
io::stdout().flush().unwrap();
match io::stdin().read_line(&mut input) {
Ok(_) => {}
Err(_) => panic!("enter a string"),
}
let input = &input.trim();
match input.eq(&quit) {
true => break sum,
false => {}
}
let parsed_number: i32 = match input.trim().parse() {
Ok(x) => x,
Err(_) => panic!("enter a number"),
};
sum += parsed_number;
};
println!("the sum is: {}", sum);
}
| true |
d37776161e055024281856ae96d33edb5a007a64 | Rust | jnix85/surface-control | /src/sys/perf.rs | UTF-8 | 3,173 | 3.109375 | 3 | ["MIT"] | permissive |
use std::fs::OpenOptions;
use std::path::{Path, PathBuf};
use std::io::{Read, Write};
use crate::error::{Error, ErrorKind, Result, ResultExt};
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum Mode {
Normal,
Battery,
Perf1,
Perf2,
}
impl Mode {
pub fn from_str(s: &str) -> Option<Self> {
// TODO: handle other strings?
match s {
"1" => Some(Mode::Normal),
"2" => Some(Mode::Battery),
"3" => Some(Mode::Perf1),
"4" => Some(Mode::Perf2),
_ => None,
}
}
pub fn short_str(self) -> &'static str {
match self {
Mode::Normal => "1",
Mode::Battery => "2",
Mode::Perf1 => "3",
Mode::Perf2 => "4",
}
}
pub fn long_str(self) -> &'static str {
match self {
Mode::Normal => "Normal",
Mode::Battery => "Battery-Saver",
Mode::Perf1 => "Better Performance",
Mode::Perf2 => "Best Performance",
}
}
}
impl std::fmt::Display for Mode {
fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(fmt, "{}", self.long_str())
}
}
#[derive(Debug)]
pub struct InvalidPerformanceModeError;
impl std::str::FromStr for Mode {
type Err = InvalidPerformanceModeError;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
Mode::from_str(s).ok_or(InvalidPerformanceModeError)
}
}
pub struct Device {
path: PathBuf,
}
impl Device {
pub fn open() -> Result<Self> {
Device::open_path("/sys/bus/platform/devices/surface_sam_sid_perfmode")
}
pub fn open_path<P: AsRef<Path>>(path: P) -> Result<Self> {
if path.as_ref().is_dir() {
Ok(Device { path: path.as_ref().to_owned() })
} else {
Err(failure::err_msg("Surface performance-mode device not found"))
.context(ErrorKind::DeviceAccess)
.map_err(Into::into)
}
}
pub fn get_mode(&self) -> Result<Mode> {
use std::ffi::CStr;
let mut file = OpenOptions::new()
.read(true)
.open(self.path.as_path().join("perf_mode"))
.context(ErrorKind::DeviceAccess)?;
let mut buf = [0; 4];
let len = file.read(&mut buf).context(ErrorKind::Io)?;
let len = std::cmp::min(len + 1, buf.len());
let state = CStr::from_bytes_with_nul(&buf[0..len])
.context(ErrorKind::InvalidData)?
.to_str().context(ErrorKind::InvalidData)?
.trim();
Mode::from_str(state)
.ok_or_else(|| Error::from(ErrorKind::InvalidData))
}
pub fn set_mode(&self, mode: Mode) -> Result<()> {
let mode = mode.short_str().as_bytes();
let mut file = OpenOptions::new()
.write(true)
.open(self.path.as_path().join("perf_mode"))
.context(ErrorKind::DeviceAccess)?;
let len = file.write(mode).context(ErrorKind::Io)?;
if len == mode.len() {
Ok(())
} else {
Err(Error::from(ErrorKind::Io))
}
}
}
| true |
8c18511a11648b171303408c6446de724abe3e45 | Rust | shooontan/pocketcache | /examples/with-actix-web/src/handlers/bye.rs | UTF-8 | 429 | 2.5625 | 3 | ["MIT"] | permissive |
use actix_web::{web, HttpRequest, Responder};
use pocketcache::cache::Cache;
use std::sync::{Arc, Mutex};
use crate::models::user::User;
pub async fn get(req: HttpRequest, cache: web::Data<Arc<Mutex<Cache<User>>>>) -> impl Responder {
let name = req.match_info().get("name").unwrap_or("World");
// delete in cache
let mut cache = cache.lock().unwrap();
cache.delete(name);
format!("Goodbye {}!", &name)
}
| true |
c8b1a86e7166d5fd68548e468594e3aefedcec2b | Rust | drupalio/headlines | /headlines/src/headlines.rs | UTF-8 | 6,348 | 2.71875 | 3 | ["MIT"] | permissive |
use serde::{Deserialize, Serialize};
use std::{
borrow::Cow,
iter::FromIterator,
sync::mpsc::{Receiver, SyncSender},
};
use eframe::egui::{
self, epaint::text, Button, Color32, CtxRef, FontDefinitions, FontFamily, Hyperlink, Label,
Layout, Separator, TopBottomPanel, Window,
};
pub const PADDING: f32 = 5.0;
const WHITE: Color32 = Color32::from_rgb(255, 255, 255);
const BLACK: Color32 = Color32::from_rgb(0, 0, 0);
const CYAN: Color32 = Color32::from_rgb(0, 255, 255);
const RED: Color32 = Color32::from_rgb(255, 0, 0);
pub enum Msg {
ApiKeySet(String),
}
#[derive(Serialize, Deserialize)]
pub struct HeadlinesConfig {
pub dark_mode: bool,
pub api_key: String,
}
impl Default for HeadlinesConfig {
fn default() -> Self {
Self {
dark_mode: Default::default(),
api_key: String::new(),
}
}
}
pub struct Headlines {
pub articles: Vec<NewsCardData>,
pub config: HeadlinesConfig,
pub api_key_initialized: bool,
pub news_rx: Option<Receiver<NewsCardData>>,
pub app_tx: Option<SyncSender<Msg>>,
}
pub struct NewsCardData {
pub title: String,
pub desc: String,
pub url: String,
}
impl Headlines {
pub fn new() -> Headlines {
let config: HeadlinesConfig = confy::load("headlines").unwrap_or_default();
Headlines {
api_key_initialized: !config.api_key.is_empty(),
articles: vec![],
config,
news_rx: None,
app_tx: None,
}
}
pub fn configure_fonts(&self, ctx: &CtxRef) {
let mut font_def = FontDefinitions::default();
font_def.font_data.insert(
"MesloLGS".to_string(),
Cow::Borrowed(include_bytes!("../../MesloLGS_NF_Regular.ttf")),
);
font_def.family_and_size.insert(
eframe::egui::TextStyle::Heading,
(FontFamily::Proportional, 35.),
);
font_def.family_and_size.insert(
eframe::egui::TextStyle::Body,
(FontFamily::Proportional, 20.),
);
font_def
.fonts_for_family
.get_mut(&FontFamily::Proportional)
.unwrap()
.insert(0, "MesloLGS".to_string());
ctx.set_fonts(font_def);
}
pub fn render_news_cards(&self, ui: &mut eframe::egui::Ui) {
for a in &self.articles {
ui.add_space(PADDING);
// render title
let title = format!("▶ {}", a.title);
if self.config.dark_mode {
ui.colored_label(WHITE, title);
} else {
ui.colored_label(BLACK, title);
}
// render desc
ui.add_space(PADDING);
let desc = Label::new(&a.desc).text_style(eframe::egui::TextStyle::Button);
ui.add(desc);
// render hyperlinks
if self.config.dark_mode {
ui.style_mut().visuals.hyperlink_color = CYAN;
} else {
ui.style_mut().visuals.hyperlink_color = RED;
}
ui.add_space(PADDING);
ui.with_layout(Layout::right_to_left(), |ui| {
ui.add(Hyperlink::new(&a.url).text("read more ⤴"));
});
ui.add_space(PADDING);
ui.add(Separator::default());
}
}
pub(crate) fn render_top_panel(&mut self, ctx: &CtxRef, frame: &mut eframe::epi::Frame<'_>) {
// define a TopBottomPanel widget
TopBottomPanel::top("top_panel").show(ctx, |ui| {
ui.add_space(10.);
egui::menu::bar(ui, |ui| {
// logo
ui.with_layout(Layout::left_to_right(), |ui| {
ui.add(Label::new("📓").text_style(egui::TextStyle::Heading));
});
// controls
ui.with_layout(Layout::right_to_left(), |ui| {
let close_btn = ui.add(Button::new("❌").text_style(egui::TextStyle::Body));
if close_btn.clicked() {
frame.quit();
}
let refresh_btn = ui.add(Button::new("🔄").text_style(egui::TextStyle::Body));
let theme_btn = ui.add(
Button::new({
if self.config.dark_mode {
"🌞"
} else {
"🌙"
}
})
.text_style(egui::TextStyle::Body),
);
if theme_btn.clicked() {
self.config.dark_mode = !self.config.dark_mode;
}
});
});
ui.add_space(10.);
});
}
pub fn preload_articles(&mut self) {
if let Some(rx) = &self.news_rx {
match rx.try_recv() {
Ok(news_data) => {
self.articles.push(news_data);
}
Err(e) => {
tracing::warn!("Error receiving msg: {}", e);
}
}
}
}
pub fn render_config(&mut self, ctx: &CtxRef) {
Window::new("Configuration").show(ctx, |ui| {
ui.label("Enter you API_KEY for newsapi.org");
let text_input = ui.text_edit_singleline(&mut self.config.api_key);
if text_input.lost_focus() && ui.input().key_pressed(egui::Key::Enter) {
if let Err(e) = confy::store(
"headlines",
HeadlinesConfig {
dark_mode: self.config.dark_mode,
api_key: self.config.api_key.to_string(),
},
) {
tracing::error!("Failed saving app state: {}", e);
}
self.api_key_initialized = true;
if let Some(tx) = &self.app_tx {
tx.send(Msg::ApiKeySet(self.config.api_key.to_string()));
}
tracing::error!("api key set");
}
tracing::error!("{}", &self.config.api_key);
ui.label("If you havn't registered for the API_KEY, head over to");
ui.hyperlink("https://newsapi.org");
});
}
}
| true |
65a144acec2e7bbf7cfdd0d54c72110a7e370dab | Rust | Mokosha/pbrt_rust | /src/diff_geom.rs | UTF-8 | 4,676 | 2.5625 | 3 | [] | no_license |
use geometry::normal::Normal;
use geometry::normal::Normalize;
use geometry::point::Point;
use geometry::vector::Cross;
use geometry::vector::Dot;
use geometry::vector::Vector;
use shape::ShapeBase;
use ray::RayDifferential;
use utils::solve_linear_system_2x2;
#[derive(Debug, PartialEq, PartialOrd, Clone)]
pub struct DifferentialGeometry {
pub p: Point,
pub nn: Normal,
pub u: f32,
pub v: f32,
pub shape: Option<ShapeBase>,
pub dpdu: Vector,
pub dpdv: Vector,
pub dndu: Normal,
pub dndv: Normal,
pub dpdx: Vector,
pub dpdy: Vector,
pub dudx: f32,
pub dudy: f32,
pub dvdx: f32,
pub dvdy: f32,
}
impl DifferentialGeometry {
pub fn new() -> DifferentialGeometry {
DifferentialGeometry {
p: Point::new(),
nn: Normal::new(),
u: 0f32,
v: 0f32,
shape: None,
dpdu: Vector::new(),
dpdv: Vector::new(),
dndu: Normal::new(),
dndv: Normal::new(),
dpdx: Vector::new(),
dpdy: Vector::new(),
dudx: 0.0,
dudy: 0.0,
dvdx: 0.0,
dvdy: 0.0,
}
}
pub fn new_with(_p: Point, _dpdu: Vector, _dpdv: Vector,
_dndu: Normal, _dndv: Normal, _u: f32, _v: f32,
_shape: Option<ShapeBase>) -> DifferentialGeometry {
let mut norm = _dpdu.cross_with(&_dpdv).normalize();
if let &Some(ref s) = &_shape {
if s.reverse_orientation ^ s.transform_swaps_handedness {
norm = norm * -1f32;
}
}
DifferentialGeometry {
p: _p,
nn: Normal::from(norm),
u: _u,
v: _v,
shape: _shape,
dpdu: _dpdu,
dpdv: _dpdv,
dndu: _dndu,
dndv: _dndv,
dpdx: Vector::new(),
dpdy: Vector::new(),
dudx: 0.0,
dudy: 0.0,
dvdx: 0.0,
dvdy: 0.0,
}
}
pub fn compute_differentials(&mut self, ray: &RayDifferential) {
if !ray.has_differentials {
self.dpdx = Vector::new();
self.dpdy = Vector::new();
self.dudx = 0.0;
self.dudy = 0.0;
self.dvdx = 0.0;
self.dvdy = 0.0;
return;
}
// Compute auxiliary intersection points with plane
let nvec = Vector::from(self.nn.clone());
let d = -(nvec.dot(&Vector::from(self.p.clone())));
let px = {
let rxv = Vector::from(ray.rx_origin.clone());
let tx = {
let ndrx = -(nvec.dot(&rxv) + d);
let ndrd = nvec.dot(&ray.rx_dir);
ndrx / ndrd
};
&ray.rx_origin + tx * &ray.rx_dir
};
let py = {
let ryv = Vector::from(ray.ry_origin.clone());
let ty = {
let ndry = -(nvec.dot(&ryv) + d);
let ndrd = nvec.dot(&ray.ry_dir);
ndry / ndrd
};
&ray.ry_origin + ty * &ray.ry_dir
};
self.dpdx = px - &self.p;
self.dpdy = py - &self.p;
// Compute (u, v) offsets at auxiliary points
// Initialize A, Bx, and By matrices for offset computation
let axes =
if self.nn.x.abs() > self.nn.y.abs() &&
self.nn.x.abs() > self.nn.z.abs() {
[1, 2]
} else if self.nn.y.abs() > self.nn.z.abs() {
[0, 2]
} else {
[0, 1]
};
// Initialize matrices for chosen projection plane
let a = [[self.dpdu[axes[0]], self.dpdv[axes[0]]],
[self.dpdu[axes[1]], self.dpdv[axes[1]]]];
let bx = [self.dpdx[axes[0]], self.dpdx[axes[1]]];
let by = [self.dpdy[axes[0]], self.dpdy[axes[1]]];
if let Some((x, y)) = solve_linear_system_2x2(a.clone(), bx) {
self.dudx = x;
self.dvdx = y;
} else {
self.dudx = 0.0;
self.dvdx = 0.0;
}
if let Some((x, y)) = solve_linear_system_2x2(a.clone(), by) {
self.dudy = x;
self.dvdy = y;
} else {
self.dudy = 0.0;
self.dvdy = 0.0;
}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
#[ignore]
fn it_can_be_created() {
unimplemented!()
}
#[test]
#[ignore]
fn it_can_compute_differentials() {
// Try with a different plane example for each axis like on p. 506
unimplemented!()
}
}
| true |
daae6f92f5a78b6667f160d8365b1de777368424 | Rust | Tembocs/rome | /crates/rome_cli/tests/main.rs | UTF-8 | 10,997 | 2.796875 | 3 | ["MIT"] | permissive |
use std::{ffi::OsString, path::Path};
use pico_args::Arguments;
use rome_cli::{run_cli, CliSession, Termination};
use rome_console::BufferConsole;
use rome_core::{App, DynRef};
use rome_fs::{FileSystem, MemoryFileSystem};
const UNFORMATTED: &str = " statement( ) ";
const FORMATTED: &str = "statement();\n";
#[test]
fn test_format_print() {
let mut fs = MemoryFileSystem::default();
let file_path = Path::new("format.js");
fs.insert(file_path.into(), UNFORMATTED.as_bytes());
let result = run_cli(CliSession {
app: App::with_filesystem_and_console(
DynRef::Borrowed(&mut fs),
DynRef::Owned(Box::new(BufferConsole::default())),
),
args: Arguments::from_vec(vec![OsString::from("format"), file_path.as_os_str().into()]),
});
assert!(result.is_ok(), "run_cli returned {result:?}");
let mut file = fs
.open(file_path)
.expect("formatting target file was removed by the CLI");
let mut content = String::new();
file.read_to_string(&mut content)
.expect("failed to read file from memory FS");
assert_eq!(content, UNFORMATTED);
}
#[test]
fn test_format_write() {
let mut fs = MemoryFileSystem::default();
let file_path = Path::new("format.js");
fs.insert(file_path.into(), UNFORMATTED.as_bytes());
let mut console = BufferConsole::default();
let app =
App::with_filesystem_and_console(DynRef::Borrowed(&mut fs), DynRef::Borrowed(&mut console));
let result = run_cli(CliSession {
app,
args: Arguments::from_vec(vec![
OsString::from("format"),
OsString::from("--write"),
file_path.as_os_str().into(),
]),
});
assert!(result.is_ok(), "run_cli returned {result:?}");
let mut file = fs
.open(file_path)
.expect("formatting target file was removed by the CLI");
let mut content = String::new();
file.read_to_string(&mut content)
.expect("failed to read file from memory FS");
assert_eq!(content, FORMATTED);
assert_eq!(console.buffer.len(), 1);
}
#[test]
fn test_format_ci() {
let mut fs = MemoryFileSystem::default();
let file_path = Path::new("format.js");
fs.insert(file_path.into(), FORMATTED.as_bytes());
let mut console = BufferConsole::default();
let app =
App::with_filesystem_and_console(DynRef::Borrowed(&mut fs), DynRef::Borrowed(&mut console));
let result = run_cli(CliSession {
app,
args: Arguments::from_vec(vec![
OsString::from("format"),
OsString::from("--ci"),
file_path.as_os_str().into(),
]),
});
assert!(result.is_ok(), "run_cli returned {result:?}");
let mut file = fs
.open(file_path)
.expect("formatting target file was removed by the CLI");
let mut content = String::new();
file.read_to_string(&mut content)
.expect("failed to read file from memory FS");
assert_eq!(content, FORMATTED);
assert_eq!(console.buffer.len(), 1);
}
#[test]
fn test_unknown_command() {
let result = run_cli(CliSession {
app: App::with_filesystem_and_console(
DynRef::Owned(Box::new(MemoryFileSystem::default())),
DynRef::Owned(Box::new(BufferConsole::default())),
),
args: Arguments::from_vec(vec![OsString::from("unknown")]),
});
match result {
Err(Termination::UnknownCommand { command }) => assert_eq!(command, "unknown"),
_ => panic!("run_cli returned {result:?} for an unknown command, expected an error"),
}
}
#[test]
fn test_unknown_command_help() {
let result = run_cli(CliSession {
app: App::with_filesystem_and_console(
DynRef::Owned(Box::new(MemoryFileSystem::default())),
DynRef::Owned(Box::new(BufferConsole::default())),
),
args: Arguments::from_vec(vec![OsString::from("unknown"), OsString::from("--help")]),
});
match result {
Err(Termination::UnknownCommandHelp { command }) => assert_eq!(command, "unknown"),
_ => panic!("run_cli returned {result:?} for an unknown command help, expected an error"),
}
}
#[test]
fn test_indent_style_parse_errors() {
let result = run_cli(CliSession {
app: App::with_filesystem_and_console(
DynRef::Owned(Box::new(MemoryFileSystem::default())),
DynRef::Owned(Box::new(BufferConsole::default())),
),
args: Arguments::from_vec(vec![
OsString::from("format"),
OsString::from("--indent-style"),
OsString::from("invalid"),
OsString::from("file.js"),
]),
});
match result {
Err(Termination::ParseError { argument, .. }) => assert_eq!(argument, "--indent-style"),
_ => panic!("run_cli returned {result:?} for an invalid argument value, expected an error"),
}
}
#[test]
fn test_indent_size_parse_errors_negative() {
let result = run_cli(CliSession {
app: App::with_filesystem_and_console(
DynRef::Owned(Box::new(MemoryFileSystem::default())),
DynRef::Owned(Box::new(BufferConsole::default())),
),
args: Arguments::from_vec(vec![
OsString::from("format"),
OsString::from("--indent-size"),
OsString::from("-1"),
OsString::from("file.js"),
]),
});
match result {
Err(Termination::ParseError { argument, .. }) => assert_eq!(argument, "--indent-size"),
_ => panic!("run_cli returned {result:?} for an invalid argument value, expected an error"),
}
}
#[test]
fn test_indent_size_parse_errors_overflow() {
let result = run_cli(CliSession {
app: App::with_filesystem_and_console(
DynRef::Owned(Box::new(MemoryFileSystem::default())),
DynRef::Owned(Box::new(BufferConsole::default())),
),
args: Arguments::from_vec(vec![
OsString::from("format"),
OsString::from("--indent-size"),
OsString::from("257"),
OsString::from("file.js"),
]),
});
match result {
Err(Termination::ParseError { argument, .. }) => assert_eq!(argument, "--indent-size"),
_ => panic!("run_cli returned {result:?} for an invalid argument value, expected an error"),
}
}
#[test]
fn test_line_width_parse_errors_negative() {
let result = run_cli(CliSession {
app: App::with_filesystem_and_console(
DynRef::Owned(Box::new(MemoryFileSystem::default())),
DynRef::Owned(Box::new(BufferConsole::default())),
),
args: Arguments::from_vec(vec![
OsString::from("format"),
OsString::from("--line-width"),
OsString::from("-1"),
OsString::from("file.js"),
]),
});
match result {
Err(Termination::ParseError { argument, .. }) => assert_eq!(argument, "--line-width"),
_ => panic!("run_cli returned {result:?} for an invalid argument value, expected an error"),
}
}
#[test]
fn test_line_width_parse_errors_overflow() {
let result = run_cli(CliSession {
app: App::with_filesystem_and_console(
DynRef::Owned(Box::new(MemoryFileSystem::default())),
DynRef::Owned(Box::new(BufferConsole::default())),
),
args: Arguments::from_vec(vec![
OsString::from("format"),
OsString::from("--line-width"),
OsString::from("321"),
OsString::from("file.js"),
]),
});
match result {
Err(Termination::ParseError { argument, .. }) => assert_eq!(argument, "--line-width"),
_ => panic!("run_cli returned {result:?} for an invalid argument value, expected an error"),
}
}
#[test]
fn test_unexpected_argument() {
let result = run_cli(CliSession {
app: App::with_filesystem_and_console(
DynRef::Owned(Box::new(MemoryFileSystem::default())),
DynRef::Owned(Box::new(BufferConsole::default())),
),
args: Arguments::from_vec(vec![
OsString::from("format"),
OsString::from("--unknown"),
OsString::from("file.js"),
]),
});
match result {
Err(Termination::UnexpectedArgument { argument, .. }) => {
assert_eq!(argument, OsString::from("--unknown"))
}
_ => panic!("run_cli returned {result:?} for an unknown argument, expected an error"),
}
}
#[test]
fn test_missing_argument() {
let result = run_cli(CliSession {
app: App::with_filesystem_and_console(
DynRef::Owned(Box::new(MemoryFileSystem::default())),
DynRef::Owned(Box::new(BufferConsole::default())),
),
args: Arguments::from_vec(vec![OsString::from("format"), OsString::from("--ci")]),
});
match result {
Err(Termination::MissingArgument { argument }) => assert_eq!(argument, "<INPUT>"),
_ => panic!("run_cli returned {result:?} for a missing argument, expected an error"),
}
}
#[test]
fn test_formatting_error() {
let mut fs = MemoryFileSystem::default();
let file_path = Path::new("format.js");
fs.insert(file_path.into(), UNFORMATTED.as_bytes());
let result = run_cli(CliSession {
app: App::with_filesystem_and_console(
DynRef::Owned(Box::new(fs)),
DynRef::Owned(Box::new(BufferConsole::default())),
),
args: Arguments::from_vec(vec![
OsString::from("format"),
OsString::from("--ci"),
file_path.as_os_str().into(),
]),
});
match result {
Err(Termination::FormattingError) => {}
_ => panic!("run_cli returned {result:?} for a failed CI check, expected an error"),
}
}
#[test]
fn test_empty_arguments() {
let result = run_cli(CliSession {
app: App::with_filesystem_and_console(
DynRef::Owned(Box::new(MemoryFileSystem::default())),
DynRef::Owned(Box::new(BufferConsole::default())),
),
args: Arguments::from_vec(vec![OsString::from("format")]),
});
match result {
Err(Termination::EmptyArguments) => {}
_ => panic!("run_cli returned {result:?} for a failed CI check, expected an error"),
}
}
#[test]
fn test_incompatible_arguments() {
let result = run_cli(CliSession {
app: App::with_filesystem_and_console(
DynRef::Owned(Box::new(MemoryFileSystem::default())),
DynRef::Owned(Box::new(BufferConsole::default())),
),
args: Arguments::from_vec(vec![
OsString::from("format"),
OsString::from("--write"),
OsString::from("--ci"),
OsString::from("format.js"),
]),
});
match result {
Err(Termination::IncompatibleArguments("--write", "--ci")) => {}
_ => panic!("run_cli returned {result:?} for a failed CI check, expected an error"),
}
}
| true |
36d01a787bd5efca67561738696017101d50b4bb | Rust | myuon/atcoder | /dp/src/bin/j.rs | UTF-8 | 2,362 | 3 | 3 | [] | no_license |
#[macro_export]
macro_rules! input {
(source = $s:expr, $($r:tt)*) => {
let mut iter = $s.split_whitespace();
input_inner!{iter, $($r)*}
};
($($r:tt)*) => {
let s = {
use std::io::Read;
let mut s = String::new();
std::io::stdin().read_to_string(&mut s).unwrap();
s
};
let mut iter = s.split_whitespace();
input_inner!{iter, $($r)*}
};
}
#[macro_export]
macro_rules! input_inner {
($iter:expr) => {};
($iter:expr, ) => {};
($iter:expr, $var:ident : $t:tt $($r:tt)*) => {
let $var = read_value!($iter, $t);
input_inner!{$iter $($r)*}
};
}
#[macro_export]
macro_rules! read_value {
($iter:expr, ( $($t:tt),* )) => {
( $(read_value!($iter, $t)),* )
};
($iter:expr, [ $t:tt ; $len:expr ]) => {
(0..$len).map(|_| read_value!($iter, $t)).collect::<Vec<_>>()
};
($iter:expr, chars) => {
read_value!($iter, String).chars().collect::<Vec<char>>()
};
($iter:expr, usize1) => {
read_value!($iter, usize) - 1
};
($iter:expr, $t:ty) => {
$iter.next().unwrap().parse::<$t>().expect("Parse error")
};
}
fn solve(n: usize, an: Vec<usize>) -> f64 {
let mut count = vec![0, 0, 0];
for a in an {
count[a - 1] += 1;
}
let mut dp = vec![vec![vec![0.0; n + 1]; n + 1]; n + 1];
for k in 0..=count[2] {
for j in 0..=(count[1] + count[2]) {
for i in 0..=n {
if i == 0 && j == 0 && k == 0 {
continue;
}
if n < i + j + k {
continue;
}
let z = n - i - j - k;
if i > 0 {
dp[i][j][k] += (i as f64 / (n - z) as f64) * dp[i - 1][j][k];
}
if j > 0 {
dp[i][j][k] += (j as f64 / (n - z) as f64) * dp[i + 1][j - 1][k];
}
if k > 0 {
dp[i][j][k] += (k as f64 / (n - z) as f64) * dp[i][j + 1][k - 1];
}
dp[i][j][k] += n as f64 / (n - z) as f64;
}
}
}
dp[count[0]][count[1]][count[2]]
}
fn main() {
input! {
n: usize,
an: [usize; n],
}
println!("{}", solve(n, an));
}
| true |
9da6bb79815a5031a4338720d40e29fd1f2fea62 | Rust | sshyran/diesel | /diesel/src/pg/value.rs | UTF-8 | 1,165 | 2.75 | 3 | ["MIT", "Apache-2.0", "LicenseRef-scancode-unknown-license-reference"] | permissive |
use super::Pg;
use crate::backend::BinaryRawValue;
use std::num::NonZeroU32;
use std::ops::Range;
/// Raw postgres value as received from the database
#[derive(Clone, Copy)]
#[allow(missing_debug_implementations)]
pub struct PgValue<'a> {
raw_value: &'a [u8],
type_oid: NonZeroU32,
}
impl<'a> BinaryRawValue<'a> for Pg {
fn as_bytes(value: PgValue<'a>) -> &'a [u8] {
value.raw_value
}
}
impl<'a> PgValue<'a> {
#[cfg(test)]
pub(crate) fn for_test(raw_value: &'a [u8]) -> Self {
Self {
raw_value,
type_oid: NonZeroU32::new(42).unwrap(),
}
}
pub(crate) fn new(raw_value: &'a [u8], type_oid: NonZeroU32) -> Self {
Self {
raw_value,
type_oid,
}
}
/// Get the underlying raw byte representation
pub fn as_bytes(&self) -> &[u8] {
self.raw_value
}
/// Get the type oid of this value
pub fn get_oid(&self) -> NonZeroU32 {
self.type_oid
}
pub(crate) fn subslice(&self, range: Range<usize>) -> Self {
Self {
raw_value: &self.raw_value[range],
..*self
}
}
}
| true |
e9f1cdeaef2c7e98a4be6088318f5877a83e739f | Rust | hooops/crypto-crawler-rs | /crypto-msg-parser/src/exchanges/binance/mod.rs | UTF-8 | 1,316 | 2.65625 | 3 | ["Apache-2.0"] | permissive |
mod binance_all;
mod binance_option;
use std::collections::HashMap;
use crypto_market_type::MarketType;
use crate::{FundingRateMsg, OrderBookMsg, TradeMsg};
use serde_json::{Result, Value};
pub(crate) fn extract_symbol(_market_type: MarketType, msg: &str) -> Option<String> {
let obj = serde_json::from_str::<HashMap<String, Value>>(msg).unwrap();
let data = obj.get("data").unwrap();
let symbol = data["s"].as_str().unwrap();
Some(symbol.to_string())
}
pub(crate) fn parse_trade(market_type: MarketType, msg: &str) -> Result<Vec<TradeMsg>> {
if market_type == MarketType::EuropeanOption {
binance_option::parse_trade(msg)
} else {
binance_all::parse_trade(market_type, msg)
}
}
pub(crate) fn parse_funding_rate(
market_type: MarketType,
msg: &str,
) -> Result<Vec<FundingRateMsg>> {
if market_type == MarketType::InverseSwap || market_type == MarketType::LinearSwap {
binance_all::parse_funding_rate(market_type, msg)
} else {
panic!("Binance {} does NOT have funding rates", market_type);
}
}
pub(crate) fn parse_l2(market_type: MarketType, msg: &str) -> Result<Vec<OrderBookMsg>> {
if market_type == MarketType::EuropeanOption {
Ok(Vec::new())
} else {
binance_all::parse_l2(market_type, msg)
}
}
| true |
ad95a7823bec266362f77c3c009a21e4b9ad7f93 | Rust | Geigerkind/decision-tree-based-hand-gesture-recognition | /lib_evaluation/src/entities/evaluation_entry.rs | UTF-8 | 1,156 | 3.40625 | 3 | [] | no_license |
use crate::value_objects::EvaluationEntryKey;
/// The structure holding the necessary information for each evaluation entry.
#[derive(Debug, Getters)]
pub struct EvaluationEntry {
key: EvaluationEntryKey,
true_positive: u32,
false_negative: u32
}
impl EvaluationEntry {
/// Creates a new instance.
pub fn new(key: EvaluationEntryKey) -> Self {
EvaluationEntry {
key,
true_positive: 0,
false_negative: 0
}
}
/// Increment function for true_positive
pub fn add_true_positive(&mut self) {
self.true_positive += 1;
}
/// Increment function for false_negative.
pub fn add_false_negative(&mut self) {
self.false_negative += 1;
}
/// Return the accuracy for this entry.
/// If nothing was collected, e.g. true_positive + false_negative = 0, return None.
/// Otherwise return a value between 0 and 1
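/// (Worked example: 3 true positives and 1 false negative give 3 / (3 + 1) = 0.75.)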
pub fn accuracy(&self) -> Option<f64> {
let total = self.true_positive + self.false_negative;
if total == 0 {
return None;
}
Some((self.true_positive as f64) / (total as f64))
}
}
| true |
20d572b808ae8a8a81cb6fce07185822cbd9ef13 | Rust | dvc94ch/redpitaya-mercury | /src/la_rle.rs | UTF-8 | 1,226 | 2.8125 | 3 | ["ISC"] | permissive |
use uio::*;
use volatile_register::{RO, RW};
#[repr(C)]
pub struct LaRleRegs {
/// RLE mode
cfg_rle: RW<u32>,
/// Current counter
status_current: RO<u32>,
/// Last counter
status_last: RO<u32>,
}
impl LaRleRegs {
/// Reset the RLE configuration to its default (RLE disabled).
/// Note: a `Default` impl is not possible here, since `default()` takes no
/// receiver and the memory-mapped registers cannot be constructed directly.
pub fn reset(&mut self) {
unsafe { self.cfg_rle.write(0); }
}
}
impl Show for LaRleRegs {
fn show(&self) {
println!("cfg_rle = {:x}", self.cfg_rle.read());
println!("status_current = {:x}", self.status_current.read());
println!("status_last = {:x}", self.status_last.read());
}
}
pub trait LaRleRegsAPI {
/// Get RLE mode.
fn rle(&self) -> bool;
/// Set RLE mode.
fn set_rle(&mut self, rle: bool);
/// Current data stream length counter.
fn counter_current(&self) -> u32;
/// Last data stream length counter.
fn counter_last(&self) -> u32;
}
impl LaRleRegsAPI for LaRleRegs {
fn rle(&self) -> bool {
self.cfg_rle.read() > 0
}
fn set_rle(&mut self, rle: bool) {
unsafe { self.cfg_rle.write(rle as u32); }
}
fn counter_current(&self) -> u32 {
self.status_current.read()
}
fn counter_last(&self) -> u32 {
self.status_last.read()
}
}
| true |
de61a335efb3254a5a2411c2ea2dc90a8220c177 | Rust | irevoire/bug | /src/main.rs | UTF-8 | 833 | 2.5625 | 3 | [] | no_license |
mod commands;
use commands::bug::*;
use serenity::{
framework::{standard::macros::group, StandardFramework},
model::gateway::Ready,
prelude::*,
};
use std::env;
struct Handler;
impl EventHandler for Handler {
fn ready(&self, _: Context, ready: Ready) {
println!("{} is connected!", ready.user.name);
}
}
#[group]
#[commands(bug)]
struct General;
fn main() {
let token = env::var("DISCORD_TOKEN").expect("Expected a token in the environment");
let mut client = Client::new(&token, Handler).expect("Err creating client");
client.with_framework(
StandardFramework::new()
.configure(|c| c.prefix("!").delimiters(vec![", ", ",", " "]))
.group(&GENERAL_GROUP),
);
if let Err(why) = client.start() {
println!("Client error: {:?}", why);
}
}
| true |
d5147eeb9a76000b65b8c4d98f87983cc75e1120 | Rust | hsmtkk/stunning-couscous | /src/adapter/square_peg.rs | UTF-8 | 232 | 3.359375 | 3 | [] | no_license |
pub struct SquarePeg {
width :f64,
}
impl SquarePeg {
#[allow(dead_code)]
pub fn new(width:f64) -> SquarePeg {
SquarePeg{width}
}
pub fn get_width(&self) -> f64 {
self.width
}
}
| true |
a111c11371f97107ec99f430aa119df0e76eee16 | Rust | lulugo19/advent-of-code-2020 | /src/day18.rs | UTF-8 | 2,817 | 3.546875 | 4 | [] | no_license |
use std::iter::Peekable;
#[derive(Debug, PartialEq, Eq)]
pub enum Operator {
Plus,
Times,
}
#[derive(Debug, PartialEq, Eq)]
pub enum Token {
Number(u64),
LeftParen,
RightParen,
Operator(Operator),
}
#[aoc_generator(day18)]
pub fn input_generator(input: &str) -> Vec<Vec<Token>> {
input
.lines()
.map(|line| {
line
.chars()
.filter_map(|c| match c {
'(' => Some(Token::LeftParen),
')' => Some(Token::RightParen),
'+' => Some(Token::Operator(Operator::Plus)),
'*' => Some(Token::Operator(Operator::Times)),
// There are only one-digit numbers, so this simplification is alright
_ if c.is_numeric() => Some(Token::Number(c.to_string().parse().unwrap())),
' ' => None,
_ => panic!("Invalid character '{}'!", c),
})
.collect()
})
.collect()
}
#[aoc(day18, part1)]
pub fn solve_part1(lines: &[Vec<Token>]) -> u64 {
evaluate_lines(lines, false)
}
#[aoc(day18, part2)]
pub fn solve_part2(lines: &[Vec<Token>]) -> u64 {
evaluate_lines(lines, true)
}
fn evaluate_lines(lines: &[Vec<Token>], is_part2: bool) -> u64 {
lines
.iter()
.map(|l| evaluate(&mut l.iter().peekable(), is_part2))
.sum()
}
fn evaluate<'a>(tokens: &mut Peekable<impl Iterator<Item = &'a Token>>, is_part2: bool) -> u64 {
let mut a = operand(tokens, is_part2);
loop {
if let Some(op) = operator(tokens) {
let b = operand(tokens, is_part2);
match op {
Operator::Plus => a += b,
Operator::Times => a *= b,
}
} else {
break;
}
}
a
}
fn operand<'a>(tokens: &mut Peekable<impl Iterator<Item = &'a Token>>, is_part2: bool) -> u64 {
let a = match tokens.next().unwrap() {
Token::Number(x) => *x,
Token::LeftParen => evaluate(tokens, is_part2),
_ => panic!("Expected operand!"),
};
// modification for part2: if there is a plus operator we add the next operand
if is_part2 && tokens.peek() == Some(&&Token::Operator(Operator::Plus)) {
tokens.next();
a + operand(tokens, is_part2)
} else {
a
}
}
fn operator<'a>(tokens: &mut Peekable<impl Iterator<Item = &'a Token>>) -> Option<&'a Operator> {
match tokens.next() {
Some(Token::Operator(op)) => Some(op),
_ => None,
}
}
#[cfg(test)]
mod test {
use super::{input_generator, solve_part2};
fn test_part2(input: &str, expected: u64) {
assert_eq!(solve_part2(&input_generator(input)), expected)
}
#[test]
fn test_day18_part2() {
test_part2("1 + (2 * 3) + (4 * (5 + 6))", 51);
test_part2("2 * 3 + (4 * 5)", 46);
test_part2("5 + (8 * 3 + 9 + 3 * 4 * 3)", 1445);
test_part2("5 * 9 * (7 * 3 * 3 + 9 * 3 + (8 + 6 * 4))", 669060);
test_part2("((2 + 4 * 9) * (6 + 9 * 8 + 6) + 6) + 2 + 4 * 2", 23340);
}
}
| true |
008244485c68f08d0f16f1b8b551a8651d108125 | Rust | smb374/my_dots | /rust_tools/mpd_cover2/src/main.rs | UTF-8 | 3,027 | 3.015625 | 3 | [] | no_license |
extern crate image;
extern crate mpd;
extern crate pcre2;
use std::fs::{ self, File, OpenOptions };
use std::path::Path;
use std::str;
use mpd::{ Client, Song };
use pcre2::bytes::Regex;
use image::imageops::FilterType;
fn convert(cp: &Path, op: &Path) -> Result<(), ()> {
let im = image::open(cp).unwrap();
let resized = im.resize(320, 160, FilterType::Gaussian);
match resized.save(op) {
Ok(_) => Ok(()),
Err(err) => panic!("Can't save image! Error: {}", err),
}
}
fn process(cp: &Path, op: &Path, p: &Path) -> std::io::Result<()> {
match OpenOptions::new().read(true).write(false).open(cp) {
Ok(_) => {
convert(&cp, &op).expect("Can't convert!");
},
Err(_) => {
to_placeholder(&p, &op)?;
},
}
Ok(())
}
fn to_placeholder(p: &Path, o: &Path) -> std::io::Result<()> {
let data = fs::read(p)?;
if OpenOptions::new().read(true).write(false).open(o).is_err() {
File::create(o)?;
}
fs::write(o, data)?;
Ok(())
}
fn ls(cap: &Path) -> std::io::Result<String> {
let mut dir_vec: Vec<String> = Vec::new();
for x in fs::read_dir(cap)? {
let dir = x?.path();
let path: &Path = dir.as_ref();
let s = path.to_str().unwrap().to_string();
dir_vec.push(s);
}
Ok(dir_vec.join(" "))
}
fn main() -> std::io::Result<()> {
let mut conn = Client::connect("127.0.0.1:6600").expect("Can't connect to mpd server!");
let current_song: Song = conn.currentsong().unwrap().unwrap();
let current_file_path =
Path::new(Box::leak(format!("/home/thomas/Music/{}", current_song.file).into_boxed_str()));
let out_name = "cover_mpd.png";
let out_path = Path::new(
Box::leak(
format!("/tmp/{}", out_name).into_boxed_str()
)
); // use Box::leak() to leak memory from String to get &str
let placeholder = Path::new("/home/thomas/placeholder.png");
let current_album_path = current_file_path.parent().expect("Error getting parent path!");
let re = Regex::new(r"(cover|folder)[0-9]?\.(jpg|jpeg|png|gif)").expect("Error creating re!"); // generate a pcre2 regex string.
let mut cover_path = placeholder;
if let Some(c) = re.captures(&ls(&current_album_path).unwrap().into_bytes()).expect("Rabbit hole a") { // captures string in the contents.
let cover_file_slice = c[0].to_vec(); // turn Capture -> Vec<u8>
let cover_file = str::from_utf8(&cover_file_slice).expect("Error convert cover str slice!"); // Vec<u8> -> &[u8] -> String
cover_path =
Path::new(Box::leak(format!("{}/{}", current_album_path.to_string_lossy(), cover_file).into_boxed_str()));
}// &ls(&current_album_path).unwrap().into_bytes() returns the byte string of the current directory's file names joined with spaces.
if cover_path == placeholder {
to_placeholder(&placeholder, &out_path)?;
}
else {
process(&cover_path, &out_path, &placeholder)?;
}
Ok(())
}
| true |
e55671bed3889cecee53818cb313a317ff8f7a82 | Rust | cristicismas/rscalc | /src/calculator.rs | UTF-8 | 1,618 | 3.5 | 4 | [] | no_license |
use super::SYMBOLS;
use std::num::ParseFloatError;
pub fn calculate(elements: Vec<String>) -> Result<String, ParseFloatError> {
let mut result: f64 = elements[0].parse::<f64>()?;
for i in 0..elements.len() {
let current_element = &elements[i][..];
if SYMBOLS.contains(&current_element) {
let next_element = elements[i + 1].parse::<f64>()?;
match current_element {
"+" => result += next_element,
"-" => result -= next_element,
"/" => result /= next_element,
"*" => result *= next_element,
"%" => result %= next_element,
_ => {}
}
}
}
return Ok(result.to_string());
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_addition() {
let elements = vec![String::from("4"), String::from("+"), String::from("5")];
assert_eq!(calculate(elements), Ok(String::from("9")));
}
#[test]
fn test_subtraction() {
let elements = vec![String::from("10"), String::from("-"), String::from("2")];
assert_eq!(calculate(elements), Ok(String::from("8")));
}
#[test]
fn test_division() {
let elements = vec![String::from("10"), String::from("/"), String::from("5")];
assert_eq!(calculate(elements), Ok(String::from("2")));
}
#[test]
fn test_multiplication() {
let elements = vec![String::from("3"), String::from("*"), String::from("4")];
assert_eq!(calculate(elements), Ok(String::from("12")));
}
#[test]
fn test_modulo() {
let elements = vec![String::from("15"), String::from("%"), String::from("10")];
assert_eq!(calculate(elements), Ok(String::from("5")));
}
}
| true |
d8ab81d1fb7f9e97e967d38806f6cd4481379a3e | Rust | rodoufu/challenges | /leetCode/string/count-pairs-of-similar-strings.rs | UTF-8 | 1,511 | 3.71875 | 4 | [] | no_license |
// https://leetcode.com/problems/count-pairs-of-similar-strings/
use std::{
collections::{
HashMap,
HashSet,
},
iter::FromIterator,
};
#[derive(Debug)]
struct Word {
//letter_count: HashMap<char, usize>,
letters: HashSet<char>,
}
impl From<String> for Word {
fn from(word: String) -> Self {
// let mut letter_count = HashMap::new();
// for letter in word.chars() {
// *letter_count.entry(letter).or_default() += 1;
// }
// Self {
// letter_count,
// }
Self {
letters: HashSet::from_iter(word.chars()),
}
}
}
impl Word {
fn contains(&self, other: &Self) -> bool {
for letter in &other.letters {
if !self.letters.contains(letter) {
return false;
}
}
true
}
fn is_similar(&self, other: &Self) -> bool {
self.contains(other) && other.contains(self)
}
}
// `Solution` is provided by the LeetCode harness; declare it here so the file compiles standalone.
struct Solution;
impl Solution {
pub fn similar_pairs(words: Vec<String>) -> i32 {
let words = words.into_iter().map(Word::from).collect::<Vec<Word>>();
let mut count = 0;
let len_w = words.len();
for i in 0..len_w-1 {
for j in i+1..len_w {
if words[i].is_similar(&words[j]) {
// println!("{:?} is similar to {:?}", words[i], words[j]);
count += 1;
}
}
}
count
}
}
| true |
5a88a904d6d331cbbb8ac32d5591c0b3c3e571a9 | Rust | technodeguy/rest-api-rust | /src/routes/book.rs | UTF-8 | 2,538 | 2.640625 | 3 | [] | no_license |
use diesel::mysql::MysqlConnection;
use hyper::{Body, Request, StatusCode, Chunk};
use futures::{future, Future, Stream};
use crate::types::response::*;
use crate::utils::{response::{create_response, create_error_message}, validator::parse_form};
use crate::consts::ErrorCode;
use crate::models::book::{Book, NewBook};
use crate::dto::{IdDto, NewBookDto};
pub fn get_book_by_id(req: Request<Body>, db_conn: &MysqlConnection) -> ResponseFuture {
let books = vec!["Anna Karenina", "Kobsar"];
let response = req.into_body()
.concat2()
.and_then(move |chunk: Chunk| parse_form::<IdDto>(&chunk))
.and_then(move |result| {
let response = match result {
Ok(data) => {
info!("Preparing to succesfull server response, {:#?}", data);
// create_response(StatusCode::CREATED, books.get(0).unwrap())
unimplemented!()
}
Err(error) => {
error!("Preparing to send bad_request server response");
create_response::<String>(StatusCode::BAD_REQUEST, create_error_message(error))
}
};
Ok(response)
});
Box::new(response)
}
pub fn get_all_books(req: Request<Body>, db_conn: &MysqlConnection) -> ResponseFuture {
let books = Book::find_all(db_conn);
let response = if let Ok(json) = serde_json::to_string(&books) {
create_response(StatusCode::OK, json)
} else {
create_response(StatusCode::INTERNAL_SERVER_ERROR, create_error_message(ErrorCode::INTERNAL))
};
Box::new(future::ok(response))
}
pub fn create_book(req: Request<Body>, db_conn: &MysqlConnection) -> ResponseFuture {
let response = req.into_body()
.concat2()
.and_then(move |chunk: Chunk| parse_form::<NewBookDto>(&chunk))
.and_then(move |result| {
let response = match result {
Ok(data) => {
let book = Book::insert(&NewBook::from(data) , db_conn);
info!("Book inserted, {:#?}", book);
create_response(StatusCode::CREATED, vec!(book as u8))
}
Err(error) => {
error!("Preparing to send bad_request server response");
create_response::<String>(StatusCode::BAD_REQUEST, create_error_message(error))
}
};
Ok(response)
});
Box::new(response)
}
| true |
9be96666b9840bf3b5806acc9f9fd57d65210296 | Rust | suharev7/clickhouse-rs | /src/lib.rs | UTF-8 | 19,611 | 3 | 3 | ["MIT"] | permissive |
//! ## clickhouse-rs
//! Asynchronous [Yandex ClickHouse](https://clickhouse.yandex/) client library for rust programming language.
//!
//! ### Installation
//! Library hosted on [crates.io](https://crates.io/crates/clickhouse-rs/).
//!
//! ```toml
//! [dependencies]
//! clickhouse-rs = "*"
//! ```
//!
//! ### Supported data types
//!
//! * Date
//! * DateTime
//! * Decimal(P, S)
//! * Float32, Float64
//! * String, FixedString(N)
//! * UInt8, UInt16, UInt32, UInt64, Int8, Int16, Int32, Int64
//! * Nullable(T)
//! * Array(UInt/Int/String/Date/DateTime)
//! * SimpleAggregateFunction(F, T)
//! * IPv4/IPv6
//! * UUID
//!
//! ### DNS
//!
//! ```url
//! schema://user:password@host[:port]/database?param1=value1&...&paramN=valueN
//! ```
//!
//! parameters:
//!
//! - `compression` - Whether or not use compression (defaults to `none`). Possible choices:
//! * `none`
//! * `lz4`
//!
//! - `readonly` - Restricts permissions for read data, write data and change settings queries. (defaults to `none`). Possible choices:
//! * `0` - All queries are allowed.
//! * `1` - Only read data queries are allowed.
//! * `2` - Read data and change settings queries are allowed.
//!
//! - `connection_timeout` - Timeout for connection (defaults to `500 ms`)
//! - `keepalive` - TCP keep alive timeout in milliseconds.
//! - `nodelay` - Whether to enable `TCP_NODELAY` (defaults to `true`).
//!
//! - `pool_min` - Lower bound of opened connections for `Pool` (defaults to `10`).
//! - `pool_max` - Upper bound of opened connections for `Pool` (defaults to `20`).
//!
//! - `ping_before_query` - Ping server every time before execute any query. (defaults to `true`).
//! - `send_retries` - Count of retry to send request to server. (defaults to `3`).
//! - `retry_timeout` - Amount of time to wait before next retry. (defaults to `5 sec`).
//! - `ping_timeout` - Timeout for ping (defaults to `500 ms`).
//!
//! - `alt_hosts` - Comma separated list of single address host for load-balancing.
//!
//! example:
//! ```url
//! tcp://user:password@host:9000/clicks?compression=lz4&ping_timeout=42ms
//! ```
//!
//! ## Optional features
//!
//! `clickhouse-rs` puts some functionality behind optional features to optimize compile time
//! for the most common use cases. The following features are available.
//!
//! - `tokio_io` *(enabled by default)* — I/O based on [Tokio](https://tokio.rs/).
//! - `async_std` — I/O based on [async-std](https://async.rs/) (doesn't work together with `tokio_io`).
//! - `tls` — TLS support (allowed only with `tokio_io`).
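//!
//! For instance, to build against the `async_std` backend instead of the default
//! Tokio I/O, the dependency can be declared roughly like this (a sketch using
//! standard Cargo feature syntax; pin the version as appropriate for your project):
//!
//! ```toml
//! [dependencies]
//! clickhouse-rs = { version = "*", default-features = false, features = ["async_std"] }
//! ```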
//!
//! ### Example
//!
//! ```rust
//! # use std::env;
//! use clickhouse_rs::{Block, Pool, errors::Error};
//!
//! #[tokio::main]
//! async fn main() -> Result<(), Error> {
//! let ddl = r"
//! CREATE TABLE IF NOT EXISTS payment (
//! customer_id UInt32,
//! amount UInt32,
//! account_name Nullable(FixedString(3))
//! ) Engine=Memory";
//!
//! let block = Block::new()
//! .column("customer_id", vec![1_u32, 3, 5, 7, 9])
//! .column("amount", vec![2_u32, 4, 6, 8, 10])
//! .column("account_name", vec![Some("foo"), None, None, None, Some("bar")]);
//!
//! # let database_url = env::var("DATABASE_URL").unwrap_or("tcp://localhost:9000?compression=lz4".into());
//! let pool = Pool::new(database_url);
//!
//! let mut client = pool.get_handle().await?;
//! client.execute(ddl).await?;
//! client.insert("payment", block).await?;
//! let block = client.query("SELECT * FROM payment").fetch_all().await?;
//!
//! for row in block.rows() {
//! let id: u32 = row.get("customer_id")?;
//! let amount: u32 = row.get("amount")?;
//! let name: Option<&str> = row.get("account_name")?;
//! println!("Found payment {}: {} {:?}", id, amount, name);
//! }
//! Ok(())
//! }
//! ```
#![recursion_limit = "1024"]
use std::{fmt, future::Future, time::Duration};
use futures_util::{
future, future::BoxFuture, future::FutureExt, stream, stream::BoxStream, StreamExt,
};
use log::{info, warn};
use crate::{
connecting_stream::ConnectingStream,
errors::{DriverError, Error, Result},
io::ClickhouseTransport,
pool::PoolBinding,
retry_guard::retry_guard,
types::{
query_result::stream_blocks::BlockStream, Cmd, Context, IntoOptions, OptionsSource, Packet,
Query, QueryResult, SqlType,
},
};
pub use crate::{
pool::Pool,
types::{block::Block, Options},
};
mod binary;
mod client_info;
mod connecting_stream;
/// Error types.
pub mod errors;
mod io;
/// Pool types.
pub mod pool;
mod retry_guard;
/// Clickhouse types.
pub mod types;
/// This macro is a convenient way to pass row into a block.
///
/// ```rust
/// # use clickhouse_rs::{Block, row, errors::Error};
/// # fn make_block() -> Result<(), Error> {
/// let mut block = Block::new();
/// block.push(row!{customer_id: 1, amount: 2, account_name: "foo"})?;
/// block.push(row!{customer_id: 4, amount: 4, account_name: "bar"})?;
/// block.push(row!{customer_id: 5, amount: 5, account_name: "baz"})?;
/// # assert_eq!(block.row_count(), 3);
/// # Ok(())
/// # }
/// # make_block().unwrap()
/// ```
///
/// If a column name has special characters, you can use the alternative syntax
/// with `=>` to pass an expression as column name:
///
/// ```rust
/// # use clickhouse_rs::{Block, row, errors::Error};
/// # fn make_block() -> Result<(), Error> {
/// let mut block = Block::new();
/// block.push(row!{"customer.id" => 1, amount: 2, "account.name" => "foo"})?;
/// block.push(row!{"customer.id" => 4, amount: 4, "account.name" => "bar"})?;
/// block.push(row!{"customer.id" => 5, amount: 5, "account.name" => "baz"})?;
/// # assert_eq!(block.row_count(), 3);
/// # Ok(())
/// # }
/// # make_block().unwrap()
/// ```
///
/// You can also use `Vec<(String, Value)>` to construct a row and insert it into a block:
///
/// ```rust
/// # use clickhouse_rs::{Block, errors::Error, types::Value};
/// # fn make_block() -> Result<(), Error> {
/// let mut block = Block::new();
/// for i in 1..10 {
/// let mut row = Vec::new();
/// for j in 1..10 {
/// row.push((format!("#{}", j), Value::from(i * j)));
/// }
/// block.push(row)?;
/// }
/// assert_eq!(block.row_count(), 9);
/// # println!("{:?}", block);
/// # Ok(())
/// # }
/// # make_block().unwrap()
/// ```
#[macro_export]
macro_rules! row {
() => { $crate::types::RNil };
( $i:ident, $($tail:tt)* ) => {
row!( $($tail)* ).put(stringify!($i).into(), $i.into())
};
( $i:ident ) => { row!($i: $i) };
( $k:ident: $v:expr ) => {
$crate::types::RNil.put(stringify!($k).into(), $v.into())
};
( $k:ident: $v:expr, $($tail:tt)* ) => {
row!( $($tail)* ).put(stringify!($k).into(), $v.into())
};
( $k:expr => $v:expr ) => {
$crate::types::RNil.put($k.into(), $v.into())
};
( $k:expr => $v:expr, $($tail:tt)* ) => {
row!( $($tail)* ).put($k.into(), $v.into())
};
}
#[macro_export]
macro_rules! try_opt {
($expr:expr) => {
match $expr {
Ok(val) => val,
Err(err) => return Err(err),
}
};
}
#[doc(hidden)]
pub struct Client {
_private: (),
}
/// Clickhouse client handle.
pub struct ClientHandle {
inner: Option<ClickhouseTransport>,
context: Context,
pool: PoolBinding,
}
impl fmt::Debug for ClientHandle {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("ClientHandle")
.field("server_info", &self.context.server_info)
.finish()
}
}
impl Client {
#[deprecated(since = "0.1.4", note = "please use Pool to connect")]
pub async fn connect(options: Options) -> Result<ClientHandle> {
let source = options.into_options_src();
Self::open(source, None).await
}
pub(crate) async fn open(source: OptionsSource, pool: Option<Pool>) -> Result<ClientHandle> {
let options = try_opt!(source.get());
let compress = options.compression;
let timeout = options.connection_timeout;
let context = Context {
options: source.clone(),
..Context::default()
};
with_timeout(
async move {
let addr = match &pool {
None => &options.addr,
Some(p) => p.get_addr(),
};
info!("try to connect to {}", addr);
if addr.port() == Some(8123) {
warn!("You should use port 9000 instead of 8123 because clickhouse-rs work through the binary interface.");
}
let mut stream = ConnectingStream::new(addr, &options).await?;
stream.set_nodelay(options.nodelay)?;
stream.set_keepalive(options.keepalive)?;
let transport = ClickhouseTransport::new(stream, compress, pool.clone());
let mut handle = ClientHandle {
inner: Some(transport),
context,
pool: match pool {
None => PoolBinding::None,
Some(p) => PoolBinding::Detached(p),
},
};
handle.hello().await?;
Ok(handle)
},
timeout,
)
.await
}
}
impl ClientHandle {
pub(crate) async fn hello(&mut self) -> Result<()> {
let context = self.context.clone();
info!("[hello] -> {:?}", &context);
let mut h = None;
let mut info = None;
let mut stream = self.inner.take().unwrap().call(Cmd::Hello(context.clone()));
while let Some(packet) = stream.next().await {
match packet {
Ok(Packet::Hello(inner, server_info)) => {
info!("[hello] <- {:?}", &server_info);
h = Some(inner);
info = Some(server_info);
}
Ok(Packet::Exception(e)) => return Err(Error::Server(e)),
Err(e) => return Err(Error::Io(e)),
_ => return Err(Error::Driver(DriverError::UnexpectedPacket)),
}
}
self.inner = h;
self.context.server_info = info.unwrap();
Ok(())
}
pub async fn ping(&mut self) -> Result<()> {
let timeout = try_opt!(self.context.options.get()).ping_timeout;
with_timeout(
async move {
info!("[ping]");
let mut h = None;
let transport = self.inner.take().unwrap().clear().await?;
let mut stream = transport.call(Cmd::Ping);
while let Some(packet) = stream.next().await {
match packet {
Ok(Packet::Pong(inner)) => {
info!("[pong]");
h = Some(inner);
}
Ok(Packet::Exception(e)) => return Err(Error::Server(e)),
Err(e) => return Err(Error::Io(e)),
_ => return Err(Error::Driver(DriverError::UnexpectedPacket)),
}
}
self.inner = h;
Ok(())
},
timeout,
)
.await
}
/// Executes Clickhouse `query` on Conn.
pub fn query<Q>(&mut self, sql: Q) -> QueryResult
where
Query: From<Q>,
{
let query = Query::from(sql);
QueryResult {
client: self,
query,
}
}
/// Convenience method to prepare and execute a single SQL statement.
pub async fn execute<Q>(&mut self, sql: Q) -> Result<()>
where
Query: From<Q>,
{
let transport = self.execute_(sql).await?;
self.inner = Some(transport);
Ok(())
}
async fn execute_<Q>(&mut self, sql: Q) -> Result<ClickhouseTransport>
where
Query: From<Q>,
{
let timeout = try_opt!(self.context.options.get())
.execute_timeout
.unwrap_or_else(|| Duration::from_secs(0));
let context = self.context.clone();
let query = Query::from(sql);
with_timeout(
async {
self.wrap_future(move |c| {
info!("[execute query] {}", query.get_sql());
let transport = c.inner.take().unwrap();
async move {
let mut h = None;
let transport = transport.clear().await?;
let mut stream = transport.call(Cmd::SendQuery(query, context.clone()));
while let Some(packet) = stream.next().await {
match packet {
Ok(Packet::Eof(inner)) => h = Some(inner),
Ok(Packet::Block(_))
| Ok(Packet::ProfileInfo(_))
| Ok(Packet::Progress(_)) => (),
Ok(Packet::Exception(e)) => return Err(Error::Server(e)),
Err(e) => return Err(Error::Io(e)),
_ => return Err(Error::Driver(DriverError::UnexpectedPacket)),
}
}
Ok(h.unwrap())
}
})
.await
},
timeout,
)
.await
}
/// Convenience method to insert block of data.
pub async fn insert<Q, B>(&mut self, table: Q, block: B) -> Result<()>
where
Query: From<Q>,
B: AsRef<Block>,
{
let transport = self.insert_(table, block.as_ref()).await?;
self.inner = Some(transport);
Ok(())
}
async fn insert_<Q>(&mut self, table: Q, block: &Block) -> Result<ClickhouseTransport>
where
Query: From<Q>,
{
let timeout = try_opt!(self.context.options.get())
.insert_timeout
.unwrap_or_else(|| Duration::from_secs(0));
let mut names: Vec<_> = Vec::with_capacity(block.column_count());
for column in block.columns() {
names.push(try_opt!(column_name_to_string(column.name())));
}
let fields = names.join(", ");
let query = Query::from(table)
.map_sql(|table| format!("INSERT INTO {} ({}) VALUES", table, fields));
let context = self.context.clone();
with_timeout(
async {
self.wrap_future(move |c| {
info!("[insert] {}", query.get_sql());
let transport = c.inner.take().unwrap();
async move {
let transport = transport.clear().await?;
let stream = transport.call(Cmd::SendQuery(query, context.clone()));
let (transport, b) = stream.read_block().await?;
let dst_block = b.unwrap();
let casted_block = match block.cast_to(&dst_block) {
Ok(value) => value,
Err(err) => return Err(err),
};
let send_cmd = Cmd::Union(
Box::new(Cmd::SendData(casted_block, context.clone())),
Box::new(Cmd::SendData(Block::default(), context.clone())),
);
let (transport, _) = transport.call(send_cmd).read_block().await?;
Ok(transport)
}
})
.await
},
timeout,
)
.await
}
pub(crate) async fn wrap_future<T, R, F>(&mut self, f: F) -> Result<T>
where
F: FnOnce(&mut Self) -> R + Send,
R: Future<Output = Result<T>>,
T: 'static,
{
let ping_before_query = try_opt!(self.context.options.get()).ping_before_query;
if ping_before_query {
self.check_connection().await?;
}
f(self).await
}
pub(crate) fn wrap_stream<'a, F>(&'a mut self, f: F) -> BoxStream<'a, Result<Block>>
where
F: (FnOnce(&'a mut Self) -> BlockStream<'a>) + Send + 'static,
{
let ping_before_query = match self.context.options.get() {
Ok(val) => val.ping_before_query,
Err(err) => return Box::pin(stream::once(future::err(err))),
};
if ping_before_query {
let fut: BoxFuture<'a, BoxStream<'a, Result<Block>>> = Box::pin(async move {
let inner: BoxStream<'a, Result<Block>> = match self.check_connection().await {
Ok(_) => Box::pin(f(self)),
Err(err) => Box::pin(stream::once(future::err(err))),
};
inner
});
Box::pin(fut.flatten_stream())
} else {
Box::pin(f(self))
}
}
/// Check connection and try to reconnect if necessary.
pub async fn check_connection(&mut self) -> Result<()> {
self.pool.detach();
let source = self.context.options.clone();
let pool = self.pool.clone();
let (send_retries, retry_timeout) = {
let options = try_opt!(source.get());
(options.send_retries, options.retry_timeout)
};
retry_guard(self, &source, pool.into(), send_retries, retry_timeout).await?;
if !self.pool.is_attached() && self.pool.is_some() {
self.pool.attach();
}
Ok(())
}
pub(crate) fn set_inside(&self, value: bool) {
if let Some(ref inner) = self.inner {
inner.set_inside(value);
} else {
unreachable!()
}
}
}
fn column_name_to_string(name: &str) -> Result<String> {
if name.chars().all(|ch| ch.is_numeric()) {
return Ok(name.to_string());
}
if name.chars().any(|ch| ch == '`') {
let err = format!("Column name {:?} shouldn't contains backticks.", name);
return Err(Error::Other(err.into()));
}
Ok(format!("`{}`", name))
}
#[cfg(feature = "async_std")]
async fn with_timeout<F, T>(future: F, duration: Duration) -> F::Output
where
F: Future<Output = Result<T>>,
{
use async_std::io;
use futures_util::future::TryFutureExt;
io::timeout(duration, future.map_err(Into::into))
.map_err(Into::into)
.await
}
#[cfg(not(feature = "async_std"))]
async fn with_timeout<F, T>(future: F, timeout: Duration) -> F::Output
where
F: Future<Output = Result<T>>,
{
tokio::time::timeout(timeout, future).await?
}
#[cfg(test)]
pub(crate) mod test_misc {
use crate::*;
use std::env;
use lazy_static::lazy_static;
lazy_static! {
pub static ref DATABASE_URL: String = env::var("DATABASE_URL").unwrap_or_else(|_| {
"tcp://localhost:9000?compression=lz4&ping_timeout=1s&retry_timeout=2s".into()
});
}
#[test]
fn test_column_name_to_string() {
assert_eq!(column_name_to_string("id").unwrap(), "`id`");
assert_eq!(column_name_to_string("234").unwrap(), "234");
assert_eq!(column_name_to_string("ns:attr").unwrap(), "`ns:attr`");
assert!(column_name_to_string("`").is_err());
}
}
| true |
c56a7929abf9ee7db54c34465e744b249f31acde | Rust | sgbasaraner/aoc-rust | /d06/src/main.rs | UTF-8 | 4,031 | 3.140625 | 3 | [] | no_license |
use std::fs;
use std::collections::HashMap;
use std::collections::HashSet;
type Coords = (i32, i32);
#[derive(Debug, Clone)]
struct Point {
coords: Coords,
closest_beacon_id: Option<String>,
distance_to_beacon: i32,
total_distance_to_all: i32
}
impl Point {
fn update_with_beacon(&mut self, beacon_id: String, beacon_coords: Coords) {
let new_dist = get_manhattan_distance(self.coords, beacon_coords);
self.total_distance_to_all += new_dist;
if new_dist == self.distance_to_beacon {
self.closest_beacon_id = None;
return;
}
if !(new_dist < self.distance_to_beacon) { return; }
self.closest_beacon_id = Some(beacon_id);
self.distance_to_beacon = new_dist;
}
}
fn main() {
let input = read_coord_tuples();
println!("{:?}", part_one(input.clone()));
println!("{:?}", part_two(input.clone()));
}
fn part_two(beacons: Vec<Coords>) -> usize {
let points = detail_points(beacons.clone(), create_point_map(beacons.clone()));
    points.into_iter()
        .filter(|p| p.total_distance_to_all < 10000)
        .count()
}
fn part_one(beacons: Vec<Coords>) -> i32 {
let points = detail_points(beacons.clone(), create_point_map(beacons.clone()));
let mut counts: HashMap<String, i32> = HashMap::new();
for point in points.clone() {
if point.closest_beacon_id.is_none() { continue; }
let closest_id = point.closest_beacon_id.unwrap();
*counts.entry(closest_id).or_insert(0) += 1;
}
let bottom_right_edge = get_bottom_right_edge(beacons.clone());
let on_edge_ids: HashSet<String> = points.into_iter()
.filter(|p| on_edge(bottom_right_edge, &p) && p.closest_beacon_id.is_some())
.map(|p| p.closest_beacon_id.unwrap())
.collect();
let mut count_vec: Vec<_> = counts.iter().collect();
count_vec.sort_unstable_by(|t1, t2| t1.1.cmp(t2.1));
let filtered: Vec<_> = count_vec.into_iter()
.filter(|t| !on_edge_ids.contains(t.0)).collect();
*filtered.last().unwrap().1
}
fn on_edge(edge: Coords, point: &Point) -> bool {
let xs = vec![0, edge.0];
let ys = vec![0, edge.1];
xs.contains(&point.coords.0) || ys.contains(&point.coords.1)
}
fn detail_points(beacon_coords: Vec<Coords>, points: Vec<Point>) -> Vec<Point> {
let mut mut_points = points.clone();
for beacon in beacon_coords {
for i in 0..points.len() {
mut_points[i].update_with_beacon(get_beacon_id(&beacon), beacon);
}
}
mut_points
}
fn get_bottom_right_edge(beacon_coords: Vec<Coords>) -> Coords {
let mut y_coords: Vec<i32> = beacon_coords.iter().map(|(_x, y)| *y).collect();
let mut x_coords: Vec<i32> = beacon_coords.iter().map(|(x, _y)| *x).collect();
y_coords.sort();
x_coords.sort();
let bottommost_point = y_coords.last().unwrap();
let rightmost_point = x_coords.last().unwrap();
(*rightmost_point, *bottommost_point)
}
fn create_point_map(beacon_coords: Vec<Coords>) -> Vec<Point> {
let bottom_right_edge = get_bottom_right_edge(beacon_coords.clone());
let mut vec: Vec<Point> = Vec::new();
for x in 0..=bottom_right_edge.0 {
for y in 0..=bottom_right_edge.1 {
vec.push(Point {
coords: (x, y),
closest_beacon_id: None,
distance_to_beacon: i32::max_value(),
total_distance_to_all: 0
})
}
}
vec
}
fn get_beacon_id(beacon: &Coords) -> String {
let strs = vec![beacon.0.to_string(), beacon.1.to_string()];
strs.join(" ")
}
fn get_manhattan_distance(p1: Coords, p2: Coords) -> i32 {
(p1.0 - p2.0).abs() + (p1.1 - p2.1).abs()
}
fn read_coord_tuples() -> Vec<Coords> {
let contents = fs::read_to_string("src/input.in").unwrap();
contents.lines().map(|line| {
let ints: Vec<i32> = line.split(", ").map(|x| x.parse::<i32>().unwrap()).collect();
(ints[0], ints[1])
}).collect()
}
| true |
de4277558d5206a703994f0d7664d0cdae49d1fb
|
Rust
|
95th/torrent-rs
|
/common/src/types.rs
|
UTF-8
| 893 | 2.640625 | 3 |
[
"MIT"
] |
permissive
|
use std::fmt;
macro_rules! new_ty {
($ty: ident; $size: expr) => {
#[derive(Clone)]
pub struct $ty([u8; $size]);
impl Default for $ty {
fn default() -> Self {
Self([0; $size])
}
}
impl fmt::Debug for $ty {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_list().entries(self.0.iter()).finish()
}
}
impl std::ops::Deref for $ty {
type Target = [u8; $size];
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl std::ops::DerefMut for $ty {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
};
}
new_ty!(PublicKey; 32);
new_ty!(SecretKey; 64);
new_ty!(Signature; 64);
pub type SequenceNumber = i64;
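// Illustrative usage sketch (not part of the original file): the generated
// newtypes start zeroed and deref to their fixed-size byte arrays.
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn generated_types_deref_to_byte_arrays() {
        let mut key = PublicKey::default();
        assert_eq!(key.len(), 32);
        assert!(key.iter().all(|&b| b == 0));
        // DerefMut allows writing through the wrapper.
        key[0] = 0xff;
        assert_eq!(key[0], 0xff);
        assert_eq!(Signature::default().len(), 64);
    }
}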
| true |
235074ad5dbb85f3b22b509b108bf03a5c5a6b44
|
Rust
|
gchers/random-world
|
/src/bin/cp-predict.rs
|
UTF-8
| 5,012 | 2.8125 | 3 |
[
"MIT"
] |
permissive
|
#[macro_use]
extern crate ndarray;
#[macro_use]
extern crate serde_derive;
extern crate docopt;
extern crate random_world;
extern crate itertools;
use random_world::cp::*;
use random_world::ncm::*;
use random_world::utils::{load_data, store_predictions};
use itertools::Itertools;
use docopt::Docopt;
use ndarray::*;
const USAGE: &'static str = "
Predict data using Conformal Prediction.
If no <testing-file> is specified, on-line mode is assumed.
Usage: cp-predict knn [--knn=<k>] [options] [--] <output-file> <training-file> [<testing-file>]
       cp-predict kde [--kernel=<kernel>] [--bandwidth=<bw>] [options] [--] <output-file> <training-file> [<testing-file>]
cp-predict (--help | --version)
Options:
-e, --epsilon=<epsilon> Significance level. If specified, the output are
label predictions rather than p-values.
-s, --smooth Smooth CP.
      --seed=<seed>           PRNG seed. Only used if --smooth is set.
-k, --knn=<kn> Number of neighbors for k-NN [default: 5].
--n-labels=<n> Number of labels. If specified in advance it
                              slightly improves performance.
-h, --help Show help.
--version Show the version.
";
#[derive(Deserialize)]
struct Args {
flag_epsilon: Option<f64>,
flag_smooth: bool,
flag_seed: Option<u64>,
flag_knn: usize,
flag_kernel: Option<String>,
flag_bandwidth: Option<f64>,
flag_n_labels: Option<usize>,
arg_training_file: String,
arg_testing_file: Option<String>,
arg_output_file: String,
cmd_knn: bool,
cmd_kde: bool,
}
fn main() {
// Parse args from command line.
let args: Args = Docopt::new(USAGE)
.and_then(|d| d.deserialize())
.unwrap_or_else(|e| e.exit());
// Nonconformity measure.
let ncm = if args.cmd_knn {
KNN::new(args.flag_knn)
} else if args.cmd_kde {
unimplemented!();
} else {
// Docopt shouldn't let this happen.
panic!("This shouldn't happen");
};
// Load training and test data.
let (train_inputs, train_targets) = load_data(&args.arg_training_file)
.expect("Failed to load data");
// Number of labels.
let n_labels = match args.flag_n_labels {
Some(n_labels) => n_labels,
None => train_targets.into_iter()
.unique()
.count()
};
// Initialize CP.
let mut cp = if args.flag_smooth {
let seed = match args.flag_seed {
Some(s) => Some([0, s]),
None => None,
};
CP::new_smooth(ncm, n_labels, args.flag_epsilon, seed)
} else {
CP::new(ncm, n_labels, args.flag_epsilon)
};
// If testing file is specified, predict test data.
// Otherwise, use CP in on-line mode.
if let Some(testing_file) = args.arg_testing_file {
println!("Predicting {}", testing_file);
let (test_inputs, _) = load_data(&testing_file)
.expect("Failed to load data");
// Train.
cp.train(&train_inputs.view(), &train_targets.view())
.expect("Failed to train the model");
// Predict and store results.
if let Some(_) = args.flag_epsilon {
let preds = cp.predict(&test_inputs.view())
.expect("Failed to predict");
store_predictions(preds.view(), &args.arg_output_file, false)
.expect("Failed to store the output");
} else {
let preds = cp.predict_confidence(&test_inputs.view())
.expect("Failed to predict");
store_predictions(preds.view(), &args.arg_output_file, false)
.expect("Failed to store the output");
}
} else {
println!("Using CP in on-line mode on training data");
// Train on first data point.
let x = train_inputs.slice(s![0..1, ..]);
let y = train_targets[[0]];
cp.train(&x, &array![y].view())
.expect("Failed to train CP");
// Reset output file.
store_predictions(Array2::<f64>::zeros((0,0)).view(),
&args.arg_output_file, false).expect("Failed to initialize file");
// Update and predict the remaining points in on-line mode.
for (x, y) in train_inputs.outer_iter().zip(train_targets.view()).skip(1) {
let x_ = x.into_shape((1, x.len())).unwrap();
let y_ = array![*y];
let preds = cp.predict_confidence(&x_)
.expect("Failed to predict");
cp.update(&x_, &y_.view())
.expect("Failed to update CP");
// Write to file.
store_predictions(preds.view(), &args.arg_output_file, true)
.expect("Failed to store the output");
}
}
}
| true |
a1ed5add5b967e44f08de3a77a9073d030083cf7
|
Rust
|
xleyba/kvsold
|
/src/bin/kvs.rs
|
UTF-8
| 3,194 | 3.015625 | 3 |
[] |
no_license
|
#[macro_use]
extern crate clap;
use clap::{App, Arg, SubCommand};
use structopt::StructOpt;
use std::env;
use kvs::{Result, KvStore};
#[derive(StructOpt, Debug)]
#[structopt(name = "git", about = "the stupid content tracker")]
enum Opt {
#[structopt(name = "set")]
Set {
#[structopt(value_name = "KEY", help = "The key to insert.")]
key: String,
#[structopt(value_name = "VALUE", help = "The value to insert.")]
value: String,
},
#[structopt(name = "get", about = "store a value for a key")]
Get {
#[structopt(value_name = "KEY", help = "The key to search the value for")]
key: String,
},
#[structopt(name = "rm")]
Rm {
#[structopt(value_name = "KEY", help = "The key to search the value for")]
key: String,
},
}
fn main() -> Result<()> {
    let opt = Opt::from_args();
    match opt {
Opt::Set { key, value } => {
let mut store = KvStore::new(env::current_dir()?)?;
store.set(key.to_string(), value.to_string())?;
Ok(())
},
Opt::Rm { key } => {
let mut store = KvStore::new(env::current_dir()?)?;
store.rm(key.to_string())?;
Ok(())
},
Opt::Get { key } => {
let mut store = KvStore::new(env::current_dir()?)?;
if let Some(value) = store.get(key.to_string())? {
println!("{}", value);
} else {
println!("Key not found");
}
Ok(())
}
}
/*
let matches = App::new(env!("CARGO_PKG_NAME"))
.version(env!("CARGO_PKG_VERSION"))
.author(env!("CARGO_PKG_AUTHORS"))
.about(env!("CARGO_PKG_DESCRIPTION"))
.subcommand(
SubCommand::with_name("set")
.about("store a value for a key")
.arg(Arg::with_name("KEY").help("A string key").required(true))
.arg(
Arg::with_name("VALUE")
.help("The string value for the key")
.required(true),
),
)
.subcommand(
SubCommand::with_name("get")
.about("Get the value for a given key")
.arg(
Arg::with_name("KEY")
.help("The key to search the value for")
.required(true),
),
)
.subcommand(
SubCommand::with_name("rm")
.about("Remove the given key from the store")
.arg(
Arg::with_name("KEY")
.help("The key to remove")
.required(true),
),
)
.get_matches();
match matches.subcommand() {
("set", Some(matches)) => {
println!("unimplemented");
exit(1)},
("get", Some(matches)) => {
println!("unimplemented");
exit(1)},
("rm", Some(matches)) => {
println!("unimplemented");
exit(1)},
_ => unreachable!(),
}
*/
}
| true |
d9552e2be60c26ff2481c5cb6b9b00ce280854fe
|
Rust
|
matthieu-m/stysh
|
/stysh-compile/src/model/hir/common.rs
|
UTF-8
| 16,135 | 2.828125 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
//! Common types.
use std::{convert, fmt};
use crate::basic::{com, mem};
use crate::basic::sea::TableIndex;
use crate::model::ast;
use crate::model::hir::ItemId;
//
// Public Types (IDs)
//
pub use self::com::Id;
/// Identifier (name) of an item or value.
pub type Identifier = mem::InternId;
/// Index of an Expr in the Tree.
#[derive(Clone, Copy, Default, PartialEq, PartialOrd, Eq, Ord, Hash)]
pub struct ExpressionId(com::CoreId);
/// Index of a Pattern in the Tree.
#[derive(Clone, Copy, Default, PartialEq, PartialOrd, Eq, Ord, Hash)]
pub struct PatternId(com::CoreId);
/// Index of a Type in the Tree.
#[derive(Clone, Copy, Default, PartialEq, PartialOrd, Eq, Ord, Hash)]
pub struct TypeId(com::CoreId);
/// Index of an ElaborateType in the Tree.
#[derive(Clone, Copy, Default, PartialEq, PartialOrd, Eq, Ord, Hash)]
pub struct ElaborateTypeId(com::CoreId);
/// A global value number.
///
/// Defaults to 0, which is considered an invalid value.
#[derive(Clone, Copy, Default, PartialEq, PartialOrd, Eq, Ord, Hash)]
pub struct Gvn(pub u32);
/// An item identifier.
#[derive(Clone, Copy, Default, PartialEq, PartialOrd, Eq, Ord, Hash)]
pub struct ItemIdentifier(pub Identifier, pub com::Range);
/// A value identifier.
#[derive(Clone, Copy, Default, PartialEq, PartialOrd, Eq, Ord, Hash)]
pub struct ValueIdentifier(pub Identifier, pub com::Range);
//
// Public Types
//
/// A built-in Type.
#[derive(Clone, Copy, Debug, PartialEq, PartialOrd, Eq, Ord, Hash)]
pub enum BuiltinType {
/// A boolean.
Bool,
/// A 64-bits signed integer.
Int,
/// A String.
String,
/// An uninhabited type.
Void,
}
/// A field.
#[derive(Clone, Copy, Debug, PartialEq, PartialOrd, Eq, Ord, Hash)]
pub enum Field {
/// Index of the field.
Index(u16, com::Range),
/// Unresolved name of the field.
Unresolved(ValueIdentifier),
}
/// A tuple.
#[derive(Clone, Copy, Debug, PartialEq, PartialOrd, Eq, Ord, Hash)]
pub struct Tuple<T> {
/// The tuple fields.
pub fields: Id<[T]>,
/// The name of the fields, empty if unnamed, otherwise of equal length.
pub names: Id<[Identifier]>,
}
//
// Public interface (IDs)
//
impl ExpressionId {
/// Creates a new instance.
pub fn new(id: u32) -> Self { ExpressionId(com::CoreId::new(id)) }
/// Creates an instance from an Expression Gvn.
pub fn from_gvn(gvn: Gvn) -> Option<Self> {
if gvn.0 >= Gvn::EXPRESSION_OFFSET && gvn.0 < Gvn::PATTERN_OFFSET {
Some(ExpressionId::new(gvn.0 - Gvn::EXPRESSION_OFFSET))
} else {
None
}
}
/// Returns the raw ID.
pub fn raw(&self) -> u32 { self.0.raw() }
}
impl PatternId {
/// Creates a new instance.
pub fn new(id: u32) -> Self { PatternId(com::CoreId::new(id)) }
/// Creates an instance from an Pattern Gvn.
pub fn from_gvn(gvn: Gvn) -> Option<Self> {
if gvn.0 >= Gvn::PATTERN_OFFSET {
Some(PatternId::new(gvn.0 - Gvn::PATTERN_OFFSET))
} else {
None
}
}
}
impl TypeId {
/// Creates a new instance.
pub fn new(id: u32) -> Self { TypeId(com::CoreId::new(id)) }
/// Creates a new instance of a Bool TypeId.
pub fn bool_() -> Self { TypeId::new(TypeId::BOOL_ID) }
/// Creates a new instance of a Int TypeId.
pub fn int() -> Self { TypeId::new(TypeId::INT_ID) }
/// Creates a new instance of a String TypeId.
pub fn string() -> Self { TypeId::new(TypeId::STRING_ID) }
/// Creates a new instance of a Void TypeId.
pub fn void() -> Self { TypeId::new(TypeId::VOID_ID) }
/// Returns whether the corresponding Type is a built-in.
pub fn is_builtin(&self) -> bool { self.builtin().is_some() }
/// Converts to a BuiltinType, if possible.
pub fn builtin(&self) -> Option<BuiltinType> {
match self.0.raw() {
t if t == TypeId::BOOL_ID => Some(BuiltinType::Bool),
t if t == TypeId::INT_ID => Some(BuiltinType::Int),
t if t == TypeId::STRING_ID => Some(BuiltinType::String),
t if t == TypeId::VOID_ID => Some(BuiltinType::Void),
_ => None
}
}
/// Returns the inner ID.
pub fn value(&self) -> u32 { self.0.raw() }
}
impl ElaborateTypeId {
/// Creates a new instance.
pub fn new(id: u32) -> Self { ElaborateTypeId(com::CoreId::new(id)) }
/// Creates a new instance of a Bool ElaborateTypeId.
pub fn bool_() -> Self { ElaborateTypeId::new(ElaborateTypeId::BOOL_ID) }
/// Creates a new instance of a Int ElaborateTypeId.
pub fn int() -> Self { ElaborateTypeId::new(ElaborateTypeId::INT_ID) }
/// Creates a new instance of a String ElaborateTypeId.
pub fn string() -> Self { ElaborateTypeId::new(ElaborateTypeId::STRING_ID) }
/// Creates a new instance of a Void ElaborateTypeId.
pub fn void() -> Self { ElaborateTypeId::new(ElaborateTypeId::VOID_ID) }
/// Returns whether the corresponding Type is a built-in.
pub fn is_builtin(&self) -> bool { self.builtin().is_some() }
/// Converts to a BuiltinType, if possible.
pub fn builtin(&self) -> Option<BuiltinType> {
match self.0.raw() {
t if t == ElaborateTypeId::BOOL_ID => Some(BuiltinType::Bool),
t if t == ElaborateTypeId::INT_ID => Some(BuiltinType::Int),
t if t == ElaborateTypeId::STRING_ID => Some(BuiltinType::String),
t if t == ElaborateTypeId::VOID_ID => Some(BuiltinType::Void),
_ => None
}
}
/// Returns the inner ID.
pub fn value(&self) -> u32 { self.0.raw() }
}
impl Gvn {
/// Converts Gvn into an ExpressionId, if possible.
pub fn as_expression(self) -> Option<ExpressionId> {
ExpressionId::from_gvn(self)
}
/// Converts Gvn into an PatternId, if possible.
pub fn as_pattern(self) -> Option<PatternId> {
PatternId::from_gvn(self)
}
}
impl ItemIdentifier {
/// Returns the InternId.
pub fn id(&self) -> mem::InternId { self.0 }
/// Sets the InternId.
pub fn with_id(self, id: mem::InternId) -> Self {
ItemIdentifier(id, self.1)
}
/// Sets the Range.
pub fn with_range(self, range: com::Range) -> Self {
ItemIdentifier(self.0, range)
}
/// Returns a sentinel instance of ItemIdentifier.
pub fn unresolved() -> ItemIdentifier { Default::default() }
}
impl ValueIdentifier {
/// Returns the InternId.
pub fn id(&self) -> mem::InternId { self.0 }
/// Sets the InternId.
pub fn with_id(self, id: mem::InternId) -> Self {
ValueIdentifier(id, self.1)
}
/// Sets the Range.
pub fn with_range(self, range: com::Range) -> Self {
ValueIdentifier(self.0, range)
}
}
//
// Public interface
//
impl BuiltinType {
/// Total number of built-in types.
pub const NUMBER: usize = 4;
/// Index of a built-in type, guaranteed to be in [0, NUMBER)
pub fn index(&self) -> usize {
use self::BuiltinType::*;
match *self {
Bool => 0,
Int => 1,
String => 2,
Void => 3,
}
}
}
impl Field {
/// Returns the index.
///
/// Panics: If the field is Unresolved.
pub fn index(&self) -> u16 {
use self::Field::*;
match *self {
Index(i, ..) => i,
Unresolved(name) => panic!("Unresolved {:?}", name),
}
}
}
impl<T> Tuple<T> {
/// Creates a unit tuple.
pub fn unit() -> Tuple<T> {
Tuple { fields: Id::empty(), names: Id::empty() }
}
/// Creates a tuple with unnamed fields.
pub fn unnamed(fields: Id<[T]>) -> Tuple<T> {
Tuple { fields, names: Id::empty() }
}
}
//
// Private Interface
//
impl Gvn {
const EXPRESSION_OFFSET: u32 = 1;
const PATTERN_OFFSET: u32 = std::u32::MAX / 2;
}
impl TypeId {
const BOOL_ID: u32 = std::u32::MAX - 3;
const INT_ID: u32 = std::u32::MAX - 4;
const STRING_ID: u32 = std::u32::MAX - 5;
const VOID_ID: u32 = std::u32::MAX - 6;
}
impl ElaborateTypeId {
const BOOL_ID: u32 = std::u32::MAX - 3;
const INT_ID: u32 = std::u32::MAX - 4;
const STRING_ID: u32 = std::u32::MAX - 5;
const VOID_ID: u32 = std::u32::MAX - 6;
}
//
// Traits Implementations
//
impl fmt::Debug for ExpressionId {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
// More compact representation for `{:#?}`.
if *self == Default::default() {
write!(f, "ExpressionId(default)")
} else {
write!(f, "ExpressionId({})", self.index())
}
}
}
impl fmt::Debug for PatternId {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
// More compact representation for `{:#?}`.
if *self == Default::default() {
write!(f, "PatternId(default)")
} else {
write!(f, "PatternId({})", self.index())
}
}
}
impl fmt::Debug for TypeId {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
// More compact representation for `{:#?}`.
if *self == Default::default() {
write!(f, "TypeId(default)")
} else if let Some(t) = self.builtin() {
write!(f, "TypeId({:?})", t)
} else if let Some(i) = self.get_tree() {
write!(f, "TypeId(T-{})", i)
} else if let Some(i) = self.get_module() {
write!(f, "TypeId(M-{})", i)
} else {
write!(f, "TypeId(R-{})", self.get_repository().unwrap())
}
}
}
impl fmt::Debug for ElaborateTypeId {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
// More compact representation for `{:#?}`.
if *self == Default::default() {
write!(f, "ElaborateTypeId(default)")
} else if let Some(t) = self.builtin() {
write!(f, "ElaborateTypeId({:?})", t)
} else if let Some(i) = self.get_tree() {
write!(f, "ElaborateTypeId(T-{})", i)
} else if let Some(i) = self.get_module() {
write!(f, "ElaborateTypeId(M-{})", i)
} else {
write!(f, "ElaborateTypeId(R-{})", self.get_repository().unwrap())
}
}
}
impl fmt::Debug for Gvn {
fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
match (self.as_expression(), self.as_pattern()) {
(Some(e), None) => write!(f, "Gvn({:?})", e),
(None, Some(p)) => write!(f, "Gvn({:?})", p),
(..) => write!(f, "Gvn(-)"),
}
}
}
impl std::fmt::Debug for ItemIdentifier {
fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
write!(f, "ItemIdentifier({:?}, {})", self.0, self.1)
}
}
impl fmt::Debug for ValueIdentifier {
fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
write!(f, "ValueIdentifier({:?}, {})", self.0, self.1)
}
}
impl Default for Field {
fn default() -> Self { Field::Index(0, Default::default()) }
}
impl<T> Default for Tuple<T> {
fn default() -> Self {
Tuple { fields: Default::default(), names: Default::default() }
}
}
impl fmt::Display for BuiltinType {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
write!(f, "{:?}", self)
}
}
impl fmt::Display for Field {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
use self::Field::*;
match *self {
Index(i, ..) => write!(f, "{}", i),
Unresolved(name) => write!(f, "{:?}", name),
}
}
}
impl fmt::Display for ItemIdentifier {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
write!(f, "<{}>", self.1)
}
}
impl convert::From<BuiltinType> for TypeId {
fn from(b: BuiltinType) -> Self {
match b {
BuiltinType::Bool => TypeId::bool_(),
BuiltinType::Int => TypeId::int(),
BuiltinType::String => TypeId::string(),
BuiltinType::Void => TypeId::void(),
}
}
}
impl convert::From<BuiltinType> for ElaborateTypeId {
fn from(b: BuiltinType) -> Self {
match b {
BuiltinType::Bool => ElaborateTypeId::bool_(),
BuiltinType::Int => ElaborateTypeId::int(),
BuiltinType::String => ElaborateTypeId::string(),
BuiltinType::Void => ElaborateTypeId::void(),
}
}
}
impl convert::From<ExpressionId> for Gvn {
fn from(id: ExpressionId) -> Self {
Gvn(id.0.raw().wrapping_add(Gvn::EXPRESSION_OFFSET))
}
}
impl convert::From<PatternId> for Gvn {
fn from(id: PatternId) -> Self {
Gvn(id.0.raw().wrapping_add(Gvn::PATTERN_OFFSET))
}
}
impl convert::From<ast::Identifier> for ItemIdentifier {
fn from(value: ast::Identifier) -> Self {
ItemIdentifier(value.0, value.1)
}
}
impl convert::From<ast::TypeIdentifier> for ItemIdentifier {
fn from(value: ast::TypeIdentifier) -> Self {
ItemIdentifier(value.0, value.1)
}
}
impl convert::From<ast::VariableIdentifier> for ItemIdentifier {
fn from(value: ast::VariableIdentifier) -> Self {
ItemIdentifier(value.0, value.1)
}
}
impl convert::From<ast::Argument> for ValueIdentifier {
fn from(value: ast::Argument) -> Self { value.name.into() }
}
impl convert::From<ast::Identifier> for ValueIdentifier {
fn from(value: ast::Identifier) -> Self {
ValueIdentifier(value.0, value.1)
}
}
impl convert::From<ast::VariableIdentifier> for ValueIdentifier {
fn from(value: ast::VariableIdentifier) -> Self {
ValueIdentifier(value.0, value.1)
}
}
impl com::Span for Field {
/// Returns the range spanned by the field.
fn span(&self) -> com::Range {
use self::Field::*;
match *self {
Index(_, r) => r,
Unresolved(n) => n.1,
}
}
}
impl com::Span for ItemIdentifier {
/// Returns the range spanned by the ItemIdentifier.
fn span(&self) -> com::Range { self.1 }
}
impl com::Span for ValueIdentifier {
/// Returns the range spanned by the ValueIdentifier.
fn span(&self) -> com::Range { self.1 }
}
impl TableIndex for ExpressionId {
fn from_index(index: usize) -> Self { ExpressionId::new(index as u32) }
fn index(&self) -> usize { self.0.raw() as usize }
}
impl TableIndex for PatternId {
fn from_index(index: usize) -> Self { PatternId::new(index as u32) }
fn index(&self) -> usize { self.0.raw() as usize }
}
impl TableIndex for TypeId {
fn from_index(index: usize) -> Self { TypeId::new(index as u32) }
fn index(&self) -> usize { self.0.raw() as usize }
}
impl TableIndex for ElaborateTypeId {
fn from_index(index: usize) -> Self { ElaborateTypeId::new(index as u32) }
fn index(&self) -> usize { self.0.raw() as usize }
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn expression_id() {
assert_eq!(ExpressionId::new(0).index(), 0);
assert_eq!(ExpressionId::new(5).index(), 5);
assert_eq!(
Gvn::from(ExpressionId::new(0)).as_expression(),
Some(ExpressionId::new(0))
);
}
#[test]
fn pattern_id() {
assert_eq!(PatternId::new(0).index(), 0);
assert_eq!(PatternId::new(5).index(), 5);
assert_eq!(
Gvn::from(PatternId::new(0)).as_pattern(),
Some(PatternId::new(0))
);
}
#[test]
fn type_id() {
assert_eq!(TypeId::new(0).index(), 0);
assert_eq!(TypeId::new(5).index(), 5);
}
#[test]
fn type_id_builtin() {
use self::BuiltinType::*;
assert_eq!(Some(Bool), TypeId::bool_().builtin());
assert_eq!(Some(Int), TypeId::int().builtin());
assert_eq!(Some(String), TypeId::string().builtin());
assert_eq!(Some(Void), TypeId::void().builtin());
for &b in &[Bool, Int, String, Void] {
assert_eq!(Some(b), TypeId::from(b).builtin());
}
assert_eq!(None, TypeId::default().builtin());
assert_eq!(None, TypeId::new(0).builtin());
}
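    // Illustrative sketch (not in the original tests): the default `Field` is an
    // index of zero; `Field::index` panics only for unresolved names.
    #[test]
    fn field_default_index() {
        assert_eq!(Field::default().index(), 0);
        assert_eq!(Field::Index(3, Default::default()).index(), 3);
    }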
}
| true |
63448f41f84bbf1dfaa94db9f2bf98d9a26ebe23
|
Rust
|
oulamineYou/pt2itp
|
/native/src/text/mod.rs
|
UTF-8
| 27,768 | 3.21875 | 3 |
[
"BSD-3-Clause",
"BSD-2-Clause"
] |
permissive
|
mod diacritics;
mod tokens;
mod replace;
//
// A note on fn names:
// - Functions that determine the type of a string should be prefixed with `is_`
// - Functions that operate on Strings should be prefixed with `str_`
// - Functions that generate Name synonyms should be prefixed with `syn_`
//
pub use self::diacritics::diacritics;
pub use self::tokens::Tokens;
use std::collections::HashMap;
use regex::{Regex, RegexSet};
use crate::{Name, Context};
///
/// Return the Levenshtein distance between two strings
///
fn distance<T>(a: &T, b: &T) -> usize
where T: ToString
{
let v1: Vec<char> = a.to_string().chars().collect();
let v2: Vec<char> = b.to_string().chars().collect();
let v1len = v1.len();
let v2len = v2.len();
// Early exit if one of the strings is empty
if v1len == 0 { return v2len; }
if v2len == 0 { return v1len; }
fn min3<T: Ord>(v1: T, v2: T, v3: T) -> T{
std::cmp::min(v1, std::cmp::min(v2, v3))
}
fn delta(x: char, y: char) -> usize {
if x == y { 0 } else { 1 }
}
let mut column: Vec<usize> = (0..v1len+1).collect();
for x in 1..v2len+1 {
column[0] = x;
let mut lastdiag = x-1;
for y in 1..v1len+1 {
let olddiag = column[y];
column[y] = min3(column[y] + 1, column[y-1] + 1, lastdiag + delta(v1[y-1], v2[x-1]));
lastdiag = olddiag;
}
}
column[v1len]
}
///
/// Detects if the name looks like a drive-through
///
pub fn is_drivethrough(text: &String, context: &Context) -> bool {
lazy_static! {
static ref DE: Regex = Regex::new(r"(?i) einfahrt$").unwrap();
static ref EN: Regex = Regex::new(r"(?i)drive.?(in|through|thru)$").unwrap();
}
if (
context.country == String::from("US")
|| context.country == String::from("CA")
|| context.country == String::from("GB")
|| context.country == String::from("DE")
|| context.country == String::from("CH")
|| context.country == String::from("AT")
) && EN.is_match(text.as_str()) {
return true;
}
if (
context.country == String::from("DE")
) && DE.is_match(text.as_str()) {
return true;
}
false
}
///
/// Removes the octothorpe from names like "HWY #35", to get "HWY 35"
///
pub fn str_remove_octo(text: &String) -> String {
lazy_static! {
static ref OCTO: Regex = Regex::new(r"(?i)^(?P<type>HWY |HIGHWAY |RTE |ROUTE |US )(#)(?P<post>\d+\s?.*)$").unwrap();
}
match OCTO.captures(text.as_str()) {
Some(capture) => format!("{}{}", &capture["type"], &capture["post"]),
_ => text.clone()
}
}
///
/// Detect Strings like `5 Avenue` and return a synonym like `5th Avenue` where possible
///
pub fn syn_number_suffix(name: &Name, context: &Context) -> Vec<Name> {
lazy_static! {
static ref NUMSUFFIX: Regex = Regex::new(r"(?i)^(?P<number>\d+)\s+(?P<name>\w.*)$").unwrap();
}
match NUMSUFFIX.captures(name.display.as_str()) {
Some(capture) => {
let num: i64 = match capture["number"].parse() {
Ok(num) => num,
_ => { return Vec::new(); }
};
let suffix: String;
if (num % 100) >= 10 && (num % 100) <= 20 {
suffix = String::from("th");
} else if (num % 10) == 1 {
suffix = String::from("st");
} else if (num % 10) == 2 {
suffix = String::from("nd");
} else if (num % 10) == 3 {
suffix = String::from("rd");
} else {
suffix = String::from("th");
}
vec![Name::new(format!("{}{} {}", num, suffix, &capture["name"]), -1, &context)]
},
None => Vec::new()
}
}
///
/// In Quebec it is common to be able to search for simple streets by their name alone,
/// without the street type. This creates lower-priority synonyms for these cases
///
pub fn syn_ca_french(name: &Name, context: &Context) -> Vec<Name> {
lazy_static! {
static ref STANDALONE: Regex = Regex::new(r"^(r|ch|av|bd)\s").unwrap();
static ref ELIMINATOR: Regex = Regex::new(r"^(r|ch|av|bd)\s(du|des|de)\s").unwrap();
}
let mut syns = Vec::new();
if
STANDALONE.is_match(&*name.tokenized)
&& !ELIMINATOR.is_match(&*name.tokenized)
{
let basic = STANDALONE.replace(&*name.tokenized, "").to_string();
syns.push(Name::new(basic, -1, &context));
}
syns
}
///
/// Adds synonyms to names like "Highway 123" => "NS-123", "Nova Scotia Highway 123"
///
pub fn syn_ca_hwy(name: &Name, context: &Context) -> Vec<Name> {
let region = match context.region {
Some(ref region) => region,
None => { return Vec::new() }
};
let region_name = match context.region_name() {
Some(region) => region,
None => { return Vec::new() }
};
lazy_static! {
static ref HIGHWAY: RegexSet = RegexSet::new(&[
r"(?i)^[0-9]+[a-z]?$",
r"(?i)(ON|QC|NS|NB|MB|BC|PE|PEI|SK|AB|NL|NT|YT|NU)-[0-9]+[a-z]?$",
r"(?i)(Highway|hwy|route|rte) [0-9]+[a-z]?$",
r"(?i)King'?s Highway [0-9]+[a-z]?",
r"(?i)(Alberta|British Columbia|Saskatchewan|Manitoba|Yukon|New Brunswick|Newfoundland and Labrador|Newfoundland|Labrador|Price Edward Island|PEI|Quebec|Northwest Territories|Nunavut|Nova Scotia) (Highway|hwy|Route|rtw) [0-9]+[a-z]?"
]).unwrap();
static ref NUM: Regex = Regex::new(r"(?i)(?P<num>[0-9]+[a-z]?$)").unwrap();
}
    // The Trans-Canada Highway shouldn't be given provincial highway synonyms
if name.display == String::from("1") {
Vec::new()
} else if HIGHWAY.is_match(name.display.as_str()) {
match NUM.captures(name.display.as_str()) {
Some(capture) => {
let num = capture["num"].to_string();
let mut syns: Vec<Name> = Vec::new();
// Highway 123
syns.push(Name::new(format!("Highway {}", &num), -1, &context));
// Route 123
syns.push(Name::new(format!("Route {}", &num), -1, &context));
// NB 123
syns.push(Name::new(format!("{} {}", ®ion, &num), -2, &context));
let hwy_type: String;
if
region == &String::from("NB")
|| region == &String::from("NL")
|| region == &String::from("PE")
|| region == &String::from("QC")
{
hwy_type = String::from("Highway");
} else {
hwy_type = String::from("Route");
}
//New Brunswick Route 123 (Display Form)
if name.priority > 0 {
syns.push(Name::new(format!("{} {} {}", ®ion_name, &hwy_type, &num), 0, &context));
} else {
syns.push(Name::new(format!("{} {} {}", ®ion_name, &hwy_type, &num), 1, &context));
}
syns
},
None => Vec::new()
}
} else {
Vec::new()
}
}
///
/// One -> Twenty are handled as geocoder-abbrev. Because Twenty-First has a hyphen, which is converted
/// to a space by the tokenizer, these cannot currently be managed as token-level replacements and are handled
/// as synonyms instead
///
pub fn syn_written_numeric(name: &Name, context: &Context) -> Vec<Name> {
lazy_static! {
        static ref NUMERIC: Regex = Regex::new(r"(?i)(?P<pre>^.*)(?P<tenth>Twenty|Thirty|Forty|Fourty|Fifty|Sixty|Seventy|Eighty|Ninety)-(?P<nth>First|Second|Third|Fourth|Fifth|Sixth|Seventh|Eighth|Ninth)(?P<post>.*$)").unwrap();
static ref NUMERIC_MAP: HashMap<String, String> = {
let mut m = HashMap::new();
m.insert(String::from("twenty"), String::from("2"));
m.insert(String::from("thirty"), String::from("3"));
m.insert(String::from("fourty"), String::from("4"));
m.insert(String::from("fifty"), String::from("5"));
m.insert(String::from("sixty"), String::from("6"));
m.insert(String::from("seventy"), String::from("7"));
m.insert(String::from("eighty"), String::from("8"));
m.insert(String::from("ninety"), String::from("9"));
m.insert(String::from("first"), String::from("1st"));
m.insert(String::from("second"), String::from("2nd"));
m.insert(String::from("third"), String::from("3rd"));
m.insert(String::from("fourth"), String::from("4th"));
m.insert(String::from("fifth"), String::from("5th"));
m.insert(String::from("sixth"), String::from("6th"));
m.insert(String::from("seventh"), String::from("7th"));
m.insert(String::from("eighth"), String::from("8th"));
m.insert(String::from("ninth"), String::from("9th"));
m
};
}
match NUMERIC.captures(name.display.as_str()) {
Some(capture) => {
let tenth = match NUMERIC_MAP.get(&capture["tenth"].to_lowercase()) {
None => { return Vec::new(); },
Some(tenth) => tenth
};
let nth = match NUMERIC_MAP.get(&capture["nth"].to_lowercase()) {
None => { return Vec::new(); },
Some(nth) => nth
};
vec![Name::new(format!("{}{}{}{}", &capture["pre"], tenth, nth, &capture["post"]), -1, &context)]
},
_ => Vec::new()
}
}
///
/// Generate synonyms for name like "CR 123" => "County Road 123"
///
pub fn syn_us_cr(name: &Name, context: &Context) -> Vec<Name> {
lazy_static! {
static ref US_CR: Regex = Regex::new(r"(?i)^(CR |County Road )(?P<num>[0-9]+)$").unwrap();
}
let cr: String = match US_CR.captures(name.display.as_str()) {
Some(capture) => capture["num"].to_string(),
None => { return Vec::new(); }
};
    // Note: ensure capacity is increased if additional permutations are added below
let mut syns: Vec<Name> = Vec::with_capacity(2);
// CR 123
syns.push(Name::new(format!("CR {}", &cr), -1, &context));
// County Road 123 (Display Form)
if name.priority > 0 {
syns.push(Name::new(format!("County Road {}", &cr), 0, &context));
} else {
syns.push(Name::new(format!("County Road {}", &cr), 1, &context));
}
syns
}
///
/// Generate synonyms for names like "US 81" => "US Route 81"
///
pub fn syn_us_hwy(name: &Name, context: &Context) -> Vec<Name> {
lazy_static! {
static ref US_HWY: Regex = Regex::new(r"(?i)^(U\.?S\.?|United States)(\s|-)(Rte |Route |Hwy |Highway )?(?P<num>[0-9]+)$").unwrap();
}
let highway: String = match US_HWY.captures(name.display.as_str()) {
Some(capture) => capture["num"].to_string(),
None => { return Vec::new(); }
};
    // Note: ensure capacity is increased if additional permutations are added below
let mut syns: Vec<Name> = Vec::with_capacity(5);
// US 81
syns.push(Name::new(format!("US {}", &highway), -1, &context));
//US Route 81 (Display Form)
if name.priority > 0 {
syns.push(Name::new(format!("US Route {}", &highway), 0, &context));
} else {
syns.push(Name::new(format!("US Route {}", &highway), 1, &context));
}
//US Highway 81
syns.push(Name::new(format!("US Highway {}", &highway), -1, &context));
//United States Route 81
syns.push(Name::new(format!("United States Route {}", &highway), -1, &context));
//United States Highway 81
syns.push(Name::new(format!("United States Highway {}", &highway), -1, &context));
syns
}
///
/// Replace names like "NC 1 => North Carolina Highway 1"
/// Replace names like "State Highway 1 => NC 1, North Carolina Highway 1
///
pub fn syn_state_hwy(name: &Name, context: &Context) -> Vec<Name> {
let region = match context.region {
Some(ref region) => region,
None => { return Vec::new() }
};
let region_name = match context.region_name() {
Some(region) => region,
None => { return Vec::new() }
};
    // The goal is to normalize all the input highways to "<state> ####" and then build the synonym matrix
lazy_static! {
static ref PRE_HWY: Regex = Regex::new(r"(?ix)^
(?P<prefix>
# State 123
# State Highway 123
(State\s(highway|hwy|route|rte)\s)
# North Carolina 123
# North Carolina Highway 123
|((Alabama|Alaska|Arizona|Arkansas|California|Colorado|Connecticut|Delaware|Florida|Georgia|Hawaii|Idaho|Illinois|Indiana|Iowa|Kansas|Kentucky|Louisiana|Maine|Maryland|Massachusetts|Michigan|Minnesota|Mississippi|Missouri|Montana|Nebraska|Nevada|New\sHampshire|New\sJersey|New\sMexico|New\sYork|North\sCarolina|North\sDakota|Ohio|Oklahoma|Oregon|Pennsylvania|Rhode\sIsland|South\sCarolina|South\sDakota|Tennessee|Texas|Utah|Vermont|Virginia|Washington|West\sVirginia|Wisconsin|Wyoming|District\sof\sColumbia|American\sSamoa|Guam|Northern\sMariana\sIslands|Puerto\sRico|United\sStates\sMinor\sOutlying\sIslands|Virgin\sIslands
)\s((highway|hwy|route|rte)\s)?)
# Highway 123
|((highway|hwy|route|rte)\s)
# US-AK 123
# US AK Highway 123
# AK 123
# AK Highway 123
|((US[-\s])?(AL|AK|AZ|AR|CA|CO|CT|DE|FL|GA|HI|ID|IL|IN|IA|KS|KY|LA|ME|MD|MA|MI|MN|MS|MO|MT|NE|NV|NH|NJ|NM|NY|NC|ND|OH|OK|OR|PA|RI|SC|SD|TN|TX|UT|VT|VA|WA|WV|WI|WY|DC|AS|GU|MP|PR|UM|VI|SR)[\s-]((highway|hwy|route|rte)\s)?)
)
(?P<num>\d+)
(\shighway$|\shwy$|\sroute$|\srte$)?
$
").unwrap();
static ref POST_HWY: Regex = Regex::new(r"(?i)^(highway|hwy|route|rte)\s(?P<num>\d+)$").unwrap();
}
let highway: String = match PRE_HWY.captures(name.display.as_str()) {
Some(capture) => capture["num"].to_string(),
None => match POST_HWY.captures(name.display.as_str()) {
Some(capture) => capture["num"].to_string(),
None => { return Vec::new(); }
}
};
    // Note: ensure capacity is increased if additional permutations are added below
let mut syns: Vec<Name> = Vec::with_capacity(7);
// NC 123 Highway
syns.push(Name::new(format!("{} {} Highway", region.to_uppercase(), &highway), -2, &context));
// NC 123
syns.push(Name::new(format!("{} {}", region.to_uppercase(), &highway), -1, &context));
// Highway 123
syns.push(Name::new(format!("Highway {}", &highway), -2, &context));
// SR 123 (State Route)
syns.push(Name::new(format!("SR {}", &highway), -1, &context));
//State Highway 123
syns.push(Name::new(format!("State Highway {}", &highway), -1, &context));
//State Route 123
syns.push(Name::new(format!("State Route {}", &highway), -1, &context));
// <State> Highway 123 (Display Form)
if name.priority > 0 {
syns.push(Name::new(format!("{} Highway {}", ®ion_name, &highway), 0, &context));
} else {
syns.push(Name::new(format!("{} Highway {}", ®ion_name, &highway), 1, &context));
}
syns
}
#[cfg(test)]
mod tests {
use super::*;
use crate::{Name, Context, Tokens};
#[test]
fn test_distance() {
assert_eq!(distance(&String::from("a"), &String::from("b")), 1);
assert_eq!(distance(&String::from("ab"), &String::from("ac")), 1);
assert_eq!(distance(&String::from("ac"), &String::from("bc")), 1);
assert_eq!(distance(&String::from("abc"), &String::from("axc")), 1);
assert_eq!(distance(&String::from("xabxcdxxefxgx"), &String::from("1ab2cd34ef5g6")), 6);
assert_eq!(distance(&String::from("xabxcdxxefxgx"), &String::from("abcdefg")), 6);
assert_eq!(distance(&String::from("javawasneat"), &String::from("scalaisgreat")), 7);
assert_eq!(distance(&String::from("example"), &String::from("samples")), 3);
assert_eq!(distance(&String::from("forward"), &String::from("drawrof")), 6);
assert_eq!(distance(&String::from("sturgeon"), &String::from("urgently")), 6 );
assert_eq!(distance(&String::from("levenshtein"), &String::from("frankenstein")), 6 );
assert_eq!(distance(&String::from("distance"), &String::from("difference")), 5 );
assert_eq!(distance(&String::from("distance"), &String::from("eistancd")), 2 );
assert_eq!(distance(&String::from("你好世界"), &String::from("你好")), 2);
assert_eq!(distance(&String::from("因為我是中國人所以我會說中文"), &String::from("因為我是英國人所以我會說英文")), 2);
assert_eq!(distance(
&String::from("Morbi interdum ultricies neque varius condimentum. Donec volutpat turpis interdum metus ultricies vulputate. Duis ultricies rhoncus sapien, sit amet fermentum risus imperdiet vitae. Ut et lectus"),
&String::from("Duis erat dolor, cursus in tincidunt a, lobortis in odio. Cras magna sem, pharetra et iaculis quis, faucibus quis tellus. Suspendisse dapibus sapien in justo cursus")
), 143);
}
#[test]
fn test_is_drivethrough() {
let context = Context::new(String::from("us"), None, Tokens::new(HashMap::new()));
assert_eq!(is_drivethrough(
&String::from("Main St NE"),
&context
), false);
assert_eq!(is_drivethrough(
&String::from("McDonalds einfahrt"),
&context
), false);
let context = Context::new(String::from("de"), None, Tokens::new(HashMap::new()));
assert_eq!(is_drivethrough(
&String::from("McDonalds einfahrt"),
&context
), true);
assert_eq!(is_drivethrough(
&String::from("Burger King Drive-through"),
&context
), true);
assert_eq!(is_drivethrough(
&String::from("McDonalds Drivethrough"),
&context
), true);
assert_eq!(is_drivethrough(
&String::from("McDonalds Drive through"),
&context
), true);
assert_eq!(is_drivethrough(
&String::from("McDonalds Drivethru"),
&context
), true);
}
#[test]
fn test_syn_us_cr() {
let context = Context::new(String::from("us"), None, Tokens::new(HashMap::new()));
assert_eq!(syn_us_cr(&Name::new(String::from(""), 0, &context), &context), vec![]);
let results = vec![
Name::new(String::from("CR 123"), -1, &context),
Name::new(String::from("County Road 123"), 1, &context),
];
assert_eq!(syn_us_cr(&Name::new(String::from("County Road 123"), 0, &context), &context), results);
assert_eq!(syn_us_cr(&Name::new(String::from("CR 123"), 0, &context), &context), results);
}
#[test]
fn test_syn_ca_french() {
let context = Context::new(String::from("ca"), Some(String::from("qc")), Tokens::new(HashMap::new()));
assert_eq!(syn_ca_french(&Name::new(String::from(""), 0, &context), &context), vec![]);
// Successful Replacements
assert_eq!(syn_ca_french(&Name::new(String::from("r principale"), 0, &context), &context), vec![
Name::new(String::from("principale"), -1, &context)
]);
// Ignored Replacements
assert_eq!(syn_ca_french(&Name::new(String::from("r des peupliers"), 0, &context), &context), vec![ ]);
assert_eq!(syn_ca_french(&Name::new(String::from("ch des hauteurs"), 0, &context), &context), vec![ ]);
assert_eq!(syn_ca_french(&Name::new(String::from("r du blizzard"), 0, &context), &context), vec![ ]);
assert_eq!(syn_ca_french(&Name::new(String::from("bd de lhotel de vl"), 0, &context), &context), vec![ ]);
}
#[test]
fn test_syn_ca_hwy() {
let context = Context::new(String::from("ca"), Some(String::from("on")), Tokens::new(HashMap::new()));
assert_eq!(syn_ca_hwy(&Name::new(String::from(""), 0, &context), &context), vec![]);
let results = vec![
Name::new(String::from("Highway 101"), -1, &context),
Name::new(String::from("Route 101"), -1, &context),
Name::new(String::from("ON 101"), -2, &context),
Name::new(String::from("Ontario Route 101"), 1, &context)
];
assert_eq!(syn_ca_hwy(&Name::new(String::from("101"), 0, &context), &context), results);
assert_eq!(syn_ca_hwy(&Name::new(String::from("ON-101"), 0, &context), &context), results);
assert_eq!(syn_ca_hwy(&Name::new(String::from("Kings's Highway 101"), 0, &context), &context), results);
assert_eq!(syn_ca_hwy(&Name::new(String::from("Highway 101"), 0, &context), &context), results);
assert_eq!(syn_ca_hwy(&Name::new(String::from("Route 101"), 0, &context), &context), results);
assert_eq!(syn_ca_hwy(&Name::new(String::from("Ontario Highway 101"), 0, &context), &context), results);
}
#[test]
fn test_syn_us_hwy() {
let context = Context::new(String::from("us"), None, Tokens::new(HashMap::new()));
assert_eq!(syn_us_hwy(&Name::new(String::from(""), 0, &context), &context), vec![]);
let results = vec![
Name::new(String::from("US 81"), -1, &context),
Name::new(String::from("US Route 81"), 1, &context),
Name::new(String::from("US Highway 81"), -1, &context),
Name::new(String::from("United States Route 81"), -1, &context),
Name::new(String::from("United States Highway 81"), -1, &context),
];
assert_eq!(syn_us_hwy(&Name::new(String::from("us-81"), 0, &context), &context), results);
assert_eq!(syn_us_hwy(&Name::new(String::from("US 81"), 0, &context), &context), results);
assert_eq!(syn_us_hwy(&Name::new(String::from("U.S. Route 81"), 0, &context), &context), results);
assert_eq!(syn_us_hwy(&Name::new(String::from("US Route 81"), 0, &context), &context), results);
assert_eq!(syn_us_hwy(&Name::new(String::from("US Rte 81"), 0, &context), &context), results);
assert_eq!(syn_us_hwy(&Name::new(String::from("US Hwy 81"), 0, &context), &context), results);
assert_eq!(syn_us_hwy(&Name::new(String::from("US Highway 81"), 0, &context), &context), results);
assert_eq!(syn_us_hwy(&Name::new(String::from("United States 81"), 0, &context), &context), results);
assert_eq!(syn_us_hwy(&Name::new(String::from("United States Route 81"), 0, &context), &context), results);
assert_eq!(syn_us_hwy(&Name::new(String::from("United States Highway 81"), 0, &context), &context), results);
assert_eq!(syn_us_hwy(&Name::new(String::from("United States Hwy 81"), 0, &context), &context), results);
}
#[test]
fn test_syn_state_highway() {
let context = Context::new(String::from("us"), Some(String::from("PA")), Tokens::new(HashMap::new()));
assert_eq!(
syn_state_hwy(
&Name::new(String::from(""), 0, &context),
&context
), vec![]
);
let results = vec![
Name::new(String::from("PA 123 Highway"), -2, &context),
Name::new(String::from("PA 123"), -1, &context),
Name::new(String::from("Highway 123"), -2, &context),
Name::new(String::from("SR 123"), -1, &context),
Name::new(String::from("State Highway 123"), -1, &context),
Name::new(String::from("State Route 123"), -1, &context),
Name::new(String::from("Pennsylvania Highway 123"), 1, &context)
];
assert_eq!(
syn_state_hwy(
&Name::new(String::from("State Highway 123"), 0, &context),
&context
), results
);
assert_eq!(
syn_state_hwy(
&Name::new(String::from("Highway 123"), 0, &context),
&context
), results
);
assert_eq!(
syn_state_hwy(
&Name::new(String::from("Hwy 123"), 0, &context),
&context
), results
);
assert_eq!(
syn_state_hwy(
&Name::new(String::from("Pennsylvania Highway 123"), 0, &context),
&context
), results
);
assert_eq!(
syn_state_hwy(
&Name::new(String::from("Pennsylvania Route 123"), 0, &context),
&context
), results
);
assert_eq!(
syn_state_hwy(
&Name::new(String::from("PA 123"), 0, &context),
&context
), results
);
assert_eq!(
syn_state_hwy(
&Name::new(String::from("PA-123"), 0, &context),
&context
), results
);
assert_eq!(
syn_state_hwy(
&Name::new(String::from("US-PA-123"), 0, &context),
&context
), results
);
}
#[test]
fn test_syn_number_suffix() {
let context = Context::new(String::from("us"), None, Tokens::new(HashMap::new()));
assert_eq!(
syn_number_suffix(&Name::new(String::from("1st Avenue"), 0, &context), &context),
Vec::new()
);
assert_eq!(
syn_number_suffix(&Name::new(String::from("1 Avenue"), 0, &context), &context),
vec![Name::new(String::from("1st Avenue"), -1, &context)]
);
assert_eq!(
syn_number_suffix(&Name::new(String::from("2 Avenue"), 0, &context), &context),
vec![Name::new(String::from("2nd Avenue"), -1, &context)]
);
assert_eq!(
syn_number_suffix(&Name::new(String::from("3 Street"), 0, &context), &context),
vec![Name::new(String::from("3rd Street"), -1, &context)]
);
assert_eq!(
syn_number_suffix(&Name::new(String::from("4 Street"), 0, &context), &context),
vec![Name::new(String::from("4th Street"), -1, &context)]
);
assert_eq!(
syn_number_suffix(&Name::new(String::from("20 Street"), 0, &context), &context),
vec![Name::new(String::from("20th Street"), -1, &context)]
);
assert_eq!(
syn_number_suffix(&Name::new(String::from("21 Street"), 0, &context), &context),
vec![Name::new(String::from("21st Street"), -1, &context)]
);
}
#[test]
fn test_syn_written_numeric() {
let context = Context::new(String::from("us"), None, Tokens::new(HashMap::new()));
assert_eq!(
syn_written_numeric(&Name::new(String::from("Twenty-third Avenue NW"), 0, &context), &context),
vec![Name::new(String::from("23rd Avenue NW"), -1, &context)]
);
assert_eq!(
syn_written_numeric(&Name::new(String::from("North twenty-Third Avenue"), 0, &context), &context),
vec![Name::new(String::from("North 23rd Avenue"), -1, &context)]
);
assert_eq!(
syn_written_numeric(&Name::new(String::from("TWENTY-THIRD Avenue"), 0, &context), &context),
vec![Name::new(String::from("23rd Avenue"), -1, &context)]
);
}
#[test]
fn test_str_remove_octo() {
assert_eq!(
str_remove_octo(&String::from("Highway #12 West")),
String::from("Highway 12 West")
);
assert_eq!(
str_remove_octo(&String::from("RTe #1")),
String::from("RTe 1")
);
}
}
| true |
357ae406a642c6bd2118539f1c5a1640460e8693
|
Rust
|
Bietola/alba
|
/src/main.rs
|
UTF-8
| 496 | 2.921875 | 3 |
[] |
no_license
|
use std::io;
use std::error::Error;
fn main() -> std::result::Result<(), Box<dyn Error>> {
    // Create the line editor once, outside the loop, so history persists
    // between iterations.
    let mut rl = rustyline::Editor::<()>::with_config(
        rustyline::Config::builder()
            .build()
    );
    // Loop.
    loop {
        // Read.
        let line = rl.readline(">> ")?;
        rl.add_history_entry(line.clone());
// Eval
if line == "quit" {
break;
}
// Print.
println!("{}", line);
}
Ok(())
}
| true |
c8446ad7a9dfbc5cb0ffff76b25344a2a8925ffc
|
Rust
|
adamnemecek/tuix
|
/src/state/style/flexbox.rs
|
UTF-8
| 1,490 | 2.828125 | 3 |
[
"MIT"
] |
permissive
|
use crate::entity::Entity;
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum FlexDirection {
Row,
//RowReverse, //TODO
Column,
//ColumnReverse, //TODO
}
impl Default for FlexDirection {
fn default() -> Self {
FlexDirection::Column
}
}
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum FlexWrap {
NoWrap,
//Wrap, //TODO
//WrapReverse, //TODO
}
impl Default for FlexWrap {
fn default() -> Self {
FlexWrap::NoWrap
}
}
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum JustifyContent {
FlexStart,
FlexEnd,
Center,
Stretch,
SpaceBetween,
SpaceAround,
SpaceEvenly,
}
impl Default for JustifyContent {
fn default() -> Self {
JustifyContent::FlexStart
}
}
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum AlignItems {
//None,
FlexStart,
FlexEnd,
Center,
Stretch,
//Baseline, //TODO
}
impl Default for AlignItems {
fn default() -> Self {
AlignItems::Stretch
}
}
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum AlignSelf {
FlexStart,
FlexEnd,
Center,
Stretch,
}
impl Default for AlignSelf {
fn default() -> Self {
AlignSelf::Stretch
}
}
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum AlignContent {
FlexStart,
FlexEnd,
Center,
Stretch,
SpaceBetween,
SpaceAround,
}
impl Default for AlignContent {
fn default() -> AlignContent {
AlignContent::Stretch
}
}
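// Illustrative sketch (not part of the original file): the layout enums default
// to a column flexbox that stretches its children.
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn defaults_match_a_stretched_column() {
        assert_eq!(FlexDirection::default(), FlexDirection::Column);
        assert_eq!(FlexWrap::default(), FlexWrap::NoWrap);
        assert_eq!(JustifyContent::default(), JustifyContent::FlexStart);
        assert_eq!(AlignItems::default(), AlignItems::Stretch);
        assert_eq!(AlignSelf::default(), AlignSelf::Stretch);
        assert_eq!(AlignContent::default(), AlignContent::Stretch);
    }
}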
| true |
4b591fa9a1bc926fbc517dff72d576b1aa4f2ad4
|
Rust
|
sile/atomic_immut
|
/benches/std_atomic_immut.rs
|
UTF-8
| 1,285 | 3.078125 | 3 |
[
"MIT"
] |
permissive
|
use std::mem;
use std::ptr;
use std::sync::atomic::{AtomicPtr, Ordering};
use std::sync::{Arc, RwLock};
#[derive(Debug)]
pub struct StdAtomicImmut<T> {
rwlock: RwLock<AtomicPtr<T>>,
}
impl<T> StdAtomicImmut<T> {
pub fn new(value: T) -> Self {
let ptr = AtomicPtr::new(to_arc_ptr(value));
let rwlock = RwLock::new(ptr);
StdAtomicImmut { rwlock }
}
pub fn load(&self) -> Arc<T> {
let ptr = self.rwlock.read().unwrap();
let raw = ptr.load(Ordering::SeqCst);
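        // `Arc::from_raw` takes back one strong reference; clone-and-forget
        // restores the count so the pointer stored in `self` stays valid.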
let value = unsafe { Arc::from_raw(raw) };
mem::forget(Arc::clone(&value));
value
}
pub fn store(&self, value: T) {
self.swap(value);
}
pub fn swap(&self, value: T) -> Arc<T> {
let new = to_arc_ptr(value);
let old = {
let ptr = self.rwlock.write().unwrap();
ptr.swap(new, Ordering::SeqCst)
};
unsafe { Arc::from_raw(old) }
}
}
impl<T> Drop for StdAtomicImmut<T> {
fn drop(&mut self) {
let mut ptr = self.rwlock.write().unwrap();
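        // Swap in a null pointer and reconstruct the `Arc` so its final strong
        // count is released when it goes out of scope.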
let raw = mem::replace(ptr.get_mut(), ptr::null_mut());
let _ = unsafe { Arc::from_raw(raw) };
}
}
fn to_arc_ptr<T>(value: T) -> *mut T {
let boxed = Arc::new(value);
Arc::into_raw(boxed) as _
}
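// Illustrative sketch (not part of the original benchmark helper): `load`
// hands out an `Arc` snapshot while `swap` returns the previously stored value.
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn load_and_swap() {
        let immut = StdAtomicImmut::new(1);
        let before = immut.load();
        assert_eq!(*before, 1);
        let old = immut.swap(2);
        assert_eq!(*old, 1);
        assert_eq!(*immut.load(), 2);
        // The snapshot taken before the swap is still valid.
        assert_eq!(*before, 1);
    }
}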
| true |
57a64315687f73aab6245a6dbf6ad7530b717b8a
|
Rust
|
IgorPerikov/tg-chat-stats-rs
|
/src/main.rs
|
UTF-8
| 952 | 2.734375 | 3 |
[] |
no_license
|
use itertools::Itertools;
use std::fs;
use tg_chat_stats::{History, Chat};
use crate::util::count_letters_by_actor;
mod util;
// TODO: print all chat names in a separate cli command
// TODO: support chats exclusion/inclusion
fn main() {
let stats_file_path = "result.json"; // TODO: cli argument
let example_json = fs::read_to_string(stats_file_path).unwrap();
let history: History = serde_json::from_str(&example_json).unwrap();
history
.get_chats()
.iter()
.for_each(|chat| analyze_chat(chat));
}
fn analyze_chat(chat: &Chat) {
let actor_to_letters = count_letters_by_actor(chat);
println!("Chat: {}", chat.get_name());
actor_to_letters
.iter()
.sorted_by(|a, b| Ord::cmp((*a).1, (*b).1))
.rev()
.take(20) // TODO: cli parameter
.enumerate()
.for_each(|a| println!("№{}: {}, letters: {}", a.0 + 1, (a.1).0, (a.1).1));
println!("-------");
}
| true |
bb0b3dd95fbb17c982f7c711a4c0e0f213d6804d
|
Rust
|
rrybarczyk/intermodal
|
/src/metainfo.rs
|
UTF-8
| 11,087 | 2.609375 | 3 |
[
"CC0-1.0"
] |
permissive
|
use crate::common::*;
#[derive(Deserialize, Serialize, Debug, PartialEq, Clone)]
pub(crate) struct Metainfo {
pub(crate) announce: String,
#[serde(
rename = "announce-list",
skip_serializing_if = "Option::is_none",
default,
with = "unwrap_or_skip"
)]
pub(crate) announce_list: Option<Vec<Vec<String>>>,
#[serde(
skip_serializing_if = "Option::is_none",
default,
with = "unwrap_or_skip"
)]
pub(crate) comment: Option<String>,
#[serde(
rename = "created by",
skip_serializing_if = "Option::is_none",
default,
with = "unwrap_or_skip"
)]
pub(crate) created_by: Option<String>,
#[serde(
rename = "creation date",
skip_serializing_if = "Option::is_none",
default,
with = "unwrap_or_skip"
)]
pub(crate) creation_date: Option<u64>,
#[serde(
skip_serializing_if = "Option::is_none",
default,
with = "unwrap_or_skip"
)]
pub(crate) encoding: Option<String>,
pub(crate) info: Info,
#[serde(
skip_serializing_if = "Option::is_none",
default,
with = "unwrap_or_skip"
)]
pub(crate) nodes: Option<Vec<Node>>,
}
impl Metainfo {
pub(crate) fn load(path: impl AsRef<Path>) -> Result<Metainfo, Error> {
let path = path.as_ref();
let bytes = fs::read(path).context(error::Filesystem { path })?;
Self::deserialize(path, &bytes)
}
pub(crate) fn deserialize(path: impl AsRef<Path>, bytes: &[u8]) -> Result<Metainfo, Error> {
let path = path.as_ref();
let metainfo = bendy::serde::de::from_bytes(&bytes).context(error::MetainfoLoad { path })?;
Ok(metainfo)
}
pub(crate) fn serialize(&self) -> Result<Vec<u8>, Error> {
bendy::serde::ser::to_bytes(&self).context(error::MetainfoSerialize)
}
#[cfg(test)]
pub(crate) fn dump(&self, path: impl AsRef<Path>) -> Result<(), Error> {
let path = path.as_ref();
let bencode = bendy::serde::ser::to_bytes(&self).context(error::MetainfoSerialize)?;
fs::write(path, &bencode).context(error::Filesystem { path })?;
Ok(())
}
#[cfg(test)]
pub(crate) fn from_bytes(bytes: &[u8]) -> Metainfo {
Self::deserialize("<TEST>", bytes).unwrap()
}
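  /// Iterates over `(path, length, md5sum)` for each file in the torrent: in
  /// single-file mode the path is `base` itself, in multi-file mode each file
  /// path is resolved relative to `base`.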
pub(crate) fn files<'a>(
&'a self,
base: &'a Path,
) -> Box<dyn Iterator<Item = (PathBuf, Bytes, Option<Md5Digest>)> + 'a> {
match &self.info.mode {
Mode::Single { length, md5sum } => Box::new(iter::once((base.to_owned(), *length, *md5sum))),
Mode::Multiple { files } => {
let base = base.to_owned();
Box::new(
files
.iter()
.map(move |file| (file.path.absolute(&base), file.length, file.md5sum)),
)
}
}
}
pub(crate) fn verify(&self, base: &Path) -> Result<Status> {
Verifier::verify(self, base)
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn round_trip_single() {
let value = Metainfo {
announce: "announce".into(),
announce_list: Some(vec![vec!["announce".into(), "b".into()], vec!["c".into()]]),
comment: Some("comment".into()),
created_by: Some("created by".into()),
creation_date: Some(1),
encoding: Some("UTF-8".into()),
nodes: Some(vec!["x:12".parse().unwrap(), "1.1.1.1:16".parse().unwrap()]),
info: Info {
private: Some(true),
piece_length: Bytes(16 * 1024),
source: Some("source".into()),
name: "foo".into(),
pieces: PieceList::from_pieces(&["abc"]),
mode: Mode::Single {
length: Bytes(20),
md5sum: None,
},
},
};
let bencode = bendy::serde::ser::to_bytes(&value).unwrap();
let deserialized = bendy::serde::de::from_bytes(&bencode).unwrap();
assert_eq!(value, deserialized);
}
#[test]
fn round_trip_multiple() {
let value = Metainfo {
announce: "announce".into(),
announce_list: Some(vec![vec!["announce".into(), "b".into()], vec!["c".into()]]),
nodes: Some(vec!["x:12".parse().unwrap(), "1.1.1.1:16".parse().unwrap()]),
comment: Some("comment".into()),
created_by: Some("created by".into()),
creation_date: Some(1),
encoding: Some("UTF-8".into()),
info: Info {
private: Some(true),
piece_length: Bytes(16 * 1024),
source: Some("source".into()),
name: "foo".into(),
pieces: PieceList::from_pieces(&["abc"]),
mode: Mode::Multiple {
files: vec![FileInfo {
length: Bytes(10),
path: FilePath::from_components(&["foo", "bar"]),
md5sum: Some(Md5Digest::from_hex("000102030405060708090a0b0c0d0e0f")),
}],
},
},
};
let bencode = bendy::serde::ser::to_bytes(&value).unwrap();
let deserialized = bendy::serde::de::from_bytes(&bencode).unwrap();
assert_eq!(value, deserialized);
}
fn representation(value: Metainfo, want: &str) {
let have = value.serialize().unwrap();
if have != want.as_bytes() {
eprintln!("have:");
eprintln!("{}", String::from_utf8_lossy(&have));
eprintln!("want:");
eprintln!("{}", want);
panic!("Unexpected representation...");
}
}
#[test]
fn bencode_representation_single_some() {
let value = Metainfo {
announce: "ANNOUNCE".into(),
announce_list: Some(vec![vec!["A".into(), "B".into()], vec!["C".into()]]),
nodes: Some(vec![
"domain:1".parse().unwrap(),
"1.1.1.1:16".parse().unwrap(),
"[1234:5678:9abc:def0:1234:5678:9abc:def0]:65000"
.parse()
.unwrap(),
]),
comment: Some("COMMENT".into()),
created_by: Some("CREATED BY".into()),
creation_date: Some(0),
encoding: Some("UTF-8".into()),
info: Info {
private: Some(true),
piece_length: Bytes(1024),
source: Some("SOURCE".into()),
name: "NAME".into(),
pieces: PieceList::from_pieces(&["fae50"]),
mode: Mode::Single {
length: Bytes(5),
md5sum: Some(Md5Digest::from_hex("000102030405060708090a0b0c0d0e0f")),
},
},
};
#[rustfmt::skip]
let want = concat!(
"d",
"8:announce", "8:ANNOUNCE",
"13:announce-list", "l",
"l", "1:A", "1:B", "e",
"l", "1:C", "e",
"e",
"7:comment", "7:COMMENT",
"10:created by", "10:CREATED BY",
"13:creation date", "i0e",
"8:encoding", "5:UTF-8",
"4:info", "d",
"6:length", "i5e",
"6:md5sum", "32:000102030405060708090a0b0c0d0e0f",
"4:name", "4:NAME",
"12:piece length", "i1024e",
"6:pieces", "20:8,OS7d玤{Qk!Mk",
"7:private", "i1e",
"6:source", "6:SOURCE",
"e",
"5:nodes", "l",
"l", "6:domain", "i1e", "e",
"l", "7:1.1.1.1", "i16e", "e",
"l", "39:1234:5678:9abc:def0:1234:5678:9abc:def0", "i65000e", "e",
"e",
"e"
);
representation(value, want);
}
#[test]
fn bencode_representation_single_none() {
let value = Metainfo {
announce: "ANNOUNCE".into(),
announce_list: None,
nodes: None,
comment: None,
created_by: None,
creation_date: None,
encoding: None,
info: Info {
private: None,
piece_length: Bytes(1024),
source: None,
name: "NAME".into(),
pieces: PieceList::from_pieces(&["fae50"]),
mode: Mode::Single {
length: Bytes(5),
md5sum: None,
},
},
};
#[rustfmt::skip]
let want = concat!(
"d",
"8:announce", "8:ANNOUNCE",
"4:info", "d",
"6:length", "i5e",
"4:name", "4:NAME",
"12:piece length", "i1024e",
"6:pieces", "20:8,OS7d玤{Qk!Mk",
"e",
"e"
);
representation(value, want);
}
#[test]
fn bencode_representation_multiple_some() {
let value = Metainfo {
announce: "ANNOUNCE".into(),
announce_list: None,
nodes: None,
comment: None,
created_by: None,
creation_date: None,
encoding: None,
info: Info {
private: None,
piece_length: Bytes(1024),
source: None,
name: "NAME".into(),
pieces: PieceList::from_pieces(&["fae50"]),
mode: Mode::Multiple {
files: vec![FileInfo {
length: Bytes(1024),
md5sum: Some(Md5Digest::from_hex("000102030405060708090a0b0c0d0e0f")),
path: FilePath::from_components(&["a", "b"]),
}],
},
},
};
#[rustfmt::skip]
let want = concat!(
"d",
"8:announce", "8:ANNOUNCE",
"4:info", "d",
"5:files", "l",
"d",
"6:length", "i1024e",
"6:md5sum", "32:000102030405060708090a0b0c0d0e0f",
"4:path", "l", "1:a", "1:b", "e",
"e",
"e",
"4:name", "4:NAME",
"12:piece length", "i1024e",
"6:pieces", "20:8,OS7d玤{Qk!Mk",
"e",
"e"
);
representation(value, want);
}
#[test]
fn bencode_representation_multiple_none() {
let value = Metainfo {
announce: "ANNOUNCE".into(),
announce_list: None,
nodes: None,
comment: None,
created_by: None,
creation_date: None,
encoding: None,
info: Info {
private: None,
piece_length: Bytes(1024),
source: None,
name: "NAME".into(),
pieces: PieceList::from_pieces(&["fae50"]),
mode: Mode::Multiple {
files: vec![FileInfo {
length: Bytes(1024),
md5sum: None,
path: FilePath::from_components(&["a", "b"]),
}],
},
},
};
#[rustfmt::skip]
let want = concat!(
"d",
"8:announce", "8:ANNOUNCE",
"4:info", "d",
"5:files", "l",
"d",
"6:length", "i1024e",
"4:path", "l", "1:a", "1:b", "e",
"e",
"e",
"4:name", "4:NAME",
"12:piece length", "i1024e",
"6:pieces", "20:8,OS7d玤{Qk!Mk",
"e",
"e"
);
representation(value, want);
}
#[test]
fn private_false() {
let value = Metainfo {
announce: "ANNOUNCE".into(),
announce_list: None,
nodes: None,
comment: None,
created_by: None,
creation_date: None,
encoding: None,
info: Info {
private: Some(false),
piece_length: Bytes(1024),
source: None,
name: "NAME".into(),
pieces: PieceList::from_pieces(&["fae50"]),
mode: Mode::Single {
length: Bytes(5),
md5sum: None,
},
},
};
#[rustfmt::skip]
let want = concat!(
"d",
"8:announce", "8:ANNOUNCE",
"4:info", "d",
"6:length", "i5e",
"4:name", "4:NAME",
"12:piece length", "i1024e",
"6:pieces", "20:8,OS7d玤{Qk!Mk",
"7:private", "i0e",
"e",
"e"
);
representation(value, want);
}
}
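// --- Illustrative usage sketch (editor's addition, not part of the original file) ---
// The `verify` helper above delegates to `Verifier::verify`, which, per its
// signature, checks on-disk content under `base` against this metainfo.
// `metainfo` and the download root below are assumptions for the example:
//
// let status = metainfo.verify(Path::new("/downloads/foo"))?;
// // inspect `status` to report missing or corrupt pieces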
| true |
b70ec53c5fc8a4cb7670770694295ee46fcec5b1
|
Rust
|
Michael-F-Bryan/dxf-rs
|
/src/dxf_error.rs
|
UTF-8
| 4,316 | 2.59375 | 3 |
[
"Apache-2.0"
] |
permissive
|
// Copyright (c) IxMilia. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
use std::error;
use std::fmt;
use std::io;
use std::num;
use ::image;
use CodePair;
#[derive(Debug)]
pub enum DxfError {
IoError(io::Error),
ImageError(image::ImageError),
ParseFloatError(num::ParseFloatError),
ParseIntError(num::ParseIntError),
ParseError,
UnexpectedCode(i32),
UnexpectedCodePair(CodePair, String),
UnexpectedByte(u8),
UnexpectedEndOfInput,
UnexpectedEnumValue,
UnexpectedEmptySet,
ExpectedTableType,
WrongValueType,
InvalidBinaryFile,
WrongItemType,
}
impl From<io::Error> for DxfError {
fn from(ioe: io::Error) -> DxfError {
DxfError::IoError(ioe)
}
}
impl From<::image::ImageError> for DxfError {
fn from(ie: ::image::ImageError) -> DxfError {
DxfError::ImageError(ie)
}
}
impl fmt::Display for DxfError {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
match self {
&DxfError::IoError(ref e) => write!(formatter, "{}", e),
&DxfError::ImageError(ref e) => write!(formatter, "{}", e),
&DxfError::ParseFloatError(ref e) => write!(formatter, "{}", e),
&DxfError::ParseIntError(ref e) => write!(formatter, "{}", e),
&DxfError::ParseError => write!(formatter, "there was a general parsing error"),
&DxfError::UnexpectedCode(c) => write!(formatter, "an unexpected code '{}' was encountered", c),
&DxfError::UnexpectedCodePair(ref cp, ref s) => write!(formatter, "the code pair '{:?}' was not expected at this time: {}", cp, s),
&DxfError::UnexpectedByte(ref b) => write!(formatter, "the byte '0x{:02x}' was not expected at this time", b),
&DxfError::UnexpectedEndOfInput => write!(formatter, "the input unexpectedly ended before the drawing was completely loaded"),
&DxfError::UnexpectedEnumValue => write!(formatter, "the specified enum value does not fall into the expected range"),
&DxfError::UnexpectedEmptySet => write!(formatter, "the set was not expected to be empty"),
&DxfError::ExpectedTableType => write!(formatter, "a 2/<table-type> code pair was expected"),
&DxfError::WrongValueType => write!(formatter, "the CodePairValue does not contain the requested type"),
&DxfError::InvalidBinaryFile => write!(formatter, "the binary file is invalid"),
&DxfError::WrongItemType => write!(formatter, "the specified item type is not correct"),
}
}
}
impl error::Error for DxfError {
fn description(&self) -> &str {
match self {
&DxfError::IoError(ref e) => e.description(),
&DxfError::ImageError(ref e) => e.description(),
&DxfError::ParseFloatError(ref e) => e.description(),
&DxfError::ParseIntError(ref e) => e.description(),
&DxfError::ParseError => "there was a general parsing error",
&DxfError::UnexpectedCode(_) => "an unexpected code was encountered",
&DxfError::UnexpectedCodePair(_, _) => "an unexpected code pair was encountered",
&DxfError::UnexpectedByte(_) => "an unexpected byte was encountered",
&DxfError::UnexpectedEndOfInput => "the input unexpectedly ended before the drawing was completely loaded",
&DxfError::UnexpectedEnumValue => "the specified enum value does not fall into the expected range",
&DxfError::UnexpectedEmptySet => "the set was not expected to be empty",
&DxfError::ExpectedTableType => "a 2/<table-type> code pair was expected",
&DxfError::WrongValueType => "the CodePairValue does not contain the requested type",
&DxfError::InvalidBinaryFile => "the binary file is invalid",
&DxfError::WrongItemType => "the specified item type is not correct",
}
}
fn cause(&self) -> Option<&error::Error> {
match self {
&DxfError::IoError(ref e) => Some(e),
&DxfError::ImageError(ref e) => Some(e),
&DxfError::ParseFloatError(ref e) => Some(e),
&DxfError::ParseIntError(ref e) => Some(e),
_ => None,
}
}
}
| true |
b5fced8d97ad79a18d21dee679fbb06d6378e5c9
|
Rust
|
mgenova16/nand2tetris
|
/projects/08/vm_translator/src/parser.rs
|
UTF-8
| 1,570 | 3.125 | 3 |
[] |
no_license
|
use std::io::{self, Read, BufRead, BufReader};
use std::fs::{File, OpenOptions};
use std::path::PathBuf;
use crate::command::{Command, CommandType};
pub struct Parser<'a, R: Read> {
reader: BufReader<R>,
command: Option<Command<'a>>,
}
impl<'a> Parser<'a, File> {
pub fn new(p: &PathBuf) -> io::Result<Self> {
let file = OpenOptions::new().read(true).open(p)?;
let reader = BufReader::new(file);
Ok(Self { reader, command: None })
}
pub fn has_more_commands(&mut self) -> bool {
let buffer = match self.reader.fill_buf() {
Ok(b) => b,
Err(_) => panic!("Error reading file"),
};
buffer.len() > 0
}
pub fn advance(&mut self) -> io::Result<()> {
let mut line = String::new();
loop {
if let Ok(0) = self.reader.read_line(&mut line) {
self.command = None; // we've reached EOF
break;
}
line = line.trim().split("//").next().unwrap().trim().to_string();
if line.is_empty() {
continue;
}
let iter = &mut line.split_whitespace().map(|s| s.to_string());
let (c, a1, a2) = (iter.next().unwrap(), iter.next(), iter.next());
self.command = Some(Command::new(c, a1, a2));
break;
}
Ok(())
}
pub fn command_type(&self) -> Option<&CommandType> {
match &self.command {
Some(command) => Some(&command.command_type),
None => None
}
}
}
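// --- Illustrative usage sketch (editor's addition, not part of the original file) ---
// Shows the intended driver loop for this Parser. The function name and the
// dispatch comment are assumptions; a real translator would hand each parsed
// command to its code writer.
#[allow(dead_code)]
fn example_parse_file(path: &PathBuf) -> io::Result<()> {
    let mut parser = Parser::new(path)?;
    while parser.has_more_commands() {
        parser.advance()?;
        if parser.command_type().is_some() {
            // Dispatch on the command type here, e.g. emit Hack assembly
            // for push/pop/arithmetic/branching commands.
        }
    }
    Ok(())
}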
| true |
a89cda21e6a5272294c0bdcf97b648ad799f395e
|
Rust
|
Aurenos/rustache
|
/src/buf_tcpstream.rs
|
UTF-8
| 712 | 3 | 3 |
[] |
no_license
|
use std::io::prelude::*;
use std::io::{self, BufReader, BufWriter};
use std::net::TcpStream;
pub struct BufTcpStream {
input: BufReader<TcpStream>,
output: BufWriter<TcpStream>,
}
impl BufTcpStream {
pub fn new(stream: TcpStream) -> io::Result<Self> {
let input = BufReader::new(stream.try_clone()?);
let output = BufWriter::new(stream);
Ok(Self { input, output })
}
pub fn send_msg(&mut self, msg: String) {
self.output.write_all(msg.as_bytes()).unwrap();
self.output.flush().unwrap();
}
pub fn recv(&mut self) -> String {
let mut buffer = String::new();
self.input.read_line(&mut buffer).unwrap();
buffer
}
}
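// --- Illustrative usage sketch (editor's addition, not part of the original file) ---
// Minimal round trip over the buffered wrapper; the address is an assumption,
// and the peer is expected to reply with a newline-terminated line.
#[allow(dead_code)]
fn example_round_trip() -> io::Result<()> {
    let stream = TcpStream::connect("127.0.0.1:7878")?;
    let mut buffered = BufTcpStream::new(stream)?;
    buffered.send_msg(String::from("hello\n"));
    let reply = buffered.recv();
    println!("received: {}", reply.trim_end());
    Ok(())
}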
| true |
bbf1df6e3464d98a1d2fc71d20dc169670c9c35d
|
Rust
|
anonpublic/votesmart
|
/contract/src/lib.rs
|
UTF-8
| 7,512 | 2.6875 | 3 |
[] |
no_license
|
use near_sdk::borsh::{self, BorshDeserialize, BorshSerialize};
use near_sdk::collections::{LookupMap, UnorderedMap};
use near_sdk::json_types::ValidAccountId;
use near_sdk::serde::{Deserialize, Serialize};
use near_sdk::{env, near_bindgen, setup_alloc, AccountId, BorshStorageKey, PanicOnDefault};
setup_alloc!();
#[near_bindgen]
#[derive(BorshDeserialize, BorshSerialize, PanicOnDefault)]
pub struct VoteSmart {
master_account_id: AccountId,
parties: UnorderedMap<u64, String>,
campaigns: UnorderedMap<u64, String>,
regions: UnorderedMap<u64, Region>,
districts: UnorderedMap<u64, District>,
candidates: UnorderedMap<u64, Candidate>,
recommendations: LookupMap<RecommendationIndex, u64>,
}
#[derive(BorshDeserialize, BorshSerialize, Serialize, Deserialize)]
#[serde(crate = "near_sdk::serde")]
pub struct Region {
pub title: String,
}
#[derive(BorshDeserialize, BorshSerialize, Serialize, Deserialize)]
#[serde(crate = "near_sdk::serde")]
pub struct District {
pub region_id: u64,
pub title: String,
}
#[derive(BorshDeserialize, BorshSerialize, Serialize, Deserialize)]
#[serde(crate = "near_sdk::serde")]
pub struct Candidate {
pub title: String,
pub party_id: u64,
}
#[derive(BorshDeserialize, BorshSerialize, Serialize, Deserialize)]
#[serde(crate = "near_sdk::serde")]
pub struct Party {
pub index: u64,
pub title: String,
}
#[derive(BorshDeserialize, BorshSerialize, Serialize, Deserialize)]
#[serde(crate = "near_sdk::serde")]
pub struct Recommendation {
pub title: String,
pub party: String,
}
#[derive(BorshDeserialize, BorshSerialize)]
pub struct RecommendationIndex {
pub campaign_id: u64,
pub district_id: u64,
}
/// Helper structure for the keys of the persistent collections.
#[derive(BorshSerialize, BorshStorageKey)]
pub enum StorageKey {
Parties,
Campaigns,
Regions,
Districts,
Candidates,
Recommendations,
}
#[near_bindgen]
impl VoteSmart {
#[init]
pub fn new(admin_id: Option<ValidAccountId>) -> Self {
let master_account_id: AccountId = if let Some(account_id) = admin_id {
account_id.into()
} else {
env::predecessor_account_id()
};
Self {
master_account_id,
parties: UnorderedMap::new(StorageKey::Parties),
campaigns: UnorderedMap::new(StorageKey::Campaigns),
regions: UnorderedMap::new(StorageKey::Regions),
districts: UnorderedMap::new(StorageKey::Districts),
candidates: UnorderedMap::new(StorageKey::Candidates),
recommendations: LookupMap::new(StorageKey::Recommendations),
}
}
pub(crate) fn assert_access(&self) {
assert_eq!(
env::predecessor_account_id(),
self.master_account_id,
"No access"
);
}
pub fn set_master_account_id(&mut self, admin_id: ValidAccountId) {
self.assert_access();
self.master_account_id = admin_id.into();
}
pub fn add_campaign(&mut self, id: u64, title: String) {
self.assert_access();
self.campaigns.insert(&id, &title);
}
pub fn get_campaigns(&self, from_index: Option<u64>, limit: Option<u64>) -> Vec<(u64, String)> {
unordered_map_pagination(&self.campaigns, from_index, limit)
}
pub fn add_parties(&mut self, parties: Vec<(u64, String)>) {
self.assert_access();
for data in parties {
self.parties.insert(&data.0, &data.1);
}
}
pub fn get_parties(&self, from_index: Option<u64>, limit: Option<u64>) -> Vec<(u64, String)> {
unordered_map_pagination(&self.parties, from_index, limit)
}
pub fn add_regions(&mut self, regions: Vec<(u64, Region)>) {
self.assert_access();
for data in regions {
self.regions.insert(&data.0, &data.1);
}
}
pub fn get_regions(&self, from_index: Option<u64>, limit: Option<u64>) -> Vec<(u64, Region)> {
unordered_map_pagination(&self.regions, from_index, limit)
}
pub fn add_districts(&mut self, districts: Vec<(u64, District)>) {
self.assert_access();
for data in districts {
self.districts.insert(&data.0, &data.1);
}
}
pub fn get_districts(
&self,
from_index: Option<u64>,
limit: Option<u64>,
) -> Vec<(u64, District)> {
unordered_map_pagination(&self.districts, from_index, limit)
}
pub fn get_districts_by_region(
&self,
region_id: u64,
from_index: Option<u64>,
limit: Option<u64>,
) -> Vec<(u64, District)> {
let keys = self.districts.keys_as_vector();
let values = self.districts.values_as_vector();
let from_index = from_index.unwrap_or(0);
let limit = limit.unwrap_or(keys.len());
        // `limit` is a count of entries to scan starting at `from_index`.
        (from_index..std::cmp::min(keys.len(), from_index.saturating_add(limit)))
.filter(|index| values.get(*index).unwrap().region_id == region_id)
.map(|index| (keys.get(index).unwrap(), values.get(index).unwrap().into()))
.collect()
}
pub fn add_candidates(&mut self, candidates: Vec<(u64, Candidate)>) {
self.assert_access();
for data in candidates {
self.candidates.insert(&data.0, &data.1);
}
}
pub fn get_candidates(
&self,
from_index: Option<u64>,
limit: Option<u64>,
) -> Vec<(u64, Candidate)> {
unordered_map_pagination(&self.candidates, from_index, limit)
}
// recommendations: [campaign_id: u64, district_id: u64, candidate_id: u64]
pub fn add_recommendations(&mut self, recommendations: Vec<(u64, u64, u64)>) {
self.assert_access();
for data in recommendations {
let campaign_id = data.0;
let district_id = data.1;
let candidate_id = data.2;
self.recommendations.insert(
&RecommendationIndex {
campaign_id,
district_id,
},
&candidate_id,
);
}
}
pub fn get_votesmart(&self, campaign_id: u64, district_id: u64) -> Option<Recommendation> {
let candidate_id = self.recommendations.get(&RecommendationIndex {
campaign_id,
district_id,
});
if let Some(candidate_id_unwrapped) = candidate_id {
if let Some(candidate_unwrapped) = self.candidates.get(&candidate_id_unwrapped) {
let result = Recommendation {
title: candidate_unwrapped.title,
party: self
.parties
.get(&candidate_unwrapped.party_id)
.unwrap_or("Unknown".to_string()),
};
Some(result)
} else {
None
}
} else {
None
}
}
}
pub(crate) fn unordered_map_pagination<K, VV, V>(
m: &UnorderedMap<K, VV>,
from_index: Option<u64>,
limit: Option<u64>,
) -> Vec<(K, V)>
where
K: BorshSerialize + BorshDeserialize,
VV: BorshSerialize + BorshDeserialize,
V: From<VV>,
{
let keys = m.keys_as_vector();
let values = m.values_as_vector();
let from_index = from_index.unwrap_or(0);
let limit = limit.unwrap_or(keys.len());
    // `limit` is a count of entries to return starting at `from_index`.
    (from_index..std::cmp::min(keys.len(), from_index.saturating_add(limit)))
.map(|index| (keys.get(index).unwrap(), values.get(index).unwrap().into()))
.collect()
}
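// --- Illustrative usage sketch (editor's addition, not part of the original file) ---
// Most of the view methods above delegate to `unordered_map_pagination`; a
// client (or a unit test with a mocked NEAR context) pages through them like
// this. `contract` is an assumption for the example:
//
// let page: Vec<(u64, String)> = contract.get_parties(Some(0), Some(50));
// for (id, title) in page {
//     // render the party list, then request the next page with a higher from_index
// }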
| true |
01d79b53dcd0632eaa5c9ffe4e071815e204de73
|
Rust
|
jamuraa/advent2018
|
/day19/src/main.rs
|
UTF-8
| 5,443 | 3.234375 | 3 |
[
"BSD-3-Clause"
] |
permissive
|
use std::fmt;
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
#[allow(non_camel_case_types)]
enum Opcode {
addr, addi, mulr, muli, banr, bani, borr, bori, setr, seti, gtir, gtri, gtrr, eqir, eqri, eqrr,
}
fn one_if_true(x: bool) -> u64 {
if x {
1
} else {
0
}
}
impl Opcode {
fn execute(&self, registers: &[u64; 6], input_a: u64, input_b: u64, output_c: u64) -> [u64; 6] {
let mut result = registers.clone();
result[output_c as usize] = match self {
Opcode::addr => registers[input_a as usize] + registers[input_b as usize],
Opcode::addi => registers[input_a as usize] + input_b,
Opcode::mulr => registers[input_a as usize] * registers[input_b as usize],
Opcode::muli => registers[input_a as usize] * input_b,
Opcode::banr => registers[input_a as usize] & registers[input_b as usize],
Opcode::bani => registers[input_a as usize] & input_b,
Opcode::borr => registers[input_a as usize] | registers[input_b as usize],
Opcode::bori => registers[input_a as usize] | input_b,
Opcode::setr => registers[input_a as usize],
Opcode::seti => input_a,
Opcode::gtir => one_if_true(input_a > registers[input_b as usize]),
Opcode::gtri => one_if_true(registers[input_a as usize] > input_b),
Opcode::gtrr => one_if_true(registers[input_a as usize] > registers[input_b as usize]),
Opcode::eqir => one_if_true(input_a == registers[input_b as usize]),
Opcode::eqri => one_if_true(registers[input_a as usize] == input_b),
Opcode::eqrr => one_if_true(registers[input_a as usize] == registers[input_b as usize]),
};
result
}
}
struct Instruction {
op: Opcode,
input_a: u64,
input_b: u64,
output_c: u64,
}
impl Instruction {
fn new(op: Opcode, input_a: u64, input_b: u64, output_c: u64) -> Instruction {
Instruction { op, input_a, input_b, output_c }
}
fn execute(&self, registers: &[u64; 6]) -> [u64; 6] {
self.op.execute(registers, self.input_a, self.input_b, self.output_c)
}
}
impl fmt::Display for Instruction {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{:?} {} {} {}", self.op, self.input_a, self.input_b, self.output_c)?;
Ok(())
}
}
fn main() {
let mut program: Vec<Instruction> = Vec::new();
let ip_reg: usize = 4;
program.push(Instruction::new(Opcode::addi, 4, 16, 4));
program.push(Instruction::new(Opcode::seti, 1, 2, 5));
program.push(Instruction::new(Opcode::seti, 1, 1, 1));
program.push(Instruction::new(Opcode::mulr, 5, 1, 2));
program.push(Instruction::new(Opcode::eqrr, 2, 3, 2));
program.push(Instruction::new(Opcode::addr, 2, 4, 4));
program.push(Instruction::new(Opcode::addi, 4, 1, 4));
program.push(Instruction::new(Opcode::addr, 5, 0, 0));
program.push(Instruction::new(Opcode::addi, 1, 1, 1));
program.push(Instruction::new(Opcode::gtrr, 1, 3, 2));
program.push(Instruction::new(Opcode::addr, 4, 2, 4));
program.push(Instruction::new(Opcode::seti, 2, 4, 4));
program.push(Instruction::new(Opcode::addi, 5, 1, 5));
program.push(Instruction::new(Opcode::gtrr, 5, 3, 2));
program.push(Instruction::new(Opcode::addr, 2, 4, 4));
program.push(Instruction::new(Opcode::seti, 1, 8, 4));
program.push(Instruction::new(Opcode::mulr, 4, 4, 4));
program.push(Instruction::new(Opcode::addi, 3, 2, 3));
program.push(Instruction::new(Opcode::mulr, 3, 3, 3));
program.push(Instruction::new(Opcode::mulr, 4, 3, 3));
program.push(Instruction::new(Opcode::muli, 3, 11, 3));
program.push(Instruction::new(Opcode::addi, 2, 4, 2));
program.push(Instruction::new(Opcode::mulr, 2, 4, 2));
program.push(Instruction::new(Opcode::addi, 2, 6, 2));
program.push(Instruction::new(Opcode::addr, 3, 2, 3));
program.push(Instruction::new(Opcode::addr, 4, 0, 4));
program.push(Instruction::new(Opcode::seti, 0, 8, 4));
program.push(Instruction::new(Opcode::setr, 4, 1, 2));
program.push(Instruction::new(Opcode::mulr, 2, 4, 2));
program.push(Instruction::new(Opcode::addr, 4, 2, 2));
program.push(Instruction::new(Opcode::mulr, 4, 2, 2));
program.push(Instruction::new(Opcode::muli, 2, 14, 2));
program.push(Instruction::new(Opcode::mulr, 2, 4, 2));
program.push(Instruction::new(Opcode::addr, 3, 2, 3));
program.push(Instruction::new(Opcode::seti, 0, 0, 0));
program.push(Instruction::new(Opcode::seti, 0, 0, 4));
let mut ip: u64 = 0;
let mut registers = [0, 0, 0, 0, 0, 0];
while (ip as usize) < program.len() {
let next_inst: &Instruction = &program[ip as usize];
registers[ip_reg] = ip;
print!("ip={} {:?} {} ", ip, registers, next_inst);
registers = next_inst.execute(®isters);
println!("{:?}", registers);
ip = registers[ip_reg] + 1;
if ip == 1 {
break;
}
}
println!("Registers after setup: {:?}", registers);
let mut r0 = 0;
let r3 = registers[3];
for r5 in 1..r3 + 1 {
print!("r5: {} of {}\r", r5, r3);
for r1 in 1..r3 + 1 {
if r1 * r5 == r3 {
r0 += r5;
}
if r1 * r5 > r3 {
break;
}
}
}
println!("\n r0 halt: {}", r0);
}
| true |
02002c9821b1178d30a85cbac76853affe515ab0
|
Rust
|
TakiKazuya/rust-opencv-sample
|
/src/colors.rs
|
UTF-8
| 227 | 2.53125 | 3 |
[] |
no_license
|
use opencv::core::Scalar;
pub fn red() -> Scalar {
Scalar::new(0.0, 0.0, 255.0, 1.0)
}
pub fn green() -> Scalar {
Scalar::new(0.0, 255.0, 0.0, 1.0)
}
pub fn blue() -> Scalar {
Scalar::new(255.0, 0.0, 0.0, 1.0)
}
| true |
8995751c9249b7934ac022017584af4e8a3a3343
|
Rust
|
andyherbert/raytracer
|
/src/renderer/camera_compute.rs
|
UTF-8
| 1,520 | 2.953125 | 3 |
[] |
no_license
|
use crate::{Camera, Matrix, Ray, Vert};
pub struct CameraCompute {
half_width: f64,
half_height: f64,
pixel_size: f64,
transform: Matrix,
}
impl CameraCompute {
pub fn new(camera: &Camera) -> CameraCompute {
let half_view = (camera.fov / 2.0).tan();
let (width, height) = camera.dimensions();
let aspect_ratio = width as f64 / height as f64;
let (half_width, half_height) = if aspect_ratio >= 1.0 {
(half_view, half_view / aspect_ratio)
} else {
(half_view * aspect_ratio, half_view)
};
let pixel_size = half_width * 2.0 / width as f64;
let orientation = Matrix::orientation(&camera.to, &camera.from, &camera.up);
let transform = (orientation * Matrix::translate(-camera.from.x, -camera.from.y, -camera.from.z)).inverse();
CameraCompute {
half_width,
half_height,
pixel_size,
transform,
}
}
pub fn ray_for_pixel(&self, x: f64, y: f64) -> Ray {
let x_offset = (x + 0.5) * self.pixel_size;
let y_offset = (y + 0.5) * self.pixel_size;
let world_x = self.half_width - x_offset;
let world_y = self.half_height - y_offset;
let pixel = self.transform.multiply_with_vert(&Vert::new(world_x, world_y, -1.0));
let origin = self.transform.multiply_with_vert(&Vert::new(0.0, 0.0, 0.0));
let direction = (pixel - origin.clone()).normalise();
Ray::new(origin, direction)
}
}
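// --- Illustrative usage sketch (editor's addition, not part of the original file) ---
// Typical render-loop shape: precompute once per camera, then request one ray
// per pixel. `camera` and the scene intersection are assumptions here:
//
// let compute = CameraCompute::new(&camera);
// let (width, height) = camera.dimensions();
// for y in 0..height {
//     for x in 0..width {
//         let ray = compute.ray_for_pixel(x as f64, y as f64);
//         // intersect `ray` with the scene and shade the pixel
//     }
// }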
| true |
dfe2987836e9b38811cab5f693cd74c5d13444b6
|
Rust
|
arkhe634/markab
|
/markab_parser/src/character_class_parser/error.rs
|
UTF-8
| 1,115 | 2.890625 | 3 |
[] |
no_license
|
use crate::{
character_class_parser::CharacterClassParserRequirement,
Error,
};
use std::fmt::{
Display,
Formatter,
Result as FmtResult,
};
#[derive(Debug)]
pub struct CharacterClassParserError<'a>
{
from: usize,
requirement: CharacterClassParserRequirement<'a>,
found: Option<char>,
}
impl<'a> CharacterClassParserError<'a>
{
pub fn new(
from: usize,
requirement: CharacterClassParserRequirement<'a>,
found: Option<char>,
) -> Self
{
Self {
from,
requirement,
found,
}
}
}
impl<'a> Error for CharacterClassParserError<'a>
{
fn from(&self, f: &mut Formatter) -> FmtResult
{
write!(f, "{}", self.from)
}
fn requirement(&self, f: &mut Formatter) -> FmtResult
{
write!(f, "{}", self.requirement)
}
fn result(&self, f: &mut Formatter) -> FmtResult
{
match self.found
{
Some(found) => write!(f, "{:?} found", found),
None => write!(f, "not found"),
}
}
fn causes(&self, _: &mut Formatter, _: usize) -> FmtResult
{
Ok(())
}
}
impl<'a> Display for CharacterClassParserError<'a>
{
fn fmt(&self, f: &mut Formatter) -> FmtResult
{
self.print(f, 0)
}
}
| true |
cb107468725657526b74b0571c9775e6ba79f8a1
|
Rust
|
BruceBrown/d3
|
/src/lib.rs
|
UTF-8
| 3,986 | 3.15625 | 3 |
[
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0",
"MIT"
] |
permissive
|
//! A Framework for Server Development
//! This crate provides a set of tools, for implementing a server. It is especially
//! well suited for those cases where the server employs a pipeline architecture.
//! There are two core concepts, the machine, and the instruction set. Combined
//! with a channel sender and receiver, you have all of the parts necessary for
//! building a server.
//!
//! ## The Machine
//! The Machine starts with any kind of struct. It becomes a machine by implementing
//! the Machine trait for an instruction set and joining the collective. There's only
//! one method that requires implementing. Joining the collective is a single call
//! into the core, which returns a wrapped machine and a sender for the instruction
//! set. The machine is the receiver for any instruction sent to that sender. In
//! most cases the wrapped instance can be ignored.
//!
//! ## The Instruction Set
//! The Instruction Set starts with any kind of enum. It becomes an instruction set
//! when MachineImpl is derived.
//!
//! ## Example
//!
//! This example shows how easy it is to create an instruction set, create a machine
//! and send an instruction to that machine.
//! ``` text
//! // A trivial instruction set
//! #[derive(Debug, MachineImpl)]
//! enum StateTable { Init, Start, Stop }
//!
//! pub struct Alice {}
//! // implement the Machine trait for Alice
//! impl Machine<StateTable> for Alice {
//! fn receive(&self, cmd: StateTable) {
//!     }
//! }
//!
//! // create the Machine from Alice, getting back a machine and Sender<StateTable>.
//! let (alice, sender) = executor::connect(Alice{});
//!
//! // send a command to Alice
//! // Alice's receive method will be called, likely on a different thread than this thread.
//! sender.send(StateTable::Init).expect("send failed");
//! ```
//! The main `d3` crate just re-exports tools from smaller subcrates:
//! ## Derive Macro
//!
//! * [`d3-derive`], a derive macro for transforming an enum into an instruction set.
//!
//! ## Core
//!
//! * [`d3-core`], a scheduler and executor for machines.
//!
//!
//! ## Components
//!
//! * [`d3-components`], provides a component/coordinator hierarchy of machines.
//!
//!
//! ## Instruction Sets and Test Drivers
//!
//! * [`d3-dev-instruction-sets`](https://github.com/BruceBrown/d3/tree/master/d3-dev-instruction-sets/src),
//! example of some simple instruction sets.
//! * [`d3-test-driver`](https://github.com/BruceBrown/d3/tree/master/d3-test-drivers/src),
//! example of implementing an instruction set. The test driver
//! is used by bench and test.
//!
//! ## Examples
//!
//! ### Services
//! * [`alice-service`](https://github.com/BruceBrown/d3/tree/master/examples/alice-service/src/alice.rs),
//! an example of a web-service sending a form and processing the POST.
//! * [`chat-service`](https://github.com/BruceBrown/d3/tree/master/examples/chat-service/src),
//! an example of a trivial chat service.
//! * [`echo-service`](https://github.com/BruceBrown/d3/tree/master/examples/echo-service/src),
//! an example of a trivial echo service.
//! * [`monitor-service`](https://github.com/BruceBrown/d3/tree/master/examples/monitor-service/src),
//! an example of service for monitoring the core.
//!
//! ### Server
//! * [`test-server`](https://github.com/BruceBrown/d3/tree/master/examples/test-server/src),
//! an example of a server running the aforementioned services
// re-publish all the bits, so that you only need d3.
pub mod d3_derive {
pub use d3_derive::*;
}
pub mod core {
pub mod machine_impl {
pub use d3_core::machine_impl::{self, *};
}
pub mod executor {
pub use d3_core::executor::{self, *};
}
pub use d3_core::send_cmd;
}
pub mod components {
pub mod network {
pub use d3_components::network::{self, *};
}
pub use d3_components::components::{self, *};
pub use d3_components::coordinators::{self, *};
pub mod settings {
pub use d3_components::settings::{self, *};
}
}
| true |
724f4b7cb7ea0b495a3e0467d98eaa45815e6e82
|
Rust
|
asvedr/oolang
|
/src/bytecode/registers.rs
|
UTF-8
| 1,678 | 3.34375 | 3 |
[] |
no_license
|
#[derive(Debug,PartialEq,Clone)]
pub enum Reg {
IVar(u8), // index of 'int' var
RVar(u8), // index of 'double' var
Var(u8), // index of 'Var' var
IStack(u8), // stack of int
RStack(u8), // stack of real
VStack(u8), // stack of Var
RSelf, // var 'self'
Arg(u8), // fun args
Env(u8), // closure env(outer vars)
Temp, // SINGLE temp var
TempI,
TempR,
Exc, // exception value
Null, // no value
Name(Box<String>), // getting global symbol
Res
}
impl Reg {
pub fn is_int(&self) -> bool {
match *self {
Reg::IVar(_)|Reg::IStack(_)|Reg::TempI => true,
_ => false
}
}
pub fn is_real(&self) -> bool {
match *self {
Reg::RVar(_)|Reg::RStack(_)|Reg::TempR => true,
_ => false
}
}
pub fn is_obj(&self) -> bool {
match *self {
Reg::Var(_)|Reg::VStack(_)|Reg::Arg(_)|Reg::Env(_)|Reg::Temp|Reg::Exc|Reg::Res|Reg::RSelf => true,
_ => false
}
}
pub fn is_stack(&self) -> bool {
match *self {
Reg::IStack(_)|Reg::RStack(_)|Reg::VStack(_) => true,
_ => false
}
}
pub fn is_var(&self) -> bool {
match *self {
Reg::IVar(_)|Reg::RVar(_)|Reg::Var(_)|Reg::Arg(_)|Reg::Env(_) => true,
_ => false
}
}
pub fn is_name(&self) -> bool {
match *self {
Reg::Name(_) => true,
_ => false
}
}
pub fn is_null(&self) -> bool {
match *self {
Reg::Null => true,
_ => false
}
}
}
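// --- Illustrative usage sketch (editor's addition, not part of the original file) ---
// How a code generator might branch on the register class; the slot names are
// purely illustrative.
#[allow(dead_code)]
fn example_register_class(reg: &Reg) -> &'static str {
    if reg.is_int() {
        "int slot"
    } else if reg.is_real() {
        "real slot"
    } else if reg.is_obj() {
        "object slot"
    } else if reg.is_name() {
        "global symbol"
    } else {
        "other"
    }
}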
| true |
4ee406f241d0bc27bbe59d5069df333b6ff22391
|
Rust
|
d3zd3z/rdump
|
/filer/tests/data.rs
|
UTF-8
| 3,675 | 2.953125 | 3 |
[] |
no_license
|
// Test data.
use std::io;
use cas::Oid;
use cas::pool::RamPool;
use cas::pool::ChunkSource;
use filer::data::DataWrite;
use rand::isaac::IsaacRng;
use rand::Rng;
use std::cell::RefCell;
extern crate cas;
extern crate filer;
extern crate rand;
#[macro_use]
extern crate log;
#[test]
fn indirection() {
let limit = 1 * 1024 * 1024 + 136;
let mut pool = RefCell::new(RamPool::new());
let top;
{
pool.borrow_mut().begin_writing().unwrap();
{
let mut rd = FakeRead::new(limit);
let mut wr = DataWrite::new_limit(&mut pool, 256 * 1024);
top = wr.write(&mut rd).unwrap();
}
pool.borrow_mut().flush().unwrap();
}
// Read it back and make sure it is ok.
{
let mut w = Walker::new(&pool, limit);
w.walk(&top).unwrap();
}
}
struct Walker<'a> {
reader: FakeRead,
pool: &'a RefCell<ChunkSource>,
}
impl<'a> Walker<'a> {
fn new<'b>(pool: &'b RefCell<ChunkSource>, limit: usize) -> Walker<'b> {
Walker {
reader: FakeRead::new(limit),
pool: pool,
}
}
fn walk(&mut self, oid: &Oid) -> cas::Result<()> {
use filer::decode::decode;
use filer::decode::Node;
use std::io::prelude::*;
let ch = try!(self.pool.borrow().find(oid));
trace!("Chunk: {}", ch.oid().to_hex());
match try!(decode(ch)) {
Node::Blob(data) => {
let mut temp = vec![0u8; data.len()];
assert_eq!(try!(self.reader.read(&mut temp)), temp.len());
assert_eq!(&data, &temp);
}
Node::Indirect { level, children } => {
trace!("Indirect: {} {}", level, children.len());
for child in children.iter() {
try!(self.walk(child));
}
}
}
Ok(())
}
}
// The IsaacRng fills based on 32-bit values. Because of this, calls to
// fill_bytes() don't work right if the fill amount is not a multiple of 4.
// Test that it does work right with assorted fill sizes that are multiples of 4.
#[test]
fn fill_bytes() {
// use cas::pdump::HexDump;
let mut arng = IsaacRng::new_unseeded();
let mut brng = arng.clone();
let mut b1 = vec![0u8; 256];
arng.fill_bytes(&mut b1);
// println!("{}", b1.len());
// b1.dump();
let mut b2 = vec![0u8; 256];
brng.fill_bytes(&mut b2[0..8]);
brng.fill_bytes(&mut b2[8..32]);
brng.fill_bytes(&mut b2[32..128]);
brng.fill_bytes(&mut b2[128..248]);
brng.fill_bytes(&mut b2[248..256]);
// println!("Second");
// b2.dump();
assert_eq!(&b1, &b2);
}
// A fake reader that always provides data, up to a given length.
struct FakeRead {
offset: usize,
limit: usize,
rng: IsaacRng,
}
impl FakeRead {
fn new(limit: usize) -> FakeRead {
FakeRead {
offset: 0,
limit: limit,
rng: IsaacRng::new_unseeded(),
}
}
}
impl io::Read for FakeRead {
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
let mut size = buf.len();
if self.offset + size > self.limit {
size = self.limit - self.offset;
}
        // The IsaacRng fills based on 32-bit values, which it discards
        // across calls. As such, this only works with 32-bit-aligned
        // buffers. This should be OK, as long as the test data
        // above respects this. Check alignment to 8 bytes, since that
        // does not seem to be guaranteed on 64-bit.
assert!(size & 7 == 0);
self.rng.fill_bytes(buf);
self.offset += size;
Ok(size)
}
}
| true |
b451366f23b898e5b6720dca9ca6c09fb0a389c2
|
Rust
|
Denaun/aoc_2020
|
/src/ticket_translation.rs
|
UTF-8
| 8,282 | 3.015625 | 3 |
[
"MIT"
] |
permissive
|
//! Day 16
use std::{cmp::PartialOrd, collections::HashSet};
use itertools::Itertools;
trait Solution {
fn part_1(&self) -> usize;
fn part_2(&self) -> usize;
}
impl Solution for str {
fn part_1(&self) -> usize {
let (rules, _, tickets) = parsers::input(self).expect("Failed to parse the input");
let rules = rules.into_iter().map(|(_, rule)| rule).collect::<Vec<_>>();
tickets
.iter()
.flat_map(|ticket| invalid_fields(ticket, &rules))
.sum()
}
fn part_2(&self) -> usize {
let (named_rules, your_ticket, tickets) =
parsers::input::<usize>(self).expect("Failed to parse the input");
let rules = named_rules
.iter()
.map(|(_, rule)| *rule)
.collect::<Vec<_>>();
find_fields(&tickets, &rules)
.expect("Field mapping not found")
.into_iter()
.enumerate()
.filter_map(|(rule_ix, field_ix)| {
if named_rules[rule_ix].0.starts_with("departure") {
Some(your_ticket[field_ix])
} else {
None
}
})
.product()
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct Range<T> {
min: T,
max: T,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct RangeUnion<T> {
first: Range<T>,
second: Range<T>,
}
fn invalid_fields<'a, T: PartialOrd>(
fields: &'a [T],
rules: &'a [RangeUnion<T>],
) -> impl Iterator<Item = &'a T> {
fields
.iter()
.filter(move |field| !rules.into_iter().any(|rule| rule.is_valid(field)))
}
fn find_fields<'a, T: PartialOrd>(
tickets: &[Vec<T>],
rules: &[RangeUnion<T>],
) -> Option<Vec<usize>> {
let valid_tickets: Vec<_> = tickets
.iter()
.filter(|ticket| invalid_fields(ticket, rules).next().is_none())
.collect();
let n_fields = valid_tickets.first().map(|t| t.len()).unwrap_or(0);
assert!(valid_tickets.iter().all(|ticket| ticket.len() == n_fields));
let mut valid_fields_per_rule = rules
.iter()
.map(|rule| {
(0..n_fields)
.filter(|&field_ix| {
valid_tickets
.iter()
.all(|ticket| rule.is_valid(&ticket[field_ix]))
})
.collect::<HashSet<_>>()
})
.collect_vec();
let mut field_indices = vec![0; valid_fields_per_rule.len()];
// Iteratively remove rules that identify exactly one field from the
// candidates, until we either find a solution or end up in an undecidable
// state.
while let Some((rule_ix, field_ix)) = valid_fields_per_rule
.iter()
.enumerate()
.find(|c| c.1.len() == 1)
{
let field_ix = *field_ix.iter().exactly_one().unwrap();
for candidates in valid_fields_per_rule.iter_mut() {
candidates.remove(&field_ix);
}
field_indices[rule_ix] = field_ix;
}
if valid_fields_per_rule.iter().any(|c| !c.is_empty()) {
None
} else {
Some(field_indices)
}
}
impl<T: PartialOrd> Range<T> {
fn is_valid(&self, v: &T) -> bool {
&self.min <= v && v <= &self.max
}
}
impl<T: PartialOrd> RangeUnion<T> {
fn is_valid(&self, v: &T) -> bool {
self.first.is_valid(v) || self.second.is_valid(v)
}
}
mod parsers {
use std::str::FromStr;
use nom::{
bytes::complete::{tag, take_till},
character::complete::{char, line_ending},
error::Error,
multi::separated_list1,
sequence::{separated_pair, terminated},
IResult,
};
use crate::parsers::{finished_parser, integer};
use super::{Range, RangeUnion};
pub fn input<T: FromStr>(
s: &str,
) -> Result<(Vec<(&str, RangeUnion<T>)>, Vec<T>, Vec<Vec<T>>), Error<&str>> {
finished_parser(move |s| {
let (s, rules) = terminated(separated_list1(line_ending, rule), line_ending)(s)?;
let (s, _) = line_ending(s)?;
let (s, _) = terminated(tag("your ticket:"), line_ending)(s)?;
let (s, yours) = terminated(separated_list1(char(','), integer), line_ending)(s)?;
let (s, _) = line_ending(s)?;
let (s, _) = terminated(tag("nearby tickets:"), line_ending)(s)?;
let (s, nearby) = separated_list1(line_ending, separated_list1(char(','), integer))(s)?;
Ok((s, (rules, yours, nearby)))
})(s)
}
fn rule<T: FromStr>(s: &str) -> IResult<&str, (&str, RangeUnion<T>)> {
separated_pair(take_till(|c| c == ':'), tag(": "), range_union)(s)
}
fn range_union<T: FromStr>(s: &str) -> IResult<&str, RangeUnion<T>> {
let (s, (first, second)) = separated_pair(range, tag(" or "), range)(s)?;
Ok((s, RangeUnion { first, second }))
}
fn range<T: FromStr>(s: &str) -> IResult<&str, Range<T>> {
let (s, (min, max)) = separated_pair(integer, char('-'), integer)(s)?;
Ok((s, Range { min, max }))
}
}
#[cfg(test)]
mod tests {
use itertools::assert_equal;
use super::*;
#[test]
fn example_input() {
assert_eq!(
parsers::input(
"\
class: 1-3 or 5-7
row: 6-11 or 33-44
seat: 13-40 or 45-50
your ticket:
7,1,14
nearby tickets:
7,3,47
40,4,50
55,2,20
38,6,12"
),
Ok((
vec![
(
"class",
RangeUnion {
first: Range { min: 1, max: 3 },
second: Range { min: 5, max: 7 }
}
),
(
"row",
RangeUnion {
first: Range { min: 6, max: 11 },
second: Range { min: 33, max: 44 }
}
),
(
"seat",
RangeUnion {
first: Range { min: 13, max: 40 },
second: Range { min: 45, max: 50 }
}
),
],
vec![7, 1, 14],
vec![
vec![7, 3, 47],
vec![40, 4, 50],
vec![55, 2, 20],
vec![38, 6, 12],
]
))
);
}
#[test]
fn example_1() {
let rules = &[
RangeUnion {
first: Range { min: 1, max: 3 },
second: Range { min: 5, max: 7 },
},
RangeUnion {
first: Range { min: 6, max: 11 },
second: Range { min: 33, max: 44 },
},
RangeUnion {
first: Range { min: 13, max: 40 },
second: Range { min: 45, max: 50 },
},
];
assert_equal(invalid_fields(&[7, 3, 47], rules), &[]);
assert_equal(invalid_fields(&[40, 4, 50], rules), &[4]);
assert_equal(invalid_fields(&[55, 2, 20], rules), &[55]);
assert_equal(invalid_fields(&[38, 6, 12], rules), &[12]);
}
#[test]
fn part_1() {
assert_eq!(include_str!("inputs/day_16").part_1(), 23115);
}
#[test]
fn example_2() {
assert_eq!(
find_fields(
&[vec![3, 9, 18], vec![15, 1, 5], vec![5, 14, 9]],
&[
RangeUnion {
first: Range { min: 0, max: 1 },
second: Range { min: 4, max: 19 },
},
RangeUnion {
first: Range { min: 0, max: 5 },
second: Range { min: 8, max: 19 },
},
RangeUnion {
first: Range { min: 0, max: 13 },
second: Range { min: 16, max: 19 },
},
]
),
Some(vec![1, 0, 2])
);
}
#[test]
fn part_2() {
assert_eq!(include_str!("inputs/day_16").part_2(), 239_727_793_813);
}
}
| true |
38aa06123339b47059270f6a01acb2aa1ef81e4b
|
Rust
|
mich101mich/aoc-2018
|
/src/days/day_18.rs
|
UTF-8
| 2,319 | 2.671875 | 3 |
[] |
no_license
|
use crate::utils::*;
#[allow(unused)]
pub fn run() {
#[allow(unused_variables)]
let input = include_str!("../input/18.txt");
let mut grid = char_grid(input);
let mut next_grid = grid.clone();
let neighborhood = grid.moore();
let final_grid = detect_loop(1000000000, || {
next_grid.grid_iter_mut_index().for_each(|(pos, v)| {
let mut trees = 0;
let mut lumbers = 0;
for p in neighborhood.get_all_neighbors(pos) {
match grid[p] {
'|' => trees += 1,
'#' => lumbers += 1,
_ => {}
}
}
*v = match grid[pos] {
'.' if trees >= 3 => '|',
'|' if lumbers >= 3 => '#',
'#' if trees == 0 || lumbers == 0 => '.',
c => c,
}
});
std::mem::swap(&mut grid, &mut next_grid);
grid.clone()
});
let mut trees = 0;
let mut lumbers = 0;
for v in final_grid.grid_iter() {
match v {
'|' => trees += 1,
'#' => lumbers += 1,
_ => {}
}
}
pv!(trees * lumbers);
}
#[allow(unused)]
pub fn part_one() {
#[allow(unused_variables)]
let input = include_str!("../input/18.txt");
let mut grid = char_grid(input);
let mut next_grid = grid.clone();
let neighborhood = grid.moore();
for _ in 0..10 {
next_grid.grid_iter_mut_index().for_each(|(pos, v)| {
let mut trees = 0;
let mut lumbers = 0;
for p in neighborhood.get_all_neighbors(pos) {
match grid[p] {
'|' => trees += 1,
'#' => lumbers += 1,
_ => {}
}
}
*v = match grid[pos] {
'.' if trees >= 3 => '|',
'|' if lumbers >= 3 => '#',
'#' if trees == 0 || lumbers == 0 => '.',
c => c,
}
});
std::mem::swap(&mut grid, &mut next_grid);
}
let mut trees = 0;
let mut lumbers = 0;
for v in grid.grid_iter() {
match v {
'|' => trees += 1,
'#' => lumbers += 1,
_ => {}
}
}
pv!(trees * lumbers);
}
| true |
7b73dc9cb6e2074cb8023ddd9b8e35c03f279d21
|
Rust
|
NattapongSiri/tokenizer_rs
|
/src/tokenizer/en/mod.rs
|
UTF-8
| 723 | 3.46875 | 3 |
[
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
//! A whitespace-based word tokenizer.
//!
//! It splits text into words on whitespace; nothing else is done.
/// Whitespace-based tokenizer. It splits words on whitespace.
pub struct Tokenizer;
// /// Common acronym. This should be useful for sentence tokenizer.
// const acronym: &'static [&'static str] = &[
// "Mr.", "Mrs.", "Doc.", "Prof.", // People honorific
// "Mon.", "Tue.", "Wed.", "Thu.", "Fri.", "Sat.", "Sun.", // Three chars date
// "Jan.", "Feb.", "Mar.", "Aprl.", "Jun.", "Sep.", "Aug.", "Oct.", "Nov.", "Dec.", // 3-4 Chars month
// ];
impl super::Tokenizer for Tokenizer {
fn tokenize<'a>(&self, text: &'a str) -> Vec<&'a str> {
text.split_whitespace().collect()
}
}
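// --- Illustrative usage sketch (editor's addition, not part of the original file) ---
// With the `Tokenizer` trait from the parent module in scope, tokenization is a
// plain whitespace split:
//
// use super::Tokenizer as _; // bring the trait method into scope
// let tokens = Tokenizer.tokenize("A quick brown fox");
// assert_eq!(tokens, vec!["A", "quick", "brown", "fox"]);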
| true |
9bd90d7dcdcb697570f1164324d1497be6b02f55
|
Rust
|
Noah2610/hello-specs-physics
|
/src/music.rs
|
UTF-8
| 1,015 | 2.625 | 3 |
[
"Unlicense"
] |
permissive
|
use std::iter::Cycle;
use std::vec::IntoIter;
use amethyst::assets::Loader;
use amethyst::audio::{AudioSink, OggFormat, SourceHandle};
use amethyst::ecs::{World, WorldExt};
use crate::helpers::*;
const MUSIC: &[&str] = &["audio/song.ogg"];
pub struct Music {
pub music: Cycle<IntoIter<SourceHandle>>,
}
pub fn initialize_music(world: &mut World) {
let music = {
let loader = world.read_resource::<Loader>();
let mut sink = world.write_resource::<AudioSink>();
sink.set_volume(0.5);
let music = MUSIC
.iter()
.map(|file| load_audio_track(&loader, &world, file))
.collect::<Vec<_>>()
.into_iter()
.cycle();
Music { music }
};
world.insert(music);
}
// from `amethyst/examples/pong/audio.rs#18`
// Loads an ogg audio track.
fn load_audio_track(
loader: &Loader,
world: &World,
file: &str,
) -> SourceHandle {
loader.load(resource(file), OggFormat, (), &world.read_resource())
}
| true |
a9d294193d26a4b24e3f31c30da720e11de8da46
|
Rust
|
raymundovr/exercism-rust
|
/triangle/src/lib_u64.rs
|
UTF-8
| 718 | 3.640625 | 4 |
[] |
no_license
|
pub struct Triangle {
a: u64,
b: u64,
c: u64,
}
impl Triangle {
pub fn build(sides: [u64; 3]) -> Option<Triangle> {
let (a, b, c) = (sides[0], sides[1], sides[2]);
if a == 0 || b == 0 || c == 0 {
return None;
}
if !(a + b >= c && a + c >= b && b + c >= a) {
return None;
}
Some(Triangle { a, b, c })
}
pub fn is_equilateral(&self) -> bool {
self.a == self.c && self.a == self.b
}
pub fn is_scalene(&self) -> bool {
self.a != self.c && self.a != self.b && self.b != self.c
}
pub fn is_isosceles(&self) -> bool {
self.a == self.c || self.a == self.b || self.c == self.b
}
}
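// --- Illustrative usage sketch (editor's addition, not part of the original file) ---
// Building and classifying triangles; [1, 2, 9] violates the triangle
// inequality, so `build` returns None.
#[allow(dead_code)]
fn example_classify() {
    let t = Triangle::build([3, 4, 5]).expect("3-4-5 is a valid triangle");
    assert!(t.is_scalene());
    assert!(!t.is_equilateral());
    assert!(Triangle::build([1, 2, 9]).is_none());
}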
| true |
936ac09b41b81d34b8bfc815baba04352ec8b1a4
|
Rust
|
dogamak/transmission_rpc
|
/src/requests/torrent_action.rs
|
UTF-8
| 1,819 | 2.828125 | 3 |
[] |
no_license
|
use serde_json::Value;
use super::{Request, RequestArguments};
use std::collections::BTreeMap;
pub enum ActionType {
Start,
Stop,
Verify,
Reannounce
}
pub enum ActionTarget {
Single { id: u64 },
All,
List { ids: Vec<u64>, hashes: Vec<String> },
RecentlyActive
}
pub struct TorrentAction {
typ: ActionType,
target: ActionTarget
}
impl TorrentAction {
pub fn new(typ: ActionType, target: ActionTarget) -> TorrentAction {
TorrentAction {
typ: typ,
target: target
}
}
}
impl Request for TorrentAction {
type Response = ::responses::TorrentAction;
fn method_name(&self) -> &'static str {
match self.typ {
ActionType::Start => "torrent-start",
ActionType::Stop => "torrent-stop",
ActionType::Verify => "torrent-verify",
ActionType::Reannounce => "torrent-reannounce"
}
}
}
impl RequestArguments for TorrentAction {
fn arguments(&self) -> Value {
let mut args = BTreeMap::new();
match self.target {
ActionTarget::All => (),
ActionTarget::List { ref ids, ref hashes } => {
                let v_ids = ids.iter().map(|id| Value::U64(*id));
                let v_hashes = hashes.iter().map(|hash| Value::String(hash.clone()));
                let list = v_ids.chain(v_hashes).collect();
args.insert("id".to_string(), Value::Array(list));
},
ActionTarget::RecentlyActive => {
args.insert("id".to_string(), Value::String("recently-active".to_string()));
},
ActionTarget::Single { id } => {
args.insert("id".to_string(), Value::U64(id));
}
}
Value::Object(args)
}
}
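// --- Illustrative usage sketch (editor's addition, not part of the original file) ---
// Building a "start these torrents" request; sending it is up to the crate's
// client, which is not part of this file.
#[allow(dead_code)]
fn example_start_request() -> TorrentAction {
    TorrentAction::new(
        ActionType::Start,
        ActionTarget::List { ids: vec![1, 2], hashes: Vec::new() },
    )
}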
| true |
118444d161333e6cce7018c5c7a52fc9300770d1
|
Rust
|
TheGhostHuCodes/programming-rust
|
/hello/src/main.rs
|
UTF-8
| 1,363 | 3.78125 | 4 |
[] |
no_license
|
use std::io::Write;
use std::num::NonZeroU64;
use std::str::FromStr;
fn gcd(n: NonZeroU64, m: NonZeroU64) -> NonZeroU64 {
let mut n_ = n.get();
let mut m_ = m.get();
while m_ != 0 {
if m_ < n_ {
let temp = m_;
m_ = n_;
n_ = temp
}
m_ = m_ % n_;
}
NonZeroU64::new(n_).unwrap()
}
fn main() {
let mut numbers = Vec::new();
for arg in std::env::args().skip(1) {
numbers.push(NonZeroU64::from_str(&arg).expect("error parsing argument"));
}
if numbers.len() == 0 {
writeln!(std::io::stderr(), "Usage: gcd NUMBER ...").unwrap();
std::process::exit(1);
}
let mut d = numbers[0];
for &m in &numbers[1..] {
d = gcd(d, m);
}
println!("The greatest common divisor of {:?} is {}", numbers, d);
}
#[cfg(test)]
mod tests {
use crate::*;
#[test]
fn test_gcd() {
assert_eq!(
gcd(
NonZeroU64::new(2 * 5 * 11 * 17).unwrap(),
NonZeroU64::new(3 * 7 * 13 * 19).unwrap()
)
.get(),
1
);
assert_eq!(
gcd(
NonZeroU64::new(2 * 3 * 5 * 11 * 17).unwrap(),
NonZeroU64::new(3 * 7 * 11 * 13 * 19).unwrap()
)
.get(),
3 * 11
);
}
}
| true |
42ce604ff439599698c64728cdc275a99f9bd30a
|
Rust
|
mast22/rust-snake-game
|
/src/main.rs
|
UTF-8
| 6,987 | 2.890625 | 3 |
[] |
no_license
|
use kiss3d::camera::{Camera, FirstPerson};
use kiss3d::event::{Key, WindowEvent};
use kiss3d::light::Light;
use kiss3d::nalgebra::{geometry::Point, UnitQuaternion, Vector3};
use kiss3d::nalgebra::{Point3, Transform3, Translation3};
use kiss3d::window::Window;
use std::time::Duration;
use std::{thread, time};
use core::panic;
use rand::Rng;
use std::collections::VecDeque;
use std::io::{self, Write};
const H: i32 = 10;
const W: i32 = 15;
trait Drawable {
fn should_draw(&self, x: i32, y: i32) -> Option<char>;
}
struct Snack {
x: i32,
y: i32,
}
impl Drawable for Snack {
fn should_draw(&self, x: i32, y: i32) -> Option<char> {
        if self.x == x && self.y == y {
return Some('S');
};
return None;
}
}
impl Snack {
fn new_place(&mut self, snake: &Snake) {
let mut rng = rand::thread_rng();
let mut new_x: i32;
let mut new_y: i32;
// do while
while {
new_x = rng.gen_range(0..W);
new_y = rng.gen_range(0..H);
snake.collide(new_x, new_y)
} {}
self.x = new_x;
self.y = new_y;
}
}
struct BodyPart {
x: i32,
y: i32,
}
impl Drawable for BodyPart {
fn should_draw(&self, x: i32, y: i32) -> Option<char> {
        if self.x == x && self.y == y {
return Some('o');
};
None
}
}
enum Direction {
Up,
Right,
Left,
Down,
}
struct Snake {
body: VecDeque<BodyPart>,
direction: Direction,
    skip_next_pop: bool, // the tail pop is skipped on the next move if the snake has just been fed
}
impl Drawable for Snake {
fn should_draw(&self, x: i32, y: i32) -> Option<char> {
for body_part in &self.body {
let body_part_should_draw = body_part.should_draw(x, y);
if body_part_should_draw.is_some() {
if std::ptr::eq(body_part, self.body.front().unwrap()) {
return Some('0');
}
return body_part_should_draw;
}
}
None
}
}
impl Snake {
fn move_snake(&mut self, direction: Direction) {
let head = self.body.front().unwrap();
let new_body_part = match direction {
Direction::Up => (head.x, head.y - 1),
Direction::Right => (head.x + 1, head.y),
Direction::Left => (head.x - 1, head.y),
Direction::Down => (head.x, head.y + 1),
};
self.body.push_front(BodyPart {
x: new_body_part.0,
y: new_body_part.1,
});
if !self.skip_next_pop {
self.body.pop_back();
} else {
self.skip_next_pop = false;
}
}
fn feed(&mut self, snack: &mut Snack) {
let head = self.body.front().unwrap();
if snack.x == head.x && snack.y == head.y {
self.skip_next_pop = true;
snack.new_place(&self);
}
}
fn collide(&self, x: i32, y: i32) -> bool {
for body_part in &self.body {
if body_part.x == x && body_part.y == y {
return true;
}
}
false
}
fn check_for_game_over(&self) {
let head = self.body.front().unwrap();
// if self.collide(head.x, head.y) {
// panic!("You ate yourself, Uroboros!");
// }
if head.x <= 0 || head.x >= W || head.y <= 0 || head.y >= H {
panic!("Out of bounds!");
}
}
}
fn render_field(snake: &Snake, snack: &Snack) {
for i in 0..H {
let mut row = String::from("|");
for j in 0..W {
let mut symbol_to_draw = snack.should_draw(j, i);
match snake.should_draw(j, i) {
Some(symbol) => symbol_to_draw = Some(symbol),
None => {}
}
match symbol_to_draw {
None => {
row.push_str("_|");
}
Some(sym) => {
row.push_str(format!("{}|", sym).as_str());
}
};
}
println!("{}", row);
}
}
pub fn run() {
let mut snack = Snack { x: 3, y: 5 };
let mut snake_body = VecDeque::new();
snake_body.push_front(BodyPart { x: 1, y: 1 });
snake_body.push_front(BodyPart { x: 2, y: 1 });
snake_body.push_front(BodyPart { x: 3, y: 1 });
let mut snake = Snake {
body: snake_body,
direction: Direction::Right,
skip_next_pop: false,
};
loop {
snake.check_for_game_over();
render_field(&snake, &snack);
let mut next_move = String::new();
io::stdout().flush().expect("Some error");
io::stdin()
.read_line(&mut next_move)
.expect("Failed to read line");
snake.feed(&mut snack);
match &*next_move.as_str().trim().replace("\n", "") {
"w" => snake.move_snake(Direction::Up),
"s" => snake.move_snake(Direction::Down),
"d" => snake.move_snake(Direction::Right),
"a" => snake.move_snake(Direction::Left),
_ => {
panic!("Wrong direction {} given", next_move);
}
};
}
}
fn main() {
let mut window = Window::new_with_size("Kiss3d: Cube", 700, 500);
window.set_background_color(0.0, 0.0, 0.3);
let mut field = vec![];
let look_at_y = (H - 1) as f32 / 2.0;
let look_at_x = (W - 1) as f32 / 2.0;
let mut camera = FirstPerson::new(
Point3::new(look_at_x, look_at_y, 15.0),
Point3::new(look_at_x, look_at_y, 0.0),
);
for row in 0..W {
for cell in 0..H {
let mut cube = window.add_cube(1.0, 1.0, 0.0);
cube.append_translation(&Translation3::new(row as f32, cell as f32, 0.0));
if (row + cell) % 2 == 0 {
cube.set_color(0.7, 0.7, 0.7);
} else {
cube.set_color(1.0, 1.0, 1.0);
}
field.push(cube);
}
}
let mut head = window.add_cube(0.9, 0.9, 0.9);
head.append_translation(&Translation3::new(5.0, 6.0, 1.1));
head.set_color(0.3, 0.7, 0.3);
window.set_light(Light::StickToCamera);
let mut movement = &Translation3::new(1.0, 0.0, 0.0);
while window.render_with_camera(&mut camera) {
for event in window.events().iter() {
match event.value {
WindowEvent::Key(key, action, modif) => {
// movement = match key {
// Key::D => &Translation3::new(1.0, 0.0, 0.0),
// Key::A => &Translation3::new(-1.0, 0.0, 0.0),
// Key::W => &Translation3::new(0.0, 1.0, 0.0),
// Key::S => &Translation3::new(0.0, -1.0, 0.0),
// _ => movement,
// };
}
_ => {}
}
}
head.prepend_to_local_translation(movement);
}
}
| true |
38f9049e740525a6bad966780654189aeb49d0a9
|
Rust
|
dennisss/dacha
|
/pkg/http/src/client/client_interface.rs
|
UTF-8
| 1,380 | 2.921875 | 3 |
[
"Apache-2.0"
] |
permissive
|
use common::errors::*;
use crate::request::Request;
use crate::response::Response;
#[derive(Default, Clone)]
pub struct ClientRequestContext {
pub wait_for_ready: bool,
}
#[async_trait]
pub trait ClientInterface {
async fn request(
&self,
request: Request,
request_context: ClientRequestContext,
) -> Result<Response>;
async fn current_state(&self) -> ClientState;
}
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ClientState {
/// Initial state of the client.
/// No attempt has been made yet to connect to a remote server so the health
/// is still unknown but we should start connecting soon.
Idle,
Connecting,
Ready,
Failure,
Shutdown,
}
impl ClientState {
/// Returns whether or not the request should be instantly rejected (return
/// an error).
///
/// TODO: Use this for something.
pub fn should_reject_request(&self, request_context: &ClientRequestContext) -> bool {
match *self {
ClientState::Idle => false,
ClientState::Connecting => false,
ClientState::Ready => false,
ClientState::Failure => request_context.wait_for_ready,
ClientState::Shutdown => true,
}
}
}
#[async_trait]
pub trait ClientEventListener: Send + Sync + 'static {
async fn handle_client_state_change(&self);
}
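// --- Illustrative usage sketch (editor's addition, not part of the original file) ---
// A caller can combine `current_state()` with `should_reject_request` to fail
// fast instead of queueing a request that is known to be doomed. `client` is
// an assumption (any `ClientInterface` implementation):
//
// let ctx = ClientRequestContext { wait_for_ready: false };
// if client.current_state().await.should_reject_request(&ctx) {
//     // surface an error to the caller instead of calling `client.request(...)`
// }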
| true |
349354750efacdb2b79f8daeea5182b89f855e5a
|
Rust
|
Atixx/BOFE-Rust
|
/src/boletin/mod.rs
|
UTF-8
| 5,868 | 2.75 | 3 |
[] |
no_license
|
use crate::articles::Article;
use chrono::naive::NaiveDate;
use reqwest::header::{
HeaderMap, HeaderValue, ACCEPT, ACCEPT_ENCODING, CONNECTION, CONTENT_TYPE, USER_AGENT,
};
use serde::Deserialize;
use soup::prelude::*;
use std::collections::HashMap;
mod query;
const POST_URL: &str = "https://www.boletinoficial.gob.ar/busquedaAvanzada/realizarBusqueda";
const BASE_URL: &str = "https://www.boletinoficial.gob.ar";
#[derive(Deserialize, Debug)]
struct BoletinResponse {
error: u32,
content: BoletinContent,
mensajes: Vec<String>,
}
#[derive(Deserialize, Debug)]
struct BoletinContent {
html: String,
sig_pag: u32,
ult_seccion: String,
ult_rubro: String,
cantidad_result_seccion: ResultsBySection,
}
#[derive(Deserialize, Debug)]
#[serde(untagged)]
enum ResultsBySection {
ValueResponse(HashMap<String, u32>),
Empty(Vec<u32>),
}
fn request_articles(
query_info: &query::QueryInfo,
) -> Result<BoletinResponse, Box<dyn std::error::Error>> {
let client = reqwest::blocking::Client::new();
fn construct_headers() -> HeaderMap {
let mut headers = HeaderMap::new();
headers.insert(USER_AGENT, HeaderValue::from_static("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.89 Safari/537.36"));
headers.insert(
CONTENT_TYPE,
HeaderValue::from_static("application/x-www-form-urlencoded; charset=UTF-8"),
);
headers.insert(ACCEPT_ENCODING, HeaderValue::from_static("gzip, deflate"));
headers.insert(ACCEPT, HeaderValue::from_static("*/*"));
headers.insert(CONNECTION, HeaderValue::from_static("keep-alive"));
headers
}
let query = query::BoletinQuery::new(query_info).build_query()?;
let res = client
.post(POST_URL)
.headers(construct_headers())
.body(query)
.send()?;
let body = res.json::<BoletinResponse>()?;
Ok(body)
}
/// Queries the Boletín Oficial and parses the results into a list of articles.
/// Pagination is not built in, so this returns at most 100 results.
pub fn fetch_articles(
search_string: &str,
from_date: &str,
to_date: &str,
) -> Result<Vec<Article>, Box<dyn std::error::Error>> {
let from = NaiveDate::parse_from_str(&from_date, "%Y-%m-%d")?;
let to = NaiveDate::parse_from_str(&to_date, "%Y-%m-%d")?;
let query_info = query::QueryInfo::new(&search_string, from, to);
let body = request_articles(&query_info).expect("Error parsing JSON response");
let soup = Soup::new(&body.content.html);
let articles = extract_articles(&soup);
Ok(articles)
}
fn extract_articles(soup: &Soup) -> Vec<Article> {
let mut articles: Vec<Article> = vec![];
for article in soup.tag("p").class("item").find_all() {
let mut parents = article.parents();
let mut link: String = String::from(BASE_URL);
let a_tag = parents.find(|tag| tag.name().to_string() == "a");
let href = a_tag.unwrap().get("href").unwrap();
link.push_str(&href);
let raw_title = String::from(&article.text());
let title = raw_title.trim().replace('\u{a0}', " ");
articles.push(Article { title, link });
}
articles
}
#[cfg(test)]
mod tests {
use super::*;
use std::fs;
fn response_from_mock(mock_name: &str) -> BoletinResponse {
let mut file_name = String::from("src/boletin/mocks/");
file_name.push_str(mock_name);
let raw = fs::read_to_string(file_name).expect("Unable to read file");
serde_json::from_str(&raw).expect("Unable to parse json file")
}
fn soup_from_response(boletin_response: BoletinResponse) -> Soup {
let content = &boletin_response.content.html;
Soup::new(content)
}
#[test]
    fn extract_single_article() {
let title = String::from("Testing Title");
let link = String::from("https://www.boletinoficial.gob.ar/testing_title_link");
let article = Article { title, link };
let response = response_from_mock("single-result-response.json");
let soup = soup_from_response(response);
assert_eq!(extract_articles(&soup), vec![article]);
}
#[test]
    fn extracts_no_articles_on_empty_response() {
let response = response_from_mock("empty-response.json");
let soup = soup_from_response(response);
assert_eq!(extract_articles(&soup), vec![]);
}
#[test]
fn extract_multiple_articles_single_page() {
let response = response_from_mock("single-page-multi-result-response.json");
let soup = soup_from_response(response);
let title = String::from("PRESUPUESTO");
let link = String::from(
"https://www.boletinoficial.gob.ar/detalleAviso/primera/247706/20210805?busqueda=1",
);
let article_1 = Article { title, link };
let title = String::from("POLICÍA DE SEGURIDAD AEROPORTUARIA");
let link = String::from(
"https://www.boletinoficial.gob.ar/detalleAviso/primera/247743/20210805?busqueda=1",
);
let article_2 = Article { title, link };
let title = String::from("SUBSECRETARÍA DE INVESTIGACIÓN CRIMINAL Y COOPERACIÓN JUDICIAL");
let link = String::from(
"https://www.boletinoficial.gob.ar/detalleAviso/primera/247653/20210803?busqueda=1",
);
let article_3 = Article { title, link };
let articles = vec![article_1, article_2, article_3];
assert_eq!(extract_articles(&soup), articles);
}
#[test]
fn extract_first_page_multi_page() {
let response = response_from_mock("multi-page-multi-result-response.json");
let soup = soup_from_response(response);
let extracted = extract_articles(&soup);
assert_eq!(extracted.len(), 100);
}
}
| true |
da31c1385d6264ca8c4046d37a65b29d508f7a2c
|
Rust
|
mhmoudr/ml-lessons
|
/l01-linear-regression-one-dimension/rust/src/main.rs
|
UTF-8
| 1,690 | 3.421875 | 3 |
[
"Apache-2.0"
] |
permissive
|
type Float = f64;
struct Point {x: Float,y: Float }
type Data = Vec<Point>;
use std::time::Instant;
fn main() {
let data:Data = vec!(
Point{x:1.,y:4.}
, Point{x:2.,y:5.5}
, Point{x:2.,y:6.}
, Point{x:3.,y:9.}
, Point{x:3.,y:10.}
);
let start = Instant::now();
let iterations = 1000000;
let m = LinearRegression::fit(data,iterations);
let duration = start.elapsed();
println!("{} , {} ",m.a,m.b);
println!("Model build is done in {:?} s, for {} iterations!" , duration , iterations);
}
struct Model {
a: Float,
b: Float
}
impl Model {
fn predict(&self, x: Float) -> Float {
self.b + self.a * x
}
}
struct MSE;
impl MSE{
fn calc(data: &Data, model:&Model) -> Float {
data.iter().map(|d| (d.y - model.predict(d.x)).powi(2) ).sum::<Float>() / (data.len() as Float)
}
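    // For reference: with MSE = (1/n) * sum_i (y_i - (a*x_i + b))^2, the analytic
    // gradients are dMSE/da = (-2/n) * sum_i x_i * (y_i - y_hat_i) and
    // dMSE/db = (-2/n) * sum_i (y_i - y_hat_i). The functions below divide by
    // (-2 * n) instead of multiplying by (-2/n), which only rescales the gradient
    // by a constant factor (1/4 here); the descent direction is unchanged and the
    // factor is effectively absorbed into the step size.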
fn gradient_a(data:&Data, model:&Model) -> Float {
data.iter().map(|d| d.x * (d.y - model.predict(d.x))).sum::<Float>() / (-2. * data.len() as Float)
}
fn gradient_b(data:&Data, model:&Model) -> Float {
data.iter().map(|d| d.y - model.predict(d.x)).sum::<Float>() / (-2. * data.len() as Float)
}
}
struct LinearRegression ;
impl LinearRegression {
fn fit(data:Data,iterations:i32) -> Model{
(0 .. iterations).fold( Model{a:10.,b:10.}, |m,idx| {
let ga = MSE::gradient_a(&data, &m);
let gb = MSE::gradient_b(&data, &m);
let step_size = 0.1;
let err = MSE::calc(&data,&m);
println!("{idx} : {err}, {ga}, {gb}, {}, {}",m.a,m.b);
Model{a:m.a-ga* step_size, b:m.b-gb* step_size }
})
}
}
| true |
03856d5ea17f0dc6894ad7fd09f22da6195ebfbe
|
Rust
|
fbaiesec/rust-libp2p
|
/protocols/ping/src/handler.rs
|
UTF-8
| 13,016 | 2.6875 | 3 |
[
"MIT"
] |
permissive
|
// Copyright 2019 Parity Technologies (UK) Ltd.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
use crate::protocol;
use futures::prelude::*;
use libp2p_core::ProtocolsHandlerEvent;
use libp2p_core::protocols_handler::{
KeepAlive,
SubstreamProtocol,
ProtocolsHandler,
ProtocolsHandlerUpgrErr,
};
use std::{error::Error, io, fmt, num::NonZeroU32, time::Duration};
use std::collections::VecDeque;
use tokio_io::{AsyncRead, AsyncWrite};
use wasm_timer::{Delay, Instant};
use void::Void;
/// The configuration for outbound pings.
#[derive(Clone, Debug)]
pub struct PingConfig {
/// The timeout of an outbound ping.
timeout: Duration,
/// The duration between the last successful outbound or inbound ping
/// and the next outbound ping.
interval: Duration,
/// The maximum number of failed outbound pings before the associated
/// connection is deemed unhealthy, indicating to the `Swarm` that it
/// should be closed.
max_failures: NonZeroU32,
/// Whether the connection should generally be kept alive unless
/// `max_failures` occur.
keep_alive: bool,
}
impl PingConfig {
/// Creates a new `PingConfig` with the following default settings:
///
/// * [`PingConfig::with_interval`] 15s
/// * [`PingConfig::with_timeout`] 20s
/// * [`PingConfig::with_max_failures`] 1
/// * [`PingConfig::with_keep_alive`] false
///
/// These settings have the following effect:
///
/// * A ping is sent every 15 seconds on a healthy connection.
/// * Every ping sent must yield a response within 20 seconds in order to
/// be successful.
/// * A single ping failure is sufficient for the connection to be subject
/// to being closed.
/// * The connection may be closed at any time as far as the ping protocol
/// is concerned, i.e. the ping protocol itself does not keep the
/// connection alive.
pub fn new() -> Self {
Self {
timeout: Duration::from_secs(20),
interval: Duration::from_secs(15),
max_failures: NonZeroU32::new(1).expect("1 != 0"),
keep_alive: false
}
}
/// Sets the ping timeout.
pub fn with_timeout(mut self, d: Duration) -> Self {
self.timeout = d;
self
}
/// Sets the ping interval.
pub fn with_interval(mut self, d: Duration) -> Self {
self.interval = d;
self
}
/// Sets the maximum number of consecutive ping failures upon which the remote
/// peer is considered unreachable and the connection closed.
pub fn with_max_failures(mut self, n: NonZeroU32) -> Self {
self.max_failures = n;
self
}
/// Sets whether the ping protocol itself should keep the connection alive,
/// apart from the maximum allowed failures.
///
/// By default, the ping protocol itself allows the connection to be closed
/// at any time, i.e. in the absence of ping failures the connection lifetime
/// is determined by other protocol handlers.
///
/// If the maximum number of allowed ping failures is reached, the
/// connection is always terminated as a result of [`PingHandler::poll`]
/// returning an error, regardless of the keep-alive setting.
pub fn with_keep_alive(mut self, b: bool) -> Self {
self.keep_alive = b;
self
}
}
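// Illustrative configuration (the values below are arbitrary examples, not the
// defaults documented above):
//
//     let config = PingConfig::new()
//         .with_interval(Duration::from_secs(30))
//         .with_timeout(Duration::from_secs(10))
//         .with_max_failures(NonZeroU32::new(3).expect("3 != 0"))
//         .with_keep_alive(true);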
/// The result of an inbound or outbound ping.
pub type PingResult = Result<PingSuccess, PingFailure>;
/// The successful result of processing an inbound or outbound ping.
#[derive(Debug)]
pub enum PingSuccess {
/// Received a ping and sent back a pong.
Pong,
/// Sent a ping and received back a pong.
///
/// Includes the round-trip time.
Ping { rtt: Duration },
}
/// An outbound ping failure.
#[derive(Debug)]
pub enum PingFailure {
/// The ping timed out, i.e. no response was received within the
/// configured ping timeout.
Timeout,
/// The ping failed for reasons other than a timeout.
Other { error: Box<dyn std::error::Error + Send + 'static> }
}
impl fmt::Display for PingFailure {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
PingFailure::Timeout => f.write_str("Ping timeout"),
PingFailure::Other { error } => write!(f, "Ping error: {}", error)
}
}
}
impl Error for PingFailure {
fn source(&self) -> Option<&(dyn Error + 'static)> {
match self {
PingFailure::Timeout => None,
PingFailure::Other { error } => Some(&**error)
}
}
}
/// Protocol handler that handles pinging the remote at a regular period
/// and answering ping queries.
///
/// If the remote doesn't respond, produces an error that closes the connection.
pub struct PingHandler<TSubstream> {
/// Configuration options.
config: PingConfig,
/// The timer for when to send the next ping.
next_ping: Delay,
/// The pending results from inbound or outbound pings, ready
/// to be `poll()`ed.
pending_results: VecDeque<PingResult>,
/// The number of consecutive ping failures that occurred.
failures: u32,
_marker: std::marker::PhantomData<TSubstream>
}
impl<TSubstream> PingHandler<TSubstream> {
/// Builds a new `PingHandler` with the given configuration.
pub fn new(config: PingConfig) -> Self {
PingHandler {
config,
next_ping: Delay::new(Instant::now()),
pending_results: VecDeque::with_capacity(2),
failures: 0,
_marker: std::marker::PhantomData
}
}
}
impl<TSubstream> ProtocolsHandler for PingHandler<TSubstream>
where
TSubstream: AsyncRead + AsyncWrite,
{
type InEvent = Void;
type OutEvent = PingResult;
type Error = PingFailure;
type Substream = TSubstream;
type InboundProtocol = protocol::Ping;
type OutboundProtocol = protocol::Ping;
type OutboundOpenInfo = ();
fn listen_protocol(&self) -> SubstreamProtocol<protocol::Ping> {
SubstreamProtocol::new(protocol::Ping)
}
fn inject_fully_negotiated_inbound(&mut self, _: ()) {
// A ping from a remote peer has been answered.
self.pending_results.push_front(Ok(PingSuccess::Pong));
}
fn inject_fully_negotiated_outbound(&mut self, rtt: Duration, _info: ()) {
// A ping initiated by the local peer was answered by the remote.
self.pending_results.push_front(Ok(PingSuccess::Ping { rtt }));
}
fn inject_event(&mut self, _: Void) {}
fn inject_dial_upgrade_error(&mut self, _info: (), error: ProtocolsHandlerUpgrErr<io::Error>) {
self.pending_results.push_front(
Err(match error {
ProtocolsHandlerUpgrErr::Timeout => PingFailure::Timeout,
e => PingFailure::Other { error: Box::new(e) }
}))
}
fn connection_keep_alive(&self) -> KeepAlive {
if self.config.keep_alive {
KeepAlive::Yes
} else {
KeepAlive::No
}
}
fn poll(&mut self) -> Poll<ProtocolsHandlerEvent<protocol::Ping, (), PingResult>, Self::Error> {
if let Some(result) = self.pending_results.pop_back() {
if let Ok(PingSuccess::Ping { .. }) = result {
let next_ping = Instant::now() + self.config.interval;
self.failures = 0;
self.next_ping.reset(next_ping);
}
if let Err(e) = result {
self.failures += 1;
if self.failures >= self.config.max_failures.get() {
return Err(e)
} else {
return Ok(Async::Ready(ProtocolsHandlerEvent::Custom(Err(e))))
}
}
return Ok(Async::Ready(ProtocolsHandlerEvent::Custom(result)))
}
match self.next_ping.poll() {
Ok(Async::Ready(())) => {
self.next_ping.reset(Instant::now() + self.config.timeout);
let protocol = SubstreamProtocol::new(protocol::Ping)
.with_timeout(self.config.timeout);
Ok(Async::Ready(ProtocolsHandlerEvent::OutboundSubstreamRequest {
protocol,
info: (),
}))
},
Ok(Async::NotReady) => Ok(Async::NotReady),
Err(e) => Err(PingFailure::Other { error: Box::new(e) })
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use futures::future;
use quickcheck::*;
use rand::Rng;
use tokio_tcp::TcpStream;
use tokio::runtime::current_thread::Runtime;
impl Arbitrary for PingConfig {
fn arbitrary<G: Gen>(g: &mut G) -> PingConfig {
PingConfig::new()
.with_timeout(Duration::from_secs(g.gen_range(0, 3600)))
.with_interval(Duration::from_secs(g.gen_range(0, 3600)))
.with_max_failures(NonZeroU32::new(g.gen_range(1, 100)).unwrap())
}
}
fn tick(h: &mut PingHandler<TcpStream>) -> Result<
ProtocolsHandlerEvent<protocol::Ping, (), PingResult>,
PingFailure
> {
Runtime::new().unwrap().block_on(future::poll_fn(|| h.poll() ))
}
#[test]
fn ping_interval() {
fn prop(cfg: PingConfig, ping_rtt: Duration) -> bool {
let mut h = PingHandler::<TcpStream>::new(cfg);
// The first ping is scheduled "immediately".
let start = h.next_ping.deadline();
assert!(start <= Instant::now());
// Send ping
match tick(&mut h) {
Ok(ProtocolsHandlerEvent::OutboundSubstreamRequest { protocol, info: _ }) => {
// The handler must use the configured timeout.
assert_eq!(protocol.timeout(), &h.config.timeout);
// The next ping must be scheduled no earlier than the ping timeout.
assert!(h.next_ping.deadline() >= start + h.config.timeout);
}
e => panic!("Unexpected event: {:?}", e)
}
let now = Instant::now();
// Receive pong
h.inject_fully_negotiated_outbound(ping_rtt, ());
match tick(&mut h) {
Ok(ProtocolsHandlerEvent::Custom(Ok(PingSuccess::Ping { rtt }))) => {
// The handler must report the given RTT.
assert_eq!(rtt, ping_rtt);
// The next ping must be scheduled no earlier than the ping interval.
assert!(now + h.config.interval <= h.next_ping.deadline());
}
e => panic!("Unexpected event: {:?}", e)
}
true
}
quickcheck(prop as fn(_,_) -> _);
}
#[test]
fn max_failures() {
let cfg = PingConfig::arbitrary(&mut StdGen::new(rand::thread_rng(), 100));
let mut h = PingHandler::<TcpStream>::new(cfg);
for _ in 0 .. h.config.max_failures.get() - 1 {
h.inject_dial_upgrade_error((), ProtocolsHandlerUpgrErr::Timeout);
match tick(&mut h) {
Ok(ProtocolsHandlerEvent::Custom(Err(PingFailure::Timeout))) => {}
e => panic!("Unexpected event: {:?}", e)
}
}
h.inject_dial_upgrade_error((), ProtocolsHandlerUpgrErr::Timeout);
match tick(&mut h) {
Err(PingFailure::Timeout) => {
assert_eq!(h.failures, h.config.max_failures.get());
}
e => panic!("Unexpected event: {:?}", e)
}
h.inject_fully_negotiated_outbound(Duration::from_secs(1), ());
match tick(&mut h) {
Ok(ProtocolsHandlerEvent::Custom(Ok(PingSuccess::Ping { .. }))) => {
// A success resets the counter for consecutive failures.
assert_eq!(h.failures, 0);
}
e => panic!("Unexpected event: {:?}", e)
}
}
}
| true |
990b516941c9be36ec39f655c2cf33fa885d59ab
|
Rust
|
archetect/archetect
|
/archetect-core/src/rules.rs
|
UTF-8
| 3,133 | 2.890625 | 3 |
[
"MIT"
] |
permissive
|
use std::path::Path;
use linked_hash_map::LinkedHashMap;
use log::trace;
use crate::config::{Pattern, RuleAction, RuleConfig};
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct RulesContext {
overwrite: bool,
#[serde(skip_serializing_if = "Option::is_none")]
path_rules: Option<LinkedHashMap<String, RuleConfig>>,
break_triggered: bool,
}
impl RulesContext {
pub fn new() -> RulesContext {
RulesContext {
overwrite: false,
path_rules: None,
break_triggered: false,
}
}
pub fn set_overwrite(&mut self, overwrite: bool) {
self.overwrite = overwrite;
}
pub fn overwrite(&self) -> bool {
self.overwrite
}
pub fn path_rules_mut(&mut self) -> Option<&mut LinkedHashMap<String, RuleConfig>> {
self.path_rules.as_mut()
}
pub fn path_rules(&self) -> Option<&LinkedHashMap<String, RuleConfig>> {
self.path_rules.as_ref()
}
pub fn break_triggered(&self) -> bool {
self.break_triggered
}
pub fn set_break_triggered(&mut self, break_triggered: bool) {
self.break_triggered = break_triggered;
}
pub fn insert_path_rules(&mut self, insert: &LinkedHashMap<String, RuleConfig>) {
let mut results = insert.clone();
let path_rules = self.path_rules.get_or_insert_with(|| LinkedHashMap::new());
for (name, options) in path_rules {
results.insert(name.to_owned(), options.clone());
}
self.path_rules = Some(results);
}
pub fn append_path_rules(&mut self, append: &LinkedHashMap<String, RuleConfig>) {
let path_rules = self.path_rules.get_or_insert_with(|| LinkedHashMap::new());
for (name, options) in append {
path_rules.insert(name.to_owned(), options.clone());
}
}
pub fn get_source_action<P: AsRef<Path>>(&self, path: P) -> RuleAction {
if let Some(path_rules) = self.path_rules() {
let path = path.as_ref();
for (name, path_rule) in path_rules {
for pattern in path_rule.patterns() {
match pattern {
Pattern::GLOB(pattern) => {
let matcher = glob::Pattern::new(pattern).unwrap();
if matcher.matches_path(&path) {
trace!(
"Source Rule [{}: {:?} {:?}] matched '{}'",
name,
&path_rule.action(),
pattern,
path.display()
);
return path_rule.action().clone();
}
}
_ => unimplemented!(),
}
}
}
}
RuleAction::RENDER
}
}
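// Illustrative usage (the path is hypothetical; path rules are normally
// inserted from archetype configuration before this is called):
//
//     let rules = RulesContext::new();
//     match rules.get_source_action("templates/main.rs") {
//         RuleAction::RENDER => { /* render the file as a template */ }
//         other => { /* handle the other configured action */ }
//     }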
#[derive(Debug, Serialize, Deserialize, Clone)]
pub enum WriteRule {
#[serde(rename = "IF_MISSING")]
IsMissing,
#[serde(rename = "ALWAYS")]
Always,
}
| true |
ee7db35f07c6820ba1b6beb0691f7c4efa7573d0
|
Rust
|
romatthe/remoc
|
/remoc/tests/rfn/rfn_once.rs
|
UTF-8
| 747 | 2.625 | 3 |
[
"Apache-2.0"
] |
permissive
|
use remoc::rfn::{CallError, RFnOnce};
use crate::loop_channel;
#[tokio::test]
async fn simple() {
crate::init();
let ((mut a_tx, _), (_, mut b_rx)) = loop_channel::<RFnOnce<_, Result<String, CallError>>>().await;
let reply_value = "reply".to_string();
let fn_value = reply_value.clone();
let rfn = RFnOnce::new_1(|arg: i16| async move {
assert_eq!(arg, 123);
Ok(fn_value)
});
println!("Sending remote function");
a_tx.send(rfn).await.unwrap();
println!("Receiving remote function");
let rfn = b_rx.recv().await.unwrap().unwrap();
println!("calling function");
let result = rfn.call(123).await.unwrap();
println!("result: {}", result);
assert_eq!(result, reply_value);
}
| true |
573264761763569e5e997630532a46e5cc9cded2
|
Rust
|
eldruin/tm4c-hal
|
/tm4c-hal/src/i2c.rs
|
UTF-8
| 12,070 | 2.65625 | 3 |
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] |
permissive
|
//! Common I2C code for TM4C123 and TM4C129
/// I2C error
#[derive(Debug)]
#[non_exhaustive]
pub enum Error {
/// Bus Busy
BusBusy,
/// Arbitration loss
Arbitration,
/// Missing Data ACK
DataAck,
/// Missing Address ACK
AdrAck,
/// I2C Timeout
Timeout,
}
#[macro_export]
/// Implements the traits for an I2C peripheral
macro_rules! i2c_pins {
($I2Cn:ident,
scl: [$(($($sclgpio: ident)::*, $sclaf: ident)),*],
sda: [$(($($sdagpio: ident)::*, $sdaaf: ident)),*],
) => {
$(
impl<T> SclPin<$I2Cn> for $($sclgpio)::*<AlternateFunction<$sclaf, T>>
where
T: OutputMode,
{}
)*
$(
impl<T> SdaPin<$I2Cn> for $($sdagpio)::*<AlternateFunction<$sdaaf, T>>
where
T: OutputMode,
{}
)*
}
}
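// Hypothetical invocation (the peripheral, GPIO types and alternate-function
// numbers depend on the concrete chip crate and are assumptions, not values
// from this file):
//
//     i2c_pins!(I2C0,
//         scl: [(gpiob::PB2, AF3)],
//         sda: [(gpiob::PB3, AF3)],
//     );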
#[macro_export]
/// Spins until the controller is ready (mcs.busy is clear) and optionally spins
/// on another field of the mcs register until it is clear or set (depending on
/// the op parameter).
macro_rules! i2c_busy_wait {
($i2c:expr $(, $field:ident, $op:ident)? ) => {{
// in 'release' builds, the time between setting the `run` bit and checking the `busy`
// bit is too short and the `busy` bit is not reliably set by the time you get there,
// it can take up to 8 clock cycles for the `run` to begin so this delay allows time
// for that hardware synchronization
delay(8);
// Allow 1,000 clock cycles before we timeout. At 100 kHz, this is 10 ms.
$i2c.mclkocnt
.write(|w| unsafe { w.cntl().bits((1_000 >> 4) as u8) });
let mcs = loop {
let mcs = $i2c.mcs.read();
if mcs.busy().bit_is_clear() {
break mcs;
}
};
if mcs.clkto().bit_is_set() {
return Err(Error::Timeout)
} else if mcs.arblst().bit_is_set() {
return Err(Error::Arbitration)
} else if mcs.error().bit_is_set() {
if mcs.adrack().bit_is_set() {
return Err(Error::AdrAck);
} else { // if mcs.datack().bit_is_set() {
return Err(Error::DataAck);
}
}
$( loop {
if mcs.clkto().bit_is_set() {
return Err(Error::Timeout)
} else if mcs.arblst().bit_is_set() {
return Err(Error::Arbitration)
} else if mcs.error().bit_is_set() {
if mcs.adrack().bit_is_set() {
return Err(Error::AdrAck);
} else { // if mcs.datack().bit_is_set() {
return Err(Error::DataAck);
}
} else if mcs.$field().$op() {
break;
} else {
// try again
}
};)?
Ok(())
}};
}
#[macro_export]
/// Implements embedded-hal for an TM4C I2C peripheral
macro_rules! i2c_hal {
($($I2CX:ident: ($powerDomain:ident, $i2cX:ident),)+) => {
$(
impl<SCL, SDA> I2c<$I2CX, (SCL, SDA)> {
/// Configures the I2C peripheral to work in master mode
pub fn $i2cX<F>(
i2c: $I2CX,
pins: (SCL, SDA),
freq: F,
clocks: &Clocks,
pc: &sysctl::PowerControl,
) -> Self where
F: Into<Hertz>,
SCL: SclPin<$I2CX>,
SDA: SdaPin<$I2CX>,
{
sysctl::control_power(
pc, sysctl::Domain::$powerDomain,
sysctl::RunMode::Run, sysctl::PowerState::On);
sysctl::reset(pc, sysctl::Domain::$powerDomain);
// set Master Function Enable, and clear other bits.
i2c.mcr.write(|w| w.mfe().set_bit());
// Write TimerPeriod configuration and clear other bits.
let freq = freq.into().0;
let tpr = ((clocks.sysclk.0/(2*10*freq))-1) as u8;
i2c.mtpr.write(|w| unsafe {w.tpr().bits(tpr)});
I2c { i2c, pins }
}
/// Releases the I2C peripheral and associated pins
pub fn free(self) -> ($I2CX, (SCL, SDA)) {
(self.i2c, self.pins)
}
}
impl<PINS> Write for I2c<$I2CX, PINS> {
type Error = Error;
fn write(&mut self, addr: u8, bytes: &[u8]) -> Result<(), Error> {
// Write Slave address and clear Receive bit
self.i2c.msa.write(|w| unsafe {
w.sa().bits(addr)
});
// Put first byte in data register
self.i2c.mdr.write(|w| unsafe {
w.data().bits(bytes[0])
});
let sz = bytes.len();
i2c_busy_wait!(self.i2c, busbsy, bit_is_clear)?;
// Send START + RUN
// If single byte transfer, set STOP
self.i2c.mcs.write(|w| {
if sz == 1 {
w.stop().set_bit();
}
w.start().set_bit()
.run().set_bit()
});
for (i,byte) in (&bytes[1..]).iter().enumerate() {
i2c_busy_wait!(self.i2c)?;
// Put next byte in data register
self.i2c.mdr.write(|w| unsafe {
w.data().bits(*byte)
});
// Send RUN command (Burst continue)
// Set STOP on last byte
self.i2c.mcs.write(|w| {
if (i+1) == (sz-1) {
w.stop().set_bit();
}
w.run().set_bit()
});
}
i2c_busy_wait!(self.i2c)?;
Ok(())
}
}
impl<PINS> Read for I2c<$I2CX, PINS> {
type Error = Error;
fn read(
&mut self,
addr: u8,
buffer: &mut [u8],
) -> Result<(), Error> {
// Write Slave address and set Receive bit
self.i2c.msa.write(|w| unsafe {
w.sa().bits(addr)
.rs().set_bit()
});
i2c_busy_wait!(self.i2c, busbsy, bit_is_clear)?;
let recv_sz = buffer.len();
if recv_sz == 1 {
// Single receive
self.i2c.mcs.write(|w| {
w.run().set_bit()
.start().set_bit()
.stop().set_bit()
});
i2c_busy_wait!(self.i2c)?;
buffer[0] = self.i2c.mdr.read().data().bits();
} else {
self.i2c.mcs.write(|w| {
w.start().set_bit()
.run().set_bit()
.ack().set_bit()
});
i2c_busy_wait!(self.i2c)?;
buffer[0] = self.i2c.mdr.read().data().bits();
for byte in &mut buffer[1..recv_sz-1] {
self.i2c.mcs.write(|w| {
w.run().set_bit()
.ack().set_bit()
});
i2c_busy_wait!(self.i2c)?;
*byte = self.i2c.mdr.read().data().bits();
}
self.i2c.mcs.write(|w| {
w.run().set_bit()
.stop().set_bit()
});
i2c_busy_wait!(self.i2c)?;
buffer[recv_sz-1] = self.i2c.mdr.read().data().bits();
}
Ok(())
}
}
impl<PINS> WriteRead for I2c<$I2CX, PINS> {
type Error = Error;
fn write_read(
&mut self,
addr: u8,
bytes: &[u8],
buffer: &mut [u8],
) -> Result<(), Error> {
let write_len = bytes.len();
if buffer.len() == 0 {
return self.write(addr, bytes);
}
if bytes.len() == 0 {
return self.read(addr, buffer);
}
// Write Slave address and clear Receive bit
self.i2c.msa.write(|w| unsafe {
w.sa().bits(addr)
});
// send first byte
self.i2c.mdr.write(|w| unsafe {
w.data().bits(bytes[0])
});
i2c_busy_wait!(self.i2c, busbsy, bit_is_clear)?;
self.i2c.mcs.write(|w| {
w.start().set_bit()
.run().set_bit()
});
i2c_busy_wait!(self.i2c)?;
for byte in (&bytes[1..write_len]).iter() {
self.i2c.mdr.write(|w| unsafe {
w.data().bits(*byte)
});
self.i2c.mcs.write(|w| {
w.run().set_bit()
});
i2c_busy_wait!(self.i2c)?;
}
// Write Slave address and set Receive bit
self.i2c.msa.write(|w| unsafe {
w.sa().bits(addr)
.rs().set_bit()
});
let recv_sz = buffer.len();
if recv_sz == 1 {
// emit Repeated START and STOP for single receive
self.i2c.mcs.write(|w| {
w.run().set_bit()
.start().set_bit()
.stop().set_bit()
});
i2c_busy_wait!(self.i2c)?;
buffer[0] = self.i2c.mdr.read().data().bits();
} else {
// emit Repeated START
self.i2c.mcs.write(|w| {
w.run().set_bit()
.start().set_bit()
.ack().set_bit()
});
i2c_busy_wait!(self.i2c)?;
buffer[0] = self.i2c.mdr.read().data().bits();
for byte in &mut buffer[1..recv_sz-1] {
self.i2c.mcs.write(|w| {
w.run().set_bit()
.ack().set_bit()
});
i2c_busy_wait!(self.i2c)?;
*byte = self.i2c.mdr.read().data().bits();
}
self.i2c.mcs.write(|w| {
w.run().set_bit()
.stop().set_bit()
});
i2c_busy_wait!(self.i2c)?;
buffer[recv_sz-1] = self.i2c.mdr.read().data().bits();
}
Ok(())
}
}
)+
}
}
| true |
2f71c36126e75e164a05a7650b7e4a64d60bda39
|
Rust
|
maniankara/rustbyexample
|
/tutorial/21ownership_borrowing.rs
|
UTF-8
| 785 | 3.6875 | 4 |
[
"Apache-2.0"
] |
permissive
|
fn eat_box_i32(i: Box<i32>) {
println!("box eaten: {}",i);
}
fn borrow_box_i32(i: &Box<i32>) {
println!("borrowed: {}",i);
}
fn eat_i32(j: i32) {
println!("eaten i32: {}", j);
}
fn borrow_i32(j: &i32) {
println!("borrow: {}",j);
}
fn main() {
let stack_i32 = 5i32;
borrow_i32(&stack_i32);
eat_i32(stack_i32);
// println!("Try to print i32: ", stack_i32);
// box
let box_i32 = Box::new(5i32);
borrow_box_i32(&box_i32);
// eat while ref exists
{
let _tmp : &i32 = &box_i32;
// eat_box_i32(box_i32); // error as ref exists
        eat_i32(stack_i32); // works because i32 is Copy (stack data), so the value is copied here rather than moved
}
eat_box_i32(box_i32);
// println!("Try to print box: {}", box_i32);
}
| true |
817040532c83dd6255c74a6ddacdd38ba6b9cadc
|
Rust
|
svenschmidt75/Rust
|
/DSandAlg/Queue/src/lib.rs
|
UTF-8
| 6,726 | 3.328125 | 3 |
[] |
no_license
|
use std::cell::RefCell;
use std::rc::Rc;
/// A shared, mutable handle to a node.
type Link = Option<Rc<RefCell<Node>>>;
struct Node {
    value: u64,
    next: Link,
}
impl Node {
    fn new(value: u64) -> Node {
        Node { value, next: None }
    }
}
struct LinkedList {
    head: Link,
    tail: Link,
}
impl LinkedList {
    fn new() -> LinkedList {
        LinkedList {
            head: None,
            tail: None,
        }
    }
    /// Append a value at the tail in O(1) using the tail handle.
    fn append(&mut self, value: u64) {
        let node = Rc::new(RefCell::new(Node::new(value)));
        match self.tail.take() {
            // SS: non-empty list, link the old tail to the new node
            Some(old_tail) => old_tail.borrow_mut().next = Some(Rc::clone(&node)),
            // SS: empty list, the new node is also the head
            None => self.head = Some(Rc::clone(&node)),
        }
        self.tail = Some(node);
    }
    /// Prepend a value at the head in O(1).
    fn prepend(&mut self, value: u64) {
        let node = Rc::new(RefCell::new(Node::new(value)));
        node.borrow_mut().next = self.head.take();
        if self.tail.is_none() {
            // SS: list was empty, so set the tail as well
            self.tail = Some(Rc::clone(&node));
        }
        self.head = Some(node);
    }
    /// Remove the first node holding `value`, if any.
    fn remove(&mut self, value: u64) {
        let head = match self.head.clone() {
            Some(head) => head,
            None => return,
        };
        // SS: removing the head is a special case
        if head.borrow().value == value {
            self.head = head.borrow_mut().next.take();
            if self.head.is_none() {
                self.tail = None;
            }
            return;
        }
        // SS: walk the list, looking one node ahead of `current`
        let mut current = head;
        loop {
            let next = current.borrow().next.clone();
            match next {
                None => return,
                Some(next) => {
                    if next.borrow().value == value {
                        // SS: unlink `next` by splicing around it
                        current.borrow_mut().next = next.borrow_mut().next.take();
                        if current.borrow().next.is_none() {
                            self.tail = Some(Rc::clone(&current));
                        }
                        return;
                    }
                    current = next;
                }
            }
        }
    }
    /// Find the first node holding `value`. The returned handle can be mutated
    /// through `borrow_mut`, so it also covers the role of a separate `find_mut`.
    fn find(&self, value: u64) -> Link {
        let mut current = self.head.clone();
        while let Some(node) = current {
            if node.borrow().value == value {
                return Some(node);
            }
            current = node.borrow().next.clone();
        }
        None
    }
    fn length(&self) -> usize {
        let mut count = 0;
        let mut current = self.head.clone();
        while let Some(node) = current {
            count += 1;
            current = node.borrow().next.clone();
        }
        count
    }
    fn print(&self) {
        if self.head.is_none() {
            println!("empty");
            return;
        }
        let mut current = self.head.clone();
        while let Some(node) = current {
            print!("{} ", node.borrow().value);
            current = node.borrow().next.clone();
        }
        println!();
    }
}
struct Queue {
    linked_list: LinkedList,
}
impl Queue {
    fn new() -> Queue {
        Queue {
            linked_list: LinkedList::new(),
        }
    }
    fn enqueue(&mut self, value: u64) {
        // SS: enqueue at the tail; dequeue pops from the head (FIFO)
        self.linked_list.append(value);
    }
    fn dequeue(&mut self) -> Option<u64> {
        let head = self.linked_list.head.take()?;
        // SS: the node after the old head becomes the new head
        self.linked_list.head = head.borrow_mut().next.take();
        if self.linked_list.head.is_none() {
            self.linked_list.tail = None;
        }
        let value = head.borrow().value;
        Some(value)
    }
    fn peek(&self) -> Option<u64> {
        self.linked_list
            .head
            .as_ref()
            .map(|node| node.borrow().value)
    }
}
#[cfg(test)]
mod tests {
use crate::{Queue, LinkedList};
#[test]
fn prepend_empty() {
// Arrange
let mut linked_list = LinkedList::new();
// Act
linked_list.prepend(1);
linked_list.prepend(2);
// Assert
assert_eq!(linked_list.length(), 2);
}
#[test]
fn append_empty() {
// Arrange
let mut linked_list = LinkedList::new();
// Act
linked_list.append(1);
linked_list.append(2);
// Assert
assert_eq!(linked_list.length(), 2);
}
#[test]
fn enqueue() {
// Arrange
let mut queue = Queue::new();
// Act
queue.enqueue(1);
// Assert
assert_eq!(queue.peek().unwrap(), 1);
}
#[test]
    fn fifo() {
// Arrange
let mut queue = Queue::new();
queue.enqueue(1);
queue.enqueue(2);
// Act
let value = queue.dequeue().unwrap();
// Assert
assert_eq!(1, value);
}
}
| true |
614a6bb8d8fb0493738fb08f135e7885bef1487a
|
Rust
|
sneakstarberry/rustlang
|
/projects/algorithm/q2941/src/main.rs
|
UTF-8
| 386 | 2.953125 | 3 |
[] |
no_license
|
use std::io::BufReader;
use std::io::BufRead;
fn main() {
let array = ["c=", "c-", "dz=", "d-", "lj", "nj", "s=", "z="];
let mut alphabat = String::new();
let mut rd = BufReader::new(std::io::stdin());
rd.read_line(&mut alphabat).unwrap();
for i in array.iter() {
alphabat = alphabat.replace(i, "1");
}
println!("{}", alphabat.trim().len());
}
| true |
fb68621138e9c980a90017d2a4e35d2ce95e37e1
|
Rust
|
JochemKlingeler/AdventOfCode2020
|
/src/solutions/day_07.rs
|
UTF-8
| 1,117 | 3.328125 | 3 |
[] |
no_license
|
use std::fs;
pub fn part1() -> usize {
do_part1(&get_day_07_input())
}
fn do_part1(_input: &str) -> usize {
0
}
pub fn part2() -> usize {
do_part2(&get_day_07_input())
}
fn do_part2(_input: &str) -> usize {
0
}
fn get_day_07_input() -> String {
fs::read_to_string("./input/day_07.txt").expect("Something went wrong reading the file")
}
#[cfg(test)]
mod tests {
use super::*;
const DEMO: &str = "light red bags contain 1 bright white bag, 2 muted yellow bags.
dark orange bags contain 3 bright white bags, 4 muted yellow bags.
bright white bags contain 1 shiny gold bag.
muted yellow bags contain 2 shiny gold bags, 9 faded blue bags.
shiny gold bags contain 1 dark olive bag, 2 vibrant plum bags.
dark olive bags contain 3 faded blue bags, 4 dotted black bags.
vibrant plum bags contain 5 faded blue bags, 6 dotted black bags.
faded blue bags contain no other bags.
dotted black bags contain no other bags.";
#[test]
fn part1_demo() {
assert_eq!(do_part1(self::DEMO), 4);
}
#[test]
fn part2_demo() {
        assert_eq!(do_part2(self::DEMO), 32);
}
}
| true |
019070aa09d3d61de05290530fdbfd1c1daf2c6f
|
Rust
|
trishume/syntect
|
/tests/error_handling.rs
|
UTF-8
| 2,341 | 2.921875 | 3 |
[
"MIT"
] |
permissive
|
use std::{
error::Error,
fmt::Display,
io::{Error as IoError, ErrorKind},
};
use syntect::{
parsing::{ParseScopeError, ParseSyntaxError},
LoadingError,
};
#[test]
fn loading_error_bad_path_display() {
assert_display(LoadingError::BadPath, "Invalid path");
}
#[test]
fn loading_error_parse_syntax_display() {
assert_display(
LoadingError::ParseSyntax(
ParseSyntaxError::MissingMandatoryKey("main"),
String::from("file.sublime-syntax"),
),
"file.sublime-syntax: Missing mandatory key in YAML file: main",
);
}
#[test]
fn loading_error_io_source() {
let io_error_source = IoError::new(ErrorKind::Other, "this is an error string");
assert_display(
LoadingError::Io(io_error_source).source().unwrap(),
"this is an error string",
);
}
#[test]
fn parse_syntax_error_missing_mandatory_key_display() {
assert_display(
ParseSyntaxError::MissingMandatoryKey("mandatory_key"),
"Missing mandatory key in YAML file: mandatory_key",
);
}
#[test]
fn parse_syntax_error_regex_compile_error_display() {
assert_display(
ParseSyntaxError::RegexCompileError("[a-Z]".to_owned(), LoadingError::BadPath.into()),
"Error while compiling regex '[a-Z]': Invalid path",
);
}
#[test]
fn parse_scope_error_display() {
assert_display(
ParseScopeError::TooLong,
"Too long scope. Scopes can be at most 8 atoms long.",
)
}
#[test]
fn parse_syntax_error_regex_compile_error_source() {
let error = ParseSyntaxError::RegexCompileError(
"[[[[[[[[[[[[[[[".to_owned(),
LoadingError::BadPath.into(),
);
assert_display(error.source().unwrap(), "Invalid path");
}
#[test]
fn loading_error_parse_syntax_source() {
let error = LoadingError::ParseSyntax(
ParseSyntaxError::RegexCompileError("[a-Z]".to_owned(), LoadingError::BadPath.into()),
String::from("any-file.sublime-syntax"),
);
assert_display(
error.source().unwrap(),
"Error while compiling regex '[a-Z]': Invalid path",
)
}
/// Helper to assert that a given implementation of [Display] generates the
/// expected string.
fn assert_display(display: impl Display, expected_display: &str) {
assert_eq!(format!("{}", display), String::from(expected_display));
}
| true |
0b05186580f5a71d9e5f372ce88718852b402646
|
Rust
|
zxk7516/exercise
|
/rust/nth-prime/src/lib.rs
|
UTF-8
| 863 | 3.03125 | 3 |
[] |
no_license
|
pub fn nth(n: u32) -> Option<u32> {
    if n == 0 {
        None
    } else {
        let mut prime: Vec<u32> = Vec::with_capacity(128);
        prime.push(2);
        prime.push(3);
        prime.push(5);
// prime[0]=2;prime[1]=3;prime[2]=5;
if n <= 3{
return Some(prime[(n-1) as usize]);
}else{
// let r = n-3
let mut r=0;
for i in 0..(n-3){
r = prime[(2+i) as usize]+2;
loop {
let factor_limit = (r as f64).sqrt() as u32;
                    if prime.iter().filter(|&&p| p <= factor_limit).any(|p| r % p == 0) {
r = r+2;
continue
}else{
break;
}
}
prime.push(r)
}
Some(r)
}
}
}
| true |
5e07b7aa88c0b2820fe18a572ab708b5d1375eb9
|
Rust
|
togglebyte/mixel2
|
/src/plugins.rs
|
UTF-8
| 4,631 | 2.75 | 3 |
[] |
no_license
|
use std::cell::RefCell;
use std::env::{set_current_dir, current_dir};
use std::fs::{read, read_dir, DirEntry};
use std::path::{Component, Path};
use mlua::prelude::*;
use mlua::{Lua, Result, Function, Variadic};
use nightmare::Position;
use nightmare::pixels::Pixel;
use crate::canvas::Containers;
#[derive(Debug)]
pub enum Arg {
String(String),
Number(f64),
Bool(bool),
}
impl Arg {
pub fn from_str(s: String) -> Option<Arg> {
if s == "false" {
return Some(Arg::Bool(false));
}
if s == "true" {
return Some(Arg::Bool(true));
}
let arg = match s.parse::<f64>() {
Ok(num) => Arg::Number(num),
Err(_) => Arg::String(s),
};
Some(arg)
}
}
// -----------------------------------------------------------------------------
// - Plugin call -
// -----------------------------------------------------------------------------
#[derive(Debug)]
pub struct PluginCall {
name: String,
args: Vec<Arg>,
}
impl PluginCall {
pub fn new(name: String, args: Vec<Arg>) -> Self {
Self {
name,
args,
}
}
}
// -----------------------------------------------------------------------------
// - Plugin -
// -----------------------------------------------------------------------------
fn load_plugin(lua: &Lua, path: &Path) -> Result<()> {
let cwd = current_dir().unwrap();
let name = match path.components().last() {
Some(Component::Normal(name)) => name.to_str().unwrap(),
_ => return Ok(()), // TODO: meh, fix this you lazy sausage
};
let path = path.join("autoload.lua");
let plugin_src = read(path)?;
let new_path = cwd.join("plugins").join(name);
set_current_dir(new_path);
match lua.load(&plugin_src).exec() {
Ok(_) => eprintln!("Loaded: {}", name),
Err(e) => eprintln!("Loading {} failed: {:?}", name, e),
}
set_current_dir(cwd);
Ok(())
}
pub struct Plugin {
lua: Lua,
}
impl Plugin {
pub fn new() -> Result<Self> {
let lua = Lua::new();
// TODO: It's obvious that we shouldn't do this:
// Plugins should be read from XDG_CONFIG/mixel/plugins
// and make sure the dir exists (create?)
let plugins = read_dir("plugins")?;
for entry in plugins {
let path = entry?.path();
if path.is_dir() {
load_plugin(&lua, &path);
}
}
let inst = Self {
lua
};
Ok(inst)
}
pub fn reload(&mut self, path: impl AsRef<Path>) {
// TODO: we are now reloading ALL plugins.
// That's silly.
if let Ok(inst) = Self::new() {
*self = inst;
}
}
// TODO add app context, that contains viewport, app things
pub fn exec_code(&mut self, code: &str, containers: &mut Containers) -> LuaResult<()> {
let containers = RefCell::new(containers);
self.lua.scope(|scope| {
let globals = self.lua.globals();
let f = scope.create_function_mut(|_, (x, y): (i32, i32)| {
let mut containers = containers.borrow_mut();
// containers.draw(Position::new(x, y));
Ok(())
}).unwrap();
globals.set("putPixel", f);
let f = scope.create_function_mut(|_, (r, g, b): (u8, u8, u8)| {
let mut containers = containers.borrow_mut();
let pixel = Pixel { r, g, b, ..Default::default() };
containers.set_colour(pixel);
Ok(())
}).unwrap();
globals.set("setColor", f);
match self.lua.load(code).exec() {
Ok(_) => {}
Err(e) => eprintln!("Lua err: {:?}", e),
}
Ok(())
});
Ok(())
}
// pub fn exec(&mut self, call: &PluginCall) -> LuaResult<()> {
// let PluginCall { name, args } = call;
// let globals = self.lua.globals();
// let parts = call.name.split('.');
// let f: Function = globals.raw_get(&call.name as &str)?;
// eprintln!("{:?}", call.name);
// let args = call.args.iter().map(|a| to_lua(&self.lua, a)).collect::<Variadic<_>>();
// f.call::<_, ()>(args);
// Ok(())
// }
}
fn to_lua<'a>(lua: &'a Lua, arg: &'a Arg) -> LuaValue<'a> {
match arg {
Arg::String(s) => s.clone().to_lua(lua).unwrap(),
Arg::Bool(b) => LuaValue::Boolean(*b),
Arg::Number(n) => LuaValue::Number(*n)
}
}
| true |
7c0951071458cceabe06399ae384e10701a2d4d9
|
Rust
|
mahkoh/elang
|
/src/funcs.rs
|
UTF-8
| 4,927 | 2.5625 | 3 |
[] |
no_license
|
#![allow(unused)]
use crate::{
types::{
span::Span,
store::Store,
tree::{Expr, ExprType, FnType, NativeFn},
},
Elang, Error, ErrorType, ExprKind,
};
use std::rc::Rc;
macro_rules! bi {
($f:expr) => {
ExprType::Fn {
func: FnType::Native { func: Rc::new($f) },
}
};
}
pub fn to_list() -> Rc<dyn NativeFn> {
let f = move |eval: &mut Elang, e: Rc<Expr>| {
let fields = eval.get_map(e.id)?;
let mut list = Vec::with_capacity(fields.len());
for &val in fields.values() {
list.push(val);
}
Ok(ExprType::List {
elements: Rc::from(list.into_boxed_slice()),
})
};
Rc::new(f)
}
pub fn assert() -> Rc<dyn NativeFn> {
let f = move |eval: &mut Elang, cond: Rc<Expr>| {
if eval.get_bool(cond.id)? {
let f = |_: &mut Elang, _msg: Rc<Expr>| {
let f = |_: &mut Elang, expr: Rc<Expr>| {
Ok(ExprType::Resolved {
ident: None,
dest: expr.id,
})
};
Ok(bi!(f))
};
Ok(bi!(f))
} else {
let f = |eval: &mut Elang, msg: Rc<Expr>| {
let str = eval.get_string(msg.id)?;
Err(eval.error(msg.id, ErrorType::AssertionFailed { msg: str }))
};
Ok(bi!(f))
}
};
Rc::new(f)
}
pub fn contains() -> Rc<dyn NativeFn> {
let f = move |eval: &mut Elang, list: Rc<Expr>| {
let list = eval.get_list(list.id)?;
let f = move |eval: &mut Elang, val: Rc<Expr>| {
for &el in list.iter() {
if eval.equal_to(el, val.id)? {
return Ok(ExprType::Bool { val: true });
}
}
Ok(ExprType::Bool { val: false })
};
Ok(bi!(f))
};
Rc::new(f)
}
pub fn raise() -> Rc<dyn NativeFn> {
let f = move |eval: &mut Elang, msg: Rc<Expr>| {
let str = eval.get_string(msg.id)?;
Err(eval.error(msg.id, ErrorType::Raised { msg: str }))
};
Rc::new(f)
}
pub fn filter() -> Rc<dyn NativeFn> {
let f = move |eval: &mut Elang, cond: Rc<Expr>| {
let f = move |eval: &mut Elang, olist: Rc<Expr>| {
let list = eval.get_list(olist.id)?;
let mut nlist = Vec::with_capacity(list.len());
for &el in list.iter() {
let span = Span::new(olist.span.lo, cond.span.hi);
let expr = eval.add_expr(
span,
ExprType::Apl {
func: cond.id,
arg: el,
},
);
if eval.get_bool(expr)? {
nlist.push(el);
}
}
Ok(ExprType::List {
elements: Rc::from(nlist.into_boxed_slice()),
})
};
Ok(bi!(f))
};
Rc::new(f)
}
pub fn ty() -> Rc<dyn NativeFn> {
let f = move |eval: &mut Elang, e: Rc<Expr>| {
let val = eval.resolve(e.id)?;
let ty = match *val.val.borrow() {
ExprType::Number { .. } => "number",
ExprType::String { .. } => "string",
ExprType::Fn { .. } => "fn",
ExprType::Map { .. } => "map",
ExprType::List { .. } => "list",
ExprType::Bool { .. } => "bool",
ExprType::Null => "null",
ref o => {
return Err(eval.perror(
e.id,
ErrorType::UnexpectedExprKind {
expected: &[
ExprKind::Number,
ExprKind::String,
ExprKind::Fn,
ExprKind::Map,
ExprKind::List,
ExprKind::Bool,
ExprKind::Null,
],
encountered: o.kind(),
},
))
}
};
let s = eval.intern(ty);
Ok(ExprType::String { content: s })
};
Rc::new(f)
}
macro_rules! is {
($name:ident, $pat:pat) => {
pub fn $name() -> Rc<dyn NativeFn> {
let f = move |eval: &mut Elang, e: Rc<Expr>| {
let val = eval.resolve(e.id)?;
let val = match *val.val.borrow() {
$pat => true,
_ => false,
};
Ok(ExprType::Bool { val })
};
Rc::new(f)
}
};
}
is!(is_number, ExprType::Number { .. });
is!(is_string, ExprType::String { .. });
is!(is_fn, ExprType::Fn { .. });
is!(is_map, ExprType::Map { .. });
is!(is_list, ExprType::List { .. });
is!(is_bool, ExprType::Bool { .. });
is!(is_null, ExprType::Null);
| true |
204f52d6c0edb2047b4195a662d702c368c586d0
|
Rust
|
fabura/icu4x
|
/experimental/list_formatter/src/list_formatter.rs
|
UTF-8
| 7,466 | 3.125 | 3 |
[
"Apache-2.0",
"ICU",
"MIT",
"LicenseRef-scancode-unicode"
] |
permissive
|
// This file is part of ICU4X. For terms of use, please see the file
// called LICENSE at the top level of the ICU4X source tree
// (online at: https://github.com/unicode-org/icu4x/blob/main/LICENSE ).
use crate::patterns::get_patterns;
use displaydoc::Display;
use formatted_string_builder::FormattedStringBuilder;
#[derive(Debug, Display)]
pub enum Error {
#[displaydoc("cannot create a ListFormatter for the given locale")]
UnknownLocale,
}
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum FieldType {
Element,
Literal,
}
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum Type {
And,
Or,
Unit,
}
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum Width {
Wide,
Short,
Narrow,
}
pub struct ListFormatter<'a> {
first: &'a Pattern<'a>,
pair: &'a Pattern<'a>,
middle: &'a Pattern<'a>,
last: &'a Pattern<'a>,
}
impl<'a> ListFormatter<'a> {
pub fn new(locale: &str, type_: Type, width: Width) -> Result<ListFormatter<'static>, Error> {
match get_patterns(locale, type_, width) {
None => Err(Error::UnknownLocale),
Some(patterns) => {
let [first, pair, middle, last] = patterns;
Ok(ListFormatter {
first,
pair,
middle,
last,
})
}
}
}
fn format_internal<B>(
&self,
values: &[&str],
empty: fn() -> B,
single: fn(&str) -> B,
apply_pattern: fn(&str, &PatternParts<'a>, B) -> B,
) -> B {
match values.len() {
0 => empty(),
1 => single(values[0]),
2 => apply_pattern(values[0], self.pair.get_parts(values[1]), single(values[1])),
n => {
let mut builder = apply_pattern(
values[n - 2],
self.last.get_parts(values[n - 1]),
single(values[n - 1]),
);
for i in (1..n - 2).rev() {
builder =
apply_pattern(values[i], self.middle.get_parts(values[i + 1]), builder);
}
apply_pattern(values[0], self.first.get_parts(values[1]), builder)
}
}
}
pub fn format(&self, values: &[&str]) -> String {
self.format_internal(
values,
|| "".to_string(),
|value| value.to_string(),
|value, (before, between, after), mut builder| {
builder = builder + after;
builder.insert_str(0, between);
builder.insert_str(0, value);
builder.insert_str(0, before);
builder
},
)
}
pub fn format_to_parts(&self, values: &[&str]) -> FormattedStringBuilder<FieldType> {
self.format_internal(
values,
FormattedStringBuilder::<FieldType>::new,
|value| {
let mut builder = FormattedStringBuilder::<FieldType>::new();
builder.append(value, FieldType::Element);
builder
},
|value, (before, between, after), mut builder| {
builder.append(after, FieldType::Literal);
builder.prepend(between, FieldType::Literal);
builder.prepend(value, FieldType::Element);
builder.prepend(before, FieldType::Literal);
builder
},
)
}
}
type PatternParts<'a> = (&'a str, &'a str, &'a str);
pub(crate) enum Pattern<'a> {
Simple {
parts: PatternParts<'a>,
},
Conditional {
cond: fn(&str) -> bool,
then: PatternParts<'a>,
else_: PatternParts<'a>,
},
}
impl<'a> Pattern<'a> {
fn get_parts(&self, following_value: &str) -> &PatternParts<'a> {
match self {
Pattern::Simple { parts } => parts,
Pattern::Conditional { cond, then, else_ } => {
if cond(following_value) {
then
} else {
else_
}
}
}
}
}
#[cfg(test)]
mod tests {
use super::*;
const VALUES: &[&str] = &["one", "two", "three", "four", "five"];
fn test_formatter() -> ListFormatter<'static> {
ListFormatter {
pair: &Pattern::Simple {
parts: ("", "; ", ""),
},
first: &Pattern::Simple {
parts: ("", ": ", ""),
},
middle: &Pattern::Simple {
parts: ("", ", ", ""),
},
last: &Pattern::Simple {
parts: ("", ". ", "!"),
},
}
}
#[test]
fn test_format() {
assert_eq!(test_formatter().format(&VALUES[0..0]), "");
assert_eq!(test_formatter().format(&VALUES[0..1]), "one");
assert_eq!(test_formatter().format(&VALUES[0..2]), "one; two");
assert_eq!(test_formatter().format(&VALUES[0..3]), "one: two. three!");
assert_eq!(
test_formatter().format(&VALUES[0..4]),
"one: two, three. four!"
);
assert_eq!(
test_formatter().format(VALUES),
"one: two, three, four. five!"
);
}
#[test]
fn test_format_to_parts() {
assert_eq!(test_formatter().format_to_parts(&VALUES[0..0]).as_str(), "");
assert_eq!(
test_formatter().format_to_parts(&VALUES[0..1]).as_str(),
"one"
);
assert_eq!(
test_formatter().format_to_parts(&VALUES[0..2]).as_str(),
"one; two"
);
assert_eq!(
test_formatter().format_to_parts(&VALUES[0..3]).as_str(),
"one: two. three!"
);
assert_eq!(
test_formatter().format_to_parts(&VALUES[0..4]).as_str(),
"one: two, three. four!"
);
let parts = test_formatter().format_to_parts(VALUES);
assert_eq!(parts.as_str(), "one: two, three, four. five!");
assert_eq!(parts.field_at(0), FieldType::Element);
assert!(parts.is_field_start(0, 0));
assert_eq!(parts.field_at(2), FieldType::Element);
assert!(!parts.is_field_start(2, 0));
assert_eq!(parts.field_at(3), FieldType::Literal);
assert!(parts.is_field_start(3, 0));
assert_eq!(parts.field_at(4), FieldType::Literal);
assert!(!parts.is_field_start(4, 0));
assert_eq!(parts.field_at(5), FieldType::Element);
assert!(parts.is_field_start(5, 0));
}
#[test]
fn test_spanish() {
let mut formatter = ListFormatter::new("es", Type::And, Width::Wide).unwrap();
assert_eq!(formatter.format(VALUES), "one, two, three, four y five");
assert_eq!(formatter.format(&["Mallorca", "Ibiza"]), "Mallorca e Ibiza");
formatter = ListFormatter::new("es", Type::Or, Width::Wide).unwrap();
assert_eq!(formatter.format(&["7", "8"]), "7 u 8");
assert_eq!(formatter.format(&["siete", "ocho"]), "siete u ocho");
assert_eq!(formatter.format(&["7", "11"]), "7 u 11");
// un millón ciento cuatro mil trescientos veinticuatro
// assert_eq!(formatter.format(&["7", "1104324"]), "7 o 1104324");
// *o*nce millones cuarenta y tres mil doscientos treinta y cuatro
// assert_eq!(formatter.format(&["7", "11043234"]), "7 u 11043234");
}
}
| true |
c3fdd35043bbdb0fc857baae1da794650351afc0
|
Rust
|
siku2/aoc_2019
|
/src/puzzles/day_02.rs
|
UTF-8
| 2,379 | 3.53125 | 4 |
[
"MIT"
] |
permissive
|
use crate::input::Input;
use std::error::Error;
type OpCode = usize;
type Program = Vec<OpCode>;
const OP_ADD: OpCode = 1;
const OP_MUL: OpCode = 2;
const OP_HALT: OpCode = 99;
fn add(p: &mut Program, a: OpCode, b: OpCode, s: OpCode) {
let index = p[s];
p[index] = p[p[a]] + p[p[b]];
}
fn mul(p: &mut Program, a: OpCode, b: OpCode, s: OpCode) {
let index = p[s];
p[index] = p[p[a]] * p[p[b]];
}
fn run_program(mut program: Program) -> Result<OpCode, Box<dyn Error>> {
let mut index = 0;
loop {
let op = program[index];
match op {
OP_ADD => add(&mut program, index + 1, index + 2, index + 3),
OP_MUL => mul(&mut program, index + 1, index + 2, index + 3),
OP_HALT => break,
_ => return Err("unknown opcode".into()),
}
index += 4;
}
Ok(program[0])
}
fn run_program_with_input(
mut program: Program,
noun: OpCode,
verb: OpCode,
) -> Result<OpCode, Box<dyn Error>> {
program[1] = noun;
program[2] = verb;
run_program(program)
}
pub fn first(i: &Input) -> Result<String, Box<dyn Error>> {
let program: Program = i.parse_csv().collect::<Result<Vec<_>, _>>()?;
    run_program_with_input(program, 12, 2).map(|i| i.to_string())
}
pub fn second(i: &Input) -> Result<String, Box<dyn Error>> {
let program: Program = i.parse_csv().collect::<Result<Vec<_>, _>>()?;
let mut solution: Option<_> = None;
for noun in 0..100 {
for verb in 0..100 {
let res = run_program_with_input(program.clone(), noun, verb)?;
if res == 19_690_720 {
solution = Some((noun, verb));
break;
}
}
}
solution
.map(|r| (100 * r.0 + r.1).to_string())
.ok_or_else(|| "no possibility found".into())
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_run_program() -> Result<(), Box<dyn Error>> {
assert_eq!(
run_program([1, 9, 10, 3, 2, 3, 11, 0, 99, 30, 40, 50].to_vec())?,
3500
);
assert_eq!(run_program([1, 0, 0, 0, 99].to_vec())?, 2);
assert_eq!(run_program([2, 3, 0, 3, 99].to_vec())?, 2);
assert_eq!(run_program([2, 4, 4, 5, 99, 0].to_vec())?, 2);
assert_eq!(run_program([1, 1, 1, 4, 99, 5, 6, 0, 99].to_vec())?, 30);
Ok(())
}
}
| true |
c00bd5cad44beaa23dfc2be956ff91c0ede47266
|
Rust
|
srwalter/ipp.rs
|
/src/request.rs
|
UTF-8
| 3,926 | 2.796875 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
//!
//! IPP request
//!
use std::io::{self, Read, Write};
use attribute::{IppAttribute, IppAttributeList};
use ::{Result, IPP_VERSION, IppHeader};
use consts::tag::DelimiterTag;
use consts::operation::Operation;
use consts::attribute::{PRINTER_URI, ATTRIBUTES_CHARSET, ATTRIBUTES_NATURAL_LANGUAGE};
use value::IppValue;
use parser::IppParser;
/// IPP request struct
pub struct IppRequestResponse<'a> {
/// Operation ID
header: IppHeader,
/// IPP attributes
attributes: IppAttributeList,
/// Optional payload to send after IPP-encoded stream (for example Print-Job operation)
payload: Option<&'a mut Read>
}
pub trait IppRequestTrait {
fn header(&self) -> &IppHeader;
}
impl<'a> IppRequestTrait for IppRequestResponse<'a> {
/// Get header
fn header(&self) -> &IppHeader {
&self.header
}
}
impl<'a> IppRequestResponse<'a> {
/// Create new IPP request for the operation and uri
pub fn new(operation: Operation, uri: &str) -> IppRequestResponse<'a> {
let hdr = IppHeader::new(IPP_VERSION, operation as u16, 1);
let mut retval = IppRequestResponse {
header: hdr,
attributes: IppAttributeList::new(),
payload: None };
retval.set_attribute(
DelimiterTag::OperationAttributes,
IppAttribute::new(ATTRIBUTES_CHARSET,
IppValue::Charset("utf-8".to_string())));
retval.set_attribute(
DelimiterTag::OperationAttributes,
IppAttribute::new(ATTRIBUTES_NATURAL_LANGUAGE,
IppValue::NaturalLanguage("en".to_string())));
retval.set_attribute(
DelimiterTag::OperationAttributes,
IppAttribute::new(PRINTER_URI,
IppValue::Uri(uri.replace("http", "ipp").to_string())));
retval
}
pub fn new_response(status: u16, id: u32) -> IppRequestResponse<'a> {
let hdr = IppHeader::new(IPP_VERSION, status, id);
let mut retval = IppRequestResponse {
header: hdr,
attributes: IppAttributeList::new(),
payload: None };
retval.set_attribute(
DelimiterTag::OperationAttributes,
IppAttribute::new(ATTRIBUTES_CHARSET,
IppValue::Charset("utf-8".to_string())));
retval.set_attribute(
DelimiterTag::OperationAttributes,
IppAttribute::new(ATTRIBUTES_NATURAL_LANGUAGE,
IppValue::NaturalLanguage("en".to_string())));
retval
}
/// Create IppRequestResponse from the parser
pub fn from_parser<'b>(parser: &mut IppParser) -> Result<IppRequestResponse<'b>> {
let res = parser.parse()?;
Ok(IppRequestResponse {
header: res.header().clone(),
attributes: res.attributes().clone(),
payload: None,
})
}
pub fn header_mut(&mut self) -> &mut IppHeader {
&mut self.header
}
/// Get attributes
pub fn attributes(&self) -> &IppAttributeList {
&self.attributes
}
/// Set payload
pub fn set_payload(&mut self, payload: &'a mut Read) {
self.payload = Some(payload)
}
    /// Set attribute
    pub fn set_attribute(&mut self, group: DelimiterTag, attribute: IppAttribute) {
        self.attributes.add(group, attribute);
}
/// Serialize request into the binary stream (TCP)
pub fn write(&'a mut self, writer: &mut Write) -> Result<usize> {
let mut retval = self.header.write(writer)?;
retval += self.attributes.write(writer)?;
debug!("Wrote {} bytes IPP stream", retval);
if let Some(ref mut payload) = self.payload {
let size = io::copy(payload, writer)? as usize;
debug!("Wrote {} bytes payload", size);
retval += size;
}
Ok(retval)
}
}
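// Illustrative flow (the printer URI is a placeholder, and `payload`/`stream`
// stand for any `Read`/`Write` implementors; Print-Job is the standard IPP
// operation assumed here):
//
//     let mut req = IppRequestResponse::new(Operation::PrintJob,
//                                           "http://127.0.0.1:631/printers/test");
//     req.set_payload(&mut payload);
//     let bytes_written = req.write(&mut stream)?;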
| true |
12e9cd99e15eab8b07df2b8bf4ffde3a6e10ce9d
|
Rust
|
Airtnp/collection_traits
|
/src/allocator.rs
|
UTF-8
| 2,254 | 3.171875 | 3 |
[
"MIT"
] |
permissive
|
use crate::capacity::CapacityAware;
/// Types that have allocator.
pub trait AllocatorAware {
/// Allocator type
type Allocator: std::alloc::Allocator;
/// Returns the allocator associated with `Self`
fn allocator(&self) -> &Self::Allocator;
}
pub trait AllocatorConstructible: AllocatorAware {
/// Construct `Self` without data, given an allocator.
fn new_in(alloc: <Self as AllocatorAware>::Allocator) -> Self;
}
pub trait AllocatorCapacityConstructible: AllocatorAware + CapacityAware + AllocatorConstructible {
/// Construct `Self` without data, given capacity and an allocator.
fn with_capacity_in(capacity: Self::CapacityType, alloc: Self::Allocator) -> Self;
}
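// Minimal sketch of how the traits compose, generic over any collection that
// implements them (requires the unstable `std::alloc::Allocator` API):
//
//     fn make_with<C: AllocatorCapacityConstructible>(
//         capacity: C::CapacityType,
//         alloc: C::Allocator,
//     ) -> C {
//         C::with_capacity_in(capacity, alloc)
//     }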
mod impls {
use super::*;
macro_rules! allocator_impls {
() => {};
([@Alloc $($args: ident $(: $bound: path $(| $others:path )*)?),* => $t: ty]; $($tail:tt)*) => {
impl<$($args $(: $bound $(+ $others)*)?),*> AllocatorAware for $t {
type Allocator = A;
fn allocator(&self) -> &Self::Allocator {
<$t>::allocator(self)
}
}
allocator_impls!($($tail)*);
};
([@AllocCtor $($args: ident $(: $bound: path $(| $others:path )*)?),* => $t: ty]; $($tail:tt)*) => {
impl<$($args $(: $bound $(+ $others)*)?),*> AllocatorConstructible for $t {
fn new_in(alloc: Self::Allocator) -> Self {
<$t>::new_in(alloc)
}
}
allocator_impls!($($tail)*);
};
([@AllocCapCtor $($args: ident $(: $bound: path $(| $others:path )*)?),* => $t: ty]; $($tail:tt)*) => {
impl<$($args $(: $bound $(+ $others)*)?),*> AllocatorCapacityConstructible for $t {
fn with_capacity_in(capacity: Self::CapacityType, alloc: Self::Allocator) -> Self {
<$t>::with_capacity_in(capacity, alloc)
}
}
allocator_impls!($($tail)*);
};
}
allocator_impls!(
[@Alloc T, A: std::alloc::Allocator => Vec<T, A>];
[@AllocCtor T, A: std::alloc::Allocator => Vec<T, A>];
[@AllocCapCtor T, A: std::alloc::Allocator => Vec<T, A>];
);
}
| true |
8d2c7a354f4bf559f54762d770d15578eff4985d
|
Rust
|
Nekrolm/reversi
|
/src/game/board.rs
|
UTF-8
| 5,264 | 3.328125 | 3 |
[] |
no_license
|
use std::ops::{Index, IndexMut, Range};
use std::error::Error;
pub const BOARD_SIZE: usize = 8;
#[derive(Copy, Clone)]
#[derive(PartialEq)]
pub enum PlayerId {
Black,
White,
}
#[derive(Copy, Clone)]
#[derive(PartialEq)]
pub enum MoveError {
CellNotEmpty,
MoveHasNoEffect,
InvalidCell
}
fn flip(p: PlayerId) -> PlayerId {
return match p {
PlayerId::Black => PlayerId::White,
PlayerId::White => PlayerId::Black,
}
}
pub type CellState = Option<PlayerId>;
#[derive(Copy, Clone)]
#[derive(PartialEq)]
pub struct Cell {
x: i32,
y: i32
}
impl Cell {
pub fn new(x : i32, y : i32) -> Cell {
return Cell{x, y};
}
}
type Direction = Cell;
pub struct Board {
field: [[CellState; BOARD_SIZE]; BOARD_SIZE]
}
impl Index<Cell> for Board {
type Output = CellState;
fn index(&self, cell: Cell) -> &Self::Output {
return &(self.field[cell.x as usize][cell.y as usize]);
}
}
impl IndexMut<Cell> for Board {
fn index_mut(&mut self, cell: Cell) -> &mut Self::Output {
return &mut (self.field[cell.x as usize][cell.y as usize]);
}
}
macro_rules! dir {
($x:expr, $y:expr) => {
Direction{x:$x,y:$y}
};
}
const DIRECTIONS: [Direction; 8] = [dir!(-1,-1), dir!(-1, 0), dir!(-1, 1),
dir!(0, -1), /* pivot */ dir!(0, 1),
dir!(1, -1), dir!(1, 0), dir!(1, 1)];
const VALID_INDEX_RANGE: Range<i32> = 0i32..(BOARD_SIZE as i32);
pub fn advance(cell: Cell, dir: Direction) -> Cell {
return Cell { x: cell.x + dir.x, y: cell.y + dir.y }
}
impl Board {
pub fn new() -> Board {
let mut field = [[CellState::None; BOARD_SIZE]; BOARD_SIZE];
let mid = BOARD_SIZE / 2;
field[mid-1][mid-1] = Some(PlayerId::Black);
field[mid][mid] = Some(PlayerId::Black);
field[mid][mid-1] = Some(PlayerId::White);
field[mid-1][mid] = Some(PlayerId::White);
return Board{field};
}
pub fn size(&self) -> usize {
return BOARD_SIZE;
}
pub fn is_valid_cell(&self, cell: Cell) -> bool {
return VALID_INDEX_RANGE.contains(&cell.x)
&& VALID_INDEX_RANGE.contains(&cell.y)
}
fn check_direction(&self, start: Cell, direction: Direction, player: PlayerId) -> bool {
let mut cell = advance(start, direction);
let mut cell_counter = 0;
while self.is_valid_cell(cell) && match self[cell] {
Some(filled) => player == filled,
_ => false
} {
cell_counter += 1;
cell = advance(cell, direction);
}
return self.is_valid_cell(cell) && cell_counter > 0 && !self[cell].is_none()
}
fn check_move(&self, cell: Cell, player: PlayerId) -> bool {
let other_player = flip(player);
return DIRECTIONS.iter().any(
|dir| self.check_direction(cell, *dir, other_player));
}
pub fn can_move_cell(&self, cell: Cell, player: PlayerId) -> Option<MoveError> {
use MoveError::{InvalidCell, CellNotEmpty, MoveHasNoEffect};
if !self.is_valid_cell(cell) {
return Some(InvalidCell);
}
match self[cell] {
Some(_) => Some(CellNotEmpty),
None => if self.check_move(cell, player) {
None
} else {
Some(MoveHasNoEffect)
}
}
}
pub fn available_moves(&self, player: PlayerId) -> Vec<Cell> {
let mut moves = Vec::new();
for x in VALID_INDEX_RANGE {
for y in VALID_INDEX_RANGE {
let cell = Cell { x, y };
if self.can_move_cell(cell, player).is_none() {
moves.push(cell)
}
}
}
return moves;
}
pub fn count(&self, player: PlayerId) -> u32 {
let mut cnt = 0u32;
for x in VALID_INDEX_RANGE {
for y in VALID_INDEX_RANGE {
let cell = Cell { x, y };
match self[cell] {
Some(filled) => if filled == player { cnt += 1},
_ => continue
}
}
}
return cnt;
}
    /// Flips the run of opposing pieces along `direction` if a move at `start`
    /// captures it; does nothing otherwise.
    fn apply_move_direction(&mut self, start: Cell, direction: Direction, player: PlayerId) {
let mut cell = advance(start, direction);
if !self.check_direction(start, direction, flip(player)) {
return;
}
while self.is_valid_cell(cell) && match self[cell] {
Some(filled) => player != filled,
None => false
} {
self[cell] = Some(player);
cell = advance(cell, direction);
}
}
fn apply_move(&mut self, cell: Cell, player: PlayerId) {
self[cell] = Some(player);
for dir in &DIRECTIONS {
self.apply_move_direction(cell, *dir, player);
}
}
pub fn try_move(&mut self, cell: Cell, player: PlayerId) -> Option<MoveError> {
return match self.can_move_cell(cell, player) {
None => {
self.apply_move(cell, player);
return None;
},
err => err
}
}
pub fn can_move(&self, player: PlayerId) -> bool {
return !self.available_moves(player).is_empty();
}
}
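// Hedged usage sketch (not part of the original file): exercises the Board API
// above for one legal opening move. Only items defined in this module (Board,
// Cell, PlayerId) are used; cell (4, 2) is a legal first move for Black given
// the initial layout built in `Board::new`.
#[cfg(test)]
mod usage_example {
    use super::*;
    #[test]
    fn black_opening_move_flips_one_piece() {
        let mut board = Board::new();
        let cell = Cell::new(4, 2);
        assert!(board.can_move_cell(cell, PlayerId::Black).is_none());
        assert!(board.try_move(cell, PlayerId::Black).is_none());
        // Black now owns its two starting pieces, the placed piece and the
        // flipped one; White is left with a single piece.
        assert_eq!(board.count(PlayerId::Black), 4);
        assert_eq!(board.count(PlayerId::White), 1);
    }
}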
| true |
a61fedec64e6b4d43f2d0f06e6ede5b20151bfb6
|
Rust
|
mas-yo/ragtime-old
|
/src/sample1/sub_component/position.rs
|
UTF-8
| 325 | 2.578125 | 3 |
[] |
no_license
|
pub struct Position {
pos: (f32, f32),
move_per_frame: (f32, f32),
}
impl Position {
pub fn start(&mut self) {}
pub fn update(&mut self) {
self.pos.0 += self.move_per_frame.0;
self.pos.1 += self.move_per_frame.1;
}
pub fn move_to(&mut self, tgt_pos: (f32, f32), after_msec: i32) {}
}
| true |
3e5f1ff48f49a253130dae3de8a420c574067015
|
Rust
|
jasonwhite/button-rs
|
/src/button/events/mod.rs
|
UTF-8
| 18,933 | 2.65625 | 3 |
[
"MIT"
] |
permissive
|
// Copyright (c) 2019 Jason White
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//! The build itself does not handle any sort of display for the user. Instead,
//! it sends events along a channel for consumption by a user-facing logging
//! system. This way, events can be sent across the network transparently.
use std::fmt;
use std::io;
use std::sync::mpsc::{Receiver, Sender};
use std::thread::{self, JoinHandle};
use bincode;
use bytes::Bytes;
use chrono::{DateTime, Utc};
use derive_more::{Display, From};
use serde::{Deserialize, Serialize};
use crate::detect::Detected;
use crate::res;
use crate::task;
mod binary;
mod console;
pub use self::binary::Binary;
pub use self::console::Console;
/// A build has begun.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct BeginBuildEvent {
/// The number of threads used during the build.
pub threads: usize,
/// The name of the build.
pub name: String,
}
/// A build has ended.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct EndBuildEvent {
/// The result of the build.
pub result: Result<(), String>,
}
/// A task has started executing.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct BeginTaskEvent {
/// The thread this task is getting executed on. This is stable
/// throughout the execution of the task.
pub id: usize,
/// The task that has started. If this needs to be stored for later, use
/// the thread number in conjunction with the total number of threads to
/// store task information in a `Vec`.
pub task: task::Any,
}
/// A task has had output written to it.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct TaskOutputEvent {
/// The thread this task is getting executed on. This is stable
/// throughout the execution of the task.
pub id: usize,
/// The chunk of data that has been output by the task.
pub chunk: Bytes,
}
/// The task has finished.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct EndTaskEvent {
/// The thread this task is getting executed on. This is stable
/// throughout the execution of the task.
pub id: usize,
/// The result of this task.
pub result: Result<Detected, String>,
}
/// A resource is getting deleted.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct DeleteEvent {
/// The thread this deletion is getting executed on.
pub id: usize,
/// The resource that is getting deleted.
pub resource: res::Any,
/// The stringy result of the deletion.
pub result: Result<(), String>,
}
/// The checksum of a resource failed to compute.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ChecksumErrorEvent {
    /// The thread this checksum computation is getting executed on.
    pub id: usize,
    /// The resource whose checksum failed to compute.
    pub resource: res::Any,
    /// The stringy error from the checksum computation.
pub error: String,
}
/// A single build event.
#[derive(Clone, Debug, Serialize, Deserialize, From)]
pub enum Event {
/// A build has begun.
BeginBuild(BeginBuildEvent),
/// A build has finished.
EndBuild(EndBuildEvent),
/// A task has started.
BeginTask(BeginTaskEvent),
/// A task has had output written to it.
TaskOutput(TaskOutputEvent),
/// The task has finished.
EndTask(EndTaskEvent),
/// A resource is getting deleted.
Delete(DeleteEvent),
/// The checksum of a resource failed to compute.
ChecksumError(ChecksumErrorEvent),
}
pub type Timestamp = DateTime<Utc>;
pub type EventSender = Sender<(Timestamp, Event)>;
pub type EventReceiver = Receiver<(Timestamp, Event)>;
/// Trait for receiving timestamped events.
///
/// Implementors of this can do interesting things like:
///
/// - Write the events to stdout.
/// - Write to a web page.
/// - Write to a binary log file for later replay.
/// - Send the events to another process for consumption.
/// - Forward to another event handler.
pub trait EventHandler: Send {
type Error: std::error::Error;
/// Listens for events on a channel, sending them to an event handler. If
/// the event handler returns an error, this function stops listening for
/// events and returns the error as well.
///
/// This function returns when the sending channel has hung up (i.e., all
/// senders have been dropped).
fn read_channel(
&mut self,
receiver: EventReceiver,
) -> Result<(), Self::Error>
where
Self: Sized,
{
for (timestamp, event) in receiver.iter() {
self.call(timestamp, event)?;
}
self.finish()
}
/// Gets events from a reader. Stops reading when an error occurs.
///
/// If `realtime` is `true`, then an appropriate amount of time is waited
/// between each event.
fn read_bincode<R>(
&mut self,
mut reader: R,
realtime: bool,
) -> Result<(), Self::Error>
where
Self: Sized,
R: io::Read,
{
// Grab the first event. We need the initial timestamp to calculate
// sleep deltas when doing realtime playback.
let (mut prev, event) = match bincode::deserialize_from(&mut reader) {
Ok(x) => x,
Err(_) => return self.finish(),
};
self.call(prev, event)?;
while let Ok((timestamp, event)) =
bincode::deserialize_from::<_, (Timestamp, _)>(&mut reader)
{
if realtime {
if let Ok(delta) =
timestamp.signed_duration_since(prev).to_std()
{
thread::sleep(delta);
}
}
prev = timestamp;
self.call(timestamp, event)?;
}
self.finish()
}
/// Handles an event.
fn call(
&mut self,
timestamp: Timestamp,
event: Event,
) -> Result<(), Self::Error>;
/// Called when there are no more events.
fn finish(&mut self) -> Result<(), Self::Error> {
Ok(())
}
}
impl<H> EventHandler for Box<H>
where
H: EventHandler + ?Sized,
{
type Error = H::Error;
fn call(
&mut self,
timestamp: Timestamp,
event: Event,
) -> Result<(), Self::Error> {
(**self).call(timestamp, event)
}
fn finish(&mut self) -> Result<(), Self::Error> {
(**self).finish()
}
}
impl<H> EventHandler for [H]
where
H: EventHandler,
{
type Error = H::Error;
fn call(
&mut self,
timestamp: Timestamp,
event: Event,
) -> Result<(), Self::Error> {
for handler in self.iter_mut() {
handler.call(timestamp, event.clone())?;
}
Ok(())
}
fn finish(&mut self) -> Result<(), Self::Error> {
for handler in self.iter_mut() {
handler.finish()?;
}
Ok(())
}
}
impl<H> EventHandler for Vec<H>
where
H: EventHandler,
{
type Error = H::Error;
fn call(
&mut self,
timestamp: Timestamp,
event: Event,
) -> Result<(), Self::Error> {
for handler in self.iter_mut() {
handler.call(timestamp, event.clone())?;
}
Ok(())
}
fn finish(&mut self) -> Result<(), Self::Error> {
for handler in self.iter_mut() {
handler.finish()?;
}
Ok(())
}
}
#[derive(Debug, Display)]
pub enum AnyHandlerError {
Binary(<Binary as EventHandler>::Error),
Console(<Console as EventHandler>::Error),
}
impl std::error::Error for AnyHandlerError {}
#[derive(From)]
pub enum AnyHandler {
Binary(Binary),
Console(Console),
}
impl EventHandler for AnyHandler {
type Error = AnyHandlerError;
fn call(
&mut self,
timestamp: Timestamp,
event: Event,
) -> Result<(), Self::Error> {
match self {
Self::Binary(h) => {
h.call(timestamp, event).map_err(AnyHandlerError::Binary)
}
Self::Console(h) => {
h.call(timestamp, event).map_err(AnyHandlerError::Console)
}
}
}
fn finish(&mut self) -> Result<(), Self::Error> {
match self {
Self::Binary(h) => h.finish().map_err(AnyHandlerError::Binary),
Self::Console(h) => h.finish().map_err(AnyHandlerError::Console),
}
}
}
/// A helper trait for sending events to a sink.
pub trait EventSink {
/// Sends a `BeginBuildEvent` to the sink.
fn begin_build<S>(&self, threads: usize, name: S)
where
S: Into<String>;
/// Sends a `EndBuildEvent` to the sink.
fn end_build<E>(&self, result: &Result<(), E>)
where
E: fmt::Display;
/// Sends a `BeginTaskEvent` to the sink and returns an output writer.
fn begin_task(&self, id: usize, task: task::Any)
-> TaskOutputWriter<&Self>;
/// Sends a `TaskOutputEvent` to the sink.
fn task_output(&self, id: usize, chunk: Bytes);
/// Sends a `EndTaskEvent` to the sink.
fn end_task<E>(&self, id: usize, result: &Result<Detected, E>)
where
E: fmt::Display;
/// Sends a `DeleteEvent` to the sink.
fn delete<E>(&self, id: usize, resource: res::Any, result: &Result<(), E>)
where
E: fmt::Display;
/// Sends a `ChecksumErrorEvent` to the sink.
fn checksum_error<E>(&self, id: usize, resource: res::Any, error: &E)
where
E: fmt::Display;
}
// TODO: Don't unwrap. Log the errors instead.
impl EventSink for EventSender {
fn begin_build<S>(&self, threads: usize, name: S)
where
S: Into<String>,
{
let event = BeginBuildEvent {
threads,
name: name.into(),
};
self.send((Utc::now(), Event::BeginBuild(event))).unwrap();
}
fn end_build<E>(&self, result: &Result<(), E>)
where
E: fmt::Display,
{
let result = match result {
Ok(()) => Ok(()),
Err(err) => Err(err.to_string()),
};
let event = EndBuildEvent { result };
self.send((Utc::now(), Event::EndBuild(event))).unwrap();
}
fn begin_task(
&self,
id: usize,
task: task::Any,
) -> TaskOutputWriter<&Self> {
let event = BeginTaskEvent { id, task };
self.send((Utc::now(), Event::BeginTask(event))).unwrap();
TaskOutputWriter { id, sink: self }
}
fn task_output(&self, id: usize, chunk: Bytes) {
let event = TaskOutputEvent { id, chunk };
self.send((Utc::now(), Event::TaskOutput(event))).unwrap();
}
fn end_task<E>(&self, id: usize, result: &Result<Detected, E>)
where
E: fmt::Display,
{
        let result = match result {
            Ok(x) => Ok(x.clone()),
            Err(err) => Err(err.to_string()),
        };
        let event = EndTaskEvent { id, result };
self.send((Utc::now(), Event::EndTask(event))).unwrap();
}
fn delete<E>(&self, id: usize, resource: res::Any, result: &Result<(), E>)
where
E: fmt::Display,
{
let result = match result {
Ok(()) => Ok(()),
Err(err) => Err(err.to_string()),
};
let event = DeleteEvent {
id,
resource,
result,
};
self.send((Utc::now(), Event::Delete(event))).unwrap();
}
fn checksum_error<E>(&self, id: usize, resource: res::Any, error: &E)
where
E: fmt::Display,
{
let event = ChecksumErrorEvent {
id,
resource,
error: error.to_string(),
};
self.send((Utc::now(), Event::ChecksumError(event)))
.unwrap();
}
}
impl<'a> EventSink for &'a EventSender {
fn begin_build<S>(&self, threads: usize, name: S)
where
S: Into<String>,
{
let event = BeginBuildEvent {
threads,
name: name.into(),
};
self.send((Utc::now(), Event::BeginBuild(event))).unwrap();
}
fn end_build<E>(&self, result: &Result<(), E>)
where
E: fmt::Display,
{
let result = match result {
Ok(()) => Ok(()),
Err(err) => Err(err.to_string()),
};
let event = EndBuildEvent { result };
self.send((Utc::now(), Event::EndBuild(event))).unwrap();
}
fn begin_task(
&self,
id: usize,
task: task::Any,
) -> TaskOutputWriter<&Self> {
let event = BeginTaskEvent { id, task };
self.send((Utc::now(), Event::BeginTask(event))).unwrap();
TaskOutputWriter { id, sink: self }
}
fn task_output(&self, id: usize, chunk: Bytes) {
let event = TaskOutputEvent { id, chunk };
self.send((Utc::now(), Event::TaskOutput(event))).unwrap();
}
fn end_task<E>(&self, id: usize, result: &Result<Detected, E>)
where
E: fmt::Display,
{
        let result = match result {
            Ok(x) => Ok(x.clone()),
            Err(err) => Err(err.to_string()),
        };
        let event = EndTaskEvent { id, result };
self.send((Utc::now(), Event::EndTask(event))).unwrap();
}
fn delete<E>(&self, id: usize, resource: res::Any, result: &Result<(), E>)
where
E: fmt::Display,
{
let result = match result {
Ok(()) => Ok(()),
Err(err) => Err(err.to_string()),
};
let event = DeleteEvent {
id,
resource,
result,
};
self.send((Utc::now(), Event::Delete(event))).unwrap();
}
fn checksum_error<E>(&self, id: usize, resource: res::Any, error: &E)
where
E: fmt::Display,
{
let event = ChecksumErrorEvent {
id,
resource,
error: error.to_string(),
};
self.send((Utc::now(), Event::ChecksumError(event)))
.unwrap();
}
}
/// Helper for writing task output more ergonomically.
pub struct TaskOutputWriter<S> {
id: usize,
sink: S,
}
impl<S> TaskOutputWriter<S>
where
S: EventSink,
{
pub fn finish<E>(self, result: &Result<Detected, E>)
where
E: fmt::Display,
{
self.sink.end_task(self.id, result);
}
}
impl<S> io::Write for TaskOutputWriter<S>
where
S: EventSink,
{
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
// TODO: Do buffering?
self.sink.task_output(self.id, Bytes::from(buf));
Ok(buf.len())
}
fn write_all(&mut self, buf: &[u8]) -> io::Result<()> {
self.sink.task_output(self.id, Bytes::from(buf));
Ok(())
}
fn flush(&mut self) -> io::Result<()> {
// noop
Ok(())
}
}
/// Helper for creating and destroying an event handler thread that receives
/// events.
pub struct EventThread<H>
where
H: EventHandler,
{
handle: Option<JoinHandle<Result<H, H::Error>>>,
}
impl<H> EventThread<H>
where
H: EventHandler,
{
pub fn new(mut handler: H, receiver: EventReceiver) -> Self
where
H: EventHandler + Send + 'static,
H::Error: Send,
{
EventThread {
handle: Some(thread::spawn(move || {
handler.read_channel(receiver)?;
Ok(handler)
})),
}
}
pub fn join(mut self) -> Result<H, H::Error> {
let handle = self.handle.take().unwrap();
handle.join().unwrap()
}
}
impl<H> Drop for EventThread<H>
where
H: EventHandler,
{
fn drop(&mut self) {
if let Some(handle) = self.handle.take() {
let _ = handle.join().unwrap();
}
}
}
/// An event handler that simply keeps a count for each event. Useful for
/// testing.
#[cfg(test)]
mod test {
use super::*;
use std::sync::mpsc;
#[derive(Default)]
pub struct Stat {
pub begin_build: usize,
pub end_build: usize,
pub begin_task: usize,
pub task_output: usize,
pub end_task: usize,
pub delete: usize,
pub checksum_error: usize,
}
impl EventHandler for Stat {
type Error = io::Error;
fn call(
&mut self,
_timestamp: Timestamp,
event: Event,
) -> Result<(), Self::Error> {
match event {
Event::BeginBuild(_) => {
self.begin_build += 1;
}
Event::EndBuild(_) => {
self.end_build += 1;
}
Event::BeginTask(_) => {
self.begin_task += 1;
}
Event::TaskOutput(_) => {
self.task_output += 1;
}
Event::EndTask(_) => {
self.end_task += 1;
}
Event::Delete(_) => {
self.delete += 1;
}
Event::ChecksumError(_) => {
self.checksum_error += 1;
}
}
Ok(())
}
}
#[test]
fn event_handler() -> Result<(), Box<dyn std::error::Error>> {
let (sender, receiver) = mpsc::channel();
let event_thread = EventThread::new(Stat::default(), receiver);
sender.begin_build(42, "build");
let result: Result<(), &str> = Ok(());
sender.end_build(&result);
drop(sender);
let stats = event_thread.join()?;
assert_eq!(stats.begin_build, 1);
assert_eq!(stats.end_build, 1);
assert_eq!(stats.begin_task, 0);
assert_eq!(stats.task_output, 0);
assert_eq!(stats.end_task, 0);
assert_eq!(stats.delete, 0);
assert_eq!(stats.checksum_error, 0);
Ok(())
}
}
| true |
c13f7507882c9b0314dc27079fe92090458d7b0f
|
Rust
|
Bassel-Bakr/Data_Structures
|
/Rust/splay.rs
|
UTF-8
| 8,584 | 2.953125 | 3 |
[] |
no_license
|
#![allow(dead_code)]
#![allow(unused_variables, unused_imports)]
use std::cell::RefCell;
use std::cmp;
use std::io::*;
use std::rc::{Rc, Weak};
use std::{mem, mem::ManuallyDrop};
fn main() {
// let input = &mut Input::new();
// let output = &mut BufWriter::new(stdout());
let mut t = Tree::new();
for i in 0..1000_000 {
t.insert(unsafe { rand() });
}
}
extern "C" {
fn rand() -> i32;
}
type Branch<T> = Option<Rc<ManuallyDrop<RefCell<Node<T>>>>>;
type Parent<T> = Option<Weak<ManuallyDrop<RefCell<Node<T>>>>>;
struct Node<T> {
x: T,
l: Branch<T>,
r: Branch<T>,
p: Parent<T>,
}
impl<T> Node<T> {
fn new(x: T) -> Self {
Node {
x: x,
l: None,
r: None,
p: None,
}
}
fn new_branch(x: T) -> Branch<T> {
Some(Rc::new(ManuallyDrop::new(RefCell::new(Node::new(x)))))
}
}
struct Tree<T: std::fmt::Display> {
root: Branch<T>,
}
impl<T: std::fmt::Display> Drop for Tree<T> {
fn drop(&mut self) {
let root = mem::take(&mut self.root);
let mut a = Vec::new();
a.push(root);
while let Some(mut p) = a.pop() {
let l = mem::take(&mut p.as_ref().unwrap().borrow_mut().l);
let r = mem::take(&mut p.as_ref().unwrap().borrow_mut().r);
let mut x = Rc::get_mut(p.as_mut().unwrap());
if x.is_some() {
unsafe {
ManuallyDrop::drop(x.as_mut().unwrap());
}
} else {
a.push(p);
}
if l.is_some() {
a.push(l);
}
if r.is_some() {
a.push(r);
}
}
}
}
impl<T: PartialOrd + std::fmt::Display> Tree<T> {
fn new() -> Self {
Tree { root: None }
}
fn insert(&mut self, x: T) {
let (l, r) = Self::split(mem::take(&mut self.root), &x);
let node = Node::new_branch(x);
{
let mut m = node.as_ref().unwrap().borrow_mut();
if l.is_some() {
l.as_ref().unwrap().borrow_mut().p = Some(Rc::downgrade(node.as_ref().unwrap()));
m.l = l;
}
if r.is_some() {
r.as_ref().unwrap().borrow_mut().p = Some(Rc::downgrade(node.as_ref().unwrap()));
m.r = r;
}
}
self.root = node;
}
fn split(root: Branch<T>, x: &T) -> (Branch<T>, Branch<T>) {
if root.is_none() {
(None, None)
} else {
let mut p = Rc::clone(root.as_ref().unwrap());
let mut left;
loop {
if x < &p.borrow().x {
left = false;
if p.borrow().l.is_some() {
p = {
let b = p.borrow();
Rc::clone(b.l.as_ref().unwrap())
}
} else {
break;
}
} else {
left = true;
if p.borrow().r.is_some() {
p = {
let b = p.borrow();
Rc::clone(b.r.as_ref().unwrap())
}
} else {
break;
}
}
}
if left {
let l = Some(p);
Self::splay(&l);
let r = mem::take(&mut l.as_ref().unwrap().borrow_mut().r);
if r.is_some() {
r.as_ref().unwrap().borrow_mut().p = None;
}
(l, r)
} else {
let r = Some(p);
Self::splay(&r);
let l = mem::take(&mut r.as_ref().unwrap().borrow_mut().l);
if l.is_some() {
l.as_ref().unwrap().borrow_mut().p = None;
}
(l, r)
}
}
}
fn join(l: Branch<T>, r: Branch<T>) -> Branch<T> {
match (l.as_ref(), r.as_ref()) {
(None, _) => r,
(_, None) => l,
_ => {
// splay rightmost node in `l`
let mut p = Rc::clone(l.as_ref().unwrap());
while p.borrow().r.is_some() {
p = {
let pm = p.borrow();
Rc::clone(pm.r.as_ref().unwrap())
}
}
r.as_ref().unwrap().borrow_mut().p = Some(Rc::downgrade(&p));
let root = Some(p);
Self::splay(&root);
root.as_ref().unwrap().borrow_mut().r = r;
root
}
}
}
fn is_left(c: &Branch<T>) -> bool {
let me = c.as_ref().unwrap().borrow();
if me.p.is_some() {
let pp = me.p.as_ref().unwrap().upgrade();
if pp.is_none() {
return false;
}
let up = pp.as_ref().unwrap().borrow();
up.l.is_some() && Rc::ptr_eq(c.as_ref().unwrap(), up.l.as_ref().unwrap())
} else {
false
}
}
    /// Splays node `c` up to the root using zig-zig / zig-zag rotations.
    fn splay(c: &Branch<T>) -> &Branch<T> {
while c.as_ref().unwrap().borrow().p.is_some() {
let p = c.as_ref().unwrap().borrow().p.as_ref().unwrap().upgrade();
let rot_parent = {
let g = &p.as_ref().unwrap().borrow().p;
g.is_some() && (Self::is_left(c) == Self::is_left(&p))
};
if rot_parent {
Self::rot(&p);
}
Self::rot(c);
}
c
}
fn rot(c: &Branch<T>) -> &Branch<T> {
// get the parent
let mut p = c.as_ref().unwrap().borrow().p.as_ref().unwrap().upgrade();
// where is grandpa?
let g = {
let node = p.as_ref().unwrap().borrow();
match node.p.as_ref() {
Some(weak_g) => weak_g.upgrade(),
_ => None,
}
};
let is_left_parent = Self::is_left(&p);
// ignore the grandpa now
let is_left_child = Self::is_left(c);
if is_left_child {
let mut cm = c.as_ref().unwrap().borrow_mut();
{
let mut pm = p.as_ref().unwrap().borrow_mut();
let kid = mem::take(&mut cm.r);
if kid.is_some() {
kid.as_ref().unwrap().borrow_mut().p = Some(Rc::downgrade(p.as_ref().unwrap()));
}
cm.p = mem::replace(&mut pm.p, Some(Rc::downgrade(c.as_ref().unwrap())));
if g.is_some() {
if is_left_parent {
g.as_ref().unwrap().borrow_mut().l = mem::take(&mut pm.l);
} else {
g.as_ref().unwrap().borrow_mut().r = mem::take(&mut pm.l);
}
}
pm.l = kid;
}
cm.r = mem::take(&mut p);
} else {
let mut cm = c.as_ref().unwrap().borrow_mut();
{
let mut pm = p.as_ref().unwrap().borrow_mut();
let kid = mem::take(&mut cm.l);
if kid.is_some() {
kid.as_ref().unwrap().borrow_mut().p = Some(Rc::downgrade(p.as_ref().unwrap()));
}
cm.p = mem::replace(&mut pm.p, Some(Rc::downgrade(c.as_ref().unwrap())));
if g.is_some() {
if is_left_parent {
g.as_ref().unwrap().borrow_mut().l = mem::take(&mut pm.r);
} else {
g.as_ref().unwrap().borrow_mut().r = mem::take(&mut pm.r);
}
}
pm.r = kid;
}
cm.l = mem::take(&mut p);
}
c
}
}
//{{{
struct Input {
buf: Vec<String>,
}
impl Input {
fn new() -> Self {
Input { buf: Vec::new() }
}
fn next<T: std::str::FromStr>(&mut self) -> T {
if self.buf.is_empty() {
self.buf = self
.next_line()
.split_whitespace()
.rev()
.map(String::from)
.collect();
}
self.buf.pop().unwrap().parse().ok().expect("err")
}
fn next_line(&mut self) -> String {
let mut line = String::new();
stdin().read_line(&mut line).expect("read err");
line
}
} //}}}
| true |
4576721c2a8ce842b39829df992e68a6f2d31bb7
|
Rust
|
fzuellich/chtbtr
|
/src/types/patch_status.rs
|
UTF-8
| 4,895 | 3.125 | 3 |
[
"MIT"
] |
permissive
|
use serde::{Deserialize, Serialize};
use crate::types::{CodeReviewStatus, VerifiedStatus};
#[derive(PartialEq, Debug, Serialize, Deserialize)]
pub enum PatchStatus {
Both(CodeReviewStatus, VerifiedStatus),
CodeReview(CodeReviewStatus),
Verified(VerifiedStatus),
ReadyForSubmit,
None,
}
pub fn patch_status(
code_review: &CodeReviewStatus,
code_review_old: &Option<CodeReviewStatus>,
verified: &VerifiedStatus,
verified_old: &Option<VerifiedStatus>,
) -> PatchStatus {
if code_review == &CodeReviewStatus::PlusTwo && verified == &VerifiedStatus::PlusOne {
return PatchStatus::ReadyForSubmit;
}
if verified_old.is_some() {
let verified = verified.clone();
return PatchStatus::Verified(verified);
}
if code_review_old.is_some() {
let code_review = code_review.clone();
return PatchStatus::CodeReview(code_review);
}
return PatchStatus::None;
}
#[cfg(test)]
mod test {
use super::patch_status;
use super::PatchStatus;
use crate::types::{CodeReviewStatus, VerifiedStatus};
#[test]
fn recognize_no_change() {
// TODO If we have a status set, but it wasn't changed, do we want to say: PatchStatus::None?
let expected = PatchStatus::None;
let actual = patch_status(
&CodeReviewStatus::PlusOne,
&None,
&VerifiedStatus::None,
&None,
);
assert_eq!(actual, expected);
let actual = patch_status(
&CodeReviewStatus::None,
&None,
&VerifiedStatus::MinusOne,
&None,
);
assert_eq!(actual, expected);
}
#[test]
fn recognize_ready_for_submit_on_both_change() {
let expected = PatchStatus::ReadyForSubmit;
let actual = patch_status(
&CodeReviewStatus::PlusTwo,
&Some(CodeReviewStatus::None),
&VerifiedStatus::PlusOne,
&Some(VerifiedStatus::None),
);
assert_eq!(actual, expected);
}
#[test]
fn recognize_ready_for_submit_on_code_review_change() {
let expected = PatchStatus::ReadyForSubmit;
let actual = patch_status(
&CodeReviewStatus::PlusTwo,
&Some(CodeReviewStatus::None),
&VerifiedStatus::PlusOne,
&None,
);
assert_eq!(actual, expected);
}
#[test]
fn recognize_ready_for_submit_on_verified_change() {
let expected = PatchStatus::ReadyForSubmit;
let actual = patch_status(
&CodeReviewStatus::PlusTwo,
&None,
&VerifiedStatus::PlusOne,
&Some(VerifiedStatus::None),
);
assert_eq!(actual, expected);
}
///////////////////////////////////////////////////////////////////////
// Tests that assume only one status (verified or code review) changed
///////////////////////////////////////////////////////////////////////
#[test]
fn recognize_changed_verified_status() {
for new_verified in [
VerifiedStatus::None,
VerifiedStatus::MinusOne,
VerifiedStatus::PlusOne,
]
.iter()
{
let test_params = [
&Some(VerifiedStatus::None),
&Some(VerifiedStatus::PlusOne),
&Some(VerifiedStatus::MinusOne),
];
let new_code_review = &CodeReviewStatus::None;
let old_code_review = &None;
let expected = PatchStatus::Verified(new_verified.clone());
for old_verified in test_params.iter() {
let actual =
patch_status(new_code_review, old_code_review, new_verified, old_verified);
assert_eq!(actual, expected);
}
}
}
#[test]
fn recognize_changed_code_review_status() {
for new_code_review in [
CodeReviewStatus::None,
CodeReviewStatus::MinusOne,
CodeReviewStatus::PlusOne,
CodeReviewStatus::MinusTwo,
CodeReviewStatus::PlusTwo,
]
.iter()
{
let old_code_reviews = [
Some(CodeReviewStatus::None),
Some(CodeReviewStatus::PlusOne),
Some(CodeReviewStatus::PlusTwo),
Some(CodeReviewStatus::MinusOne),
Some(CodeReviewStatus::MinusTwo),
];
let new_verified = &VerifiedStatus::None;
let old_verified = &None;
let expected = PatchStatus::CodeReview(new_code_review.clone());
for old_code_review in old_code_reviews.iter() {
let actual =
patch_status(new_code_review, old_code_review, new_verified, old_verified);
assert_eq!(actual, expected);
}
}
}
}
| true |
168c7039afa0aa90d285faa708feb110f4d6f3f8
|
Rust
|
gitcare-io/anton
|
/src/infrastructure/api/helpers/mod.rs
|
UTF-8
| 2,330 | 2.984375 | 3 |
[] |
no_license
|
use rocket::http::{ContentType, Status};
use rocket::request::Request;
use rocket::response;
use rocket::response::{Responder, Response};
use rocket_contrib::json::JsonValue;
#[derive(Debug)]
pub struct ApiResponse {
pub json: JsonValue,
pub status: Status,
}
impl<'r> Responder<'r> for ApiResponse {
fn respond_to(self, req: &Request) -> response::Result<'r> {
Response::build_from(self.json.respond_to(&req).unwrap())
.status(self.status)
.header(ContentType::JSON)
.ok()
}
}
impl ApiResponse {
pub fn ok(json: JsonValue) -> Self {
Self {
json,
status: Status::Ok,
}
}
pub fn bad_request(msg: &'static str) -> Self {
Self {
json: json!({
"message": msg,
"status": 400,
"name": "Bad Request"
}),
status: Status::BadRequest,
}
}
pub fn unprocessable_entity(msg: &'static str) -> Self {
Self {
json: json!({
"message": msg,
"status": 422,
"name": "Unprocessable Entity"
}),
status: Status::UnprocessableEntity,
}
}
pub fn forbidden(msg: &'static str) -> Self {
Self {
json: json!({
"message": msg,
"status": 403,
"name": "Forbidden"
}),
status: Status::Forbidden,
}
}
pub fn unauthorized(msg: &'static str) -> Self {
Self {
json: json!({
"message": msg,
"status": 401,
"name": "Unauthorized"
}),
status: Status::Unauthorized,
}
}
pub fn conflict(msg: &'static str) -> Self {
Self {
json: json!({
"message": msg,
"status": 409,
"name": "Conflict"
}),
status: Status::Conflict,
}
}
pub fn internal_server_error(msg: &'static str) -> Self {
Self {
json: json!({
"message": msg,
"status": 500,
"name": "InternalServerError"
}),
status: Status::InternalServerError,
}
}
}
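// Hedged usage sketch (not part of the original module): shows how the
// constructors above are meant to be combined with `json!` bodies. The payloads
// are made up for illustration; only `ApiResponse` from this file is relied on.
#[cfg(test)]
mod response_example {
    use super::*;
    #[test]
    fn constructors_set_the_expected_status_codes() {
        let ok = ApiResponse::ok(json!({ "hello": "world" }));
        assert_eq!(ok.status.code, 200);
        let conflict = ApiResponse::conflict("already exists");
        assert_eq!(conflict.status.code, 409);
    }
}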
| true |
81b5314b48a7682d7a6f29205686971738e30b0d
|
Rust
|
mvidigueira/MasterProject
|
/rules_playtest/common/src/records.rs
|
UTF-8
| 943 | 2.578125 | 3 |
[] |
no_license
|
extern crate bincode;
extern crate base64;
use std::collections::HashMap;
pub trait WasiSerializable {
fn serialize_wasi(&self) -> String;
}
pub trait WasiDeserializable {
fn deserialize_wasi(enc: &str) -> Self;
}
pub type Ledger = HashMap<String, Vec<u8>>;
impl WasiSerializable for Ledger {
fn serialize_wasi(&self) -> String {
base64::encode(&bincode::serialize(self).unwrap())
}
}
impl WasiDeserializable for Ledger {
fn deserialize_wasi(enc: &str) -> Ledger {
bincode::deserialize(&base64::decode(enc).unwrap()).unwrap()
}
}
pub fn extract_result(enc: &str) -> Result<Ledger, String> {
bincode::deserialize(&base64::decode(enc).unwrap()).unwrap()
}
pub fn create_result(r: Result<Ledger, String>) -> String {
base64::encode(&bincode::serialize(&r).unwrap())
}
pub fn serialize_args<T: serde::Serialize>(args: &T) -> String {
base64::encode(&bincode::serialize(args).unwrap())
}
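// Hedged usage sketch (not part of the original module): round-trips a Ledger
// through the base64/bincode helpers above, relying only on items defined here.
#[cfg(test)]
mod roundtrip_example {
    use super::*;
    #[test]
    fn ledger_roundtrip() {
        let mut ledger = Ledger::new();
        ledger.insert("alice".to_string(), vec![1u8, 2, 3]);
        let encoded = ledger.serialize_wasi();
        let decoded = Ledger::deserialize_wasi(&encoded);
        assert_eq!(decoded.get("alice"), Some(&vec![1u8, 2, 3]));
        // Results are wrapped the same way on the way out.
        let ok = create_result(Ok(decoded));
        assert!(extract_result(&ok).is_ok());
    }
}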
| true |
fcd6f1135d6b369a0ae23b065a278e312577e12b
|
Rust
|
rust-itertools/itertools
|
/src/unziptuple.rs
|
UTF-8
| 3,547 | 3.40625 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
/// Converts an iterator of tuples into a tuple of containers.
///
/// `multiunzip()` consumes an entire iterator of n-ary tuples, producing `n` collections, one for each
/// column.
///
/// This function is, in some sense, the opposite of [`multizip`].
///
/// ```
/// use itertools::multiunzip;
///
/// let inputs = vec![(1, 2, 3), (4, 5, 6), (7, 8, 9)];
///
/// let (a, b, c): (Vec<_>, Vec<_>, Vec<_>) = multiunzip(inputs);
///
/// assert_eq!(a, vec![1, 4, 7]);
/// assert_eq!(b, vec![2, 5, 8]);
/// assert_eq!(c, vec![3, 6, 9]);
/// ```
///
/// [`multizip`]: crate::multizip
pub fn multiunzip<FromI, I>(i: I) -> FromI
where
I: IntoIterator,
I::IntoIter: MultiUnzip<FromI>,
{
i.into_iter().multiunzip()
}
/// An iterator that can be unzipped into multiple collections.
///
/// See [`.multiunzip()`](crate::Itertools::multiunzip) for more information.
pub trait MultiUnzip<FromI>: Iterator {
/// Unzip this iterator into multiple collections.
fn multiunzip(self) -> FromI;
}
macro_rules! impl_unzip_iter {
($($T:ident => $FromT:ident),*) => (
#[allow(non_snake_case)]
impl<IT: Iterator<Item = ($($T,)*)>, $($T, $FromT: Default + Extend<$T>),* > MultiUnzip<($($FromT,)*)> for IT {
fn multiunzip(self) -> ($($FromT,)*) {
// This implementation mirrors the logic of Iterator::unzip resp. Extend for (A, B) as close as possible.
// Unfortunately a lot of the used api there is still unstable (https://github.com/rust-lang/rust/issues/72631).
//
// Iterator::unzip: https://doc.rust-lang.org/src/core/iter/traits/iterator.rs.html#2825-2865
// Extend for (A, B): https://doc.rust-lang.org/src/core/iter/traits/collect.rs.html#370-411
let mut res = ($($FromT::default(),)*);
let ($($FromT,)*) = &mut res;
// Still unstable #72631
// let (lower_bound, _) = self.size_hint();
// if lower_bound > 0 {
// $($FromT.extend_reserve(lower_bound);)*
// }
self.fold((), |(), ($($T,)*)| {
// Still unstable #72631
// $( $FromT.extend_one($T); )*
$( $FromT.extend(std::iter::once($T)); )*
});
res
}
}
);
}
impl_unzip_iter!();
impl_unzip_iter!(A => FromA);
impl_unzip_iter!(A => FromA, B => FromB);
impl_unzip_iter!(A => FromA, B => FromB, C => FromC);
impl_unzip_iter!(A => FromA, B => FromB, C => FromC, D => FromD);
impl_unzip_iter!(A => FromA, B => FromB, C => FromC, D => FromD, E => FromE);
impl_unzip_iter!(A => FromA, B => FromB, C => FromC, D => FromD, E => FromE, F => FromF);
impl_unzip_iter!(A => FromA, B => FromB, C => FromC, D => FromD, E => FromE, F => FromF, G => FromG);
impl_unzip_iter!(A => FromA, B => FromB, C => FromC, D => FromD, E => FromE, F => FromF, G => FromG, H => FromH);
impl_unzip_iter!(A => FromA, B => FromB, C => FromC, D => FromD, E => FromE, F => FromF, G => FromG, H => FromH, I => FromI);
impl_unzip_iter!(A => FromA, B => FromB, C => FromC, D => FromD, E => FromE, F => FromF, G => FromG, H => FromH, I => FromI, J => FromJ);
impl_unzip_iter!(A => FromA, B => FromB, C => FromC, D => FromD, E => FromE, F => FromF, G => FromG, H => FromH, I => FromI, J => FromJ, K => FromK);
impl_unzip_iter!(A => FromA, B => FromB, C => FromC, D => FromD, E => FromE, F => FromF, G => FromG, H => FromH, I => FromI, J => FromJ, K => FromK, L => FromL);
| true |
d96f2b7fa4e0e34a511968a33813035a940fcb36
|
Rust
|
mactsouk/introRustLC
|
/day2/L4/using_channel.rs
|
UTF-8
| 974 | 3.6875 | 4 |
[] |
no_license
|
use std::env;
use std::thread;
use std::sync::mpsc::{channel, Sender, Receiver};
fn fibonacci(n: i32) -> i32 {
if n == 0 {
return 0;
}
if n <= 1 {
return 1;
}
else {
return fibonacci(n - 1) + fibonacci(n - 2);
}
}
fn main() {
let args: Vec<_> = env::args().collect();
if args.len() != 2 {
println!("Usage: {} n", args[0]);
return;
}
let n: i32;
let m = &args[1];
match m.parse::<i32>() {
Ok(n1) => n = n1,
Err(e) => panic!("{} is NOT a number!", e),
}
println!("Going to create {} threads!", n);
let (tx, rx): (Sender<i32>, Receiver<i32>) = channel();
for i in 0..n {
let thread_tx = tx.clone();
thread::spawn( move || {
let n = fibonacci(i);
thread_tx.send(n).unwrap();
});
}
let mut f = Vec::with_capacity(n as usize);
for _ in 0..n {
f.push(rx.recv());
}
let mut sum = 0;
    for k in f {
        let value = k.unwrap();
        sum += value;
        print!("{} ", value);
    }
println!("\nSum = {}.", sum);
}
| true |
38c3232a30f96db59bfa26dc0dbbee4d912070f8
|
Rust
|
10allday-kai/api-daemon
|
/third-party/actix/src/fut/stream_fold.rs
|
UTF-8
| 3,000 | 2.984375 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
use std::mem;
use std::pin::Pin;
use std::task::{Context, Poll};
use pin_project::pin_project;
use crate::actor::Actor;
use crate::fut::{ActorFuture, ActorStream, IntoActorFuture};
/// A future used to collect all the results of a stream into one generic type.
///
/// This future is returned by the `ActorStream::fold` method.
#[pin_project]
#[derive(Debug)]
#[must_use = "streams do nothing unless polled"]
pub struct StreamFold<S, F, Fut, T>
where
Fut: IntoActorFuture,
{
#[pin]
stream: S,
f: F,
state: State<T, Fut::Future>,
}
#[derive(Debug)]
enum State<T, F>
where
F: ActorFuture,
{
/// Placeholder state when doing work
Empty,
/// Ready to process the next stream item; current accumulator is the `T`
Ready(T),
    /// Working on a future that processes the previous stream item
Processing(F),
}
pub fn new<S, F, Fut, T>(stream: S, f: F, t: T) -> StreamFold<S, F, Fut, T>
where
S: ActorStream,
F: FnMut(T, S::Item, &mut S::Actor, &mut <S::Actor as Actor>::Context) -> Fut,
Fut: IntoActorFuture<Output = T, Actor = S::Actor>,
{
StreamFold {
stream,
f,
state: State::Ready(t),
}
}
impl<S, F, Fut, T> ActorFuture for StreamFold<S, F, Fut, T>
where
S: ActorStream + Unpin,
F: FnMut(T, S::Item, &mut S::Actor, &mut <S::Actor as Actor>::Context) -> Fut,
Fut: IntoActorFuture<Output = T, Actor = S::Actor>,
Fut::Future: ActorFuture + Unpin,
{
type Output = T;
type Actor = S::Actor;
fn poll(
self: Pin<&mut Self>,
act: &mut S::Actor,
ctx: &mut <S::Actor as Actor>::Context,
task: &mut Context<'_>,
) -> Poll<T> {
let mut this = self.get_mut();
loop {
match mem::replace(&mut this.state, State::Empty) {
State::Empty => panic!("cannot poll Fold twice"),
State::Ready(state) => {
match Pin::new(&mut this.stream).poll_next(act, ctx, task) {
Poll::Ready(Some(e)) => {
let future = (this.f)(state, e, act, ctx);
let future = future.into_future();
this.state = State::Processing(future);
}
Poll::Ready(None) => return Poll::Ready(state),
Poll::Pending => {
this.state = State::Ready(state);
return Poll::Pending;
}
}
}
State::Processing(mut fut) => {
match Pin::new(&mut fut).poll(act, ctx, task) {
Poll::Ready(state) => this.state = State::Ready(state),
Poll::Pending => {
this.state = State::Processing(fut);
return Poll::Pending;
}
}
}
}
}
}
}
| true |
0b2edd9fc1334d8a368423c05434f38a94bd100a
|
Rust
|
deontologician/parry
|
/src/transformation/convex_hull2.rs
|
UTF-8
| 6,183 | 2.84375 | 3 |
[
"Apache-2.0"
] |
permissive
|
use std::marker::PhantomData;
use crate::math::Real;
use crate::transformation::convex_hull_utils::{indexed_support_point_id, support_point_id};
use na::{self, Point2, Vector2};
use num_traits::Zero;
/// Computes the convex hull of a set of 2d points.
///
/// The computed convex hull has its points given in counter-clockwise order.
#[cfg(feature = "dim2")]
pub fn convex_hull2(points: &[Point2<Real>]) -> Vec<Point2<Real>> {
convex_hull2_idx(points)
.into_iter()
.map(|id| points[id])
.collect()
}
/// Computes the convex hull of a set of 2d points and returns only the indices of the hull
/// vertices.
///
/// The computed convex hull has its points given in counter-clockwise order.
pub fn convex_hull2_idx(points: &[Point2<Real>]) -> Vec<usize> {
let mut undecidable_points = Vec::new();
let mut segments = get_initial_polyline(points, &mut undecidable_points);
let mut i = 0;
while i != segments.len() {
if !segments[i].valid {
i = i + 1;
continue;
}
let pt_id = indexed_support_point_id(
&segments[i].normal,
points,
segments[i].visible_points.iter().copied(),
);
if let Some(point) = pt_id {
segments[i].valid = false;
attach_and_push_facets2(
segments[i].prev,
segments[i].next,
point,
&points[..],
&mut segments,
i,
&mut undecidable_points,
);
}
i = i + 1;
}
let mut idx = Vec::new();
let mut curr_facet = 0;
while !segments[curr_facet].valid {
curr_facet = curr_facet + 1
}
let first_facet = curr_facet;
loop {
let curr = &segments[curr_facet];
if curr.valid {
idx.push(curr.pts[0]);
}
curr_facet = curr.next;
if curr_facet == first_facet {
break;
}
}
idx
}
fn get_initial_polyline(
points: &[Point2<Real>],
undecidable: &mut Vec<usize>,
) -> Vec<SegmentFacet> {
let mut res = Vec::new();
assert!(points.len() >= 2);
let p1 = support_point_id(&Vector2::x(), points).unwrap();
let mut p2 = p1;
let direction = [-Vector2::x(), -Vector2::y(), Vector2::y()];
for dir in direction.iter() {
p2 = support_point_id(dir, points).unwrap();
let p1p2 = points[p2] - points[p1];
if !p1p2.norm_squared().is_zero() {
break;
}
}
assert!(
p1 != p2,
"Failed to build the 2d convex hull of this point cloud."
);
// Build two facets with opposite normals.
let mut f1 = SegmentFacet::new(p1, p2, 1, 1, points);
let mut f2 = SegmentFacet::new(p2, p1, 0, 0, points);
// Attribute points to each facet.
for i in 0..points.len() {
if i == p1 || i == p2 {
continue;
}
if f1.can_be_seen_by(i, points) {
f1.visible_points.push(i);
} else if f2.can_be_seen_by(i, points) {
f2.visible_points.push(i);
} else {
// The point is collinear.
undecidable.push(i);
}
}
res.push(f1);
res.push(f2);
res
}
fn attach_and_push_facets2(
prev_facet: usize,
next_facet: usize,
point: usize,
points: &[Point2<Real>],
segments: &mut Vec<SegmentFacet>,
removed_facet: usize,
undecidable: &mut Vec<usize>,
) {
let new_facet1_id = segments.len();
let new_facet2_id = new_facet1_id + 1;
let prev_pt = segments[prev_facet].pts[1];
let next_pt = segments[next_facet].pts[0];
let mut new_facet1 = SegmentFacet::new(prev_pt, point, prev_facet, new_facet2_id, points);
let mut new_facet2 = SegmentFacet::new(point, next_pt, new_facet1_id, next_facet, points);
segments[prev_facet].next = new_facet1_id;
segments[next_facet].prev = new_facet2_id;
// Assign to each facets some of the points which can see it.
for visible_point in segments[removed_facet].visible_points.iter() {
if *visible_point == point {
continue;
}
if new_facet1.can_be_seen_by(*visible_point, points) {
new_facet1.visible_points.push(*visible_point);
} else if new_facet2.can_be_seen_by(*visible_point, points) {
new_facet2.visible_points.push(*visible_point);
}
// If none of the facet can be seen from the point, it is naturally deleted.
}
// Try to assign collinear points to one of the new facets
let mut i = 0;
while i != undecidable.len() {
if new_facet1.can_be_seen_by(undecidable[i], points) {
new_facet1.visible_points.push(undecidable[i]);
let _ = undecidable.swap_remove(i);
} else if new_facet2.can_be_seen_by(undecidable[i], points) {
new_facet2.visible_points.push(undecidable[i]);
let _ = undecidable.swap_remove(i);
} else {
i = i + 1;
}
}
segments.push(new_facet1);
segments.push(new_facet2);
}
struct SegmentFacet {
pub valid: bool,
pub normal: Vector2<Real>,
pub next: usize,
pub prev: usize,
pub pts: [usize; 2],
pub visible_points: Vec<usize>,
pt_type: PhantomData<Point2<Real>>,
}
impl SegmentFacet {
pub fn new(
p1: usize,
p2: usize,
prev: usize,
next: usize,
points: &[Point2<Real>],
) -> SegmentFacet {
let p1p2 = points[p2] - points[p1];
let mut normal = Vector2::new(p1p2.y, -p1p2.x);
let norm = normal.normalize_mut();
SegmentFacet {
valid: norm != 0.0,
normal,
prev,
next,
pts: [p1, p2],
visible_points: Vec::new(),
pt_type: PhantomData,
}
}
pub fn can_be_seen_by(&self, point: usize, points: &[Point2<Real>]) -> bool {
let p0 = &points[self.pts[0]];
let pt = &points[point];
let _eps = crate::math::DEFAULT_EPSILON;
(*pt - *p0).dot(&self.normal) > _eps * na::convert::<f64, Real>(100.0f64)
}
}
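// Hedged usage sketch (not from the original crate): feeds a small point cloud
// to `convex_hull2_idx` and checks that the strictly interior point is left out.
// It assumes `Real` is a plain float scalar, as in the rest of this module.
#[cfg(test)]
mod hull_example {
    use super::*;
    #[test]
    fn square_with_interior_point() {
        let pts = [
            Point2::new(0.0, 0.0),
            Point2::new(1.0, 0.0),
            Point2::new(1.0, 1.0),
            Point2::new(0.0, 1.0),
            Point2::new(0.5, 0.5), // interior point, should not appear in the hull
        ];
        let hull = convex_hull2_idx(&pts);
        assert_eq!(hull.len(), 4);
        assert!(!hull.contains(&4));
    }
}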
| true |
0c05ff170290032b527c047f885a033b2c587b7b
|
Rust
|
JohnBSmith/peat
|
/peatc/src/lib.rs
|
UTF-8
| 455 | 2.875 | 3 |
[
"CC0-1.0"
] |
permissive
|
use std::fs::File;
use std::io;
use std::io::Read;
mod parser;
pub fn read_file(path: &str) -> Result<String,io::Error> {
let mut file = File::open(path)?;
let mut input = String::new();
file.read_to_string(&mut input)?;
return Ok(input);
}
pub fn compile(input: &str) {
let _t = match parser::parse(input) {
Ok(value) => value,
Err(e) => {
println!("{}",e.text);
return;
}
};
}
| true |
74ef6387bf0a5beae6054435c97c09a604d80973
|
Rust
|
95th/pyret
|
/src/symbol.rs
|
UTF-8
| 2,356 | 3.109375 | 3 |
[] |
no_license
|
use std::cell::RefCell;
use std::{collections::HashMap, fmt, mem, str::FromStr};
#[derive(Copy, Clone, Eq, PartialEq, Hash)]
pub struct Symbol(u32);
impl Symbol {
pub fn dummy() -> Self {
Self::intern("")
}
pub fn intern(s: &str) -> Self {
with_interner(|interner| interner.intern(s))
}
pub fn parse<T: FromStr>(&self) -> Result<T, T::Err> {
with_interner(|interner| interner.lookup(self.0).parse())
}
pub fn as_str_with<T>(&self, f: impl FnOnce(&str) -> T) -> T {
with_interner(|interner| f(interner.lookup(self.0)))
}
}
impl fmt::Debug for Symbol {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Display::fmt(self, f)
}
}
impl fmt::Display for Symbol {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
with_interner(|interner| f.write_str(interner.lookup(self.0)))
}
}
fn with_interner<T>(f: impl FnOnce(&mut Interner) -> T) -> T {
thread_local! {
static INTERNER: RefCell<Interner> = RefCell::new(Interner::default());
}
INTERNER.with(|i| f(&mut *i.borrow_mut()))
}
#[derive(Default)]
pub struct Interner {
map: HashMap<&'static str, Symbol>,
vec: Vec<&'static str>,
buf: String,
full: Vec<String>,
}
impl Interner {
fn intern(&mut self, name: &str) -> Symbol {
if let Some(&id) = self.map.get(name) {
return id;
}
let name = unsafe { self.alloc(name) };
let id = Symbol(self.map.len() as u32);
self.map.insert(name, id);
self.vec.push(name);
debug_assert!(self.lookup(id.0) == name);
debug_assert!(self.intern(name) == id);
id
}
pub fn lookup(&self, id: u32) -> &str {
self.vec[id as usize]
}
unsafe fn alloc(&mut self, name: &str) -> &'static str {
let cap = self.buf.capacity();
if cap < self.buf.len() + name.len() {
let new_cap = (cap.max(name.len()) + 1).next_power_of_two();
let new_buf = String::with_capacity(new_cap);
let old_buf = mem::replace(&mut self.buf, new_buf);
self.full.push(old_buf);
}
let interned = {
let start = self.buf.len();
self.buf.push_str(name);
&self.buf[start..]
};
&*(interned as *const str)
}
}
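// Hedged usage sketch (not part of the original file): interning the same text
// twice yields the same Symbol, and `parse` / `as_str_with` read back the
// interned string.
#[cfg(test)]
mod intern_example {
    use super::*;
    #[test]
    fn interning_is_idempotent() {
        let a = Symbol::intern("42");
        let b = Symbol::intern("42");
        assert_eq!(a, b);
        assert_eq!(a.parse::<u32>().unwrap(), 42);
        a.as_str_with(|s| assert_eq!(s, "42"));
    }
}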
| true |
3dcc2e5b0ce2ddde6a5f73b3d7f58fc33e64ea3b
|
Rust
|
shakyShane/wf2
|
/wf2_core/src/recipes/wp/pass_thru.rs
|
UTF-8
| 2,606 | 3.140625 | 3 |
[] |
no_license
|
use crate::context::Context;
use crate::dc_tasks::DcTasks;
use crate::recipes::m2::subcommands::composer::composer;
use crate::subcommands::dc::dc_passthru;
use crate::task::Task;
use std::cmp;
///
/// Enum to represent all of the possible pass-thru commands available
///
#[derive(Debug, Clone)]
pub enum WpPassThru {
Composer,
Dc,
WpCli,
}
impl WpPassThru {
///
/// Passthru command names
///
const COMPOSER: &'static str = "composer";
const DC: &'static str = "dc";
const WP_CLI: &'static str = "wp";
///
/// Helper method for converting an enum member to a String
///
pub fn name(&self) -> String {
match self {
WpPassThru::Composer => WpPassThru::COMPOSER,
WpPassThru::Dc => WpPassThru::DC,
WpPassThru::WpCli => WpPassThru::WP_CLI,
}
.to_string()
}
pub fn resolve_cmd(
ctx: &Context,
cmd: String,
trailing: &[String],
dc: DcTasks,
) -> Option<Vec<Task>> {
match cmd {
ref x if *x == WpPassThru::Dc => {
let res = dc_passthru(ctx, trailing);
Some(res.unwrap_or_else(Task::task_err_vec))
}
ref x if *x == WpPassThru::WpCli => Some(wp_cli_passthru(trailing, dc)),
ref x if *x == WpPassThru::Composer => Some(composer(&ctx, trailing)),
_ => None,
}
}
pub fn commands() -> Vec<(String, String)> {
vec![
(
WpPassThru::Composer,
"[wp] Run composer commands with the correct user",
),
(WpPassThru::Dc, "[wp] Run docker-compose commands"),
(WpPassThru::WpCli, "[wp] Run Wordpress CLI commands"),
]
.into_iter()
.map(|(name, help)| (name.into(), help.into()))
.collect()
}
}
pub fn wp_cli_passthru(trailing: &[String], dc: DcTasks) -> Vec<Task> {
let dc_command = format!(r#"run --no-deps {}"#, trailing.join(" "));
vec![dc.cmd_task(vec![dc_command])]
}
impl From<WpPassThru> for String {
fn from(m2p: WpPassThru) -> Self {
m2p.name()
}
}
///
/// Allow a comparison to String
///
/// ```
/// use wf2_core::recipes::wp::pass_thru::WpPassThru;
/// assert_eq!(true, WpPassThru::Composer == String::from("composer"));
/// ```
///
impl cmp::PartialEq<String> for WpPassThru {
fn eq(&self, other: &String) -> bool {
self.name() == *other
}
}
impl cmp::PartialEq<WpPassThru> for String {
fn eq(&self, other: &WpPassThru) -> bool {
*self == other.name()
}
}
| true |
0b8239ab4e8c5c4fc3e2d91dcf52c6b4afe49803
|
Rust
|
isgasho/shine
|
/crates/shine-graph/src/svec/hashstore.rs
|
UTF-8
| 1,183 | 3.25 | 3 |
[
"Apache-2.0",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use crate::svec::{Store, StoreMut};
use std::collections::HashMap;
pub struct HashStore<T> {
values: HashMap<usize, T>,
}
impl<T> HashStore<T> {
pub fn new() -> Self {
HashStore { values: HashMap::new() }
}
pub fn new_with_capacity(capacity: usize) -> Self {
HashStore {
values: HashMap::with_capacity(capacity),
}
}
}
impl<T> Default for HashStore<T> {
fn default() -> Self {
Self::new()
}
}
impl<T> Store for HashStore<T> {
type Item = T;
fn get(&self, idx: usize) -> &Self::Item {
#[allow(clippy::get_unwrap)]
&self.values[&idx]
}
}
impl<T> StoreMut for HashStore<T> {
fn clear(&mut self) {
self.values.clear();
}
fn add(&mut self, idx: usize, value: Self::Item) {
self.values.insert(idx, value);
}
fn remove(&mut self, idx: usize) -> Self::Item {
self.values.remove(&idx).unwrap()
}
fn replace(&mut self, idx: usize, value: Self::Item) -> Self::Item {
self.values.insert(idx, value).unwrap()
}
fn get_mut(&mut self, idx: usize) -> &mut Self::Item {
self.values.get_mut(&idx).unwrap()
}
}
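// Hedged usage sketch (not part of the original file): drives the Store /
// StoreMut impls above through the trait methods they provide.
#[cfg(test)]
mod hashstore_example {
    use super::*;
    #[test]
    fn add_get_replace_remove() {
        let mut store = HashStore::new();
        store.add(3, "three");
        store.add(7, "seven");
        assert_eq!(*store.get(3), "three");
        assert_eq!(store.replace(7, "SEVEN"), "seven");
        assert_eq!(store.remove(3), "three");
    }
}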
| true |
a1101e1deb84f164fb9da5b219c4da45b0879585
|
Rust
|
Tuxinet/rust-path-tracer
|
/src/camera.rs
|
UTF-8
| 1,659 | 2.984375 | 3 |
[] |
no_license
|
extern crate nalgebra as na;
use na::Vector3;
use crate::ray::Ray;
use crate::vecutil::VecUtil;
use crate::lehmer::Lehmer;
#[derive(Clone, Debug)]
pub struct Camera {
lower_left_corner: Vector3<f64>,
horizontal: Vector3<f64>,
vertical: Vector3<f64>,
origin: Vector3<f64>,
lens_radius: f64,
u: Vector3<f64>,
v: Vector3<f64>,
w: Vector3<f64>,
}
impl Camera {
pub fn new(origin: Vector3<f64>, look_at: Vector3<f64>, up: Vector3<f64>, vfov: f64, aspect_ratio: f64, aperture: f64, focus_dist: f64) -> Self {
let theta = vfov.to_radians();
let h = (theta/2.0).tan();
let viewport_height = 2.0 * h;
let viewport_width = aspect_ratio * viewport_height;
let w = (origin - look_at).normalize();
let u = up.cross(&w).normalize();
let v = w.cross(&u);
let horizontal = focus_dist * viewport_width * u;
let vertical = focus_dist * viewport_height * v;
let lower_left_corner = origin - horizontal/2.0 - vertical/2.0 - focus_dist * w;
return Self {
lower_left_corner,
horizontal,
vertical,
origin,
lens_radius: aperture / 2.0,
u,
v,
w,
}
}
#[inline]
pub fn get_ray(&self, s: f64, t: f64, rng: &mut Lehmer) -> Ray {
let rd = self.lens_radius * VecUtil::random_in_unit_disk(rng);
let offset = self.u * rd.x + self.v * rd.y;
return Ray::new(self.origin + offset, self.lower_left_corner + s*self.horizontal + t*self.vertical - self.origin - offset);
}
}
| true |
3a71a9b47413b921366593623eb3b03e9f6280a4
|
Rust
|
eupn/actix-erlay-sim
|
/src/recset.rs
|
UTF-8
| 9,054 | 3.15625 | 3 |
[] |
no_license
|
//! Defines set that can be reconciled.
use minisketch_rs;
use minisketch_rs::Minisketch;
use std::collections::HashSet;
use std::fmt::Debug;
use std::hash::Hash;
/// Types that can produce short ID (short hash) can implement this trait.
pub trait ShortId<I> {
fn short_id(&self) -> I;
}
/// A set that supports reconciliation by using short IDs (`I`) of its elements (`V`)
#[derive(Debug)]
pub struct RecSet<I: Hash + Eq + Copy + From<u64> + Into<u64> + Debug> {
capacity: usize,
seed: Option<u64>,
sketch: Minisketch,
set: HashSet<I>,
}
impl<I: Hash + Eq + Copy + From<u64> + Into<u64> + Debug> RecSet<I> {
/// Creates new set with given `capacity`.
pub fn new(capacity: usize) -> Self {
let _bits = std::mem::size_of::<I>() * 8;
let sketch = Self::create_minisketch(capacity, None);
RecSet {
seed: None,
capacity,
sketch,
set: HashSet::with_capacity(capacity),
}
}
/// Creates new set with given `capacity` and `seed` for underlying Minisketch math.
#[allow(dead_code)]
pub fn with_seed(capacity: usize, seed: u64) -> Self {
let sketch = Self::create_minisketch(capacity, Some(seed));
RecSet {
            seed: Some(seed),
capacity,
sketch,
set: HashSet::with_capacity(capacity),
}
}
/// Adds element to the sketch.
/// Element will be added only if it's not already in the set.
pub fn insert(&mut self, id: I) {
if !self.set.contains(&id) {
self.set.insert(id);
self.sketch.add(id.into());
}
}
fn create_minisketch(capacity: usize, seed: Option<u64>) -> Minisketch {
let bits = std::mem::size_of::<I>() * 8;
let mut minisketch = Minisketch::try_new(bits as u32, 0, capacity).unwrap();
if let Some(seed) = seed {
minisketch.set_seed(seed);
}
minisketch
}
pub fn reconcile(
sketch_a: &[u8],
sketch_b: &[u8],
capacity: usize,
seed: Option<u64>,
) -> Result<Vec<I>, ()> {
let mut a = Self::create_minisketch(capacity, seed);
a.deserialize(sketch_a);
let mut b = Self::create_minisketch(capacity, seed);
b.deserialize(sketch_b);
a.merge(&b).expect("Minisketch merge");
let mut diffs = vec![0u64; capacity];
let num_diffs = a.decode(&mut diffs).map_err(|_| ())?;
let diff_ids = diffs
.iter()
.map(|id| Into::<I>::into(*id))
.collect::<Vec<_>>();
Ok(diff_ids.into_iter().take(num_diffs).collect())
}
/// Produces list of IDs that are missing in the set given as its `sketch`.
pub fn reconcile_with(&mut self, sketch_b: &[u8]) -> Result<Vec<I>, ()> {
Self::reconcile(&self.sketch(), sketch_b, self.capacity, self.seed)
}
#[allow(dead_code)]
pub fn bisect_with(
a_whole: &[u8],
a_half: &[u8],
b_whole: &[u8],
b_half: &[u8],
capacity: usize,
seed: Option<u64>,
) -> Result<Vec<I>, ()> {
// Extracts remainder sketch from a difference of two sketches
pub fn sub_sketches(s1: &[u8], s2: &[u8], d: usize, seed: Option<u64>) -> Vec<u8> {
let mut a = minisketch_rs::Minisketch::try_new(64, 0, d).unwrap();
if let Some(seed) = seed {
a.set_seed(seed);
}
a.deserialize(s1);
let mut b = minisketch_rs::Minisketch::try_new(64, 0, d).unwrap();
if let Some(seed) = seed {
b.set_seed(seed);
}
b.deserialize(s2);
a.merge(&b).expect("Sketch sub merge");
let mut sketch = vec![0u8; a.serialized_size()];
a.serialize(&mut sketch).expect("Serialize sketch sub");
sketch
}
// Try bisection:
//
// res_1 = reconcile(a_half, b_half)
// res_2 = reconcile(a_whole - a_half, b_whole - b_half)
//
// differences = res_1 U res_2
//
// b_half is known to Alice since Bob sent his b_half sketch to her before bisect
let a_minus_a_2 = sub_sketches(&a_whole, &a_half, capacity, seed);
let b_minus_b_2 = sub_sketches(&b_whole, &b_half, capacity, seed);
let res_1 = RecSet::<I>::reconcile(&a_half, &b_half, capacity, seed);
let res_2 = RecSet::<I>::reconcile(&a_minus_a_2, &b_minus_b_2, capacity, seed);
res_1.and_then(|diffs1| {
res_2.and_then(|diffs2| {
Ok(diffs1
.into_iter()
.chain(diffs2.into_iter())
.collect::<Vec<_>>())
})
})
}
/// Produces sketch for this set.
/// It is used in set reconciliation to find out what elements are missing in this set.
pub fn sketch(&self) -> Vec<u8> {
let mut buf = vec![0u8; self.sketch.serialized_size()];
self.sketch
.serialize(&mut buf)
.expect("Minisketch serialize");
buf
}
pub fn contains(&self, id: &I) -> bool {
self.set.contains(id)
}
#[allow(dead_code)]
pub fn len(&self) -> usize {
self.set.len()
}
}
#[cfg(test)]
mod test {
use super::{RecSet, ShortId};
use siphasher::sip::SipHasher;
use std::hash::Hasher;
#[derive(Debug, Copy, Clone, PartialEq)]
pub struct Tx(pub [u8; 32]);
impl ShortId<u64> for Tx {
fn short_id(&self) -> u64 {
let mut hasher = SipHasher::new_with_keys(0xDEu64, 0xADu64);
hasher.write(&self.0);
hasher.finish()
}
}
#[test]
pub fn test_reconciliation() {
let txs_alice = vec![Tx([1u8; 32]), Tx([2u8; 32]), Tx([3u8; 32]), Tx([4u8; 32])];
let txs_bob = vec![Tx([1u8; 32]), Tx([2u8; 32])];
let mut rec_set_alice = RecSet::<u64>::with_seed(16, 42u64);
for tx in txs_alice.iter() {
rec_set_alice.insert(tx.clone().short_id());
}
let mut rec_set_bob = RecSet::<u64>::with_seed(16, 42u64);
for tx in txs_bob {
rec_set_bob.insert(tx.short_id());
}
let bob_sketch = rec_set_bob.sketch();
let missing = rec_set_alice
.reconcile_with(&bob_sketch)
.expect("Reconcile with Alice");
assert_eq!(missing.len(), 2);
for id in missing {
assert!(rec_set_alice.contains(&id));
}
}
#[test]
pub fn test_bisect_reconciliation() {
let d = 16; // You can change it to 24 to not perform bisect and compare results
// There is exactly 24 differences, but since d = 16, simple set reconciliation will fail
let a = 0..32;
let b = 0..8;
        // Take only the even-indexed elements of each set, so the differences
        // are spread uniformly and the bisect has a better chance of succeeding
let b_half = b
.clone()
.into_iter()
.enumerate()
.filter(|(i, _)| *i % 2 == 0)
.map(|(_, n)| n)
.collect::<Vec<_>>();
let a_half = a
.clone()
.into_iter()
.enumerate()
.filter(|(i, _)| *i % 2 == 0)
.map(|(_, n)| n)
.collect::<Vec<_>>();
        // Creates a RecSet from a range of element values
pub fn set_from_range(range: impl IntoIterator<Item = u8>, capacity: usize) -> RecSet<u64> {
let txs = range.into_iter().map(|b| Tx([b; 32]));
let mut set = RecSet::<u64>::new(capacity);
for tx in txs {
set.insert(tx.short_id());
}
set
}
// Try regular reconciliation
let alice_set_full = set_from_range(a, d);
let a_whole = alice_set_full.sketch();
let a_half = set_from_range(a_half, d).sketch();
let bob_set_full = set_from_range(b, d);
let b_whole = bob_set_full.sketch();
let b_half = set_from_range(b_half, d).sketch();
let first_try = RecSet::<u64>::reconcile(&a_whole, &b_whole, d, None);
if let Err(()) = first_try {
println!("Set overfull, trying bisect...");
// Try bisection:
//
// res_1 = reconcile(a_half, b_half)
// res_2 = reconcile(a_whole - a_half, b_whole - b_half)
//
// differences = res_1 U res_2
//
// b_half is known to Alice since Bob sent his b_half sketch to her before bisect
let res = RecSet::<u64>::bisect_with(&a_whole, &a_half, &b_whole, &b_half, d, None);
match res {
Ok(diffs) => println!("Success: {} diffs {:?}", diffs.len(), diffs),
Err(_) => println!("Bisection failed"),
}
} else {
let mut diffs = first_try.ok().unwrap();
diffs.sort();
println!("Success: {} diffs: {:?}", diffs.len(), diffs);
}
}
}
| true |
c96df48dfc2517deca337e304d710bd4fa0e834d
|
Rust
|
brundonsmith/strainer
|
/src/counting.rs
|
UTF-8
| 6,144 | 3.3125 | 3 |
[] |
no_license
|
use std::{collections::{HashMap, HashSet}, fmt::Display, path::{Path, PathBuf}};
use crate::{options::Options, pattern::matches};
pub type Occurrences = HashMap<String, Vec<FileLocation>>;
/// Return a record of all occurrences of every line in `text`
pub fn count_lines(
file_path: &Path,
text: &str,
options: &Options,
) -> Occurrences {
let mut records = HashMap::new();
let mut current_line_number = 0;
walk_lines(text, options,
|next| {
if let CharOrLine::Line(line) = next {
current_line_number += 1;
record_line(
&options,
&mut records,
file_path,
&line,
current_line_number,
);
}
});
return records;
}
/// Return a copy of `text` with all duplicate lines removed (the first
/// instance remains)
pub fn strip_lines(
text: &str,
options: &Options,
) -> String {
let mut found_lines = HashSet::new();
let mut new_text = String::new();
walk_lines(text, options,
|next| {
match next {
CharOrLine::Char(ch) => new_text.push(ch),
CharOrLine::Line(line) => {
if found_lines.contains(&line) {
// do nothing
} else {
new_text.push_str(&line);
found_lines.insert(line);
}
}
}
});
return new_text;
}
enum CharOrLine {
Char(char),
Line(String),
}
/// Walk through the lines in `text`, following the specified behavior from
/// `options`, and do something on each line and each character between lines
fn walk_lines(
text: &str,
options: &Options,
mut handle_next: impl FnMut(CharOrLine) -> (),
) {
let mut current_line = String::new();
let mut prev_char: Option<char> = None;
for c in text.chars() {
let squashing = prev_char
.map(|prev| prev == c && options.squash_chars.contains(&prev))
.unwrap_or(false);
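        // A repeated "squash" character is forwarded to the output as a raw character
        // only: it is neither appended to the current line nor treated as a delimiter,
        // so runs of such characters collapse to one occurrence in the recorded line.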
if squashing {
handle_next(CharOrLine::Char(c));
} else if c == options.line_delimiter {
let completed_line = current_line;
current_line = String::new();
handle_next(CharOrLine::Line(completed_line));
handle_next(CharOrLine::Char(c));
} else {
current_line.push(c);
handle_next(CharOrLine::Char(c));
}
prev_char = Some(c);
}
handle_next(CharOrLine::Line(current_line));
}
// pub fn count_lines(
// file_path: &Path,
// text: &str,
// options: &CountingOptions,
// ) -> LineRecords {
// let mut records = HashMap::new();
// let mut current_line_number = 0;
// let mut current_line = String::new();
// let mut prev_char: Option<char> = None;
// for c in text.chars() {
// let squashing = prev_char
// .map(|prev| prev == c && options.squash_chars.contains(&prev))
// .unwrap_or(false);
// if squashing {
// // do nothing
// } else if c == options.line_delimiter {
// let completed_line = current_line;
// current_line = String::new();
// record_line(
// &options,
// &mut records,
// file_path,
// completed_line,
// current_line_number,
// );
// current_line_number += 1;
// } else {
// current_line.push(c);
// }
// prev_char = Some(c);
// }
// record_line(
// &options,
// &mut records,
// file_path,
// current_line,
// current_line_number,
// );
// return records;
// }
pub fn merge_records(
target: &mut Occurrences,
source: Occurrences,
) {
let mut source = source;
let source_keys = source.keys().map(|k| k.clone()).collect::<Vec<String>>();
for key in source_keys {
let mut source_val = source.remove(&key).unwrap();
match target.get_mut(&key) {
Some(existing_vec) => existing_vec.append(&mut source_val),
None => {
target.insert(key, source_val);
}
}
}
}
pub fn record_line(
options: &Options,
records: &mut Occurrences,
file_path: &Path,
line: &str,
line_number: usize,
) {
let line = if options.trim_whitespace {
String::from(line.trim())
} else {
line.to_owned()
};
if line.len() > 0 && matches(&line, &options.line_pattern) {
let file_location = FileLocation {
path: PathBuf::from(file_path),
line_number,
};
match records.get_mut(&line) {
Some(existing_locations) => existing_locations.push(file_location),
None => {
records.insert(line, vec![file_location]);
}
}
}
}
/// A fully-qualified line location within a file (file path + line number)
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord)]
pub struct FileLocation {
pub path: PathBuf,
pub line_number: usize,
}
impl Display for FileLocation {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}:{}", self.path.to_str().unwrap(), self.line_number)
}
}
// #[cfg(test)]
// mod tests {
// #[test]
// fn test_strip_lines_with_no_duplicates() {
// let s = "[Adblock Plus 2.0]
// ||apps.facebook.com^
// ||apps.facebook.com^$popup
// ||apps.facebook.com^$third-party";
// let stripped = strip_lines(s, &(CountingOptions {
// line_delimiter: '\n',
// squash_chars: vec![],
// // aren't used by this function
// line_pattern: vec![],
// ignore_delimiters: vec![],
// trim_whitespace: false,
// same_file: false,
// remove_duplicates: false,
// }));
// assert_eq!(s, stripped);
// }
// }
| true |
eda64373a05198682648eca8615bca12b7c58fd7
|
Rust
|
8pockets/exercism-rust
|
/difference-of-squares/src/lib.rs
|
UTF-8
| 334 | 3.4375 | 3 |
[] |
no_license
|
pub fn square_of_sum(n: usize) -> usize {
    let sum: usize = (1..=n).sum();
    return sum * sum;
}
pub fn sum_of_squares(n: usize) -> usize {
    let sum: usize = (1..=n).map(|a| a * a).sum();
    return sum;
}
pub fn difference(n: usize) -> usize {
return square_of_sum(n) - sum_of_squares(n);
}
| true |
2bffe73de8b00829d5a4261c77b67738d02f2a88
|
Rust
|
ondich/euler100
|
/pe001/src/main.rs
|
UTF-8
| 223 | 3.390625 | 3 |
[] |
no_license
|
// Project Euler, Problem 1
// Multiples of 3 and 5
fn main() {
let mut sum = 0;
for k in 1..1000 {
if k % 3 == 0 || k % 5 == 0 {
sum = sum + k
}
}
println!("Answer: {}", sum)
}
| true |
526eaef9483552b66fc0aa194bf53ed1b15283b7
|
Rust
|
madhav-madhusoodanan/actix-web
|
/src/functions/AuthenticateUser.rs
|
UTF-8
| 362 | 2.609375 | 3 |
[] |
no_license
|
use hmac::Hmac;
use jwt::VerifyWithKey;
use sha2::Sha256;
use std::collections::BTreeMap;
use crate::config::key;
#[allow(non_snake_case)]
pub fn AuthenticateUser(bearer: String) -> Result<String, String> {
    // extract the claims from the token, given the secret key,
    // and check if the user is available
    let claims: BTreeMap<String, String> = bearer
        .verify_with_key(&key)
        .map_err(|err| err.to_string())?;
    // NOTE: the claim name "user" is an assumption; the original snippet never
    // returned a value, so adjust it to whatever claim the tokens actually carry.
    claims
        .get("user")
        .cloned()
        .ok_or_else(|| String::from("user not found in token"))
}
| true |
7e89bc8732218abe3d860d96eb260e30ac4fd79d
|
Rust
|
nickolay/git-interactive-rebase-tool
|
/src/todo_file/src/history/operation.rs
|
UTF-8
| 106 | 2.546875 | 3 |
[
"BSD-3-Clause",
"GPL-3.0-only",
"Apache-2.0",
"BSD-2-Clause",
"GPL-3.0-or-later",
"LicenseRef-scancode-unknown-license-reference",
"GPL-1.0-or-later"
] |
permissive
|
#[derive(Debug, PartialEq, Eq)]
pub(crate) enum Operation {
Modify,
SwapUp,
SwapDown,
Add,
Remove,
}
| true |
1848e58515ff2d7eb89a21cfb385008545518c53
|
Rust
|
chrisemerson/adventOfCode
|
/2020-Rust/Day16/main.rs
|
UTF-8
| 5,646 | 3.140625 | 3 |
[] |
no_license
|
use std::collections::HashMap;
use std::fs;
#[derive(Debug)]
struct Range {
low: i32,
high: i32,
}
#[derive(Debug)]
struct Requirement {
name: String,
ranges: Vec<Range>,
}
fn main() {
let input = fs::read_to_string("input.txt")
.expect("Something went wrong reading the file");
let (requirements, your_ticket, nearby_tickets) = parse_input(input);
let ticket_scanning_error_rate: i32 = nearby_tickets
.iter()
.map(|ticket| find_invalid_value_in_ticket(ticket, &requirements))
.filter(|invalid_value| invalid_value.is_some())
.map(|invalid_value| invalid_value.unwrap())
.fold(0, |a, b| a + b);
println!("Ticket scanning error rate is: {}", ticket_scanning_error_rate);
let mut valid_tickets: Vec<Vec<i32>> = nearby_tickets
.iter()
.filter(|ticket| find_invalid_value_in_ticket(ticket, &requirements).is_none())
.map(|ticket| ticket.to_owned())
.collect::<Vec<Vec<i32>>>();
valid_tickets.push(your_ticket.clone());
let mut possible_requirements:HashMap<usize, Vec<String>> = HashMap::new();
let mut final_requirements:HashMap<usize, String> = HashMap::new();
for i in 0..your_ticket.clone().len() {
let field_values = valid_tickets
.iter()
.map(|ticket| ticket.get(i).unwrap().to_owned())
.collect::<Vec<i32>>();
let possible_requirements_for_field = get_possible_requirements_for_field(&field_values, &requirements);
possible_requirements.insert(i, possible_requirements_for_field);
}
loop {
for (i, possible_requirement_list) in &possible_requirements {
if possible_requirement_list.len() == 1 {
let requirement = possible_requirement_list.get(0).unwrap().to_string();
final_requirements.insert(*i, requirement.clone());
possible_requirements = remove_item_from_lists(&requirement, &possible_requirements);
break;
}
}
if final_requirements.len() == possible_requirements.len() {
break;
}
}
let pt2_answer = final_requirements
.iter()
        .filter(|(_k, v)| v.starts_with("departure"))
.map(|(k, _v)| *your_ticket.get(*k as usize).unwrap() as i64)
.fold(1, |a, b| a * b);
println!("Multiplying all the departure fields gives: {}", pt2_answer);
}
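// Note on the fold below: the accumulator starts at Some(0) and is replaced by each
// requirement's result in turn, but latches to None once it becomes None. The net
// effect is that this returns None if at least one requirement is satisfied by every
// value on the ticket, and otherwise returns the invalid value reported by the last
// requirement checked.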
fn find_invalid_value_in_ticket(ticket: &Vec<i32>, requirements: &Vec<Requirement>) -> Option<i32> {
return requirements
.iter()
.map(|requirement|
find_invalid_value_in_ticket_by_single_requirement(ticket, requirement)
)
.fold(Some(0), |a, b| return if a.is_none() { a } else { b });
}
fn find_invalid_value_in_ticket_by_single_requirement(ticket: &Vec<i32>, requirement: &Requirement) -> Option<i32> {
    return ticket
        .iter()
        .map(|ticket_value| if requirement.ranges
            .iter()
            .any(|range| ticket_value >= &range.low && ticket_value <= &range.high) {
            None
        } else {
            Some(*ticket_value)
        }
        )
        .fold(None, |a, b| if b.is_some() { b } else { a });
}
fn get_possible_requirements_for_field(field_values: &Vec<i32>, requirements: &Vec<Requirement>) -> Vec<String> {
return requirements
.iter()
.filter(|requirement| find_invalid_value_in_ticket_by_single_requirement(field_values, requirement).is_none())
.map(|r| r.name.clone())
.collect::<Vec<String>>();
}
fn remove_item_from_lists(requirement: &String, possible_requirements: &HashMap<usize, Vec<String>>) -> HashMap<usize, Vec<String>> {
let mut new_possible_requirements:HashMap<usize, Vec<String>> = HashMap::new();
for (i, list) in possible_requirements {
new_possible_requirements.insert(*i, list.iter().filter(|r| r != &requirement).map(|r| r.to_string()).collect::<Vec<String>>());
}
return new_possible_requirements;
}
fn parse_input(input: String) -> (Vec<Requirement>, Vec<i32>, Vec<Vec<i32>>) {
let mut requirements: Vec<Requirement> = Vec::new();
let mut your_ticket: Vec<i32> = Vec::new();
let mut nearby_tickets: Vec<Vec<i32>> = Vec::new();
let mut your_ticket_next = false;
for line in input.lines().map(|line| line.trim().to_string()) {
if line != "" && line != "nearby tickets:" {
if your_ticket.len() != 0 {
nearby_tickets.push(line.split(",").map(|x| x.parse::<i32>().unwrap()).collect());
} else if your_ticket_next {
your_ticket = line.split(",").map(|x| x.parse::<i32>().unwrap()).collect();
your_ticket_next = false;
} else if line == "your ticket:" {
your_ticket_next = true;
} else {
let requirement_parts = line.split(": ").map(|x| x.to_string()).collect::<Vec<String>>();
let mut ranges: Vec<Range> = Vec::new();
for range in requirement_parts.get(1).unwrap().split(" or ") {
let range_parts = range.split("-").map(|x| x.parse::<i32>().unwrap()).collect::<Vec<i32>>();
ranges.push(Range { low: *range_parts.get(0).unwrap(), high: *range_parts.get(1).unwrap() });
}
requirements.push(Requirement { name: requirement_parts.get(0).unwrap().to_string(), ranges });
}
}
}
return (requirements, your_ticket, nearby_tickets);
}
| true |
7ce885908c1e48e5fc27f0ff51341afbd9ce1729
|
Rust
|
ganga1980/rust-workshop
|
/part-1/solution/src/lib.rs
|
UTF-8
| 3,902 | 3.375 | 3 |
[] |
no_license
|
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
// How many buckets the hashmap begins with
const INITIAL_BUCKET_COUNT: usize = 16;
// The maximum load factor, measured as the average number of items per slot
const MAX_LOAD_FACTOR: f64 = 0.9;
type Slot<K, V> = Option<((K, V), usize)>;
pub struct HashMap<K: Hash + Eq, V> {
slots: Vec<Slot<K, V>>,
slot_count: usize,
item_count: usize,
}
impl<K: Hash + Eq, V> HashMap<K, V> {
pub fn new() -> HashMap<K, V> {
HashMap {
slots: Self::create_slots(INITIAL_BUCKET_COUNT),
slot_count: INITIAL_BUCKET_COUNT,
item_count: 0,
}
}
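    /// Inserts a key-value pair, returning the value previously stored under `key`
    /// (if any), mirroring `std::collections::HashMap::insert`. The table is resized
    /// first whenever the load factor reaches MAX_LOAD_FACTOR.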
pub fn insert(&mut self, key: K, value: V) -> Option<V> {
let load_factor = self.item_count as f64 / self.slot_count as f64;
if load_factor >= MAX_LOAD_FACTOR {
// The load factor is higher than what we want. We must resize.
self.resize();
}
let new_slot_index = self.slot_index(&key);
let slot = self.slot_mut(new_slot_index, &key);
match slot {
Some(slot) => {
let old = slot.replace(((key, value), new_slot_index));
match old {
Some(((_, v), _)) => Some(v),
None => {
self.item_count += 1;
None
}
}
}
None => {
self.slots.push(Some(((key, value), new_slot_index)));
None
}
}
}
pub fn get(&self, key: &K) -> Option<&V> {
let slot_index = self.slot_index(key);
let slot = self.slot(slot_index, key)?;
let ((_, v), _) = slot.as_ref()?;
Some(v)
}
pub fn remove(&mut self, key: &K) -> Option<V> {
let slot_index = self.slot_index(&key);
let slot = self.slot_mut(slot_index, key)?;
let ((_, v), _) = slot.take()?;
Some(v)
}
fn resize(&mut self) {
self.slot_count *= 2;
let new_slots = Self::create_slots(self.slot_count);
let old_slots = std::mem::replace(&mut self.slots, new_slots);
for old_slot in old_slots.into_iter() {
if let Some(((key, value), slot_index)) = old_slot {
let slot = self.slot_mut(slot_index, &key).unwrap();
*slot = Some(((key, value), slot_index));
}
}
}
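    /// Linear probing: starting at `slot_index`, scan forward for the first slot that
    /// either already holds `key` or is empty. Returns `None` if the probe runs off
    /// the end of the vector, in which case `insert` pushes a brand-new slot.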
fn slot_mut(&mut self, slot_index: usize, key: &K) -> Option<&mut Slot<K, V>> {
self.slots
.iter_mut()
.skip(slot_index)
.find(|item| match item {
Some(((k, _), _)) => k == key,
None => true,
})
}
fn slot(&self, slot_index: usize, key: &K) -> Option<&Slot<K, V>> {
self.slots.iter().skip(slot_index).find(|item| match item {
Some(((k, _), _)) => k == key,
None => true,
})
}
fn slot_index(&self, key: &K) -> usize {
let mut hasher = DefaultHasher::new();
key.hash(&mut hasher);
let hash = hasher.finish();
(hash % self.slot_count as u64) as usize
}
fn create_slots(slot_count: usize) -> Vec<Slot<K, V>> {
let mut new_slots = Vec::with_capacity(slot_count);
for _ in 0..slot_count {
new_slots.push(None);
}
new_slots
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn map_works() {
let mut map = HashMap::new();
assert_eq!(map.insert("foo", "bar"), None);
assert_eq!(map.insert("foo", "lol"), Some("bar"));
assert_eq!(map.get(&"foo"), Some(&"lol"));
assert_eq!(map.get(&"foo"), Some(&"lol"));
assert_eq!(map.get(&"qux"), None);
assert_eq!(map.remove(&"foo"), Some("lol"));
assert_eq!(map.get(&"foo"), None);
}
}
| true |
3b5362c152b251df0997625bfe9c64517d2b853e
|
Rust
|
jroweboy/marauder
|
/visualizer/gl_helpers.rs
|
UTF-8
| 3,879 | 2.546875 | 3 |
[
"MIT"
] |
permissive
|
// See LICENSE file for copyright and license details.
use std;
use gl;
use gl::types::{GLuint, GLsizeiptr};
use cgmath::matrix::{Matrix, Mat4, Mat3, ToMat4};
use cgmath::vector::{Vec2, Vec3};
use cgmath::angle;
use core::misc::deg_to_rad;
use core::types::{Size2, MInt};
use visualizer::types::{MFloat};
pub use load_gl_funcs_with = gl::load_with;
pub enum MeshRenderMode {
Triangles,
Lines,
}
impl MeshRenderMode {
fn to_gl_type(&self) -> GLuint {
match *self {
Triangles => gl::TRIANGLES,
Lines => gl::LINES,
}
}
}
pub fn tr(m: Mat4<MFloat>, v: Vec3<MFloat>) -> Mat4<MFloat> {
let mut t = Mat4::<MFloat>::identity();
*t.mut_cr(3, 0) = v.x;
*t.mut_cr(3, 1) = v.y;
*t.mut_cr(3, 2) = v.z;
m.mul_m(&t)
}
pub fn rot_x(m: Mat4<MFloat>, angle: MFloat) -> Mat4<MFloat> {
let rad = angle::rad(deg_to_rad(angle));
let r = Mat3::from_angle_x(rad).to_mat4();
m.mul_m(&r)
}
pub fn rot_z(m: Mat4<MFloat>, angle: MFloat) -> Mat4<MFloat> {
let rad = angle::rad(deg_to_rad(angle));
let r = Mat3::from_angle_z(rad).to_mat4();
m.mul_m(&r)
}
pub fn init_opengl() {
gl::Enable(gl::DEPTH_TEST);
gl::Enable(gl::BLEND);
gl::BlendFunc(gl::SRC_ALPHA, gl::ONE_MINUS_SRC_ALPHA);
}
pub fn set_clear_color(r: MFloat, g: MFloat, b: MFloat) {
gl::ClearColor(r, g, b, 1.0);
}
pub fn clear_screen() {
gl::Clear(gl::COLOR_BUFFER_BIT | gl::DEPTH_BUFFER_BIT);
}
pub fn set_viewport(size: Size2<MInt>) {
gl::Viewport(0, 0, size.w, size.h);
}
pub struct Vao {
id: GLuint,
}
impl Vao {
pub fn new() -> Vao {
let mut id = 0;
unsafe {
gl::GenVertexArrays(1, &mut id);
}
let vao = Vao{id: id};
vao.bind();
gl::EnableVertexAttribArray(id);
vao
}
pub fn bind(&self) {
gl::BindVertexArray(self.id);
}
pub fn unbind(&self) {
gl::BindVertexArray(0);
}
pub fn draw_array(&self, mesh_mode: MeshRenderMode, faces_count: MInt) {
let starting_index = 0;
let vertices_count = faces_count * 3;
let mode = mesh_mode.to_gl_type();
gl::DrawArrays(mode, starting_index, vertices_count);
}
}
impl Drop for Vao {
fn drop(&mut self) {
unsafe {
gl::DeleteVertexArrays(1, &self.id);
}
}
}
pub struct Vbo {
id: GLuint,
}
fn get_new_vbo_id() -> GLuint {
let mut id = 0;
unsafe {
gl::GenBuffers(1, &mut id);
}
id
}
impl Vbo {
pub fn from_data<T>(data: &[T]) -> Vbo {
let vbo = Vbo{id: get_new_vbo_id()};
vbo.bind();
let size = std::mem::size_of::<T>();
let buf_size = (data.len() * size) as GLsizeiptr;
if data.len() != 0 {
unsafe {
let data_ptr = std::cast::transmute(&data[0]);
gl::BufferData(
gl::ARRAY_BUFFER,
buf_size,
data_ptr,
gl::STATIC_DRAW,
);
}
}
vbo
}
pub fn bind(&self) {
gl::BindBuffer(gl::ARRAY_BUFFER, self.id);
}
}
impl Drop for Vbo {
fn drop(&mut self) {
unsafe {
gl::DeleteBuffers(1, &self.id);
}
}
}
pub fn read_pixel_bytes(
win_size: Size2<MInt>,
mouse_pos: Vec2<MInt>,
) -> (MInt, MInt, MInt, MInt) {
let height = win_size.h;
let reverted_h = height - mouse_pos.y;
let data: [u8, ..4] = [0, 0, 0, 0]; // mut
unsafe {
let data_ptr = std::cast::transmute(&data[0]);
gl::ReadPixels(
mouse_pos.x, reverted_h, 1, 1,
gl::RGBA,
gl::UNSIGNED_BYTE,
data_ptr
);
}
(data[0] as MInt, data[1] as MInt, data[2] as MInt, data[3] as MInt)
}
// vim: set tabstop=4 shiftwidth=4 softtabstop=4 expandtab:
| true |
a004c9f9579967a293daff057df6836e8d43b2c1
|
Rust
|
gadomski/velodyne
|
/src/source.rs
|
UTF-8
| 485 | 3.515625 | 4 |
[] |
no_license
|
//! Sources of Velodyne data.
use Point;
/// A source of Velodyne data.
#[derive(Clone, Copy, Debug)]
pub struct Source;
impl Source {
    /// Returns an iterator over this source's points.
pub fn points(&mut self) -> Points {
unimplemented!()
}
}
/// An iterator over a source's points.
#[derive(Clone, Copy, Debug)]
pub struct Points;
impl Iterator for Points {
type Item = Point;
fn next(&mut self) -> Option<Point> {
unimplemented!()
}
}
| true |
9a16d9ad0070ea087b830e566e40571dd44e9a76
|
Rust
|
bertptrs/adventofcode
|
/2022/src/day25.rs
|
UTF-8
| 1,557 | 3.5625 | 4 |
[
"MIT"
] |
permissive
|
use anyhow::Result;
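// SNAFU numbers are balanced base-5: the digits '2', '1', '0', '-' (minus one) and
// '=' (minus two), most significant digit first. For example, "2=-01" decodes to
// 2*625 - 2*125 - 1*25 + 0*5 + 1 = 976.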
fn parse_num(num: &[u8]) -> Result<i64> {
let mut total = 0;
let mut factor = 1;
for &b in num.iter().rev() {
match b {
b'0' => (),
b'1' => total += factor,
b'2' => total += 2 * factor,
b'-' => total -= factor,
b'=' => total -= 2 * factor,
other => anyhow::bail!("Invalid digit {other}"),
}
factor *= 5;
}
Ok(total)
}
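// Encode by repeated division by 5. Remainders 3 and 4 map to '=' and '-' (worth -2
// and -1), so the quotient is bumped (num += 2 / num += 1) to carry the difference
// into the next digit.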
fn encode(mut num: i64) -> String {
let mut buffer = Vec::new();
while num > 0 {
match num % 5 {
0 => buffer.push(b'0'),
1 => buffer.push(b'1'),
2 => buffer.push(b'2'),
3 => {
buffer.push(b'=');
num += 2
}
4 => {
buffer.push(b'-');
num += 1;
}
_ => unreachable!("math"),
}
num /= 5;
}
// We've built the string right to left, to print we must reverse
buffer.reverse();
// Safe unwrap as we've only pushed valid ascii characters
String::from_utf8(buffer).unwrap()
}
pub fn part1(input: &[u8]) -> Result<String> {
let total = input
.split(|&b| b == b'\n')
.map(parse_num)
.try_fold(0, |acc, val| val.map(|val| val + acc))?;
Ok(encode(total))
}
#[cfg(test)]
mod tests {
use super::*;
const SAMPLE: &[u8] = include_bytes!("./samples/25.txt");
#[test]
fn sample_part1() {
assert_eq!(part1(SAMPLE).unwrap(), "2=-1=0");
}
}
| true |
852b778797a7ed8381c1e87db8037d93dc945697
|
Rust
|
Lol3rrr/rizm
|
/compiler/src/pretty_print.rs
|
UTF-8
| 1,122 | 3.640625 | 4 |
[] |
no_license
|
pub trait PrettyPrint {
fn print(&self, formatter: &mut PrettyFormatter);
}
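// Builds the indentation prefix printed before each line: "->" preceded by
// (indentation - 2) spaces for depths above 2, just "->" at depths 1 and 2, and an
// empty string at depth 0.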
fn gen_padding(indentation: usize) -> String {
if indentation == 0 {
return "".to_owned();
}
let mut result = String::new();
if indentation > 2 {
for _ in 0..(indentation - 2) {
result.push(' ');
}
}
result.push('-');
result.push('>');
result
}
pub struct PrettyFormatter {
step_size: usize,
indentation: usize,
padding: String,
}
impl PrettyFormatter {
pub fn new(step_size: usize, indentation: usize) -> Self {
let padding = gen_padding(indentation);
Self {
step_size,
indentation,
padding,
}
}
pub fn print_str(&mut self, content: &str) {
println!("{}{}", self.padding, content);
}
pub fn print_sub(&mut self) -> Self {
Self::new(self.step_size, self.indentation + self.step_size)
}
}
pub fn pretty_print<E>(element: &E)
where
E: PrettyPrint,
{
let mut init_formatter = PrettyFormatter::new(2, 0);
element.print(&mut init_formatter);
}
| true |
ae9244b8d582ebfe8fae09cce5ad13b42bc2f53e
|
Rust
|
H2CO3/database_dsl_complexity
|
/ExampleData/complexity_metrics/src/main.rs
|
UTF-8
| 8,164 | 2.734375 | 3 |
[] |
no_license
|
use std::io::{ stdin, Read, Result as IoResult };
use structopt::StructOpt;
use serde::Serialize;
use serde_json::{ json, Value };
use sqlparser::{
tokenizer::{ Tokenizer, Token, TokenizerError },
parser::{ Parser, ParserError },
ast::Statement,
};
use error::SqlError;
use dialect::SqlDialect;
mod error;
mod dialect;
mod visitor;
mod histogram;
mod recursive;
mod metrics;
/// Compute complexity metrics for the given SQL string.
#[derive(Debug, Clone, StructOpt)]
enum Command {
/// Compute complexity metrics for the SQL code read from `stdin`.
Complexity {
/// The SQL dialect to adhere to when parsing the input.
#[structopt(short = "d", long = "dialect", default_value = "generic")]
dialect: SqlDialect,
},
/// Compute a histogram of token kinds using SQL queries in the
/// appropriately-formatted SQLite database, containing samples
/// from the Stack Exchange Data Explorer.
Histogram {
/// File name of the SQLite database containing the queries.
/// If `-`, queries are read from standard input instead of a DB.
#[structopt(short = "i", long = "input", default_value = "sede_stackoverflow_favorites.sqlite3")]
input: String,
/// The SQL dialect to adhere to when parsing the input.
#[structopt(short = "d", long = "dialect", default_value = "mssql")]
dialect: SqlDialect,
},
/// Searches for potentially recursive queries, i.e. where
/// the name of a CTE is referenced in its own definition.
Recursive {
/// File name of the SQLite database containing the queries.
#[structopt(short = "i", long = "input", default_value = "sede_stackoverflow_favorites.sqlite3")]
input: String,
/// The SQL dialect to adhere to when parsing the input.
#[structopt(short = "d", long = "dialect", default_value = "mssql")]
dialect: SqlDialect,
}
}
impl Command {
fn run(self) -> Result<(), SqlError> {
match self {
Command::Complexity { dialect } => Self::compute_complexity(dialect),
Command::Histogram {
input,
dialect,
} => Self::compute_histogram(input, dialect),
Command::Recursive {
input,
dialect,
} => Self::search_recursive(input, dialect),
}
}
fn compute_complexity(dialect: SqlDialect) -> Result<(), SqlError> {
let sql = read_preprocess_sql()?;
let tokens = tokenize(&sql, dialect)?;
// Token-based metrics
let token_count = metrics::token_count(&tokens);
let token_entropy = metrics::token_entropy(&tokens);
// AST Node-based metrics
let stmt = parse(tokens, dialect)?;
let typed_node_count = metrics::node_count(&stmt);
let weighted_node_count = metrics::weighted_node_count(&stmt);
let v = serde_json::to_value(&stmt)?;
let v = thin_value(v);
let json_node_count = json_node_count(&v);
// Halstead complexity metrics
let halstead = metrics::halstead_metrics(&stmt);
// Print results in JSON
let value = json!({
"token_count": token_count,
"token_entropy": token_entropy,
"node_count": typed_node_count,
"weighted_node_count": weighted_node_count,
// "json_node_count": json_node_count,
"halstead_vocabulary": halstead.vocabulary,
"halstead_length": halstead.length,
"halstead_estimated_length": halstead.estimated_length,
"halstead_volume": halstead.volume,
"halstead_difficulty": halstead.difficulty,
"halstead_effort": halstead.effort,
});
json_dump(&value)?;
Ok(())
}
fn compute_histogram(filename: String, dialect: SqlDialect) -> Result<(), SqlError> {
let histogram = if filename == "-" {
let mut code = String::new();
std::io::stdin().read_to_string(&mut code)?;
histogram::token_histogram_str(&code, dialect)?
} else {
let conn = rusqlite::Connection::open(&filename)?;
histogram::token_histogram_db(&conn, dialect)?
};
json_dump(&histogram)?;
Ok(())
}
fn search_recursive(filename: String, dialect: SqlDialect) -> Result<(), SqlError> {
let conn = rusqlite::Connection::open(&filename)?;
let queries = recursive::recursive_queries(&conn, dialect)?;
json_dump(&queries)?;
Ok(())
}
}
/// Dump a serializable value as JSON to the standard output stream.
fn json_dump<T: Serialize>(value: &T) -> serde_json::Result<()> {
serde_json::to_writer_pretty(std::io::stdout(), value)
}
/// Read SQL from standard input and replace
/// argument placeholders with literal zero.
fn read_preprocess_sql() -> IoResult<String> {
let mut buf = String::new();
stdin().read_to_string(&mut buf)?;
// Replace argument placeholders with literal zeroes.
// This doesn't even affect the character count, so
// it won't affect any complexity metrics, either.
// (The resulting SQL statement might not be correct
// with regards to types, but we are not trying to
// typecheck the source here, so that is fine.)
//
// This also replaces *every* question mark with the
// character '0', but our queries only use question
// marks for placeholders (and even inside string
// literals, this would not change the complexity
// of the query either, so this is fine as well.)
let processed = buf.replace('?', "0");
Ok(processed)
}
/// Split a SQL statement into tokens.
fn tokenize(sql: &str, dialect: SqlDialect) -> Result<Vec<Token>, TokenizerError> {
Tokenizer::new(&dialect, &sql).tokenize()
}
/// Parse a SQL statement. Only a single statement is supported.
fn parse(tokens: Vec<Token>, dialect: SqlDialect) -> Result<Statement, SqlError> {
let mut parser = Parser::new(tokens, &dialect);
let stmt = parser.parse_statement()?;
// skip trailing semicolon, if any
let _ = parser.consume_token(&Token::SemiColon);
if parser.peek_token() == Token::EOF {
Ok(stmt)
} else {
println!("{:?}", parser.peek_token());
Err(ParserError::ParserError("got multiple statements".into()).into())
}
}
/// Replace externally-tagged enums with their inner value
/// in order to remove one extra, effectively artefactual
/// level of nesting, which does not relate to the inherent
/// depth of the AST node but to its JSON representation.
fn thin_value(value: Value) -> Value {
match value {
Value::Object(map) => {
// Replace enum with its value
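            // Heuristic: a single-entry object whose key starts with an uppercase letter
            // is assumed to be a serde externally-tagged enum (e.g. {"Query": {...}}),
            // so it is collapsed to its inner value.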
if map.len() == 1 {
let key = map.keys().next().unwrap();
if let Some(ch) = key.chars().next() {
if ch.is_ascii_uppercase() {
let inner = map.into_iter().next().unwrap().1;
return thin_value(inner);
}
}
}
// remove nulls and empty arrays
map.into_iter()
.filter(|&(_, ref v)| {
match *v {
Value::Null => false,
Value::Array(ref vec) => vec.len() > 0,
_ => true,
}
})
.map(|(k, v)| (k, thin_value(v)))
.collect()
},
Value::Array(vec) => vec.into_iter().map(thin_value).collect(),
_ => value,
}
}
/// Count the total number of nodes in a JSON `Value` tree.
fn json_node_count(value: &Value) -> usize {
match *value {
Value::Array(ref arr) => {
arr.iter().map(json_node_count).sum::<usize>() + 1
}
Value::Object(ref object) => {
// keys are all strings, so we just count entries
object.values().map(json_node_count).sum::<usize>() + 1
}
_ => 1 // scalars
}
}
fn main() -> Result<(), SqlError> {
Command::from_args().run()
}
| true |
5110314cc41fa9e94a1aea67965c7e781b12d6dd
|
Rust
|
2color/prisma-engines
|
/query-engine/core/src/schema/enum_type.rs
|
UTF-8
| 3,544 | 2.984375 | 3 |
[
"Apache-2.0"
] |
permissive
|
use super::*;
use prisma_models::{InternalEnumRef, PrismaValue, ScalarFieldRef};
#[derive(Debug, Clone, PartialEq)]
pub enum EnumType {
/// Generic, prisma-application specific string enum.
/// Semantics are defined by the component interpreting the contents.
String(StringEnumType),
/// Enum from the internal data model, representing an enum on the database level.
Database(DatabaseEnumType),
/// Enum referencing fields on a model.
FieldRef(FieldRefEnumType),
}
impl EnumType {
pub fn name(&self) -> &str {
match self {
Self::String(s) => &s.name,
Self::Database(db) => &db.name,
Self::FieldRef(f) => &f.name,
}
}
// Used as cache keys, for example.
pub fn identifier(&self) -> Identifier {
Identifier::new(self.name().to_owned(), self.namespace())
}
pub fn namespace(&self) -> String {
match self {
Self::String(_) => PRISMA_NAMESPACE,
Self::Database(_) => MODEL_NAMESPACE,
Self::FieldRef(_) => PRISMA_NAMESPACE,
}
.to_string()
}
}
#[derive(Debug, Clone, PartialEq)]
pub struct StringEnumType {
pub name: String,
pub values: Vec<String>,
}
impl StringEnumType {
/// Attempts to find an enum value for the given value key.
pub fn value_for(&self, name: &str) -> Option<&str> {
self.values
.iter()
.find_map(|val| if val == name { Some(val.as_str()) } else { None })
}
pub fn values(&self) -> &[String] {
&self.values
}
}
impl From<InternalEnumRef> for EnumType {
fn from(internal_enum: InternalEnumRef) -> EnumType {
EnumType::Database(DatabaseEnumType {
name: internal_enum.name.clone(),
internal_enum,
})
}
}
#[derive(Debug, Clone, PartialEq)]
pub struct DatabaseEnumType {
pub name: String,
pub internal_enum: InternalEnumRef,
}
impl DatabaseEnumType {
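    /// Maps a client-facing enum value name to its database-level name, wrapped in
    /// `PrismaValue::Enum`; `map_output_value` performs the inverse mapping.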
pub fn map_input_value(&self, val: &str) -> Option<PrismaValue> {
Some(PrismaValue::Enum(
self.internal_enum
.values
.iter()
.find(|ev| ev.name == val)?
.db_name()
.clone(),
))
}
pub fn map_output_value(&self, val: &str) -> Option<PrismaValue> {
Some(PrismaValue::Enum(
self.internal_enum
.values
.iter()
.find(|ev| ev.db_name() == val)?
.name
.clone(),
))
}
pub fn external_values(&self) -> Vec<String> {
self.internal_enum
.values
.iter()
.map(|v| v.name.to_string())
.collect::<Vec<String>>()
}
}
#[derive(Debug, Clone, PartialEq)]
pub struct FieldRefEnumType {
pub name: String,
pub values: Vec<(String, ScalarFieldRef)>,
}
impl FieldRefEnumType {
/// Attempts to find an enum value for the given value key.
pub fn value_for(&self, name: &str) -> Option<&ScalarFieldRef> {
self.values
.iter()
.find_map(|val| if val.0 == name { Some(&val.1) } else { None })
}
pub fn values(&self) -> Vec<String> {
self.values.iter().map(|(name, _)| name.to_owned()).collect()
}
}
impl From<EnumType> for OutputType {
fn from(e: EnumType) -> Self {
OutputType::Enum(Arc::new(e))
}
}
impl From<EnumType> for InputType {
fn from(e: EnumType) -> Self {
InputType::Enum(Arc::new(e))
}
}
| true |
0fb01d7d8202cf931d2ce6548d3ce6d511ef1d8c
|
Rust
|
Nasupl/embedded-rust-devcontainer
|
/wio-examples/examples/6-3-uart.rs
|
UTF-8
| 807 | 2.71875 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
//! Sample code for 6-3 Serial I/O / UART.
//! It prints the following to the serial terminal on the host PC:
//! ```text
//! hello world
//! this is UART example!
//! ```
//!
//! ### How to run
//! ```sh
//! $ cargo hf2 --example 6-3-uart
//! ```
#![no_std]
#![no_main]
use panic_halt as _;
use wio_terminal as wio;
use core::fmt::Write;
use wio::hal::clock::GenericClockController;
use wio::pac::Peripherals;
use wio::prelude::*;
use wio::{entry, Pins, Sets};
#[entry]
fn main() -> ! {
let mut peripherals = Peripherals::take().unwrap();
    // Initialize the clock
    // TODO: initialize the UART driver object
    // TODO: print "hello world"
    // TODO: print "this is UART example!"
loop {}
}
| true |
d0eaaefa12cb5db7645541cb211cdb62c14270d0
|
Rust
|
yukihir0/rust_sandbox
|
/fizzbuzz_iterator/src/main.rs
|
UTF-8
| 1,487 | 3.515625 | 4 |
[] |
no_license
|
use std::fmt;
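// Note: this enum shadows the standard library's `Result` within this module; the
// `fmt::Result` return types below are unaffected because they are path-qualified.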
enum Result {
Number(u64),
Fizz,
Buzz,
FizzBuzz,
}
impl fmt::Display for Result {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
Result::Number(n) => write!(f, "{}", n),
Result::Fizz => write!(f, "Fizz"),
Result::Buzz => write!(f, "Buzz"),
Result::FizzBuzz => write!(f, "Fizz Buzz"),
}
}
}
impl fmt::Debug for Result {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
Result::Number(n) => write!(f, "{}", n),
Result::Fizz => write!(f, "Fizz"),
Result::Buzz => write!(f, "Buzz"),
Result::FizzBuzz => write!(f, "Fizz Buzz"),
}
}
}
struct FizzBuzzGenerator {
num: u64,
}
impl FizzBuzzGenerator {
fn new() -> FizzBuzzGenerator {
FizzBuzzGenerator { num: 0 }
}
}
impl Iterator for FizzBuzzGenerator {
type Item = Result;
fn next(&mut self) -> Option<Self::Item> {
self.num = self.num + 1;
if self.num % 15 == 0 {
Some(Result::FizzBuzz)
} else if self.num % 3 == 0 {
Some(Result::Fizz)
} else if self.num % 5 == 0 {
Some(Result::Buzz)
} else {
Some(Result::Number(self.num))
}
}
}
fn main() {
let fizzbuzz= FizzBuzzGenerator::new();
for n in fizzbuzz.take(20) {
println!("{}", n)
}
}
| true |