blob_id
stringlengths 40
40
| language
stringclasses 1
value | repo_name
stringlengths 5
140
| path
stringlengths 5
183
| src_encoding
stringclasses 6
values | length_bytes
int64 12
5.32M
| score
float64 2.52
4.94
| int_score
int64 3
5
| detected_licenses
listlengths 0
47
| license_type
stringclasses 2
values | text
stringlengths 12
5.32M
| download_success
bool 1
class |
---|---|---|---|---|---|---|---|---|---|---|---|
e79ac673c8be7b34b62f1f03aba2b0cd4f3c9796
|
Rust
|
rendlein/rustlike
|
/src/ui/mod.rs
|
UTF-8
| 1,617 | 2.890625 | 3 |
[] |
no_license
|
use specs::{Join, World, WorldExt};
use tcod::console::*;
use tcod::colors::*;
use crate::config::CONFIG;
use crate::components::{Position, Renderable};
use crate::handle_keys;
use tcod::console;
/// Owns the tcod consoles the game renders into.
pub struct Ui {
    // Root libtcod window created by `Root::initializer` in `Ui::new`.
    pub(crate) root: Root,
    // Offscreen buffer entities are drawn onto before being blitted to `root`.
    pub con: Offscreen,
}
impl Ui {
    /// Build the root console and a full-screen offscreen drawing console,
    /// both sized from the global `CONFIG`.
    pub fn new() -> Self {
        let root = Root::initializer()
            .font(CONFIG.font.clone(), FontLayout::Tcod)
            .font_type(FontType::Greyscale)
            .size(CONFIG.width, CONFIG.height)
            .title("Rustlike")
            .init();
        // The offscreen console currently covers the whole window.
        let con = Offscreen::new(CONFIG.width, CONFIG.height);
        Ui { root, con }
    }

    /// Draw every renderable entity to the offscreen console, blit it to the
    /// root console, flush, then process input. Returns `false` when the
    /// input handler requests an exit, `true` to keep running.
    pub fn render(&mut self, entities: &mut World) -> bool {
        self.con.set_default_foreground(WHITE);
        self.con.clear();

        {
            // Scope the component-storage borrows so they are released
            // before `handle_keys` borrows the world again.
            let positions = &entities.read_storage::<Position>();
            let renderables = &entities.read_storage::<Renderable>();
            for (pos, render) in (positions, renderables).join() {
                self.con
                    .put_char_ex(pos.x, pos.y, render.glyph, render.fg, render.bg);
            }
        }

        console::blit(
            &self.con,
            (0, 0),
            (CONFIG.width, CONFIG.height),
            &mut self.root,
            (0, 0),
            1.0,
            1.0,
        );
        self.root.flush();

        // Keep the loop alive unless the key handler signalled an exit.
        !handle_keys(self, entities)
    }
}
| true |
ee95a41091052e1ba3e4fec195d6abf6fb983e43
|
Rust
|
shff/paq
|
/tests/bundler_tests.rs
|
UTF-8
| 1,500 | 2.796875 | 3 |
[
"MIT"
] |
permissive
|
use paq::bundle;
#[test]
fn test_bundler() {
    // Bundle a fixture directory and check the output contains a marker.
    fn assert_bundle(path: &str, substring: &str) {
        let fixtures = std::env::current_dir()
            .unwrap()
            .join("tests/fixtures/bundler");
        let result = bundle(&fixtures.join(path).join("index.js")).expect("Error");
        assert!(result.contains(substring))
    }

    // Bundle a fixture, execute it with node, and compare trimmed stdout.
    fn assert_node(path: &str, value: &str) {
        let fixtures = std::env::current_dir()
            .unwrap()
            .join("tests/fixtures/bundler");
        let result = bundle(&fixtures.join(path).join("index.js")).expect("Error");
        let output = std::process::Command::new("node")
            .arg("-e")
            .arg(&result)
            .output()
            .expect("Error running node");
        assert_eq!(String::from_utf8_lossy(&output.stdout).trim(), value);
    }

    // Static checks on the generated bundle text.
    assert_bundle("basic", "console.log('hello')");
    assert_bundle("with-dep", "/* math.js */");
    assert_bundle("double-quotes", "/* math.js */");
    assert_bundle("crazy-indent", "/* math.js */");
    assert_bundle("with-modules", "PETER RULES");
    assert_bundle("with-modules-2", "PartitionIter");

    // Behavioural checks: run each bundle under node.
    assert_node("basic", "hello");
    assert_node("with-dep", "2");
    assert_node("double-quotes", "");
    assert_node("crazy-indent", "");
    assert_node("with-modules", "Once upon a day there was a person, named Peter DeMartini\nHe is awesome!\nHave a nice day...\n \n \n \nPETER RULES!!!");
    assert_node("with-modules-2", "0 1 2 3 4");
}
| true |
f76631f4166b9026b39118302fc39b54858f8e28
|
Rust
|
gengteng/impl-leetcode-for-rust
|
/src/longest_common_prefix.rs
|
UTF-8
| 2,002 | 4 | 4 |
[
"MIT"
] |
permissive
|
/// # 14. Longest Common Prefix
///
/// Write a function to find the longest common prefix string amongst an array of strings.
///
/// If there is no common prefix, return an empty string "".
///
/// # Example 1:
///
/// Input: ["flower","flow","flight"]
/// Output: "fl"
/// # Example 2:
///
/// Input: ["dog","racecar","car"]
/// Output: ""
/// Explanation: There is no common prefix among the input strings.
/// # Note:
///
/// All given inputs are in lowercase letters a-z.
/// Contract for LeetCode #14: longest common prefix of a list of strings.
pub trait LongestCommonPrefix {
    /// Returns the longest prefix shared by every string in `strings`;
    /// empty when `strings` is empty or nothing is shared.
    fn longest_common_prefix(strings: &[&str]) -> String;
}

/// Byte-comparison solution, O(total input length).
pub struct Solution1;

impl LongestCommonPrefix for Solution1 {
    fn longest_common_prefix(strings: &[&str]) -> String {
        // The answer is always a prefix of the first string.
        let first = match strings.first() {
            Some(s) => *s,
            None => return String::new(),
        };
        // Length in BYTES of the common prefix found so far. The original
        // used `chars().nth(i)` per string per position (quadratic rescans)
        // and mixed a char count with a byte slice, which is only correct
        // for ASCII input; byte-wise zip is linear and boundary-safe.
        let mut end = first.len();
        for s in &strings[1..] {
            let common = first
                .bytes()
                .zip(s.bytes())
                .take_while(|(a, b)| a == b)
                .count();
            end = end.min(common);
            if end == 0 {
                return String::new();
            }
        }
        // Byte matching can stop inside a multi-byte character; retreat to
        // the nearest char boundary before slicing.
        while !first.is_char_boundary(end) {
            end -= 1;
        }
        first[..end].to_string()
    }
}
#[cfg(test)]
mod test {
    use super::LongestCommonPrefix;
    // NOTE(review): `test::Bencher` needs the nightly `test` crate
    // (`#![feature(test)]` + `extern crate test` at the crate root) —
    // confirm the crate is set up for nightly benches.
    use test::Bencher;
    use super::Solution1;

    // Examples from the problem statement plus degenerate inputs
    // (single string, empty slice).
    #[test]
    fn test_solution1() {
        assert_eq!(Solution1::longest_common_prefix(&["flower","flow","flight"]), "fl");
        assert_eq!(Solution1::longest_common_prefix(&["dog","race_car","car"]), "");
        assert_eq!(Solution1::longest_common_prefix(&["same","same","same"]), "same");
        assert_eq!(Solution1::longest_common_prefix(&["test"]), "test");
        assert_eq!(Solution1::longest_common_prefix(&[]), "");
    }

    // Micro-benchmark over the canonical example input.
    #[bench]
    fn bench_solution1(b: &mut Bencher) {
        b.iter(|| Solution1::longest_common_prefix(&["flower","flow","flight"]));
    }
}
| true |
41afa7dab517ccec93f880593d57511201802265
|
Rust
|
psyashes/rust-udp-server
|
/src/main.rs
|
UTF-8
| 879 | 3.03125 | 3 |
[] |
no_license
|
use std::net::UdpSocket;
use std::thread;
use std::str;
/// UDP logging server: binds 127.0.0.1:34254 and prints every datagram it
/// receives, handling each request on its own thread. Runs forever; only
/// the initial bind error is propagated.
fn main() -> std::io::Result<()> {
    let socket = UdpSocket::bind("127.0.0.1:34254")?;
    // Fixed 2 KiB receive buffer; UDP payloads beyond this are not kept.
    let mut buf = [0; 2048];
    loop {
        match socket.recv_from(&mut buf) {
            Ok((buf_size, src_addr)) => {
                // `[u8; 2048]` is Copy, so the spawned thread receives its
                // own snapshot of the buffer contents.
                thread::spawn(move || {
                    let data = &buf[..buf_size];
                    // Datagrams are untrusted input: decode lossily instead
                    // of panicking on non-UTF-8 payloads (the original
                    // `str::from_utf8(..).unwrap()` crashed the handler).
                    let req_msg = String::from_utf8_lossy(data);
                    println!("{:}", "=".repeat(80));
                    println!("buffer size: {:?}", buf_size);
                    println!("src address: {:?}", src_addr);
                    println!("request message: {:?}", req_msg);
                    // let res_msg = b"Thanks for sending message!";
                    // socket.send_to(res_msg, &src_addr).expect("couldn't send data");
                });
            },
            Err(e) => {
                // Fixed typo in the log message: "recieve" -> "receive".
                println!("couldn't receive request: {:?}", e);
            }
        }
    }
}
| true |
2b5abd0b1d5027e9b08408a50fc8db084ea5ff45
|
Rust
|
StoriqaTeam/gateway
|
/src/graphql/schema/search.rs
|
UTF-8
| 12,951 | 2.6875 | 3 |
[] |
no_license
|
//! File containing search object of graphql schema
use std::cmp;
use std::str::FromStr;
use futures::future;
use futures::Future;
use hyper::Method;
use juniper::FieldResult;
use juniper::ID as GraphqlID;
use serde_json;
use stq_routes::model::Model;
use stq_routes::service::Service;
use stq_static_resources::ModerationStatus;
use stq_types::CategoryId;
use graphql::context::Context;
use graphql::models::*;
// Relay-style search endpoints. Each field serializes its search input,
// POSTs it to the Stores microservice, and blocks on the future (`.wait()`)
// inside the resolver.
graphql_object!(Search: Context as "Search" |&self| {
    description: "Searching endpoint."

    field find_product(&executor,
        first = None : Option<i32> as "First edges",
        after = None : Option<GraphqlID> as "Offset form beginning",
        search_term : SearchProductInput as "Search pattern",
        visibility: Option<Visibility> as "Specifies allowed visibility of the base product"
    ) -> FieldResult<Option<Connection<BaseProduct, PageInfoProductsSearch>>> as "Find products by name using relay connection." {
        let context = executor.context();
        let visibility = visibility.unwrap_or_default();
        // The relay cursor is a stringified edge index; resume one past it.
        // Malformed cursors silently fall back to offset 0.
        let offset = after
            .and_then(|id|{
                i32::from_str(&id).map(|i| i + 1).ok()
            })
            .unwrap_or_default();
        let records_limit = context.config.gateway.records_limit;
        let count = cmp::min(first.unwrap_or(records_limit as i32), records_limit as i32);
        // Request one extra record so a following page can be detected.
        let url = format!("{}/{}/search?offset={}&count={}&visibility={}",
            context.config.service_url(Service::Stores),
            Model::BaseProduct.to_url(),
            offset,
            count + 1,
            visibility
        );
        // Published visibility additionally constrains moderation status.
        let mut options = search_term.options.clone().unwrap_or_default();
        if visibility == Visibility::Published {
            options.status = Some(ModerationStatus::Published);
        };
        let mut search_term = search_term;
        search_term.options = Some(options);
        let body = serde_json::to_string(&search_term)?;
        context.request::<Vec<BaseProduct>>(Method::Post, url, Some(body))
            .map (|products| {
                let mut product_edges = Edge::create_vec(products, offset);
                let search_filters = ProductsSearchFilters::new(search_term);
                // The sentinel extra record signals another page; drop it
                // so the page holds at most `count` edges.
                let has_next_page = product_edges.len() as i32 == count + 1;
                if has_next_page {
                    product_edges.pop();
                };
                // NOTE(review): hard-coded; `offset > 0` would be accurate —
                // confirm what relay clients expect here.
                let has_previous_page = true;
                let start_cursor = product_edges.get(0).map(|e| e.cursor.clone());
                let end_cursor = product_edges.iter().last().map(|e| e.cursor.clone());
                let page_info = PageInfoProductsSearch {
                    has_next_page,
                    has_previous_page,
                    search_filters: Some(search_filters),
                    start_cursor,
                    end_cursor};
                Connection::new(product_edges, page_info)
            })
            .wait()
            .map(Some)
    }

    field auto_complete_product_name(&executor,
        first = None : Option<i32> as "First edges",
        after = None : Option<GraphqlID> as "Offset form beginning",
        name : String as "Name part")
            -> FieldResult<Option<Connection<String, PageInfo>>> as "Finds products full name by part of the name." {
        let context = executor.context();
        let offset = after
            .and_then(|id|{
                i32::from_str(&id).map(|i| i + 1).ok()
            })
            .unwrap_or_default();
        let records_limit = context.config.gateway.records_limit;
        let count = cmp::min(first.unwrap_or(records_limit as i32), records_limit as i32);
        let url = format!("{}/{}/auto_complete?offset={}&count={}",
            context.config.service_url(Service::Stores),
            Model::BaseProduct.to_url(),
            offset,
            count + 1,
        );
        // Only published products contribute to name autocompletion.
        let search_term = AutoCompleteProductNameInput {
            name,
            store_id : None,
            status: Some(ModerationStatus::Published),
        };
        let body = serde_json::to_string(&search_term)?;
        context.request::<Vec<String>>(Method::Post, url, Some(body))
            .map (|full_names| {
                let mut full_name_edges = Edge::create_vec(full_names, offset);
                // Same +1 sentinel pattern as `find_product`.
                let has_next_page = full_name_edges.len() as i32 == count + 1;
                if has_next_page {
                    full_name_edges.pop();
                };
                let has_previous_page = true;
                let start_cursor = full_name_edges.get(0).map(|e| e.cursor.clone());
                let end_cursor = full_name_edges.iter().last().map(|e| e.cursor.clone());
                let page_info = PageInfo {
                    has_next_page,
                    has_previous_page,
                    start_cursor,
                    end_cursor};
                Connection::new(full_name_edges, page_info)
            })
            .wait()
            .map(Some)
    }

    field find_store(&executor,
        first = None : Option<i32> as "First edges",
        after = None : Option<GraphqlID> as "Offset form beginning",
        search_term : SearchStoreInput as "Search store input",
        visibility: Option<Visibility> as "Specifies allowed visibility of the store"
    ) -> FieldResult<Option<Connection<Store, PageInfoStoresSearch>>> as "Finds stores by name using relay connection." {
        let context = executor.context();
        let visibility = visibility.unwrap_or_default();
        let offset = after
            .and_then(|id|{
                i32::from_str(&id).map(|i| i + 1).ok()
            })
            .unwrap_or_default();
        let records_limit = context.config.gateway.records_limit;
        let count = cmp::min(first.unwrap_or(records_limit as i32), records_limit as i32);
        let body = serde_json::to_string(&search_term)?;
        // NOTE(review): debug print of the request body left in — remove or
        // route through a proper logger before production.
        println!("{}", body);
        let url = format!("{}/{}/search?offset={}&count={}&visibility={}",
            context.config.service_url(Service::Stores),
            Model::Store.to_url(),
            offset,
            count + 1,
            visibility
        );
        context.request::<Vec<Store>>(Method::Post, url, Some(body))
            .and_then (|stores| {
                let mut store_edges = Edge::create_vec(stores, offset);
                let has_next_page = store_edges.len() as i32 == count + 1;
                if has_next_page {
                    store_edges.pop();
                };
                let has_previous_page = true;
                let start_cursor = store_edges.get(0).map(|e| e.cursor.clone());
                let end_cursor = store_edges.iter().last().map(|e| e.cursor.clone());
                let search_filters = StoresSearchFilters::new(search_term);
                let page_info = PageInfoStoresSearch {
                    has_next_page,
                    has_previous_page,
                    search_filters,
                    start_cursor,
                    end_cursor
                };
                future::ok(Connection::new(store_edges, page_info))
            })
            .wait()
            .map(Some)
    }

    field auto_complete_store_name(&executor,
        first = None : Option<i32> as "First edges",
        after = None : Option<GraphqlID> as "Offset form beginning",
        name : String as "Name part")
            -> FieldResult<Option<Connection<String, PageInfo>>> as "Finds stores full name by part of the name." {
        let context = executor.context();
        let offset = after
            .and_then(|id|{
                i32::from_str(&id).map(|i| i + 1).ok()
            })
            .unwrap_or_default();
        let records_limit = context.config.gateway.records_limit;
        let count = cmp::min(first.unwrap_or(records_limit as i32), records_limit as i32);
        let url = format!("{}/{}/auto_complete?offset={}&count={}",
            context.config.service_url(Service::Stores),
            Model::Store.to_url(),
            offset,
            count + 1
        );
        // NOTE(review): sends the raw `name` string as the body, unlike the
        // product autocomplete which serializes an input struct — confirm
        // the stores service accepts a bare string here.
        context.request::<Vec<String>>(Method::Post, url, Some(name))
            .map (|full_names| {
                let mut full_name_edges = Edge::create_vec(full_names, offset);
                let has_next_page = full_name_edges.len() as i32 == count + 1;
                if has_next_page {
                    full_name_edges.pop();
                };
                let start_cursor = full_name_edges.get(0).map(|e| e.cursor.clone());
                let end_cursor = full_name_edges.iter().last().map(|e| e.cursor.clone());
                let has_previous_page = true;
                let page_info = PageInfo {
                    has_next_page,
                    has_previous_page,
                    start_cursor,
                    end_cursor};
                Connection::new(full_name_edges, page_info)
            })
            .wait()
            .map(Some)
    }
});
// Lazily-resolved filter facets for a product search: each field re-POSTs
// the saved search term to a dedicated filter endpoint on the Stores service.
graphql_object!(ProductsSearchFilters: Context as "ProductsSearchFilters" |&self| {
    description: "Products Search Filters options endpoint."

    field price_range(&executor) -> FieldResult<Option<RangeFilter>> as "Price filter."{
        let context = executor.context();
        let body = serde_json::to_string(&self.search_term)?;
        let url = format!("{}/{}/search/filters/price",
            context.config.service_url(Service::Stores),
            Model::BaseProduct.to_url(),
        );
        context.request::<RangeFilter>(Method::Post, url, Some(body))
            .wait()
            .map(Some)
    }

    field categories(&executor) -> FieldResult<Option<SearchCategory>> as "Category."{
        let context = executor.context();
        let body = serde_json::to_string(&self.search_term)?;
        let url = format!("{}/{}/search/filters/category",
            context.config.service_url(Service::Stores),
            Model::BaseProduct.to_url(),
        );
        context.request::<SearchCategory>(Method::Post, url, Some(body))
            .wait()
            .map(Some)
    }

    field attr_filters(&executor) -> FieldResult<Option<Vec<AttributeFilter>>> as "Attribute filters for whole category."{
        let context = executor.context();
        // Attribute filters are category-wide: forward only the category id
        // (plus the published-status constraint) from the original search,
        // dropping every other option.
        let mut options = ProductsSearchOptionsInput::default();
        options.category_id = self.search_term.options
            .clone()
            .map(|o| o.category_id)
            .and_then(|x| x);
        options.status = Some(ModerationStatus::Published);
        let mut search_term_only_category = SearchProductInput::default();
        search_term_only_category.options = Some(options);
        let body = serde_json::to_string(&search_term_only_category)?;
        let url = format!("{}/{}/search/filters/attributes",
            context.config.service_url(Service::Stores),
            Model::BaseProduct.to_url(),
        );
        context.request::<Option<Vec<AttributeFilter>>>(Method::Post, url, Some(body))
            .wait()
    }
});
// Filter facets for a store search; same pattern as ProductsSearchFilters
// but against the Store model's filter endpoints.
graphql_object!(StoresSearchFilters: Context as "StoresSearchFilters" |&self| {
    description: "Stores Search Filters options endpoint."

    field total_count(&executor) -> FieldResult<i32> as "Total count."{
        let context = executor.context();
        let body = serde_json::to_string(&self.search_term)?;
        let url = format!("{}/{}/search/filters/count",
            context.config.service_url(Service::Stores),
            Model::Store.to_url(),
        );
        context.request::<i32>(Method::Post, url, Some(body))
            .wait()
    }

    field category(&executor) -> FieldResult<Option<Category>> as "Category."{
        let context = executor.context();
        let body = serde_json::to_string(&self.search_term)?;
        let url = format!("{}/{}/search/filters/category",
            context.config.service_url(Service::Stores),
            Model::Store.to_url(),
        );
        context.request::<Category>(Method::Post, url, Some(body))
            .wait()
            .map(|mut cat|{
                // Overwrite the id with a sentinel value.
                cat.id = CategoryId(-1); //for Relay: root category and searched category must not have equal id
                Some(cat)
            })
    }

    field country(&executor) -> FieldResult<Option<Vec<String>>> as "Countries"{
        let context = executor.context();
        let body = serde_json::to_string(&self.search_term)?;
        let url = format!("{}/{}/search/filters/country",
            context.config.service_url(Service::Stores),
            Model::Store.to_url(),
        );
        context.request::<Vec<String>>(Method::Post, url, Some(body))
            .wait()
            .map(Some)
    }
});
| true |
1079e498dd8edea4b8d66915d62abb113744f734
|
Rust
|
keiichiw/advent-of-code-2020
|
/day17/src/main.rs
|
UTF-8
| 4,036 | 2.890625 | 3 |
[
"MIT"
] |
permissive
|
#![allow(clippy::needless_range_loop)]
use std::collections::BTreeSet;
/// A cell coordinate in the 3-dimensional Conway-cubes space.
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Debug)]
struct Pos3 {
    x: i32,
    y: i32,
    z: i32,
}

/// Behaviour shared by 3- and 4-dimensional cell coordinates.
trait Pos {
    type P;
    /// Lift a 2-D grid coordinate into the full space (extra axes zeroed).
    fn new(x: i32, y: i32) -> Self::P;
    /// Every cell adjacent to `self` (the cell itself excluded).
    fn neighbors(&self) -> BTreeSet<Self::P>;
    /// Whether this cell is active after the next cycle, given the current
    /// set of active cells.
    fn next_active(&self, actives: &BTreeSet<Self::P>) -> bool;
}

impl Pos for Pos3 {
    type P = Pos3;

    fn new(x: i32, y: i32) -> Self::P {
        Pos3 { x, y, z: 0 }
    }

    fn neighbors(&self) -> BTreeSet<Self::P> {
        // 3^3 - 1 = 26 neighbours: every offset except the zero offset.
        let mut result = BTreeSet::new();
        for dx in -1..=1 {
            for dy in -1..=1 {
                for dz in -1..=1 {
                    if (dx, dy, dz) != (0, 0, 0) {
                        result.insert(Pos3 {
                            x: self.x + dx,
                            y: self.y + dy,
                            z: self.z + dz,
                        });
                    }
                }
            }
        }
        result
    }

    fn next_active(&self, actives: &BTreeSet<Self::P>) -> bool {
        let live = self.neighbors().intersection(actives).count();
        // Active cells survive with 2 or 3 live neighbours; any cell with
        // exactly 3 becomes (or stays) active.
        live == 3 || (live == 2 && actives.contains(self))
    }
}

/// A cell coordinate in the 4-dimensional hypercube space (part 2).
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Debug)]
struct Pos4 {
    x: i32,
    y: i32,
    z: i32,
    w: i32,
}

impl Pos for Pos4 {
    type P = Pos4;

    fn new(x: i32, y: i32) -> Self::P {
        Pos4 { x, y, z: 0, w: 0 }
    }

    fn neighbors(&self) -> BTreeSet<Self::P> {
        // 3^4 - 1 = 80 neighbours in four dimensions.
        let mut result = BTreeSet::new();
        for dx in -1..=1 {
            for dy in -1..=1 {
                for dz in -1..=1 {
                    for dw in -1..=1 {
                        if (dx, dy, dz, dw) != (0, 0, 0, 0) {
                            result.insert(Pos4 {
                                x: self.x + dx,
                                y: self.y + dy,
                                z: self.z + dz,
                                w: self.w + dw,
                            });
                        }
                    }
                }
            }
        }
        result
    }

    fn next_active(&self, actives: &BTreeSet<Self::P>) -> bool {
        let live = self.neighbors().intersection(actives).count();
        live == 3 || (live == 2 && actives.contains(self))
    }
}
/// A Conway-cubes automaton over some position type.
trait State {
    type P;
    // Build the initial active set from a character grid ('#' = active).
    fn new(grid: &[Vec<char>]) -> Self;
    // Candidate cells for the next cycle: all actives plus their neighbours.
    fn neighbors(&self) -> BTreeSet<Self::P>;
    // Advance the automaton one step.
    fn cycle(&mut self);
}

/// Automaton state: the set of currently-active cell positions.
struct StateImpl<T> {
    actives: BTreeSet<T>,
}
impl<T: Pos<P = T> + Clone + Ord> State for StateImpl<T> {
    type P = T;

    /// Seed the active set from a 2-D grid: '#' marks an active cell.
    fn new(grid: &[Vec<char>]) -> Self {
        let mut actives: BTreeSet<Self::P> = Default::default();
        for (x, row) in grid.iter().enumerate() {
            for (y, &cell) in row.iter().enumerate() {
                if cell == '#' {
                    actives.insert(Self::P::new(x as i32, y as i32));
                }
            }
        }
        Self { actives }
    }

    /// Every active cell plus each of its neighbours — the only cells whose
    /// state can possibly change on the next cycle.
    fn neighbors(&self) -> BTreeSet<Self::P> {
        let mut candidates: BTreeSet<Self::P> = self.actives.clone();
        for pos in &self.actives {
            candidates = candidates.union(&pos.neighbors()).cloned().collect();
        }
        candidates
    }

    /// Apply the activation rule to every candidate cell simultaneously.
    fn cycle(&mut self) {
        let mut next = BTreeSet::<Self::P>::new();
        for candidate in self.neighbors() {
            if candidate.next_active(&self.actives) {
                next.insert(candidate);
            }
        }
        self.actives = next;
    }
}
fn main() {
    let grid = utils::read_grid("./day17.txt");

    // Part 1: 3-dimensional Conway cubes, six cycles.
    let mut state = StateImpl::<Pos3>::new(&grid);
    for _ in 0..6 {
        state.cycle();
    }
    println!("Part 1: {}", state.actives.len());

    // Part 2: the same rules in four dimensions.
    let mut state = StateImpl::<Pos4>::new(&grid);
    for _ in 0..6 {
        state.cycle();
    }
    println!("Part 2: {}", state.actives.len());
}
| true |
e7b03b61551b4621482786fbef688f371d5eca43
|
Rust
|
isabella232/erlang-graph
|
/native/graph.rs
|
UTF-8
| 3,986 | 2.546875 | 3 |
[
"Apache-2.0"
] |
permissive
|
use petgraph::{graph::EdgeIndex, graph::NodeIndex, Graph};
use rustler::{
env::{Env, OwnedEnv, SavedTerm},
resource::ResourceArc,
Encoder, Term,
};
use std::sync::Mutex;
////////////////////////////////////////////////////////////////////////////
// Atoms //
////////////////////////////////////////////////////////////////////////////
// Atoms returned to Erlang; `badindex` tags lookups made with a stale or
// unknown node/edge index.
mod atom {
    rustler::atoms! {
        badindex
    }
}
////////////////////////////////////////////////////////////////////////////
// Resource //
////////////////////////////////////////////////////////////////////////////
/// Graph whose node and edge weights are Erlang terms saved into an owned
/// environment, so they can outlive the NIF call that created them.
#[derive(Default)]
struct TermGraph {
    env: OwnedEnv,
    graph: Graph<SavedTerm, SavedTerm>,
}

/// Resource handed to the BEAM; the mutex serializes access from schedulers.
struct GraphResource(Mutex<TermGraph>);

/// Shorthand for the reference-counted resource handle every NIF receives.
type Rsc = ResourceArc<GraphResource>;
////////////////////////////////////////////////////////////////////////////
// NIFs //
////////////////////////////////////////////////////////////////////////////
#[rustler::nif]
fn new() -> Rsc {
    // Allocate an empty term graph and hand ownership to the BEAM.
    let graph = TermGraph::default();
    ResourceArc::new(GraphResource(Mutex::new(graph)))
}
#[rustler::nif]
fn node_count(rsc: Rsc) -> usize {
    // Read-only peek; the lock is released when the guard drops on return.
    let guard = rsc.0.lock().unwrap();
    guard.graph.node_count()
}
#[rustler::nif]
fn add_node(rsc: Rsc, term: Term<'_>) -> usize {
    let mut guard = rsc.0.lock().unwrap();
    // Save the term into the graph's owned env so it outlives this call.
    let saved = guard.env.save(term);
    guard.graph.add_node(saved).index()
}
#[rustler::nif]
fn add_edge(rsc: Rsc, a: usize, b: usize, term: Term<'_>) -> usize {
    let mut guard = rsc.0.lock().unwrap();
    let saved = guard.env.save(term);
    // Directed edge a -> b carrying the saved term as its weight.
    let (from, to) = (NodeIndex::new(a), NodeIndex::new(b));
    guard.graph.add_edge(from, to, saved).index()
}
#[rustler::nif]
fn get_node(env: Env<'_>, rsc: Rsc, idx: usize) -> Term<'_> {
    let guard = rsc.0.lock().unwrap();
    // Copy the saved term back into the caller's env, or report a bad index.
    match guard.graph.node_weight(NodeIndex::new(idx)) {
        Some(term) => guard.env.run(|e| term.load(e).in_env(env)),
        None => (atom::badindex(), idx).encode(env),
    }
}
#[rustler::nif]
fn get_edge(env: Env<'_>, rsc: Rsc, idx: usize) -> Term<'_> {
    let guard = rsc.0.lock().unwrap();
    // Same shape as `get_node`, but over edge weights.
    match guard.graph.edge_weight(EdgeIndex::new(idx)) {
        Some(term) => guard.env.run(|e| term.load(e).in_env(env)),
        None => (atom::badindex(), idx).encode(env),
    }
}
#[rustler::nif]
fn remove_node(env: Env<'_>, rsc: Rsc, idx: usize) -> Option<Term<'_>> {
    let mut guard = rsc.0.lock().unwrap();
    // `None` when the index does not name a live node.
    let removed = guard.graph.remove_node(NodeIndex::new(idx))?;
    Some(guard.env.run(|e| removed.load(e).in_env(env)))
}
#[rustler::nif]
fn remove_edge(env: Env<'_>, rsc: Rsc, idx: usize) -> Option<Term<'_>> {
    let mut guard = rsc.0.lock().unwrap();
    // `None` when the index does not name a live edge.
    let removed = guard.graph.remove_edge(EdgeIndex::new(idx))?;
    Some(guard.env.run(|e| removed.load(e).in_env(env)))
}
#[rustler::nif]
fn find_edge(rsc: Rsc, a: usize, b: usize) -> Option<usize> {
    let guard = rsc.0.lock().unwrap();
    // Index of an edge connecting a -> b, if one exists.
    let found = guard.graph.find_edge(NodeIndex::new(a), NodeIndex::new(b));
    found.map(|edge| edge.index())
}
////////////////////////////////////////////////////////////////////////////
// Init //
////////////////////////////////////////////////////////////////////////////
// Register the NIF module "graph" and its exported functions; `on_load`
// runs once when the BEAM loads the shared library.
rustler::init!(
    "graph",
    [
        new,
        node_count,
        add_node,
        add_edge,
        get_node,
        get_edge,
        remove_node,
        remove_edge,
        find_edge
    ],
    load = on_load
);
/// Module load hook: registers the resource type so `ResourceArc<GraphResource>`
/// values can cross the NIF boundary. Returning `true` signals success.
fn on_load<'a>(env: Env<'a>, _term: rustler::Term<'a>) -> bool {
    rustler::resource!(GraphResource, env);
    true
}
| true |
56654a1417ac597420fd53b49dda952d2fbd9c1e
|
Rust
|
CaspianA1/term-graph
|
/src/lib.rs
|
UTF-8
| 3,290 | 2.953125 | 3 |
[] |
no_license
|
use pancurses::{initscr, endwin, noecho};
use line_drawing::Bresenham;
use std::vec::Vec;
/// Wraps the root curses window together with its cached dimensions.
pub struct Screen {
    max_y: i32,
    max_x: i32,
    // Kept alive so the curses session persists; not read after init.
    _w: pancurses::Window
}
impl Screen {
    /// End the curses session; consuming `self` prevents further use.
    pub fn deinit(self) {
        endwin();
    }
}
/// Which sixth of the terminal a plot panel occupies (2 rows x 3 columns).
pub enum GraphPlacement {
    _TopLeft, _TopMiddle, _TopRight,
    _BottomLeft, _BottomMiddle, _BottomRight
}
/// Start curses, enable keypad input, disable echo, and capture the
/// terminal dimensions.
pub fn init_screen() -> Screen {
    let window = initscr();
    window.keypad(true);
    noecho();
    let (max_y, max_x) = (window.get_max_y(), window.get_max_x());
    Screen { max_y, max_x, _w: window }
}
/// Create a sub-window covering one sixth of the screen at `plot_pos`.
pub fn make_plot_area(screen: &Screen, plot_pos: GraphPlacement) -> pancurses::Window {
    let third_x = screen.max_x / 3;
    let half_y = screen.max_y / 2;
    // (y, x) of the panel's top-left corner.
    let (origin_y, origin_x) = match plot_pos {
        GraphPlacement::_TopLeft => (0, 0),
        GraphPlacement::_TopMiddle => (0, third_x),
        GraphPlacement::_TopRight => (0, 2 * third_x),
        GraphPlacement::_BottomLeft => (half_y, 0),
        GraphPlacement::_BottomMiddle => (half_y, third_x),
        GraphPlacement::_BottomRight => (half_y, 2 * third_x)
    };
    pancurses::newwin(origin_y + half_y, origin_x + third_x, origin_y, origin_x)
}
/// Draw the panel border plus the centered horizontal/vertical axes.
fn draw_border_and_crosshatch(sub_win: &pancurses::Window, panel_height: i32, panel_width: i32) {
    for row in 0..panel_height {
        for col in 0..panel_width {
            // Precedence: top/bottom border, side border, then the two axes.
            let glyph = if row == 0 || row == panel_height - 1 {
                Some("-")
            } else if col == 0 || col == panel_width - 1 {
                Some("'")
            } else if row == panel_height / 2 {
                Some("-")
            } else if col == panel_width / 2 {
                Some("'")
            } else {
                None
            };
            if let Some(glyph) = glyph {
                sub_win.mvprintw(row, col, glyph);
            }
        }
    }
}
/// Connect successive points with Bresenham lines, choosing a glyph that
/// roughly matches each segment's slope.
fn plot_points(sub_win: &pancurses::Window, point_slice: Vec<[i32; 2]>) {
    // Guard: the original indexed `point_slice[0]` and panicked on empty input.
    if point_slice.is_empty() {
        return;
    }
    let mut t_previous = (point_slice[0][0], point_slice[0][1]);
    let mut a_previous = point_slice[0];
    for a_current in point_slice.iter() {
        let t_current = (a_current[0], a_current[1]);
        // Screen y grows downward, so rise is previous minus current.
        let rise = a_previous[1] - a_current[1];
        let run = a_current[0] - a_previous[0];
        let slope_segment = if rise == 0 || run == 0 {
            "_"
        } else {
            // Use floating-point division: the original integer division
            // truncated every shallow slope to 0, so the 0.3 threshold could
            // never fire and those segments were printed as empty strings
            // (leaving gaps in the plot).
            let slope = rise as f32 / run as f32;
            if slope.abs() >= 10.0 {
                "|"
            } else if slope.abs() > 0.3 {
                if slope > 0.0 { "/" } else { "\\" }
            } else {
                // Near-flat but nonzero slope: draw it as horizontal.
                "_"
            }
        };
        for (x, y) in Bresenham::new(t_previous, t_current) {
            sub_win.mvprintw(y, x, slope_segment);
        }
        t_previous = t_current;
        a_previous = *a_current;
    }
}
/// Render `function` inside `sub_win`: draw the frame and axes, sample the
/// function across the panel's width, plot it, refresh, and wait for a key.
pub fn draw_function<F: Fn(f32) -> f32>(sub_win: pancurses::Window, function: F) {
    let (begin_y, begin_x) = sub_win.get_beg_yx();
    let (end_y, end_x) = sub_win.get_max_yx();
    let height = end_y - begin_y;
    let width = end_x - begin_x;
    draw_border_and_crosshatch(&sub_win, height, width);

    // Half the panel width, rounded up (width - width/2).
    let half_width = width - width / 2;
    // Offsets that move the origin to the panel's center.
    let y_offset = height / 2;
    let x_offset = width / 2;

    let mut points = Vec::new(); // [x, y] pairs in window coordinates
    for gx in -half_width..half_width {
        // Screen y is inverted, so subtract the function value.
        points.push([x_offset + gx, y_offset - function(gx as f32) as i32]);
    }
    plot_points(&sub_win, points);
    sub_win.refresh();
    sub_win.getch();
}
| true |
55d2a2afd8fcdeab02247d406f244a1b8d243b49
|
Rust
|
slazicoicr/noodles
|
/noodles-cram/src/writer/record/tag.rs
|
UTF-8
| 2,716 | 2.65625 | 3 |
[
"MIT"
] |
permissive
|
use std::{
ffi::CString,
io::{self, Write},
};
use byteorder::{LittleEndian, WriteBytesExt};
use noodles_bam::record::data::field::Value;
/// Serialize a single BAM data-field value in the tag encoding used here:
/// multi-byte integers and floats are little-endian, strings/hex are written
/// NUL-terminated, and arrays are prefixed by their element count as a `u32`.
///
/// # Errors
///
/// Returns `InvalidInput` when a string value contains an interior NUL byte,
/// and propagates any I/O error from `writer`.
pub fn write_value<W>(writer: &mut W, value: &Value) -> io::Result<()>
where
    W: Write,
{
    // When the value reports a subtype (presumably only for the *Array
    // variants — TODO confirm in noodles-bam), its character code is
    // written before the payload.
    if let Some(subtype) = value.subtype() {
        writer.write_u8(char::from(subtype) as u8)?;
    }
    match value {
        // Scalars: raw little-endian encodings of the inner value.
        Value::Char(c) => writer.write_u8(*c as u8),
        Value::Int8(n) => writer.write_i8(*n),
        Value::UInt8(n) => writer.write_u8(*n),
        Value::Int16(n) => writer.write_i16::<LittleEndian>(*n),
        Value::UInt16(n) => writer.write_u16::<LittleEndian>(*n),
        Value::Int32(n) => writer.write_i32::<LittleEndian>(*n),
        Value::UInt32(n) => writer.write_u32::<LittleEndian>(*n),
        Value::Float(n) => writer.write_f32::<LittleEndian>(*n),
        // Strings and hex share a representation: bytes plus trailing NUL.
        Value::String(s) | Value::Hex(s) => {
            let c_str = CString::new(s.as_bytes())
                .map_err(|e| io::Error::new(io::ErrorKind::InvalidInput, e))?;
            writer.write_all(c_str.as_bytes_with_nul())
        }
        // Arrays: u32 element count, then each element little-endian.
        Value::Int8Array(values) => {
            writer.write_u32::<LittleEndian>(values.len() as u32)?;
            for &n in values {
                writer.write_i8(n)?;
            }
            Ok(())
        }
        Value::UInt8Array(values) => {
            writer.write_u32::<LittleEndian>(values.len() as u32)?;
            for &n in values {
                writer.write_u8(n)?;
            }
            Ok(())
        }
        Value::Int16Array(values) => {
            writer.write_u32::<LittleEndian>(values.len() as u32)?;
            for &n in values {
                writer.write_i16::<LittleEndian>(n)?;
            }
            Ok(())
        }
        Value::UInt16Array(values) => {
            writer.write_u32::<LittleEndian>(values.len() as u32)?;
            for &n in values {
                writer.write_u16::<LittleEndian>(n)?;
            }
            Ok(())
        }
        Value::Int32Array(values) => {
            writer.write_u32::<LittleEndian>(values.len() as u32)?;
            for &n in values {
                writer.write_i32::<LittleEndian>(n)?;
            }
            Ok(())
        }
        Value::UInt32Array(values) => {
            writer.write_u32::<LittleEndian>(values.len() as u32)?;
            for &n in values {
                writer.write_u32::<LittleEndian>(n)?;
            }
            Ok(())
        }
        Value::FloatArray(values) => {
            writer.write_u32::<LittleEndian>(values.len() as u32)?;
            for &n in values {
                writer.write_f32::<LittleEndian>(n)?;
            }
            Ok(())
        }
    }
}
| true |
af3c8a8948951326d045cdcd08c13ca8175664f5
|
Rust
|
microrack/coresynth
|
/fw/rust_lib/src/os/linux_os/mutex.rs
|
UTF-8
| 440 | 2.984375 | 3 |
[] |
no_license
|
use std::sync::Mutex as StdMutex;
pub use std::sync::{LockResult, MutexGuard};
/// Thin wrapper over `std::sync::Mutex` presenting the OS-abstraction
/// mutex API used by the firmware's other backends.
pub struct Mutex<T> {
    inner: StdMutex<T>,
}

impl<T> Mutex<T> {
    // TODO use ! for error type when exhaustive patterns is available. See #35121
    /// Construct a mutex guarding `t`. Creation cannot actually fail on the
    /// std backend; the `Result` only mirrors the common interface.
    pub fn new(t: T) -> Result<Mutex<T>, ()> {
        let inner = StdMutex::new(t);
        Ok(Mutex { inner })
    }

    /// Block until the lock is acquired, forwarding std's poisoning result.
    pub fn lock(&self) -> LockResult<MutexGuard<T>> {
        self.inner.lock()
    }
}
| true |
7fc2dd33c8aab4b97d058ede9aeb0bb6e3cbb030
|
Rust
|
oblivious-file-sharing/netherite-algebra
|
/src/shacham_encryption/mod.rs
|
UTF-8
| 5,992 | 2.65625 | 3 |
[
"Apache-2.0"
] |
permissive
|
use ark_ec::ProjectiveCurve;
use ark_ff::PrimeField;
use ark_std::{marker::PhantomData, vec::Vec, UniformRand};
/// Public group generators shared by all keys produced under one setup.
#[derive(Clone)]
pub struct ShachamPublicParameters<G: ProjectiveCurve> {
    pub u: G,
    pub v: G,
    pub w: G,
}

/// Per-slot secret exponents; slot i decrypts ciphertext component e[i].
#[derive(Clone)]
pub struct ShachamSecretKey<G: ProjectiveCurve> {
    pub scalar_x: Vec<G::ScalarField>,
    pub scalar_y: Vec<G::ScalarField>,
    pub scalar_z: Vec<G::ScalarField>,
}

/// Public key: the parameters plus per-slot elements
/// y[i] = x_i*u + z_i*w and z[i] = y_i*v + z_i*w (see `key_generation`).
#[derive(Clone)]
pub struct ShachamPublicKey<G: ProjectiveCurve> {
    pub pp: ShachamPublicParameters<G>,
    pub y: Vec<G>,
    pub z: Vec<G>,
}

/// Ciphertext: three randomizer elements plus one masked element per
/// plaintext slot.
#[derive(Clone)]
pub struct ShachamCiphertext<G: ProjectiveCurve> {
    pub r1: G,
    pub r2: G,
    pub r3: G,
    pub e: Vec<G>,
}

/// Stateless namespace for the scheme's operations.
pub struct ShachamEncryption<G: ProjectiveCurve> {
    pub pairing_engine_phantom: PhantomData<G>,
}
impl<G: ProjectiveCurve> ShachamEncryption<G> {
    /// Sample the three public generators shared by all key pairs.
    pub fn setup<R: ark_std::rand::Rng>(rng: &mut R) -> ShachamPublicParameters<G> {
        let u: G = G::rand(rng);
        let v: G = G::rand(rng);
        let w: G = G::rand(rng);
        ShachamPublicParameters::<G> { u, v, w }
    }

    /// Sample a key pair able to encrypt vectors of up to `len` group
    /// elements. For each slot i:
    /// y[i] = x_i*u + z_i*w and z[i] = y_i*v + z_i*w.
    pub fn key_generation<R: ark_std::rand::Rng>(
        pp: &ShachamPublicParameters<G>,
        len: usize,
        rng: &mut R,
    ) -> (ShachamSecretKey<G>, ShachamPublicKey<G>) {
        let mut scalar_x = Vec::<G::ScalarField>::new();
        let mut scalar_y = Vec::<G::ScalarField>::new();
        let mut scalar_z = Vec::<G::ScalarField>::new();
        for _ in 0..len {
            scalar_x.push(G::ScalarField::rand(rng));
            scalar_y.push(G::ScalarField::rand(rng));
            scalar_z.push(G::ScalarField::rand(rng));
        }
        let mut y = Vec::<G>::new();
        let mut z = Vec::<G>::new();
        for i in 0..len {
            y.push(pp.u.mul(&scalar_x[i].into_repr()) + pp.w.mul(&scalar_z[i].into_repr()));
            z.push(pp.v.mul(&scalar_y[i].into_repr()) + pp.w.mul(&scalar_z[i].into_repr()));
        }
        let sk = ShachamSecretKey::<G> {
            scalar_x,
            scalar_y,
            scalar_z,
        };
        let pk = ShachamPublicKey::<G> {
            pp: (*pp).clone(),
            y,
            z,
        };
        (sk, pk)
    }

    /// Encrypt `plaintext` slot-wise with fresh randomness (a, b):
    /// r1 = a*u, r2 = b*v, r3 = (a+b)*w, e[i] = m[i] + a*y[i] + b*z[i].
    ///
    /// Panics if `plaintext` is longer than the key supports.
    pub fn encrypt<R: ark_std::rand::Rng>(
        pk: &ShachamPublicKey<G>,
        plaintext: &Vec<G>,
        rng: &mut R,
    ) -> ShachamCiphertext<G> {
        assert!(plaintext.len() <= pk.y.len());
        let len = plaintext.len();
        let a = G::ScalarField::rand(rng);
        let b = G::ScalarField::rand(rng);
        let r1 = pk.pp.u.mul(&a.into_repr());
        let r2 = pk.pp.v.mul(&b.into_repr());
        let r3 = pk.pp.w.mul(&(a + b).into_repr());
        let mut e = Vec::<G>::new();
        for i in 0..len {
            e.push(plaintext[i] + pk.y[i].mul(&a.into_repr()) + pk.z[i].mul(&b.into_repr()));
        }
        ShachamCiphertext::<G> { r1, r2, r3, e }
    }

    /// Undo the masking: m[i] = e[i] - x_i*r1 - y_i*r2 - z_i*r3.
    /// The output length equals the secret key's slot count.
    pub fn decrypt(sk: &ShachamSecretKey<G>, ciphertext: &ShachamCiphertext<G>) -> Vec<G> {
        let mut plaintext = Vec::new();
        let len = sk.scalar_x.len();
        for i in 0..len {
            plaintext.push(
                ciphertext.e[i]
                    - ciphertext.r1.mul(&sk.scalar_x[i].into_repr())
                    - ciphertext.r2.mul(&sk.scalar_y[i].into_repr())
                    - ciphertext.r3.mul(&sk.scalar_z[i].into_repr()),
            );
        }
        plaintext
    }

    /// Re-randomize a ciphertext with fresh randomness (a', b') without
    /// changing the underlying plaintext: the same mask structure is added
    /// on top of each component.
    pub fn rerand<R: ark_std::rand::Rng>(
        pk: &ShachamPublicKey<G>,
        ciphertext: &ShachamCiphertext<G>,
        rng: &mut R,
    ) -> ShachamCiphertext<G> {
        let len = ciphertext.e.len();
        let a_new = G::ScalarField::rand(rng);
        let b_new = G::ScalarField::rand(rng);
        let r1_new = ciphertext.r1 + pk.pp.u.mul(&a_new.into_repr());
        let r2_new = ciphertext.r2 + pk.pp.v.mul(&b_new.into_repr());
        let r3_new = ciphertext.r3 + pk.pp.w.mul(&(a_new + b_new).into_repr());
        let mut e_new = Vec::<G>::new();
        for i in 0..len {
            e_new.push(
                ciphertext.e[i] + pk.y[i].mul(&a_new.into_repr()) + pk.z[i].mul(&b_new.into_repr()),
            );
        }
        ShachamCiphertext::<G> {
            r1: r1_new,
            r2: r2_new,
            r3: r3_new,
            e: e_new,
        }
    }
}
#[cfg(test)]
mod test {
    use crate::shacham_encryption::ShachamEncryption;
    use ark_bls12_381::G1Projective;
    use ark_std::UniformRand;

    // Round-trip: decrypt(encrypt(pt)) must reproduce pt slot by slot.
    #[test]
    fn test_encrypt_decrypt() {
        let mut rng = ark_std::test_rng();
        let len = 10;
        let mut pt = Vec::new();
        for _ in 0..len {
            pt.push(G1Projective::rand(&mut rng));
        }
        let pp = ShachamEncryption::<G1Projective>::setup(&mut rng);
        let (sk, pk) = ShachamEncryption::<G1Projective>::key_generation(&pp, len, &mut rng);
        let ct = ShachamEncryption::encrypt(&pk, &pt, &mut rng);
        let pt_recovered = ShachamEncryption::decrypt(&sk, &ct);
        for i in 0..len {
            assert!(
                pt[i].eq(&pt_recovered[i]),
                "Decrypted results do not match the plaintexts."
            );
        }
    }

    // Re-randomized ciphertexts must still decrypt to the same plaintexts.
    #[test]
    fn test_rerandomization() {
        let mut rng = ark_std::test_rng();
        let len = 10;
        let mut pt = Vec::new();
        for _ in 0..len {
            pt.push(G1Projective::rand(&mut rng));
        }
        let pp = ShachamEncryption::<G1Projective>::setup(&mut rng);
        let (sk, pk) = ShachamEncryption::<G1Projective>::key_generation(&pp, len, &mut rng);
        let ct = ShachamEncryption::encrypt(&pk, &pt, &mut rng);
        let ct_rerand = ShachamEncryption::rerand(&pk, &ct, &mut rng);
        let pt_recovered = ShachamEncryption::decrypt(&sk, &ct_rerand);
        for i in 0..len {
            assert!(
                pt[i].eq(&pt_recovered[i]),
                "Decrypted results of rerandomized ciphertexts do not match the plaintexts."
            );
        }
    }
}
| true |
247584924d988ec0adaa3eeadc733802f450946c
|
Rust
|
bahelms/advent_of_code_2020
|
/src/day16.rs
|
UTF-8
| 8,118 | 2.96875 | 3 |
[] |
no_license
|
use regex::Regex;
use std::{
collections::{HashMap, HashSet},
fs,
ops::Range,
};
/// A ticket-field rule: the field name plus its two valid value ranges.
struct Rule {
    name: String,
    // Half-open ranges; `extract_rules` adds 1 to the parsed (inclusive)
    // upper bounds when building them.
    first_range: Range<i32>,
    second_range: Range<i32>,
}
/// Entry point for day 16: runs both puzzle parts.
pub fn execute() {
    part_one();
    part_two();
}
/// Part A: sum of nearby-ticket values that match no rule at all.
fn part_one() {
    let notes = get_notes();
    let rules = extract_rules(&notes[0]);
    let nearby_tickets = extract_tickets(&notes[2]);
    print!(
        "Day 16 - A: {:?}",
        scanning_error_rate(&rules, &nearby_tickets)
    );
}
/// Part B: deduce which column each rule describes, then multiply my
/// ticket's values for every "departure" field.
fn part_two() {
    let notes = get_notes();
    let rules = extract_rules(&notes[0])                                                   ;
    let my_ticket = extract_my_ticket(&notes[1]);
    let nearby_tickets = extract_tickets(&notes[2]);
    // Discard invalid tickets, then flip rows into per-column value lists.
    let prepared_tickets = transpose_tickets(&remove_invalid_tickets(&rules, &nearby_tickets));
    let map = map_rules_to_columns(&rules, prepared_tickets);
    let rule_cols = reduce_to_unique(map);
    let answer: i64 = rule_cols
        .iter()
        .filter(|(rule, _)| rule.starts_with("departure"))
        .map(|(_, col)| my_ticket[*col as usize] as i64)
        .fold(1, |product, n| product * n);
    println!(" - B: {:?}", answer);
}
/// Keeps only the tickets whose every field value fits at least one rule
/// range; tickets containing any unmatched value are dropped.
fn remove_invalid_tickets(rules: &Vec<Rule>, tickets: &Vec<Vec<i32>>) -> Vec<Vec<i32>> {
    let allowed = valid_numbers(rules);
    tickets
        .iter()
        .filter(|ticket| ticket.iter().all(|num| allowed.contains(num)))
        .cloned()
        .collect()
}
/// Transposes tickets (rows of field values) into per-column value lists.
///
/// Fixed: the original indexed `tickets[0]` unconditionally and panicked
/// on an empty ticket list; an empty input now yields an empty result.
fn transpose_tickets(tickets: &Vec<Vec<i32>>) -> Vec<Vec<i32>> {
    let width = match tickets.first() {
        Some(first) => first.len(),
        None => return Vec::new(),
    };
    // One output vector per column, pre-sized for one entry per ticket.
    let mut transposed = vec![Vec::with_capacity(tickets.len()); width];
    for ticket in tickets {
        for (i, &num) in ticket.iter().enumerate() {
            transposed[i].push(num);
        }
    }
    transposed
}
/// Parses the "your ticket" section: skip the `...:` header line, then
/// read the comma-separated field values.
fn extract_my_ticket(ticket_string: &str) -> Vec<i32> {
    let values = ticket_string.trim().split(":\n").nth(1).unwrap();
    values
        .split(',')
        .map(|num| num.parse().unwrap())
        .collect()
}
/// Parses the rules section; each line has the form `name: a-b or c-d`.
/// The captured inclusive bounds become half-open `Range`s by adding 1 to
/// each upper bound.
fn extract_rules(rules_string: &str) -> Vec<Rule> {
    let rule_regex = Regex::new(r"(.+): (\d+)-(\d+) or (\d+)-(\d+)").unwrap();
    rules_string
        .split("\n")
        .map(|line| {
            // Panics on a malformed line (puzzle input is trusted).
            let matches = rule_regex.captures(&line).unwrap();
            Rule {
                name: matches.get(1).unwrap().as_str().to_string(),
                first_range: matches.get(2).unwrap().as_str().parse().unwrap()
                    ..matches.get(3).unwrap().as_str().parse::<i32>().unwrap() + 1,
                second_range: matches.get(4).unwrap().as_str().parse().unwrap()
                    ..matches.get(5).unwrap().as_str().parse::<i32>().unwrap() + 1,
            }
        })
        .collect()
}
/// Parses a ticket section: drop the header line, then one comma-separated
/// ticket per line.
fn extract_tickets(tickets_string: &str) -> Vec<Vec<i32>> {
    tickets_string
        .trim()
        .split('\n')
        .skip(1) // header line, e.g. "nearby tickets:"
        .map(|line| line.split(',').map(|num| num.parse().unwrap()).collect())
        .collect()
}
/// Sums ticket values that fit no rule range at all (the part-A "scanning
/// error rate"). Note: only the first invalid value of each ticket is
/// counted, because of the `break`.
fn scanning_error_rate(rules: &Vec<Rule>, tickets: &Vec<Vec<i32>>) -> i32 {
    let valid_numbers = valid_numbers(&rules);
    let mut rate = 0;
    for ticket in tickets {
        for num in ticket {
            if !valid_numbers.contains(num) {
                rate += num;
                break;
            }
        }
    }
    rate
}
/// Collects every integer allowed by at least one range of any rule.
fn valid_numbers(rules: &Vec<Rule>) -> HashSet<i32> {
    let mut nums = HashSet::new();
    for rule in rules {
        // `Range<i32>` is itself an iterator, so it can be fed to `extend`.
        nums.extend(rule.first_range.clone());
        nums.extend(rule.second_range.clone());
    }
    nums
}
/// For each rule, records the indices of every value-column whose values
/// all satisfy the rule — i.e. the candidate columns for that rule.
/// `values` must already be transposed (one inner vec per column).
fn map_rules_to_columns(rules: &Vec<Rule>, values: Vec<Vec<i32>>) -> HashMap<String, HashSet<i32>> {
    let mut map = HashMap::new();
    for rule in rules {
        for (i, column) in values.iter().enumerate() {
            // Does every value in this column fit one of the rule's ranges?
            let mut not_in_range = false;
            for num in column {
                if !rule.first_range.contains(num) && !rule.second_range.contains(num) {
                    not_in_range = true;
                    break;
                }
            }
            if !not_in_range {
                map.entry(rule.name.to_string())
                    .or_insert(HashSet::new())
                    .insert(i as i32);
            }
        }
    }
    map
}
/// Resolves each rule to a single column by iterated elimination: on every
/// pass, a rule whose candidate set (minus already-claimed columns) has
/// exactly one member claims that column; repeat until all columns are
/// claimed. Assumes the input admits a unique assignment.
fn reduce_to_unique(map: HashMap<String, HashSet<i32>>) -> HashMap<String, i32> {
    let mut claimed: HashSet<i32> = HashSet::new();
    let mut assignments: HashMap<String, i32> = HashMap::new();
    while claimed.len() != map.keys().len() {
        for (rule, cols) in map.iter() {
            let remaining: Vec<i32> = cols.difference(&claimed).cloned().collect();
            if cols.len() == 1 {
                // A natural singleton always claims its only column.
                let col = *cols.iter().next().unwrap();
                assignments.insert(rule.to_string(), col);
                claimed.insert(col);
            } else if remaining.len() == 1 {
                // Exactly one unclaimed candidate left for this rule.
                assignments.insert(rule.to_string(), remaining[0]);
                claimed.insert(remaining[0]);
            }
        }
    }
    assignments
}
/// Reads the puzzle input and splits it into its blank-line-separated
/// sections: rules, my ticket, nearby tickets. Panics if the file is
/// missing or unreadable.
fn get_notes() -> Vec<String> {
    fs::read_to_string("data/day16.txt")
        .unwrap()
        .split("\n\n")
        .map(String::from)
        .collect()
}
// Unit tests for the pure helper functions (parsing/deduction logic).
#[cfg(test)]
mod tests {
    use super::Rule;
    use std::collections::{HashMap, HashSet};
    // Candidate sets reduce to a unique rule->column assignment.
    #[test]
    fn reduce_to_unique_works() {
        let mut map = HashMap::new();
        map.insert(
            "A".to_string(),
            [1, 2].iter().cloned().collect::<HashSet<i32>>(),
        );
        map.insert(
            "B".to_string(),
            [0, 1, 2].iter().cloned().collect::<HashSet<i32>>(),
        );
        map.insert(
            "C".to_string(),
            [2].iter().cloned().collect::<HashSet<i32>>(),
        );
        let unique_map = super::reduce_to_unique(map);
        assert_eq!(unique_map.get("A").unwrap(), &1);
        assert_eq!(unique_map.get("B").unwrap(), &0);
        assert_eq!(unique_map.get("C").unwrap(), &2);
    }
    // A rule becomes a candidate for every column whose values all fit it.
    #[test]
    fn map_rules_to_columns_works() {
        let rules = vec![
            Rule {
                name: "A".to_string(),
                first_range: 0..2,
                second_range: 4..20,
            },
            Rule {
                name: "B".to_string(),
                first_range: 0..6,
                second_range: 8..20,
            },
            Rule {
                name: "C".to_string(),
                first_range: 0..14,
                second_range: 16..20,
            },
        ];
        let tickets = vec![vec![3, 15, 5], vec![9, 1, 14], vec![18, 5, 9]];
        let map = super::map_rules_to_columns(&rules, tickets);
        assert_eq!(
            map.get("A").unwrap(),
            &[1, 2].iter().cloned().collect::<HashSet<i32>>()
        );
        assert_eq!(
            map.get("B").unwrap(),
            &[0, 1, 2].iter().cloned().collect::<HashSet<i32>>()
        );
        assert_eq!(
            map.get("C").unwrap(),
            &[2].iter().cloned().collect::<HashSet<i32>>()
        );
    }
    // Sample from the puzzle statement: invalid values 4 + 55 + 12 = 71.
    #[test]
    fn scanning_error_rate_works() {
        let rules = vec![
            Rule {
                name: "A".to_string(),
                first_range: 1..4,
                second_range: 5..8,
            },
            Rule {
                name: "B".to_string(),
                first_range: 6..12,
                second_range: 33..44,
            },
            Rule {
                name: "C".to_string(),
                first_range: 13..41,
                second_range: 45..51,
            },
        ];
        let tickets = vec![
            vec![7, 3, 47],
            vec![40, 4, 50],
            vec![55, 2, 20],
            vec![38, 6, 12],
        ];
        let rate = super::scanning_error_rate(&rules, &tickets);
        assert_eq!(rate, 71);
    }
}
| true |
d62ca575c4fb01851660479ce9c3f567fb3bb344
|
Rust
|
risooonho/dotrix
|
/dotrix_core/src/renderer/overlay.rs
|
UTF-8
| 2,929 | 3 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
use crate::{
ecs::{ Const, Mut },
renderer::{ Widget },
services::{
Assets,
Input,
Renderer,
},
};
use std::any::Any;
/// A UI layer rendered on top of the scene; the concrete widget logic
/// lives behind the boxed [`Provider`].
pub struct Overlay {
    pub provider: Box<dyn Provider>,
}
impl Overlay {
    /// Wraps a provider implementation in an overlay.
    pub fn new(provider: Box<dyn Provider>) -> Self {
        Self {
            provider
        }
    }
    /// Downcasts the provider to its concrete type `T`, if it is one.
    pub fn provider<T: 'static + Send + Sync>(&self) -> Option<&T> {
        self.provider.downcast_ref::<T>()
    }
    /// Mutable variant of [`Overlay::provider`].
    pub fn provider_mut<T: 'static + Send + Sync>(&mut self) -> Option<&mut T> {
        self.provider.downcast_mut::<T>()
    }
    /// Forwards per-frame state (assets, input, surface size) to the
    /// provider so it can update its widgets.
    pub fn update(
        &mut self,
        assets: &mut Assets,
        input: &Input,
        scale_factor: f32,
        surface_width: f32,
        surface_height: f32,
    ) {
        self.provider.feed(assets, input, scale_factor, surface_width, surface_height);
    }
    /// Asks the provider to tessellate its current UI into drawable widgets.
    pub fn widgets(
        &self,
        scale_factor: f32,
        surface_width: f32,
        surface_height: f32,
    ) -> Vec<Widget> {
        self.provider.tessellate(scale_factor, surface_width, surface_height)
    }
}
/// Implemented by UI backends plugged into an [`Overlay`]: `feed` consumes
/// per-frame state, `tessellate` produces the widgets to draw.
/// The `Any` supertrait enables the downcasting implemented below.
pub trait Provider: Any + Send + Sync {
    /// Updates internal UI state from this frame's input and surface size.
    fn feed(
        &mut self,
        assets: &mut Assets,
        input: &Input,
        scale_factor: f32,
        surface_width: f32,
        surface_height: f32,
    );
    /// Produces the widgets to render for the current UI state.
    fn tessellate(
        &self,
        scale_factor: f32,
        surface_width: f32,
        surface_height: f32,
    ) -> Vec<Widget>;
}
// Hand-rolled downcasting for `dyn Provider`, mirroring `dyn Any`'s
// `downcast_ref`/`downcast_mut` (which are not directly usable through a
// custom trait object).
impl dyn Provider {
    /// Returns `Some(&T)` if the boxed provider's concrete type is `T`.
    #[inline]
    pub fn downcast_ref<T: Any>(&self) -> Option<&T> {
        if self.is::<T>() {
            // SAFETY: just checked whether we are pointing to the correct type, and we can rely on
            // that check for memory safety because we have implemented Any for all types; no other
            // impls can exist as they would conflict with our impl.
            unsafe { Some(&*(self as *const dyn Provider as *const T)) }
        } else {
            None
        }
    }
    /// Returns `Some(&mut T)` if the boxed provider's concrete type is `T`.
    #[inline]
    pub fn downcast_mut<T: Any>(&mut self) -> Option<&mut T> {
        if self.is::<T>() {
            // SAFETY: just checked whether we are pointing to the correct type, and we can rely on
            // that check for memory safety because we have implemented Any for all types; no other
            // impls can exist as they would conflict with our impl.
            unsafe { Some(&mut *(self as *mut dyn Provider as *mut T)) }
        } else {
            None
        }
    }
    /// True when the erased concrete type is exactly `T`.
    #[inline]
    fn is<T: Any>(&self) -> bool {
        std::any::TypeId::of::<T>() == self.type_id()
    }
}
/// Per-frame pass (run via the `ecs` scheduler, judging by the `Mut`/`Const`
/// wrappers): feeds every registered overlay the current input and display
/// size so providers can refresh their widgets.
pub fn overlay_update(
    mut assets: Mut<Assets>,
    input: Const<Input>,
    mut renderer: Mut<Renderer>
) {
    let (width, height) = renderer.display_size();
    let scale_factor = renderer.scale_factor();
    for overlay in &mut renderer.overlay {
        overlay.update(&mut assets, &input, scale_factor, width as f32, height as f32);
    }
}
| true |
24415595d7ebd71ca1b6689de526bd429a4e3383
|
Rust
|
AlexPl292/MIC-1
|
/src/alu.rs
|
UTF-8
| 8,192 | 2.75 | 3 |
[
"MIT"
] |
permissive
|
use crate::bus::Bus32;
use crate::decoders::decoder_2x4;
use crate::main_memory::{fast_decode, fast_encode};
/// Full adder for one bit: chains two half adders and ORs their carries.
/// Returns `(sum, carry_out)`.
fn adder(a: bool, b: bool, carry_in: bool) -> (bool, bool) {
    let (sum1, carry1) = half_adder(a, b);
    let (sum, carry2) = half_adder(sum1, carry_in);
    let carry_out = carry1 || carry2;
    (sum, carry_out)
}
/// Half adder: the sum is XOR of the inputs, the carry is their AND.
fn half_adder(a: bool, b: bool) -> (bool, bool) {
    let sum = a != b; // XOR expressed as inequality of booleans
    let carry = a & b; // non-short-circuit AND
    (sum, carry)
}
/// Control word for the 32-bit ALU (models the MIC-1 ALU control lines).
pub struct AluControl {
    // f1/f0 select the function through `decoder_2x4`; the four decoder
    // outputs gate AND, OR, NOT B and A+B respectively (see `alu_unit`).
    f0: bool,
    f1: bool,
    en_a: bool,  // gate the A operand into the ALU
    en_b: bool,  // gate the B operand into the ALU
    inv_a: bool, // invert the (gated) A signal
    inc: bool,   // initial carry-in: adds 1 to an addition
}
impl AluControl {
    /// Builds a control word from the six raw control bits, in the order
    /// `[f0, f1, en_a, en_b, inv_a, inc]`.
    pub fn from(code: [bool; 6]) -> AluControl {
        AluControl {
            f0: code[0],
            f1: code[1],
            en_a: code[2],
            en_b: code[3],
            inv_a: code[4],
            inc: code[5],
        }
    }
    /// All-lines-low control word; combine with the builder setters below.
    fn new() -> AluControl { AluControl { f0: false, f1: false, en_a: false, en_b: false, inv_a: false, inc: false } }
    // Builder-style setters, one per control line.
    fn f0(&mut self) -> &mut AluControl {
        self.f0 = true;
        self
    }
    fn f1(&mut self) -> &mut AluControl {
        self.f1 = true;
        self
    }
    fn ena(&mut self) -> &mut AluControl {
        self.en_a = true;
        self
    }
    fn enb(&mut self) -> &mut AluControl {
        self.en_b = true;
        self
    }
    fn inva(&mut self) -> &mut AluControl {
        self.inv_a = true;
        self
    }
    fn inc(&mut self) -> &mut AluControl {
        self.inc = true;
        self
    }
    // Preset control words; the operation each computes is pinned down by
    // the unit tests below.
    /// B - 1
    fn alu_b_dec() -> AluControl { AluControl { f0: true, f1: true, en_a: false, en_b: true, inv_a: true, inc: false } }
    /// B + 1
    fn alu_b_inc() -> AluControl { AluControl { f0: true, f1: true, en_a: false, en_b: true, inv_a: false, inc: true } }
    /// A + B
    fn alu_sum() -> AluControl { AluControl { f0: true, f1: true, en_a: true, en_b: true, inv_a: false, inc: false } }
    /// A + B + 1
    fn alu_sum_inc() -> AluControl { AluControl { f0: true, f1: true, en_a: true, en_b: true, inv_a: false, inc: true } }
    /// B - A  (B + NOT A + 1)
    fn alu_sub() -> AluControl { AluControl { f0: true, f1: true, en_a: true, en_b: true, inv_a: true, inc: true } }
    /// A AND B
    fn alu_and() -> AluControl { AluControl { f0: false, f1: false, en_a: true, en_b: true, inv_a: false, inc: false } }
    /// A OR B
    fn alu_or() -> AluControl { AluControl { f0: false, f1: true, en_a: true, en_b: true, inv_a: false, inc: false } }
    /// B (pass-through)
    fn alu_b() -> AluControl { AluControl { f0: false, f1: true, en_a: false, en_b: true, inv_a: false, inc: false } }
    /// A (pass-through)
    fn alu_a() -> AluControl { AluControl { f0: false, f1: true, en_a: true, en_b: false, inv_a: false, inc: false } }
}
/// One-bit ALU slice: gates/inverts the inputs, computes all four candidate
/// functions, and keeps only the one enabled by the f1/f0 decoder.
/// Returns `(result, carry_out)`; carry only propagates for the A+B function.
fn alu_unit(a: bool, b: bool, inv_a: bool, en_a: bool, en_b: bool, carry_in: bool, f0: bool, f1: bool) -> (bool, bool) {
    let a_enabled = a && en_a;
    let b_signal = b && en_b;
    let a_signal = a_enabled ^ inv_a;
    // Decode allow signals
    // f1 and f0 should be in this order because mic-1 uses different bit ordering
    let allowed = decoder_2x4(f1, f0);
    // Compute simple resultes
    let a_and_b_res = (a_signal && b_signal) && allowed[0];
    let a_or_b_res = (a_signal || b_signal) && allowed[1];
    let not_b_res = !b_signal && allowed[2];
    // A and B sum
    let (a_plus_b_res_temp, carry_temp) = adder(a_signal, b_signal, carry_in);
    let a_plus_b_res = a_plus_b_res_temp && allowed[3];
    let carry_out = carry_temp && allowed[3];
    // Final result
    let res = a_and_b_res || a_or_b_res || not_b_res || a_plus_b_res;
    (res, carry_out)
}
/// 32-bit ripple-carry ALU over two buses; `control.inc` seeds the carry
/// chain. Returns `(result, n, z)` where `n` is the sign bit (bit 31) and
/// `z` is set when every result bit is zero.
pub fn alu_32(a: Bus32, b: Bus32, control: AluControl) -> (Bus32, bool, bool) {
    let mut result = [false; 32];
    let mut carry = control.inc;
    for i in 0..32 {
        let (res, alu_carry) = alu_unit(a.data[i], b.data[i], control.inv_a, control.en_a, control.en_b, carry, control.f0, control.f1);
        result[i] = res;
        carry = alu_carry;
    }
    let n_bit = result[31];
    // Z flag: OR all bits together, then negate.
    let mut z_bit = false;
    for i in 0..32 {
        z_bit |= result[i];
    }
    z_bit = !z_bit;
    (Bus32::from(result), n_bit, z_bit)
}
/// Convenience wrapper for tests: runs the ALU on `i32` operands by
/// converting them to/from 32-bit buses.
fn alu_32_i(a: i32, b: i32, control: AluControl) -> (i32, bool, bool) {
    let a_bus = Bus32::from(fast_decode(a));
    let b_bus = Bus32::from(fast_decode(b));
    let (alu_res, n, z) = alu_32(a_bus, b_bus, control);
    return (fast_encode(&alu_res.data), n, z);
}
#[cfg(test)]
mod tests {
    use super::*;
    // Fixed-input checks for the preset control words; each asserts the
    // result plus the N (negative) and Z (zero) flags.
    #[test]
    fn decrement() {
        let (res, n, z) = alu_32_i(0, 1, AluControl::alu_b_dec());
        assert_eq!(0, res);
        assert_eq!(false, n);
        assert_eq!(true, z);
    }
    #[test]
    fn decrement_1() {
        let (res, n, z) = alu_32_i(0, 0, AluControl::alu_b_dec());
        assert_eq!(-1, res);
        assert_eq!(true, n);
        assert_eq!(false, z);
    }
    #[test]
    fn decrement_2() {
        let (res, n, z) = alu_32_i(0, 10, AluControl::alu_b_dec());
        assert_eq!(9, res);
        assert_eq!(false, n);
        assert_eq!(false, z);
    }
    #[test]
    fn increment() {
        let (res, n, z) = alu_32_i(0, 10, AluControl::alu_b_inc());
        assert_eq!(11, res);
        assert_eq!(false, n);
        assert_eq!(false, z);
    }
    #[test]
    fn increment_1() {
        let (res, n, z) = alu_32_i(0, -2, AluControl::alu_b_inc());
        assert_eq!(-1, res);
        assert_eq!(true, n);
        assert_eq!(false, z);
    }
    #[test]
    fn increment_2() {
        let (res, n, z) = alu_32_i(0, -1, AluControl::alu_b_inc());
        assert_eq!(0, res);
        assert_eq!(false, n);
        assert_eq!(true, z);
    }
    #[test]
    fn sum() {
        let (res, n, z) = alu_32_i(0, -1, AluControl::alu_sum());
        assert_eq!(-1, res);
        assert_eq!(true, n);
        assert_eq!(false, z);
    }
    #[test]
    fn sum_1() {
        let (res, n, z) = alu_32_i(1, 2, AluControl::alu_sum());
        assert_eq!(3, res);
        assert_eq!(false, n);
        assert_eq!(false, z);
    }
    #[test]
    fn sum_2() {
        let (res, n, z) = alu_32_i(0, 0, AluControl::alu_sum());
        assert_eq!(0, res);
        assert_eq!(false, n);
        assert_eq!(true, z);
    }
#[quickcheck]
fn quick_sum(a: i32, b: i32) {
let (res, n, z) = alu_32_i(a, b, AluControl::alu_sum());
let res = a + b;
assert_eq!(res, res);
assert_eq!(res < 0, n);
assert_eq!(res == 0, z);
}
#[quickcheck]
fn quick_dec(a: i32, b: i32) {
let (res, n, z) = alu_32_i(a, b, AluControl::alu_b_dec());
let res = b - 1;
assert_eq!(res, res);
assert_eq!(res < 0, n);
assert_eq!(res == 0, z);
}
#[quickcheck]
fn quick_inc(a: i32, b: i32) {
let (res, n, z) = alu_32_i(a, b, AluControl::alu_b_inc());
let res = b + 1;
assert_eq!(res, res);
assert_eq!(res < 0, n);
assert_eq!(res == 0, z);
}
#[quickcheck]
fn quick_sum_inc(a: i32, b: i32) {
let (res, n, z) = alu_32_i(a, b, AluControl::alu_sum_inc());
let res = a + b + 1;
assert_eq!(res, res);
assert_eq!(res < 0, n);
assert_eq!(res == 0, z);
}
#[quickcheck]
fn quick_sub(a: i32, b: i32) {
let (res, n, z) = alu_32_i(a, b, AluControl::alu_sub());
let res = b - a;
assert_eq!(res, res);
assert_eq!(res < 0, n);
assert_eq!(res == 0, z);
}
#[quickcheck]
fn quick_and(a: i32, b: i32) {
let (res, n, z) = alu_32_i(a, b, AluControl::alu_and());
let res = b & a;
assert_eq!(res, res);
assert_eq!(res < 0, n);
assert_eq!(res == 0, z);
}
#[quickcheck]
fn quick_or(a: i32, b: i32) {
let (res, n, z) = alu_32_i(a, b, AluControl::alu_or());
let res = b | a;
assert_eq!(res, res);
assert_eq!(res < 0, n);
assert_eq!(res == 0, z);
}
#[quickcheck]
fn quick_b(a: i32, b: i32) {
let (res, n, z) = alu_32_i(a, b, AluControl::alu_b());
let res = b;
assert_eq!(res, res);
assert_eq!(res < 0, n);
assert_eq!(res == 0, z);
}
#[quickcheck]
fn quick_a(a: i32, b: i32) {
let (res, n, z) = alu_32_i(a, b, AluControl::alu_a());
let res = a;
assert_eq!(res, res);
assert_eq!(res < 0, n);
assert_eq!(res == 0, z);
}
}
| true |
c72f771576604f99e0bc81d697f37bf5e699d399
|
Rust
|
Jordan-Rowland/rust-sandbox
|
/reader/src/csv_writer.rs
|
UTF-8
| 5,478 | 3.3125 | 3 |
[] |
no_license
|
use std::collections::HashMap;
use std::fs::File;
use std::io::prelude::*;
/// In-memory CSV table: column headers plus one map per row, optionally
/// bound to a backing file.
#[derive(Debug)]
pub struct Data {
    filename: String,     // backing file path; empty when not file-bound
    headers: Vec<String>, // column names (lower-cased when loaded via `open_file`)
    rows: Vec<Row>,
    pub rows_len: usize,  // cached `rows.len()`
}
/// Target for `write_csv`: reuse the stored filename or write elsewhere.
pub enum Filename {
    Existing,
    New(String),
}
/// A single record, keyed by header name.
type Row = HashMap<String, String>;
// New and initialization methods
impl Data {
    /// Creates an empty table bound to `filename` with the given headers.
    pub fn new(filename: String, headers: Vec<String>) -> Self {
        Self {
            filename,
            headers,
            rows: Vec::new(),
            rows_len: 0,
        }
    }
    /// Builds an unbound (no filename) table from existing rows, deriving
    /// the headers from the first row's keys.
    ///
    /// Panics if `rows` is empty (same contract as the original `rows[0]`).
    ///
    /// Fixed: the original copied every row twice (`to_owned` into a
    /// scratch vector, then a `.clone()` of that vector); the rows are now
    /// moved into the struct without any copying.
    pub fn from_rows(rows: Vec<Row>) -> Self {
        let headers: Vec<String> = rows[0].keys().cloned().collect();
        let rows_len = rows.len();
        Self {
            filename: String::new(),
            headers,
            rows,
            rows_len,
        }
    }
}
// Getters
impl Data {
    /// Borrow the ordered list of column headers.
    pub fn get_headers(&self) -> &Vec<String> {
        &self.headers
    }
    /// Borrow all rows.
    pub fn get_rows(&self) -> &Vec<Row> {
        &self.rows
    }
    /// The backing file path; empty when the table is not file-bound.
    pub fn get_filename(&self) -> &str {
        &self.filename
    }
    /// Returns every value stored under `column`, in row order, or `None`
    /// when the header does not exist. Panics if a row is missing the key
    /// (same contract as the original `unwrap`).
    pub fn get_column(&self, column: &str) -> Option<Vec<String>> {
        if !self.headers.contains(&column.to_string()) {
            return None;
        }
        Some(
            self.rows
                .iter()
                .map(|row| row.get(column).unwrap().clone())
                .collect(),
        )
    }
    /// Returns clones of the rows whose `column` value contains `value`
    /// (both compared lower-cased).
    ///
    /// Fixed: the original cloned the *entire* row set before filtering and
    /// re-lower-cased both strings once per row; now only matching rows are
    /// cloned and the lower-casing is hoisted out of the loop.
    pub fn find_rows_by_column(&self, column: String, value: String) -> Vec<Row> {
        let key = column.to_lowercase();
        let needle = value.to_lowercase();
        self.rows
            .iter()
            .filter(|row| row.get(&key).unwrap().contains(&needle))
            .cloned()
            .collect()
    }
}
// Setters
impl Data {
    /// Appends a row after validating that it has a value for every header.
    ///
    /// When the table is file-bound, the whole CSV is rewritten; if the
    /// write fails the in-memory insert is rolled back.
    pub fn add_row(&mut self, row: Row) {
        let mut proceed = true;
        for header in &self.headers {
            if !row.contains_key(&header.to_lowercase()) {
                proceed = false;
            }
        }
        if proceed {
            self.rows.push(row);
            self.rows_len += 1;
            // Persist only when a backing filename is present (len > 1).
            if self.filename.len() > 1 {
                if let Ok(()) = self.write_csv(Filename::Existing) {
                    println!("File updated: {}", self.filename);
                } else {
                    println!("Couldn't write to file: {}", self.filename);
                    // Roll back the insert so memory matches the file.
                    self.rows.pop();
                    self.rows_len -= 1;
                }
            }
        } else {
            println!("Some headers are not valid or missing")                                              ;
        }
    }
    /// Replaces the row at `index`. Panics if `index` is out of bounds.
    /// Note: unlike `add_row`, this does not persist to disk.
    pub fn edit_row(&mut self, index: usize, row: Row) {
        self.rows[index] = row;
    }
    /// Removes the row at `index`. Panics if `index` is out of bounds.
    pub fn drop_row(&mut self, index: usize) {
        self.rows.remove(index);
        self.rows_len -= 1;
    }
    /// Points the table at a (new) backing file path.
    pub fn set_filename(&mut self, new_filename: &str) {
        self.filename = new_filename.into()
    }
    /// Recomputes the cached row count from the actual rows.
    pub fn calc_rows_len(&mut self) {
        self.rows_len = self.rows.len();
    }
}
// File io
impl Data {
    /// Loads a CSV file: the first line supplies the headers, each later
    /// non-empty line one row. All headers and cell values are lower-cased.
    pub fn open_file(filename: &str) -> std::io::Result<Self> {
        let mut contents = String::new();
        File::open(filename)?.read_to_string(&mut contents)?;
        let mut contents_iter = contents.split("\n")                          ;
        let header_iter = contents_iter.next().unwrap().split(",");
        let mut headers = Vec::new();
        for header in header_iter {
            headers.push(header.to_string().to_lowercase())
        }
        let mut rows = Vec::new();
        for row in contents_iter {
            let mut row_hashmap = HashMap::new();
            if row.len() != 0 {
                let mut row_split = row.split(",");
                // Pair each header with the next cell; a short row would
                // panic on the `unwrap` below.
                for header in headers.clone() {
                    row_hashmap
                        .insert(header, row_split.next().unwrap().to_string().to_lowercase());
                }
                rows.push(row_hashmap);
            }
        }
        // NOTE(review): `rows.clone()` could be avoided by capturing the
        // length before the struct literal.
        Ok(Self {
            headers,
            rows: rows.clone(),
            filename: String::from(filename),
            rows_len: rows.len(),
        })
    }
    /// Serializes the table (headers line, then one line per row) to either
    /// the stored filename or a caller-supplied one. Everything is written
    /// lower-cased, matching `open_file`.
    pub fn write_csv(&self, filename: Filename) -> std::io::Result<()> {
        let string_filename: String;
        match filename {
            Filename::New(filename) => string_filename = filename.to_owned(),
            Filename::Existing => string_filename = self.filename.to_owned(),
        }
        let mut file = File::create(string_filename)?;
        for i in 0..self.headers.len() {
            // Last column ends the line instead of adding a comma.
            if i == self.headers.len() - 1 {
                writeln!(file, "{}", &self.headers[i].to_lowercase())?;
            } else {
                write!(file, "{},", &self.headers[i].to_lowercase())?;
            }
        }
        for row in &self.rows {
            for header_index in 0..self.headers.len() {
                let value = row.get(&self.headers[header_index]).unwrap().to_string();
                if header_index == self.headers.len() - 1 {
                    writeln!(file, "{}", value.to_lowercase())?;
                } else {
                    write!(file, "{},", value.to_lowercase())?;
                }
            }
        }
        Ok(())
    }
}
| true |
646f59ff8e89884dd8d3cee6a1b6fd791324aa5c
|
Rust
|
cvgore/grrr
|
/bot/src/upload/client.rs
|
UTF-8
| 608 | 2.640625 | 3 |
[] |
no_license
|
/// Thin wrapper around a `reqwest::Client` pinned to one base URL.
pub(crate) struct Client {
    req: reqwest::Client,
    base_url: String,
}
impl Client {
    /// User-agent string sent with every request, e.g. `grrr@<version>`.
    #[inline]
    fn get_user_agent() -> String {
        format!("grrr@{}", crate::ver::VERSION)
    }
    /// Builds a client for `base_url`.
    ///
    /// Panics if the underlying `reqwest` client cannot be constructed.
    pub fn new(base_url: String) -> Self {
        let req = reqwest::Client::builder()
            .user_agent(Client::get_user_agent())
            .build()
            .expect("failed to create client");
        Client {
            req,
            base_url
        }
    }
    /// Health check: GET the base URL and discard the response body.
    pub async fn ping(&self) -> Result<(), reqwest::Error> {
        self.req.get(&self.base_url).send().await.map(|_| ())
    }
}
| true |
f410353883131cbf0b7f17c5a2d1ab7f219a1c9b
|
Rust
|
tesuji/rust-oom
|
/tests/non_empty.rs
|
UTF-8
| 1,422 | 2.765625 | 3 |
[
"MIT"
] |
permissive
|
use oom::{NonEmptyMutSlice, NonEmptyVec};
use std::path::Path;
// Exercises the mutable non-empty slice/vec APIs of the `oom` crate.
#[test]
fn test_muts() {
    // A non-empty slice of paths: length and first/last accessors.
    let config_dirs = &mut [
        Path::new("/home/user/.config/nvim"),
        Path::new("/etc/nvim"),
        Path::new("/usr/share/nvim"),
    ];
    let mut s = NonEmptyMutSlice::from_slice_checked(config_dirs).unwrap();
    assert_eq!(s.len().get(), 3);
    assert_eq!(s.is_empty(), false);
    let first = Path::new("/home/user/.config/neovim");
    let last = Path::new("/home/user/.config/vim");
    // Writes through first_mut/last_mut are visible via first/last.
    {
        *(s.first_mut()) = first;
        assert_eq!(*s.first(), first);
    }
    {
        *(s.last_mut()) = last;
        assert_eq!(*s.last(), last);
    }
    // split_first_mut / split_last_mut expose head (or tail) plus the rest.
    let arr = &mut [0, 1, 2];
    let mut s = NonEmptyMutSlice::from_slice_checked(arr).unwrap();
    {
        let (first, rest) = s.split_first_mut();
        *first = 42;
        rest[0] = 2;
        rest[1] = 3;
        assert_eq!(s.as_slice(), &[42, 2, 3][..]);
    }
    {
        let (last, rest) = s.split_last_mut();
        *last = 0;
        rest[0] = 42;
        rest[1] = 42;
        assert_eq!(s.as_slice(), &[42, 42, 0][..]);
    }
    // Vec constructors: non-empty succeeds; an empty vec is handed back in Err.
    let v = vec![1, 2, 3];
    let v = NonEmptyVec::from_vec_checked(v).unwrap();
    assert_eq!(v.as_slice(), &[1, 2, 3]);
    let v = Vec::<u32>::with_capacity(42);
    match NonEmptyVec::from_vec_checked(v) {
        Ok(_) => panic!("slice is empty"),
        Err(v) => assert!(v.is_empty()),
    }
}
| true |
bbabf0696a5e88f4dba6d6006516f33d33422917
|
Rust
|
g-s-k/jval
|
/src/number.rs
|
UTF-8
| 2,084 | 3.296875 | 3 |
[] |
no_license
|
#![cfg(test)]
use super::*;
// Lexer tests for numeric literals. `try_get_number` appears to return a
// nested pair whose inner first element is the parsed token (hence the
// `.0 .0` projections) -- TODO confirm against the lexer's signature.
#[test]
fn int() {
    assert_eq!(
        try_get_number("12345").unwrap().0 .0,
        Token::NumberLiteral(12345.)
    );
}
#[test]
fn signed_int() {
    assert_eq!(
        try_get_number("-98765").unwrap().0 .0,
        Token::NumberLiteral(-98765.)
    );
}
#[test]
fn only_fract() {
    assert_eq!(
        try_get_number("0.111").unwrap().0 .0,
        Token::NumberLiteral(0.111)
    );
}
#[test]
fn signed_only_fract() {
    assert_eq!(
        try_get_number("-0.9").unwrap().0 .0,
        Token::NumberLiteral(-0.9)
    );
}
#[test]
fn int_fract() {
    assert_eq!(
        try_get_number("3.14159").unwrap().0 .0,
        Token::NumberLiteral(3.14159)
    );
}
#[test]
fn signed_int_fract() {
    assert_eq!(
        try_get_number("-444.55555678").unwrap().0 .0,
        Token::NumberLiteral(-444.55555678)
    );
}
// Exponents: both `e`/`E`, with optional signed exponent.
#[test]
fn exp() {
    assert_eq!(
        try_get_number("0e67").unwrap().0 .0,
        Token::NumberLiteral(0e67)
    );
    assert_eq!(
        try_get_number("0E67").unwrap().0 .0,
        Token::NumberLiteral(0e67)
    );
    assert_eq!(
        try_get_number("123e4").unwrap().0 .0,
        Token::NumberLiteral(123e4)
    );
    assert_eq!(
        try_get_number("0.5e0").unwrap().0 .0,
        Token::NumberLiteral(0.5)
    );
    assert_eq!(
        try_get_number("6.67e-11").unwrap().0 .0,
        Token::NumberLiteral(6.67e-11)
    );
}
// The lexer must stop at the literal and ignore whatever follows it.
#[test]
fn trailing_content() {
    assert_eq!(
        try_get_number("3.14159, null").unwrap().0 .0,
        Token::NumberLiteral(3.14159)
    );
    assert_eq!(
        try_get_number("3.14159 , \"what\"").unwrap().0 .0,
        Token::NumberLiteral(3.14159)
    );
}
// JSON-style number grammar: no leading '+', no leading zeros, no bare
// leading/trailing '.', and exponents need digits.
#[test]
fn reject_invalid() {
    assert!(try_get_number("+3").is_err());
    assert!(try_get_number("03.2").is_err());
    assert!(try_get_number("-01").is_err());
    assert!(try_get_number(".5").is_err());
    assert!(try_get_number("5.").is_err());
    assert!(try_get_number("e123").is_err());
    assert!(try_get_number("2.78E").is_err());
    assert!(try_get_number("3e-").is_err());
}
| true |
9fdcdb20260f4142a222d1b3c64b6711aec22349
|
Rust
|
Skallwar/suckit
|
/tests/auth.rs
|
UTF-8
| 1,862 | 2.765625 | 3 |
[
"Apache-2.0",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
//! Tests for using --auth flags for suckit
mod fixtures;
use std::fs::read_dir;
use std::process::Command;
use std::process::Stdio;
const PAGE: &'static str = "tests/fixtures/";
const IP: &'static str = "0.0.0.0";
// Shouldn't supply credentials to a non-matching host
#[test]
fn auth_different_host() {
let ip = fixtures::spawn_local_http_server(PAGE, true, None);
let url = format!("http://{}", ip);
let tempdir = mktemp::Temp::new_dir().unwrap();
let output_dir = tempdir.to_str().unwrap();
let mut cmd = Command::new(env!("CARGO_BIN_EXE_suckit"))
.args(&[
&url,
"-o",
output_dir,
"-a",
"username password example.com",
])
.stdout(Stdio::inherit())
.stderr(Stdio::inherit())
.spawn()
.unwrap();
let status = cmd.wait().unwrap();
assert!(status.success());
let paths = read_dir(format!("{}/{}", output_dir, IP)).unwrap();
// Only the initial invalid response file should be present
assert_eq!(paths.count(), 1);
}
// Should authenticate with credentials to host (defaulting to origin host)
#[test]
fn auth_valid() {
let ip = fixtures::spawn_local_http_server(PAGE, false, None);
let url = format!("http://{}", ip);
let tempdir = mktemp::Temp::new_dir().unwrap();
let output_dir = tempdir.to_str().unwrap();
let mut cmd = Command::new(env!("CARGO_BIN_EXE_suckit"))
.args(&[&url, "-o", output_dir, "-a", "username password"])
.stdout(Stdio::inherit())
.stderr(Stdio::inherit())
.spawn()
.unwrap();
let status = cmd.wait().unwrap();
assert!(status.success());
let paths = read_dir(format!("{}/{}", output_dir, IP)).unwrap();
// Should load multiple paths, not just the invalid auth response
assert!(paths.count() > 1);
}
| true |
063b0214348eb9f3b98e289ae0a1000e110b9bc1
|
Rust
|
pikelet-lang/pikelet
|
/pikelet-cli/src/lib.rs
|
UTF-8
| 1,413 | 2.640625 | 3 |
[
"Apache-2.0"
] |
permissive
|
use anyhow::anyhow;
pub mod check;
pub mod repl;
/// The Pikelet command line interface.
#[derive(structopt::StructOpt)]
pub enum Options {
    /// Check some Pikelet source files.
    #[structopt(name = "check")]
    Check(check::Options),
    /// Runs the structured editor.
    #[cfg(feature = "editor")]
    #[structopt(name = "editor")]
    Editor,
    /// Runs the language server.
    #[cfg(feature = "language-server")]
    #[structopt(name = "language-server")]
    LanguageServer,
    /// Runs the REPL/interactive mode.
    #[structopt(name = "repl")]
    Repl(repl::Options),
}
/// Run the CLI with the given options
///
/// # Errors
///
/// Propagates whatever error the selected subcommand produces.
pub fn run(options: Options) -> anyhow::Result<()> {
    match options {
        Options::Check(options) => check::run(options),
        #[cfg(feature = "editor")]
        Options::Editor => {
            // FIXME: `iced::Error` is not `Send + Sync`, and so is incompatible with `anyhow::Result`.
            // See this issue for more information: https://github.com/hecrj/iced/issues/516
            pikelet_editor::run().map_err(|err| anyhow!("{}", err))
        }
        #[cfg(feature = "language-server")]
        Options::LanguageServer => pikelet_language_server::run(),
        Options::Repl(options) => repl::run(options),
    }
}
/// Current terminal width in columns, or "unlimited" (`usize::MAX`) when
/// the dimensions cannot be detected.
fn term_width() -> usize {
    term_size::dimensions().map_or(std::usize::MAX, |(width, _)| width)
}
| true |
001a503a1e678a4693f1e0571c02beab330551b9
|
Rust
|
beanz/adventofcode-2020
|
/day19/src/ast.rs
|
UTF-8
| 3,005 | 3.046875 | 3 |
[] |
no_license
|
use crate::Error;
use lazy_static::lazy_static;
use regex::Regex;
use std::{collections::HashMap, convert::TryFrom, path::Path, str::FromStr};
lazy_static! {
    // Matches a quoted single-character terminal, e.g. `"a"`.
    static ref TERM_LITERAL: Regex = Regex::new(r#""(\w)""#).unwrap();
    // Matches a rule definition line: `<ident>: <body>`.
    static ref RULE: Regex = Regex::new(r"^(\d+): (.*)$").unwrap();
}
/// A rule's numeric identifier.
pub type Ident = usize;
/// One alternative: a sequence of rule identifiers that must match in order.
pub type Subrules = Vec<Ident>;
/// The right-hand side of a rule: either a literal character or a set of
/// alternative subrule sequences.
pub enum RuleTerm {
    Literal(char),
    Subrules(Vec<Subrules>),
}
impl FromStr for RuleTerm {
    type Err = Error;
    /// Parses a rule body: either a quoted literal (`"a"`) or
    /// pipe-separated sequences of rule identifiers (`1 2 | 3 4`).
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        if let Some(capture) = TERM_LITERAL.captures(s) {
            Ok(RuleTerm::Literal(
                capture[1]
                    .chars()
                    .next()
                    .expect("regex guarantees at least 1 char; qed"),
            ))
        } else {
            let mut subrules = Vec::new();
            let mut current_subrule = Vec::new();
            for token in s.split_whitespace() {
                if token == "|" {
                    // `|` separates alternatives: flush the current sequence.
                    subrules.push(std::mem::take(&mut current_subrule));
                } else {
                    current_subrule.push(token.parse()?);
                }
            }
            subrules.push(current_subrule);
            Ok(RuleTerm::Subrules(subrules))
        }
    }
}
/// A numbered grammar rule.
pub struct Rule {
    pub ident: Ident,
    pub term: RuleTerm,
}
impl FromStr for Rule {
    type Err = Error;
    /// Parses one `<ident>: <term>` line.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let captures = RULE
            .captures(s)
            .ok_or_else(|| Error::Parse(s.to_string(), "did not match RULE regex".to_string()))?;
        let ident = captures[1].parse()?;
        let term = captures[2].parse()?;
        Ok(Rule { ident, term })
    }
}
/// A candidate message to check against the grammar.
type Message = String;
/// The parsed puzzle input: rules keyed by identifier, plus the messages.
#[derive(Default)]
pub struct Input {
    pub rules: HashMap<Ident, Rule>,
    pub messages: Vec<Message>,
}
impl FromStr for Input {
    type Err = Error;
    /// Parses the two blank-line-separated sections: rules, then messages.
    /// Any third section is reported as a parse error.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let mut input = Input::default();
        for (idx, section) in s.split("\n\n").enumerate() {
            match idx {
                0 => {
                    // rules: one `ident: term` definition per line
                    for rule in section.split('\n') {
                        let rule: Rule = rule.parse()?;
                        input.rules.insert(rule.ident, rule);
                    }
                }
                1 => {
                    // messages: one candidate string per line
                    input.messages = section.split('\n').map(|msg| msg.to_string()).collect();
                }
                _ => {
                    // Include at most the first 50 characters of the
                    // unexpected section in the error. Fixed: the original
                    // sliced `section[..50]`, which panics when the section
                    // is shorter than 50 bytes or the cut lands inside a
                    // multi-byte UTF-8 character -- crashing in the very
                    // path meant to report the error.
                    let preview: String = section.chars().take(50).collect();
                    return Err(Error::Parse(
                        preview,
                        "more sections than expected".to_string(),
                    ));
                }
            }
        }
        Ok(input)
    }
}
impl TryFrom<&Path> for Input {
    type Error = Error;
    /// Reads the file at `value` and parses it as puzzle [`Input`].
    fn try_from(value: &Path) -> Result<Self, Self::Error> {
        let data = std::fs::read_to_string(value)?;
        data.parse()
    }
}
| true |
e050b1015122407915114d55ff18497f96d98256
|
Rust
|
rcore-os-infohub/ossoc2020-JohnWestonNull-daily
|
/lab/os/src/memory/mapping/segment.rs
|
UTF-8
| 723 | 2.859375 | 3 |
[] |
no_license
|
use crate::memory::{address::*, mapping::Flags, range::Range};
/// How a segment's virtual pages are backed by physical memory.
#[derive(Debug)]
pub enum MapType {
    Linear, // physical pages obtained by direct conversion (`PhysicalPageNumber::from`)
    Framed, // backed by individually allocated frames; no precomputed mapping
}
/// A contiguous virtual-address region with uniform mapping type and flags.
#[derive(Debug)]
pub struct Segment {
    pub map_type: MapType,
    pub range: Range<VirtualAddress>,
    pub flags: Flags,
}
impl Segment {
    /// Iterates the physical page numbers of a linearly mapped segment;
    /// `None` for framed segments, whose frames are allocated elsewhere.
    pub fn iter_mapped(&self) -> Option<impl Iterator<Item=PhysicalPageNumber>> {
        match self.map_type {
            MapType::Linear => Some(self.page_range().iter().map(PhysicalPageNumber::from)),
            MapType::Framed => None
        }
    }
    /// Expands the byte-address range to whole pages: floor of the start,
    /// ceiling of the end.
    pub fn page_range(&self) -> Range<VirtualPageNumber> {
        Range::from(
            VirtualPageNumber::floor(self.range.start)..VirtualPageNumber::ceil(self.range.end),
        )
    }
}
| true |
231ff39cfa01faffe4d84f90e73ae85156d064d1
|
Rust
|
keserima/keserima.github.io
|
/mihosmeya_converter/src/lib.rs
|
UTF-8
| 10,556 | 2.84375 | 3 |
[] |
no_license
|
// Regression tests pinning the romanized-Keserima -> kana conversion.
#[test]
fn test1() {
    assert_eq!(
        convert("KESERIMA TONADORAPAMO HIFI, MAHOSMA NINIBINIYANA,").unwrap(),
        "かぃさぃりま となろらぱも いヒ,まおしま ににぴにいあな,"
    );
}
#[test]
fn test2() {
    assert_eq!(
        convert("HAYONTI MA NINIBINIYAFI, TONADORAMINIYA.").unwrap(),
        "あいおんち ま ににぴにいあヒ,となろらみにいあ."
    );
}
#[test]
fn test3() {
    assert_eq!(
        convert("SEMIGOHA, PIYA MA HOMI MEHIGAMIFI,").unwrap(),
        "さぃみこあ,ぴいあ ま おみ まぃいがみヒ,"
    );
}
#[test]
fn test4() {
    assert_eq!(
        convert("SANGAPAMO TONAMIYAFI MOHONIYA.").unwrap(),
        "さんがぱも となみいあヒ もおにいあ."
    );
}
#[test]
fn test5() {
    assert_eq!(
        convert("MIHOSMEYA SANTSEGIPAMO HIME,").unwrap(),
        "みおしまぃいあ さんさぃきぱも いまぃ,"
    );
}
#[test]
fn test6() {
    assert_eq!(
        convert("MEGAYEDI HOMI HINA TONADORAMINI.").unwrap(),
        "まぃがいあぃり おみ いな となろらみに."
    );
}
// A longer sentence exercising all punctuation kinds and many onsets.
#[test]
fn test7() {
    assert_eq!(
        convert("HASTE, MAHOSMA, DIRETSO, SAMEGO, MOHONTA, HAYONTI MA SERIMIYA. OMMAFI MIRA, SEMIGOHA, PIYA MA SEGORIME SAMBATI SAMBATI GATONA").unwrap(),
        "あしたぃ,まおしま,りらぃそ,さまぃこ,もおんた,あいおんち ま さぃりみいあ.おんまヒ みら,さぃみこあ,ぴいあ ま さぃこりまぃ さんばち さんばち がとな"
    )
}
use log::warn;
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct Error(String);
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
enum Onset {
Hor0,
P,
B,
T,
DorR,
K,
G,
F,
SorTs,
M,
N,
Y,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
enum Vowel {
A,
I,
E,
O,
}
enum ParserState {
WordInitial,
OpenSyllParsed,
SkipSpaces,
OnsetParsed(Onset),
N,
S,
}
mod lex;
pub fn convert(a: &str) -> Result<String, Error> {
use lex::Token::*;
let mut ans = String::new();
let mut state = ParserState::WordInitial;
let lexed = lex::lex(a)?;
let mut iter = lexed.iter();
while let Some(c) = iter.next() {
match c {
Space | Comma | Period => {
if let ParserState::SkipSpaces = state {
if *c != Space {
warn!("duplicate punctuation");
ans.push((*c).into());
}
} else {
state = ParserState::SkipSpaces;
ans.push((*c).into());
}
}
N => match state {
ParserState::N => {
ans += "ん";
state = ParserState::OnsetParsed(Onset::N);
}
ParserState::S => {
ans += "し";
state = ParserState::OnsetParsed(Onset::N);
}
ParserState::OnsetParsed(o) => {
return Err(Error(format!("invalid N encountered after {:?}", o)));
}
ParserState::WordInitial
| ParserState::OpenSyllParsed
| ParserState::SkipSpaces => {
state = ParserState::N;
}
},
S => match state {
ParserState::N => {
ans += "ん";
state = ParserState::OnsetParsed(Onset::SorTs);
}
ParserState::S => {
ans += "し";
state = ParserState::OnsetParsed(Onset::SorTs);
}
ParserState::OnsetParsed(o) => {
return Err(Error(format!("invalid S encountered after {:?}", o)));
}
ParserState::WordInitial
| ParserState::OpenSyllParsed
| ParserState::SkipSpaces => {
state = ParserState::S;
}
},
P | B | D | K | G | F | Y => {
if let ParserState::OnsetParsed(c2) = state {
return Err(Error(format!(
"impossible consonant cluster detected: {:?} followed by {:?}",
c2, c
)));
} else {
if let ParserState::N = state {
ans += "ん"
} else if let ParserState::S = state {
ans += "し"
}
state = ParserState::OnsetParsed(match c {
P => Onset::P,
B => Onset::B,
D => Onset::DorR,
K => Onset::K,
G => Onset::G,
F => Onset::F,
Y => Onset::Y,
_ => panic!("Cannot happen"),
});
}
}
T | R | H | M => {
match state {
ParserState::N => {
ans += "ん";
}
ParserState::S => {
ans += "し";
}
_ => {}
};
let onset = match c {
T => Onset::T,
R => Onset::DorR,
H => Onset::Hor0,
M => Onset::M,
_ => panic!("cannot happen"),
};
match (c, iter.next()) {
(_, Some(A)) => {
ans += make_syllable(onset, Vowel::A);
state = ParserState::OpenSyllParsed;
}
(_, Some(E)) => {
ans += make_syllable(onset, Vowel::E);
state = ParserState::OpenSyllParsed;
}
(_, Some(I)) => {
ans += make_syllable(onset, Vowel::I);
state = ParserState::OpenSyllParsed;
}
(_, Some(O)) => {
ans += make_syllable(onset, Vowel::O);
state = ParserState::OpenSyllParsed;
}
(T, Some(S)) => state = ParserState::OnsetParsed(Onset::SorTs),
(R, Some(R)) => {
ans += "ん";
state = ParserState::OnsetParsed(Onset::DorR)
}
(H, Some(H)) => {
ans += "し";
state = ParserState::OnsetParsed(Onset::Hor0)
}
(M, Some(M)) => {
ans += "ん";
state = ParserState::OnsetParsed(Onset::M)
}
(M, Some(P)) => {
ans += "ん";
state = ParserState::OnsetParsed(Onset::P)
}
(M, Some(B)) => {
ans += "ん";
state = ParserState::OnsetParsed(Onset::B)
}
(M, Some(F)) => {
ans += "ん";
state = ParserState::OnsetParsed(Onset::F)
}
(_, a) => {
return Err(Error(format!(
"Unexpected {:?} encountered after {:?}",
a, c
)))
}
}
}
A | I | E | O => {
let vowel = match c {
A => Vowel::A,
I => Vowel::I,
E => Vowel::E,
O => Vowel::O,
_ => panic!("cannot happen"),
};
match state {
ParserState::OnsetParsed(c2) => {
ans += make_syllable(c2, vowel);
}
ParserState::S => {
ans += make_syllable(Onset::SorTs, vowel);
}
ParserState::N => {
ans += make_syllable(Onset::N, vowel);
}
ParserState::WordInitial
| ParserState::OpenSyllParsed
| ParserState::SkipSpaces => ans += make_syllable(Onset::Hor0, vowel),
}
state = ParserState::OpenSyllParsed;
}
}
}
if let ParserState::N = state {
return Err(Error(
"Unexpected end of input encontered after N".to_string(),
));
} else if let ParserState::S = state {
return Err(Error(
"Unexpected end of input encontered after S".to_string(),
));
}
Ok(ans)
}
fn make_syllable(c2: Onset, vowel: Vowel) -> &'static str {
use Onset::*;
use Vowel::*;
match (c2, vowel) {
(Hor0, A) => "あ",
(Hor0, E) => "あぃ",
(Hor0, I) => "い",
(Hor0, O) => "お",
(P, A) => "ぱ",
(P, E) => "ぱぃ",
(P, I) | (B, I) => "ぴ",
(P, O) => "ぽ",
(B, A) => "ば",
(B, E) => "ばぃ",
(B, O) => "ぼ",
(T, A) => "た",
(T, E) => "たぃ",
(T, I) => "ち",
(T, O) => "と",
(DorR, A) => "ら",
(DorR, E) => "らぃ",
(DorR, I) => "り",
(DorR, O) => "ろ",
(K, A) => "か",
(K, E) => "かぃ",
(K, I) | (G, I) => "き",
(K, O) | (G, O) => "こ",
(G, A) => "が",
(G, E) => "がぃ",
(F, A) => "ハ",
(F, E) => "ハぃ",
(F, I) => "ヒ",
(F, O) => "ホ",
(SorTs, A) => "さ",
(SorTs, E) => "さぃ",
(SorTs, I) => {
warn!("si / tsi detected. Replacing it with い");
"い"
}
(SorTs, O) => "そ",
(M, A) => "ま",
(M, E) => "まぃ",
(M, I) => "み",
(M, O) => "も",
(N, A) => "な",
(N, E) => "なぃ",
(N, I) => "に",
(N, O) => "の",
(Y, A) => "いあ",
(Y, E) => "いあぃ",
(Y, I) => "い", // intentional
(Y, O) => "いお",
}
}
| true |
6a86da745c5f0fc1962c694c5acc5840af27cc77
|
Rust
|
cloud-hypervisor/rust-hypervisor-firmware
|
/src/uart_pl011.rs
|
UTF-8
| 711 | 2.65625 | 3 |
[
"Apache-2.0"
] |
permissive
|
// SPDX-License-Identifier: Apache-2.0
// Copyright (C) 2022 Akira Moroo
use core::fmt;
/// Minimal PL011-style UART that writes bytes to a fixed MMIO address.
pub struct Pl011 {
    base: usize,
}

impl Pl011 {
    /// Create a UART handle for the device registers at `base`.
    pub const fn new(base: usize) -> Self {
        Pl011 { base }
    }

    /// No hardware setup is required for this device.
    pub fn init(&mut self) {
        // Intentionally empty.
    }

    /// Write one byte to the data register with a volatile store.
    pub fn send(&mut self, data: u8) {
        // SAFETY: `base` is expected to be a valid, writable byte address
        // (the device's MMIO data register) for the lifetime of this handle.
        unsafe { core::ptr::write_volatile(self.base as *mut u8, data) }
    }
}

impl fmt::Write for Pl011 {
    fn write_str(&mut self, s: &str) -> fmt::Result {
        for b in s.bytes() {
            // Unix-like OS treats LF as CRLF: expand '\n' to "\r\n".
            if b == b'\n' {
                self.send(b'\r');
            }
            self.send(b);
        }
        Ok(())
    }
}
| true |
2b2a518de3c7c36b86cbddecabd611f490b0962f
|
Rust
|
iptq/wat
|
/src/api_v1/summary.rs
|
UTF-8
| 8,387 | 2.5625 | 3 |
[
"MIT"
] |
permissive
|
use std::collections::{HashMap, HashSet};
use std::hash::{Hash, Hasher};
use chrono::{DateTime, TimeZone, Utc};
use rocket::request::Form;
use rocket_contrib::json::Json;
use super::Auth;
use crate::db::DbConn;
use crate::errors::Error;
use crate::models::{Heartbeat, User};
use crate::utils::FormDate;
#[derive(FromForm)]
pub struct SummaryParams {
start: FormDate,
end: FormDate,
project: Option<String>,
branches: Option<String>,
}
#[derive(Clone, Debug, Serialize)]
struct SummaryInnerItem {
name: String,
total_seconds: f64,
percent: f64,
digital: String,
text: String,
hours: u32,
minutes: u8,
seconds: Option<u8>,
}
#[derive(Clone, Debug, Eq, PartialEq, Serialize)]
struct SummaryRange {
date: String,
start: DateTime<Utc>,
end: DateTime<Utc>,
text: String,
timezone: String,
}
#[derive(Clone, Debug, Serialize)]
struct SummaryItem {
projects: Vec<SummaryInnerItem>,
languages: Vec<SummaryInnerItem>,
editors: Vec<SummaryInnerItem>,
operating_systems: Vec<SummaryInnerItem>,
dependencies: Vec<SummaryInnerItem>,
machines: Vec<SummaryInnerItem>,
branches: Vec<SummaryInnerItem>,
entities: Vec<SummaryInnerItem>,
range: SummaryRange,
}
impl PartialEq for SummaryItem {
fn eq(&self, other: &Self) -> bool {
self.range == other.range
}
}
impl Eq for SummaryItem {}
impl Hash for SummaryItem {
fn hash<H: Hasher>(&self, state: &mut H) {
self.range.start.hash(state);
self.range.end.hash(state);
}
}
#[derive(Serialize)]
pub struct SummaryResult {
data: Vec<SummaryItem>,
start: DateTime<Utc>,
end: DateTime<Utc>,
}
enum StatType {
Project,
Language,
Editor,
OperatingSystem,
}
// Split a heartbeat list into activity segments: a segment is closed whenever
// the gap between two consecutive heartbeats reaches `timeout` seconds, and a
// trailing segment is always emitted. Segments go into `out`, a HashSet whose
// Hash/Eq for SummaryItem are keyed on the segment's time range.
//
// NOTE(review): `by` is never read and every per-category vector below stays
// empty, so only `range.start`/`range.end` are meaningfully populated — this
// looks like a work-in-progress implementation.
// NOTE(review): assumes `heartbeats` is sorted ascending by `time` — confirm
// at the call site (`get_heartbeats_interval`).
fn calculate_stat(
    by: StatType,
    heartbeats: &Vec<Heartbeat>,
    timeout: u32,
    out: &mut HashSet<SummaryItem>,
) {
    // get a guarantee that we have at least 2 heartbeats
    if heartbeats.len() < 2 {
        return;
    }
    // Start time of the segment currently being accumulated.
    let mut current_item_time = heartbeats[0].time;
    // NOTE(review): all of these stay empty for every inserted segment.
    let projects = vec![];
    let languages = vec![];
    let editors = vec![];
    let operating_systems = vec![];
    let dependencies = vec![];
    let machines = vec![];
    let branches = vec![];
    let entities = vec![];
    // NOTE(review): unused unit binding — likely leftover scaffolding.
    let range = ();
    let mut prev_heartbeat = &heartbeats[0];
    for heartbeat in heartbeats.iter().skip(1) {
        let time_delta = heartbeat.time - prev_heartbeat.time;
        // NOTE(review): leftover debug print — consider log::debug! instead.
        println!("time_delta: {:?}", time_delta);
        if time_delta.num_seconds() >= timeout as i64 {
            // time to create a new segment: it spans from the accumulated
            // start up to the previous heartbeat; the current heartbeat
            // opens the next segment.
            out.insert(SummaryItem {
                projects: projects.clone(),
                languages: languages.clone(),
                editors: editors.clone(),
                operating_systems: operating_systems.clone(),
                dependencies: dependencies.clone(),
                machines: machines.clone(),
                branches: branches.clone(),
                entities: entities.clone(),
                range: SummaryRange {
                    date: "".to_owned(),
                    text: "".to_owned(),
                    timezone: "".to_owned(),
                    start: current_item_time,
                    end: prev_heartbeat.time,
                },
            });
            current_item_time = heartbeat.time;
        }
        prev_heartbeat = heartbeat;
    }
    // Emit the final (still-open) segment ending at the last heartbeat.
    out.insert(SummaryItem {
        projects,
        languages,
        editors,
        operating_systems,
        dependencies,
        machines,
        branches,
        entities,
        range: SummaryRange {
            date: "".to_owned(),
            text: "".to_owned(),
            timezone: "".to_owned(),
            start: current_item_time,
            end: prev_heartbeat.time,
        },
    });
}
fn get_user_summaries(
conn: &DbConn,
user: &User,
summary_params: SummaryParams,
) -> Result<Json<SummaryResult>, Error> {
let start_dt = summary_params.start.and_hms(0, 0, 0);
let end_dt = summary_params.end.and_hms(23, 59, 59);
let heartbeats = conn.get_heartbeats_interval(user.id, start_dt, end_dt, None)?;
let mut data = HashSet::new();
calculate_stat(StatType::Project, &heartbeats, 15 * 60, &mut data);
calculate_stat(StatType::Language, &heartbeats, 15 * 60, &mut data);
calculate_stat(StatType::Editor, &heartbeats, 15 * 60, &mut data);
calculate_stat(StatType::OperatingSystem, &heartbeats, 15 * 60, &mut data);
let data = data.into_iter().collect();
let result = SummaryResult {
data,
start: start_dt,
end: end_dt,
};
Ok(Json(result))
}
#[get("/users/<user_id>/summaries?<params..>")]
pub fn user_summaries(
conn: DbConn,
user_id: i32,
params: Form<SummaryParams>,
_auth: Auth,
) -> Result<Json<SummaryResult>, Error> {
let params = params.into_inner();
let user = User::by_id(&conn, user_id).unwrap();
get_user_summaries(&conn, &user, params)
}
#[get("/users/current/summaries?<params..>")]
pub fn current_user_summaries(
conn: DbConn,
params: Form<SummaryParams>,
auth: Auth,
) -> Result<Json<SummaryResult>, Error> {
let user = auth.0;
let params = params.into_inner();
get_user_summaries(&conn, &user, params)
}
#[test]
fn test_calculate_stat() {
let activity = vec![
Heartbeat {
id: 1,
user_id: 1,
entity: "file1.rs".to_owned(),
entity_type: "file".to_owned(),
category: Some("coding".to_owned()),
time: Utc.ymd(2019, 8, 22).and_hms(0, 0, 0),
project: Some("wat".into()),
branch: Some("master".into()),
language: Some("Rust".into()),
dependencies: None,
lines: 128,
line_number: None,
cursor_pos: Some(1770),
is_write: false,
},
Heartbeat {
id: 2,
user_id: 1,
entity: "file1.rs".to_owned(),
entity_type: "file".to_owned(),
category: Some("coding".to_owned()),
time: Utc.ymd(2019, 8, 22).and_hms(0, 1, 0),
project: Some("wat".into()),
branch: Some("master".into()),
language: Some("Rust".into()),
dependencies: None,
lines: 128,
line_number: None,
cursor_pos: Some(1771),
is_write: false,
},
Heartbeat {
id: 3,
user_id: 1,
entity: "file1.rs".to_owned(),
entity_type: "file".to_owned(),
category: Some("coding".to_owned()),
time: Utc.ymd(2019, 8, 22).and_hms(0, 2, 0),
project: Some("wat".into()),
branch: Some("master".into()),
language: Some("Rust".into()),
dependencies: None,
lines: 130,
line_number: None,
cursor_pos: Some(1790),
is_write: false,
},
Heartbeat {
id: 4,
user_id: 1,
entity: "file1.rs".to_owned(),
entity_type: "file".to_owned(),
category: Some("coding".to_owned()),
time: Utc.ymd(2019, 8, 22).and_hms(0, 15, 0),
project: Some("wat".into()),
branch: Some("master".into()),
language: Some("Rust".into()),
dependencies: None,
lines: 131,
line_number: None,
cursor_pos: Some(1801),
is_write: false,
},
Heartbeat {
id: 5,
user_id: 1,
entity: "file1.rs".to_owned(),
entity_type: "file".to_owned(),
category: Some("coding".to_owned()),
time: Utc.ymd(2019, 8, 22).and_hms(0, 16, 0),
project: Some("wat".into()),
branch: Some("master".into()),
language: Some("Rust".into()),
dependencies: None,
lines: 132,
line_number: None,
cursor_pos: Some(1802),
is_write: false,
},
];
let mut out = HashSet::new();
calculate_stat(StatType::Project, &activity, 10 * 60, &mut out);
println!("out(10): {:?}", out);
out.clear();
calculate_stat(StatType::Project, &activity, 15 * 60, &mut out);
println!("out(15): {:?}", out);
panic!();
}
| true |
57b12c810d7dc838f77914cfbbb92fe8349a7325
|
Rust
|
yjhmelody/lua-rs
|
/src/binary/chunk.rs
|
UTF-8
| 3,510 | 2.578125 | 3 |
[] |
no_license
|
#![allow(dead_code)]
use std::hash::{Hash, Hasher};
use std::rc::Rc;
/// "\x1bLua"
pub const LUA_SIGNATURE: [u8; 4] = [0x1b, 0x4c, 0x75, 0x61];
pub const LUAC_VERSION: u8 = 0x53;
pub const LUAC_FORMAT: u8 = 0;
/// "\x19\x93\r\n\x1a\n"
pub const LUAC_DATA: [u8; 6] = [0x19, 0x93, 0x0d, 0x0a, 0x1a, 0x0a];
pub const CINT_SIZE: u8 = 4;
pub const CSIZET_SIZE: u8 = 4;
pub const INSTRUCTION_SIZE: u8 = 4;
pub const LUA_INTEGER_SIZE: u8 = 8;
pub const LUA_NUMBER_SIZE: u8 = 8;
pub const LUAC_INT: i64 = 0x5678;
pub const LUAC_NUM: f64 = 370.5;
pub const TAG_NIL: u8 = 0x00;
pub const TAG_BOOLEAN: u8 = 0x01;
pub const TAG_NUMBER: u8 = 0x03;
pub const TAG_INTEGER: u8 = 0x13;
pub const TAG_SHORT_STR: u8 = 0x04;
pub const TAG_LONG_STR: u8 = 0x14;
/// Lua Binary Chunk
#[derive(Debug)]
struct BinaryChunk {
header: Header,
size_up_values: u8,
main_func: Prototype,
}
/// Lua Header
#[derive(Debug)]
struct Header {
signature: [u8; 4],
version: u8,
format: u8,
luac_data: [u8; 6],
c_int_size: u8,
c_size_t_size: u8,
instruction_size: u8,
lua_integer_size: u8,
lua_number_size: u8,
luac_int: i64,
luac_num: f64,
}
/// Lua Function Prototype
#[derive(Debug)]
pub struct Prototype {
pub source: Option<String>,
/// For debug
pub line_defined: u32,
pub last_line_defined: u32,
pub num_params: u8,
pub is_vararg: u8,
pub max_stack_size: u8,
pub code: Vec<u32>,
pub constants: Vec<Constant>,
pub up_values: Vec<UpValue>,
pub prototypes: Vec<Rc<Prototype>>,
pub line_info: Vec<u32>,
/// For debug
pub local_vars: Vec<LocalVar>,
/// For debug
pub up_value_names: Vec<String>,
}
impl Prototype {
fn to_bytes(self) -> Vec<u8> {
unimplemented!()
}
}
/// Lua Up Value
#[derive(Debug, Copy, Clone)]
pub struct UpValue {
pub instack: u8,
pub idx: u8,
}
impl Default for UpValue {
fn default() -> Self {
Self {
instack: 0,
idx: 0,
}
}
}
impl UpValue {
pub fn new(instack: u8, idx: u8) -> Self {
Self {
instack,
idx,
}
}
}
#[derive(Debug)]
pub struct LocalVar {
pub var_name: String,
pub start_pc: u32,
pub end_pc: u32,
}
/// Constant can be stored in constant pool
#[derive(Debug, Clone)]
pub enum Constant {
Nil,
Boolean(bool),
Number(f64),
Integer(i64),
String(String),
}
impl Hash for Constant {
fn hash<H: Hasher>(&self, state: &mut H) {
match self {
Constant::Nil => 0.hash(state),
Constant::Boolean(b) => b.hash(state),
Constant::Number(n) => n.to_bits().hash(state),
Constant::Integer(i) => i.hash(state),
Constant::String(s) => s.hash(state),
}
}
}
impl PartialEq for Constant {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
(Constant::Nil, Constant::Nil) => true,
(Constant::Boolean(a), Constant::Boolean(b)) if a == b => true,
(Constant::Number(a), Constant::Number(b)) if a == b => true,
(Constant::Integer(a), Constant::Integer(b)) if a == b => true,
(Constant::String(s1), Constant::String(s2)) if s1 == s2 => true,
// todo: cmp f64 and i64
_ => false,
}
}
}
impl Eq for Constant {}
| true |
6c0a62af7bd25b62a756e553a1fe2a7ceb00c459
|
Rust
|
marco-c/gecko-dev-wordified-and-comments-removed
|
/third_party/rust/atomic_refcell/src/lib.rs
|
UTF-8
| 8,631 | 2.84375 | 3 |
[
"Apache-2.0",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
#
!
[
no_std
]
#
!
[
allow
(
unsafe_code
)
]
#
!
[
deny
(
missing_docs
)
]
use
core
:
:
cell
:
:
UnsafeCell
;
use
core
:
:
cmp
;
use
core
:
:
fmt
;
use
core
:
:
fmt
:
:
{
Debug
Display
}
;
use
core
:
:
ops
:
:
{
Deref
DerefMut
}
;
use
core
:
:
sync
:
:
atomic
;
use
core
:
:
sync
:
:
atomic
:
:
AtomicUsize
;
pub
struct
AtomicRefCell
<
T
:
?
Sized
>
{
borrow
:
AtomicUsize
value
:
UnsafeCell
<
T
>
}
pub
struct
BorrowError
{
_private
:
(
)
}
impl
Debug
for
BorrowError
{
fn
fmt
(
&
self
f
:
&
mut
fmt
:
:
Formatter
<
'
_
>
)
-
>
fmt
:
:
Result
{
f
.
debug_struct
(
"
BorrowError
"
)
.
finish
(
)
}
}
impl
Display
for
BorrowError
{
fn
fmt
(
&
self
f
:
&
mut
fmt
:
:
Formatter
<
'
_
>
)
-
>
fmt
:
:
Result
{
Display
:
:
fmt
(
"
already
mutably
borrowed
"
f
)
}
}
pub
struct
BorrowMutError
{
_private
:
(
)
}
impl
Debug
for
BorrowMutError
{
fn
fmt
(
&
self
f
:
&
mut
fmt
:
:
Formatter
<
'
_
>
)
-
>
fmt
:
:
Result
{
f
.
debug_struct
(
"
BorrowMutError
"
)
.
finish
(
)
}
}
impl
Display
for
BorrowMutError
{
fn
fmt
(
&
self
f
:
&
mut
fmt
:
:
Formatter
<
'
_
>
)
-
>
fmt
:
:
Result
{
Display
:
:
fmt
(
"
already
borrowed
"
f
)
}
}
impl
<
T
>
AtomicRefCell
<
T
>
{
#
[
inline
]
pub
const
fn
new
(
value
:
T
)
-
>
AtomicRefCell
<
T
>
{
AtomicRefCell
{
borrow
:
AtomicUsize
:
:
new
(
0
)
value
:
UnsafeCell
:
:
new
(
value
)
}
}
#
[
inline
]
pub
fn
into_inner
(
self
)
-
>
T
{
debug_assert
!
(
self
.
borrow
.
load
(
atomic
:
:
Ordering
:
:
Acquire
)
=
=
0
)
;
self
.
value
.
into_inner
(
)
}
}
impl
<
T
:
?
Sized
>
AtomicRefCell
<
T
>
{
#
[
inline
]
pub
fn
borrow
(
&
self
)
-
>
AtomicRef
<
T
>
{
match
AtomicBorrowRef
:
:
try_new
(
&
self
.
borrow
)
{
Ok
(
borrow
)
=
>
AtomicRef
{
value
:
unsafe
{
&
*
self
.
value
.
get
(
)
}
borrow
}
Err
(
s
)
=
>
panic
!
(
"
{
}
"
s
)
}
}
#
[
inline
]
pub
fn
try_borrow
(
&
self
)
-
>
Result
<
AtomicRef
<
T
>
BorrowError
>
{
match
AtomicBorrowRef
:
:
try_new
(
&
self
.
borrow
)
{
Ok
(
borrow
)
=
>
Ok
(
AtomicRef
{
value
:
unsafe
{
&
*
self
.
value
.
get
(
)
}
borrow
}
)
Err
(
_
)
=
>
Err
(
BorrowError
{
_private
:
(
)
}
)
}
}
#
[
inline
]
pub
fn
borrow_mut
(
&
self
)
-
>
AtomicRefMut
<
T
>
{
match
AtomicBorrowRefMut
:
:
try_new
(
&
self
.
borrow
)
{
Ok
(
borrow
)
=
>
AtomicRefMut
{
value
:
unsafe
{
&
mut
*
self
.
value
.
get
(
)
}
borrow
}
Err
(
s
)
=
>
panic
!
(
"
{
}
"
s
)
}
}
#
[
inline
]
pub
fn
try_borrow_mut
(
&
self
)
-
>
Result
<
AtomicRefMut
<
T
>
BorrowMutError
>
{
match
AtomicBorrowRefMut
:
:
try_new
(
&
self
.
borrow
)
{
Ok
(
borrow
)
=
>
Ok
(
AtomicRefMut
{
value
:
unsafe
{
&
mut
*
self
.
value
.
get
(
)
}
borrow
}
)
Err
(
_
)
=
>
Err
(
BorrowMutError
{
_private
:
(
)
}
)
}
}
#
[
inline
]
pub
fn
as_ptr
(
&
self
)
-
>
*
mut
T
{
self
.
value
.
get
(
)
}
#
[
inline
]
pub
fn
get_mut
(
&
mut
self
)
-
>
&
mut
T
{
debug_assert
!
(
self
.
borrow
.
load
(
atomic
:
:
Ordering
:
:
Acquire
)
=
=
0
)
;
unsafe
{
&
mut
*
self
.
value
.
get
(
)
}
}
}
const
HIGH_BIT
:
usize
=
!
(
:
:
core
:
:
usize
:
:
MAX
>
>
1
)
;
const
MAX_FAILED_BORROWS
:
usize
=
HIGH_BIT
+
(
HIGH_BIT
>
>
1
)
;
struct
AtomicBorrowRef
<
'
b
>
{
borrow
:
&
'
b
AtomicUsize
}
impl
<
'
b
>
AtomicBorrowRef
<
'
b
>
{
#
[
inline
]
fn
try_new
(
borrow
:
&
'
b
AtomicUsize
)
-
>
Result
<
Self
&
'
static
str
>
{
let
new
=
borrow
.
fetch_add
(
1
atomic
:
:
Ordering
:
:
Acquire
)
+
1
;
if
new
&
HIGH_BIT
!
=
0
{
Self
:
:
check_overflow
(
borrow
new
)
;
Err
(
"
already
mutably
borrowed
"
)
}
else
{
Ok
(
AtomicBorrowRef
{
borrow
:
borrow
}
)
}
}
#
[
cold
]
#
[
inline
(
never
)
]
fn
check_overflow
(
borrow
:
&
'
b
AtomicUsize
new
:
usize
)
{
if
new
=
=
HIGH_BIT
{
borrow
.
fetch_sub
(
1
atomic
:
:
Ordering
:
:
Release
)
;
panic
!
(
"
too
many
immutable
borrows
"
)
;
}
else
if
new
>
=
MAX_FAILED_BORROWS
{
struct
ForceAbort
;
impl
Drop
for
ForceAbort
{
fn
drop
(
&
mut
self
)
{
panic
!
(
"
Aborting
to
avoid
unsound
state
of
AtomicRefCell
"
)
;
}
}
let
_abort
=
ForceAbort
;
panic
!
(
"
Too
many
failed
borrows
"
)
;
}
}
}
impl
<
'
b
>
Drop
for
AtomicBorrowRef
<
'
b
>
{
#
[
inline
]
fn
drop
(
&
mut
self
)
{
let
old
=
self
.
borrow
.
fetch_sub
(
1
atomic
:
:
Ordering
:
:
Release
)
;
debug_assert
!
(
old
&
HIGH_BIT
=
=
0
)
;
}
}
struct
AtomicBorrowRefMut
<
'
b
>
{
borrow
:
&
'
b
AtomicUsize
}
impl
<
'
b
>
Drop
for
AtomicBorrowRefMut
<
'
b
>
{
#
[
inline
]
fn
drop
(
&
mut
self
)
{
self
.
borrow
.
store
(
0
atomic
:
:
Ordering
:
:
Release
)
;
}
}
impl
<
'
b
>
AtomicBorrowRefMut
<
'
b
>
{
#
[
inline
]
fn
try_new
(
borrow
:
&
'
b
AtomicUsize
)
-
>
Result
<
AtomicBorrowRefMut
<
'
b
>
&
'
static
str
>
{
let
old
=
match
borrow
.
compare_exchange
(
0
HIGH_BIT
atomic
:
:
Ordering
:
:
Acquire
atomic
:
:
Ordering
:
:
Relaxed
)
{
Ok
(
x
)
=
>
x
Err
(
x
)
=
>
x
}
;
if
old
=
=
0
{
Ok
(
AtomicBorrowRefMut
{
borrow
}
)
}
else
if
old
&
HIGH_BIT
=
=
0
{
Err
(
"
already
immutably
borrowed
"
)
}
else
{
Err
(
"
already
mutably
borrowed
"
)
}
}
}
unsafe
impl
<
T
:
?
Sized
+
Send
>
Send
for
AtomicRefCell
<
T
>
{
}
unsafe
impl
<
T
:
?
Sized
+
Send
+
Sync
>
Sync
for
AtomicRefCell
<
T
>
{
}
impl
<
T
:
Clone
>
Clone
for
AtomicRefCell
<
T
>
{
#
[
inline
]
fn
clone
(
&
self
)
-
>
AtomicRefCell
<
T
>
{
AtomicRefCell
:
:
new
(
self
.
borrow
(
)
.
clone
(
)
)
}
}
impl
<
T
:
Default
>
Default
for
AtomicRefCell
<
T
>
{
#
[
inline
]
fn
default
(
)
-
>
AtomicRefCell
<
T
>
{
AtomicRefCell
:
:
new
(
Default
:
:
default
(
)
)
}
}
impl
<
T
:
?
Sized
+
PartialEq
>
PartialEq
for
AtomicRefCell
<
T
>
{
#
[
inline
]
fn
eq
(
&
self
other
:
&
AtomicRefCell
<
T
>
)
-
>
bool
{
*
self
.
borrow
(
)
=
=
*
other
.
borrow
(
)
}
}
impl
<
T
:
?
Sized
+
Eq
>
Eq
for
AtomicRefCell
<
T
>
{
}
impl
<
T
:
?
Sized
+
PartialOrd
>
PartialOrd
for
AtomicRefCell
<
T
>
{
#
[
inline
]
fn
partial_cmp
(
&
self
other
:
&
AtomicRefCell
<
T
>
)
-
>
Option
<
cmp
:
:
Ordering
>
{
self
.
borrow
(
)
.
partial_cmp
(
&
*
other
.
borrow
(
)
)
}
}
impl
<
T
:
?
Sized
+
Ord
>
Ord
for
AtomicRefCell
<
T
>
{
#
[
inline
]
fn
cmp
(
&
self
other
:
&
AtomicRefCell
<
T
>
)
-
>
cmp
:
:
Ordering
{
self
.
borrow
(
)
.
cmp
(
&
*
other
.
borrow
(
)
)
}
}
impl
<
T
>
From
<
T
>
for
AtomicRefCell
<
T
>
{
fn
from
(
t
:
T
)
-
>
AtomicRefCell
<
T
>
{
AtomicRefCell
:
:
new
(
t
)
}
}
impl
<
'
b
>
Clone
for
AtomicBorrowRef
<
'
b
>
{
#
[
inline
]
fn
clone
(
&
self
)
-
>
AtomicBorrowRef
<
'
b
>
{
AtomicBorrowRef
:
:
try_new
(
self
.
borrow
)
.
unwrap
(
)
}
}
pub
struct
AtomicRef
<
'
b
T
:
?
Sized
+
'
b
>
{
value
:
&
'
b
T
borrow
:
AtomicBorrowRef
<
'
b
>
}
impl
<
'
b
T
:
?
Sized
>
Deref
for
AtomicRef
<
'
b
T
>
{
type
Target
=
T
;
#
[
inline
]
fn
deref
(
&
self
)
-
>
&
T
{
self
.
value
}
}
impl
<
'
b
T
:
?
Sized
>
AtomicRef
<
'
b
T
>
{
#
[
inline
]
pub
fn
clone
(
orig
:
&
AtomicRef
<
'
b
T
>
)
-
>
AtomicRef
<
'
b
T
>
{
AtomicRef
{
value
:
orig
.
value
borrow
:
orig
.
borrow
.
clone
(
)
}
}
#
[
inline
]
pub
fn
map
<
U
:
?
Sized
F
>
(
orig
:
AtomicRef
<
'
b
T
>
f
:
F
)
-
>
AtomicRef
<
'
b
U
>
where
F
:
FnOnce
(
&
T
)
-
>
&
U
{
AtomicRef
{
value
:
f
(
orig
.
value
)
borrow
:
orig
.
borrow
}
}
#
[
inline
]
pub
fn
filter_map
<
U
:
?
Sized
F
>
(
orig
:
AtomicRef
<
'
b
T
>
f
:
F
)
-
>
Option
<
AtomicRef
<
'
b
U
>
>
where
F
:
FnOnce
(
&
T
)
-
>
Option
<
&
U
>
{
Some
(
AtomicRef
{
value
:
f
(
orig
.
value
)
?
borrow
:
orig
.
borrow
}
)
}
}
impl
<
'
b
T
:
?
Sized
>
AtomicRefMut
<
'
b
T
>
{
#
[
inline
]
pub
fn
map
<
U
:
?
Sized
F
>
(
orig
:
AtomicRefMut
<
'
b
T
>
f
:
F
)
-
>
AtomicRefMut
<
'
b
U
>
where
F
:
FnOnce
(
&
mut
T
)
-
>
&
mut
U
{
AtomicRefMut
{
value
:
f
(
orig
.
value
)
borrow
:
orig
.
borrow
}
}
#
[
inline
]
pub
fn
filter_map
<
U
:
?
Sized
F
>
(
orig
:
AtomicRefMut
<
'
b
T
>
f
:
F
)
-
>
Option
<
AtomicRefMut
<
'
b
U
>
>
where
F
:
FnOnce
(
&
mut
T
)
-
>
Option
<
&
mut
U
>
{
Some
(
AtomicRefMut
{
value
:
f
(
orig
.
value
)
?
borrow
:
orig
.
borrow
}
)
}
}
pub
struct
AtomicRefMut
<
'
b
T
:
?
Sized
+
'
b
>
{
value
:
&
'
b
mut
T
borrow
:
AtomicBorrowRefMut
<
'
b
>
}
impl
<
'
b
T
:
?
Sized
>
Deref
for
AtomicRefMut
<
'
b
T
>
{
type
Target
=
T
;
#
[
inline
]
fn
deref
(
&
self
)
-
>
&
T
{
self
.
value
}
}
impl
<
'
b
T
:
?
Sized
>
DerefMut
for
AtomicRefMut
<
'
b
T
>
{
#
[
inline
]
fn
deref_mut
(
&
mut
self
)
-
>
&
mut
T
{
self
.
value
}
}
impl
<
'
b
T
:
?
Sized
+
Debug
+
'
b
>
Debug
for
AtomicRef
<
'
b
T
>
{
fn
fmt
(
&
self
f
:
&
mut
fmt
:
:
Formatter
)
-
>
fmt
:
:
Result
{
self
.
value
.
fmt
(
f
)
}
}
impl
<
'
b
T
:
?
Sized
+
Debug
+
'
b
>
Debug
for
AtomicRefMut
<
'
b
T
>
{
fn
fmt
(
&
self
f
:
&
mut
fmt
:
:
Formatter
)
-
>
fmt
:
:
Result
{
self
.
value
.
fmt
(
f
)
}
}
impl
<
T
:
?
Sized
+
Debug
>
Debug
for
AtomicRefCell
<
T
>
{
fn
fmt
(
&
self
f
:
&
mut
fmt
:
:
Formatter
)
-
>
fmt
:
:
Result
{
write
!
(
f
"
AtomicRefCell
{
{
.
.
.
}
}
"
)
}
}
| true |
2b097ba520ada30745e8e6db16dc8f54ccc7131a
|
Rust
|
stanbar/zkSNARK.wasm
|
/src/qap.rs
|
UTF-8
| 4,097 | 2.828125 | 3 |
[
"MIT"
] |
permissive
|
use super::utils::*;
/// Convert an R1CS (constraint matrices `a`, `b`, `c`) into QAP form.
///
/// Returns, for each matrix, one interpolated polynomial per variable
/// (coefficients lowest-degree first), plus the vanishing polynomial
/// `Z(x) = (x - 1)(x - 2)...(x - m)` where `m` is the number of constraints.
pub fn r1cs_to_qap(
    a: &Vec<Vec<f64>>,
    b: &Vec<Vec<f64>>,
    c: &Vec<Vec<f64>>,
) -> (Vec<Vec<f64>>, Vec<Vec<f64>>, Vec<Vec<f64>>, Vec<f64>) {
    // Transpose so each row holds one variable's value across all constraints.
    let (a, b, c) = (transpose(a), transpose(b), transpose(c));
    // `lagrange_interop` samples each row at x = 1..=m, where m is the row
    // length (= number of constraints), so Z must vanish at exactly those
    // points. The original built Z from `1..a.len()`, i.e. over the
    // *variable* count and exclusive of the last root — both off.
    let num_constraints = a.first().map_or(0, |row| row.len());
    let (a, b, c) = (
        a.iter().map(lagrange_interop),
        b.iter().map(lagrange_interop),
        c.iter().map(lagrange_interop),
    );
    let z = (1..=num_constraints)
        .fold(vec![1.0], |acc, i| mul_polys(acc, vec![-(i as f64), 1.0]));
    (a.collect(), b.collect(), c.collect(), z)
}
fn lagrange_interop(vec: &Vec<f64>) -> Vec<f64> {
use std::convert::TryInto;
let o = vec.iter().enumerate().fold(vec![0.0], |acc, (i, x)| {
add_polys(
acc,
mk_singleton(
(i + 1).try_into().unwrap(),
x,
vec.len().try_into().unwrap(),
),
)
});
o
}
/// Make a polynomial which is zero at {1, 2 ... total_pts}, except for `point_loc` where the
/// value is `height` — i.e. a Lagrange basis polynomial scaled by `height`.
/// Coefficients are returned lowest-degree first.
fn mk_singleton(point_loc: i32, height: &f64, total_pts: i32) -> Vec<f64> {
    // Denominator: product of (point_loc - i) over every sample point i != point_loc.
    // Two fixes vs. the original:
    //  * the range must include `total_pts` (it was exclusive), and
    //  * the fold must multiply by the whole difference — the original
    //    `acc * point_loc - i` parsed as `(acc * point_loc) - i`.
    let fac = (1..=total_pts)
        .filter(|i| *i != point_loc)
        .fold(1, |acc, i| acc * (point_loc - i));
    // Numerator: (height / fac) * product of (x - i) for every i != point_loc,
    // again over the inclusive range of sample points.
    let mut o = vec![height / (fac as f64)];
    for i in 1..=total_pts {
        if i != point_loc {
            o = mul_polys(o, vec![-i as f64, 1.0])
        }
    }
    o
}
/// Coefficient-wise sum of two polynomials (lowest-degree first).
fn add_polys(a: Vec<f64>, b: Vec<f64>) -> Vec<f64> {
    let mut o = vec![0.0; std::cmp::max(a.len(), b.len())];
    for (i, v) in a.iter().enumerate() {
        o[i] += v;
    }
    for (i, v) in b.iter().enumerate() {
        o[i] += v;
    }
    o
}
/// Coefficient-wise difference `a - b` (lowest-degree first).
fn sub_polys(a: Vec<f64>, b: Vec<f64>) -> Vec<f64> {
    let mut o = vec![0.0; std::cmp::max(a.len(), b.len())];
    for (i, v) in a.iter().enumerate() {
        o[i] += v;
    }
    for (i, v) in b.iter().enumerate() {
        o[i] -= v;
    }
    o
}
/// Polynomial product via schoolbook convolution (lowest-degree first).
/// Note: panics if either input is empty, as in the original.
fn mul_polys(a: Vec<f64>, b: Vec<f64>) -> Vec<f64> {
    let mut o = vec![0.0; a.len() + b.len() - 1];
    for i in 0..a.len() {
        for j in 0..b.len() {
            o[i + j] += a[i] * b[j];
        }
    }
    o
}
pub fn create_solution_polynomials(
r: &Vec<f64>,
a_p: Vec<Vec<f64>>,
b_p: Vec<Vec<f64>>,
c_p: Vec<Vec<f64>>,
) -> (Vec<f64>, Vec<f64>, Vec<f64>, Vec<f64>) {
let a_poly = a_p
.into_iter()
.zip(r.into_iter())
.fold(Vec::<f64>::with_capacity(r.len()), |acc, (a, rval)| {
add_polys(acc, mul_polys(vec![rval.clone()], a))
});
let b_poly = b_p
.into_iter()
.zip(r.into_iter())
.fold(Vec::<f64>::with_capacity(r.len()), |acc, (b, rval)| {
add_polys(acc, mul_polys(vec![rval.clone()], b))
});
let c_poly = c_p
.into_iter()
.zip(r.into_iter())
.fold(Vec::<f64>::with_capacity(r.len()), |acc, (c, rval)| {
add_polys(acc, mul_polys(vec![rval.clone()], c))
});
let o = sub_polys(mul_polys(a_poly.clone(), b_poly.clone()), c_poly.clone());
// add check
(a_poly, b_poly, c_poly, o)
}
pub fn create_divisor_polynomial(sol: Vec<f64>, z: Vec<f64>) -> (Vec<f64>, Vec<f64>) {
div_polys(sol, z)
}
// Divide a/b, return quotient and remainder
fn div_polys(a: Vec<f64>, b: Vec<f64>) -> (Vec<f64>, Vec<f64>) {
use std::iter;
let b_len = b.len();
let mut o = vec![0f64; a.len() - b_len + 1];
let mut rem: Vec<f64> = a;
let mut leading_fac: f64;
let mut pos: usize;
while rem.len() >= b_len {
leading_fac = rem.last().unwrap() / b.last().unwrap();
pos = rem.len() - b.len();
let field = o.get_mut(pos).unwrap();
*field = leading_fac;
let multiplied: Vec<f64> = vec![0f64; pos]
.into_iter()
.chain(iter::once(leading_fac))
.collect();
let substracted = sub_polys(rem, mul_polys(b.clone(), multiplied));
rem = substracted
.clone()
.into_iter()
.take(substracted.clone().len() - 1)
.collect();
}
(o, rem)
}
| true |
bf060d68d533c7d69b9ca535b8965f85e917ad2a
|
Rust
|
kevingzhang/docktape-rs
|
/src/image.rs
|
UTF-8
| 783 | 3.3125 | 3 |
[
"MIT"
] |
permissive
|
/// Struct representing a Docker image with some of its fields
#[derive(Default)]
pub struct Image{
    pub id: String,
    pub created: Option<u64>,
    pub parent_id: Option<String>,
    pub repo_digests: Option<Vec<String>>,
    pub size: Option<u64>,
    pub virtual_size: Option<u64>,
    pub labels: Option<std::collections::HashMap<String, String>>,
    pub repo_tags: Option<Vec<String>>
}
impl Image{
    /// Returns the image ID with JSON quote characters stripped.
    pub fn id(&self) -> String{
        // `str::replace` already allocates a fresh String; the extra
        // `clone()` in the original was redundant.
        self.id.replace("\"", "")
    }
    /// Returns the image tags with quote characters stripped, or `None`
    /// when the image has no tags. (The original called `unwrap()` on the
    /// `Option` and panicked for untagged images.)
    pub fn repo_tags(&self) -> Option<Vec<String>>
    {
        self.repo_tags
            .as_ref()
            .map(|tags| tags.iter().map(|tag| tag.replace("\"", "")).collect())
    }
}
| true |
0b6d97ff700364c7b1bb5164252bbfc85520adc8
|
Rust
|
muzudho/look-ahead-items
|
/src/items.rs
|
UTF-8
| 1,425 | 3.421875 | 3 |
[
"MIT"
] |
permissive
|
//! Please iterate.
//! イテレートしてください。
use crate::{Items, LookAheadItems};
impl<T> Default for Items<T>
where
T: std::clone::Clone,
{
fn default() -> Self {
Items {
items: Vec::new(),
look_ahead_items: LookAheadItems::new(0, &vec![]),
look_ahead_size: 4,
}
}
}
impl<T> Iterator for Items<T>
where
    T: std::clone::Clone,
{
    type Item = LookAheadItems<T>;
    // Each call yields a look-ahead window over the underlying items.
    // The return type is `Option<Self::Item>`:
    // * `None` is returned once the iteration is finished,
    // * otherwise the next window is returned wrapped in `Some`.
    fn next(&mut self) -> Option<LookAheadItems<T>> {
        // Collect up to `look_ahead_size` items starting at the current
        // index; near the end of the vector the window is simply shorter.
        let num = self.look_ahead_size;
        if self.look_ahead_items.index < self.items.len() {
            let mut vec = Vec::new();
            for i in self.look_ahead_items.index..self.look_ahead_items.index + num {
                if i < self.items.len() {
                    vec.push(self.items[i].clone());
                }
            }
            // Snapshot the window at the current position, then advance by
            // one so the next call produces the next overlapping window.
            let m = LookAheadItems::new(self.look_ahead_items.index, &vec);
            self.look_ahead_items.index += 1;
            Some(m)
        } else {
            None
        }
    }
}
impl<T> Items<T> where T: std::clone::Clone {}
| true |
a6e7d852464ae58d00f79647364d26828a7ffc6b
|
Rust
|
hatzel/domafic-rs
|
/src/keys.rs
|
UTF-8
| 1,726 | 3.234375 | 3 |
[
"MIT"
] |
permissive
|
/// Maximum number of keys that fit on the fixed-size stack.
const KEY_STACK_LEN: u32 = 32;

#[derive(Clone, Copy, Debug, Hash, Ord, PartialOrd, Eq, PartialEq)]
pub struct Keys {
    pub size: u32,
    pub stack: [u32; KEY_STACK_LEN as usize],
}

impl Keys {
    /// Create a new `Keys` with no elements
    #[cfg_attr(not(target_os = "emscripten"), allow(dead_code))]
    pub fn new() -> Keys {
        Keys {
            size: 0,
            stack: [0; KEY_STACK_LEN as usize],
        }
    }

    /// Push a new key onto the `Keys`.
    /// Immutable: returns a copy with `key` on top; `self` is unchanged.
    #[cfg_attr(not(target_os = "emscripten"), allow(dead_code))]
    pub fn push(&self, key: u32) -> Keys {
        debug_assert!(
            self.size < KEY_STACK_LEN,
            "Only {} elements fit on a `Keys`. Your structure may be too deep.",
            KEY_STACK_LEN
        );
        // `Keys` is `Copy`, so this duplicates the whole stack rather than
        // borrowing from `self`.
        let mut next = *self;
        next.stack[next.size as usize] = key;
        next.size += 1;
        next
    }
}
/// An iterator over keys into a `DomNode` tree.
pub struct KeyIter(Keys, u32);
impl Iterator for KeyIter {
type Item = usize;
fn next(&mut self) -> Option<Self::Item> {
if self.1 < self.0.size {
let result = Some(self.0.stack[self.1 as usize] as usize);
self.1 += 1;
result
} else {
None
}
}
fn size_hint(&self) -> (usize, Option<usize>) {
let remaining = (self.0.size - self.1) as usize;
(remaining, Some(remaining))
}
}
impl ExactSizeIterator for KeyIter {}
impl IntoIterator for Keys {
type Item = usize;
type IntoIter = KeyIter;
/// Returns an iterator over the keys from bottom to top
fn into_iter(self) -> KeyIter {
KeyIter(self, 0)
}
}
| true |
a9a17aa194cc1f83e674a7cb3b3d9df5e95ad0fe
|
Rust
|
mxseev/derpiboorust
|
/src/request/lists.rs
|
UTF-8
| 1,301 | 3.234375 | 3 |
[
"MIT"
] |
permissive
|
use failure::Error;
use reqwest::Url;
use super::{build_url, response::ListsResponse, QueryPairs, Request};
/// Request for fetching image lists (`/lists.json`).
/// ```
/// use derpiboorust::Lists;
///
/// let request = Lists::new()
///     .page(2)
///     .last("2d");
/// ```
#[derive(Debug)]
pub struct Lists<'a> {
    // Accumulated URL query parameters for this request.
    query: QueryPairs<'a>,
}
impl<'a> Lists<'a> {
    /// Build an empty `/lists.json` request with no parameters set.
    #[allow(clippy::new_without_default)]
    pub fn new() -> Self {
        Lists {
            query: QueryPairs::new(),
        }
    }
    /// Select which page of results to fetch.
    pub fn page(mut self, page: u64) -> Self {
        self.query.insert("page", page);
        self
    }
    /// Restrict results to a trailing sampling window, e.g. "2w", "3d", "12h".
    pub fn last(mut self, last: &'a str) -> Self {
        self.query.insert("last", last);
        self
    }
}
impl<'a> Request<'a> for Lists<'a> {
    type ResponseValue = ListsResponse;
    /// Assemble the final `lists.json?...` URL from the accumulated query.
    fn build(&self) -> Result<Url, Error> {
        build_url("lists.json", &self.query)
    }
}
#[test]
fn request() {
    // The builder must produce the same URL as hand-written query pairs.
    let req = Lists::new().page(2).last("2w").build().unwrap();
    let expected = Url::parse_with_params(
        "https://derpibooru.org/lists.json",
        &[("page", "2"), ("last", "2w")],
    )
    .unwrap();
    assert_eq!(req, expected);
}
| true |
1994e2c4ba6f727c0c53917de83211f1f70d3095
|
Rust
|
jdrtommey/rustycoils
|
/src/fieldcalc.rs
|
UTF-8
| 51,571 | 2.734375 | 3 |
[
"MIT"
] |
permissive
|
// REFERENCE PAPER = "Off-Axis Expansion Solution of Laplace's Equation: Application to Accurate
// and Rapid Calculation of Coil Magnetic Fields" by Robert H. Jackson.
// each primitive impliments Prititive trait which contains a get_fields(z,r,tol) function
// which returns the axial and radial fields.
// Primitives are described in Fig.1. of reference paper and include
// an ideal current loop, an annular, a thin solenoid and a coil.
pub mod primitives {
use super::polynomials;
use std::fmt;
const MU0: f64 = 1.25663706212e-6;
    /// Common behaviour for all primitive shapes: given each shape's hardwired
    /// axial derivatives, sum the off-axis power series (Eq. 14a/14b of the
    /// reference paper) for the axial and radial field components.
    pub trait Primitive {
        // finds the value of the nth derivative at location x, specific to each primitive
        fn get_nth_derivative(&self, n: &u32, x: &f64) -> f64;
        // maximum number of series terms to sum before giving up on `tol`
        fn get_max_depth(&self) -> u32;
        // impliments equation 14a in reference paper.
        // Even-order axial derivatives enter with terms
        // (-1)^k * r^(2k) / (2^(2k) * (k!)^2); summation stops when successive
        // partial sums differ by less than `tol` or `max_depth` is reached.
        fn get_axial_field(&self, z: &f64, r: &f64, tol: &f64) -> f64 {
            let mut answer = 0.0;
            let mut counter = 0;
            let mut diff = 1.0;
            while diff > *tol && counter < self.get_max_depth() {
                let old_answer = answer;
                let next_derivative = self.get_nth_derivative(&(2 * counter as u32), &z);
                let denominator = f64::powi(2.0, 2 * counter as i32)
                    * ((factorial(counter as u32) * factorial(counter as u32)) as f64);
                let numerator =
                    i32::pow(-1, counter as u32) as f64 * f64::powi(*r, 2 * counter as i32);
                counter += 1;
                answer += next_derivative * numerator / denominator;
                diff = f64::abs(answer - old_answer);
            }
            answer
        }
        // Radial companion series: odd-order derivatives with terms
        // (-1)^(k+1) * r^(2k+1) / (2^(2k+1) * k! * (k+1)!).
        fn get_radial_field(&self, z: &f64, r: &f64, tol: &f64) -> f64 {
            let mut answer = 0.0;
            let mut counter = 0;
            let mut diff = 1.0;
            while diff > *tol && counter < self.get_max_depth() {
                let old_answer = answer;
                let next_derivative = self.get_nth_derivative(&(1 + 2 * counter as u32), &z);
                let denominator = f64::powi(2.0, (2 * counter + 1) as i32)
                    * ((factorial(counter as u32) * factorial((counter + 1) as u32)) as f64);
                let numerator = i32::pow(-1, 1 + counter as u32) as f64
                    * f64::powi(*r, (2 * counter + 1) as i32);
                answer += next_derivative * numerator / denominator;
                counter += 1;
                diff = f64::abs(answer - old_answer);
            }
            answer
        }
        // Convenience: both components as [axial, radial].
        fn get_fields(&self, z: &f64, r: &f64, tol: &f64) -> [f64; 2] {
            [
                self.get_axial_field(z, r, tol),
                self.get_radial_field(z, r, tol),
            ]
        }
    }
fn factorial(x: u32) -> u32 {
if x < 2 {
1
} else {
x * factorial(x - 1)
}
}
    // b^(n)(x) for the ideal loop, scaled by 1/norm^n, built from the
    // hardwired Table 1 polynomials of the reference paper.
    fn _get_loop_normalized_b(n: &u32, x: &f64, norm: &f64) -> f64 {
        let (poly_hcf, poly) = polynomials::primitive_polynomials::get_loop_poly(*n);
        let polynomial = polynomials::SolenoidPolynomial::new(poly, poly_hcf);
        let poly_res_x = polynomial.compute(&x);
        // (1 + x^2)^(n + 3/2), written as sqrt * integer power
        let x_denom = f64::sqrt(1.0 + x * x) * f64::powi(1.0 + x * x, (n + 1) as i32);
        let total_norm = 1.0 / f64::powi(*norm, *n as i32);
        total_norm * (poly_res_x / x_denom)
    }
    //function returns b^(n)(x) normalised by 1/(norm^n) for the annular disk.
    // Uses the Table 2 P/Q polynomial pair, which share an argument but have
    // different denominators (Q carries one extra factor of sqrt(x^2 + 1)).
    fn _get_annular_normalized_b(n: &u32, x: &f64, norm: &f64) -> f64 {
        let (p_hcf, p_poly, q_hcf, q_poly) =
            polynomials::primitive_polynomials::get_annular_poly(*n);
        let p_polynomial = polynomials::SolenoidPolynomial::new(p_poly, p_hcf);
        let q_polynomial = polynomials::SolenoidPolynomial::new(q_poly, q_hcf);
        let x_res_p = p_polynomial.compute(&x);
        let x_res_q = q_polynomial.compute(&x);
        // (sqrt(x^2+1) + 1)^n * (x^2+1)^n
        fn p_denom(x: &f64, n: &u32) -> f64 {
            f64::powi(f64::sqrt(x * x + 1.0) + 1.0, *n as i32) * f64::powi(x * x + 1.0, *n as i32)
        }
        // p_denom with one extra factor of sqrt(x^2+1)
        fn q_denom(x: &f64, n: &u32) -> f64 {
            f64::powi(f64::sqrt(x * x + 1.0) + 1.0, *n as i32)
                * f64::powi(x * x + 1.0, *n as i32)
                * f64::sqrt(x * x + 1.0)
        }
        let b_deriv_x = x_res_p / p_denom(&x, &n) + x_res_q / q_denom(&x, &n);
        (1.0 / f64::powi(*norm, *n as i32)) * b_deriv_x
    }
// IDEAL LOOP PRIMITIVE.
#[derive(Debug, PartialEq, Clone, Copy)]
pub struct IdealWire {
radius: f64, //radius of the wire loop
current: f64, //current flowing in the wire
z0: f64, //placement of the wire along the axis of symmetry
max_depth: u32,
}
    impl IdealWire {
        /// Construct a loop of `radius` carrying `current`, centred at `z0`.
        pub fn new(radius: f64, current: f64, z0: f64) -> IdealWire {
            IdealWire {
                radius,
                current,
                z0,
                max_depth: 8,
            }
        }
        /// Replace the loop radius.
        pub fn set_radius(&mut self, radius: f64) {
            self.radius = radius;
        }
        /// Replace the loop current.
        pub fn set_current(&mut self, current: f64) {
            self.current = current;
        }
        /// Move the loop along the symmetry axis.
        pub fn set_z0(&mut self, z0: f64) {
            self.z0 = z0;
        }
    }
impl Primitive for IdealWire {
fn get_nth_derivative(&self, n: &u32, z: &f64) -> f64 {
let x = (z - self.z0) / self.radius;
let b0 = (self.current * MU0) / (2.0 * self.radius);
let normed_b = _get_loop_normalized_b(&n, &x, &self.radius);
b0 * normed_b
}
fn get_max_depth(&self) -> u32 {
self.max_depth
}
}
    impl fmt::Display for IdealWire {
        // Human-readable one-line summary of the loop's geometry and current.
        fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
            write!(
                f,
                "Ideal Wire: radius={},axial position={},current={} ",
                self.radius, self.z0, self.current
            )
        }
    }
//THIN ANNULAR PRIMITIVE
#[derive(Debug, PartialEq, Clone, Copy)]
pub struct ThinAnnular {
radius: f64, //units of m
current_density: f64, //units of A/m
thickness: f64, //units of m
z0: f64, //units of m
max_depth: u32, //10 hardwired terms so depth of 5
}
impl ThinAnnular {
pub fn new(radius: f64, thickness: f64, current: f64, z0: f64) -> ThinAnnular {
ThinAnnular {
radius,
current_density: current / thickness,
thickness,
z0,
max_depth: 5,
}
}
pub fn set_radius(&mut self, radius: f64) {
self.radius = radius;
}
//takes a current measured in A and converts it into the current density as
//current/thickness
pub fn set_current(&mut self, current: f64) {
self.current_density = current / self.thickness;
}
pub fn set_z0(&mut self, z0: f64) {
self.z0 = z0;
}
pub fn set_thickness(&mut self, thickness: f64) {
let current = self.thickness * self.current_density; //find the total current.
self.thickness = thickness;
self.set_current(current);
}
}
    impl Primitive for ThinAnnular {
        /// nth axial derivative of B_z for the annular disk.
        ///
        /// `n == 0` is the closed-form on-axis field (Eq. 32); higher orders
        /// combine the P/Q polynomials evaluated at the inner (x) and outer
        /// (xi) normalised coordinates.
        fn get_nth_derivative(&self, n: &u32, z: &f64) -> f64 {
            let x = (z - self.z0) / self.radius;
            // rho: outer/inner radius ratio; xi: x rescaled to the outer radius
            let rho = (self.radius + self.thickness) / self.radius;
            let xi = x / rho;
            let b0 = MU0 * self.current_density / 2.0;
            // if zeroth derivative return the on axis field value Eq.32 in reference paper.
            // else compute the polynomials.
            match n {
                0 => _zeroth_order_annular(x, xi, rho, self.current_density),
                _ => {
                    let term1 = _get_annular_normalized_b(n, &x, &self.radius);
                    let term2 = _get_annular_normalized_b(n, &xi, &(self.radius + self.thickness));
                    b0 * (term1 - term2)
                }
            }
        }
        fn get_max_depth(&self) -> u32 {
            self.max_depth
        }
    }
// computes Eq. 32
fn _zeroth_order_annular(x: f64, xi: f64, rho: f64, current_density: f64) -> f64 {
let prefactor = MU0 * current_density / 2.0;
let b_factor = |i: f64| -> f64 {
1.0 / f64::sqrt(1.0 + i * i) - f64::ln(1.0 + f64::sqrt(1.0 + i * i))
};
let x_factor = b_factor(x);
let xi_factor = b_factor(xi);
let rho_factor = f64::ln(rho);
prefactor * (x_factor - xi_factor + rho_factor)
}
    impl fmt::Display for ThinAnnular {
        // Human-readable one-line summary; note `current` here is the density.
        fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
            write!(
                f,
                "ThinAnnular: radius={},thickness={},axial position={},current={} ",
                self.radius, self.thickness, self.z0, self.current_density
            )
        }
    }
// ThinSolenoid primitive.
#[derive(Debug, PartialEq, Clone, Copy)]
pub struct ThinSolenoid {
radius: f64,
current_density: f64,
length: f64,
z0: f64,
max_depth: u32,
}
impl ThinSolenoid {
pub fn new(radius: f64, length: f64, current: f64, z0: f64) -> ThinSolenoid {
ThinSolenoid {
radius,
current_density: current / length,
length,
z0,
max_depth: 7,
}
}
pub fn set_radius(&mut self, radius: f64) {
self.radius = radius;
}
//sets the current_density of the solenoid as current/length
pub fn set_current(&mut self, current: f64) {
self.current_density = current / self.length;
}
pub fn set_z0(&mut self, z0: f64) {
self.z0 = z0;
}
pub fn set_length(&mut self, length: f64) {
let current = self.current_density * self.length;
self.length = length;
self.set_current(current);
}
}
    impl Primitive for ThinSolenoid {
        /// nth axial derivative of B_z for the thin solenoid.
        ///
        /// `n == 0` is the closed-form on-axis field (Eq. 29); higher orders
        /// reuse the loop polynomials of order n-1 evaluated at the entrance
        /// (x) and exit (x - eta) coordinates.
        fn get_nth_derivative(&self, n: &u32, z: &f64) -> f64 {
            let x = (z - self.z0) / self.radius;
            // eta: solenoid length in units of its radius
            let eta = self.length / self.radius;
            let b0 = (self.current_density * MU0) / 2.0;
            if *n == 0 {
                let term1 = x / (f64::sqrt(1.0 + x * x));
                let term2 = (x - eta) / f64::sqrt(1.0 + (x - eta) * (x - eta));
                return b0 * (term1 - term2);
            }
            let term_x = (1.0 / f64::powi(self.radius, *n as i32))
                * _get_loop_normalized_b(&(n - 1), &x, &1.0);
            let term_eta = (1.0 / f64::powi(self.radius, *n as i32))
                * _get_loop_normalized_b(&(n - 1), &(x - eta), &1.0);
            b0 * (term_x - term_eta)
        }
        fn get_max_depth(&self) -> u32 {
            self.max_depth
        }
    }
    impl fmt::Display for ThinSolenoid {
        // Human-readable one-line summary; note `current` here is the density.
        fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
            write!(
                f,
                "ThinSolenoid: radius={},length={},axial position={},current={} ",
                self.radius, self.length, self.z0, self.current_density
            )
        }
    }
// coil primitive.
#[derive(Debug, PartialEq, Clone, Copy)]
pub struct CoilSolenoid {
radius: f64,
current_density: f64,
length: f64,
thickness: f64,
z0: f64,
max_depth: u32,
}
impl CoilSolenoid {
pub fn new(
radius: f64,
length: f64,
thickness: f64,
current: f64,
z0: f64,
) -> CoilSolenoid {
CoilSolenoid {
radius,
current_density: current / (thickness * length),
thickness,
length,
z0,
max_depth: 5,
}
}
pub fn set_radius(&mut self, radius: f64) {
self.radius = radius;
}
// set current density as current/(thickness*length)
pub fn set_current(&mut self, current: f64) {
self.current_density = current / (self.thickness * self.length);
}
pub fn set_z0(&mut self, z0: f64) {
self.z0 = z0;
}
pub fn set_length(&mut self, length: f64) {
let current = self.current_density * self.length * self.thickness;
self.length = length;
self.set_current(current);
}
pub fn set_thickness(&mut self, thickness: f64) {
let current = self.current_density * self.length * self.thickness;
self.thickness = thickness;
self.set_current(current);
}
}
    impl Primitive for CoilSolenoid {
        /// nth axial derivative of B_z for the full coil, combining annular
        /// terms at entrance/exit and at inner/outer radii.
        fn get_nth_derivative(&self, n: &u32, z: &f64) -> f64 {
            let x = (z - self.z0) / self.radius;
            let eta = self.length / self.radius;
            let rho = (self.radius + self.thickness) / self.radius;
            let xi = x / rho;
            let b0 = self.current_density * MU0 / 2.0;
            // the zeroth derivative is the axial equation given in Eq. 31.
            if *n == 0 {
                return _zeroth_order_coil(self.current_density, self.radius, x, rho, eta);
            }
            //The first derivative is found from the equations for the field of an Annular Eq.32 and Eq.34
            if *n == 1 {
                let term_entrance = _annular_normalized(x) - _annular_normalized(xi) - f64::ln(rho);
                let term_exit = _annular_normalized(x - eta)
                    - _annular_normalized(xi - eta / rho)
                    - f64::ln(rho);
                return b0 * (term_entrance - term_exit);
            }
            //All other orders are computed from the polynomials of an annular disk.
            // terms 1/2: entrance/exit at the inner radius; terms 3/4: the same
            // pair at the outer radius (xi coordinates)
            let term1 =
                (1.0 / f64::powi(self.radius, *n as i32)) * _get_annular_normalized_b(n, &x, &1.0);
            let term2 = (1.0 / f64::powi(self.radius + self.thickness, *n as i32))
                * _get_annular_normalized_b(n, &(x - eta), &1.0);
            let term3 =
                (1.0 / f64::powi(self.radius, *n as i32)) * _get_annular_normalized_b(n, &xi, &1.0);
            let term4 = (1.0 / f64::powi(self.radius + self.thickness, *n as i32))
                * _get_annular_normalized_b(n, &(xi - eta / rho), &1.0);
            b0 * (term1 - term2 - (term3 - term4))
        }
        fn get_max_depth(&self) -> u32 {
            self.max_depth
        }
    }
//returns the normalized b(x) for the annular.
fn _annular_normalized(x: f64) -> f64 {
1.0 / f64::sqrt(1.0 + x * x) - f64::ln(1.0 + f64::sqrt(1.0 + x * x))
}
// impliment the coil axis magnetic field.
fn _zeroth_order_coil(current_density: f64, radius: f64, x: f64, rho: f64, eta: f64) -> f64 {
let physical = MU0 * current_density * radius / 2.0;
let term1_numerator = rho + f64::sqrt(rho * rho + x * x);
let term1_denom = 1.0 + f64::sqrt(1.0 + x * x);
let term1 = x * f64::ln(term1_numerator / term1_denom);
let term2_numerator = rho + f64::sqrt(f64::powi(rho, 2) + (x - eta) * (x - eta));
let term2_denom = 1.0 + f64::sqrt(1.0 + (x - eta) * (x - eta));
let term2 = (x - eta) * f64::ln(term2_numerator / term2_denom);
physical * (term1 - term2)
}
    impl fmt::Display for CoilSolenoid {
        // Human-readable one-line summary; note `current` here is the density.
        fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
            write!(
                f,
                "CoilSolenoid: radius={},length={},thickness={},axial position={},current={} ",
                self.radius, self.length, self.thickness, self.z0, self.current_density
            )
        }
    }
#[cfg(test)]
mod test_coil {
use super::*;
//The axial field for a coil if given in Eq. 31.
//
//assuming the following physical dimensions for the coils
//
// radius = 0.5,thickness = 1.0, length = 2.0, z=1.0,z0=0.0,current_density=4.0
//
// x = 2.0
// eta = 4.0
// rho = 3.0
#[test]
fn test_zeroth_order_derivative() {
let current = 4.0;
let z = 1.0;
let z0 = 0.0;
let length = 2.0;
let radius = 0.5;
let thickness = 1.0;
let x = (z - z0) / radius;
let eta = length / radius;
let rho = (radius + thickness) / radius;
let zeroth = _zeroth_order_coil(current, radius, x, rho, eta);
assert_eq!(
zeroth / MU0,
4.0 * f64::ln((3.0 + f64::sqrt(13.0)) / (1.0 + f64::sqrt(5.0)))
);
}
}
#[cfg(test)]
mod test_thin_solenoid {
use super::*;
// the field on axis is analytical and can be found using Eq. 29
//
// B0*(x/(sqrt(1+x*x)) - (x-eta)/(sqrt(1+(x-eta)^2)))
//
// where x = z-z0/radius
// eta = length/radius
//
// if let current = 2.0,z = 1.0, z0 = 0.0, length = 2.0, radius = 0.5
// x = 2
// eta = 4
//
// B/MU0 = 2/sqrt(5) - (2-4)/sqrt(5) = 4/sqrt(5)
#[test]
fn test_order_0() {
let current = 4.0;
let z = 1.0;
let z0 = 0.0;
let length = 2.0;
let radius = 0.5;
let solenoid = ThinSolenoid::new(radius, length, current, z0);
let first = solenoid.get_nth_derivative(&0, &z);
assert_eq!(first / MU0, 4.0 / f64::sqrt(5.0));
}
// the first order is given by
// (B0/r0)*(1/((1+x^2)^n+1+0.5) - 1/((1+(x-eta)^2)^n+1+0.5
//
// if same parameters as above reduces to
// 1/((5)^2.5) - 1/((5)^2.5) = 0.0
#[test]
fn test_order_1() {
let current = 4.0;
let z = 1.0;
let z0 = 0.0;
let length = 2.0;
let radius = 0.5;
let solenoid = ThinSolenoid::new(radius, length, current, z0);
let first = solenoid.get_nth_derivative(&1, &z);
assert_eq!(first / MU0, 0.0);
}
// the second order is given by
//
// P_1(x) = -3x
//
// -3x/(1+x^2)^n-0.5
// 1/(0.25) * (-6/5^2.5 - 6/5^2.5) = -48/5^2.5
#[test]
fn test_order_2() {
let current = 4.0;
let z = 1.0;
let z0 = 0.0;
let length = 2.0;
let radius = 0.5;
let solenoid = ThinSolenoid::new(radius, length, current, z0);
let second = solenoid.get_nth_derivative(&2, &z);
assert_eq!(second / MU0, -48.0 / f64::powf(5.0, 2.5));
}
#[test]
fn test_order_3() {
let current = 4.0;
let z = 1.0;
let z0 = 0.0;
let length = 2.0;
let radius = 0.5;
let solenoid = ThinSolenoid::new(radius, length, current, z0);
let second = solenoid.get_nth_derivative(&3, &z);
assert_eq!(second / MU0, 0.0);
}
#[test]
fn test_order_4() {
let current = 4.0;
let z = 1.0;
let z0 = 0.0;
let length = 2.0;
let radius = 0.5;
let solenoid = ThinSolenoid::new(radius, length, current, z0);
let second = solenoid.get_nth_derivative(&4, &z);
assert_eq!(
second / MU0,
16.0 * (-26.0 * 15.0 * 2.0 / f64::powf(5.0, 4.5))
);
}
// test against the on-axis magnetic field equation given in Eq.29
//
// B_z(0,z) = {MU0*J/2.0][x/sqrt(1+x^2) - (x-eta)/sqrt(1+(x-eta)^2)
fn analytical(current_density: f64, x: f64, eta: f64) -> f64 {
(MU0 * current_density / 2.0)
* (x / f64::sqrt(1.0 + x * x) - (x - eta) / f64::sqrt(1.0 + (x - eta) * (x - eta)))
}
#[test]
fn test_on_axis_field() {
let current = 4.0;
let radius = 0.5;
let length = 2.0;
let z = 1.0;
let z0 = 0.0;
let solenoid = ThinSolenoid::new(radius, length, current, z0);
let x = (z - solenoid.z0) / solenoid.radius;
let eta = solenoid.length / solenoid.radius;
let ana_answer = analytical(current / length, x, eta);
assert_eq!(ana_answer, solenoid.get_axial_field(&z, &0.0, &1e-6));
}
}
#[cfg(test)]
mod test_thin_annular {
use super::*;
// To test the thin annular derivative function have hand computed the expressions up to
// order 5.
// the zeroth order is analytically given in Eq.32. ERRATA. paper shows - ln(rho) when its
// positive.
// [MU0*J/2]*( 1/(sqrt(1+x*x)) - ln(1+sqrt(1+x*x)) + ln(1+sqrt(1+xi*xi)) -
// 1/(sqrt(1+xi*xi)) + ln(rho))
// assuming z = 1.0, z0 = 0.0, radius = 1.0, thickness=1.0,J=1
// x = (z -z0)/radius => x = 1.0
// rho = (radius+thickness)/radius => 2.0
// xi = (x/rho) => 0.5
// B0/MU0 = 1/2 *((1/sqrt(2) - ln(1+sqrt(2)) - 1/sqrt(1.25) + ln(1+sqrt(1.25)) + ln(2) ) =
// -0.5056764413
#[test]
fn annular_disk_order_0() {
let radius = 1.0;
let thickness = 1.0;
let current_density = 1.0;
let z0 = 0.0;
let z = 1.0;
let annular = ThinAnnular::new(radius, thickness, current_density, z0);
let answer = annular.get_nth_derivative(&0, &z);
let error = f64::abs(answer / MU0 - (0.18747073917294));
assert!(error < 1e-9);
}
//the first order can be computed from the polynomial experssions given in Eq.36 using
//polynomials given in Table 2.
//
//P_1(x) = -x = -1.0
//Q_1(x) = -x^3-2x = -3.0
//
//P_1(xi) = -0.5
//Q_1(xi) = -1.125
//Using the same initial conditions stated in above test:
//The nth deriviative is given by:
// b_1(x) => P_1(x)/(2sqrt(2)+2) + Q_1(x)/(4+2sqrt(2))
// b_1(xi)=> P_1(xi)/(sqrt(1.25)+1) + Q_1(xi)/(1.25(1.25+sqrt(1.25)))
//
// => B_0/MU0 = -1.0/(2sqrt(2)+2) + -3.0/(4+2sqrt(2))-( (1/2.0)*((-0.5)/(sqrt(1.25)+1) +
// -1.125/(1.25*(1.25+sqrt(1.25))))) = -0.1809941811033...
#[test]
fn annular_disk_order_1() {
let radius = 1.0;
let thickness = 1.0;
let current_density = 1.0;
let z0 = 0.0;
let z = 1.0;
let annular = ThinAnnular::new(radius, thickness, current_density, z0);
let answer = annular.get_nth_derivative(&1, &z);
let error = f64::abs(answer / MU0 - (-0.1809941811033));
assert!(error < 1e-9);
}
//test the second order derivative for the same parameters again.
//
// P_2(x) = x^4 + 4x^2 - 3
// q_2(x) = 2x^4 + 2x^2 - 3
//
// P_2(1.0) = 2
// Q_2(1.0) = 1
//
// P_2(0.5) = -1.9375
// Q_2(0.5) = -2.375
//
// denominator for P is (sqrt(1+x^2)+1)^n * (x*x+1)^2
// denominator for Q is sqrt(x*x+1) * (sqrt(1+x^2)+1)^n * (x*x+1)^2
//
// P(1.0) = 2/((sqrt(2)+1)^2 * 4)
// Q(1.0) = 1/(4*sqrt(2)*(sqrt(2)+1)^2 )
// P(0.5) = -1.9375/ (1.25^2 *(sqrt(1.25+1)^2 :)
// Q(0.5) = -2.375/(sqrt(1.25)*(1.25^2 *(sqrt(1.25+1)^2)
// answer =0.5*(P(1.0)+Q(1.0) - 0.25*(P(0.5)-Q(0.5))) = 0.130491663998
#[test]
fn annular_disk_order_2() {
let radius = 1.0;
let thickness = 1.0;
let current_density = 1.0;
let z0 = 0.0;
let z = 1.0;
let annular = ThinAnnular::new(radius, thickness, current_density, z0);
let answer = annular.get_nth_derivative(&2, &z);
let error = f64::abs(answer / MU0 - (0.130491663998));
assert!(error < 1e-9);
}
}
#[cfg(test)]
mod test_ideal_loop {
use super::*;
//test against hand computed values for the derivatives using
//equations in reference paper.
#[test]
fn test_field_derivative() {
let radius = 1.0;
let current = 3.0;
let z0 = 0.0;
let myloop = IdealWire::new(radius, current, z0);
let answer = (MU0 * current) / (2.0 * radius) * (-3.0) / (f64::sqrt(2.0) * 4.0);
let first_derivative = myloop.get_nth_derivative(&1, &1.0);
let diff = first_derivative - answer;
let mut boo = false;
if diff < 1e-10 {
boo = true;
}
assert!(boo);
}
#[test]
fn test_field_derivative_2() {
let radius = 1.0;
let current = 3.0;
let z0 = 1.0;
let myloop = IdealWire::new(radius, current, z0);
let answer = (MU0 * current) / (2.0 * radius) * (-3.0) / (f64::sqrt(5.0) * 25.0);
let first_derivative = myloop.get_nth_derivative(&1, &3.0);
let diff = first_derivative - answer;
let mut boo = false;
if diff < 1e-10 {
boo = true;
}
assert!(boo);
}
#[test]
fn test_field_derivative_3() {
let radius = 1.0;
let current = 3.0;
let z0 = 1.0;
let myloop = IdealWire::new(radius, current, z0);
let answer = (MU0 * current) / (2.0 * radius) * (3.0 * 15.0) / (f64::sqrt(5.0) * 125.0);
let second_derivative = myloop.get_nth_derivative(&2, &3.0);
let diff = second_derivative - answer;
let mut boo = false;
if diff < 1e-10 {
boo = true;
}
assert!(boo);
}
#[test]
fn test_field_derivative_4() {
let radius = 1.0;
let current = 3.0;
let z0 = 1.0;
let myloop = IdealWire::new(radius, current, z0);
let answer =
(MU0 * current) / (2.0 * radius) * (-15.0 * 26.0) / (f64::sqrt(5.0) * 625.0);
let second_derivative = myloop.get_nth_derivative(&3, &3.0);
let diff = second_derivative - answer;
let mut boo = false;
if diff < 1e-10 {
boo = true;
}
assert!(boo);
}
#[test]
fn test_factorial() {
let factorial_results = vec![1, 1, 2, 6, 24, 120, 720, 5040, 40320];
let mut res: Vec<u32> = Vec::new();
for i in 0..9 {
res.push(factorial(i));
}
assert_eq!(factorial_results, res);
}
//test the on axis field against the anayltical field given in Eq. 23 of reference paper.
#[test]
fn test_onaxis_field_ideal_loop() {
fn on_axis_field(z: &f64, current: &f64, radius: &f64, z0: &f64) -> f64 {
let physical_part = 1.0 * current * MU0 / 2.0;
let numerator = radius * radius;
let demoninator = f64::powf(radius * radius + (z - z0) * (z - z0), 1.5);
physical_part * numerator / demoninator
}
let current = 1.0;
let radius = 1.0;
let z0 = 0.0;
let myloop = IdealWire::new(radius, current, z0);
let z_positions = vec![
-3.0, -2.5, -2.0, -1.5, -1.0, -0.5, 0.0, 0.5, 1.0, 1.5, 2.0, 2.5, 3.0,
];
let mut ana_res: Vec<f64> = Vec::new();
let mut res: Vec<f64> = Vec::new();
let mut diff_vec: Vec<f64> = Vec::new();
for z in z_positions {
let ana = on_axis_field(&z, ¤t, &radius, &z0);
ana_res.push(ana);
let comp = myloop.get_axial_field(&z, &0.0, &1e-6);
res.push(comp);
diff_vec.push(f64::abs(comp - ana));
}
diff_vec = diff_vec.into_iter().filter(|x| x < &&1e-6).collect();
assert_eq!(diff_vec.len(), 13);
}
#[test]
fn test_onaxis_field_ideal_loop2() {
fn on_axis_field(z: &f64, current: &f64, radius: &f64, z0: &f64) -> f64 {
let physical_part = 1.0 * current * MU0 / 2.0;
let numerator = radius * radius;
let demoninator = f64::powf(radius * radius + (z - z0) * (z - z0), 1.5);
physical_part * numerator / demoninator
}
let current = 1.3;
let radius = 1.9;
let z0 = 1.7;
let myloop = IdealWire::new(radius, current, z0);
let z_positions = vec![
-3.1, -2.4, -2.2, -1.8, -1.6, -0.33, 0.020, 0.5234, 1.340, 1.534, 2.034, 2.5234,
3.0234,
];
let mut ana_res: Vec<f64> = Vec::new();
let mut res: Vec<f64> = Vec::new();
let mut diff_vec: Vec<f64> = Vec::new();
for z in z_positions {
let ana = on_axis_field(&z, ¤t, &radius, &z0);
ana_res.push(ana);
let comp = myloop.get_axial_field(&z, &0.0, &1e-6);
res.push(comp);
diff_vec.push(f64::abs(comp - ana));
}
diff_vec = diff_vec.into_iter().filter(|x| x < &&1e-6).collect();
assert_eq!(diff_vec.len(), 13);
}
#[test]
fn test_onaxis_field_ideal_loop3_check_not_equal_when_anayltic_is_wrong() {
fn on_axis_field(z: &f64, current: &f64, radius: &f64, z0: &f64) -> f64 {
let physical_part = 3.8 * 1.0 * current * MU0 / 2.0;
let numerator = radius * radius;
let demoninator = f64::powf(radius * radius + (z - z0) * (z - z0), 1.5);
physical_part * numerator / demoninator
}
let current = 1.3;
let radius = 1.9;
let z0 = 1.7;
let myloop = IdealWire::new(radius, current, z0);
let z_positions = vec![
-3.1, -2.4, -2.2, -1.8, -1.6, -0.33, 0.020, 0.5234, 1.340, 1.534, 2.034, 2.5234,
3.0234,
];
let mut ana_res: Vec<f64> = Vec::new();
let mut res: Vec<f64> = Vec::new();
let mut diff_vec: Vec<f64> = Vec::new();
for z in z_positions {
let ana = on_axis_field(&z, ¤t, &radius, &z0);
ana_res.push(ana);
let comp = myloop.get_axial_field(&z, &0.0, &1e-6);
res.push(comp);
diff_vec.push(f64::abs(comp - ana));
}
diff_vec = diff_vec.into_iter().filter(|x| x < &&1e-6).collect();
assert!(diff_vec.len() < 13);
}
fn on_axis_field(z: &f64, current: &f64, radius: &f64, z0: &f64) -> f64 {
let physical_part = 1.0 * current * MU0 / 2.0;
let numerator = radius * radius;
let demoninator = f64::powf(radius * radius + (z - z0) * (z - z0), 1.5);
physical_part * numerator / demoninator
}
fn off_axis_field(z: &f64, r: &f64, current: &f64, radius: &f64, z0: &f64) -> f64 {
let x = (z - z0) / radius;
let b0 = current * MU0 / (2.0 * radius);
let term1 = 1.0 / (f64::powf(1.0 + x * x, 1.5));
let term2 = -(12.0 * x - 3.0) * (r * r) / (4.0 * f64::powf(1.0 + x * x, 3.5));
let term3 = (45.0 * (8.0 * x * x * x * x - 12.0 * x * x + 1.0)) * (r * r * r * r)
/ (f64::powi(2.0, 6) * f64::powf(1.0 + x * x, 5.5));
let answer = b0 * (term1 + term2 + term3);
answer
}
#[test]
fn test_off_axis_ana_equals_on_axis() {
let current = 1.0;
let radius = 1.0;
let z0 = 0.0;
let r = 0.0;
let z = 0.0;
let ana = off_axis_field(&z, &r, ¤t, &radius, &z0);
let ana2 = on_axis_field(&z, ¤t, &radius, &z0);
assert_eq!(ana, ana2);
}
#[test]
fn test_off_axis() {
let current = 1.0;
let radius = 1.0;
let z0 = 0.0;
let r = 0.001;
let z = 0.0;
let myloop = IdealWire::new(radius, current, z0);
let ans = myloop.get_axial_field(&z, &r, &1e-10);
let ana = off_axis_field(&z, &r, ¤t, &radius, &z0);
let percentage_error = (ans - ans) / ((ans + ana) / 2.0);
assert!(percentage_error < 1e-3);
}
#[test]
fn test_off_axis1() {
let current = 1.0;
let radius = 1.0;
let z0 = 0.0;
let r = 0.01;
let z = 0.0;
let myloop = IdealWire::new(radius, current, z0);
let ans = myloop.get_axial_field(&z, &r, &1e-6);
let ana = off_axis_field(&z, &r, ¤t, &radius, &z0);
let percentage_error = f64::abs((ans - ana) / ((ans + ana) / 2.0));
assert!(percentage_error < 1e-3);
}
#[test]
fn test_off_axis2() {
let current = 1.0;
let radius = 1.0;
let z0 = 0.0;
let r = 0.01;
let z = 0.05;
let myloop = IdealWire::new(radius, current, z0);
let ans = myloop.get_axial_field(&z, &r, &1e-6);
let ana = off_axis_field(&z, &r, ¤t, &radius, &z0);
let percentage_error = f64::abs((ans - ana) / ((ans + ana) / 2.0));
assert!(percentage_error < 1e-3);
}
}
}
mod polynomials {
pub struct SolenoidPolynomial {
coefficients: Vec<f64>,
hcf: f64,
//number_terms: usize,
}
impl SolenoidPolynomial {
pub fn new(coefficients: Vec<f64>, hcf: f64) -> SolenoidPolynomial {
SolenoidPolynomial { coefficients, hcf }
}
// compute the sum of the polynomial using Horner's Method.
pub fn compute(&self, value: &f64) -> f64 {
let answer = horners_method(&self.coefficients, *value);
answer * self.hcf
}
}
// given a polynomial containting a0 + x * a1 + x**2 * a2 + ....... x^n * an
// compute the value for value
// assumes all the coefficient terms are present even if value is 0.
fn horners_method(coefficients: &[f64], value: f64) -> f64 {
let length = coefficients.len();
let mut answer: f64 = coefficients[length - 1];
for i in 1..length {
let j = length - 1 - i;
answer = answer * value + coefficients[j]
}
answer
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_horners_method() {
let mycoeffs = vec![0.0, 1.0, 2.0, 3.0, 4.0, 5.0];
let answer = horners_method(&mycoeffs, 1.0);
assert_eq!(answer, 15.0);
}
#[test]
fn test_horners_method_2() {
let mycoeffs = vec![9.0, 1.0, 2.0, 3.0, 4.0, 5.0];
let answer = horners_method(&mycoeffs, 1.0);
assert_eq!(answer, 24.0);
}
#[test]
fn test_horners_method_3() {
let mycoeffs = vec![0.0, 1.0, 2.0, 3.0, 4.0, 5.0];
let answer = horners_method(&mycoeffs, 2.0);
assert_eq!(answer, 258.0);
}
#[test]
fn test_horners_method_6() {
let mycoeffs = vec![0.0, 1.0, 2.0, 3.0, 4.0, 5.0];
let answer = horners_method(&mycoeffs, -2.0);
assert_eq!(answer, -114.0);
}
#[test]
fn test_horners_method_4() {
let mycoeffs = vec![9.0];
let answer = horners_method(&mycoeffs, 1.0);
assert_eq!(answer, 9.0);
}
#[test]
fn test_horners_method_5() {
let mycoeffs = vec![9.0];
let answer = horners_method(&mycoeffs, 1.0);
assert_eq!(answer, 9.0);
}
#[test]
fn test_polynomial_1() {
let mycoeffs = vec![0.0, 1.0, 2.0, 3.0, 4.0, 5.0];
let mypolynomial = SolenoidPolynomial::new(mycoeffs, 2.0);
let answer = mypolynomial.compute(&2.0);
assert_eq!(answer, 516.0);
}
#[test]
fn test_polynomial_2() {
let mycoeffs = vec![0.0, 1.0, 0.0, 3.0, 4.0, 5.0];
let mypolynomial = SolenoidPolynomial::new(mycoeffs, 2.0);
let answer = mypolynomial.compute(&2.0);
assert_eq!(answer, 500.0);
}
#[test]
fn test_polynomial_3() {
let mycoeffs = vec![0.9, 1.0, 0.0, 3.0, 4.0, 5.0];
let mypolynomial = SolenoidPolynomial::new(mycoeffs, 2.0);
let answer = mypolynomial.compute(&2.0);
assert_eq!(answer, 501.8);
}
}
// module containing the hardwired polynomials for the primitive shapes
pub mod primitive_polynomials {
// define the polynomial coefficients for an infinitesimal loop
// values taken from Table 1 in https://ieeexplore.ieee.org/abstract/document/760416
const INFLOOP_0: [f64; 1] = [1.0];
const INFLOOP_0_HCF: f64 = 1.0;
const INFLOOP_1: [f64; 2] = [0.0, 1.0];
const INFLOOP_1_HCF: f64 = -3.0;
const INFLOOP_2: [f64; 3] = [-1.0, 0.0, 4.0];
const INFLOOP_2_HCF: f64 = 3.0;
const INFLOOP_3: [f64; 4] = [0.0, -3.0, 0.0, 4.0];
const INFLOOP_3_HCF: f64 = -15.0;
const INFLOOP_4: [f64; 5] = [1.0, 0.0, -12.0, 0.0, 8.0];
const INFLOOP_4_HCF: f64 = 45.0;
const INFLOOP_5: [f64; 6] = [0.0, 5.0, 0.0, -20.0, 0.0, 8.0];
const INFLOOP_5_HCF: f64 = -315.0;
const INFLOOP_6: [f64; 7] = [-5.0, 0.0, 120.0, 0.0, -240.0, 0.0, 64.0];
const INFLOOP_6_HCF: f64 = 315.0;
const INFLOOP_7: [f64; 8] = [0.0, -35.0, 0.0, 280.0, 0.0, -336.0, 0.0, 64.0];
const INFLOOP_7_HCF: f64 = -2835.0;
const INFLOOP_8: [f64; 9] = [7.0, 0.0, -280.0, 0.0, 1120.0, 0.0, -896.0, 0.0, 128.0];
const INFLOOP_8_HCF: f64 = 14175.0;
const INFLOOP_9: [f64; 10] = [
0.0, 63.0, 0.0, -840.0, 0.0, 2016.0, 0.0, -1152.0, 0.0, 128.0,
];
const INFLOOP_9_HCF: f64 = -155925.0;
const INFLOOP_10: [f64; 11] = [
-21.0, 0.0, 1260.0, 0.0, -8400.0, 0.0, 13440.0, 0.0, 5760.0, 0.0, 512.0,
];
const INFLOOP_10_HCF: f64 = 467775.0;
const INFLOOP_11: [f64; 12] = [
0.0, -231.0, 0.0, 4620.0, 0.0, -18480.0, 0.0, 21120.0, 0.0, -7040.0, 0.0, 512.0,
];
const INFLOOP_11_HCF: f64 = -6081075.0;
const INFLOOP_12: [f64; 13] = [
33.0, 0.0, -2772.0, 0.0, 27720.0, 0.0, -73920.0, 0.0, 63360.0, 0.0, -16896.0, 0.0,
1024.0,
];
const INFLOOP_12_HCF: f64 = 42567525.0;
const INFLOOP_13: [f64; 14] = [
0.0, 429.0, 0.0, -12012.0, 0.0, 72072.0, 0.0, -137280.0, 0.0, 91520.0, 0.0, -19968.0,
0.0, 1024.0,
];
const INFLOOP_13_HCF: f64 = -638512875.0;
const INFLOOP_14: [f64; 15] = [
-429.0, 0.0, 48048.0, 0.0, -672672.0, 0.0, 2690688.0, 0.0, -3843840.0, 0.0, 2050048.0,
0.0, -372736.0, 0.0, 16384.0,
];
const INFLOOP_14_HCF: f64 = 638512875.0;
const INFLOOP_15: [f64; 16] = [
0.0, -6435.0, 0.0, 240240.0, 0.0, -2018016.0, 0.0, 5765760.0, 0.0, -6406400.0, 0.0,
2795520.0, 0.0, -430080.0, 0.0, 16384.0,
];
const INFLOOP_15_HCF: f64 = -10854718875.0;
const INFLOOP_16: [f64; 17] = [
715.0,
-0.0,
-102960.0,
-0.0,
1921920.0,
-0.0,
-10762752.0,
-0.0,
23063040.0,
-0.0,
-20500480.0,
-0.0,
7454720.0,
-0.0,
-983040.0,
-0.0,
32768.0,
];
const INFLOOP_16_HCF: f64 = 97692469875.0;
const INFLOOP_17: [f64; 18] = [
-0.0,
12155.0,
-0.0,
-583440.0,
-0.0,
6534528.0,
-0.0,
-26138112.0,
-0.0,
43563520.0,
-0.0,
-31682560.0,
-0.0,
9748480.0,
-0.0,
-1114112.0,
-0.0,
32768.0,
];
const INFLOOP_17_HCF: f64 = -1856156927625.0;
const INFLOOP_18: [f64; 19] = [
-2431.0,
-0.0,
437580.0,
-0.0,
-10501920.0,
-0.0,
78414336.0,
-0.0,
-235243008.0,
-0.0,
313657344.0,
-0.0,
-190095360.0,
-0.0,
50135040.0,
-0.0,
-5013504.0,
-0.0,
131072.0,
];
const INFLOOP_18_HCF: f64 = 9280784638125.0;
const INFLOOP_19: [f64; 20] = [
-0.0,
-46189.0,
-0.0,
2771340.0,
-0.0,
-39907296.0,
-0.0,
212838912.0,
-0.0,
-496624128.0,
-0.0,
541771776.0,
-0.0,
-277831680.0,
-0.0,
63504384.0,
-0.0,
-5603328.0,
-0.0,
131072.0,
];
const INFLOOP_19_HCF: f64 = -194896477400625.0;
const INFLOOP_20: [f64; 21] = [
4199.0,
-0.0,
-923780.0,
-0.0,
27713400.0,
-0.0,
-266048640.0,
-0.0,
1064194560.0,
-0.0,
-1986496512.0,
-0.0,
1805905920.0,
-0.0,
-793804800.0,
-0.0,
158760960.0,
-0.0,
-12451840.0,
-0.0,
262144.0,
];
const INFLOOP_20_HCF: f64 = 2143861251406875.0;
pub fn get_loop_poly(n: u32) -> (f64, Vec<f64>) {
match n {
0 => (INFLOOP_0_HCF, INFLOOP_0.to_vec()),
1 => (INFLOOP_1_HCF, INFLOOP_1.to_vec()),
2 => (INFLOOP_2_HCF, INFLOOP_2.to_vec()),
3 => (INFLOOP_3_HCF, INFLOOP_3.to_vec()),
4 => (INFLOOP_4_HCF, INFLOOP_4.to_vec()),
5 => (INFLOOP_5_HCF, INFLOOP_5.to_vec()),
6 => (INFLOOP_6_HCF, INFLOOP_6.to_vec()),
7 => (INFLOOP_7_HCF, INFLOOP_7.to_vec()),
8 => (INFLOOP_8_HCF, INFLOOP_8.to_vec()),
9 => (INFLOOP_9_HCF, INFLOOP_9.to_vec()),
10 => (INFLOOP_10_HCF, INFLOOP_10.to_vec()),
11 => (INFLOOP_11_HCF, INFLOOP_11.to_vec()),
12 => (INFLOOP_12_HCF, INFLOOP_12.to_vec()),
13 => (INFLOOP_13_HCF, INFLOOP_13.to_vec()),
14 => (INFLOOP_14_HCF, INFLOOP_14.to_vec()),
15 => (INFLOOP_15_HCF, INFLOOP_15.to_vec()),
16 => (INFLOOP_16_HCF, INFLOOP_16.to_vec()),
17 => (INFLOOP_17_HCF, INFLOOP_17.to_vec()),
18 => (INFLOOP_18_HCF, INFLOOP_18.to_vec()),
19 => (INFLOOP_19_HCF, INFLOOP_19.to_vec()),
20 => (INFLOOP_20_HCF, INFLOOP_20.to_vec()),
_ => (0.0, vec![0.0]),
}
}
// define the polynomials for the P(x) olynomial defined in table 2 of reference paper.
const ANNULAR_P_1: [f64; 2] = [0.0, 1.0];
const ANNULAR_P_1_HCF: f64 = -1.0;
const ANNULAR_P_2: [f64; 5] = [-3.0, 0.0, 4.0, 0.0, 1.0];
const ANNULAR_P_2_HCF: f64 = 1.0;
const ANNULAR_P_3: [f64; 6] = [0.0, -15.0, 0.0, 2.0, 0.0, 2.0];
const ANNULAR_P_3_HCF: f64 = -3.0;
const ANNULAR_P_4: [f64; 9] = [15.0, 0.0, -100.0, 0.0, -34.0, 0.0, 12.0, 0.0, 1.0];
const ANNULAR_P_4_HCF: f64 = 6.0;
const ANNULAR_P_5: [f64; 10] = [0.0, 280.0, 0.0, -455.0, 0.0, -412.0, 0.0, 16.0, 0.0, 8.0];
const ANNULAR_P_5_HCF: f64 = -15.0;
const ANNULAR_P_6: [f64; 13] = [
-280.0, 0.0, 4515.0, 0.0, -924.0, 0.0, -4392.0, 0.0, -660.0, 0.0, 96.0, 0.0, 4.0,
];
const ANNULAR_P_6_HCF: f64 = 30.0;
const ANNULAR_P_7: [f64; 14] = [
0.0, -2520.0, 0.0, 11214.0, 0.0, 6867.0, 0.0, -6872.0, 0.0, -2976.0, 0.0, 48.0, 0.0,
16.0,
];
const ANNULAR_P_7_HCF: f64 = -315.0;
const ANNULAR_P_8: [f64; 17] = [
630.0, 0.0, -18648.0, 0.0, 26775.0, 0.0, 45288.0, 0.0, -4756.0, 0.0, -11536.0, 0.0,
-1032.0, 0.0, 80.0, 0.0, 2.0,
];
const ANNULAR_P_8_HCF: f64 = 2520.0;
const ANNULAR_P_9: [f64; 18] = [
0.0, 88704.0, 0.0, -763840.0, 0.0, 42768.0, 0.0, 1372437.0, 0.0, 422968.0, 0.0,
-226528.0, 0.0, -62592.0, 0.0, 512.0, 0.0, 128.0,
];
const ANNULAR_P_9_HCF: f64 = -2835.0;
const ANNULAR_P_10: [f64; 21] = [
-88704.0,
0.0,
4176480.0,
0.0,
-13953720.0,
0.0,
-13785255.0,
0.0,
16604060.0,
0.0,
13983440.0,
0.0,
-587520.0,
0.0,
-1329920.0,
0.0,
-80320.0,
0.0,
3840.0,
0.0,
64.0,
];
const ANNULAR_P_10_HCF: f64 = 5670.0;
//impliment the polynomials given in table 3.
//
const ANNULAR_Q_1: [f64; 4] = [0.0, 2.0, 0.0, 1.0];
const ANNULAR_Q_1_HCF: f64 = -1.0;
const ANNULAR_Q_2: [f64; 5] = [-3.0, 0.0, 2.0, 0.0, 2.0];
const ANNULAR_Q_2_HCF: f64 = 1.0;
const ANNULAR_Q_3: [f64; 8] = [0.0, -45.0, 0.0, -16.0, 0.0, 16.0, 0.0, 2.0];
const ANNULAR_Q_3_HCF: f64 = -1.0;
const ANNULAR_Q_4: [f64; 9] = [30.0, 0.0, -185.0, 0.0, 172.0, 0.0, 16.0, 0.0, 8.0];
const ANNULAR_Q_4_HCF: f64 = 3.0;
const ANNULAR_Q_5: [f64; 12] = [
0.0, 1400.0, 0.0, -1575.0, 0.0, -3372.0, 0.0, -576.0, 0.0, 144.0, 0.0, 8.0,
];
const ANNULAR_Q_5_HCF: f64 = -3.0;
const ANNULAR_Q_6: [f64; 13] = [
-560.0, 0.0, 8750.0, 0.0, 2737.0, 0.0, -10872.0, 0.0, -4896.0, 0.0, 144.0, 0.0, 48.0,
];
const ANNULAR_Q_6_HCF: f64 = 15.0;
const ANNULAR_Q_7: [f64; 16] = [
0.0, -17640.0, 0.0, 69678.0, 0.0, 89523.0, 0.0, -34984.0, 0.0, -45296.0, 0.0, -4608.0,
0.0, 512.0, 0.0, 16.0,
];
const ANNULAR_Q_7_HCF: f64 = -45.0;
const ANNULAR_Q_8: [f64; 17] = [
5040.0, 0.0, -146664.0, 0.0, 138978.0, 0.0, 488367.0, 0.0, 106808.0, 0.0, -137248.0,
0.0, -39552.0, 0.0, 512.0, 0.0, 128.0,
];
const ANNULAR_Q_8_HCF: f64 = 315.0;
const ANNULAR_Q_9: [f64; 20] = [
0.0,
798336.0,
0.0,
-6475392.0,
0.0,
-3152160.0,
0.0,
-13453605.0,
0.0,
9473720.0,
0.0,
-1364960.0,
0.0,
-1505920.0,
0.0,
-102400.0,
0.0,
6400.0,
0.0,
128.0,
];
const ANNULAR_Q_9_HCF: f64 = -315.0;
const ANNULAR_Q_10: [f64; 21] = [
-177408.0,
0.0,
8264256.0,
0.0,
-23708784.0,
0.0,
-42579438.0,
0.0,
23440285.0,
0.0,
45941900.0,
0.0,
8256400.0,
0.0,
-4528000.0,
0.0,
-937600.0,
0.0,
6400.0,
0.0,
1280.0,
];
const ANNULAR_Q_10_HCF: f64 = 2835.0;
/// Looks up the annular polynomial pair `(P, Q)` of order `n`
/// (tables 2 and 3 of the reference paper).
///
/// Returns `(P_hcf, P_coeffs, Q_hcf, Q_coeffs)` with coefficients ordered
/// lowest power first. Orders outside `1..=10` yield the
/// `(0.0, vec![0.0], 0.0, vec![0.0])` placeholder, as before.
pub fn get_annular_poly(n: u32) -> (f64, Vec<f64>, f64, Vec<f64>) {
    // Borrow the static tables first; allocate the Vecs only on success.
    let looked_up = match n {
        1 => Some((ANNULAR_P_1_HCF, ANNULAR_P_1.as_slice(), ANNULAR_Q_1_HCF, ANNULAR_Q_1.as_slice())),
        2 => Some((ANNULAR_P_2_HCF, ANNULAR_P_2.as_slice(), ANNULAR_Q_2_HCF, ANNULAR_Q_2.as_slice())),
        3 => Some((ANNULAR_P_3_HCF, ANNULAR_P_3.as_slice(), ANNULAR_Q_3_HCF, ANNULAR_Q_3.as_slice())),
        4 => Some((ANNULAR_P_4_HCF, ANNULAR_P_4.as_slice(), ANNULAR_Q_4_HCF, ANNULAR_Q_4.as_slice())),
        5 => Some((ANNULAR_P_5_HCF, ANNULAR_P_5.as_slice(), ANNULAR_Q_5_HCF, ANNULAR_Q_5.as_slice())),
        6 => Some((ANNULAR_P_6_HCF, ANNULAR_P_6.as_slice(), ANNULAR_Q_6_HCF, ANNULAR_Q_6.as_slice())),
        7 => Some((ANNULAR_P_7_HCF, ANNULAR_P_7.as_slice(), ANNULAR_Q_7_HCF, ANNULAR_Q_7.as_slice())),
        8 => Some((ANNULAR_P_8_HCF, ANNULAR_P_8.as_slice(), ANNULAR_Q_8_HCF, ANNULAR_Q_8.as_slice())),
        9 => Some((ANNULAR_P_9_HCF, ANNULAR_P_9.as_slice(), ANNULAR_Q_9_HCF, ANNULAR_Q_9.as_slice())),
        10 => Some((ANNULAR_P_10_HCF, ANNULAR_P_10.as_slice(), ANNULAR_Q_10_HCF, ANNULAR_Q_10.as_slice())),
        _ => None,
    };
    match looked_up {
        Some((p_hcf, p, q_hcf, q)) => (p_hcf, p.to_vec(), q_hcf, q.to_vec()),
        None => (0.0, vec![0.0], 0.0, vec![0.0]),
    }
}
#[cfg(test)]
mod test {
    use super::*;
    // Order 0 is expected to have a highest-common-factor of exactly 1.
    #[test]
    fn test_hcf_0() {
        let hcf = 1.0;
        assert_eq!(get_loop_poly(0).0, hcf);
    }
    #[test] //test all the hcf components
    fn test_hcf_1() {
        // Expected HCF values for orders 0..=15, in order.
        let hcfs = vec![
            1.0,
            -3.0,
            3.0,
            -15.0,
            45.0,
            -315.0,
            315.0,
            -2835.0,
            14175.0,
            -155925.0,
            467775.0,
            -6081075.0,
            42567525.0,
            -638512875.0,
            638512875.0,
            -10854718875.0,
        ];
        let mut constant_hcf = Vec::new();
        for i in 0..16 {
            constant_hcf.push(get_loop_poly(i).0);
        }
        assert_eq!(constant_hcf, hcfs);
    }
}
}
}
| true |
cabd30ffdf87307fd9c507304b77171e2b41e61d
|
Rust
|
Ameobea/noise-asmjs
|
/libcomposition/src/definition.rs
|
UTF-8
| 15,339 | 2.59375 | 3 |
[
"MIT"
] |
permissive
|
//! Defines a meta-format that can be used to represent composition trees in a serialize-able/dematerialize-able manner.
use std::convert::TryFrom;
use noise::*;
use serde_json;
use super::composition::CompositionScheme;
use super::conf::{
apply_constant_conf, apply_multifractal_conf, apply_seedable_conf, apply_worley_conf,
NoiseModuleConf,
};
use super::{
ComposedNoiseModule, CompositionTree, CompositionTreeNode, CompositionTreeNodeType, MasterConf,
};
use ir::IrNode;
use transformations::InputTransformation;
use util::{convert_setting, find_setting_by_name, Dim};
/// Defines a meta-representation of a `CompositionTree` designed to be passed into the backend from the JS frontend. It
/// contains all information necessary to construct a fully functional composition tree from scratch.
#[derive(Serialize, Deserialize)]
pub struct CompositionTreeDefinition {
    /// Tree-wide configuration shared by every node (see `MasterConf`).
    pub global_conf: MasterConf,
    /// Definition of the root node; the whole tree hangs off this.
    pub root_node: CompositionTreeNodeDefinition,
}
/// Includes every possible type of noise module available through the tool.
#[derive(Debug, PartialEq, Serialize, Deserialize)]
pub enum NoiseModuleType {
    // Placeholder for non-leaf nodes; `construct_noise_fn` panics on it.
    Composed,
    Fbm,
    Worley,
    OpenSimplex,
    Billow,
    HybridMulti,
    SuperSimplex,
    Value,
    RidgedMulti,
    BasicMulti,
    // Emits a fixed value everywhere; configured via `NoiseModuleConf::Constant`.
    Constant,
}
// wanted to do this with macros, but deriving `Serialize` and `Deserialize` seems to break that.
impl TryFrom<String> for NoiseModuleType {
type Error = String;
fn try_from(s: String) -> Result<Self, Self::Error> {
match s.as_str() {
"Composed" => Ok(NoiseModuleType::Composed),
"Fbm" => Ok(NoiseModuleType::Fbm),
"Worley" => Ok(NoiseModuleType::Worley),
"OpenSimplex" => Ok(NoiseModuleType::OpenSimplex),
"Billow" => Ok(NoiseModuleType::Billow),
"HybridMulti" => Ok(NoiseModuleType::HybridMulti),
"SuperSimplex" => Ok(NoiseModuleType::SuperSimplex),
"Value" => Ok(NoiseModuleType::Value),
"RidgedMulti" => Ok(NoiseModuleType::RidgedMulti),
"BasicMulti" => Ok(NoiseModuleType::BasicMulti),
"Constant" => Ok(NoiseModuleType::Constant),
_ => Err(format!(
"Unable to convert `moduleType` setting attribute into `NoiseModuleType`: {}",
s
)),
}
}
}
/// Converts a list of transformation definitions into live transformations.
fn build_transformations(
    transformation_definitions: Vec<InputTransformationDefinition>,
) -> Vec<InputTransformation> {
    let mut built = Vec::with_capacity(transformation_definitions.len());
    for definition in transformation_definitions {
        built.push(definition.into());
    }
    built
}
impl NoiseModuleType {
pub fn construct_noise_fn(&self, confs: &[NoiseModuleConf]) -> Box<NoiseFn<Point3<f64>>> {
match self {
&NoiseModuleType::Fbm => {
let configured_module = confs.iter().fold(Fbm::new(), |acc, conf| match conf {
&NoiseModuleConf::MultiFractal { .. } => apply_multifractal_conf(conf, acc),
&NoiseModuleConf::Seedable { .. } => apply_seedable_conf(conf, acc),
_ => {
println!("Invalid configuration provided to Fbm module: {:?}", conf);
acc
}
});
Box::new(configured_module) as Box<NoiseFn<Point3<f64>>>
}
&NoiseModuleType::Worley => {
let configured_module = confs.iter().fold(Worley::new(), |acc, conf| match conf {
&NoiseModuleConf::Seedable { .. } => apply_seedable_conf(conf, acc),
&NoiseModuleConf::Worley { .. } => apply_worley_conf(conf, acc),
_ => {
println!(
"Invalid configuration provided to Worley module: {:?}",
conf
);
acc
}
});
Box::new(configured_module)
}
&NoiseModuleType::OpenSimplex => {
let configured_module =
confs
.iter()
.fold(OpenSimplex::new(), |acc, conf| match conf {
&NoiseModuleConf::Seedable { .. } => apply_seedable_conf(conf, acc),
_ => {
println!(
"Invalid configuration provided to OpenSimplex module: {:?}",
conf
);
acc
}
});
Box::new(configured_module)
}
&NoiseModuleType::Billow => {
let configured_module = confs.iter().fold(Billow::new(), |acc, conf| match conf {
&NoiseModuleConf::MultiFractal { .. } => apply_multifractal_conf(conf, acc),
&NoiseModuleConf::Seedable { .. } => apply_seedable_conf(conf, acc),
_ => {
println!(
"Invalid configuration provided to Billow module: {:?}",
conf
);
acc
}
});
Box::new(configured_module)
}
&NoiseModuleType::HybridMulti => {
let configured_module =
confs
.iter()
.fold(HybridMulti::new(), |acc, conf| match conf {
&NoiseModuleConf::MultiFractal { .. } => {
apply_multifractal_conf(conf, acc)
}
&NoiseModuleConf::Seedable { .. } => apply_seedable_conf(conf, acc),
_ => {
println!(
"Invalid configuration provided to HybridMulti module: {:?}",
conf
);
acc
}
});
Box::new(configured_module)
}
&NoiseModuleType::SuperSimplex => {
let configured_module =
confs
.iter()
.fold(SuperSimplex::new(), |acc, conf| match conf {
&NoiseModuleConf::Seedable { .. } => apply_seedable_conf(conf, acc),
_ => {
println!(
"Invalid configuration provided to SuperSimplex module: {:?}",
conf
);
acc
}
});
Box::new(configured_module)
}
&NoiseModuleType::Value => {
let configured_module = confs.iter().fold(Value::new(), |acc, conf| match conf {
&NoiseModuleConf::Seedable { .. } => apply_seedable_conf(conf, acc),
_ => {
println!("Invalid configuration provided to Value module: {:?}", conf);
acc
}
});
Box::new(configured_module)
}
&NoiseModuleType::RidgedMulti => {
let configured_module =
confs
.iter()
.fold(RidgedMulti::new(), |acc, conf| match conf {
&NoiseModuleConf::MultiFractal { .. } => {
apply_multifractal_conf(conf, acc)
}
&NoiseModuleConf::Seedable { .. } => apply_seedable_conf(conf, acc),
_ => {
println!(
"Invalid configuration provided to RidgedMulti module: {:?}",
conf
);
acc
}
});
Box::new(configured_module)
}
&NoiseModuleType::BasicMulti => {
let configured_module =
confs
.iter()
.fold(BasicMulti::new(), |acc, conf| match conf {
&NoiseModuleConf::MultiFractal { .. } => {
apply_multifractal_conf(conf, acc)
}
&NoiseModuleConf::Seedable { .. } => apply_seedable_conf(conf, acc),
_ => {
println!(
"Invalid configuration provided to BasicMulti module: {:?}",
conf
);
acc
}
});
Box::new(configured_module)
}
&NoiseModuleType::Constant => {
let configured_module =
confs
.iter()
.fold(Constant::new(0.), |acc, conf| match conf {
&NoiseModuleConf::Constant { .. } => apply_constant_conf(conf, acc),
_ => {
println!(
"Invalid configuration provided to Constant module: {:?}",
conf
);
acc
}
});
Box::new(configured_module)
}
&NoiseModuleType::Composed => panic!(
"Attempted to build leaf module with type Composed! That's only a placeholder."
),
}
}
}
/// This is the primary unit of the composition tree.
#[derive(Debug, Serialize, Deserialize)]
pub enum CompositionTreeNodeDefinition {
    // A leaf: one concrete noise module plus its configuration and the
    // transformations applied to its input coordinates.
    Leaf {
        module_type: NoiseModuleType,
        module_conf: Vec<NoiseModuleConf>,
        transformations: Vec<InputTransformationDefinition>,
    },
    // An inner node: child subtrees combined via a `CompositionScheme`.
    Composed {
        scheme: CompositionScheme,
        children: Vec<CompositionTreeNodeDefinition>,
        transformations: Vec<InputTransformationDefinition>,
    },
}
/// Materializes a node definition into a live `CompositionTreeNode`.
///
/// Implemented as `From` (rather than `Into`) per Rust convention; the
/// standard blanket impl still provides
/// `CompositionTreeNodeDefinition: Into<CompositionTreeNode>` for all
/// existing callers.
impl From<CompositionTreeNodeDefinition> for CompositionTreeNode {
    fn from(def: CompositionTreeNodeDefinition) -> Self {
        let (transformations, function) = match def {
            CompositionTreeNodeDefinition::Leaf {
                module_type,
                module_conf,
                transformations,
            } => {
                // Build a noise module out of the type and configurations.
                let built_module = module_type.construct_noise_fn(&module_conf);
                let built_transformations = build_transformations(transformations);
                (
                    built_transformations,
                    CompositionTreeNodeType::Leaf(built_module),
                )
            }
            CompositionTreeNodeDefinition::Composed {
                scheme,
                children,
                transformations,
            } => {
                // Recursively build each child, then combine them under the
                // given composition scheme.
                let built_children: Vec<CompositionTreeNode> = children
                    .into_iter()
                    .map(|child_def| child_def.into())
                    .collect();
                let built_transformations = build_transformations(transformations);
                let composed_module = ComposedNoiseModule {
                    composer: scheme,
                    children: built_children,
                };
                (
                    built_transformations,
                    CompositionTreeNodeType::Combined(composed_module),
                )
            }
        };
        CompositionTreeNode {
            function,
            transformations,
        }
    }
}
/// Transforms the tree definition into an actual composition tree capable
/// of producing values.
///
/// Implemented as `From` (rather than `Into`) per Rust convention; the
/// blanket impl still provides `Into<CompositionTree>` for existing callers.
impl From<CompositionTreeDefinition> for CompositionTree {
    fn from(def: CompositionTreeDefinition) -> Self {
        CompositionTree {
            global_conf: def.global_conf,
            root_node: def.root_node.into(),
        }
    }
}
/// Serializable description of a transformation applied to a node's input
/// coordinates before sampling (see `InputTransformation`).
#[derive(Debug, Serialize, Deserialize)]
pub enum InputTransformationDefinition {
    ZoomScale {
        speed: f64,
        zoom: f64,
    },
    // Replaces one input dimension with the output of another noise subtree.
    HigherOrderNoiseModule {
        node_def: CompositionTreeNodeDefinition,
        replaced_dim: Dim,
    },
    ScaleAll(f64),
}
/// Materializes a transformation definition into a live `InputTransformation`.
///
/// Implemented as `From` (rather than `Into`) per Rust convention; the
/// blanket impl still provides `Into<InputTransformation>` for existing
/// callers (e.g. `build_transformations`).
impl From<InputTransformationDefinition> for InputTransformation {
    fn from(def: InputTransformationDefinition) -> Self {
        match def {
            InputTransformationDefinition::ZoomScale { speed, zoom } => {
                InputTransformation::ZoomScale { speed, zoom }
            }
            InputTransformationDefinition::HigherOrderNoiseModule {
                node_def,
                replaced_dim,
            } => {
                // Building the node recursively constructs its whole subtree.
                let built_node: CompositionTreeNode = node_def.into();
                InputTransformation::HigherOrderNoiseModule {
                    node: built_node,
                    replaced_dim,
                }
            }
            InputTransformationDefinition::ScaleAll(scale) => InputTransformation::ScaleAll(scale),
        }
    }
}
/// Builds an `InputTransformationDefinition` from the IR settings attached
/// to a frontend node. The `"inputTransformationType"` setting selects the
/// variant; the remaining settings are converted per variant.
impl TryFrom<IrNode> for InputTransformationDefinition {
    type Error = String;
    fn try_from(node: IrNode) -> Result<Self, Self::Error> {
        let transformation_type = find_setting_by_name("inputTransformationType", &node.settings)?;
        let def: InputTransformationDefinition = match transformation_type.as_str() {
            "zoomScale" => InputTransformationDefinition::ZoomScale {
                speed: convert_setting("speed", &node.settings)?,
                zoom: convert_setting("zoom", &node.settings)?,
            },
            "honf" => {
                // NOTE(review): this re-reads the "inputTransformationType"
                // setting (whose value here is the literal "honf") and then
                // tries to JSON-parse it as a `CompositionTreeNodeDefinition`
                // below, which can never succeed. It looks like it should
                // read the setting that carries the serialized node
                // definition instead — confirm the intended key name.
                let def_string = find_setting_by_name("inputTransformationType", &node.settings)?;
                let node_def: CompositionTreeNodeDefinition = match serde_json::from_str(
                    &def_string,
                ) {
                    Ok(d) => d,
                    Err(err) => {
                        return Err(format!("Unable to build `CompositionTreeNodeDefinition` from supplied string: {:?}", err));
                    }
                };
                InputTransformationDefinition::HigherOrderNoiseModule {
                    node_def,
                    replaced_dim: convert_setting("replacedDim", &node.settings)?,
                }
            }
            "scaleAll" => InputTransformationDefinition::ScaleAll(convert_setting(
                "scaleFactor",
                &node.settings,
            )?),
            _ => {
                return Err(format!(
                    "Invalid input transformation type provided: {}",
                    transformation_type
                ));
            }
        };
        Ok(def)
    }
}
| true |
eb62ab34365f26505d4c667af0aba41dba144381
|
Rust
|
robinmessage/advent2019
|
/day10/src/main.rs
|
UTF-8
| 6,243 | 3.015625 | 3 |
[] |
no_license
|
use std::fs;
use std::collections::HashSet;
use std::collections::BTreeMap;
use ordered_float::OrderedFloat;
/// A rectangular asteroid map parsed from the puzzle input.
struct Field {
    /// Row-major grid: 1 = asteroid, 0 = empty.
    data: Vec<u8>,
    width: usize,
    height: usize,
}

impl Field {
    /// Parses a newline-separated grid of `#` (asteroid) and `.` (empty).
    ///
    /// The width is taken from the first row; rows are assumed equally long.
    ///
    /// # Panics
    /// Panics on any character other than `#` or `.`.
    pub fn new(input: &str) -> Field {
        let rows: Vec<&str> = input.split('\n').collect();
        let width = rows[0].len();
        let height = rows.len();
        let data = rows
            .iter()
            .flat_map(|row| row.chars())
            .map(|p| match p {
                '#' => 1,
                '.' => 0,
                _ => panic!("Unexpected pixel {}", p),
            })
            .collect();
        Field { width, height, data }
    }

    /// Returns 1 if there is an asteroid at `(x, y)`, 0 otherwise.
    pub fn get_pixel(&self, x: usize, y: usize) -> u8 {
        self.data[x + y * self.width]
    }
}

/// Counts the asteroids directly visible from `(sx, sy)`.
///
/// Asteroids on the same ray from the station occlude each other, so this
/// is the number of distinct `atan2` angles with at least one asteroid.
/// The coordinates are exact small integers, so the angle is never NaN and
/// never -0.0; its IEEE-754 bit pattern is therefore a faithful equality
/// key, which lets us use a plain `HashSet<u64>` instead of the
/// `ordered_float` wrapper.
fn visible_asteroids_from(field: &Field, sx: usize, sy: usize) -> usize {
    let mut angles = HashSet::new();
    for ay in 0..field.height {
        for ax in 0..field.width {
            if ax == sx && ay == sy {
                // Can't see the asteroid you are standing on.
                continue;
            }
            if field.get_pixel(ax, ay) == 0 {
                // No asteroid here.
                continue;
            }
            let angle = (ay as f64 - sy as f64).atan2(ax as f64 - sx as f64);
            angles.insert(angle.to_bits());
        }
    }
    angles.len()
}
/// A grid coordinate `(x, y)` on the asteroid field.
#[derive(PartialEq, Debug, Copy, Clone)]
struct Point(usize, usize);
/// Finds the asteroid from which the most other asteroids are visible.
///
/// Returns `Point(0, 0)` if the field contains no asteroids at all.
fn find_best(field: &Field) -> Point {
    let mut best = Point(0, 0);
    let mut best_score = 0;
    for y in 0..field.height {
        for x in 0..field.width {
            // Only existing asteroids can host the monitoring station.
            if field.get_pixel(x, y) == 1 {
                let score = visible_asteroids_from(field, x, y);
                if score > best_score {
                    best_score = score;
                    best = Point(x, y);
                }
            }
        }
    }
    best
}
/// One full turn; used to wrap angles into `[0, 2π)`.
const PI_TIMES_2: f64 = 2.0 * std::f64::consts::PI;

/// Groups every asteroid around `(sx, sy)` by firing angle.
///
/// The outer map is keyed by the angle (rotated so "up" is 0, wrapped into
/// `[0, 2π)`); each inner map orders the asteroids on that ray by squared
/// distance, closest first.
fn map_visible_asteroids_from(field: &Field, sx: usize, sy: usize) -> BTreeMap<OrderedFloat<f64>, BTreeMap<OrderedFloat<f64>, Point>> {
    let mut by_angle = BTreeMap::new();
    for ay in 0..field.height {
        for ax in 0..field.width {
            if ax == sx && ay == sy {
                // The station itself is not a target.
                continue;
            }
            if field.get_pixel(ax, ay) == 0 {
                continue;
            }
            let dy = ay as f64 - sy as f64;
            let dx = ax as f64 - sx as f64;
            // Rotate so that straight up is angle 0, then wrap negatives.
            let mut angle = dy.atan2(dx) + std::f64::consts::FRAC_PI_2;
            if angle < 0.0 {
                angle += PI_TIMES_2;
            }
            let dist_sq = OrderedFloat(dx * dx + dy * dy);
            by_angle
                .entry(OrderedFloat(angle))
                .or_insert_with(BTreeMap::new)
                .insert(dist_sq, Point(ax, ay));
        }
    }
    by_angle
}
/// Simulates the rotating laser at `(x, y)` and returns the `when`-th
/// asteroid vaporised, or `None` if fewer than `when` exist.
///
/// Each sweep destroys at most one asteroid per angle — the closest one —
/// before the laser moves on to the next angle.
fn vaporised_at(field: &Field, x: usize, y: usize, when: usize) -> Option<Point> {
    let mut targets = map_visible_asteroids_from(field, x, y);
    let mut vaporised_count = 0;
    loop {
        let mut destroyed_any = false;
        // Angles iterate in ascending (clockwise) order each sweep.
        for (angle, line) in targets.iter_mut() {
            let (dist_sq, point) = match line.pop_first() {
                Some(closest) => closest,
                None => continue,
            };
            println!("Target angle {} distance {} point {:#?}", angle, dist_sq.sqrt(), point);
            destroyed_any = true;
            vaporised_count += 1;
            if vaporised_count == when {
                return Some(point);
            }
        }
        if !destroyed_any {
            // A full sweep hit nothing: every asteroid is gone.
            return None;
        }
    }
}
/// Day 10: find the best monitoring station, then report the 200th
/// asteroid vaporised by the rotating laser stationed there.
///
/// (A stale block of commented-out code left over from an image-layering
/// puzzle was removed here.)
fn main() {
    let image = Field::new(&fs::read_to_string("input").expect("Couldn't read input").trim());

    // Part 1: the asteroid with the most direct lines of sight.
    let best = find_best(&image);
    println!("{:#?}", best);
    let score = visible_asteroids_from(&image, best.0, best.1);
    println!("{}", score);

    // Part 2: the 200th asteroid destroyed by the clockwise laser sweep.
    let two_hundredth = vaporised_at(&image, best.0, best.1, 200);
    println!("{:#?}", two_hundredth);
}
#[cfg(test)]
mod tests {
    // Note this useful idiom: importing names from outer (for mod tests) scope.
    use super::*;
    // The 5x5 grid below is the worked example from the puzzle statement;
    // raw-string continuation lines are intentionally flush-left so the
    // rows contain no leading spaces.
    #[test]
    fn test_visible_from() {
        let field = Field::new(r".#..#
.....
#####
....#
...##");
        assert_eq!(visible_asteroids_from(&field, 3, 4), 8);
        assert_eq!(visible_asteroids_from(&field, 0, 2), 6);
        assert_eq!(visible_asteroids_from(&field, 4, 2), 5);
    }
    #[test]
    fn test_best() {
        let field = Field::new(r".#..#
.....
#####
....#
...##");
        assert_eq!(find_best(&field), Point(3, 4));
    }
    // Laser-sweep example: the 36th asteroid vaporised from (8, 3).
    #[test]
    fn test_vaporised_at() {
        let field = Field::new(r".#....#####...#..
##...##.#####..##
##...#...#.#####.
..#.....#...###..
..#.#.....#....##");
        assert_eq!(vaporised_at(&field, 8, 3, 36), Some(Point(14, 3)));
    }
    // On a full 3x3 grid the 8th (last) target from the centre is (0, 0).
    #[test]
    fn test_simple_vaporised_at() {
        let field = Field::new(r"###
###
###");
        assert_eq!(vaporised_at(&field, 1, 1, 8), Some(Point(0, 0)));
    }
}
| true |
837b0a74cb5f1a5a7e0f08bfc50078c3d07df9f7
|
Rust
|
eliovir/rust-examples
|
/api-rand.rs
|
UTF-8
| 1,036 | 2.828125 | 3 |
[
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-other-permissive",
"Apache-2.0",
"LicenseRef-scancode-public-domain"
] |
permissive
|
//! <https://crates.io/crates/rand>
//! <https://docs.rs/rand/0.3/rand/>
//!
//! Tested with rust-1.29.1 and rand-0.8.5
//!
//! @license MIT license <http://www.opensource.org/licenses/mit-license.php>
//!
//! @since 2018-10-01
extern crate rand;
use rand::prelude::*;
/// Demonstrates the `rand` crate: random primitives of several widths,
/// a range-bounded integer, and a random tuple.
fn main() {
    let mut rng = thread_rng();

    let boolean: bool = rng.gen();
    println!("bool: {}", boolean);

    let int_8 = rng.gen::<i8>();
    let uint_8 = rng.gen::<u8>();
    println!("int_8: {}\nuint_8: {}", int_8, uint_8);

    let int_32 = rng.gen::<i32>();
    let uint_32 = rng.gen::<u32>();
    println!("int_32: {}\nuint_32: {}", int_32, uint_32);

    let float32 = rng.gen::<f32>();
    let float64 = rng.gen::<f64>();
    println!("float32: {}\nfloat64: {}", float32, float64);

    // Inclusive range: any value in 0..=127.
    let int8_for_range: u8 = rng.gen_range(0..=127);
    println!("int8_for_range: {}", int8_for_range);

    let tuple = rand::random::<(f32, f32)>();
    println!("tuple: {:?}", tuple);
}
| true |
81f2c166cd0001fcaf67eef84e378c462f77f365
|
Rust
|
AdnoC/ters_gc
|
/tests/dijkstra.rs
|
UTF-8
| 13,372 | 2.9375 | 3 |
[
"MIT",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
extern crate priority_queue;
extern crate ters_gc;
use priority_queue::PriorityQueue;
use std::cell::RefCell;
use std::cmp::{Eq, PartialEq};
use std::collections::HashMap;
use std::fmt;
use std::hash::{Hash, Hasher};
use ters_gc::trace::{Trace, Tracer};
use ters_gc::*;
type GcNode<'a> = Gc<'a, Node<'a>>;
type GcEdge<'a> = Gc<'a, Edge<'a>>;
/// A graph of garbage-collected nodes.
#[derive(Default)]
struct Graph<'a> {
    // Every node in the graph; holding them here roots them for the GC.
    nodes: Vec<GcNode<'a>>,
}
impl<'a> Graph<'a> {
    /// Creates an empty graph.
    fn new() -> Graph<'a> {
        Default::default()
    }
    /// Allocates a new named node on the GC heap and roots it in `self.nodes`.
    fn new_node(&mut self, proxy: &mut Proxy<'a>, name: &'static str) -> GcNode<'a> {
        let node = Node {
            adjacencies: RefCell::new(Vec::new()),
            name,
        };
        let node = proxy.alloc(node);
        self.nodes.push(node.clone());
        node
    }
    // Removes all references to a node from everything in the graph.
    // Since we are using a GC its fine if references to it exist outside of us.
    fn remove_node_by_name(&mut self, name: &str) -> Option<GcNode<'a>> {
        let idx = self.nodes.iter().position(|node| node.name == name);
        idx.map(|idx| self.nodes.remove(idx))
    }
    /// Finds a node by its (unique) name.
    fn node_by_name(&self, name: &str) -> Option<GcNode<'a>> {
        self.nodes.iter().find(|node| node.name == name).cloned()
    }
    /// Dijkstra's shortest path from `src` to `dest`. Returns the node
    /// sequence including both endpoints, or `None` when no path exists.
    fn path_for(&self, src: GcNode<'a>, dest: GcNode<'a>) -> Option<Vec<GcNode<'a>>> {
        // Want lower distance -> higher priority
        fn dist_to_priority(distance: u64) -> u64 {
            std::u64::MAX - distance
        }
        // This __will__ store `Gc`s in the heap where the collector can't
        // find them. __However__ we aren't touching the collector in this
        // function (we aren't allocating new garbage collected things or
        // running it), so while in this function the gc won't collect anything.
        // So, its fine to store the nodes on the heap.
        //
        // Also, all the nodes are stored in the Graph, which is a root.
        let mut distances: HashMap<GcNode<'a>, u64> = self
            .nodes
            .iter()
            .cloned()
            .map(|node| (node, std::u64::MAX))
            .collect();
        *distances.get_mut(&src).unwrap() = 0;
        let mut prev_in_path: HashMap<GcNode<'a>, GcNode<'a>> = HashMap::new();
        let mut nodes_to_process: PriorityQueue<GcNode<'a>, u64> = self
            .nodes
            .iter()
            .cloned()
            .map(|node| {
                let dist = distances[&node];
                (node, dist_to_priority(dist))
            })
            .collect();
        while !nodes_to_process.is_empty() {
            let (cur, _) = nodes_to_process.pop().unwrap();
            let cur_dist = distances[&cur];
            for edge in cur.adjacencies.borrow().iter() {
                let cur_next_dist = distances[&edge.dest];
                // NOTE(review): if `cur` is unreachable from `src`, its
                // distance is still u64::MAX and this addition overflows
                // (panicking in debug builds) whenever it has outgoing
                // edges; consider skipping nodes whose distance is MAX.
                let new_next_dist = cur_dist + edge.weight as u64;
                if new_next_dist < cur_next_dist {
                    *distances.get_mut(&edge.dest).unwrap() = new_next_dist;
                    *prev_in_path.entry(edge.dest.clone()).or_insert(cur.clone()) = cur.clone();
                    nodes_to_process.change_priority(&edge.dest, dist_to_priority(new_next_dist));
                }
            }
        }
        // Building the path
        if !prev_in_path.contains_key(&dest) {
            return None;
        }
        let mut path = Vec::new();
        path.push(dest);
        loop {
            // Walk the predecessor chain back to `src` (which has no entry).
            if let Some(node) = prev_in_path.get(path.last().unwrap()) {
                path.push(node.clone());
            } else {
                break;
            }
        }
        path.reverse();
        Some(path)
    }
}
/// A graph vertex; outgoing edges live behind a `RefCell` so they can be
/// mutated through shared `Gc` handles.
#[derive(Default, Clone)]
struct Node<'a> {
    adjacencies: RefCell<Vec<GcEdge<'a>>>,
    name: &'static str,
}
impl<'a> Node<'a> {
    /// Adds a directed, weighted edge from this node to `dest`.
    fn connect_to(&self, proxy: &mut Proxy<'a>, dest: GcNode<'a>, weight: u32) {
        let edge = proxy.alloc(Edge { dest, weight });
        self.adjacencies.borrow_mut().push(edge);
    }
    /// Removes the first edge (if any) pointing at a node named like `dest`.
    fn disconnect_from(&self, dest: GcNode<'a>) {
        let found = self
            .adjacencies
            .borrow()
            .iter()
            .position(|edge| edge.dest.name == dest.name);
        match found {
            Some(i) => {
                self.adjacencies.borrow_mut().remove(i);
            }
            None => {}
        }
    }
    /// Returns the weight of the first edge to `dest`, if one exists.
    fn weight_to(&self, dest: GcNode<'a>) -> Option<u32> {
        for edge in self.adjacencies.borrow().iter() {
            if edge.dest == dest {
                return Some(edge.weight);
            }
        }
        None
    }
}
impl<'a> Trace for Node<'a> {
    // Report the outgoing edges to the collector so reachable nodes stay alive.
    fn trace(&self, tracer: &mut Tracer) {
        tracer.add_target(&self.adjacencies);
    }
}
/// Adds an edge of the given weight in both directions between two nodes.
fn connect_bidirectional<'a>(proxy: &mut Proxy<'a>, left: GcNode<'a>, right: GcNode<'a>, weight: u32) {
    left.connect_to(proxy, right.clone(), weight);
    right.connect_to(proxy, left, weight);
}

/// Removes the edges in both directions between two nodes (currently unused).
fn _disconnect_bidirectional<'a>(left: GcNode<'a>, right: GcNode<'a>) {
    left.disconnect_from(right.clone());
    right.disconnect_from(left);
}
impl<'a> fmt::Debug for Node<'a> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Wrapper so the RefCell'd edge list formats as a plain Debug list.
        struct AdjWrapper<'b, 'a: 'b>(&'b RefCell<Vec<GcEdge<'a>>>);
        impl<'a, 'b> fmt::Debug for AdjWrapper<'a, 'b> {
            fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
                f.debug_list().entries(self.0.borrow().iter()).finish()
            }
        }
        let adj = AdjWrapper(&self.adjacencies);
        f.debug_struct("Node")
            .field("name", &self.name)
            .field("num_adjacencies", &self.adjacencies.borrow().len())
            .field("adjacencies", &adj)
            .finish()
    }
}
impl<'a> PartialEq for Node<'a> {
    fn eq(&self, other: &Self) -> bool {
        // Only check name since if we have a cycle we'd stack overflow otherwise
        self.name == other.name
    }
}
impl<'a> Eq for Node<'a> {}
// Hash must agree with PartialEq, so it also uses only the name.
impl<'a> Hash for Node<'a> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.name.hash(state);
    }
}
/// A weighted, directed edge pointing at `dest`.
#[derive(Clone, PartialEq, Eq, Hash)]
struct Edge<'a> {
    dest: GcNode<'a>,
    weight: u32,
}
impl<'a> Trace for Edge<'a> {
    // Keep the destination node alive while this edge is reachable.
    fn trace(&self, tracer: &mut Tracer) {
        tracer.add_target(&self.dest);
    }
}
impl<'a> fmt::Debug for Edge<'a> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Print the destination's name rather than the whole node to avoid
        // recursing into the graph.
        f.debug_struct("Edge")
            .field("dest_name", &self.dest.name)
            .field("weight", &self.weight)
            .finish()
    }
}
// Cities by airport code
// (Underscore-prefixed codes are declared but not currently used by the test.
// NOTE(review): "Pheonix" is a misspelling of "Phoenix", but it is display
// data only — left as-is.)
const DTW: &str = "Detroit";
const ATL: &str = "Atlanta";
const IAH: &str = "Houston";
const JFK: &str = "New York";
const SFO: &str = "San Francisco";
const LAS: &str = "Las Vegas";
const MCO: &str = "Orlando";
const PHX: &str = "Pheonix";
const MIA: &str = "Miami";
const DEN: &str = "Denver";
const LAX: &str = "Los Angeles";
const BOS: &str = "Boston";
const _ORD: &str = "Chicago";
const _PHL: &str = "Philadelphia";
const _DCA: &str = "Washington, D.C.";
const _SAN: &str = "San Diego";
// PATH: 1 -> 3 -> 6 -> 5 COST: 20
// 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
// DTW ATL IAH JFK SFO LAS MCO PHX MIA DEN LAX BOS ORD PHL DCA SAN
// 1 DTW !7 9 14
// 2 ATL 7 10 15
// 3 IAH 9 10 11 2
// 4 JFK 15 11 6
// 5 SFO 6 9
// 6 LAS 14 2 9
// 7 MCO
// 8 PHX
// 9 MIA
// 10 DEN
// 11 LAX
// 12 BOS
// 13 ORD
// 14 PHL
// 15 DCA
// 16 SAN
// PATH: 1 -> 12 -> 10 -> 8 -> 4 -> 5 COST: 43
// TO 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
// FROM DTW ATL IAH JFK SFO LAS MCO PHX MIA DEN LAX BOS ORD PHL DCA SAN
// 1 DTW 14 5 5
// 2 ATL
// 3 IAH
// 4 JFK 6
// 5 SFO 1 6 9
// 6 LAS 14 42 4
// 7 MCO 5 7
// 8 PHX 13 16 4
// 9 MIA 4 4
// 10 DEN 7 16 3
// 11 LAX 4 4
// 12 BOS 5 3
// 13 ORD
// 14 PHL
// 15 DCA
// 16 SAN
#[test]
fn dijkstra_is_cool() {
let mut col = Collector::new();
let mut proxy = col.proxy();
let mut graph = Graph::new();
initialize_graph(&mut proxy, &mut graph);
test_first_path(&graph);
secede_texas(&mut proxy, &mut graph);
test_second_path(&graph);
fn initialize_graph<'a>(proxy: &mut Proxy<'a>, graph: &mut Graph<'a>) {
let dtw = graph.new_node(proxy, DTW);
let atl = graph.new_node(proxy, ATL);
let iah = graph.new_node(proxy, IAH);
let jfk = graph.new_node(proxy, JFK);
let sfo = graph.new_node(proxy, SFO);
let las = graph.new_node(proxy, LAS);
connect_bidirectional(proxy, dtw.clone(), atl.clone(), 7);
connect_bidirectional(proxy, dtw.clone(), iah.clone(), 9);
connect_bidirectional(proxy, dtw.clone(), las.clone(), 14);
connect_bidirectional(proxy, atl.clone(), iah.clone(), 10);
connect_bidirectional(proxy, atl.clone(), jfk.clone(), 15);
connect_bidirectional(proxy, iah.clone(), jfk.clone(), 11);
connect_bidirectional(proxy, iah.clone(), las.clone(), 2);
connect_bidirectional(proxy, jfk.clone(), sfo.clone(), 6);
connect_bidirectional(proxy, sfo.clone(), las.clone(), 9);
// let mut v: SmallVec<[GcNode<'a>; 16]> = SmallVec::new();
// v.push(dtw);
// v.push(atl);
// v.push(ord);
// v.push(jfk);
// v.push(sfo);
}
fn test_first_path<'a>(graph: &Graph<'a>) {
let dtw = graph.node_by_name(DTW).unwrap();
let sfo = graph.node_by_name(SFO).unwrap();
let path = graph
.path_for(dtw.clone(), sfo.clone())
.expect("was unable to find a path");
let iah = graph.node_by_name(IAH).unwrap();
let las = graph.node_by_name(LAS).unwrap();
let expected = [dtw, iah, las, sfo];
assert_eq!(&expected, &*path);
let path_weight: u32 = path
.iter()
.zip(path.iter().skip(1).cloned())
.map(|(src, dst)| src.weight_to(dst).unwrap())
.sum();
assert_eq!(20, path_weight);
}
// Texas decided to secede from the US and become its own nation,
// a theocracy centered on the Church of BBQ. Several other states
// followed it.
// For some reason the US government isn't happy about this.
// It prohibited flights to/from the seceded states.
// To show the BBQ-ans how much cooler the US is, they decided to create new
// airports.
fn secede_texas<'a>(proxy: &mut Proxy<'a>, graph: &mut Graph<'a>) {
    // Disallow flight to the traitorous state
    {
        let dtw = graph.node_by_name(DTW).unwrap();
        let jfk = graph.node_by_name(JFK).unwrap();
        let las = graph.node_by_name(LAS).unwrap();
        let iah = graph.node_by_name(IAH).unwrap();
        let atl = graph.node_by_name(ATL).unwrap();
        // Sever every remaining edge that leads into the seceded airports.
        dtw.disconnect_from(iah.clone());
        jfk.disconnect_from(iah.clone());
        las.disconnect_from(iah.clone());
        dtw.disconnect_from(atl.clone());
        jfk.disconnect_from(atl.clone());
        // Remove the airports themselves from the list
        graph.remove_node_by_name(IAH);
        graph.remove_node_by_name(ATL);
    }
    // Running the proxy after the removals must shrink the set of tracked
    // objects (the disconnected nodes become unreachable).
    let pre_tracked = proxy.num_tracked();
    proxy.run();
    let post_tracked = proxy.num_tracked();
    assert!(pre_tracked > post_tracked);
    // Rebuild the route map: look up the surviving airports, create the new
    // ones, and wire up the replacement connections.
    let dtw = graph.node_by_name(DTW).unwrap();
    let sfo = graph.node_by_name(SFO).unwrap();
    let jfk = graph.node_by_name(JFK).unwrap();
    let las = graph.node_by_name(LAS).unwrap();
    let mco = graph.new_node(proxy, MCO);
    let phx = graph.new_node(proxy, PHX);
    let mia = graph.new_node(proxy, MIA);
    let den = graph.new_node(proxy, DEN);
    let lax = graph.new_node(proxy, LAX);
    let bos = graph.new_node(proxy, BOS);
    connect_bidirectional(proxy, dtw.clone(), mco.clone(), 5);
    connect_bidirectional(proxy, dtw.clone(), bos.clone(), 5);
    sfo.connect_to(proxy, dtw.clone(), 1);
    // Replace the old LAS -> SFO link with a much more expensive one.
    las.disconnect_from(sfo.clone());
    las.connect_to(proxy, sfo.clone(), 42);
    connect_bidirectional(proxy, las.clone(), mia.clone(), 4);
    connect_bidirectional(proxy, mco.clone(), den.clone(), 7);
    phx.connect_to(proxy, jfk.clone(), 13);
    connect_bidirectional(proxy, phx.clone(), den.clone(), 16);
    connect_bidirectional(proxy, phx.clone(), lax.clone(), 4);
    connect_bidirectional(proxy, mia.clone(), lax.clone(), 4);
    connect_bidirectional(proxy, den.clone(), bos.clone(), 3);
}
fn test_second_path<'a>(graph: &Graph<'a>) {
    // After the secession the DTW -> SFO route must still exist, but its
    // total weight is now 43 via the replacement airports.
    let dtw = graph.node_by_name(DTW).unwrap();
    let sfo = graph.node_by_name(SFO).unwrap();
    let path = graph
        .path_for(dtw.clone(), sfo.clone())
        .expect("was unable to find a path");

    // Accumulate the weight of every consecutive hop.
    let mut path_weight: u32 = 0;
    for hop in path.windows(2) {
        path_weight += hop[0].weight_to(hop[1].clone()).unwrap();
    }
    assert_eq!(43, path_weight);
}
}
| true |
15c01a8c6b468a5f3c4143cce282fcb7682f793a
|
Rust
|
coastalwhite/simpleserial-rs
|
/src/capture_to_target.rs
|
UTF-8
| 1,679 | 2.828125 | 3 |
[] |
no_license
|
use crate::UnstuffedBuffer;
use crate::{ReceivedPacket, SSPacket};
use array_utils::sized_slice;
/// Capture to target board packet
#[cfg_attr(test, derive(Debug, PartialEq, Clone))]
pub struct CTPacket {
    pub cmd: u8,     // command byte
    pub sub_cmd: u8, // sub-command byte
    pub dlen: u8,    // number of valid payload bytes in `data`
    pub data: [u8; 192],
}

impl SSPacket for CTPacket {
    // CMD, SCMD, DLEN
    const METADATA_BYTES_LENGTH: usize = 3;
}

impl ReceivedPacket for CTPacket {
    // The third unstuffed byte (DLEN) announces how many payload bytes follow.
    fn get_data_length_from_unstuffed(unstuffed_buffer: UnstuffedBuffer) -> usize {
        usize::from(unstuffed_buffer[2])
    }

    // Wire layout: [0]=CMD, [1]=SCMD, [2]=DLEN, [3..3+DLEN]=payload.
    // The payload is copied into a fixed 192-byte array, zero padded.
    fn new_from_unstuffed(unstuffed_buffer: UnstuffedBuffer) -> Self {
        CTPacket {
            cmd: unstuffed_buffer[0],
            sub_cmd: unstuffed_buffer[1],
            dlen: unstuffed_buffer[2],
            data: sized_slice(unstuffed_buffer, 3, usize::from(unstuffed_buffer[2]) + 3, 0),
        }
    }
}
#[cfg(test)]
mod tests {
    use crate::SentPacket;

    // Test-only sending support, so a CTPacket can be round-tripped through
    // the (de)stuffing machinery.
    impl SentPacket for CTPacket {
        fn get_data_length(&self) -> usize {
            usize::from(self.dlen)
        }

        fn get_data_bytes(&self) -> [u8; 192] {
            self.data
        }

        // Mirror image of `new_from_unstuffed`: CMD, SCMD, DLEN.
        fn set_metadata_bytes(&self, buffer: &mut UnstuffedBuffer) {
            buffer[0] = self.cmd;
            buffer[1] = self.sub_cmd;
            buffer[2] = self.dlen;
        }
    }

    use super::*;
    use array_utils::*;

    // send() followed by fetch() must reproduce the original packet exactly.
    #[test]
    fn invertible() {
        let pkt = CTPacket {
            cmd: b'p',
            sub_cmd: 0,
            dlen: 4,
            data: array_resize([5, 4, 2, 1], 0),
        };
        assert_eq!(pkt, {
            pkt.send().unwrap();
            CTPacket::fetch().unwrap()
        });
    }
}
| true |
d049a1e95d19444e1963c44cbbad1872181f35e2
|
Rust
|
hythloday/advent-of-code-2020
|
/src/bin/day2.rs
|
UTF-8
| 1,608 | 3.015625 | 3 |
[] |
no_license
|
use std::fs::File;
use std::io::{self, BufRead};
use std::path::Path;
use std::iter::FromIterator;
use multiset::HashMultiSet;
fn main() {
// p1
if let Ok(lines) = read_lines("./data/day2.txt") {
let passwords = lines.filter(|line| {
let rule = line.as_ref().unwrap().split(" ").collect::<Vec<&str>>();
let multiset: HashMultiSet<char> = FromIterator::from_iter(rule[2].chars());
let needle = rule[1].chars().nth(0).unwrap();
let minmax = rule[0].split("-").map(|c| c.parse::<usize>().unwrap()).collect::<Vec<_>>();
let count = multiset.count_of(&needle);
count <= minmax[1] && count >= minmax[0]
}).count();
println!("{}", passwords);
}
// p2
if let Ok(lines) = read_lines("./data/day2.txt") {
let passwords2 = lines.filter(|line| {
let rule = line.as_ref().unwrap().split(" ").collect::<Vec<&str>>();
let needle = rule[1].chars().nth(0).unwrap();
let positions = rule[0].split("-").map(|c| c.parse::<usize>().unwrap()).collect::<Vec<_>>();
(rule[2].chars().nth(positions[0]-1).unwrap() == needle) ^ (rule[2].chars().nth(positions[1]-1).unwrap() == needle)
}).count();
println!("{}", passwords2)
}
}
// Opens `filename` and returns a buffered line iterator over its contents.
// The result is wrapped in `io::Result` so callers can match on open errors.
fn read_lines<P>(filename: P) -> io::Result<io::Lines<io::BufReader<File>>>
where
    P: AsRef<Path>,
{
    File::open(filename).map(|file| io::BufReader::new(file).lines())
}
| true |
b05c835f5d352ec1b2f5086b5ca86b7912c508bb
|
Rust
|
learnrust/soap-rs
|
/examples/travel_light/main.rs
|
UTF-8
| 1,205 | 2.546875 | 3 |
[
"MIT"
] |
permissive
|
extern crate soap;
extern crate roxmltree;
use std::fs::File;
use std::io::Read;
use std::path::PathBuf;
use std::path::Path;
use std::io::Write;
use std::env;
use std::process;
use soap::Wsdl;
/// Example: parse a captured SOAP/WSDL response with `roxmltree` and dump its
/// debug representation to `wsdl_service.txt` in the working directory.
///
/// Fixes: the input path used to be built as
/// `Path::new(&current_dir().join(..)).to_path_buf()` — a redundant
/// `Path -> PathBuf` round-trip — and `env::current_dir()` was queried twice.
fn main() {
    // Working directory: both the input fixture and the generated text
    // dump live here.
    let tmp_dir = env::current_dir().unwrap();

    let path = tmp_dir.join("service_response.xml");
    println!("{:#?}", path);

    let mut content = String::new();
    File::open(path)
        .unwrap()
        .read_to_string(&mut content)
        .unwrap();

    // Exit with a non-zero status on malformed XML, as a CLI example should.
    let wsdl = match roxmltree::Document::parse(&content) {
        Ok(v) => v,
        Err(e) => {
            println!("Error: {}.", e);
            process::exit(1);
        }
    };

    // Text representation.
    print_wsdl(&wsdl, Some(tmp_dir.join("wsdl_service.txt"))).expect("Error while printing WSDL.");
}
// Dumps the parsed document's debug representation either to stdout (when
// `file` is `None`) or into the given file.
fn print_wsdl(wsdl: &roxmltree::Document, file: Option<PathBuf>) -> Result<(), std::io::Error> {
    if let Some(path) = file {
        let rendered = format!("{:#?}", wsdl);
        File::create(path)?.write_all(rendered.as_bytes())?;
    } else {
        println!("WSDL: {:#?}", wsdl);
    }
    Ok(())
}
| true |
c85cde49526a8295e37c1ef33dc0b44348a8b560
|
Rust
|
dmitry-pechersky/algorithms
|
/leetcode/189_Rotate_Array.rs
|
UTF-8
| 1,294 | 3.515625 | 4 |
[] |
no_license
|
struct Solution {}

impl Solution {
    /// Rotates `nums` to the right by `k` positions, in place.
    ///
    /// Uses the cycle-replacement ("juggling") algorithm: the array splits
    /// into `gcd(n, k)` disjoint cycles and each cycle is shifted one step,
    /// giving O(n) time and O(1) extra space.
    ///
    /// Fixes: removed a leftover debug `println!` of the gcd, compute the
    /// gcd once instead of twice, and return early on an empty vector
    /// (previously `k % 0` would panic).
    pub fn rotate(nums: &mut Vec<i32>, k: i32) {
        // Greatest common divisor via the Euclidean algorithm.
        fn gcd(mut a: u32, mut b: u32) -> u32 {
            while a % b != 0 {
                let tmp = a % b;
                a = b;
                b = tmp;
            }
            b
        }

        let n = nums.len();
        if n == 0 {
            return; // nothing to rotate; also avoids `k % 0` below
        }
        let k = k as usize % n;
        if k != 0 {
            let cycles = gcd(n as u32, k as u32) as usize;
            for i in 0..cycles {
                // Walk one cycle, dropping each element k slots ahead.
                let mut j = i;
                let mut cur = nums[i];
                loop {
                    let next_j = (j + k) % n;
                    let temp = nums[next_j];
                    nums[next_j] = cur;
                    cur = temp;
                    j = next_j;
                    if j == i {
                        break;
                    }
                }
            }
        }
    }
}
// Regression tests mirroring the LeetCode 189 examples. Renamed from
// `word_dictionary_test`, which was copied from an unrelated problem.
#[cfg(test)]
mod rotate_array_tests {
    use super::*;

    #[test]
    fn test_1() {
        let mut nums = vec![1, 2, 3, 4, 5, 6, 7];
        Solution::rotate(&mut nums, 3);
        assert_eq!(nums, vec![5, 6, 7, 1, 2, 3, 4]);
    }

    #[test]
    fn test_2() {
        let mut nums = vec![-1, -100, 3, 99];
        Solution::rotate(&mut nums, 2);
        assert_eq!(nums, vec![3, 99, -1, -100]);
    }
}
| true |
5a3f1d024204b61ec74fc5a128f07da19a6d5df8
|
Rust
|
freeduck/akula
|
/src/downloader/headers/preverified_hashes_config.rs
|
UTF-8
| 1,924 | 2.84375 | 3 |
[
"Apache-2.0"
] |
permissive
|
use serde::{de, Deserialize};
use std::str::FromStr;
/// The preverified hashes is a list of known precomputed hashes of every 192-th block in the chain:
///
/// hash(0), hash(192), hash(384), hash(576), ...
///
/// The preverified hashes are copied from:
/// https://github.com/ledgerwatch/erigon/blob/devel/turbo/stages/headerdownload/preverified_hashes_mainnet.go
/// https://github.com/ledgerwatch/erigon/blob/devel/turbo/stages/headerdownload/preverified_hashes_ropsten.go
pub struct PreverifiedHashesConfig {
    pub hashes: Vec<ethereum_types::H256>,
}

// Wrapper so serde can parse H256 values written without a "0x" prefix.
struct UnprefixedHexH256(pub ethereum_types::H256);

// Mirror of `PreverifiedHashesConfig` used only during TOML deserialization.
#[derive(Deserialize)]
struct PreverifiedHashesConfigUnprefixedHex {
    pub hashes: Vec<UnprefixedHexH256>,
}
impl PreverifiedHashesConfig {
    /// Loads the compiled-in (via `include_str!`) hash list for `chain_name`.
    /// Only "mainnet" and "ropsten" are supported; any other name is an error.
    pub fn new(chain_name: &str) -> anyhow::Result<Self> {
        let config_text = match chain_name {
            "mainnet" => include_str!("preverified_hashes_mainnet.toml"),
            "ropsten" => include_str!("preverified_hashes_ropsten.toml"),
            _ => anyhow::bail!("unsupported chain"),
        };
        let config: PreverifiedHashesConfigUnprefixedHex = toml::from_str(config_text)?;
        Ok(PreverifiedHashesConfig {
            // Unwrap the hex wrapper type back into plain H256 values.
            hashes: config.hashes.iter().map(|hash| hash.0).collect(),
        })
    }
}
impl FromStr for UnprefixedHexH256 {
    type Err = hex::FromHexError;

    // Parses exactly 64 hex characters (no "0x" prefix) into an H256;
    // `decode_to_slice` errors on wrong length or non-hex characters.
    fn from_str(hash_str: &str) -> Result<Self, Self::Err> {
        let mut hash_bytes = [0u8; 32];
        hex::decode_to_slice(hash_str, &mut hash_bytes)?;
        let hash = ethereum_types::H256::from(hash_bytes);
        Ok(UnprefixedHexH256(hash))
    }
}
impl<'de> Deserialize<'de> for UnprefixedHexH256 {
    // Deserializes from a plain string field, delegating the actual hex
    // parsing to the `FromStr` impl above.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: de::Deserializer<'de>,
    {
        let hash_str = String::deserialize(deserializer)?;
        FromStr::from_str(&hash_str).map_err(de::Error::custom)
    }
}
| true |
23b0c6486f69438ea8a30cad1ed7835cb733650c
|
Rust
|
FuGangqiang/RustPractices
|
/src/structure/unsafe_red_black_tree.rs
|
UTF-8
| 11,222 | 3.125 | 3 |
[] |
no_license
|
use core::cmp::Ordering;
use core::ptr;
/// A red-black binary search tree mapping keys `K` to values `V`,
/// implemented on raw heap pointers (`unsafe`) rather than `Option<Box<..>>`.
pub struct RedBlackTree<K: Ord, V> {
    root: NodePtr<K, V>,
}

// Raw owning pointer to a `Node`; a null pointer encodes "no node".
struct NodePtr<K: Ord, V>(*mut Node<K, V>);

struct Node<K: Ord, V> {
    key: K,
    value: V,
    left: NodePtr<K, V>,
    right: NodePtr<K, V>,
    color: Color,
    // Size of the subtree rooted at this node (this node included).
    count: usize,
}

// Color of the link pointing to a node, per red-black tree convention.
#[derive(PartialEq, Eq, Copy, Clone)]
enum Color {
    Red,
    Black,
}
impl<K: Ord, V> Drop for RedBlackTree<K, V> {
    // Recursively frees every heap-allocated node when the tree is dropped.
    fn drop(&mut self) {
        self.root.clear()
    }
}
impl<K: Ord, V> RedBlackTree<K, V> {
    /// Creates an empty tree.
    pub fn new() -> Self {
        Self { root: NodePtr::null() }
    }

    /// Number of key/value pairs stored in the tree.
    pub fn len(&self) -> usize {
        self.root.count()
    }

    /// Looks up `key`, returning a reference to its value if present.
    pub fn get(&self, key: &K) -> Option<&V> {
        let node_ptr = self.root.get_node_ptr(key);
        if node_ptr.is_null() {
            return None;
        }
        // SAFETY: `node_ptr` is non-null and points into this tree, which
        // outlives the returned borrow of `self`.
        unsafe { Some(&(*node_ptr.0).value) }
    }

    /// Inserts `key -> value`, replacing any existing value for `key`.
    pub fn put(&mut self, key: K, value: V) {
        self.root.set(NodePtr::put(&mut self.root.clone(), key, value));
        // The root of a red-black tree is always black.
        self.root.set_color(Color::Black);
    }
}
// `NodePtr` is just a raw pointer, so it is trivially Clone + Copy; copying
// the handle does NOT copy the node it points to.
impl<K: Ord, V> Clone for NodePtr<K, V> {
    fn clone(&self) -> NodePtr<K, V> {
        Self(self.0)
    }
}

impl<K: Ord, V> Copy for NodePtr<K, V> {}
impl<K: Ord, V> NodePtr<K, V> {
    // A null pointer, used to represent an absent child / empty subtree.
    fn null() -> NodePtr<K, V> {
        Self(ptr::null_mut())
    }

    fn is_null(&self) -> bool {
        self.0.is_null()
    }

    // Heap-allocates a new leaf node and returns an owning raw pointer.
    fn new(key: K, value: V, color: Color) -> Self {
        let node = Node {
            key,
            value,
            left: NodePtr::null(),
            right: NodePtr::null(),
            color,
            count: 1,
        };
        Self(Box::into_raw(Box::new(node)))
    }

    // Null links count as black, per red-black tree convention.
    fn is_red(&self) -> bool {
        if self.is_null() {
            false
        } else {
            unsafe { (*self.0).color == Color::Red }
        }
    }

    fn color(&self) -> Color {
        if self.is_null() {
            return Color::Black;
        }
        unsafe { (*self.0).color }
    }

    // All setters below silently ignore null pointers instead of
    // dereferencing them.
    fn set_color(&mut self, color: Color) {
        if self.is_null() {
            return;
        }
        unsafe { (*self.0).color = color }
    }

    // Subtree size; an empty subtree has size 0.
    fn count(&self) -> usize {
        if self.is_null() {
            0
        } else {
            unsafe { (*self.0).count }
        }
    }

    fn set_count(&mut self, count: usize) {
        if self.is_null() {
            return;
        }
        unsafe { (*self.0).count = count }
    }

    fn left(&self) -> Self {
        if self.is_null() {
            return NodePtr::null();
        }
        unsafe { (*self.0).left }
    }

    fn set_left(&mut self, left: NodePtr<K, V>) {
        if self.is_null() {
            return;
        }
        unsafe { (*self.0).left = left }
    }

    fn right(&self) -> Self {
        if self.is_null() {
            return NodePtr::null();
        }
        unsafe { (*self.0).right }
    }

    fn set_right(&mut self, right: NodePtr<K, V>) {
        if self.is_null() {
            return;
        }
        unsafe { (*self.0).right = right }
    }

    fn key(&self) -> Option<&K> {
        if self.is_null() {
            return None;
        }
        unsafe { Some(&(*self.0).key) }
    }

    // Replaces the node's value in place. Briefly re-materializes the Box so
    // the old value's destructor runs, then leaks it again with `into_raw`
    // so ownership stays with the tree.
    fn set_value(&mut self, value: V) {
        if self.is_null() {
            return;
        }
        let mut node = unsafe { Box::from_raw(self.0) };
        let _old = core::mem::replace(&mut node.value, value);
        let _ = Box::into_raw(node);
    }

    // Standard BST lookup; returns a null pointer when `key` is absent.
    fn get_node_ptr(&self, key: &K) -> Self {
        match self.key() {
            None => Self::null(),
            Some(k) => match key.cmp(k) {
                Ordering::Less => self.left().get_node_ptr(key),
                Ordering::Greater => self.right().get_node_ptr(key),
                Ordering::Equal => self.clone(),
            },
        }
    }

    // Repoints this handle at another node (pointer assignment only).
    fn set(&mut self, node: NodePtr<K, V>) {
        self.0 = node.0
    }

    // Recursive left-leaning red-black insertion: insert a red leaf at the
    // bottom, then restore the invariants on the way back up.
    fn put(h: &mut NodePtr<K, V>, key: K, value: V) -> NodePtr<K, V> {
        match h.key() {
            None => return NodePtr::new(key, value, Color::Red), // null ptr
            Some(h_key) => match key.cmp(h_key) {
                Ordering::Less => h.set_left(NodePtr::put(&mut h.left(), key, value)),
                Ordering::Greater => h.set_right(NodePtr::put(&mut h.right(), key, value)),
                Ordering::Equal => h.set_value(value),
            },
        }
        // Right-leaning red link: rotate left.
        if h.right().is_red() && !h.left().is_red() {
            h.set(NodePtr::rotate_left(&mut h.clone()));
        }
        // Two consecutive red left links: rotate right.
        if h.left().is_red() && h.left().left().is_red() {
            h.set(NodePtr::rotate_right(&mut h.clone()));
        }
        // Both children red: split the temporary 4-node.
        if h.left().is_red() && h.right().is_red() {
            NodePtr::flip_colors(h);
        }
        h.set_count(1 + h.left().count() + h.right().count());
        h.clone()
    }

    // Makes a right-leaning red link lean left; returns the new subtree root.
    fn rotate_left(h: &mut NodePtr<K, V>) -> NodePtr<K, V> {
        let mut x = h.right();
        h.set_right(x.left());
        x.set_left(h.clone());
        x.set_color(h.color());
        h.set_color(Color::Red);
        x.set_count(h.count());
        h.set_count(1 + h.left().count() + h.right().count());
        x
    }

    // Mirror image of `rotate_left`.
    fn rotate_right(h: &mut NodePtr<K, V>) -> NodePtr<K, V> {
        let mut x = h.left();
        h.set_left(x.right());
        x.set_right(h.clone());
        x.set_color(h.color());
        h.set_color(Color::Red);
        x.set_count(h.count());
        h.set_count(1 + h.left().count() + h.right().count());
        x
    }

    // Flips the node red and both children black.
    fn flip_colors(h: &mut NodePtr<K, V>) {
        h.set_color(Color::Red);
        h.left().set_color(Color::Black);
        h.right().set_color(Color::Black);
    }

    // Recursively drops both subtrees, then frees this node.
    fn clear(&mut self) {
        if !self.is_null() {
            unsafe {
                self.left().clear();
                self.right().clear();
                // SAFETY: `self.0` came from `Box::into_raw` in `new` and is
                // never dereferenced again after this drop.
                Box::from_raw(self.0);
            }
        }
    }
}
#[cfg(test)]
impl<K, V> RedBlackTree<K, V>
where
    K: Ord + ToString,
    V: ToString,
{
    // Test helper: renders the whole tree as a nested string (see below).
    fn inorder_tree_walk_string(&self) -> String {
        self.root.inorder_tree_walk_string()
    }
}

#[cfg(test)]
impl<K, V> NodePtr<K, V>
where
    K: Ord + ToString,
    V: ToString,
{
    // Borrows the key/value pair. Only called on non-null pointers.
    fn pair(&self) -> (&K, &V) {
        unsafe { (&(*self.0).key, &(*self.0).value) }
    }

    // Renders the subtree as "(key:value:color(left)(right))", where color
    // is "r" or "b"; an empty subtree renders as "()".
    fn inorder_tree_walk_string(&self) -> String {
        let mut result: String = "".into();
        result.push_str("(");
        if !self.is_null() {
            let (key, value) = self.pair();
            result.push_str(&key.to_string());
            result.push_str(":");
            result.push_str(&value.to_string());
            result.push_str(":");
            if self.is_red() {
                result.push_str("r");
            } else {
                result.push_str("b");
            }
            let left_str = self.left().inorder_tree_walk_string();
            result.push_str(&left_str);
            let right_str = self.right().inorder_tree_walk_string();
            result.push_str(&right_str);
        }
        result.push_str(")");
        result
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Insert the classic "SEARCHXMPL" key sequence and verify lookups,
    // misses, and the reported length.
    #[test]
    fn test_get() {
        let mut rbtree: RedBlackTree<char, i32> = RedBlackTree::new();
        assert_eq!(rbtree.get(&'S'), None);
        assert_eq!(rbtree.len(), 0);
        rbtree.put('S', 1);
        rbtree.put('E', 2);
        rbtree.put('A', 3);
        rbtree.put('R', 4);
        rbtree.put('C', 5);
        rbtree.put('H', 6);
        rbtree.put('X', 7);
        rbtree.put('M', 8);
        rbtree.put('P', 9);
        rbtree.put('L', 10);
        assert_eq!(rbtree.get(&'S'), Some(&1));
        assert_eq!(rbtree.get(&'E'), Some(&2));
        assert_eq!(rbtree.get(&'A'), Some(&3));
        assert_eq!(rbtree.get(&'R'), Some(&4));
        assert_eq!(rbtree.get(&'C'), Some(&5));
        assert_eq!(rbtree.get(&'H'), Some(&6));
        assert_eq!(rbtree.get(&'X'), Some(&7));
        assert_eq!(rbtree.get(&'M'), Some(&8));
        assert_eq!(rbtree.get(&'P'), Some(&9));
        assert_eq!(rbtree.get(&'L'), Some(&10));
        assert_eq!(rbtree.get(&'Z'), None);
        assert_eq!(rbtree.len(), 10);
    }

    // Checks the exact tree shape/colors after each insertion of a
    // random-order key sequence (rotations and color flips exercised).
    #[test]
    fn test_inorder_tree_walk_1() {
        let mut rbtree: RedBlackTree<char, i32> = RedBlackTree::new();
        assert_eq!(rbtree.inorder_tree_walk_string(), "()");
        rbtree.put('S', 1);
        assert_eq!(rbtree.inorder_tree_walk_string(), "(S:1:b()())");
        rbtree.put('E', 2);
        assert_eq!(rbtree.inorder_tree_walk_string(), "(S:1:b(E:2:r()())())");
        rbtree.put('A', 3);
        assert_eq!(rbtree.inorder_tree_walk_string(), "(E:2:b(A:3:b()())(S:1:b()()))");
        rbtree.put('R', 4);
        assert_eq!(rbtree.inorder_tree_walk_string(), "(E:2:b(A:3:b()())(S:1:b(R:4:r()())()))");
        rbtree.put('C', 5);
        assert_eq!(rbtree.inorder_tree_walk_string(), "(E:2:b(C:5:b(A:3:r()())())(S:1:b(R:4:r()())()))");
        rbtree.put('H', 6);
        assert_eq!(rbtree.inorder_tree_walk_string(), "(R:4:b(E:2:r(C:5:b(A:3:r()())())(H:6:b()()))(S:1:b()()))");
        rbtree.put('X', 7);
        assert_eq!(rbtree.inorder_tree_walk_string(), "(R:4:b(E:2:r(C:5:b(A:3:r()())())(H:6:b()()))(X:7:b(S:1:r()())()))");
        rbtree.put('M', 8);
        assert_eq!(rbtree.inorder_tree_walk_string(), "(R:4:b(E:2:r(C:5:b(A:3:r()())())(M:8:b(H:6:r()())()))(X:7:b(S:1:r()())()))");
        rbtree.put('P', 9);
        assert_eq!(rbtree.inorder_tree_walk_string(), "(M:8:b(E:2:b(C:5:b(A:3:r()())())(H:6:b()()))(R:4:b(P:9:b()())(X:7:b(S:1:r()())())))");
        rbtree.put('L', 10);
        assert_eq!(rbtree.inorder_tree_walk_string(), "(M:8:b(E:2:b(C:5:b(A:3:r()())())(L:10:b(H:6:r()())()))(R:4:b(P:9:b()())(X:7:b(S:1:r()())())))");
    }

    // Same check with keys inserted in ascending order (worst case for a
    // plain BST; the red-black rebalancing must keep the tree shallow).
    #[test]
    fn test_inorder_tree_walk_2() {
        let mut rbtree: RedBlackTree<char, i32> = RedBlackTree::new();
        assert_eq!(rbtree.inorder_tree_walk_string(), "()");
        rbtree.put('A', 1);
        assert_eq!(rbtree.inorder_tree_walk_string(), "(A:1:b()())");
        rbtree.put('C', 2);
        assert_eq!(rbtree.inorder_tree_walk_string(), "(C:2:b(A:1:r()())())");
        rbtree.put('E', 3);
        assert_eq!(rbtree.inorder_tree_walk_string(), "(C:2:b(A:1:b()())(E:3:b()()))");
        rbtree.put('H', 4);
        assert_eq!(rbtree.inorder_tree_walk_string(), "(C:2:b(A:1:b()())(H:4:b(E:3:r()())()))");
        rbtree.put('L', 5);
        assert_eq!(rbtree.inorder_tree_walk_string(), "(H:4:b(C:2:r(A:1:b()())(E:3:b()()))(L:5:b()()))");
        rbtree.put('M', 6);
        assert_eq!(rbtree.inorder_tree_walk_string(), "(H:4:b(C:2:r(A:1:b()())(E:3:b()()))(M:6:b(L:5:r()())()))");
        rbtree.put('P', 7);
        assert_eq!(rbtree.inorder_tree_walk_string(), "(H:4:b(C:2:b(A:1:b()())(E:3:b()()))(M:6:b(L:5:b()())(P:7:b()())))");
        rbtree.put('R', 8);
        assert_eq!(rbtree.inorder_tree_walk_string(), "(H:4:b(C:2:b(A:1:b()())(E:3:b()()))(M:6:b(L:5:b()())(R:8:b(P:7:r()())())))");
        rbtree.put('S', 9);
        assert_eq!(rbtree.inorder_tree_walk_string(), "(H:4:b(C:2:b(A:1:b()())(E:3:b()()))(R:8:b(M:6:r(L:5:b()())(P:7:b()()))(S:9:b()())))");
        rbtree.put('X', 10);
        assert_eq!(rbtree.inorder_tree_walk_string(), "(H:4:b(C:2:b(A:1:b()())(E:3:b()()))(R:8:b(M:6:r(L:5:b()())(P:7:b()()))(X:10:b(S:9:r()())())))");
    }
}
| true |
7a74e02114c3245ab124ee8b619446370bcde4ca
|
Rust
|
stevenlr/HandmadeRust
|
/fnd/io/bit_reader.rs
|
UTF-8
| 2,724 | 3.390625 | 3 |
[
"Unlicense"
] |
permissive
|
use super::Read;
/// Adapts a byte reader into a bit-level reader, delivering bits LSB-first.
pub struct BitReader<R>
{
    r: R,
    // Bits buffered from the reader; the least-significant bit is the
    // oldest (next to be returned).
    buffer: u64,
    // Number of valid bits currently held in `buffer` (0..=64).
    buffer_size: usize,
}
impl<R> BitReader<R>
where
    R: Read,
{
    pub fn new(r: R) -> Self
    {
        Self {
            r,
            buffer: 0,
            buffer_size: 0,
        }
    }

    // Pulls whole bytes from the reader until at least `bit_count` bits are
    // buffered. Stops early on EOF / read error, leaving fewer bits so that
    // `peek` yields zeros for the missing high bits.
    fn buffer_bits(&mut self, bit_count: usize)
    {
        debug_assert!(bit_count <= 32);

        while self.buffer_size < bit_count
        {
            let mut byte = [0u8];
            match self.r.read(&mut byte)
            {
                Ok(1) =>
                {
                    // New bits are stacked above those already buffered.
                    self.buffer = self.buffer | ((byte[0] as u64) << self.buffer_size);
                    self.buffer_size += 8;
                }
                _ =>
                {
                    break;
                }
            }
        }
    }

    /// Returns the next `bit_count` (<= 32) bits without consuming them.
    pub fn peek(&mut self, bit_count: usize) -> u32
    {
        assert!(bit_count <= 32);
        self.buffer_bits(bit_count);
        // Mask off everything above the requested bit count.
        (self.buffer & ((1 << bit_count) - 1)) as u32
    }

    /// Returns the next `bit_count` bits and removes them from the stream.
    pub fn consume(&mut self, bit_count: usize) -> u32
    {
        let value = self.peek(bit_count);
        // saturating_sub: near EOF fewer than `bit_count` bits may remain.
        self.buffer_size = self.buffer_size.saturating_sub(bit_count);
        self.buffer >>= bit_count;
        return value;
    }

    /// Discards buffered bits up to the next byte boundary of the stream.
    pub fn skip_to_next_byte(&mut self)
    {
        self.consume(self.buffer_size % 8);
    }
}
#[cfg(test)]
mod tests
{
    use super::*;

    // Walks a fixed 8-byte stream, checking LSB-first extraction across
    // byte boundaries, non-consuming peeks, byte alignment, and the
    // zero-filled reads past EOF.
    #[test]
    fn bit_reader()
    {
        let data: &[u8] = &[
            0b0000_0001,
            0b0010_0011,
            0b0100_0101,
            0b0110_0111,
            0b1000_1001,
            0b1010_1011,
            0b1100_1101,
            0b1110_1111,
        ];
        let mut bit_reader = BitReader::new(data);
        assert_eq!(bit_reader.consume(6), 0b00_0001);
        assert_eq!(bit_reader.consume(6), 0b0011_00);
        assert_eq!(bit_reader.consume(12), 0b0100_0101_0010);
        assert_eq!(bit_reader.consume(4), 0b0111);
        // Peeks of growing width must agree and must not consume anything.
        assert_eq!(bit_reader.peek(1), 0b0);
        assert_eq!(bit_reader.peek(2), 0b10);
        assert_eq!(bit_reader.peek(3), 0b110);
        assert_eq!(bit_reader.peek(4), 0b0110);
        assert_eq!(bit_reader.peek(5), 0b1_0110);
        assert_eq!(bit_reader.peek(6), 0b01_0110);
        assert_eq!(bit_reader.peek(7), 0b001_0110);
        assert_eq!(bit_reader.peek(8), 0b1001_0110);
        assert_eq!(bit_reader.consume(3), 0b110);
        bit_reader.skip_to_next_byte();
        assert_eq!(bit_reader.consume(28), 0b1111_1100_1101_1010_1011_1000_1001);
        // Past EOF the missing high bits read back as zeros.
        assert_eq!(bit_reader.peek(8), 0b1110);
        assert_eq!(bit_reader.peek(4), 0b1110);
        assert_eq!(bit_reader.peek(32), 0b1110);
        assert_eq!(bit_reader.consume(32), 0b1110);
    }
}
| true |
89d598110c1c06c6bac6c3a21a331f3c4c3aacd4
|
Rust
|
krilie/rust-hello
|
/base/u_heap_stack_02/src/main.rs
|
UTF-8
| 377 | 3.28125 | 3 |
[] |
no_license
|
// #![feature(box_syntax, box_patterns)]
// fn main() {
// let boxed = Some(box 5);
// match boxed {
// Some(box unboxed) => println!("Some {}", unboxed),
// None => println!("None"),
// }
// }
// Stable-Rust version of the boxed-pattern demo above: borrow the value out
// of the Option instead of using the unstable `box` pattern.
fn main() {
    let boxed = Some(Box::new(5));
    if let Some(ref inner) = boxed {
        println!("Some {}", inner);
    } else {
        println!("None");
    }
}
| true |
70276e4b73597f09eae4c166edfcbbc77315c97c
|
Rust
|
willglynn/artichoke
|
/artichoke-backend/src/globals.rs
|
UTF-8
| 1,920 | 2.9375 | 3 |
[
"MIT",
"BSD-2-Clause"
] |
permissive
|
use std::borrow::Cow;
use crate::exception::Exception;
use crate::sys;
use crate::value::Value;
use crate::{Artichoke, Convert, Globals, Intern};
// TODO: Handle invalid variable names. For now this is delegated to mruby.
impl Globals for Artichoke {
    type Value = Value;

    type Error = Exception;

    /// Sets the mruby global variable `name` to `value` via `mrb_gv_set`.
    fn set_global_variable<T>(&mut self, name: T, value: &Self::Value) -> Result<(), Self::Error>
    where
        T: Into<Cow<'static, [u8]>>,
    {
        let sym = self.intern_symbol(name);
        let mrb = self.0.borrow().mrb;
        // SAFETY: assumes `mrb` points at the live interpreter owned by this
        // state and `sym` was interned on it — TODO(review): confirm.
        unsafe {
            sys::mrb_gv_set(mrb, sym, value.inner());
        }
        Ok(())
    }

    /// Unset global variable pointed to by `name`.
    ///
    /// Unsetting a global variable removes the name from the global storage
    /// table. Unset globals resolve to `nil` in the Ruby VM.
    ///
    /// Unsetting a global that is currently unset is a no-op.
    ///
    /// # Errors
    ///
    /// If the name is not a valid global name, an error is returned.
    fn unset_global_variable<T>(&mut self, name: T) -> Result<(), Self::Error>
    where
        T: Into<Cow<'static, [u8]>>,
    {
        let sym = self.intern_symbol(name);
        let mrb = self.0.borrow().mrb;
        // "Unset" is implemented by assigning nil rather than removing the
        // entry from mruby's global table.
        let nil = self.convert(None::<Value>);
        unsafe {
            sys::mrb_gv_set(mrb, sym, nil.inner());
        }
        Ok(())
    }

    /// Reads the mruby global variable `name` via `mrb_gv_get`.
    fn get_global_variable<T>(&mut self, name: T) -> Result<Option<Self::Value>, Self::Error>
    where
        T: Into<Cow<'static, [u8]>>,
    {
        let sym = self.intern_symbol(name);
        let mrb = self.0.borrow().mrb;
        let value = unsafe { sys::mrb_gv_get(mrb, sym) };
        // NOTE: This implementation is not compliant with the spec laid out in
        // the trait documentation. This implementation always returns `Some(_)`
        // even if the global is unset.
        Ok(Some(Value::new(self, value)))
    }
}
| true |
651321edf10b9ba279d983a385aeb0de89257992
|
Rust
|
CygnusRoboticus/basic-pathfinding
|
/src/search.rs
|
UTF-8
| 4,107 | 2.875 | 3 |
[
"MIT"
] |
permissive
|
use std::collections::BinaryHeap;
use std::collections::HashMap;
use crate::coord::Coord;
use crate::grid::Grid;
use crate::node::Node;
// Tunables controlling how a search is run; deserializable from JSON input.
#[derive(Default, Deserialize)]
pub struct SearchOpts {
    // Maximum total movement cost a node may accumulate, if any.
    pub cost_threshold: Option<i32>,
    pub end_on_unstoppable: bool,
    pub path_to_threshold: bool,
    pub path_adjacent: bool,
}
// State of an in-progress pathfinding run.
pub struct Search {
    pub start: Coord,
    // Acceptable goal coordinates.
    pub end: Vec<Coord>,
    // Heuristic target, when doing guided (A*-style) pathing.
    pub dest: Option<Coord>,
    // Frontier, ordered by the `Node` ordering.
    pub heap: BinaryHeap<Node>,
    // Canonical node state, indexed by y then x.
    pub cache: HashMap<i32, HashMap<i32, Node>>,
    pub opts: SearchOpts,
}
impl Search {
    /// Builds a search from `start` towards any coordinate in `end`,
    /// optionally guided by a heuristic towards `dest`.
    pub fn new(start: Coord, end: Vec<Coord>, dest: Option<Coord>, opts: SearchOpts) -> Search {
        Search {
            start,
            dest,
            end,
            heap: BinaryHeap::new(),
            cache: HashMap::new(),
            opts,
        }
    }

    /// True when the node currently on top of the heap is one of the goals.
    pub fn reached_destination(&self) -> bool {
        match self.peek() {
            Some(curr) => self.end.iter().any(|c| c.matches(curr.x, curr.y)),
            _ => false,
        }
    }

    /// Adds a node to the frontier and records it in the cache.
    pub fn push(&mut self, node: Node) {
        self.heap.push(node.clone());
        self.cache(node);
    }

    /// Inserts `node` into the row/column cache, replacing any previous
    /// entry at the same coordinate. Uses the entry API so the row map is
    /// located (and created if absent) in a single lookup, instead of the
    /// previous remove-then-reinsert of the whole row.
    pub fn cache(&mut self, node: Node) {
        self.cache
            .entry(node.y)
            .or_insert_with(HashMap::new)
            .insert(node.x, node);
    }

    /// Top of the frontier, resolved through the cache (the heap may hold
    /// stale copies of updated nodes).
    pub fn peek(&self) -> Option<&Node> {
        self.heap
            .peek()
            .and_then(|node| self.get_node(node.x, node.y))
    }

    pub fn pop(&mut self) -> Option<Node> {
        self.heap.pop()
    }

    pub fn size(&self) -> usize {
        self.heap.len()
    }

    /// Overwrites the cached state for a node (e.g. after relaxing its cost).
    pub fn update(&mut self, node: Node) {
        self.cache(node);
    }

    pub fn get_node(&self, x: i32, y: i32) -> Option<&Node> {
        self.cache.get(&y).and_then(|row| row.get(&x))
    }

    /// Whether this search is guided towards a concrete destination.
    pub fn is_pathing(&self) -> bool {
        self.dest.is_some()
    }

    /// Returns the cached node at (x, y), or creates a fresh one whose cost
    /// accumulates from `parent` and whose heuristic distance points at
    /// `dest` (constant 1 for unguided searches).
    pub fn coordinate_to_node(&self, parent: Option<&Node>, x: i32, y: i32, cost: i32) -> Node {
        match self.get_node(x, y) {
            Some(&node) => node,
            None => {
                // Matching on `dest` directly avoids the previous
                // `is_pathing()` check followed by `dest.unwrap()`.
                let distance = match self.dest {
                    None => 1,
                    Some(dest) => get_distance(x, y, dest.x, dest.y),
                };
                Node::new(
                    parent,
                    x,
                    y,
                    match parent {
                        None => cost,
                        Some(parent) => parent.cost + cost,
                    },
                    distance,
                )
            }
        }
    }

    /// Examines the neighbour of `source_node` at offset (x, y): pushes it
    /// if unvisited, or relaxes its cost when a cheaper route is found.
    pub fn check_adjacent_node(&mut self, grid: &Grid, source_node: &Node, x: i32, y: i32) {
        let adjacent_x = source_node.x + x;
        let adjacent_y = source_node.y + y;
        let adjacent_cost = grid.get_coord_cost(adjacent_x, adjacent_y);
        // When pathing to the threshold the budget is enforced elsewhere,
        // so no cost limit applies here.
        let threshold = if self.opts.path_to_threshold {
            None
        } else {
            self.opts.cost_threshold
        };
        // `&&` (short-circuit) instead of the previous bitwise `&`, so
        // affordability is only evaluated for walkable coordinates.
        if grid.is_coord_walkable(adjacent_x, adjacent_y)
            && can_afford(source_node, adjacent_cost, threshold)
        {
            let mut adjacent_node =
                self.coordinate_to_node(Some(source_node), adjacent_x, adjacent_y, adjacent_cost);
            if !adjacent_node.visited {
                self.push(adjacent_node);
            } else if (source_node.cost + adjacent_cost) < adjacent_node.cost {
                // Cheaper route to an already-visited node: relax it.
                adjacent_node.cost = source_node.cost + adjacent_cost;
                adjacent_node.parent = Some(Coord::new(source_node.x, source_node.y));
                self.update(adjacent_node);
            }
        }
    }

    /// All nodes touched so far, flattened across every cached row.
    pub fn traversed_nodes(&self) -> Vec<&Node> {
        self.cache.values().flat_map(|row| row.values()).collect()
    }
}
// A node can afford a move when adding `cost` keeps its total within the
// optional budget; with no threshold configured every move is affordable.
fn can_afford(node: &Node, cost: i32, cost_threshold: Option<i32>) -> bool {
    cost_threshold.map_or(true, |threshold| node.cost + cost <= threshold)
}
// Manhattan distance between (x1, y1) and (x2, y2).
fn get_distance(x1: i32, y1: i32, x2: i32, y2: i32) -> i32 {
    (x1 - x2).abs() + (y1 - y2).abs()
}
| true |
59c5c9d124e40d4467e7cc58e3b2169742712cc0
|
Rust
|
mlsteele/stft
|
/src/lib.rs
|
UTF-8
| 9,728 | 3.265625 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
/*!
**computes the [short-time fourier transform](https://en.wikipedia.org/wiki/Short-time_Fourier_transform)
on streaming data.**
to use add `stft = "*"`
to the `[dependencies]` section of your `Cargo.toml` and call `extern crate stft;` in your code.
## example
```
extern crate stft;
use stft::{STFT, WindowType};
fn main() {
// let's generate ten seconds of fake audio
let sample_rate: usize = 44100;
let seconds: usize = 10;
let sample_count = sample_rate * seconds;
let all_samples = (0..sample_count).map(|x| x as f64).collect::<Vec<f64>>();
// let's initialize our short-time fourier transform
let window_type: WindowType = WindowType::Hanning;
let window_size: usize = 1024;
let step_size: usize = 512;
let mut stft = STFT::new(window_type, window_size, step_size);
// we need a buffer to hold a computed column of the spectrogram
let mut spectrogram_column: Vec<f64> =
std::iter::repeat(0.).take(stft.output_size()).collect();
// iterate over all the samples in chunks of 3000 samples.
// in a real program you would probably read from something instead.
for some_samples in (&all_samples[..]).chunks(3000) {
// append the samples to the internal ringbuffer of the stft
stft.append_samples(some_samples);
// as long as there remain window_size samples in the internal
// ringbuffer of the stft
while stft.contains_enough_to_compute() {
// compute one column of the stft by
// taking the first window_size samples of the internal ringbuffer,
// multiplying them with the window,
// computing the fast fourier transform,
// taking half of the symetric complex outputs,
// computing the norm of the complex outputs and
// taking the log10
stft.compute_column(&mut spectrogram_column[..]);
// here's where you would do something with the
// spectrogram_column...
// drop step_size samples from the internal ringbuffer of the stft
// making a step of size step_size
stft.move_to_next_column();
}
}
}
```
*/
use std::str::FromStr;
use std::sync::Arc;
extern crate num;
use num::complex::Complex;
use num::traits::{Float, Signed, Zero};
extern crate apodize;
extern crate strider;
use strider::{SliceRing, SliceRingImpl};
extern crate rustfft;
use rustfft::{FFT,FFTnum,FFTplanner};
/// Clamped base-10 logarithm: returns `log10(value)` when that is
/// non-negative, and `0` otherwise.
///
/// Plain `log10` maps the domain `0..1` into `-inf..0`; this variant maps
/// that whole domain to `0` instead, silently flattening very small values —
/// which may or may not be what you want for your application.
#[inline]
pub fn log10_positive<T: Float + Signed + Zero>(value: T) -> T {
    // log10 from `Float`, is_negative from `Signed`, zero from `Zero`.
    let log = value.log10();
    match log.is_negative() {
        true => T::zero(),
        false => log,
    }
}
/// the type of apodization window to use
#[derive(Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Debug, Hash)]
pub enum WindowType {
Hanning,
Hamming,
Blackman,
Nuttall,
None,
}
impl FromStr for WindowType {
    type Err = &'static str;

    /// Case-insensitive parse; accepts both "hanning" and the short "hann".
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s.to_lowercase().as_str() {
            "hanning" | "hann" => Ok(WindowType::Hanning),
            "hamming" => Ok(WindowType::Hamming),
            "blackman" => Ok(WindowType::Blackman),
            "nuttall" => Ok(WindowType::Nuttall),
            "none" => Ok(WindowType::None),
            _ => Err("no match"),
        }
    }
}
// this also implements ToString::to_string
impl std::fmt::Display for WindowType {
    fn fmt(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
        // Display is just the derived Debug rendering (the variant name).
        write!(formatter, "{:?}", self)
    }
}
// TODO write a macro that does this automatically for any enum
static WINDOW_TYPES: [WindowType; 5] = [WindowType::Hanning,
                                        WindowType::Hamming,
                                        WindowType::Blackman,
                                        WindowType::Nuttall,
                                        WindowType::None];

impl WindowType {
    /// All supported window types, in declaration order.
    pub fn values() -> [WindowType; 5] {
        WINDOW_TYPES
    }
}
/// Streaming short-time fourier transform state; see the module docs for a
/// full usage example.
pub struct STFT<T>
    where T: FFTnum + FromF64 + num::Float
{
    pub window_size: usize,
    pub step_size: usize,
    pub fft: Arc<FFT<T>>,
    // Precomputed window coefficients; `None` means no windowing.
    pub window: Option<Vec<T>>,
    /// internal ringbuffer used to store samples
    pub sample_ring: SliceRingImpl<T>,
    // Scratch buffers reused for every column to avoid per-call allocation.
    pub real_input: Vec<T>,
    pub complex_input: Vec<Complex<T>>,
    pub complex_output: Vec<Complex<T>>,
}
impl<T> STFT<T>
where T: FFTnum + FromF64 + num::Float
{
pub fn window_type_to_window_vec(window_type: WindowType,
window_size: usize)
-> Option<Vec<T>> {
match window_type {
WindowType::Hanning => Some(apodize::hanning_iter(window_size).map(FromF64::from_f64).collect()),
WindowType::Hamming => Some(apodize::hamming_iter(window_size).map(FromF64::from_f64).collect()),
WindowType::Blackman => Some(apodize::blackman_iter(window_size).map(FromF64::from_f64).collect()),
WindowType::Nuttall => Some(apodize::nuttall_iter(window_size).map(FromF64::from_f64).collect()),
WindowType::None => None,
}
}
pub fn new(window_type: WindowType, window_size: usize, step_size: usize) -> Self {
let window = Self::window_type_to_window_vec(window_type, window_size);
Self::new_with_window_vec(window, window_size, step_size)
}
// TODO this should ideally take an iterator and not a vec
pub fn new_with_window_vec(window: Option<Vec<T>>,
window_size: usize,
step_size: usize)
-> Self {
// TODO more assertions:
// window_size is power of two
// step_size > 0
assert!(step_size <= window_size);
let inverse = false;
let mut planner = FFTplanner::new(inverse);
STFT {
window_size: window_size,
step_size: step_size,
fft: planner.plan_fft(window_size),
sample_ring: SliceRingImpl::new(),
window: window,
real_input: std::iter::repeat(T::zero())
.take(window_size)
.collect(),
complex_input: std::iter::repeat(Complex::<T>::zero())
.take(window_size)
.collect(),
complex_output: std::iter::repeat(Complex::<T>::zero())
.take(window_size)
.collect(),
}
}
#[inline]
pub fn output_size(&self) -> usize {
self.window_size / 2
}
#[inline]
pub fn len(&self) -> usize {
self.sample_ring.len()
}
pub fn append_samples(&mut self, input: &[T]) {
self.sample_ring.push_many_back(input);
}
#[inline]
pub fn contains_enough_to_compute(&self) -> bool {
self.window_size <= self.sample_ring.len()
}
pub fn compute_into_complex_output(&mut self) {
assert!(self.contains_enough_to_compute());
// read into real_input
self.sample_ring.read_many_front(&mut self.real_input[..]);
// multiply real_input with window
if let Some(ref window) = self.window {
for (dst, src) in self.real_input.iter_mut().zip(window.iter()) {
*dst = *dst * *src;
}
}
// copy windowed real_input as real parts into complex_input
for (dst, src) in self.complex_input.iter_mut().zip(self.real_input.iter()) {
dst.re = src.clone();
}
// compute fft
self.fft.process(&mut self.complex_input, &mut self.complex_output);
}
/// # Panics
/// panics unless `self.output_size() == output.len()`
pub fn compute_complex_column(&mut self, output: &mut [Complex<T>]) {
assert_eq!(self.output_size(), output.len());
self.compute_into_complex_output();
for (dst, src) in output.iter_mut().zip(self.complex_output.iter()) {
*dst = src.clone();
}
}
/// # Panics
/// panics unless `self.output_size() == output.len()`
pub fn compute_magnitude_column(&mut self, output: &mut [T]) {
assert_eq!(self.output_size(), output.len());
self.compute_into_complex_output();
for (dst, src) in output.iter_mut().zip(self.complex_output.iter()) {
*dst = src.norm();
}
}
/// computes a column of the spectrogram
/// # Panics
/// panics unless `self.output_size() == output.len()`
pub fn compute_column(&mut self, output: &mut [T]) {
assert_eq!(self.output_size(), output.len());
self.compute_into_complex_output();
for (dst, src) in output.iter_mut().zip(self.complex_output.iter()) {
*dst = log10_positive(src.norm());
}
}
    /// make a step
    /// drops `self.step_size` samples from the internal buffer `self.sample_ring`,
    /// advancing the analysis window by one hop.
    pub fn move_to_next_column(&mut self) {
        self.sample_ring.drop_many_front(self.step_size);
    }
}
/// Conversion from `f64` into the implementing sample type.
pub trait FromF64 {
    /// Converts `n` into `Self`, truncating precision if necessary.
    fn from_f64(n: f64) -> Self;
}

impl FromF64 for f64 {
    fn from_f64(n: f64) -> Self {
        n
    }
}

impl FromF64 for f32 {
    fn from_f64(n: f64) -> Self {
        n as Self
    }
}
| true |
3fb8ab6066eaa6c7f42842b7fe99514915dc47d1
|
Rust
|
ctron/jsonpath
|
/benchmark/benches/bench_example.rs
|
UTF-8
| 2,240 | 2.78125 | 3 |
[
"MIT"
] |
permissive
|
#![feature(test)]
extern crate bencher;
extern crate jsonpath_lib as jsonpath;
extern crate serde;
extern crate serde_json;
extern crate test;
use std::io::Read;
use serde_json::Value;
use self::test::Bencher;
/// Reads the whole file at `path` into a `String`.
///
/// # Panics
/// Panics if the file cannot be opened or read (same as the original's
/// `unwrap`s on `File::open` and `read_to_string`).
fn read_json(path: &str) -> String {
    // `fs::read_to_string` sizes the buffer from file metadata and replaces
    // the manual File/Read bookkeeping of the hand-rolled version.
    std::fs::read_to_string(path).unwrap()
}
/// Reads the benchmark fixture `./example.json` into a `String`.
fn get_string() -> String {
    let fixture = "./example.json";
    read_json(fixture)
}
/// Parses the benchmark fixture into a `serde_json::Value`.
fn get_json() -> Value {
    serde_json::from_str(&get_string()).unwrap()
}
/// Returns the `i`-th benchmark JSONPath expression.
///
/// # Panics
/// Panics if `i` is out of range, exactly like the original `Vec` indexing.
fn get_path(i: usize) -> &'static str {
    // A static slice avoids re-allocating a Vec on every call
    // (clippy::useless_vec); indexing behavior is unchanged.
    const PATHS: &[&str] = &[
        "$.store.book[*].author", //0
        "$..author", //1
        "$.store.*", //2
        "$.store..price", //3
        "$..book[2]", //4
        "$..book[-2]", //5
        "$..book[0,1]", //6
        "$..book[:2]", //7
        "$..book[1:2]", //8
        "$..book[-2:]", //9
        "$..book[2:]", //10
        "$..book[?(@.isbn)]", //11
        "$.store.book[?(@.price == 10)]", //12
        "$..*", //13
        "$..book[ ?( (@.price < 13 || $.store.bicycle.price < @.price) && @.price <=10 ) ]", //14
        "$.store.book[?( (@.price < 10 || @.price > 10) && @.price > 10 )]"
    ];
    PATHS[i]
}
// Shared benchmark driver: selects `get_path(index)` against the fixture
// JSON. Note the inner range `1..100` runs 99 selections per measured
// closure invocation.
fn _selector(b: &mut Bencher, index: usize) {
    let json = get_json();
    b.iter(move || {
        for _ in 1..100 {
            let mut selector = jsonpath::Selector::default();
            let _ = selector.str_path(get_path(index));
            selector.value(&json);
            let r = selector.select();
            if r.is_err() {
                panic!()
            }
        }
    });
}
// Generates a `#[bench]` function named `$name` that benchmarks the
// JSONPath expression at index `$i` via `_selector`.
macro_rules! selector {
    ($name:ident, $i:expr) => {
        #[bench]
        fn $name(b: &mut Bencher) { _selector(b, $i); }
    };
}
// One benchmark per example expression; the number is the `get_path` index.
selector!(example0_1, 0);
selector!(example1_1, 1);
selector!(example2_1, 2);
selector!(example3_1, 3);
selector!(example4_1, 4);
selector!(example5_1, 5);
selector!(example6_1, 6);
selector!(example7_1, 7);
selector!(example8_1, 8);
selector!(example9_1, 9);
selector!(example_10_1, 10);
selector!(example_11_1, 11);
selector!(example_12_1, 12);
selector!(example_13_1, 13);
selector!(example_14_1, 14);
selector!(example_15_1, 15);
| true |
61a64dd89b3c9941960c9a627fe7e9bcf6daea87
|
Rust
|
gridbugs/apocalypse-post
|
/src/util/schedule.rs
|
UTF-8
| 4,531 | 3.28125 | 3 |
[
"MIT"
] |
permissive
|
use std::collections::{BinaryHeap, HashSet};
use std::cmp::Ordering;
// A queued item: its payload plus the bookkeeping needed for deterministic
// ordering in the schedule's binary heap.
#[derive(Serialize, Deserialize)]
struct ScheduleEntry<T> {
    value: T,
    // Absolute time at which the entry fires.
    abs_time: u64,
    // Delay relative to the schedule time when the entry was inserted.
    rel_time: u64,
    // Insertion sequence number; breaks ties between equal `abs_time`s.
    seq: u64,
}
impl<T> ScheduleEntry<T> {
fn new(value: T, abs_time: u64, rel_time: u64, seq: u64) -> Self {
ScheduleEntry {
value: value,
abs_time: abs_time,
rel_time: rel_time,
seq: seq,
}
}
}
impl<T> Ord for ScheduleEntry<T> {
    /// Orders entries for a max-heap so the *earliest* entry is popped
    /// first: both comparisons are reversed (`other` vs. `self`), with the
    /// insertion sequence breaking ties between equal times.
    fn cmp(&self, other: &Self) -> Ordering {
        match other.abs_time.cmp(&self.abs_time) {
            Ordering::Equal => other.seq.cmp(&self.seq),
            unequal => unequal,
        }
    }
}
impl<T> PartialOrd for ScheduleEntry<T> {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        // Delegates to the total order defined by `Ord`.
        Some(Ord::cmp(self, other))
    }
}
impl<T> PartialEq for ScheduleEntry<T> {
    fn eq(&self, other: &Self) -> bool {
        // Equality must agree with `cmp`, which inspects only time and sequence.
        (self.abs_time, self.seq) == (other.abs_time, other.seq)
    }
}
impl<T> Eq for ScheduleEntry<T> {}
// A fired event handed back to the caller by `Schedule::next`.
#[derive(Debug)]
pub struct ScheduleEvent<T> {
    pub event: T,
    // Ticks elapsed since the previously returned event.
    pub time_delta: u64,
    // The relative delay the event was originally queued with.
    pub time_queued: u64,
    // Absolute schedule time at which the event fired.
    pub absolute_time: u64,
}
impl<T> ScheduleEvent<T> {
fn new(event: T, time_delta: u64, time_queued: u64, absolute_time: u64) -> Self {
ScheduleEvent {
event: event,
time_delta: time_delta,
time_queued: time_queued,
absolute_time: absolute_time,
}
}
}
// An opaque receipt for a queued entry; lets the holder invalidate or
// re-schedule it later via its sequence number.
#[derive(Clone, Copy, Debug, Serialize, Deserialize)]
pub struct ScheduleTicket {
    seq: u64,
}
// A discrete-time event queue: entries fire in absolute-time order, with
// ties broken by insertion order.
pub struct Schedule<T> {
    heap: BinaryHeap<ScheduleEntry<T>>,
    // Sequence numbers of entries that were invalidated and should be
    // skipped when popped.
    invalid: HashSet<u64>,
    // Current absolute time; advances as events are popped.
    abs_time: u64,
    // Next sequence number to hand out.
    seq: u64,
}
// Serde-friendly form of `Schedule`: the heap is flattened into a `Vec`.
#[derive(Serialize, Deserialize)]
pub struct SerializableSchedule<T> {
    entries: Vec<ScheduleEntry<T>>,
    invalid: HashSet<u64>,
    abs_time: u64,
    seq: u64,
}
impl<T> From<Schedule<T>> for SerializableSchedule<T> {
    /// Flattens the heap into a plain `Vec` so the schedule can be serialized.
    fn from(schedule: Schedule<T>) -> Self {
        SerializableSchedule {
            entries: schedule.heap.into_vec(),
            invalid: schedule.invalid,
            abs_time: schedule.abs_time,
            seq: schedule.seq,
        }
    }
}
impl<T> From<SerializableSchedule<T>> for Schedule<T> {
    /// Rebuilds the binary heap from the serialized entry list.
    fn from(schedule: SerializableSchedule<T>) -> Self {
        Schedule {
            heap: BinaryHeap::from(schedule.entries),
            invalid: schedule.invalid,
            abs_time: schedule.abs_time,
            seq: schedule.seq,
        }
    }
}
impl<T> Schedule<T> {
    /// Creates an empty schedule starting at absolute time 0.
    pub fn new() -> Self {
        Schedule {
            heap: BinaryHeap::new(),
            invalid: HashSet::new(),
            abs_time: 0,
            seq: 0,
        }
    }
    /// Queues `value` to fire `rel_time` ticks after the current schedule
    /// time, returning a ticket that can later invalidate the entry.
    pub fn insert(&mut self, value: T, rel_time: u64) -> ScheduleTicket {
        let seq = self.seq;
        let abs_time = self.abs_time + rel_time;
        let entry = ScheduleEntry::new(value, abs_time, rel_time, seq);
        self.heap.push(entry);
        self.seq += 1;
        ScheduleTicket {
            seq: seq,
        }
    }
    /// Queues `value` under an existing ticket's sequence number, so the
    /// old ticket also refers to the newly scheduled entry.
    pub fn insert_with_ticket(&mut self, value: T, rel_time: u64, ticket: ScheduleTicket) -> ScheduleTicket {
        // it must be a ticket we've given out in the past
        assert!(ticket.seq < self.seq, "Invalid schedule ticket");
        let abs_time = self.abs_time + rel_time;
        let entry = ScheduleEntry::new(value, abs_time, rel_time, ticket.seq);
        self.heap.push(entry);
        ScheduleTicket {
            seq: ticket.seq,
        }
    }
    /// Pops the next valid event, discarding invalidated entries along the
    /// way, and advances the schedule's time to the event's absolute time.
    pub fn next(&mut self) -> Option<ScheduleEvent<T>> {
        while let Some(entry) = self.heap.pop() {
            if self.invalid.remove(&entry.seq) {
                continue;
            }
            let time_delta = entry.abs_time - self.abs_time;
            self.abs_time = entry.abs_time;
            return Some(ScheduleEvent::new(entry.value, time_delta, entry.rel_time, entry.abs_time));
        }
        None
    }
    /// Clears all pending entries and restarts time and sequence numbering.
    pub fn reset(&mut self) {
        self.heap.clear();
        // Bug fix: sequence numbers restart at 0 after a reset, so stale
        // invalidations recorded before the reset must be discarded or they
        // would silently cancel future entries that reuse those numbers.
        self.invalid.clear();
        self.abs_time = 0;
        self.seq = 0;
    }
    /// Returns the schedule's current absolute time.
    pub fn time(&self) -> u64 {
        self.abs_time
    }
    /// Marks the entry referenced by `ticket` so `next` will skip it.
    pub fn invalidate(&mut self, ticket: ScheduleTicket) {
        self.invalid.insert(ticket.seq);
    }
    /// Number of queued entries (note: includes already-invalidated ones).
    pub fn len(&self) -> usize {
        self.heap.len()
    }
}
| true |
a648756629707971a758c21f4ef1276531bc25fc
|
Rust
|
tatetian/ngo2
|
/src/libos/crates/async-io/src/util/channel.rs
|
UTF-8
| 11,342 | 2.640625 | 3 |
[
"BSD-3-Clause"
] |
permissive
|
use std::sync::atomic::{AtomicBool, Ordering};
use atomic::Atomic;
use ringbuf::{Consumer as RbConsumer, Producer as RbProducer, RingBuffer};
use crate::event::{Events, Pollee, Poller};
use crate::file::{AccessMode, File, StatusFlags};
use crate::prelude::*;
/// A unidirectional communication channel, intended to implement IPC, e.g., pipe,
/// unix domain sockets, etc.
#[derive(Debug)]
pub struct Channel {
    // Write endpoint.
    producer: Producer,
    // Read endpoint.
    consumer: Consumer,
}
// The write half of a channel; shares `Common` with its `Consumer`.
#[derive(Debug)]
pub struct Producer {
    common: Arc<Common>,
}
// The read half of a channel; shares `Common` with its `Producer`.
#[derive(Debug)]
pub struct Consumer {
    common: Arc<Common>,
}
// State shared between the two endpoints of one channel.
#[derive(Debug)]
struct Common {
    producer: EndPoint<RbProducer<u8>>,
    consumer: EndPoint<RbConsumer<u8>>,
    // Serializes pollee updates so events always reflect the true ring state.
    event_lock: Mutex<()>,
}
// One half of the ring buffer plus its poll state and status flags.
struct EndPoint<T> {
    ringbuf: Mutex<T>,
    pollee: Pollee,
    is_shutdown: AtomicBool,
    flags: Atomic<StatusFlags>,
}
impl Channel {
    /// Creates a channel with the given ring-buffer capacity and empty flags.
    pub fn with_capacity(capacity: usize) -> Result<Self> {
        Self::with_capacity_and_flags(capacity, StatusFlags::empty())
    }
    /// Creates a channel with the given ring-buffer capacity and status flags.
    pub fn with_capacity_and_flags(capacity: usize, flags: StatusFlags) -> Result<Self> {
        let common = Arc::new(Common::with_capacity_and_flags(capacity, flags)?);
        Ok(Self {
            producer: Producer {
                common: common.clone(),
            },
            consumer: Consumer { common },
        })
    }
    /// Splits the channel into its write and read endpoints.
    pub fn split(self) -> (Producer, Consumer) {
        (self.producer, self.consumer)
    }
    /// Borrows the write endpoint.
    pub fn producer(&self) -> &Producer {
        &self.producer
    }
    /// Borrows the read endpoint.
    pub fn consumer(&self) -> &Consumer {
        &self.consumer
    }
}
impl Common {
    /// Builds the shared state: a split ring buffer wrapped in two
    /// endpoints, plus the event lock that serializes pollee updates.
    /// The producer starts writable (`Events::OUT`); the consumer starts
    /// with no events (nothing to read yet).
    pub fn with_capacity_and_flags(capacity: usize, flags: StatusFlags) -> Result<Self> {
        check_status_flags(flags)?;
        if capacity == 0 {
            return_errno!(EINVAL, "capacity cannot be zero");
        }
        let rb: RingBuffer<u8> = RingBuffer::new(capacity);
        let (rb_producer, rb_consumer) = rb.split();
        let producer = EndPoint::new(rb_producer, Events::OUT, flags);
        let consumer = EndPoint::new(rb_consumer, Events::empty(), flags);
        let event_lock = Mutex::new(());
        Ok(Self {
            producer,
            consumer,
            event_lock,
        })
    }
    /// Acquires the event lock; hold the guard while updating pollees.
    pub fn lock_event(&self) -> MutexGuard<()> {
        self.event_lock.lock()
    }
}
impl<T> EndPoint<T> {
    /// Wraps one ring-buffer half with its pollee, shutdown flag, and flags.
    pub fn new(ringbuf: T, init_events: Events, flags: StatusFlags) -> Self {
        Self {
            ringbuf: Mutex::new(ringbuf),
            pollee: Pollee::new(init_events),
            is_shutdown: AtomicBool::new(false),
            flags: Atomic::new(flags),
        }
    }
    /// Locks and returns the underlying ring-buffer half.
    pub fn ringbuf(&self) -> MutexGuard<T> {
        self.ringbuf.lock()
    }
    /// The endpoint's poll state.
    pub fn pollee(&self) -> &Pollee {
        &self.pollee
    }
    /// Whether this endpoint has been shut down.
    pub fn is_shutdown(&self) -> bool {
        self.is_shutdown.load(Ordering::Acquire)
    }
    /// Marks this endpoint as shut down (irreversible).
    pub fn shutdown(&self) {
        self.is_shutdown.store(true, Ordering::Release)
    }
}
impl<T> std::fmt::Debug for EndPoint<T> {
    // Manual impl because `T` (the ring-buffer half) is not `Debug`;
    // its field is elided as "..".
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("EndPoint")
            .field("ringbuf", &"..")
            .field("pollee", self.pollee())
            .field("is_shutdown", &self.is_shutdown())
            .finish()
    }
}
impl Producer {
    // This producer's own endpoint (the ring buffer's write half).
    fn this_end(&self) -> &EndPoint<RbProducer<u8>> {
        &self.common.producer
    }
    // The paired consumer's endpoint (the ring buffer's read half).
    fn peer_end(&self) -> &EndPoint<RbConsumer<u8>> {
        &self.common.consumer
    }
    // Recomputes poll events after a write: a full ring clears OUT on this
    // side; a non-empty ring raises IN on the consumer side.
    fn update_pollee(&self) {
        let this_end = self.this_end();
        let peer_end = self.peer_end();
        // Update the event of pollee in a critical region so that pollee
        // always reflects the _true_ state of the underlying ring buffer
        // regardless of any race conditions.
        // NOTE(review): `event_lock` is an intentionally-held guard, though
        // the unused-variable warning suggests renaming it `_event_lock`.
        let event_lock = self.common.lock_event();
        let rb = this_end.ringbuf();
        if rb.is_full() {
            this_end.pollee().del_events(Events::OUT);
        }
        if !rb.is_empty() {
            peer_end.pollee().add_events(Events::IN);
        }
    }
}
impl File for Producer {
fn write(&self, buf: &[u8]) -> Result<usize> {
let this_end = self.this_end();
let peer_end = self.peer_end();
if this_end.is_shutdown() || peer_end.is_shutdown() {
return_errno!(EPIPE, "");
}
if buf.len() == 0 {
return Ok(0);
}
let nbytes = {
let mut rb = this_end.ringbuf();
let nbytes = rb.push_slice(buf);
nbytes
};
self.update_pollee();
if nbytes > 0 {
Ok(nbytes)
} else {
return_errno!(EAGAIN, "try write later");
}
}
// TODO: implement writev
fn poll(&self, mask: Events, poller: Option<&mut Poller>) -> Events {
self.this_end().pollee().poll(mask, poller)
}
fn status_flags(&self) -> StatusFlags {
self.this_end().flags.load(Ordering::Relaxed)
}
fn set_status_flags(&self, new_status: StatusFlags) -> Result<()> {
check_status_flags(new_status)?;
self.this_end().flags.store(new_status, Ordering::Relaxed);
Ok(())
}
}
impl Drop for Producer {
fn drop(&mut self) {
self.peer_end()
.pollee()
.add_events(Events::IN | Events::HUP);
}
}
impl Consumer {
fn this_end(&self) -> &EndPoint<RbConsumer<u8>> {
&self.common.consumer
}
fn peer_end(&self) -> &EndPoint<RbProducer<u8>> {
&self.common.producer
}
fn update_pollee(&self) {
let this_end = self.this_end();
let peer_end = self.peer_end();
// Update the event of pollee in a critical region so that pollee
// always reflects the _true_ state of the underlying ring buffer
// regardless of any race conditions.
let event_lock = self.common.lock_event();
let rb = this_end.ringbuf();
if rb.is_empty() {
this_end.pollee().del_events(Events::IN);
}
if !rb.is_full() {
peer_end.pollee().add_events(Events::OUT);
}
}
}
impl File for Consumer {
fn read(&self, buf: &mut [u8]) -> Result<usize> {
let this_end = self.this_end();
let peer_end = self.peer_end();
if this_end.is_shutdown() || peer_end.is_shutdown() {
return_errno!(EPIPE, "");
}
if buf.len() == 0 {
return Ok(0);
}
let nbytes = {
let mut rb = this_end.ringbuf();
let nbytes = rb.pop_slice(buf);
nbytes
};
self.update_pollee();
if nbytes > 0 {
Ok(nbytes)
} else {
return_errno!(EAGAIN, "try read later");
}
}
// TODO: implement read
fn poll(&self, mask: Events, poller: Option<&mut Poller>) -> Events {
self.this_end().pollee().poll(mask, poller)
}
fn status_flags(&self) -> StatusFlags {
self.this_end().flags.load(Ordering::Relaxed)
}
fn set_status_flags(&self, new_status: StatusFlags) -> Result<()> {
check_status_flags(new_status)?;
self.this_end().flags.store(new_status, Ordering::Relaxed);
Ok(())
}
}
impl Drop for Consumer {
fn drop(&mut self) {
self.peer_end()
.pollee()
.add_events(Events::OUT | Events::HUP);
}
}
#[cfg(test)]
mod tests {
use std::fmt::Debug;
use std::sync::Arc;
use super::*;
use crate::file::{Async, File};
#[test]
fn transfer_data_with_small_buf() {
async_rt::task::block_on(async {
const TOTAL_NBYTES: usize = 4 * 1024 * 1024;
const CHANNEL_CAPACITY: usize = 4 * 1024;
const BUF_SIZE: usize = 128;
do_transfer_data(TOTAL_NBYTES, CHANNEL_CAPACITY, BUF_SIZE).await;
});
}
#[test]
fn transfer_data_with_big_buf() {
async_rt::task::block_on(async {
const TOTAL_NBYTES: usize = 16 * 1024 * 1024;
const CHANNEL_CAPACITY: usize = 4 * 1024;
const BUF_SIZE: usize = 6 * 1024;
do_transfer_data(TOTAL_NBYTES, CHANNEL_CAPACITY, BUF_SIZE).await;
});
}
async fn do_transfer_data(total_nbytes: usize, channel_capacity: usize, buf_size: usize) {
let channel = Channel::with_capacity(channel_capacity).unwrap();
let (producer, consumer) = channel.split();
let producer = Async::new(producer);
let consumer = Async::new(consumer);
let producer_handle = async_rt::task::spawn(async move {
let mut buf = Vec::with_capacity(buf_size);
unsafe {
buf.set_len(buf.capacity());
}
let mut sofar_nbytes = 0;
while sofar_nbytes < total_nbytes {
let nbytes = producer.write(buf.as_slice()).await.unwrap();
sofar_nbytes += nbytes;
}
});
let consumer_handle = async_rt::task::spawn(async move {
let mut buf = Vec::with_capacity(buf_size);
unsafe {
buf.set_len(buf.capacity());
}
let mut sofar_nbytes = 0;
while sofar_nbytes < total_nbytes {
let nbytes = consumer.read(buf.as_mut_slice()).await.unwrap();
sofar_nbytes += nbytes;
}
});
producer_handle.await;
consumer_handle.await;
}
#[test]
fn poll() {
const BUF_LEN: usize = 4 * 1024;
const CHANNEL_CAPACITY: usize = 2 * BUF_LEN;
let mask = Events::all();
let mut buf = Vec::with_capacity(BUF_LEN);
unsafe {
buf.set_len(BUF_LEN);
}
let channel = Channel::with_capacity(CHANNEL_CAPACITY).unwrap();
let (producer, consumer) = channel.split();
// Initial events
assert!(producer.poll(mask, None) == Events::OUT);
assert!(consumer.poll(mask, None) == Events::empty());
// First write
producer.write(&buf[..BUF_LEN]);
assert!(producer.poll(mask, None) == Events::OUT);
assert!(consumer.poll(mask, None) == Events::IN);
// First read, but only half of the avail data
consumer.read(&mut buf[..BUF_LEN / 2]);
assert!(producer.poll(mask, None) == Events::OUT);
assert!(consumer.poll(mask, None) == Events::IN);
// Second read, consume the rest of avail data
consumer.read(&mut buf[..BUF_LEN / 2]);
assert!(producer.poll(mask, None) == Events::OUT);
assert!(consumer.poll(mask, None) == Events::empty());
// Second and third write, filling up the underlying buffer
producer.write(&buf[..BUF_LEN]);
producer.write(&buf[..BUF_LEN]);
assert!(producer.poll(mask, None) == Events::empty());
assert!(consumer.poll(mask, None) == Events::IN);
}
}
fn check_status_flags(flags: StatusFlags) -> Result<()> {
let VALID_FLAGS: StatusFlags = StatusFlags::O_NONBLOCK | StatusFlags::O_DIRECT;
if !VALID_FLAGS.contains(flags) {
return_errno!(EINVAL, "invalid flags");
}
if flags.contains(StatusFlags::O_DIRECT) {
return_errno!(EINVAL, "O_DIRECT is not supported");
}
Ok(())
}
| true |
73b7ad286ae4d828d8e57dda30e7d66dd466341b
|
Rust
|
alexeyz041/toolbox
|
/rust/csv_tools/dcadj/src/main.rs
|
UTF-8
| 1,660 | 3.125 | 3 |
[
"CC0-1.0",
"LicenseRef-scancode-public-domain"
] |
permissive
|
use std::io::Write;
use std::io::BufReader;
use std::io::BufRead;
use std::fs::File;
use std::env;
use std::io::BufWriter;
#[derive(Debug)]
struct Data<T> {
val: T,
time: T
}
fn load(fnm: &str, n: usize) -> Vec<Data<u16>>
{
println!("Loading {}",fnm);
let mut data = Vec::new();
let f = File::open(fnm).unwrap();
let file = BufReader::new(&f);
for line in file.lines() {
if n != 0 && data.len() >= n {
break;
}
let l = line.unwrap();
let w = l.split(',').collect::<Vec<&str>>();
let time = &w[0];
let val = &w[1];
let t = time.parse::<u16>().unwrap();
let v = val.parse::<u16>().unwrap();
data.push(Data{ val: v, time: t });
}
data
}
fn scale_k(pos: usize, m: f64, mt: f64) -> f64 {
if pos < 5 {
return 0.;
}
if pos >= 5 && pos < mt as usize {
return ((pos-5) as f64)*m/mt;
}
m
}
fn main()
{
let m = env::args().skip(1).next().expect("Missing adj. value").parse::<f64>().unwrap();
let mt = env::args().skip(2).next().expect("Missing adj. time").parse::<f64>().unwrap();
let fnm = env::args().skip(3).next().expect("Missing input file");
let data = load(&fnm, 0);
let mut res = Vec::new();
for i in 0..data.len() {
res.push(Data { val: (data[i].val as f64 - scale_k(i,m,mt)) as u16, time: data[i].time })
}
{
let ofn = "adj.txt";
let file = File::create(&ofn).expect("Couldn't create output file");
let mut writer = BufWriter::new(&file);
for i in 0..res.len() {
writeln!(&mut writer,"{},{}", res[i].time, res[i].val).unwrap();
}
println!("created {}",ofn);
}
}
| true |
4ca8e5b9cf8b8284a70f9d2d3566d8a105f8f16b
|
Rust
|
TanveerAhmed98/Artificial-Intelligence-of-Things
|
/Rust_Codes/hello_world/src/main.rs
|
UTF-8
| 320 | 2.546875 | 3 |
[] |
no_license
|
// cargo check checks our code for errors.
// cargo build creates executeable file of our code.
// cargo run run our code.
// we can stop warnings that compiler is giving us by using this commands
// #![allow(dead_code)],#![allow(unused_variables)],#![allow(unused_imports)]
fn main() {
println!("Hello, world!");
}
| true |
02d7904d3e08cf015db399e3f0d408d2a923a2dd
|
Rust
|
fabianschuiki/llhd
|
/src/ir/sig.rs
|
UTF-8
| 5,597 | 3.3125 | 3 |
[
"Apache-2.0",
"MIT"
] |
permissive
|
// Copyright (c) 2017-2021 Fabian Schuiki
//! Representation of the input and output arguments of functions, processes,
//! and entitites.
use crate::{
ir::{Arg, Unit},
table::PrimaryTable,
ty::Type,
};
/// A description of the input and output arguments of a unit.
#[derive(Default, Clone, Serialize, Deserialize)]
pub struct Signature {
args: PrimaryTable<Arg, ArgData>,
inp: Vec<Arg>,
oup: Vec<Arg>,
retty: Option<Type>,
}
/// Argument direction.
#[derive(Clone, PartialEq, Eq, Serialize, Deserialize)]
enum ArgDir {
Input,
Output,
}
/// A single argument of a `Function`, `Process`, or `Entity`.
#[derive(Clone, PartialEq, Eq, Serialize, Deserialize)]
struct ArgData {
ty: Type,
dir: ArgDir,
num: u16,
}
impl Signature {
/// Create a new signature.
pub fn new() -> Self {
Default::default()
}
/// Add an input argument.
pub fn add_input(&mut self, ty: Type) -> Arg {
let arg = self.args.add(ArgData {
ty,
dir: ArgDir::Input,
num: self.inp.len() as u16,
});
self.inp.push(arg);
arg
}
/// Add an output argument.
pub fn add_output(&mut self, ty: Type) -> Arg {
let arg = self.args.add(ArgData {
ty,
dir: ArgDir::Output,
num: self.oup.len() as u16,
});
self.oup.push(arg);
arg
}
/// Set the return type of the signature.
pub fn set_return_type(&mut self, ty: Type) {
self.retty = Some(ty);
}
/// Get the return type of the signature.
pub fn return_type(&self) -> Type {
self.retty.clone().unwrap()
}
/// Check whether the signature has any inputs.
pub fn has_inputs(&self) -> bool {
!self.inp.is_empty()
}
/// Check whether the signature has any outputs.
pub fn has_outputs(&self) -> bool {
!self.oup.is_empty()
}
/// Check whether the signature has a return type.
pub fn has_return_type(&self) -> bool {
self.retty.is_some()
}
/// Return an iterator over the inputs of the signature.
pub fn inputs<'a>(&'a self) -> impl Iterator<Item = Arg> + 'a {
self.inp.iter().cloned()
}
/// Return an iterator over the outputs of the signature.
pub fn outputs<'a>(&'a self) -> impl Iterator<Item = Arg> + 'a {
self.oup.iter().cloned()
}
/// Return an iterator over the arguments of the signature.
///
/// Inputs come first, then outputs.
pub fn args<'a>(&'a self) -> impl Iterator<Item = Arg> + 'a {
self.inputs().chain(self.outputs())
}
/// Return the type of argument `arg`.
pub fn arg_type(&self, arg: Arg) -> Type {
self.args[arg].ty.clone()
}
/// Check whether `arg` is an input.
pub fn is_input(&self, arg: Arg) -> bool {
self.args[arg].dir == ArgDir::Input
}
/// Check whether `arg` is an output.
pub fn is_output(&self, arg: Arg) -> bool {
self.args[arg].dir == ArgDir::Output
}
/// Dump the signature in human-readable form.
pub fn dump<'a>(&'a self, unit: &Unit<'a>) -> SignatureDumper<'a> {
SignatureDumper(self, *unit)
}
}
impl Eq for Signature {}
impl PartialEq for Signature {
fn eq(&self, other: &Self) -> bool {
self.args().count() == other.args().count()
&& self
.args()
.zip(other.args())
.all(|(a, b)| self.args[a] == other.args[b])
}
}
impl std::fmt::Display for Signature {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
use std::iter::{once, repeat};
write!(f, "(")?;
for (arg, sep) in self.inputs().zip(once("").chain(repeat(", "))) {
write!(f, "{}{}", sep, self.arg_type(arg))?;
}
if self.has_outputs() {
write!(f, ") -> (")?;
for (arg, sep) in self.outputs().zip(once("").chain(repeat(", "))) {
write!(f, "{}{}", sep, self.arg_type(arg))?;
}
}
write!(f, ")")?;
if let Some(ref retty) = self.retty {
write!(f, " {}", retty)?;
}
Ok(())
}
}
impl std::fmt::Debug for Signature {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "{}", self)
}
}
/// Temporary object to dump a `Signature` in human-readable form for debugging.
pub struct SignatureDumper<'a>(&'a Signature, Unit<'a>);
impl std::fmt::Display for SignatureDumper<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
use std::iter::{once, repeat};
write!(f, "(")?;
for (arg, sep) in self.0.inputs().zip(once("").chain(repeat(", "))) {
let value = self.1.arg_value(arg);
write!(
f,
"{}{} {}",
sep,
self.1.value_type(value),
value.dump(&self.1)
)?;
}
write!(f, ")")?;
if self.0.has_outputs() {
write!(f, " -> (")?;
for (arg, sep) in self.0.outputs().zip(once("").chain(repeat(", "))) {
let value = self.1.arg_value(arg);
write!(
f,
"{}{} {}",
sep,
self.1.value_type(value),
value.dump(&self.1)
)?;
}
write!(f, ")")?;
}
if self.0.has_return_type() {
write!(f, " {}", self.0.return_type())?;
}
Ok(())
}
}
| true |
f455ade81a357a639558df3a6964415887874cc3
|
Rust
|
mmitteregger/cuke-runner
|
/examples/calculator/src/lib.rs
|
UTF-8
| 1,413 | 3.359375 | 3 |
[
"MIT"
] |
permissive
|
#![warn(rust_2018_idioms)]
const OPS: &[char] = &['-', '+', '*', '/'];
#[derive(Debug, Default)]
pub struct RpnCalculator {
stack: Vec<f64>,
}
impl RpnCalculator {
pub fn new() -> RpnCalculator {
RpnCalculator {
stack: Vec::new(),
}
}
pub fn reset(&mut self) {
self.stack.clear();
}
pub fn push<S: AsRef<str>>(&mut self, arg: S) {
let arg = arg.as_ref();
let first_char = arg.chars().next();
if arg.chars().count() == 1 && OPS.contains(&first_char.unwrap()) {
let operator = first_char.unwrap();
let y = self.remove_last();
let x = if self.stack.is_empty() { 0f64 } else { self.remove_last() };
let val = match operator {
'-' => x - y,
'+' => x + y,
'*' => x * y,
'/' => x / y,
_ => panic!("unexpected operator: {}", operator),
};
self.stack.push(val);
} else {
let num = arg.parse::<f64>().unwrap();
self.stack.push(num);
}
}
fn remove_last(&mut self) -> f64 {
let last_elem_index = self.stack.len() - 1;
self.stack.remove(last_elem_index)
}
pub fn pi(&mut self) {
self.stack.push(std::f64::consts::PI);
}
pub fn value(&self) -> f64 {
*self.stack.last().unwrap()
}
}
| true |
7c2a0ba94e5f7493ace02a885f343e612a91811a
|
Rust
|
owap/rust-wasm-python-101
|
/src/lib.rs
|
UTF-8
| 732 | 3.34375 | 3 |
[] |
no_license
|
#[no_mangle]
pub extern fn simple_add(a: i32, b: i32) -> i32 { a + b}
#[no_mangle]
pub extern fn fibo(n: i32) -> i32 {
if n < 0 {
panic!("{} is negative!", n);
} else if n == 0 {
panic!("zero is not a right argument to fibonacci()!");
} else if n == 1 {
return 1;
}
let mut sum = 0;
let mut last = 0;
let mut curr = 1;
for _i in 1..n {
sum = last + curr;
last = curr;
curr = sum;
}
sum
}
#[no_mangle]
pub extern fn loop_str(string: String) -> () {
// for strings types in Rust see http://www.suspectsemantics.com/blog/2016/03/27/string-types-in-rust/
for c in string.chars() {
let _s = c.to_digit(10).unwrap() * 10;
}
}
| true |
541ab018e7914b1ab19fda98d9f2a95d4d3e3be0
|
Rust
|
c-a-m-o/rust-battledev
|
/src/ex1.rs
|
UTF-8
| 434 | 2.765625 | 3 |
[] |
no_license
|
use crate::{read_line, print};
pub fn main() {
let n : usize = read_line().parse().unwrap();
let mut count = 0;
for _ in 0..n {
let line = read_line();
let name : Vec<u8> = line.into();
let l = name.len();
if l >= 5 {
if name[l-5..l].iter().all(|&c| char::from(c).is_ascii_digit()) {
count += 1;
}
}
}
print(&format!("{}", count));
}
| true |
19ec94a655eed985931cf0672d4f137309acadc2
|
Rust
|
SrimantaBarua/bed_low_level
|
/geom/src/point.rs
|
UTF-8
| 1,485 | 3.234375 | 3 |
[
"MIT"
] |
permissive
|
// (C) 2020 Srimanta Barua <[email protected]>
use std::fmt;
use std::ops::{Add, AddAssign, Sub, SubAssign};
use super::{vec2, Num, NumCast, Vector2D};
#[derive(Clone, Copy, PartialEq)]
pub struct Point2D<T: Num> {
pub x: T,
pub y: T,
}
impl<T: Num> Point2D<T> {
pub fn new(x: T, y: T) -> Point2D<T> {
Point2D { x, y }
}
pub fn to_vec2(self) -> Vector2D<T> {
vec2(self.x, self.y)
}
pub fn cast<U: Num>(self) -> Point2D<U>
where
T: NumCast<U>,
{
point2(self.x.cast(), self.y.cast())
}
}
pub fn point2<T: Num>(x: T, y: T) -> Point2D<T> {
Point2D::new(x, y)
}
impl<T: Num> Add<Vector2D<T>> for Point2D<T> {
type Output = Point2D<T>;
fn add(self, vec: Vector2D<T>) -> Point2D<T> {
point2(self.x + vec.x, self.y + vec.y)
}
}
impl<T: Num> AddAssign<Vector2D<T>> for Point2D<T> {
fn add_assign(&mut self, vec: Vector2D<T>) {
self.x += vec.x;
self.y += vec.y;
}
}
impl<T: Num> Sub<Vector2D<T>> for Point2D<T> {
type Output = Point2D<T>;
fn sub(self, vec: Vector2D<T>) -> Point2D<T> {
point2(self.x - vec.x, self.y - vec.y)
}
}
impl<T: Num> SubAssign<Vector2D<T>> for Point2D<T> {
fn sub_assign(&mut self, vec: Vector2D<T>) {
self.x -= vec.x;
self.y -= vec.y;
}
}
impl<T: Num + fmt::Debug> fmt::Debug for Point2D<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "({:?},{:?})", self.x, self.y)
}
}
| true |
6f262c2a24a6e387c136b54c2ad0afee9aa5d348
|
Rust
|
clucompany/cluStrConcat
|
/examples/easy_array_string.rs
|
UTF-8
| 250 | 2.625 | 3 |
[
"Apache-2.0"
] |
permissive
|
fn main() {
let data: &[&str] = &["123", "456", "789", "000"];
let string = cluStrConcat::array_to_string(data);
println!("{:?}, capacity: {}", string, string.capacity());
println!("old {:?}", data);
println!("{:?}", string.as_bytes());
}
| true |
20af374b746a2d858abc8c4c63c325fc3d5bc209
|
Rust
|
janne/aoc-2020
|
/src/day4.rs
|
UTF-8
| 1,058 | 2.875 | 3 |
[] |
no_license
|
use std::collections::HashMap;
use std::fs;
fn get_passports<'a>(text: &'a str) -> Vec<HashMap<&'a str, &'a str>> {
let passports = text.split("\n\n");
return passports
.map(|pp| pp.split(|c| c == '\n' || c == ' ').collect())
.map(|pp: Vec<&str>| {
pp.iter().fold(HashMap::new(), |mut acc, pp| {
let pair: Vec<&str> = pp.split(":").collect();
acc.insert(pair[0], pair[1]);
return acc;
})
})
.collect();
}
pub fn task_a() -> i32 {
let text = fs::read_to_string("inputs/day4.txt").unwrap();
let passports = get_passports(&text);
let count = passports
.iter()
.filter(|pp| {
pp.contains_key("byr")
&& pp.contains_key("iyr")
&& pp.contains_key("eyr")
&& pp.contains_key("hgt")
&& pp.contains_key("hcl")
&& pp.contains_key("ecl")
&& pp.contains_key("pid")
})
.count();
return count as i32;
}
| true |
d3502468ed9fe30373c146a60e17ebd42fff2fea
|
Rust
|
d3z41k/learn-rust
|
/src/15_traits_02.rs
|
UTF-8
| 1,012 | 3.515625 | 4 |
[] |
no_license
|
use std::ops;
#[derive(Debug, Clone, Copy)]
struct A(i32);
#[derive(Eq, PartialEq, PartialOrd, Ord)]
// struct B(f32);
struct X;
struct Y;
#[derive(Debug)]
struct XY;
#[derive(Debug)]
struct YX;
impl ops::Add<Y> for X{
type Output = XY;
fn add(self, _rhs: Y) -> XY {
XY
}
}
impl ops::Add<X> for Y{
type Output = YX;
fn add(self, _rhs: X) -> YX {
YX
}
}
struct S{
s: String,
}
impl Drop for S {
fn drop(&mut self) {
println!("dropped {}", self.s)
}
}
fn main() {
let a = A(32);
// let b = B(12.13);
// let c = a.clone();
let c = a; // derive Copy
println!("{:?}", a);
println!("{:?}", X + Y);
println!("{:?}", Y + X);
let d = S{s: String::from("D")};
{
let e = S{s: String::from("E")};
{
let f = S{s: String::from("F")};
println!("leaving inner scope 2");
}
println!("leaving inner scope 1");
}
drop(d);
println!("program ending");
}
| true |
c698cdd8515b89cf86ddc4d383a6b1129aef43b0
|
Rust
|
Skelebot/ruxnasm
|
/src/lib.rs
|
UTF-8
| 3,936 | 3.046875 | 3 |
[
"MIT"
] |
permissive
|
mod anomalies;
pub(crate) mod emitter;
mod instruction;
pub(crate) mod scanner;
mod span;
mod token;
pub(crate) mod tokenizer;
mod traits;
pub(crate) mod walker;
pub use anomalies::{Error, Warning};
pub(crate) use instruction::{Instruction, InstructionKind};
pub(crate) use span::{Location, Span, Spanned, Spanning};
pub(crate) use token::{Identifier, Token};
use tokenizer::Word;
pub(crate) use traits::{Stockpile, UnzipCollect};
/// Assembles an Uxn binary from a string representing an Uxntal program.
///
/// - In case the program is valid, returns an `Ok((Vec<u8>, Vec<Warning>))` — the binary
/// represented as a sequence of bytes in a `Vec`, along with any [`Warning`]s that have been
/// reported during the assembly.
/// - In case the program is invalid, i.e. it contains errors, returns an
/// `Err((Vec<Error>, Vec<Warning>))`, which contains all [`Error`]s in the program, along with
/// any [`Warning`]s that may have also been generated. The `Vec` containing the errors is always
/// non-empty.
///
/// # Example
///
/// ```rust
/// let (binary, _) = ruxnasm::assemble(b"|0100 #02 #03 ADD").unwrap();
///
/// assert_eq!(binary, [0x80, 0x02, 0x80, 0x03, 0x18]);
/// ```
pub fn assemble(source: &[u8]) -> Result<(Vec<u8>, Vec<Warning>), (Vec<Error>, Vec<Warning>)> {
    let mut warnings = Vec::new();
    // Tokenize the raw bytes; scanning errors abort the assembly right here.
    let words = scanner::Scanner::new(source)
        .unzip_collect()
        .stockpile(&mut warnings)
        .map_err(|errors| (errors, warnings.clone()))?;
    let mut walker = walker::Walker::new();
    let words: Vec<&Word> = words.iter().collect();
    // Explicit work stack of word sequences, so macros can be expanded
    // without host-stack recursion. `chain` records the macro invocations
    // currently being expanded (name + invocation span), which is what
    // allows recursive macro expansion to be detected below.
    let mut stack: Vec<Vec<&Word>> = vec![words];
    let mut chain: Vec<(Vec<u8>, Span)> = Vec::new();
    while let Some(top) = stack.pop() {
        match walker.walk(&top) {
            // The walker stopped at a macro invocation: push the words that
            // follow the invocation back first, then the macro body on top,
            // so the body is processed before the remainder.
            Some((macro_words, macro_name, invoke_span, previous_words)) => {
                stack.push(previous_words);
                stack.push(macro_words);
                // A macro already present in the active expansion chain is
                // being expanded again — expansion would never terminate.
                if let Some(position) = chain.iter().position(|(n, _)| *n == macro_name) {
                    let mut actual_chain = vec![(macro_name.clone(), invoke_span)];
                    actual_chain.extend(chain.iter().skip(position + 1).cloned());
                    return Err((
                        vec![Error::RecursiveMacro {
                            chain: actual_chain
                                .into_iter()
                                .map(|(macro_name, macro_span)| {
                                    (
                                        String::from_utf8_lossy(&macro_name).into_owned(),
                                        macro_span.into(),
                                    )
                                })
                                .collect(),
                            span: chain[position].1.into(),
                        }],
                        warnings,
                    ));
                } else {
                    chain.push((macro_name, invoke_span));
                }
            }
            // A word sequence was fully consumed: the innermost active macro
            // expansion (if any) has finished.
            None => {
                chain.pop();
            }
        }
    }
    let (statements, definitions) = match walker.finalize() {
        Ok((statements, definitions, new_warnings)) => {
            warnings.extend(new_warnings);
            (statements, definitions)
        }
        Err((errors, new_warnings)) => {
            warnings.extend(new_warnings);
            return Err((errors, warnings));
        }
    };
    // println!("statements: {:#?}", statements);
    // println!("labels: {:?}", definitions.labels.keys());
    // println!("sublabels: {:?}", definitions.sublabels.keys());
    // Final pass: emit the binary for the walked statements.
    match emitter::emit(statements, definitions) {
        Ok((binary, new_warnings)) => {
            warnings.extend(new_warnings);
            Ok((binary, warnings))
        }
        Err((errors, new_warnings)) => {
            warnings.extend(new_warnings);
            Err((errors, warnings))
        }
    }
}
| true |
17d304844416c8bcad45db1147ea0bbb7f4727a5
|
Rust
|
HopedWall/rs-handlegraph
|
/src/handle.rs
|
UTF-8
| 2,515 | 3.5625 | 4 |
[] |
no_license
|
use std::ops::Add;
use std::fmt;
/// A 64-bit node identifier, newtyped to keep it distinct from raw integers.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct NodeId(u64);

impl From<u64> for NodeId {
    fn from(num: u64) -> Self {
        Self(num)
    }
}

impl Add<u64> for NodeId {
    type Output = Self;

    fn add(self, other: u64) -> Self {
        Self(self.0 + other)
    }
}

impl fmt::Display for NodeId {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}

/// An oriented reference to a node: the node id lives in the upper 63 bits
/// and a reverse-orientation flag in the least significant bit.
#[derive(Debug, Clone, Copy, PartialEq, PartialOrd, Hash)]
pub struct Handle(u64);

impl Handle {
    /// Returns the raw packed representation.
    pub fn as_integer(self) -> u64 {
        self.0
    }

    /// Wraps an already-packed integer.
    pub fn from_integer(i: u64) -> Self {
        Self(i)
    }

    /// Returns the node-id part (upper 63 bits).
    pub fn unpack_number(self) -> u64 {
        self.0 >> 1
    }

    /// Returns the orientation flag (least significant bit).
    pub fn unpack_bit(self) -> bool {
        self.0 & 1 == 1
    }

    /// Packs a node id and an orientation into a handle.
    ///
    /// # Panics
    /// Panics when the id uses the 64th bit, which the packing cannot hold.
    pub fn pack(node_id: NodeId, is_reverse: bool) -> Handle {
        let NodeId(id) = node_id;
        if id >= (0x1 << 63) {
            panic!(
                "Tried to create a handle with a node ID that filled 64 bits"
            );
        }
        Handle::from_integer((id << 1) | is_reverse as u64)
    }

    /// Returns the node this handle refers to.
    pub fn id(&self) -> NodeId {
        NodeId(self.unpack_number())
    }

    /// Whether this handle is in the reverse orientation.
    pub fn is_reverse(&self) -> bool {
        self.unpack_bit()
    }

    /// Returns the same node with the opposite orientation.
    pub fn flip(&self) -> Self {
        Self(self.0 ^ 1)
    }
}

/// An edge between two oriented node handles.
#[derive(Debug, Clone, PartialEq, PartialOrd)]
pub struct Edge(pub Handle, pub Handle);
#[cfg(test)]
mod tests {
    use super::*;
    // Handle::pack is an isomorphism; Handle <=> (u63, bool)
    #[test]
    fn handle_is_isomorphism() {
        let u: u64 = 597283742;
        let h = Handle::pack(NodeId(u), true);
        assert_eq!(h.unpack_number(), u);
        assert_eq!(h.unpack_bit(), true);
    }
    // Handle::pack should panic when the provided NodeId is invalid
    // (i.e. uses the 64th bit)
    #[test]
    #[should_panic]
    fn handle_pack_panic() {
        Handle::pack(NodeId(std::u64::MAX), true);
    }
    // flip must toggle only the orientation bit, leaving the node id intact.
    #[test]
    fn handle_flip() {
        let u: u64 = 597283742;
        let h1 = Handle::pack(NodeId(u), true);
        let h2 = h1.flip();
        assert_eq!(h1.unpack_number(), h2.unpack_number());
        assert_eq!(h1.unpack_bit(), true);
        assert_eq!(h2.unpack_bit(), false);
    }
}
/// A two-valued direction marker.
// NOTE(review): which side "Left"/"Right" refers to is not documented here;
// confirm semantics at the use sites.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub enum Direction {
    Left,
    Right,
}
| true |
b140d851706872dc8dc4b4c0388ee1daebac67b5
|
Rust
|
stm32-rs/stm32-rs-nightlies
|
/stm32h7/src/stm32h753/mdios/doutr9.rs
|
UTF-8
| 2,026 | 2.625 | 3 |
[] |
no_license
|
// NOTE(review): svd2rust-style generated register-access code (reader/writer
// typestates over `crate::generic::Reg`); do not hand-edit.
#[doc = "Register `DOUTR9` reader"]
pub type R = crate::R<DOUTR9_SPEC>;
#[doc = "Register `DOUTR9` writer"]
pub type W = crate::W<DOUTR9_SPEC>;
#[doc = "Field `DOUT9` reader - Output data sent to MDIO Master during read frames"]
pub type DOUT9_R = crate::FieldReader<u16>;
#[doc = "Field `DOUT9` writer - Output data sent to MDIO Master during read frames"]
pub type DOUT9_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 16, O, u16>;
impl R {
    #[doc = "Bits 0:15 - Output data sent to MDIO Master during read frames"]
    #[inline(always)]
    pub fn dout9(&self) -> DOUT9_R {
        // DOUT9 occupies the low 16 bits of the 32-bit register.
        DOUT9_R::new((self.bits & 0xffff) as u16)
    }
}
impl W {
    #[doc = "Bits 0:15 - Output data sent to MDIO Master during read frames"]
    #[inline(always)]
    #[must_use]
    pub fn dout9(&mut self) -> DOUT9_W<DOUTR9_SPEC, 0> {
        DOUT9_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "MDIOS output data register 9\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`doutr9::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`doutr9::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct DOUTR9_SPEC;
impl crate::RegisterSpec for DOUTR9_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`doutr9::R`](R) reader structure"]
impl crate::Readable for DOUTR9_SPEC {}
#[doc = "`write(|w| ..)` method takes [`doutr9::W`](W) writer structure"]
impl crate::Writable for DOUTR9_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets DOUTR9 to value 0"]
impl crate::Resettable for DOUTR9_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
| true |
18487d40c7f5dd51ff6a542dbfc2707104fea04c
|
Rust
|
hreinn91/rust_adventofcode_2019
|
/src/day3/src/main.rs
|
UTF-8
| 3,869 | 3.453125 | 3 |
[] |
no_license
|
mod day3_tests;
pub mod day3 {
    use std::fs;
    use std::collections::HashMap;
    use std::borrow::Borrow;

    /// Reads the puzzle input and returns the two wires' instruction lists
    /// (one comma-separated list per input line, e.g. `["R8", "U5", ...]`).
    ///
    /// # Panics
    /// Panics when the file cannot be read or has fewer than two lines.
    pub fn get_input(filename: &str) -> (Vec<String>, Vec<String>) {
        let raw_input = fs::read_to_string(filename)
            .expect("Failed reading file");
        let raw_instructions: Vec<&str> = raw_input
            .split("\n")
            .collect();
        let wire_1: Vec<String> = raw_instructions[0]
            .split(",")
            .map(|s| s.to_string())
            .collect();
        let wire_2: Vec<String> = raw_instructions[1]
            .split(",")
            .map(|s| s.to_string())
            .collect();
        (wire_1, wire_2)
    }

    /// Debug helper: prints both wires' instruction lists.
    pub fn print_wire_instruction(wires: (Vec<String>, Vec<String>)) {
        println!("{:?}", wires.0);
        println!("{:?}", wires.1);
    }

    // grid value legend — 0: origin 'o', 1: horizontal '-', 2: vertical '|', 3: crossing 'X'
    /// Builds the occupancy grid for both wires, starting each wire at (0, 0).
    pub fn get_grid(wires: (Vec<String>, Vec<String>)) -> HashMap<(i32, i32), i32> {
        let mut grid: HashMap<(i32, i32), i32> = HashMap::new();
        populate_grid(&mut grid, wires.0);
        populate_grid(&mut grid, wires.1);
        grid
    }

    /// Walks one wire from the origin, marking every cell it covers.
    pub fn populate_grid(grid: &mut HashMap<(i32, i32), i32>, wire: Vec<String>) {
        let mut x = 0;
        let mut y = 0;
        grid.insert((x, y), 0);
        for instruction in wire {
            let (new_x, new_y) = insert_coordinate(grid, parse(instruction), x, y);
            x = new_x;
            y = new_y;
        }
    }

    /// Applies one parsed instruction `(direction, length)` from `(x, y)`,
    /// marking each covered cell, and returns the end position.
    pub fn insert_coordinate(grid: &mut HashMap<(i32, i32), i32>,
                             instructions: (String, i32),
                             mut x: i32, mut y: i32) -> (i32, i32) {
        let (dir, len) = instructions;
        for _i in 1..=len {
            match dir.as_str() {
                "R" => x += 1,
                "L" => x -= 1,
                "U" => y += 1,
                "D" => y -= 1,
                _ => {}
            }
            if grid.contains_key(&(x, y)) {
                // BUG FIX: `entry(...).or_insert(3)` never overwrote an
                // existing value, so crossings were never recorded.
                // NOTE(review): this also marks a wire crossing itself.
                grid.insert((x, y), 3);
            } else if dir == "L" || dir == "R" {
                grid.insert((x, y), 1);
            } else if dir == "U" || dir == "D" {
                // BUG FIX: vertical segments were stored as 1; the legend
                // (and `print_grid`) expect 2 for '|'.
                grid.insert((x, y), 2);
            }
        }
        (x, y)
    }

    /// Splits an instruction like `"R75"` into `("R", 75)`.
    ///
    /// # Panics
    /// Panics when the remainder is not a valid integer.
    pub fn parse(instruction: String) -> (String, i32) {
        let dir = &instruction[0..1];
        // BUG FIX: lengths can have more than one digit ("R75"), so take the
        // whole remainder instead of `[1..2]`. `trim` tolerates a trailing
        // '\r' left over from splitting Windows line endings on '\n'.
        let len = instruction[1..].trim().parse().unwrap();
        (dir.to_string(), len)
    }

    /// Renders the non-negative quadrant of the grid, one row per line.
    /// Cells with negative coordinates are not shown.
    pub fn print_grid(grid: HashMap<(i32, i32), i32>) {
        // BUG FIX: the original moved `grid` into `get_grid_size` and kept
        // using it afterwards, which did not compile. Clone for the size
        // query so both public signatures stay unchanged.
        let grid_size = get_grid_size(grid.clone());
        for y in 0..=grid_size.1 {
            let mut row: Vec<String> = Vec::new();
            for x in 0..=grid_size.0 {
                // BUG FIX: empty cells are absent from the map, so the old
                // unconditional `unwrap()` panicked; match the Option.
                let rep = match grid.get(&(x, y)) {
                    Some(&0) => "o",
                    Some(&1) => "-",
                    Some(&2) => "|",
                    Some(&3) => "X",
                    _ => ".",
                };
                row.push(rep.to_string());
            }
            println!("{:?}", row);
        }
    }

    /// Returns the maximum x and y coordinates present in the grid
    /// (0 when all coordinates are negative or the grid is empty).
    pub fn get_grid_size(grid: HashMap<(i32, i32), i32>) -> (i32, i32) {
        let mut x_size = 0;
        let mut y_size = 0;
        for &(x, y) in grid.keys() {
            x_size = x_size.max(x);
            y_size = y_size.max(y);
        }
        (x_size, y_size)
    }
}
use crate::day3::{get_input, print_wire_instruction, get_grid};
// Entry point: read both wires from the test input, echo them, and build
// the crossing grid.
fn main() {
    let wires = get_input("src/test_input1.txt");
    print_wire_instruction(wires.clone());
    // NOTE(review): the result and the final clone are unused here; the grid
    // is built only for its side effects during development.
    get_grid(wires.clone());
}
| true |
70b6495fc9f3eb57c72d544f3f1b9e381d080f59
|
Rust
|
Azureki/codewars
|
/Round_by_zero_dot_five_steps.rs
|
UTF-8
| 138 | 3.125 | 3 |
[] |
no_license
|
/// Demonstrates rounding to the nearest half step.
fn main() {
    let res = solution(4.25);
    println!("result is {}", res);
}

/// Rounds `n` to the nearest multiple of 0.5
/// (halves round away from zero, following `f64::round`).
fn solution(n: f64) -> f64 {
    let doubled = n * 2.0;
    doubled.round() / 2.0
}
| true |
108fb452c7b0ed7a801342d6abb074ef9fe57e81
|
Rust
|
dmussaku/rust-examples
|
/rectangles/src/main.rs
|
UTF-8
| 269 | 3.515625 | 4 |
[] |
no_license
|
/// An axis-aligned rectangle with integer side lengths.
#[derive(Debug)]
struct Rectangle {
    length: u32,
    width: u32,
}

impl Rectangle {
    /// Returns the area in square units (width × length).
    fn area(&self) -> u32 {
        self.width * self.length
    }
}

fn main() {
    let rectangle = Rectangle { length: 4, width: 6 };
    println!("Area of rectangle is {}", rectangle.area());
}
| true |
c398ffea2d7aa99b9f182775c1b884856aabae1d
|
Rust
|
w-k-s/ProgrammingPuzzles
|
/rein-number.rs
|
UTF-8
| 895 | 3.34375 | 3 |
[] |
no_license
|
// Self-checking entry point: asserts known outputs of `rein_number`.
fn main() {
    assert_eq!(rein_number(1),"1");
    assert_eq!(rein_number(2),"12");
    assert_eq!(rein_number(3),"123");
    assert_eq!(rein_number(7),"1234567");
    assert_eq!(rein_number(9),"123456789");
    assert_eq!(rein_number(10),"10123456789");
    assert_eq!(rein_number(15),"101111111223344556789");
    assert_eq!(rein_number(34),"10001111111111111222222222222223333333334444555666777888999");
    assert_eq!(rein_number(42),"100001111111111111122222222222222233333333333333444444455556666777788889999")
}
/// Concatenates `1..=digits`, sorts the digits ascending, and moves one '1'
/// to the front so the result never starts with a leading zero.
///
/// # Panics
/// Panics when `digits < 1` (the concatenation then contains no '1').
fn rein_number(digits: i32) -> String {
    let concatenated: String = (1..=digits).map(|i| i.to_string()).collect();
    let mut chars: Vec<char> = concatenated.chars().collect();
    chars.sort_unstable();
    // All '1's are identical, so moving any one of them to the front is
    // equivalent to moving the first.
    let one_idx = chars.iter().position(|&c| c == '1').unwrap();
    chars.remove(one_idx);
    chars.insert(0, '1');
    chars.into_iter().collect()
}
| true |
3edd4ab97e235f0498f2b6a1a3374141b60d607e
|
Rust
|
rust-rosetta/rust-rosetta
|
/tasks/additive-primes/addit_primes-flat/src/main.rs
|
UTF-8
| 743 | 2.828125 | 3 |
[
"Unlicense"
] |
permissive
|
//===Flat implementation===
// Prints every prime below `limit` whose digit sum is itself prime
// ("additive primes"), ten per row, right-aligned in fixed-width columns.
fn main() {
    let limit = 500;
    let column_w = limit.to_string().len() + 1;
    // Rough capacity estimate for the prime list; only used to presize.
    let mut pms = Vec::with_capacity(limit / 2 - limit / 3 / 2 - limit / 5 / 3 / 2 + 1);
    let mut count = 0;
    // Candidates: 2, then odd numbers. Trial division uses the primes found
    // so far, stopping once p * p exceeds the candidate.
    for u in (2..3).chain((3..limit).step_by(2)) {
        if pms.iter().take_while(|&&p| p * p <= u).all(|&p| u % p != 0) {
            pms.push(u);
            // Digits of `u`, least significant first.
            let dgs = std::iter::successors(Some(u), |&n| (n > 9).then(|| n / 10)).map(|n| n % 10);
            // `pms` grows in ascending order, so membership of the digit sum
            // can use binary search.
            if pms.binary_search(&dgs.sum()).is_ok() {
                // Newline before every 10th entry (including the first).
                print!("{}{u:column_w$}", if count % 10 == 0 { "\n" } else { "" });
                count += 1;
            }
        }
    }
    println!("\n---\nFound {count} additive primes less than {limit}");
}
| true |
573f9eda99dee24e337339bfbe26b55a49e38c3d
|
Rust
|
ilya-epifanov/blackbox-tool
|
/src/opts.rs
|
UTF-8
| 485 | 2.609375 | 3 |
[
"Apache-2.0",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use clap::Clap;
// NOTE: the `///` doc comments below double as the CLI help text emitted by
// clap's derive macro, so they are user-visible; edit with care.
/// Blackbox tool
#[derive(Clap)]
#[clap()]
pub struct Opts {
    /// Input blackbox file
    #[clap(short, long)]
    pub input: String,
    // Which subcommand was invoked, with its own parsed options.
    #[clap(subcommand)]
    pub subcmd: SubCommand,
}
// One variant per supported subcommand.
#[derive(Clap)]
pub enum SubCommand {
    DumpCsv(DumpCsv),
}
/// Convert into csv file(-s)
#[derive(Clap)]
pub struct DumpCsv {
    /// Output file base name, will be suffixed with '.csv', '.gps.csv' etc.
    #[clap(short, long)]
    pub output_basename: Option<String>,
}
| true |
2c4350ad27af8e3f7e9a8d94122cd92f3235990b
|
Rust
|
PizzaCrust/interfacer-http
|
/tests/content/derive.rs
|
UTF-8
| 1,532 | 2.765625 | 3 |
[
"MIT"
] |
permissive
|
use interfacer_http::{
define_from_content, define_to_content,
mime::{self, Mime},
ContentInto, FromContent, FromContentError, ToContent, ToContentError,
};
use std::str::FromStr;
use std::string::ToString;
// Generate the helper traits bridging std conversions into the crate's
// content machinery (see the `define_from_content!`/`define_to_content!`
// macros in interfacer_http).
define_from_content!(FromContentString);
define_to_content!(StringToContent);
// Any `FromStr` type with a stringifiable error can be decoded from a body:
// the bytes are interpreted as UTF-8 (lossily) and parsed.
impl<T: FromStr> FromContentString for T
where
    T::Err: std::string::ToString,
{
    fn _from_content(data: Vec<u8>, content_type: &Mime) -> Result<Self, FromContentError> {
        String::from_utf8_lossy(&data)
            .parse()
            .map_err(|err: <Self as FromStr>::Err| {
                (data, content_type.clone(), err.to_string()).into()
            })
    }
}
// Any `ToString` type encodes as its UTF-8 string form, regardless of the
// requested content type.
impl<T: ToString> StringToContent for T {
    fn _to_content(&self, _content_type: &Mime) -> Result<Vec<u8>, ToContentError> {
        Ok(self.to_string().into_bytes())
    }
}
// Test fixture: an i32 newtype round-trippable through FromStr/ToString.
#[derive(Debug, Eq, PartialEq, FromContent, ToContent)]
struct I32(i32);
impl FromStr for I32 {
    type Err = <i32 as FromStr>::Err;
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        Ok(Self(s.parse()?))
    }
}
impl ToString for I32 {
    fn to_string(&self) -> String {
        self.0.to_string()
    }
}
#[test]
fn test_to_content() {
    let data = I32(1).to_content(&mime::TEXT_PLAIN).unwrap();
    assert_eq!("1", String::from_utf8_lossy(&data).as_ref());
}
#[test]
fn test_from_content() {
    assert_eq!(
        I32(1),
        "1".to_owned()
            .into_bytes()
            .content_into(&mime::TEXT_PLAIN)
            .unwrap()
    );
}
| true |
fda3f41971cb307167ca5094a9b81803ac3bac84
|
Rust
|
theaaf/blackmagic-c2
|
/src/agent/hyperdeck.rs
|
UTF-8
| 10,245 | 2.8125 | 3 |
[
"MIT"
] |
permissive
|
use std::collections::{HashMap};
use std::net::{SocketAddr};
use std::time::{Duration};
use futures::{future, Future, Async, Poll};
use simple_error::{SimpleError};
use tokio::prelude::*;
use tokio::net::{TcpStream};
use tokio::io::{write_all};
// Default TCP port of the HyperDeck control protocol.
pub const DEFAULT_PORT: u16 = 9993;
// A TCP connection to a HyperDeck, plus bytes read from the socket that have
// not yet been parsed into a complete response.
pub struct HyperDeck {
    stream: TcpStream,
    read_buffer: String,
}
impl HyperDeck {
    // Opens a TCP connection to `addr`, failing after a 2 second timeout.
    pub fn connect(addr: &SocketAddr) -> impl Future<Item=Self, Error=SimpleError> {
        TcpStream::connect(addr)
            .timeout(Duration::from_secs(2))
            .and_then(|stream| {
                future::ok(HyperDeck{
                    stream: stream,
                    read_buffer: String::new(),
                })
            })
            .map_err(|e| SimpleError::with("hyperdeck connect error", e))
    }
    /// Reads the next response from the HyperDeck, including asynchronous responses.
    // Consumes `self` and yields it back with the response — futures-0.1
    // style threading of the connection through the chain. Times out after
    // 2 seconds.
    pub fn read_response(self) -> impl Future<Item=(Self, HyperDeckResponse), Error=SimpleError> {
        HyperDeckResponseFuture{
            hyperdeck: Some(self),
        }
        .timeout(Duration::from_secs(2))
        .map_err(|e| SimpleError::with("hyperdeck read response error", e))
    }
    // Sends `cmd` to the device, appending the protocol's '\n' terminator.
    pub fn write_command(self, cmd: String) -> impl Future<Item=Self, Error=SimpleError> {
        let read_buffer = self.read_buffer;
        write_all(self.stream, cmd + "\n")
            .map(|(stream, _)| HyperDeck{
                stream: stream,
                read_buffer: read_buffer,
            })
            .map_err(|e| SimpleError::with("hyperdeck write command error", e))
    }
    /// Reads responses until a command response (a response that does not have a 5XX code) is
    /// found.
    pub fn read_command_response(self) -> impl Future<Item=(Self, HyperDeckResponse), Error=SimpleError> {
        let state = (self, None);
        future::loop_fn(state, move |state: (Self, Option<HyperDeckResponse>)| {
            state.0.read_response()
                .and_then(|(hyperdeck, response)| {
                    match response.code {
                        // 5XX codes are treated as asynchronous responses:
                        // skip them and keep waiting for the command reply.
                        500...600 => Ok(future::Loop::Continue((hyperdeck, None))),
                        _ => Ok(future::Loop::Break((hyperdeck, Some(response)))),
                    }
                })
        })
        .map(|state| (state.0, state.1.unwrap()))
    }
}
}
// One parsed protocol response: a numeric status code, the text after the
// code on the first line, and an optional multi-line payload.
#[derive(Clone, Debug)]
pub struct HyperDeckResponse {
    pub code: i32,
    pub text: String,
    pub payload: Option<String>,
}
impl HyperDeckResponse {
    // Parses the payload as `key: value` lines into a map of borrowed
    // slices. Returns an empty map when there is no payload, and an error
    // when any payload line lacks a ':' separator.
    pub fn parse_payload_parameters<'a>(&'a self) -> Result<HashMap<&'a str, &'a str>, SimpleError> {
        match &self.payload {
            None => Ok(HashMap::new()),
            Some(payload) => {
                let mut params = HashMap::new();
                for line in payload.lines() {
                    let parts: Vec<&str> = line.splitn(2, ':').collect();
                    if parts.len() < 2 {
                        return Err(SimpleError::new("malformed parameters"));
                    }
                    let key = parts[0].trim();
                    // NOTE(review): after the length check above,
                    // `parts.len() > 1` always holds, so the "" arm is
                    // unreachable.
                    let value = if parts.len() > 1 { parts[1].trim() } else { "" };
                    params.insert(key, value);
                }
                Ok(params)
            },
        }
    }
}
// Future that resolves once one complete response has been buffered and
// parsed. `hyperdeck` stays `Some` until resolution, when ownership of the
// connection is handed back to the caller.
struct HyperDeckResponseFuture {
    hyperdeck: Option<HyperDeck>,
}
impl HyperDeckResponseFuture {
    // Tries to parse one complete response out of `read_buffer`.
    //
    // Returns Ok(Some(response)) when a full response is available — either
    // a single line whose text does not end in ':', or a ':'-terminated
    // line plus payload lines ended by a blank line. Returns Ok(None) when
    // more bytes are needed, and Err on a malformed status line. On success
    // the consumed lines are removed from the buffer.
    fn try_parse_response(&mut self) -> Result<Option<HyperDeckResponse>, SimpleError> {
        let mut response: Option<HyperDeckResponse> = None;
        let hyperdeck = self.hyperdeck.as_mut().unwrap();
        let read_buffer = hyperdeck.read_buffer.clone();
        // Only consider lines that are already newline-terminated; a
        // trailing partial line stays buffered for the next read.
        let complete_lines = read_buffer.trim_end_matches(|c| c != '\n' && c != '\r');
        let mut consumed_lines = 0;
        for line in complete_lines.lines() {
            consumed_lines += 1;
            if line.is_empty() {
                // A blank line terminates a payload-carrying response.
                match response {
                    Some(ref response) => {
                        // Cut the consumed lines off the front of the buffer.
                        let (_, remaining) = read_buffer.split_at(hyperdeck.read_buffer.match_indices('\n').nth(consumed_lines - 1).unwrap_or((read_buffer.len(), "")).0);
                        hyperdeck.read_buffer = remaining.to_string();
                        return Ok(Some(response.clone()));
                    },
                    _ => response = None,
                }
                continue;
            }
            match response {
                // Inside a payload: accumulate the line.
                Some(ref mut response) => {
                    let mut payload = response.payload.get_or_insert(String::new());
                    payload.push_str(line);
                    payload.push_str("\n");
                },
                // First line of a response: "<code> <text>".
                None => {
                    let parts: Vec<&str> = line.splitn(2, ' ').collect();
                    if parts.len() < 2 {
                        return Err(SimpleError::new("malformed response code line"));
                    }
                    let code = parts[0].parse();
                    if code.is_err() {
                        return Err(SimpleError::new("malformed response code"));
                    }
                    let text = parts[1];
                    if !text.ends_with(':') {
                        // No trailing ':' means no payload follows: the
                        // response is complete after this single line.
                        let (_, remaining) = read_buffer.split_at(hyperdeck.read_buffer.match_indices('\n').nth(consumed_lines - 1).unwrap_or((read_buffer.len(), "")).0);
                        hyperdeck.read_buffer = remaining.to_string();
                        return Ok(Some(HyperDeckResponse{
                            code: code.unwrap(),
                            text: text.to_string(),
                            payload: None,
                        }));
                    }
                    // A trailing ':' announces payload lines to follow.
                    response = Some(HyperDeckResponse{
                        code: code.unwrap(),
                        text: text.trim_end_matches(':').to_string(),
                        payload: None,
                    });
                },
            }
        }
        // No complete response buffered yet.
        Ok(None)
    }
}
impl Future for HyperDeckResponseFuture {
    type Item = (HyperDeck, HyperDeckResponse);
    type Error = SimpleError;
    fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
        // Fast path: a full response may already sit in the buffer.
        match self.try_parse_response() {
            Ok(Some(response)) => return Ok(Async::Ready((self.hyperdeck.take().unwrap(), response))),
            Ok(None) => {},
            Err(e) => return Err(e),
        }
        {
            let hyperdeck = self.hyperdeck.as_mut().unwrap();
            let mut buf = [0; 1024];
            match hyperdeck.stream.poll_read(&mut buf) {
                // Zero-length read: the peer closed the connection.
                Ok(Async::Ready(0)) => return Err(SimpleError::new("unexpected eof")),
                Ok(Async::Ready(n)) => match std::str::from_utf8(&buf[..n]) {
                    Ok(s) => hyperdeck.read_buffer.push_str(s),
                    Err(e) => return Err(SimpleError::with("malformed response", e))
                },
                Ok(Async::NotReady) => return Ok(Async::NotReady),
                Err(e) => return Err(SimpleError::with("read error", e)),
            }
        }
        // Retry with the newly appended bytes.
        match self.try_parse_response() {
            Ok(Some(response)) => Ok(Async::Ready((self.hyperdeck.take().unwrap(), response))),
            Ok(None) => Ok(Async::NotReady),
            Err(e) => Err(e),
        }
    }
}
#[cfg(test)]
mod tests {
    use super::HyperDeck;
    use actix::{Arbiter, System};
    use futures::{future, Future, Stream};
    use tokio::io::{write_all};
    use tokio::net::{TcpListener};
    use std::net::{IpAddr, Ipv4Addr, SocketAddr};
    // A 5XX line must be skipped and the following 200 returned.
    #[test]
    fn hyperdeck_read_command_response() {
        let addr: SocketAddr = "127.0.0.1:0".parse().unwrap();
        let listener = TcpListener::bind(&addr).unwrap();
        System::run(|| {
            let addr = listener.local_addr().unwrap();
            Arbiter::spawn(
                HyperDeck::connect(&addr)
                    .and_then(|hyperdeck| hyperdeck.read_command_response())
                    .then(|result| {
                        let (_, result) = result.unwrap();
                        assert_eq!(200, result.code);
                        assert_eq!("hello", result.text);
                        System::current().stop();
                        future::result(Ok(()))
                    })
            );
            // Fake device: accept one client and send a canned reply.
            Arbiter::spawn(
                listener.incoming()
                    .take(1)
                    .collect()
                    .and_then(|mut clients| {
                        write_all(clients.remove(0), b"500 init\n200 hello\n")
                    })
                    .then(|_| future::result(Ok(())))
            );
        });
    }
    // A ':'-terminated response line must collect the payload lines.
    #[test]
    fn hyperdeck_read_response() {
        let addr: SocketAddr = "127.0.0.1:0".parse().unwrap();
        let listener = TcpListener::bind(&addr).unwrap();
        System::run(|| {
            let addr = listener.local_addr().unwrap();
            Arbiter::spawn(
                HyperDeck::connect(&addr)
                    .and_then(|hyperdeck| hyperdeck.read_response())
                    .then(|result| {
                        let (_, result) = result.unwrap();
                        assert_eq!(200, result.code);
                        assert_eq!("hello", result.text);
                        assert_eq!("foo\n", result.payload.unwrap());
                        System::current().stop();
                        future::result(Ok(()))
                    })
            );
            Arbiter::spawn(
                listener.incoming()
                    .take(1)
                    .collect()
                    .and_then(|mut clients| {
                        write_all(clients.remove(0), b"200 hello:\nfoo\n\n")
                    })
                    .then(|_| future::result(Ok(())))
            );
        });
    }
    // Connecting to a TEST-NET address (RFC 5737) must hit the 2 s timeout.
    #[test]
    fn hyperdeck_connect_timeout() {
        System::run(|| {
            let addr = SocketAddr::new(IpAddr::V4(Ipv4Addr::new(192, 0, 2, 0)), super::DEFAULT_PORT);
            Arbiter::spawn(
                HyperDeck::connect(&addr)
                    .then(|result| {
                        assert!(result.is_err());
                        System::current().stop();
                        future::result(Ok(()))
                    })
            );
        });
    }
}
| true |
c586a710f316b113a1aa21423295c8c0fcf6e15f
|
Rust
|
DominicPM/supervisionary
|
/wasmi-bindings/src/runtime_trap.rs
|
UTF-8
| 2,616 | 2.90625 | 3 |
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
//! # Runtime traps (or: kernel panics)
//!
//! Defines various types of runtime traps/kernel panics that are specific to
//! the WASMI execution engine binding. Note that these represent unrecoverable
//! errors that cause a system abort.
//!
//! # Authors
//!
//! [Dominic Mulligan], Systems Research Group, [Arm Research] Cambridge.
//! [Nick Spinale], Systems Research Group, [Arm Research] Cambridge.
//!
//! # Copyright
//!
//! Copyright (c) Arm Limited, 2021. All rights reserved (r). Please see the
//! `LICENSE.markdown` file in the *Supervisionary* root directory for licensing
//! information.
//!
//! [Dominic Mulligan]<https://dominicpm.github.io>
//! [Nick Spinale]<https://nickspinale.com>
//! [Arm Research]<http://www.arm.com/research>
use kernel::error_code::ErrorCode;
use std::fmt::{Display, Error as DisplayError, Formatter};
use wasmi::{Error as WasmiError, HostError, Trap, TrapKind};
/// Runtime traps are unrecoverable errors raised by the WASM program host.
/// These are equivalent, essentially, to kernel panics in a typical operating
/// system.
#[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub enum RuntimeTrap {
    /// The WASM guest's memory was not registered with the runtime state.
    MemoryNotRegistered,
    /// An attempted read from the WASM guest's heap failed.
    MemoryReadFailed,
    /// An attempted write to the WASM guest's heap failed.
    MemoryWriteFailed,
    /// The WASM guest program tried to call a function that does not exist.
    /// The payload is the offending opcode (see the `Display` rendering).
    NoSuchFunction(usize),
    /// A type-signature check on a host-function failed.
    SignatureFailure,
}
/// Pretty-printing for `RuntimeTrap` values.
impl Display for RuntimeTrap {
    fn fmt(&self, f: &mut Formatter) -> Result<(), DisplayError> {
        // Render the variant name (plus payload, where applicable) first,
        // then emit it through the formatter in a single call.
        let rendered = match self {
            RuntimeTrap::MemoryNotRegistered => "MemoryNotRegistered".to_string(),
            RuntimeTrap::MemoryReadFailed => "MemoryReadFailed".to_string(),
            RuntimeTrap::MemoryWriteFailed => "MemoryWriteFailed".to_string(),
            RuntimeTrap::NoSuchFunction(opcode) => format!("NoSuchFunction: {opcode}"),
            RuntimeTrap::SignatureFailure => "SignatureFailure".to_string(),
        };
        f.write_str(&rendered)
    }
}
// Marks `RuntimeTrap` as usable inside WASMI host errors/traps.
impl HostError for RuntimeTrap {}
/// Lifts a kernel error into an error that can be passed back to the WASM
/// program.
#[inline]
pub fn host_error(code: ErrorCode) -> WasmiError {
    WasmiError::Host(Box::new(code))
}
/// Creates a WASMI `Trap` type from a `RuntimeTrap`.
#[inline]
pub fn host_trap(trap: RuntimeTrap) -> Trap {
    Trap::new(TrapKind::Host(Box::new(trap)))
}
| true |
84221311d0868e9ef496f9b48b98f9c5020ad229
|
Rust
|
admwrd/robin
|
/robin/src/error.rs
|
UTF-8
| 1,467 | 3.09375 | 3 |
[
"MIT"
] |
permissive
|
use serde_json;
use redis;
use std::{self, fmt};
/// The result type used throughout Robin.
pub type RobinResult<T> = Result<T, Error>;
/// The different types of errors that might happen.
#[derive(Debug)]
pub enum Error {
    /// The job we got from the queue isn't known.
    UnknownJob(String),
    /// The job failed to perform and might be retried.
    JobFailed(Box<std::error::Error>),
    /// Some serialization/deserialization failed
    SerdeJsonError(serde_json::Error),
    /// Something related to Redis failed.
    RedisError(redis::RedisError),
    /// A Redis error that isn't included in `redis::RedisError`
    UnknownRedisError(String),
}
// Display simply delegates to the Debug representation.
impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{:?}", self)
    }
}
// NOTE(review): `description()` is deprecated in modern Rust in favour of
// `Display`; kept here to match the crate's current style.
impl std::error::Error for Error {
    fn description(&self) -> &str {
        match self {
            &Error::UnknownJob(ref name) => name,
            &Error::JobFailed(ref err) => err.description(),
            &Error::SerdeJsonError(ref err) => err.description(),
            &Error::RedisError(ref err) => err.description(),
            &Error::UnknownRedisError(ref msg) => msg,
        }
    }
}
// Allow `?` on serde_json results.
impl From<serde_json::Error> for Error {
    fn from(error: serde_json::Error) -> Error {
        Error::SerdeJsonError(error)
    }
}
// Allow `?` on redis results.
impl From<redis::RedisError> for Error {
    fn from(error: redis::RedisError) -> Error {
        Error::RedisError(error)
    }
}
| true |
4c1eef5b1a6b4813d10c7d1ff03f730f97e6efb1
|
Rust
|
seiyab/atcoder
|
/ABC/17x/172/c.rs
|
UTF-8
| 2,481 | 3.234375 | 3 |
[] |
no_license
|
use std::io::stdin;
use std::str::FromStr;
use std::cmp::max;
// Reads K (a budget) plus two value lists from stdin, then maximizes the
// total number of items taken from the front of `xs` and the front of `ys`
// whose combined cost stays within K.
fn main(){
    // _n and _m are the list lengths; only the budget k is used directly.
    let (_n, _m, k): (i64, i64, i64) = get_triple();
    let xs: Vec<i64> = get_vec();
    let ys: Vec<i64> = get_vec();
    // Prefix sums: xcs[i] is the cost of taking the first i items of xs.
    let xcs = cumsum(&xs);
    let ycs = cumsum(&ys);
    let mut ans = 0;
    // Fix the number of items taken from xs, then binary-search the largest
    // number of ys items that still fits the remaining budget.
    for i in 0..xcs.len() {
        let cost_x = xcs[i];
        let cost_rem = k - cost_x;
        // When cost_rem is negative even ycs[0] == 0 fails, so None skips i.
        if let Some(y_n) = upper_bound(&ycs, &|y| *y <= cost_rem) {
            ans = max(ans, i + y_n);
        }
    }
    println!("{}", ans);
}
/// Prefix sums of `v`: the result has `v.len() + 1` entries starting at 0,
/// where entry `i` is the sum of the first `i` elements.
fn cumsum(v: &Vec<i64>) -> Vec<i64> {
    let mut prefix = Vec::with_capacity(v.len() + 1);
    let mut running = 0;
    prefix.push(running);
    for &x in v.iter() {
        running += x;
        prefix.push(running);
    }
    prefix
}
/// Returns the largest index `i` such that `f(&v[i])` holds, assuming `f`
/// is monotone over `v` (true on a prefix, then false). Returns `None` when
/// `v` is empty or `f` already fails on the first element.
fn upper_bound<T, F>(v: &Vec<T>, f: &F) -> Option<usize>
    where F: Fn(&T) -> bool {
    // Guard: the loop invariant below needs f(v[0]) to hold.
    match v.first() {
        Some(first) if f(first) => {}
        _ => return None,
    }
    let mut lo = 0;
    let mut hi = v.len() - 1;
    if f(&v[hi]) {
        return Some(hi);
    }
    // Invariant: f(v[lo]) is true and f(v[hi]) is false.
    while lo + 1 < hi {
        let mid = (lo + hi) / 2;
        if f(&v[mid]) {
            lo = mid;
        } else {
            hi = mid;
        }
    }
    Some(lo)
}
#[allow(dead_code)]
// Reads one line from stdin, trimmed; returns "" on a read failure.
fn get_line() -> String {
    let mut s = String::new();
    match stdin().read_line(&mut s){
        Ok(_) => {s.trim().to_string()}
        Err(_) => String::new()
    }
}
#[allow(dead_code)]
// Reads one line and parses its whitespace-separated tokens, silently
// skipping any token that fails to parse.
fn get_vec<T: std::str::FromStr>() -> Vec<T> {
    let line = get_line();
    line.split_whitespace().filter_map(|x| x.parse().ok()).collect()
}
#[allow(dead_code)]
// Reads one value from a line; panics when the line has no parsable token.
fn get_one<T: FromStr + Copy>() -> T {
    let v = get_vec();
    v[0]
}
#[allow(dead_code)]
// Reads two values from a line; panics when fewer than two tokens parse.
fn get_pair<T: FromStr + Copy>() -> (T, T) {
    let v = get_vec();
    (v[0], v[1])
}
#[allow(dead_code)]
// Reads three values from a line; panics when fewer than three tokens parse.
fn get_triple<T: FromStr + Copy>() -> (T, T, T) {
    let v = get_vec();
    (v[0], v[1], v[2])
}
#[allow(dead_code)]
// Reads one line and returns its characters.
fn get_chars() -> Vec<char> {
    get_line().chars().collect()
}
#[allow(dead_code)]
/// Minimum of `xs`; returns `i64::MAX` for an empty slice (fold identity of
/// the original implementation, preserved for compatibility).
fn vec_min(xs: &Vec<i64>) -> i64 {
    // Idiomatic: Iterator::min replaces the manual fold.
    xs.iter().copied().min().unwrap_or(std::i64::MAX)
}
#[allow(dead_code)]
/// Maximum of `xs`; returns `i64::MIN` for an empty slice.
fn vec_max(xs: &Vec<i64>) -> i64 {
    xs.iter().copied().max().unwrap_or(std::i64::MIN)
}
#[allow(dead_code)]
/// Sum of `xs`; returns 0 for an empty slice.
fn vec_sum(xs: &Vec<i64>) -> i64 {
    xs.iter().sum()
}
| true |
8afd090e64540d023cfdc991a6c0899c85ffc82e
|
Rust
|
GDGToulouse/devfest-toolkit-rs
|
/dftk-server/src/rest/categories.rs
|
UTF-8
| 3,097 | 2.78125 | 3 |
[
"Apache-2.0"
] |
permissive
|
use uuid::Uuid;
use warp::filters::BoxedFilter;
use warp::{Filter, Rejection, Reply};
use dftk_database::Repositories;
use crate::rejection::Oops;
use crate::rest::NameDescription;
use crate::{with_repo, ServerContext, MAX_BODY_LENGTH};
///
/// Provide session categories routes
///
/// `GET site/categories`: list all session categories
///
/// `POST site/categories`: create a session category
///
/// `PUT site/categories/{id}` update a session category
///
/// `DELETE site/categories/{id}` delete a session category
pub fn build_session_categories_routes(context: &ServerContext) -> BoxedFilter<(impl Reply,)> {
    // Each route combines method + repository injection (+ body/param
    // extraction) with its async handler below.
    let create = warp::post()
        .and(with_repo(context.repos()))
        .and(warp::body::content_length_limit(MAX_BODY_LENGTH))
        .and(warp::body::json())
        .and_then(create_category);
    let list = warp::get() //
        .and(with_repo(context.repos())) //
        .and_then(list_categories);
    let delete = warp::delete()
        .and(with_repo(context.repos()))
        .and(warp::path::param::<Uuid>())
        .and_then(delete_category);
    let update = warp::put()
        .and(with_repo(context.repos()))
        .and(warp::path::param::<Uuid>())
        .and(warp::body::content_length_limit(MAX_BODY_LENGTH))
        .and(warp::body::json())
        .and_then(update_category);
    // All routes live under the "categories" path segment.
    warp::path("categories")
        .and(create.or(list).or(delete).or(update))
        .boxed()
}
// POST handler: creates a category from a JSON {name, description} body and
// replies with the created entity as JSON. DB failures become rejections.
async fn create_category(
    repos: Repositories,
    name_description: NameDescription,
) -> Result<impl Reply, Rejection> {
    let NameDescription { name, description } = name_description;
    info!("Creating a new category {:?}", name);
    let result = repos
        .session_category()
        .create(name, description)
        .await
        .map_err(Oops::db)?;
    debug!("Created the category {:?} ", result);
    let result = warp::reply::json(&result);
    Ok(result)
}
// GET handler: replies with all session categories as JSON.
async fn list_categories(repos: Repositories) -> Result<impl Reply, Rejection> {
    info!("Getting list of session categories");
    let result = repos.session_category().find().await.map_err(Oops::db)?;
    let result = warp::reply::json(&result);
    Ok(result)
}
// PUT handler: updates the category identified by the path `uuid` with the
// JSON body and replies with the updated entity as JSON.
async fn update_category(
    repos: Repositories,
    uuid: Uuid,
    name_description: NameDescription,
) -> Result<impl Reply, Rejection> {
    let NameDescription { name, description } = name_description;
    info!("Update category {}", name);
    let result = repos
        .session_category()
        .update(uuid, name, description)
        .await
        .map_err(Oops::db)?;
    info!("Updated the category {:?}", result);
    let result = warp::reply::json(&result);
    Ok(result)
}
// DELETE handler: removes the category identified by `uuid`; replies with
// the deleted entity as JSON, or 404 when it did not exist.
async fn delete_category(repos: Repositories, uuid: Uuid) -> Result<impl Reply, Rejection> {
    info!("Deleting category {:?}", uuid);
    let result = repos
        .session_category()
        .delete(uuid)
        .await
        .map_err(Oops::db)?;
    info!("Deleted the category {:?}", result);
    result
        .map(|it| warp::reply::json(&it))
        .ok_or_else(warp::reject::not_found)
}
| true |
467f6e89c082a3bf6e958a6fac30980958ba60b7
|
Rust
|
paavohuhtala/advent-of-code-2020
|
/src/bin/day15.rs
|
UTF-8
| 1,809 | 3.46875 | 3 |
[
"MIT"
] |
permissive
|
use std::collections::HashMap;
fn main() {
    // Puzzle input: one comma-separated line of starting numbers.
    // NOTE(review): a trailing newline in the file would make parse() fail
    // and the unwrap panic — confirm the input file has none.
    let input = include_str!("./input15.txt");
    let input = input
        .split(",")
        .map(|x| x.parse::<u64>().unwrap())
        .collect::<Vec<_>>();
    println!("a: {:?}", day15a(&input));
    println!("b: {:?}", day15b(&input));
}
/// Plays the memory game and returns the 2020th spoken number.
///
/// Each turn speaks 0 if the previous number was new, otherwise the distance
/// (in turns) between its two most recent occurrences.
///
/// The original implementation re-scanned the whole spoken history every
/// turn (accidentally O(n²)); tracking the last turn each number was spoken
/// makes this O(n), mirroring the approach already used by `day15b`.
///
/// # Panics
/// Panics when `input` is empty (as the original did).
fn day15a(input: &[u64]) -> u64 {
    if input.len() >= 2020 {
        // Degenerate case: the 2020th spoken number is a starting number,
        // matching what indexing the history would have produced.
        return input[2019];
    }
    // Last turn (1-based) each number was spoken, excluding the most recent
    // number, which is carried in `previous` instead.
    let mut last_seen: HashMap<u64, u64> = HashMap::new();
    for (i, &num) in input.iter().enumerate() {
        if i + 1 < input.len() {
            last_seen.insert(num, i as u64 + 1);
        }
    }
    let mut previous = *input.last().unwrap();
    for turn in input.len() as u64..2020 {
        // `previous` was spoken on `turn`; record that and fetch, in the
        // same map operation, the turn it was spoken before that (if any).
        previous = match last_seen.insert(previous, turn) {
            Some(earlier) => turn - earlier,
            None => 0,
        };
    }
    previous
}
// Same memory game as day15a, but run to the 30,000,000th spoken number.
// Keeps every turn each number was spoken in a map keyed by the number.
fn day15b(input: &[u64]) -> u64 {
    let mut memory: HashMap<u64, Vec<u64>> = HashMap::new();
    let mut turn = 1u64;
    // Seed the map with the starting numbers and their (1-based) turns.
    for &number in input {
        memory.entry(number).or_default().push(turn);
        turn += 1;
    }
    let mut previous = *input.last().unwrap();
    while turn <= 30_000_000 {
        // Progress indicator: this loop runs for a while.
        if turn % 1_000_000 == 0 {
            println!("{}", turn);
        }
        let previous_instances = memory.get(&previous).unwrap();
        let next_number = if previous_instances.len() == 1 {
            // First time spoken: the next number is 0.
            0
        } else {
            // Otherwise: gap between the last two times it was spoken.
            let previous_turn = previous_instances[previous_instances.len() - 2];
            let age = turn - previous_turn - 1;
            age
        };
        memory.entry(next_number).or_default().push(turn);
        previous = next_number;
        turn += 1;
    }
    previous
}
| true |
2f6cc5a814856c967e7a5f3841d6569abbf81399
|
Rust
|
sogapalag/cplib
|
/src/core/binary_search.rs
|
UTF-8
| 1,857 | 3.34375 | 3 |
[] |
no_license
|
use std::ops::Range;
use super::num::identities::Two;
use super::num::Integer;
/// Binary search `l..r`, find first `i` s.t. `f(i)` is true.
///
/// Assumes `f` is monotone over the range (once true, stays true). When no
/// index satisfies `f`, the exclusive upper bound `r` is returned.
///
/// # Panics
/// Panics when the range is empty (`l >= r`).
pub fn bs_first<T, F>(r: Range<T>, mut f: F) -> T
where
    T: Integer + Two + Copy,
    F: FnMut(T) -> bool,
{
    let Range {
        start: mut l,
        end: mut r,
    } = r;
    assert!(l < r);
    // Invariant: the answer lies in l..=r; f is false below l, true from r.
    while l < r {
        let mid = l + (r - l) / T::TWO;
        if f(mid) {
            r = mid;
        } else {
            l = mid + T::ONE;
        }
    }
    r
}
/// Binary search `l..r`, find last `i` s.t. `f(i)` still true.
///
/// Assumes `f` is monotone over the range (true, then stays false).
///
/// # Panics
/// Panics when the range is empty (`l >= r`).
// NOTE(review): when `f` is false everywhere this returns `l - ONE`, which
// underflows for unsigned `T` at `l == 0` — callers must guarantee at least
// one index satisfies `f` (or accept the wrap).
pub fn bs_last<T, F>(r: Range<T>, mut f: F) -> T
where
    T: Integer + Two + Copy,
    F: FnMut(T) -> bool,
{
    let Range {
        start: mut l,
        end: mut r,
    } = r;
    assert!(l < r);
    // Invariant: f is true below l, false from r; the answer is r - ONE.
    while l < r {
        let mid = l + (r - l) / T::TWO;
        if !f(mid) {
            r = mid;
        } else {
            l = mid + T::ONE;
        }
    }
    r - T::ONE
}
// Deprecated
//macro_rules! bs_first {
// ($l:expr , $r:expr, |$i:ident| $b:expr) => {{
// let mut l = $l;
// let mut r = $r;
// assert!(l < r);
// let f = |$i| $b;
// let mut mid;
// while l < r {
// mid = l + (r - l) / 2;
// if f(mid) {
// r = mid;
// } else {
// l = mid + 1;
// }
// }
// r
// }};
//}
//macro_rules! bs_last {
// ($l:expr , $r:expr, |$i:ident| $b:expr) => {{
// let mut l = $l;
// let mut r = $r;
// assert!(l < r);
// let f = |$i| $b;
// let mut mid;
// while l < r {
// mid = l + (r - l) / 2;
// if !f(mid) {
// r = mid;
// } else {
// l = mid + 1;
// }
// }
// r - 1
// }};
//}
#[cfg(test)]
mod tests;
| true |
3c9da2b4aef26ce2c19b2fe7d2c5150cb7485f69
|
Rust
|
BogdanFloris/leetcode-rust
|
/src/roman_to_int.rs
|
UTF-8
| 1,465 | 3.78125 | 4 |
[] |
no_license
|
use std::collections::HashMap;
/// Converts a roman numeral such as `"MCMXCIV"` into its integer value.
///
/// Scans the numeral right-to-left: a symbol smaller than the largest symbol
/// seen so far is subtractive (the `I` in `IV`), otherwise it is additive.
/// Characters that are not roman digits contribute 0. An empty string yields
/// 0, where the previous implementation panicked (`s.len() - 1` underflowed).
/// Replaces the per-call `HashMap` plus repeated `unwrap()` lookups with a
/// simple match on the digit.
#[allow(dead_code)]
pub fn roman_to_int(s: String) -> i32 {
    // Value of one roman digit; unknown characters map to 0.
    fn digit(c: char) -> i32 {
        match c {
            'I' => 1,
            'V' => 5,
            'X' => 10,
            'L' => 50,
            'C' => 100,
            'D' => 500,
            'M' => 1000,
            _ => 0,
        }
    }
    let mut total = 0;
    let mut largest_seen = 0;
    for c in s.chars().rev() {
        let value = digit(c);
        if value < largest_seen {
            // A smaller symbol left of a larger one is subtractive notation.
            total -= value;
        } else {
            total += value;
            largest_seen = value;
        }
    }
    total
}
#[cfg(test)]
mod tests {
    use super::roman_to_int;
    // Cases mirror the LeetCode "Roman to Integer" examples, including the
    // subtractive forms IV, IX, XC, and CM.
    #[test]
    fn basic_1() {
        let s = String::from("III");
        assert_eq!(roman_to_int(s), 3);
    }
    #[test]
    fn basic_2() {
        let s = String::from("IV");
        assert_eq!(roman_to_int(s), 4);
    }
    #[test]
    fn basic_3() {
        let s = String::from("IX");
        assert_eq!(roman_to_int(s), 9);
    }
    #[test]
    fn basic_4() {
        let s = String::from("LVIII");
        assert_eq!(roman_to_int(s), 58);
    }
    #[test]
    fn basic_5() {
        // Mixed additive and subtractive symbols: M + CM + XC + IV.
        let s = String::from("MCMXCIV");
        assert_eq!(roman_to_int(s), 1994);
    }
}
| true |
8ed969d5ac207af9006ec144ada923a60e3ad724
|
Rust
|
lemonteaa/rusty-raytracer
|
/src/scene/light.rs
|
UTF-8
| 569 | 2.765625 | 3 |
[] |
no_license
|
use na::{Vector3, Point3, Unit};
use ::util::Color;
/// A light source in the scene: its geometric kind plus its color.
pub struct Lighting {
    pub light_pos : LightingType,
    pub color : Color
}
/// The geometric kind of a light source.
pub enum LightingType {
    Ambient,
    Directional { dir : Vector3<f64> },
    Point { loc : Point3<f64> }
}
impl LightingType {
    /// Returns the incident-light direction at surface point `pos`.
    ///
    /// Ambient light has no direction (zero vector); directional light is the
    /// same everywhere; a point light points from the light toward `pos`,
    /// normalized to unit length.
    pub fn get_incident_vec(&self, pos : Point3<f64>) -> Vector3<f64> {
        match *self {
            LightingType::Ambient {} => Vector3::new(0.0, 0.0, 0.0),
            LightingType::Directional { dir } => dir,
            LightingType::Point { loc } => (pos - loc).normalize()
        }
    }
}
| true |
e698fe3cf8c70a58ce467622276d3799be4f1267
|
Rust
|
Tristramg/transport-validator
|
/src/validators/feed_info.rs
|
UTF-8
| 3,991 | 2.921875 | 3 |
[
"MIT"
] |
permissive
|
use crate::validators::issues::*;
/// Validates the `feed_info` records of a GTFS feed.
///
/// Four independent checks run over every record: publisher URL present,
/// publisher URL well-formed (http/https/ftp), language code present, and
/// language code recognized. One `Issue` is emitted per failed check, so a
/// single record can yield several issues.
pub fn validate(gtfs: &gtfs_structures::Gtfs) -> Vec<Issue> {
    let missing_url = gtfs
        .feed_info
        .iter()
        .filter(|feed_info| !has_url(feed_info))
        .map(|feed_info| make_issue(feed_info, IssueType::MissingUrl));
    let invalid_url = gtfs
        .feed_info
        .iter()
        .filter(|feed_info| !valid_url(feed_info))
        .map(|feed_info| {
            make_issue(feed_info, IssueType::InvalidUrl)
                .details(&format!("Publisher url {} is invalid", feed_info.url))
        });
    let missing_lang = gtfs
        .feed_info
        .iter()
        .filter(|feed_info| !has_lang(feed_info))
        .map(|feed_info| make_issue(feed_info, IssueType::MissingLanguage));
    let invalid_lang = gtfs
        .feed_info
        .iter()
        .filter(|feed_info| !valid_lang(feed_info))
        .map(|feed_info| {
            make_issue(feed_info, IssueType::InvalidLanguage)
                .details(&format!("Language code {} does not exist", feed_info.lang))
        });
    // Lazy iterators chained and collected once at the end.
    missing_url
        .chain(invalid_url)
        .chain(missing_lang)
        .chain(invalid_lang)
        .collect()
}
/// Builds an error-severity issue named after the feed's `Display` output.
fn make_issue(feed: &gtfs_structures::FeedInfo, issue_type: IssueType) -> Issue {
    Issue::new(Severity::Error, issue_type, "").name(&format!("{}", feed))
}
/// True when the publisher URL field is non-empty.
fn has_url(feed: &gtfs_structures::FeedInfo) -> bool {
    !feed.url.is_empty()
}
/// True when the publisher URL parses and uses an accepted scheme.
fn valid_url(feed: &gtfs_structures::FeedInfo) -> bool {
    url::Url::parse(feed.url.as_ref())
        .map(|url| vec!["https", "http", "ftp"].contains(&url.scheme()))
        .unwrap_or(false)
}
/// True when the language field is non-empty.
fn has_lang(feed: &gtfs_structures::FeedInfo) -> bool {
    !feed.lang.is_empty()
}
fn valid_lang(feed: >fs_structures::FeedInfo) -> bool {
let len = feed.lang.len();
match len {
2 => isolang::Language::from_639_1(&feed.lang).is_some(),
3 => isolang::Language::from_639_3(&feed.lang).is_some(),
4...11 => isolang::Language::from_locale(&feed.lang).is_some(),
_ => false,
}
}
// Integration-style tests against the fixture feed in test_data/feed_info;
// each test filters the produced issues down to one expected entry.
#[test]
fn test_missing_url() {
    let gtfs = gtfs_structures::Gtfs::new("test_data/feed_info").unwrap();
    let issues = validate(&gtfs);
    let missing_url_issue: Vec<_> = issues
        .iter()
        .filter(|issue| issue.issue_type == IssueType::MissingUrl)
        .collect();
    assert_eq!(1, missing_url_issue.len());
    assert_eq!("SNCF", missing_url_issue[0].object_name.as_ref().unwrap());
    assert_eq!(IssueType::MissingUrl, missing_url_issue[0].issue_type);
}
#[test]
fn test_valid_url() {
    let gtfs = gtfs_structures::Gtfs::new("test_data/feed_info").unwrap();
    let issues = validate(&gtfs);
    let invalid_url_issue: Vec<_> = issues
        .iter()
        .filter(|issue| issue.issue_type == IssueType::InvalidUrl)
        .filter(|issue| issue.object_name == Some("BIBUS".to_string()))
        .collect();
    assert_eq!(1, invalid_url_issue.len());
    assert_eq!(IssueType::InvalidUrl, invalid_url_issue[0].issue_type);
}
#[test]
fn test_missing_lang() {
    let gtfs = gtfs_structures::Gtfs::new("test_data/feed_info").unwrap();
    let issues = validate(&gtfs);
    let missing_lang_issue: Vec<_> = issues
        .iter()
        .filter(|issue| issue.issue_type == IssueType::MissingLanguage)
        .filter(|issue| issue.object_name == Some("BIBUS".to_string()))
        .collect();
    assert_eq!(1, missing_lang_issue.len());
    assert_eq!(IssueType::MissingLanguage, missing_lang_issue[0].issue_type);
}
#[test]
fn test_valid_lang() {
    let gtfs = gtfs_structures::Gtfs::new("test_data/feed_info").unwrap();
    let issues = validate(&gtfs);
    let invalid_lang_issue: Vec<_> = issues
        .iter()
        .filter(|issue| issue.issue_type == IssueType::InvalidLanguage)
        .filter(|issue| issue.object_name == Some("SNCF".to_string()))
        .collect();
    assert_eq!(1, invalid_lang_issue.len());
    assert_eq!(IssueType::InvalidLanguage, invalid_lang_issue[0].issue_type);
}
| true |
bbefa1eda253273c59ab979b1b5c423e5e427e34
|
Rust
|
bnert/raytracing-one-weekend
|
/src/ray_engine/camera.rs
|
UTF-8
| 2,430 | 3.375 | 3 |
[] |
no_license
|
use crate::three::vector3::Vector;
/// Pinhole camera: image geometry plus the vectors spanning the viewport.
#[derive(Debug)]
pub struct Camera {
    pub aspect_ratio: f32,
    pub img_width: u32,
    pub img_height: u32,
    pub viewport_height: f32,
    pub viewport_width: f32,
    pub focal_length: f32,
    // Camera position.
    pub origin: Vector,
    // Horizontal span of the viewport.
    pub horizontal: Vector,
    // Vertical span of the viewport.
    pub vertical: Vector,
    // World-space position of the viewport's lower-left corner.
    pub lower_left_corner: Vector,
}
impl Camera {
    /// Returns an all-zero camera; use `create` to build a usable one.
    pub fn new() -> Camera {
        Camera {
            aspect_ratio: 0.0,
            img_width: 0,
            img_height: 0,
            viewport_height: 0.0,
            viewport_width: 0.0,
            focal_length: 0.0,
            origin: Vector::new(),
            horizontal: Vector::new(),
            vertical: Vector::new(),
            lower_left_corner: Vector::new(),
        }
    }
    /// Builds a camera at the origin from aspect ratio, image width,
    /// viewport height, and focal length; image height and viewport width
    /// are derived from the aspect ratio.
    pub fn create(asp: f32, img_w: u32, view_h: f32, foc_len: f32) -> Camera {
        let asp_r = asp;
        let iw = img_w;
        let ih = (img_w as f32 / asp) as u32;
        let vh = view_h;
        let vw = vh * asp;
        let fl = foc_len;
        let orig = Vector::create(0.0, 0.0, 0.0);
        let horz = Vector::create(vw as f64, 0.0, 0.0);
        let vert = Vector::create(0.0, vh as f64, 0.0);
        // This computes where the lower left corner of
        // the viewable plane (viewport) is located.
        // Eq: origin - (horz / 2)- (vert / 2) - Vector(0, 0, focal_len)
        let llc = orig
            .sub(&horz.scale(0.5))
            .sub(&vert.scale(0.5))
            .sub(&Vector::create(0.0, 0.0, fl.into()));
        Camera {
            aspect_ratio: asp_r,
            img_width: iw,
            img_height: ih,
            viewport_height: vh,
            viewport_width: vh * asp,
            focal_length: fl,
            origin: orig,
            horizontal: horz,
            vertical: vert,
            lower_left_corner: llc,
        }
    }
}
/// Unit tests for `Camera::create`. The `#[cfg(test)]` attribute was missing,
/// so this module (and its glob import) was compiled into every build,
/// producing unused-import warnings and dead code in release binaries.
#[cfg(test)]
mod tests {
    use super::*;

    /// `create` should derive the horizontal viewport span and the
    /// lower-left corner from the aspect ratio and focal length.
    #[test]
    fn instantiation() {
        let asp_ratio = 16.0 / 9.0;
        let img_w = 384;
        let view_h = 2.0;
        let focal_l = 1.0;
        let c = Camera::create(asp_ratio, img_w, view_h, focal_l);
        let view_w = view_h * asp_ratio;
        let horz_res = ((32.0 / 9.0) as f32) as f64;
        assert_eq!(c.horizontal.x, horz_res); // gross but good for now
        let l = c.lower_left_corner;
        assert_eq!(l.x, -(horz_res / 2.0));
        assert_eq!(l.y, -1.0);
        assert_eq!(l.z, -1.0);
    }
}
| true |
3714ed605062dfd7c93786a01b087f6947dc504e
|
Rust
|
drewm1980/rust-tinkering
|
/src/constants.rs
|
UTF-8
| 809 | 2.9375 | 3 |
[] |
no_license
|
#![allow(dead_code)]
#![allow(unused_variables)]
#![allow(unused_mut)]
#![feature(macro_rules)]
#![feature(globs)]
// NOTE(review): this file uses pre-1.0 Rust syntax (`[T, ..N]` fixed-size
// arrays, `#![feature(globs)]`, `extern crate test`); it will not compile on
// modern rustc and is preserved as-is for historical tinkering.
static FOO:u8 = 10;
static BAR:f64 = 1.0;
static CHR:char = '&';
// The commented statics below document type errors hit while experimenting:
//static STRIING:String = "Hello"; // Found &'static str expected String
//static STR:str = "World"; // Expected str found &'static str
//static ONE:u8 = 1;
//static TWO:u8 = 2;
const ONE:u8 = 1;
const TWO:u8 = 2;
const ONETWO:[&'static u8, ..2] = [&ONE, &TWO];
const STRHELLO:&'static str = "Hello";
const STRWORLD:&'static str = "World";
const ARR:[&'static str, ..2] = [STRHELLO,STRWORLD];
fn main() {
    // Read the statics to silence dead-code analysis.
    let mut foo = FOO;
    let mut bar = BAR;
}
#[cfg(test)]
mod test {
    extern crate test;
    #[test]
    fn use_constants () {
        let mut foo = super::FOO;
        let mut bar = super::BAR;
    }
}
| true |
45aae3efed9e7e2a13d1883888839d0b5eb091e9
|
Rust
|
bandprotocol/bandchain
|
/owasm/src/core/vm.rs
|
UTF-8
| 2,085 | 2.859375 | 3 |
[
"Apache-2.0"
] |
permissive
|
use crate::core::error::Error;
/// Host environment exposed to an Owasm script: request metadata, calldata
/// access, and the external-data request/response channel, all mediated by
/// the VM runner.
pub trait Env {
    /// Returns the maximum span size value.
    fn get_span_size(&self) -> i64;
    /// Returns user calldata, or returns error from VM runner.
    fn get_calldata(&self) -> Result<Vec<u8>, Error>;
    /// Sends the desired return `data` to VM runner, or returns error from VM runner.
    fn set_return_data(&self, data: &[u8]) -> Result<(), Error>;
    /// Returns the current "ask count" value.
    fn get_ask_count(&self) -> i64;
    /// Returns the current "min count" value.
    fn get_min_count(&self) -> i64;
    /// Returns the current "ans count" value, or error from VM runner if called on wrong period.
    fn get_ans_count(&self) -> Result<i64, Error>;
    /// Issues a new external data request to VM runner, with the specified ids and calldata.
    fn ask_external_data(&self, eid: i64, did: i64, data: &[u8]) -> Result<(), Error>;
    /// Returns external data status for data id `eid` from validator index `vid`.
    fn get_external_data_status(&self, eid: i64, vid: i64) -> Result<i64, Error>;
    /// Returns data span with the data id `eid` from validator index `vid`.
    fn get_external_data(&self, eid: i64, vid: i64) -> Result<Vec<u8>, Error>;
}
/// A `VMLogic` encapsulates the runtime logic of Owasm scripts.
pub struct VMLogic<'a, E>
where
    E: Env,
{
    pub env: &'a E, // The execution environment.
    pub gas_limit: u32, // Amount of gas allowed for total execution.
    pub gas_used: u32, // Amount of gas used in this execution.
}
impl<'a, E> VMLogic<'a, E>
where
    E: Env,
{
    /// Creates a new `VMLogic` instance with `gas` as its total budget and
    /// zero gas consumed so far.
    pub fn new(env: &'a E, gas: u32) -> VMLogic<'a, E> {
        // Field-init shorthand replaces the redundant `env: env`.
        VMLogic { env, gas_limit: gas, gas_used: 0 }
    }
    /// Consumes the given amount of gas. Returns `OutOfGasError` if run out
    /// of gas; the counter saturates rather than wrapping on overflow, so a
    /// huge charge still trips the limit instead of resetting it.
    pub fn consume_gas(&mut self, gas: u32) -> Result<(), Error> {
        self.gas_used = self.gas_used.saturating_add(gas);
        if self.gas_used > self.gas_limit {
            Err(Error::OutOfGasError)
        } else {
            Ok(())
        }
    }
}
| true |
c96e92b9aa0a3487af6fc79948e8f8870fec8391
|
Rust
|
Noskcaj19/yalos
|
/src/drivers/keyboard.rs
|
UTF-8
| 2,253 | 2.875 | 3 |
[
"MIT"
] |
permissive
|
#![allow(dead_code)]
use lazy_static::lazy_static;
use spin::Mutex;
use x86_64::instructions::port::Port;
use crate::arch::interrupts::PICS;
lazy_static! {
    // Shift-modifier state shared across keyboard interrupts.
    static ref KEYBOARD_STATE: Mutex<Keyboard> = Mutex::new(Keyboard {
        left_shift: false,
        right_shift: false
    });
    // Keyboard controller data port (0x60).
    static ref KEYBOARD: Port<u8> = Port::new(0x60);
}
/// Keyboard interrupt handler: reads one scancode from port 0x60, tracks
/// shift state, logs the decoded character on key press, and acknowledges
/// the interrupt (vector 33) at the PIC.
pub fn key_handler() {
    let data = unsafe { KEYBOARD.read() };
    // Codes >= 0x80 are release events for scancode `data - 0x80`.
    let (scancode, pressed) = if data >= 0x80 {
        (data - 0x80, false)
    } else {
        (data, true)
    };
    let mut keyboard = KEYBOARD_STATE.lock();
    // 0x2A / 0x36 are the left / right shift scancodes tracked here.
    if scancode == 0x2A {
        keyboard.left_shift = pressed;
    } else if scancode == 0x36 {
        keyboard.right_shift = pressed;
    }
    let character = get_char(scancode, keyboard.left_shift | keyboard.right_shift);
    // Only presses of printable keys are logged; '\0' marks "no character".
    if pressed && character != '\0' {
        log::info!("{}", character);
    }
    // Send EOI
    unsafe {
        PICS.lock().notify_end_of_interrupt(33);
    }
}
/// Modifier-key state remembered between interrupts.
pub struct Keyboard {
    left_shift: bool,
    right_shift: bool,
}
// US layout translation table indexed by scancode. Each entry is
// [unshifted, shifted]; '\0' marks keys with no printable character.
static US: [[char; 2]; 58] = [
    ['\0', '\0'],
    ['\x1B', '\x1B'],
    ['1', '!'],
    ['2', '@'],
    ['3', '#'],
    ['4', '$'],
    ['5', '%'],
    ['6', '^'],
    ['7', '&'],
    ['8', '*'],
    ['9', '('],
    ['0', ')'],
    ['-', '_'],
    ['=', '+'],
    ['\x7F', '\x7F'],
    ['\t', '\t'],
    ['q', 'Q'],
    ['w', 'W'],
    ['e', 'E'],
    ['r', 'R'],
    ['t', 'T'],
    ['y', 'Y'],
    ['u', 'U'],
    ['i', 'I'],
    ['o', 'O'],
    ['p', 'P'],
    ['[', '{'],
    [']', '}'],
    ['\n', '\n'],
    ['\0', '\0'],
    ['a', 'A'],
    ['s', 'S'],
    ['d', 'D'],
    ['f', 'F'],
    ['g', 'G'],
    ['h', 'H'],
    ['j', 'J'],
    ['k', 'K'],
    ['l', 'L'],
    [';', ':'],
    ['\'', '"'],
    ['`', '~'],
    ['\0', '\0'],
    ['\\', '|'],
    ['z', 'Z'],
    ['x', 'X'],
    ['c', 'C'],
    ['v', 'V'],
    ['b', 'B'],
    ['n', 'N'],
    ['m', 'M'],
    [',', '<'],
    ['.', '>'],
    ['/', '?'],
    ['\0', '\0'],
    ['\0', '\0'],
    ['\0', '\0'],
    [' ', ' '],
];
/// Translates a scancode into a character via the US layout table.
/// Scancodes beyond the table yield '\0' (no printable character).
fn get_char(scancode: u8, shift: bool) -> char {
    let column = if shift { 1 } else { 0 };
    US.get(scancode as usize)
        .map(|pair| pair[column])
        .unwrap_or('\0')
}
| true |
1e7c6a86ca6dfa4dc84a25030dae6243668ad8c4
|
Rust
|
liukaizheng/xvim
|
/src/editor/mod.rs
|
UTF-8
| 1,919 | 2.546875 | 3 |
[] |
no_license
|
mod style;
mod window;
mod draw_command_batcher;
mod cursor;
pub use style::*;
use window::*;
use draw_command_batcher::*;
pub use cursor::*;
use log::debug;
use std::collections::HashMap;
use tokio::sync::mpsc::UnboundedReceiver;
use crate::{bridge::RedrawEvent, logging_sender::LoggingBoundedSender};
/// Commands forwarded to the rendering side.
#[derive(Debug)]
pub enum DrawCommand {
    CloseWindow(u64),
}
/// Commands forwarded to the OS window layer.
#[derive(Debug)]
pub enum WindowCommand {
    TitleChanged(String),
    SetMouseEnable(bool),
}
/// Editor state: per-id windows plus the channels used to emit commands.
struct Editor {
    pub windows: HashMap<u64, Window>,
    pub draw_command_batcher: DrawCommandBatcher,
    pub window_command_sender: LoggingBoundedSender<WindowCommand>,
}
impl Editor {
    /// Builds an empty editor wired to the given draw/window command channels.
    pub fn new(
        batched_draw_command_sender: LoggingBoundedSender<Vec<DrawCommand>>,
        window_command_sender: LoggingBoundedSender<WindowCommand>,
    ) -> Self {
        Self {
            windows: HashMap::new(),
            draw_command_batcher: DrawCommandBatcher::new(batched_draw_command_sender),
            window_command_sender,
        }
    }
    /// Translates one redraw event into a window command.
    /// Only `SetTitle` is handled so far; everything else is logged at debug.
    pub fn handle_redraw_event(&mut self, event: RedrawEvent) {
        match event {
            RedrawEvent::SetTitle { title } => {
                // Send errors are deliberately ignored (receiver may be gone).
                self.window_command_sender
                    .send(WindowCommand::TitleChanged(title))
                    .ok();
            }
            _ => {
                debug!("unhandled event {:?}", event);
            }
        }
    }
}
/// Spawns the editor loop on a dedicated OS thread: it blocks on the redraw
/// event channel and feeds each event to the editor until the channel closes.
pub fn start_editor(
    mut redraw_event_receiver: UnboundedReceiver<RedrawEvent>,
    batched_draw_command_sender: LoggingBoundedSender<Vec<DrawCommand>>,
    window_command_sender: LoggingBoundedSender<WindowCommand>,
) {
    std::thread::spawn(move || {
        let mut editor = Editor::new(batched_draw_command_sender, window_command_sender);
        // blocking_recv returns None once all senders are dropped, ending the thread.
        while let Some(redraw_event) = redraw_event_receiver.blocking_recv() {
            editor.handle_redraw_event(redraw_event);
        }
    });
}
| true |
9a304cfa6f168a834fe4ac5f2d0f451e376b9593
|
Rust
|
tu-cao/MeiliSearch
|
/meilisearch-http/src/error.rs
|
UTF-8
| 9,568 | 2.71875 | 3 |
[
"MIT"
] |
permissive
|
use std::fmt;
use actix_http::ResponseBuilder;
use actix_web as aweb;
use actix_web::http::StatusCode;
use serde_json::json;
use actix_web::error::JsonPayloadError;
/// Every error the HTTP layer can return to a client; `Display` supplies the
/// message and the `actix` impl below maps each variant to a status code.
#[derive(Debug)]
pub enum ResponseError {
    BadParameter(String, String),
    BadRequest(String),
    CreateIndex(String),
    DocumentNotFound(String),
    IndexNotFound(String),
    Internal(String),
    InvalidIndexUid,
    InvalidToken(String),
    Maintenance,
    MissingAuthorizationHeader,
    MissingHeader(String),
    NotFound(String),
    OpenIndex(String),
    FilterParsing(String),
    RetrieveDocument(u32, String),
    SearchDocuments(String),
    PayloadTooLarge,
    UnsupportedMediaType,
    FacetExpression(String),
    FacetCount(String),
}
/// Errors specific to parsing/serving facet-count requests.
pub enum FacetCountError {
    AttributeNotSet(String),
    SyntaxError(String),
    UnexpectedToken { found: String, expected: &'static [&'static str] },
    NoFacetSet,
}
impl FacetCountError {
    /// Convenience constructor for the `UnexpectedToken` variant.
    pub fn unexpected_token(found: impl ToString, expected: &'static [&'static str]) -> FacetCountError {
        let found = found.to_string();
        FacetCountError::UnexpectedToken { expected, found }
    }
}
impl From<serde_json::error::Error> for FacetCountError {
    fn from(other: serde_json::error::Error) -> FacetCountError {
        FacetCountError::SyntaxError(other.to_string())
    }
}
impl fmt::Display for FacetCountError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        use FacetCountError::*;
        match self {
            AttributeNotSet(attr) => write!(f, "attribute {} is not set as facet", attr),
            SyntaxError(msg) => write!(f, "syntax error: {}", msg),
            UnexpectedToken { expected, found } => write!(f, "unexpected {} found, expected {:?}", found, expected),
            NoFacetSet => write!(f, "can't perform facet count, as no facet is set"),
        }
    }
}
impl ResponseError {
    // Convenience constructors: each stringifies its argument eagerly via
    // `Display` so the resulting error owns its message.
    pub fn internal(err: impl fmt::Display) -> ResponseError {
        ResponseError::Internal(err.to_string())
    }
    pub fn bad_request(err: impl fmt::Display) -> ResponseError {
        ResponseError::BadRequest(err.to_string())
    }
    pub fn missing_authorization_header() -> ResponseError {
        ResponseError::MissingAuthorizationHeader
    }
    pub fn invalid_token(err: impl fmt::Display) -> ResponseError {
        ResponseError::InvalidToken(err.to_string())
    }
    pub fn not_found(err: impl fmt::Display) -> ResponseError {
        ResponseError::NotFound(err.to_string())
    }
    pub fn index_not_found(err: impl fmt::Display) -> ResponseError {
        ResponseError::IndexNotFound(err.to_string())
    }
    pub fn document_not_found(err: impl fmt::Display) -> ResponseError {
        ResponseError::DocumentNotFound(err.to_string())
    }
    pub fn missing_header(err: impl fmt::Display) -> ResponseError {
        ResponseError::MissingHeader(err.to_string())
    }
    pub fn bad_parameter(param: impl fmt::Display, err: impl fmt::Display) -> ResponseError {
        ResponseError::BadParameter(param.to_string(), err.to_string())
    }
    pub fn open_index(err: impl fmt::Display) -> ResponseError {
        ResponseError::OpenIndex(err.to_string())
    }
    pub fn create_index(err: impl fmt::Display) -> ResponseError {
        ResponseError::CreateIndex(err.to_string())
    }
    pub fn invalid_index_uid() -> ResponseError {
        ResponseError::InvalidIndexUid
    }
    pub fn maintenance() -> ResponseError {
        ResponseError::Maintenance
    }
    pub fn retrieve_document(doc_id: u32, err: impl fmt::Display) -> ResponseError {
        ResponseError::RetrieveDocument(doc_id, err.to_string())
    }
    pub fn search_documents(err: impl fmt::Display) -> ResponseError {
        ResponseError::SearchDocuments(err.to_string())
    }
}
impl fmt::Display for ResponseError {
    /// Human-readable message; this exact text is serialized into the JSON
    /// error body by `error_response` below.
    /// Fixes the `PayloadTooLarge` message typo: "Payload to large" ->
    /// "Payload too large".
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::BadParameter(param, err) => write!(f, "Url parameter {} error: {}", param, err),
            Self::BadRequest(err) => f.write_str(err),
            Self::CreateIndex(err) => write!(f, "Impossible to create index; {}", err),
            Self::DocumentNotFound(document_id) => write!(f, "Document with id {} not found", document_id),
            Self::IndexNotFound(index_uid) => write!(f, "Index {} not found", index_uid),
            Self::Internal(err) => f.write_str(err),
            Self::InvalidIndexUid => f.write_str("Index must have a valid uid; Index uid can be of type integer or string only composed of alphanumeric characters, hyphens (-) and underscores (_)."),
            Self::InvalidToken(err) => write!(f, "Invalid API key: {}", err),
            Self::Maintenance => f.write_str("Server is in maintenance, please try again later"),
            Self::FilterParsing(err) => write!(f, "parsing error: {}", err),
            Self::MissingAuthorizationHeader => f.write_str("You must have an authorization token"),
            Self::MissingHeader(header) => write!(f, "Header {} is missing", header),
            Self::NotFound(err) => write!(f, "{} not found", err),
            Self::OpenIndex(err) => write!(f, "Impossible to open index; {}", err),
            Self::RetrieveDocument(id, err) => write!(f, "impossible to retrieve the document with id: {}; {}", id, err),
            Self::SearchDocuments(err) => write!(f, "impossible to search documents; {}", err),
            Self::FacetExpression(e) => write!(f, "error parsing facet filter expression: {}", e),
            Self::PayloadTooLarge => f.write_str("Payload too large"),
            Self::UnsupportedMediaType => f.write_str("Unsupported media type"),
            Self::FacetCount(e) => write!(f, "error with facet count: {}", e),
        }
    }
}
impl aweb::error::ResponseError for ResponseError {
    /// Serializes the error as `{"message": "..."}` with the mapped status.
    fn error_response(&self) -> aweb::HttpResponse {
        ResponseBuilder::new(self.status_code()).json(json!({
            "message": self.to_string(),
        }))
    }
    /// Maps every variant to its HTTP status code.
    fn status_code(&self) -> StatusCode {
        match *self {
            Self::BadParameter(_, _)
            | Self::BadRequest(_)
            | Self::CreateIndex(_)
            | Self::InvalidIndexUid
            | Self::OpenIndex(_)
            | Self::RetrieveDocument(_, _)
            | Self::FacetExpression(_)
            | Self::SearchDocuments(_)
            | Self::FacetCount(_)
            | Self::FilterParsing(_) => StatusCode::BAD_REQUEST,
            Self::DocumentNotFound(_)
            | Self::IndexNotFound(_)
            | Self::NotFound(_) => StatusCode::NOT_FOUND,
            Self::InvalidToken(_)
            | Self::MissingHeader(_) => StatusCode::UNAUTHORIZED,
            Self::MissingAuthorizationHeader => StatusCode::FORBIDDEN,
            Self::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR,
            Self::Maintenance => StatusCode::SERVICE_UNAVAILABLE,
            Self::PayloadTooLarge => StatusCode::PAYLOAD_TOO_LARGE,
            Self::UnsupportedMediaType => StatusCode::UNSUPPORTED_MEDIA_TYPE,
        }
    }
}
// Conversions from storage/engine/web-framework errors into API errors, so
// `?` can be used throughout the handlers.
impl From<meilisearch_core::HeedError> for ResponseError {
    fn from(err: meilisearch_core::HeedError) -> ResponseError {
        ResponseError::Internal(err.to_string())
    }
}
impl From<meilisearch_core::FstError> for ResponseError {
    fn from(err: meilisearch_core::FstError) -> ResponseError {
        ResponseError::Internal(err.to_string())
    }
}
impl From<meilisearch_core::FacetError> for ResponseError {
    fn from(error: meilisearch_core::FacetError) -> ResponseError {
        ResponseError::FacetExpression(error.to_string())
    }
}
impl From<meilisearch_core::Error> for ResponseError {
    fn from(err: meilisearch_core::Error) -> ResponseError {
        use meilisearch_core::pest_error::LineColLocation::*;
        match err {
            meilisearch_core::Error::FilterParseError(e) => {
                // Normalize pest's span/pos variants into a (line, column) pair.
                let (line, column) = match e.line_col {
                    Span((line, _), (column, _)) => (line, column),
                    Pos((line, column)) => (line, column),
                };
                let message = format!("parsing error on line {} at column {}: {}", line, column, e.variant.message());
                ResponseError::FilterParsing(message)
            },
            meilisearch_core::Error::FacetError(e) => ResponseError::FacetExpression(e.to_string()),
            _ => ResponseError::Internal(err.to_string()),
        }
    }
}
impl From<meilisearch_schema::Error> for ResponseError {
    fn from(err: meilisearch_schema::Error) -> ResponseError {
        ResponseError::Internal(err.to_string())
    }
}
impl From<actix_http::Error> for ResponseError {
    fn from(err: actix_http::Error) -> ResponseError {
        ResponseError::Internal(err.to_string())
    }
}
impl From<FacetCountError> for ResponseError {
    fn from(other: FacetCountError) -> ResponseError {
        ResponseError::FacetCount(other.to_string())
    }
}
impl From<JsonPayloadError> for ResponseError {
    fn from(err: JsonPayloadError) -> ResponseError {
        match err {
            JsonPayloadError::Deserialize(err) => ResponseError::BadRequest(format!("Invalid JSON: {}", err)),
            JsonPayloadError::Overflow => ResponseError::PayloadTooLarge,
            JsonPayloadError::ContentType => ResponseError::UnsupportedMediaType,
            JsonPayloadError::Payload(err) => ResponseError::BadRequest(format!("Problem while decoding the request: {}", err)),
        }
    }
}
/// Adapter used as actix's JSON error handler.
pub fn json_error_handler(err: JsonPayloadError) -> ResponseError {
    err.into()
}
| true |
607c2126f800e1a164b9cabf018943052ec5f835
|
Rust
|
alex-dukhno/rust-tdd-katas
|
/old-katas-iteration-01/string_calc_kata/src/iter_2/day_5.rs
|
UTF-8
| 3,217 | 3.765625 | 4 |
[
"MIT"
] |
permissive
|
use std::iter::Peekable;
use std::str::Chars;
use std::result::Result;
use std::num::ParseFloatError;
/// Stateless recursive-descent calculator.
#[derive(Default)]
pub struct Calculator;

impl Calculator {
    /// Creates a new calculator.
    pub fn new() -> Calculator {
        Calculator
    }

    /// Evaluates an arithmetic expression supporting `+`, `-`, `×`, `÷`,
    /// parentheses, and decimal numbers.
    ///
    /// # Errors
    /// Returns a `ParseFloatError` when an operand cannot be parsed as `f64`
    /// (for example, a missing operand).
    pub fn evaluate(&self, src: &str) -> Result<f64, ParseFloatError> {
        let mut iter = src.chars().peekable();
        self.parse_expression(&mut iter)
    }

    /// expression := term (('+' | '-') term)*
    fn parse_expression(&self, iter: &mut Peekable<Chars>) -> Result<f64, ParseFloatError> {
        // `?` replaces the long-deprecated `try!` macro; behavior is identical.
        let mut result = self.parse_term(iter.by_ref())?;
        loop {
            match iter.peek().cloned() {
                Some('+') => { iter.next(); result += self.parse_term(iter.by_ref())?; },
                Some('-') => { iter.next(); result -= self.parse_term(iter.by_ref())?; },
                Some(_) | None => break,
            }
        }
        Ok(result)
    }

    /// term := arg (('×' | '÷') arg)*
    fn parse_term(&self, iter: &mut Peekable<Chars>) -> Result<f64, ParseFloatError> {
        let mut result = self.parse_arg(iter.by_ref())?;
        loop {
            match iter.peek().cloned() {
                Some('×') => { iter.next(); result *= self.parse_arg(iter.by_ref())?; },
                Some('÷') => { iter.next(); result /= self.parse_arg(iter.by_ref())?; },
                Some(_) | None => break,
            }
        }
        Ok(result)
    }

    /// arg := number | '(' expression ')'
    fn parse_arg(&self, iter: &mut Peekable<Chars>) -> Result<f64, ParseFloatError> {
        let mut num = Vec::new();
        loop {
            match iter.peek().cloned() {
                // `..=` replaces the deprecated `...` inclusive-range pattern.
                Some(digit @ '0'..='9') => { iter.next(); num.push(digit); },
                Some(point @ '.') => { iter.next(); num.push(point); },
                Some('(') => {
                    iter.next();
                    let result = self.parse_expression(iter.by_ref())?;
                    // Consume the closing ')'.
                    iter.next();
                    return Ok(result);
                },
                Some(_) | None => break,
            }
        }
        let s = num.drain(..).collect::<String>();
        s.parse::<f64>()
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Coverage: literals, each operator, operator sequencing, and parentheses.
    #[test]
    fn should_evaluate_float_number() {
        assert_eq!(Calculator::new().evaluate("32435.246"), Ok(32435.246));
    }
    #[test]
    fn should_evaluate_add_operation() {
        assert_eq!(Calculator::new().evaluate("35435.657+213.546"), Ok(35649.203));
    }
    #[test]
    fn should_evaluate_sub_operation() {
        assert_eq!(Calculator::new().evaluate("3465.6757-324.2346"), Ok(3141.4411));
    }
    #[test]
    fn should_evaluate_mul_operation() {
        assert_eq!(Calculator::new().evaluate("354.76×25.2"), Ok(8939.952));
    }
    #[test]
    fn should_evaluate_div_operation() {
        assert_eq!(Calculator::new().evaluate("3254.546÷32.32"), Ok(100.69758663366336));
    }
    #[test]
    fn should_evaluate_sequence_of_operations() {
        assert_eq!(Calculator::new().evaluate("3254+324×23-461.125×2+4.248÷23-461×1.25+48"), Ok(9255.684695652173));
    }
    #[test]
    fn should_evaluate_expression_with_parenthesis() {
        assert_eq!(Calculator::new().evaluate("3425+214+(213.3-22.4×12)-3254×(234.2+32.2)+54÷2"), Ok(-863255.1));
    }
}
| true |
35c1e87bfebade6aec5394af76dbaab6c28f992c
|
Rust
|
lelongg/geo-geojson
|
/src/lib.rs
|
UTF-8
| 4,241 | 3.25 | 3 |
[] |
no_license
|
//! This crates converts [geojson](https://geojson.org/) strings to [GeometryCollection](https://docs.rs/geo-types/0.4.3/geo_types/struct.GeometryCollection.html).
//!
//! This is the missing link between the [geo-types](https://github.com/georust/geo) crate and the [geojson](https://github.com/georust/geojson) crate and should probably be part of [geojson](https://github.com/georust/geojson).
//!
//! # Example
//!
//! ```
//! # fn main() -> Result<(), Box<std::error::Error>> {
//! # use std::fs;
//! let geojson_str = fs::read_to_string("src/tests/demo.json")?;
//! let collection: geo_types::GeometryCollection<f64> = geo_geojson::from_str(&geojson_str)?;
//! # Ok(())
//! # }
//! ```
use geo_types::GeometryCollection;
use geojson::{Error, GeoJson};
use num_traits::{Float, Num, NumCast};
use std::str::FromStr;
#[cfg(test)]
mod tests;
/// Converts [geojson](https://geojson.org/) strings to [GeometryCollection](https://docs.rs/geo-types/0.4.3/geo_types/struct.GeometryCollection.html).
///
/// # Errors
///
/// Returns the same errors, under the same conditions, as [GeoJson::from_str](https://docs.rs/geojson/0.16.0/geojson/enum.GeoJson.html#method.from_str).
pub fn from_str<T>(s: &str) -> Result<GeometryCollection<T>, Error>
where
    T: Num + NumCast + PartialOrd + Copy + Float,
{
    let geojson = GeoJson::from_str(s)?;
    // Each top-level GeoJson shape is flattened into one GeometryCollection.
    Ok(match geojson {
        GeoJson::Feature(feature) => conversion::from_feature(feature),
        GeoJson::FeatureCollection(feature_collection) => {
            conversion::from_feature_collection(feature_collection)
        }
        GeoJson::Geometry(geometry) => conversion::from_geometry(geometry),
    })
}
pub mod conversion {
    //! This module contains conversion function from [geojson](https://docs.rs/geojson/0.16.0/geojson/) types to [GeometryCollection](https://docs.rs/geo-types/0.4.3/geo_types/struct.GeometryCollection.html).
    use geo_types::{
        GeometryCollection, LineString, MultiLineString, MultiPoint, MultiPolygon, Point, Polygon,
    };
    use geojson::{Feature, FeatureCollection, Geometry, Value};
    use num_traits::{Float, Num, NumCast};
    use std::convert::TryInto;
    /// Converts one feature; a feature without geometry yields an empty collection.
    pub fn from_feature<T>(feature: Feature) -> GeometryCollection<T>
    where
        T: Num + NumCast + PartialOrd + Copy + Float,
    {
        feature
            .geometry
            .map(from_geometry)
            .unwrap_or_else(GeometryCollection::new)
    }
    /// Converts every feature in the collection and flattens the results.
    pub fn from_feature_collection<T>(
        feature_collection: FeatureCollection,
    ) -> GeometryCollection<T>
    where
        T: Num + NumCast + PartialOrd + Copy + Float,
    {
        feature_collection
            .features
            .into_iter()
            .flat_map(from_feature)
            .collect()
    }
    /// Converts a bare geometry by dispatching on its value.
    pub fn from_geometry<T>(geometry: Geometry) -> GeometryCollection<T>
    where
        T: Num + NumCast + PartialOrd + Copy + Float,
    {
        from_value(geometry.value)
    }
    /// Converts a geojson value into a collection.
    ///
    /// NOTE(review): a failed `TryInto` conversion is silently swallowed and
    /// produces an empty collection rather than an error.
    pub fn from_value<T>(value: Value) -> GeometryCollection<T>
    where
        T: Num + NumCast + PartialOrd + Copy + Float,
    {
        match value {
            Value::Point(_) => TryInto::<Point<T>>::try_into(value)
                .map(|value| GeometryCollection(vec![value.into()])),
            Value::MultiPoint(_) => TryInto::<MultiPoint<T>>::try_into(value)
                .map(|value| GeometryCollection(vec![value.into()])),
            Value::LineString(_) => TryInto::<LineString<T>>::try_into(value)
                .map(|value| GeometryCollection(vec![value.into()])),
            Value::MultiLineString(_) => TryInto::<MultiLineString<T>>::try_into(value)
                .map(|value| GeometryCollection(vec![value.into()])),
            Value::Polygon(_) => TryInto::<Polygon<T>>::try_into(value)
                .map(|value| GeometryCollection(vec![value.into()])),
            Value::MultiPolygon(_) => TryInto::<MultiPolygon<T>>::try_into(value)
                .map(|value| GeometryCollection(vec![value.into()])),
            Value::GeometryCollection(geometry_collection) => Ok(geometry_collection
                .into_iter()
                .flat_map(from_geometry)
                .collect()),
        }
        .unwrap_or_else(|_| GeometryCollection::new())
    }
}
| true |
ee0066263cbce72541bf7070c33ba7d918f32be2
|
Rust
|
google/rust_icu
|
/rust_icu_uchar/src/lib.rs
|
UTF-8
| 4,484 | 2.671875 | 3 |
[
"Apache-2.0"
] |
permissive
|
// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! # ICU unicode character properties support
//!
//! NOTE: Only very partially supported. However, it is easy to add new
//! functionality, so if you want you can do that yourself, or you can report
//! missing functionality at <https://github.com/google/rust_icu/issues>.
//!
//! Since 1.0.2
use {
rust_icu_common as common,
rust_icu_sys as sys,
rust_icu_sys::versioned_function,
rust_icu_sys::*,
std::ffi,
};
/// Implements `u_charFromName`.
///
/// Looks up a code point by its Unicode character name (per `name_choice`).
///
/// # Errors
/// Fails if `name` contains an interior NUL byte or if ICU reports an error.
///
/// Since 1.0.2
pub fn from_name(name_choice: sys::UCharNameChoice, name: &str)
    -> Result<sys::UChar32, common::Error> {
    let mut status = common::Error::OK_CODE;
    let asciiz = ffi::CString::new(name)?;
    let result = unsafe {
        // Precondition check before handing `status` to the FFI call.
        assert!(common::Error::is_ok(status));
        versioned_function!(u_charFromName)(name_choice, asciiz.as_ptr(), &mut status)
    };
    common::Error::ok_or_warning(status)?;
    Ok(result)
}
/// Implements `u_charType`.
///
/// Returns the general category of the code point.
///
/// Since 1.0.2
pub fn char_type(c: sys::UChar32) -> sys::UCharCategory {
    let result = unsafe { versioned_function!(u_charType)(c) };
    result.into()
}
/// See <http://www.unicode.org/reports/tr44/#Canonical_Combining_Class_Values> for
/// the list of combining class values.
///
/// Implements `u_getCombiningClass`
///
/// Since 1.0.2
pub fn get_combining_class(c: sys::UChar32) -> u8 {
    unsafe { versioned_function!(u_getCombiningClass)(c) }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Table-driven tests: each `Test` pairs an input with its expected value.
    #[test]
    fn test_from_name() {
        struct Test {
            name_choice: sys::UCharNameChoice,
            name: &'static str,
            expected: sys::UChar32,
        }
        let tests = vec![
            Test {
                name_choice: sys::UCharNameChoice::U_UNICODE_CHAR_NAME,
                name: "LATIN CAPITAL LETTER A",
                expected: 'A' as sys::UChar32,
            },
            Test {
                name_choice: sys::UCharNameChoice::U_UNICODE_CHAR_NAME,
                name: "LATIN CAPITAL LETTER B",
                expected: 'B' as sys::UChar32,
            },
            Test {
                name_choice: sys::UCharNameChoice::U_UNICODE_CHAR_NAME,
                name: "LATIN SMALL LETTER C",
                expected: 'c' as sys::UChar32,
            },
            Test {
                name_choice: sys::UCharNameChoice::U_UNICODE_CHAR_NAME,
                name: "CJK RADICAL BOX",
                expected: '⺆' as sys::UChar32,
            },
        ];
        for test in tests {
            let result = from_name(test.name_choice, test.name).unwrap();
            assert_eq!(result, test.expected);
        }
    }
    #[test]
    fn test_char_type() {
        struct Test {
            ch: sys::UChar32,
            cat: sys::UCharCategory,
        }
        let tests = vec![
            Test {
                ch: 'A' as sys::UChar32,
                cat: sys::UCharCategory::U_UPPERCASE_LETTER,
            },
            Test {
                ch: 0x300, // A combining character.
                cat: sys::UCharCategory::U_NON_SPACING_MARK,
            },
        ];
        for test in tests {
            let result = char_type(test.ch);
            assert_eq!(result, test.cat);
        }
    }
    #[test]
    fn test_combining_class() {
        #[derive(Debug)]
        struct Test {
            ch: sys::UChar32,
            class: u8,
        }
        let tests = vec![
            Test {
                ch: 'A' as sys::UChar32,
                class: 0,
            },
            Test {
                ch: 0x300 as sys::UChar32,
                class: 230,
            },
            Test {
                ch: 0x301 as sys::UChar32,
                class: 230,
            },
        ];
        for test in tests {
            let result = get_combining_class(test.ch);
            assert_eq!(result, test.class, "test: {:?}", test);
        }
    }
}
| true |
d2ef3d71c1ea32b2c5c4d604a6860b294dce2507
|
Rust
|
yashwanthreddyg/Rust
|
/src/searching/mod.rs
|
UTF-8
| 1,446 | 3.609375 | 4 |
[] |
no_license
|
use std::cmp;
/// Binary search over an ascending sorted slice.
///
/// Returns the index of the *leftmost* element equal to `item`, or `-1`
/// when the item is not present.
///
/// Fixes two defects in the previous implementation: `arr.len() - 1`
/// panicked on an empty slice, and `right = mid - 1` underflowed `usize`
/// when the item was smaller than every element.
pub fn binary_search<T>(item: T, arr: &[T]) -> i32
where
    T: cmp::PartialEq + cmp::PartialOrd + Sized,
{
    // Half-open search interval [left, right); never needs `mid - 1`,
    // so it cannot underflow and handles the empty slice naturally.
    let mut left = 0;
    let mut right = arr.len();
    while left < right {
        let mid = left + (right - left) / 2;
        if arr[mid] < item {
            left = mid + 1;
        } else {
            right = mid;
        }
    }
    // `left` is now the insertion point; check it actually holds the item.
    if left < arr.len() && arr[left] == item {
        left as i32
    } else {
        -1
    }
}
/// Linear scan for `item`; returns its first index, or `-1` when absent.
pub fn linear_search<T>(item: T, arr: &[T]) -> i32
where
    T: cmp::PartialEq,
{
    // `Iterator::position` walks the slice front-to-back, exactly like the
    // manual index loop it replaces.
    match arr.iter().position(|candidate| *candidate == item) {
        Some(index) => index as i32,
        None => -1,
    }
}
#[cfg(test)]
mod tests {
    // Exercises every element position plus the not-found case.
    // NOTE(review): binary_search has no coverage in this module — worth adding.
    #[test]
    fn linear() {
        use searching;
        let index = searching::linear_search("a", &vec!["a", "b", "c", "d", "google", "zoo"]);
        assert_eq!(index, 0);
        let mut index = searching::linear_search(4, &vec![1, 2, 3, 4]);
        assert_eq!(index, 3);
        index = searching::linear_search(3, &vec![1, 2, 3, 4]);
        assert_eq!(index, 2);
        index = searching::linear_search(2, &vec![1, 2, 3, 4]);
        assert_eq!(index, 1);
        index = searching::linear_search(1, &vec![1, 2, 3, 4]);
        assert_eq!(index, 0);
        index = searching::linear_search(5, &vec![1, 2, 3, 4]);
        assert_eq!(index, -1);
    }
}
| true |
ff540b144dde9d87cdfb5b280933f5885534b623
|
Rust
|
hiro-o918/portfolio
|
/src/components/button.rs
|
UTF-8
| 1,089 | 3.046875 | 3 |
[
"MIT"
] |
permissive
|
use yew::prelude::*;
use yew::Properties;
/// Properties accepted by the `Button` component.
#[derive(Properties, Clone, PartialEq)]
pub struct Props {
    // Arbitrary children rendered inside the <button> element.
    pub children: Children,
    // Extra CSS classes appended after the base "button" class
    // (defaults look like Bulma modifiers — confirm against the stylesheet).
    #[prop_or(String::from("is-primary is-rounded"))]
    pub button_class: String,
    // Fired (with no payload) each time the button is clicked.
    #[prop_or_default]
    pub on_click: Callback<()>,
}
/// Internal messages for the `Button` component.
pub enum Msg {
    // The rendered <button> element was clicked.
    OnClick,
}
/// A clickable button component that re-emits clicks via `Props::on_click`.
pub struct Button {
    props: Props,
    link: ComponentLink<Self>,
}
impl Component for Button {
    type Message = Msg;
    type Properties = Props;

    /// Stores the initial props and the component link used to build callbacks.
    fn create(props: Self::Properties, link: ComponentLink<Self>) -> Self {
        // Field-init shorthand instead of the redundant `props: props` form.
        Self { props, link }
    }

    /// Forwards every click to the parent via the `on_click` callback.
    fn update(&mut self, msg: Self::Message) -> bool {
        match msg {
            Msg::OnClick => self.props.on_click.emit(()),
        }
        // Always request a re-render after handling a message.
        true
    }

    /// Re-renders only when the incoming props actually differ.
    fn change(&mut self, props: Self::Properties) -> bool {
        if self.props != props {
            self.props = props;
            true
        } else {
            false
        }
    }

    /// Renders the <button> element with the configured classes and children.
    fn view(&self) -> Html {
        html! {
            <button class=format!("button {}", self.props.button_class) onclick=self.link.callback(|_| Msg::OnClick)>
                { self.props.children.clone() }
            </button>
        }
    }
}
| true |
43538f0040c8b3f54f80b5eb333a8bcbdf44f94a
|
Rust
|
Noia/advent-of-code-2018
|
/04/part2.rs
|
UTF-8
| 1,822 | 2.65625 | 3 |
[] |
no_license
|
extern crate chrono;
extern crate guards;
use chrono::Timelike;
use guards::{GuardReport, GuardState, RecordFragment};
use std::collections::HashMap;
use std::fs::File;
use std::io::{BufRead, BufReader, Result};
fn main() {
    // guard id -> (minute most often asleep, how many nights asleep at that minute)
    let mut guards_at_bussiest_time: HashMap<u32, (u32, u32)> = HashMap::new();
    for record in read_input().unwrap().iter() {
        // minute-of-hour -> number of nights this guard was asleep at that minute
        let mut asleep_by_hour: HashMap<u32, u32> = HashMap::new();
        record
            .records
            .iter()
            .flat_map(|r| &r.events)
            .filter(|e| e.state == GuardState::ASLEEP)
            // Expand each asleep interval into its individual minutes
            // (intervals are assumed to stay within one hour — TODO confirm).
            .flat_map(|e| std::ops::Range {
                start: e.start.minute(),
                end: e.end.minute(),
            })
            .for_each(|r| *asleep_by_hour.entry(r).or_insert(0) += 1);
        // Pick this guard's single most-slept minute.
        let (max_asleep_hour, max_asleep_count) = asleep_by_hour
            .iter()
            .max_by(|(_, a), (_, b)| a.cmp(b))
            .unwrap()
            .clone();
        guards_at_bussiest_time.insert(record.guard_id, (*max_asleep_hour, *max_asleep_count));
    }
    // The guard who is most frequently asleep on the same minute wins.
    let (guard_id, (minute_of_sleep, num_of_sleep)) = guards_at_bussiest_time
        .iter()
        .max_by(|(_, (_, a)), (_, (_, b))| a.cmp(b))
        .unwrap()
        .clone();
    println!(
        "#{} was asleep at 00:{} {} times. Key {}",
        guard_id,
        minute_of_sleep,
        num_of_sleep,
        (guard_id * minute_of_sleep)
    );
}
/// Reads and parses the puzzle input into per-guard reports.
///
/// Returns an I/O error if `04/input.txt` cannot be opened; unreadable
/// lines are silently skipped (matching the original best-effort behavior).
fn read_input() -> Result<Vec<GuardReport>> {
    let file = BufReader::new(File::open("04/input.txt")?);
    // Parse each readable line into a record fragment.
    let records: Vec<RecordFragment> = file
        .lines()
        .filter_map(|result| result.ok())
        .map(|line| RecordFragment::new(&line))
        .collect();
    // Group fragments by date, then fold them into one report per guard.
    Ok(guards::to_guard_reports(guards::to_dated_guard_records(
        records,
    )))
}
| true |
a4ec317008a41b37fba1adf18351079a176f0e78
|
Rust
|
cleancoindev/gp-v2-services
|
/solver/src/naive_solver.rs
|
UTF-8
| 5,627 | 2.875 | 3 |
[] |
no_license
|
mod single_pair_settlement;
use self::single_pair_settlement::SinglePairSettlement;
use crate::settlement::Settlement;
use contracts::{GPv2Settlement, UniswapV2Router02};
use model::{
order::{OrderCreation, OrderKind},
TokenPair,
};
use primitive_types::U512;
use std::{cmp::Ordering, collections::HashMap};
/// Tries to build a settlement from the given orders.
///
/// Orders are grouped by token pair and the first pair that yields a
/// matchable opposing couple is settled; all other pairs are ignored
/// (see the TODO below). Returns `None` when no pair can be settled.
pub fn settle(
    orders: impl Iterator<Item = OrderCreation>,
    uniswap: &UniswapV2Router02,
    gpv2_settlement: &GPv2Settlement,
) -> Option<Settlement> {
    let orders = organize_orders_by_token_pair(orders);
    // TODO: Settle multiple token pairs in one settlement.
    orders
        .into_iter()
        .find_map(|(_, orders)| settle_pair(orders))
        .map(|settlement| settlement.into_settlement(uniswap.clone(), gpv2_settlement.clone()))
}
/// Matches the "most lenient" (lowest limit price) sell order from each side
/// of the pair against the other. Returns `None` when either side is empty
/// or the two chosen orders cannot be settled together.
fn settle_pair(orders: TokenPairOrders) -> Option<SinglePairSettlement> {
    // min_by with order_by_price picks the order demanding the least per unit sold.
    let most_lenient_a = orders.sell_token_0.into_iter().min_by(order_by_price)?;
    let most_lenient_b = orders.sell_token_1.into_iter().min_by(order_by_price)?;
    single_pair_settlement::settle_two_fillkill_sell_orders(&most_lenient_a, &most_lenient_b)
}
/// Orders for one token pair, bucketed by which side of the pair they sell.
#[derive(Debug, Default)]
struct TokenPairOrders {
    // Orders selling the pair's first token (as returned by `TokenPair::get().0`).
    sell_token_0: Vec<OrderCreation>,
    // Orders selling the pair's second token.
    sell_token_1: Vec<OrderCreation>,
}
/// Buckets usable orders by token pair and, within each pair, by the side
/// of the pair they sell. Orders without a valid token pair are dropped.
fn organize_orders_by_token_pair(
    orders: impl Iterator<Item = OrderCreation>,
) -> HashMap<TokenPair, TokenPairOrders> {
    let mut by_pair = HashMap::<_, TokenPairOrders>::new();
    // Keep only usable orders that map to a valid token pair.
    let candidates = orders
        .filter(usable_order)
        .filter_map(|order| order.token_pair().map(|pair| (order, pair)));
    for (order, pair) in candidates {
        let buckets = by_pair.entry(pair).or_default();
        // An order selling the pair's first token goes into bucket 0.
        let side = if order.sell_token == pair.get().0 {
            &mut buckets.sell_token_0
        } else {
            &mut buckets.sell_token_1
        };
        side.push(order);
    }
    by_pair
}
/// Whether the naive solver can work with this order: a fill-or-kill sell
/// order with nonzero amounts on both sides.
fn usable_order(order: &OrderCreation) -> bool {
    let is_sell = matches!(order.kind, OrderKind::Sell);
    is_sell
        && !order.partially_fillable
        && !order.sell_amount.is_zero()
        && !order.buy_amount.is_zero()
}
/// Orders two sell orders by their limit price (`buy_amount / sell_amount`),
/// cheapest (most lenient) first.
fn order_by_price(a: &OrderCreation, b: &OrderCreation) -> Ordering {
    // The natural ordering is `a.buy_amount / a.sell_amount < b.buy_amount / b.sell_amount`
    // which we can transform to `a.buy_amount * b.sell_amount < b.buy_amount * a.sell_amount` to
    // avoid division. Multiply in u512 to avoid overflow.
    let left = U512::from(a.buy_amount) * U512::from(b.sell_amount);
    let right = U512::from(b.buy_amount) * U512::from(a.sell_amount);
    left.cmp(&right)
}
#[cfg(test)]
mod tests {
use super::*;
use crate::interactions::dummy_web3;
use primitive_types::{H160, U256};
fn order_with_amounts(sell_amount: U256, buy_amount: U256) -> OrderCreation {
OrderCreation {
sell_amount,
buy_amount,
..Default::default()
}
}
#[test]
fn order_by_price_() {
let right = &order_with_amounts(10.into(), 10.into());
let left = &order_with_amounts(10.into(), 10.into());
assert_eq!(order_by_price(&left, &right), Ordering::Equal);
let left = &order_with_amounts(9.into(), 9.into());
assert_eq!(order_by_price(&left, &right), Ordering::Equal);
let left = &order_with_amounts(9.into(), 10.into());
assert_eq!(order_by_price(&left, &right), Ordering::Greater);
let left = &order_with_amounts(10.into(), 11.into());
assert_eq!(order_by_price(&left, &right), Ordering::Greater);
let left = &order_with_amounts(10.into(), 9.into());
assert_eq!(order_by_price(&left, &right), Ordering::Less);
let left = &order_with_amounts(11.into(), 10.into());
assert_eq!(order_by_price(&left, &right), Ordering::Less);
}
#[test]
fn settle_finds_match() {
let orders = vec![
OrderCreation {
sell_token: H160::from_low_u64_be(0),
buy_token: H160::from_low_u64_be(1),
sell_amount: 4.into(),
buy_amount: 9.into(),
kind: OrderKind::Sell,
partially_fillable: false,
..Default::default()
},
OrderCreation {
sell_token: H160::from_low_u64_be(0),
buy_token: H160::from_low_u64_be(1),
sell_amount: 4.into(),
buy_amount: 8.into(),
kind: OrderKind::Sell,
partially_fillable: false,
..Default::default()
},
OrderCreation {
sell_token: H160::from_low_u64_be(1),
buy_token: H160::from_low_u64_be(0),
sell_amount: 10.into(),
buy_amount: 11.into(),
kind: OrderKind::Sell,
partially_fillable: false,
..Default::default()
},
OrderCreation {
sell_token: H160::from_low_u64_be(1),
buy_token: H160::from_low_u64_be(0),
sell_amount: 6.into(),
buy_amount: 2.into(),
kind: OrderKind::Sell,
partially_fillable: false,
..Default::default()
},
];
let contract = UniswapV2Router02::at(&dummy_web3::dummy_web3(), H160::zero());
let settlement = GPv2Settlement::at(&dummy_web3::dummy_web3(), H160::zero());
let settlement = settle(orders.into_iter(), &contract, &settlement).unwrap();
dbg!(&settlement);
assert_eq!(settlement.trades.len(), 2);
assert_eq!(settlement.interactions.len(), 1);
}
}
| true |
050c9cc88b6e58944767a3f5d18484b7f26f14db
|
Rust
|
thepowersgang/rust_os
|
/Kernel/Core/futures/simple_waiter.rs
|
UTF-8
| 3,662 | 2.625 | 3 |
[
"BSD-2-Clause"
] |
permissive
|
//!
//!
use core::sync::atomic::{AtomicUsize,Ordering};
use core::task;
use crate::sync::EventChannel;
/// A waiter pairing a `RawWaker` with a sleep primitive, used to block the
/// current thread until a future wakes it.
pub struct SimpleWaiter
{
    inner: SimpleWaiterRef,
}
impl SimpleWaiter
{
    pub fn new() -> SimpleWaiter {
        SimpleWaiter {
            // Prefer a slot from the static pool (no allocation); fall back
            // to a heap-allocated Arc when all pool slots are busy.
            inner: if let Some(v) = PooledHandle::acquire() {
                SimpleWaiterRef::Pooled(v)
            }
            else {
                SimpleWaiterRef::Owned(::alloc::sync::Arc::new(Inner::new()))
            },
        }
    }
    /// Blocks the current thread until the paired waker is triggered.
    pub fn sleep(&self) {
        self.inner.sleep();
    }
    /// Creates a `RawWaker` that posts to this waiter's event channel.
    pub fn raw_waker(&self) -> task::RawWaker {
        self.inner.raw_waker()
    }
}
/// Actual inner data
struct Inner
{
    // Number of outstanding handles/wakers referencing this instance; only
    // meaningful for pooled slots (0 means the slot is free).
    ref_count: AtomicUsize,
    // Sleep/wake primitive that wakers post to.
    ec: EventChannel,
}
impl Inner
{
    const fn new() -> Inner {
        Inner {
            ref_count: AtomicUsize::new(0),
            ec: EventChannel::new(),
        }
    }
    /// Blocks until the event channel is posted.
    fn sleep(&self) {
        //log_trace!("sleep({:p})", self);
        self.ec.sleep();
    }
    /// RawWaker `wake_by_ref` entry point: posts the event channel.
    /// Caller must pass a pointer that really is a live `*const Inner`.
    unsafe fn rw_wake_by_ref(raw_self: *const ()) {
        let v = &*(raw_self as *const Self);
        v.ec.post();
    }
}
// TODO: Make a pool of waiters, so they can outlive the stack frame.
// Lock serialising slot acquisition; the per-slot CAS alone would suffice,
// but the lock keeps acquisition simple — NOTE(review): possibly redundant.
static WAITER_LOCK: crate::sync::Spinlock<()> = crate::sync::Spinlock::new( () );
// Fixed pool of 8 statically-allocated waiter slots.
static WAITER_POOL: [Inner; 8] = [
    Inner::new(), Inner::new(), Inner::new(), Inner::new(),
    Inner::new(), Inner::new(), Inner::new(), Inner::new(),
    ];
/// Reference-counted handle to a slot in `WAITER_POOL`.
struct PooledHandle(&'static Inner);
impl PooledHandle {
    /// Claims a free pool slot (ref_count 0 -> 1); `None` when all slots are busy.
    fn acquire() -> Option<Self> {
        let _lh = WAITER_LOCK.lock();
        for v in WAITER_POOL.iter() {
            if v.ref_count.compare_exchange(0, 1, Ordering::SeqCst, Ordering::SeqCst) == Ok(0) {
                return Some(PooledHandle(v));
            }
        }
        None
    }
}
impl ::core::clone::Clone for PooledHandle {
    fn clone(&self) -> Self {
        self.0.ref_count.fetch_add(1, Ordering::SeqCst);
        Self(self.0)
    }
}
impl ::core::ops::Drop for PooledHandle {
    fn drop(&mut self) {
        // Dropping the last handle returns the slot to the pool (count -> 0).
        self.0.ref_count.fetch_sub(1, Ordering::SeqCst);
    }
}
/// Backing storage for a `SimpleWaiter`: a static pool slot or a heap Arc.
enum SimpleWaiterRef {
    Owned(::alloc::sync::Arc<Inner>),
    Pooled(PooledHandle),
}
impl SimpleWaiterRef {
    fn sleep(&self) {
        match self
        {
        SimpleWaiterRef::Owned(v) => v.sleep(),
        SimpleWaiterRef::Pooled(v) => v.0.sleep(),
        }
    }
    /// Builds a `RawWaker` that keeps the backing `Inner` alive for as long
    /// as the waker exists: the Owned variant piggybacks on the Arc strong
    /// count, the Pooled variant on the slot's atomic `ref_count`.
    fn raw_waker(&self) -> task::RawWaker {
        match self
        {
        SimpleWaiterRef::Owned(v) => {
            use ::alloc::sync::Arc;
            // Converts an owned Arc into a RawWaker; the reference taken
            // here is released again in `rw_drop`.
            fn make(v: Arc<Inner>) -> task::RawWaker {
                static VTABLE: task::RawWakerVTable = task::RawWakerVTable::new(
                    /*clone:*/ rw_clone,
                    // SAFE: Contract of RawWakerVTable
                    /*wake:*/ |v| unsafe { Inner::rw_wake_by_ref(v); rw_drop(v) },
                    /*wake_by_ref:*/ Inner::rw_wake_by_ref,
                    /*drop:*/ rw_drop,
                    );
                let v = Arc::into_raw(v);
                task::RawWaker::new(v as *const (), &VTABLE)
            }
            unsafe fn rw_clone(raw_self: *const ()) -> task::RawWaker {
                let raw_self = raw_self as *const Inner;
                // Reconstitute the Arc, then bump the strong count so the
                // caller's pointer stays valid after `make` re-leaks `r`.
                let r = Arc::from_raw(raw_self);
                Arc::increment_strong_count(raw_self); // Effectively clone
                make(r)
            }
            unsafe fn rw_drop(raw_self: *const ()) {
                // Re-own and drop exactly one Arc reference.
                Arc::from_raw(raw_self as *const Inner);
            }
            make(v.clone())
            },
        SimpleWaiterRef::Pooled(v) => {
            // Pool slots are 'static, so "ownership" is just the slot's
            // atomic ref_count; the vtable mirrors the Owned variant.
            fn make(v: &'static Inner) -> task::RawWaker {
                static VTABLE: task::RawWakerVTable = task::RawWakerVTable::new(
                    /*clone:*/ rw_clone,
                    // SAFE: Contract of RawWakerVTable
                    /*wake:*/ |v| unsafe { Inner::rw_wake_by_ref(v); rw_drop(v) },
                    /*wake_by_ref:*/ Inner::rw_wake_by_ref,
                    /*drop:*/ rw_drop,
                    );
                v.ref_count.fetch_add(1, Ordering::SeqCst);
                task::RawWaker::new(v as *const _ as *const (), &VTABLE)
            }
            unsafe fn rw_clone(raw_self: *const ()) -> task::RawWaker {
                make( &*(raw_self as *const Inner) )
            }
            unsafe fn rw_drop(raw_self: *const ()) {
                let v = &*(raw_self as *const Inner);
                v.ref_count.fetch_sub(1, Ordering::SeqCst);
            }
            make(v.0)
            },
        }
    }
}
| true |
6051b1c9eed874631cb3909345aeab799713dc17
|
Rust
|
igaryhe/cg
|
/assignment_3/src/object.rs
|
UTF-8
| 3,334 | 3.140625 | 3 |
[] |
no_license
|
use crate::structure::*;
use glam::{Vec3, Mat3};
use serde::{Serialize, Deserialize};
use nalgebra::{Matrix3, Vector3};
use mint::ColumnMatrix3;
/// Anything a ray can hit. Implementors must be `Sync` so scenes can be
/// traced from multiple threads.
#[typetag::serde(tag = "type")]
pub trait Object: Sync {
    // Surface material used for shading.
    fn material(&self) -> Material;
    // Intersection of `ray` with this object, if any.
    fn intersect(&self, ray: &Ray) -> Option<Intersection>;
    // Moves the object to `pos`.
    fn set_position(&mut self, pos: Vec3);
}
/// A sphere described by its center and radius.
#[derive(Serialize, Deserialize)]
pub struct Sphere {
    material: Material,
    // Center of the sphere in world space.
    position: Vec3,
    radius: f32,
}
#[typetag::serde]
impl Object for Sphere {
    fn material(&self) -> Material {
        self.material
    }

    /// Ray/sphere intersection via the quadratic formula.
    ///
    /// Solves |o + t*d - c|^2 = r^2 for t and returns the nearest hit with
    /// t >= 0 (hits behind the ray origin are ignored), or `None` when the
    /// ray misses the sphere.
    fn intersect(&self, ray: &Ray) -> Option<Intersection> {
        let offset = ray.origin - self.position;
        let a = ray.direction.dot(ray.direction);
        let b = 2.0 * offset.dot(ray.direction);
        let c = offset.dot(offset) - self.radius.powi(2);
        let delta = b.powi(2) - 4.0 * a * c;
        if delta < 0.0 {
            return None;
        }
        // The two candidate ray parameters (equal when delta == 0, which
        // removes the need for a separate tangent branch).
        let t1 = (-b + delta.sqrt()) / (2.0 * a);
        let t2 = (-b - delta.sqrt()) / (2.0 * a);
        // Pick the closest intersection in front of the ray origin. The
        // previous `t1.min(t2)` chose a point *behind* the origin whenever
        // the origin lies inside the sphere; take the smallest t >= 0 instead.
        let t = match (t1 >= 0.0, t2 >= 0.0) {
            (true, true) => t1.min(t2),
            (true, false) => t1,
            (false, true) => t2,
            (false, false) => return None,
        };
        let position = ray.origin + t * ray.direction;
        let normal = (position - self.position).normalize();
        Some(Intersection {
            position,
            normal,
            // TODO: propagate t; the original code always stored 0.0 here.
            ray_param: 0.0,
        })
    }

    fn set_position(&mut self, pos: Vec3) {
        self.position = pos
    }
}
/// A parallelogram defined by a corner point and two edge vectors.
#[derive(Serialize, Deserialize)]
pub struct Parallelogram {
    material: Material,
    // Corner of the parallelogram.
    origin: Vec3,
    // Edge vectors spanning the surface from `origin`.
    u: Vec3,
    v: Vec3,
}
#[typetag::serde]
impl Object for Parallelogram {
    fn material(&self) -> Material {
        self.material
    }

    /// Ray/parallelogram intersection.
    ///
    /// Solves `origin + a*u + b*v = ray.origin + t*dir` as a 3x3 linear
    /// system; the hit is valid when the surface coordinates lie inside the
    /// parallelogram (0 <= a, b <= 1) and it is in front of the ray (t >= 0).
    fn intersect(&self, ray: &Ray) -> Option<Intersection> {
        let a = Mat3::from_cols(self.u, self.v, -ray.direction);
        let a = Matrix3::from(ColumnMatrix3::from(a));
        let decomp = a.lu();
        let b = ray.origin - self.origin;
        let b = Vector3::from(mint::Vector3::from(b));
        // The system is singular when the ray is parallel to the plane; the
        // previous `unwrap()` panicked here instead of reporting a miss.
        let x = decomp.solve(&b)?;
        if x[0] >= 0.0 && x[0] <= 1.0 && x[1] >= 0.0 && x[1] <= 1.0 && x[2] >= 0.0 {
            let position = ray.origin + x[2] * ray.direction;
            let normal = self.v.cross(self.u).normalize();
            Some(Intersection {
                position,
                normal,
                // TODO: propagate x[2]; the original always stored 0.0 here.
                ray_param: 0.0,
            })
        } else {
            None
        }
    }

    fn set_position(&mut self, pos: Vec3) {
        self.origin = pos
    }
}
| true |
8dbf3e250a21976f4ad2bff15dcee42da8be448b
|
Rust
|
isgasho/fluvio
|
/cli/src/common/sc/query_composition.rs
|
UTF-8
| 1,105 | 2.5625 | 3 |
[
"Apache-2.0"
] |
permissive
|
//!
//! # Kafka -- Query Topic Composition
//!
//! Query topic composition including replicas and SPUs
//!
use log::trace;
use sc_api::apis::ScApiKey;
use sc_api::topic::{FlvTopicCompositionRequest, FlvTopicCompositionResponse};
use sc_api::versions::ApiVersions;
use crate::error::CliError;
use crate::common::Connection;
use crate::common::sc_lookup_version;
/// Connect to server, get version, and for topic composition: Replicas and SPUs
///
/// Sends a `FlvTopicCompositionRequest` for a single topic over the existing
/// connection, using the API version negotiated for `FlvTopicComposition`.
pub async fn sc_get_topic_composition<'a>(
    conn: &'a mut Connection,
    topic: String,
    versions: &'a ApiVersions,
) -> Result<FlvTopicCompositionResponse, CliError> {
    let mut request = FlvTopicCompositionRequest::default();
    // Pick the request version the server advertised for this API key.
    let version = sc_lookup_version(ScApiKey::FlvTopicComposition, &versions);
    request.topic_names = vec![topic];
    trace!(
        "topic composition req '{}': {:#?}",
        conn.server_addr(),
        request
    );
    let response = conn.send_request(request, version).await?;
    trace!(
        "topic composition res '{}': {:#?}",
        conn.server_addr(),
        response
    );
    Ok(response)
}
| true |
5bf4832b424713c144540dc985d3d7b204fbd343
|
Rust
|
szarykott/miau
|
/tests/deserialization_tests.rs
|
UTF-8
| 11,664 | 3.125 | 3 |
[
"MIT"
] |
permissive
|
use miau::{
builder::ConfigurationBuilder,
configuration::{Configuration, ConfigurationTree},
error::ErrorCode,
format::Json,
source::InMemorySource,
};
use serde::Deserialize;
use std::collections::HashMap;
#[test]
fn test_deserialization_all_simple_types() {
#[derive(Deserialize)]
struct Config {
integer64: i64,
integer32: i32,
integer16: i16,
integer8: i8,
uinteger64: u64,
uinteger32: u32,
uinteger16: u16,
uinteger8: u8,
boolean: bool,
string_owned: String,
float32: f32,
float64: f64,
unit: (),
character: char,
}
let config_str = serde_json::json!({
"integer64": 63,
"integer32": 31,
"integer16": 15,
"integer8": 7,
"uinteger64": 63,
"uinteger32": 31,
"uinteger16": 15,
"uinteger8": 7,
"boolean" : true,
"string_owned" : "owned",
"float32" : 1.1,
"float64" : 1.2,
"unit" : null,
"character" : "a"
})
.to_string();
let root = serde_json::from_str::<Configuration>(&config_str).unwrap();
let config = root.try_convert_into::<Config>().unwrap();
assert_eq!(63, config.integer64);
assert_eq!(31, config.integer32);
assert_eq!(15, config.integer16);
assert_eq!(7, config.integer8);
assert_eq!(63, config.uinteger64);
assert_eq!(31, config.uinteger32);
assert_eq!(15, config.uinteger16);
assert_eq!(7, config.uinteger8);
assert_eq!(true, config.boolean);
assert_eq!("owned".to_string(), config.string_owned);
assert_eq!(1.1, config.float32);
assert_eq!(1.2, config.float64);
assert_eq!((), config.unit);
assert_eq!('a', config.character);
}
#[test]
fn test_error_when_deserializing_internal_struct_fails() {
#[derive(Deserialize, Debug)]
struct Config {
some_integer_field: u32,
}
let cfg_str = serde_json::json!({
"these_are_not_the_droids_you_are_looking_for" : "string"
})
.to_string();
let root = serde_json::from_str::<Configuration>(&cfg_str).unwrap();
let error = root.try_convert_into::<Config>().unwrap_err();
assert!(std::matches!(
error.get_code(),
ErrorCode::DeserializationError(..)
));
let error_stringified = error.to_string();
assert!(error_stringified.contains("some_integer_field"));
assert!(error_stringified.contains(&format!("{}", std::any::type_name::<Config>())));
}
#[test]
fn test_error_when_deserializing_char_longer_than_one() {
#[derive(Deserialize, Debug)]
struct Config {
character: char,
};
let json = r#"{ "character" : "longer" }"#;
let root = serde_json::from_str::<ConfigurationTree>(&json).unwrap();
let error = root.try_convert_into::<Config>().unwrap_err();
assert!(std::matches!(
error.get_code(),
ErrorCode::DeserializationError(..)
));
assert!(error.to_string().contains("expected string of length 1"));
}
#[test]
fn test_error_when_deserializing_external_source_fails() {
let cfg_str = r#" this is not json asdas1211/// "#;
let mut builder = ConfigurationBuilder::default();
builder.add(InMemorySource::from_string_slice(cfg_str), Json::default());
let error = builder.build().unwrap_err();
assert!(std::matches!(
error.get_code(),
ErrorCode::DeserializationError(..)
));
}
#[test]
fn test_deserialization_struct_with_map() {
#[derive(Deserialize)]
struct Config {
inner: ConfigInner,
}
#[derive(Deserialize)]
struct ConfigInner {
value: i32,
}
let config_str = serde_json::json!({
"inner": {
"value" : 42
},
})
.to_string();
let root = serde_json::from_str::<Configuration>(&config_str).unwrap();
let config = root.try_convert_into::<Config>().unwrap();
assert_eq!(42, config.inner.value);
}
#[test]
fn test_deserialization_struct_with_array() {
#[derive(Deserialize)]
struct Config {
inner: Vec<i32>,
}
let config_str = serde_json::json!({
"inner": [1, 2, 3]
})
.to_string();
let root = serde_json::from_str::<Configuration>(&config_str).unwrap();
let config = root.try_convert_into::<Config>().unwrap();
assert!(vec![1, 2, 3].iter().eq(config.inner.iter()));
}
#[test]
fn test_deserialization_struct_with_array_of_structs() {
#[derive(Deserialize)]
struct Config {
inner: Vec<ConfigInner>,
}
#[derive(Deserialize, PartialEq)]
struct ConfigInner {
value: i32,
}
let config_str = serde_json::json!({
"inner": [
{"value" : 1},
{"value" : 2},
{"value" : 3},
]
})
.to_string();
let root = serde_json::from_str::<Configuration>(&config_str).unwrap();
let config = root.try_convert_into::<Config>().unwrap();
assert!(vec![
ConfigInner { value: 1 },
ConfigInner { value: 2 },
ConfigInner { value: 3 }
]
.iter()
.eq(config.inner.iter()));
}
#[test]
fn test_deserialization_struct_with_array_of_structs_transparent() {
#[derive(Deserialize)]
struct Config {
inner: Vec<ConfigInner>,
}
#[derive(Deserialize, PartialEq)]
#[serde(transparent)]
struct ConfigInner {
value: i32,
}
let config_str = serde_json::json!({
"inner": [
1, 2, 3
]
})
.to_string();
let root = serde_json::from_str::<Configuration>(&config_str).unwrap();
let config = root.try_convert_into::<Config>().unwrap();
assert!(vec![
ConfigInner { value: 1 },
ConfigInner { value: 2 },
ConfigInner { value: 3 }
]
.iter()
.eq(config.inner.iter()));
}
#[test]
fn test_deserialization_struct_with_hashmap() {
#[derive(Deserialize)]
struct Config {
inner: HashMap<String, i32>,
}
let config_str = serde_json::json!({
"inner": {
"a" : 1,
"b" : 2
}
})
.to_string();
let root = serde_json::from_str::<Configuration>(&config_str).unwrap();
let config = root.try_convert_into::<Config>().unwrap();
assert_eq!(Some(&1), config.inner.get("a"));
assert_eq!(Some(&2), config.inner.get("b"));
assert_eq!(None, config.inner.get("c"));
}
#[test]
fn test_deserialization_struct_with_hashmap_string_values() {
#[derive(Deserialize)]
struct Config {
inner: HashMap<String, String>,
}
let config_str = serde_json::json!({
"inner": {
"a" : "a",
"b" : "b"
}
})
.to_string();
let root = serde_json::from_str::<Configuration>(&config_str).unwrap();
let config = root.try_convert_into::<Config>().unwrap();
assert_eq!(Some(&"a".to_string()), config.inner.get("a"));
assert_eq!(Some(&"b".to_string()), config.inner.get("b"));
assert_eq!(None, config.inner.get("c"));
}
#[derive(Deserialize, PartialEq, Debug)]
enum DaEnum {
Unit,
Newtype(i32),
Tuple(i32, i32),
Structo { value: i32 },
}
#[derive(Deserialize, PartialEq, Debug)]
#[serde(untagged)]
enum DaEnumUntagged {
Unit,
Newtype(f32),
Tuple(f32, i32),
Structo { value: i32 },
}
#[test]
fn test_deserialization_enum_unit_variant() {
#[derive(Deserialize)]
struct Config {
enumeration: DaEnum,
}
let config_str = serde_json::json!({
"enumeration": "Unit",
})
.to_string();
let root = serde_json::from_str::<Configuration>(&config_str).unwrap();
let config = root.try_convert_into::<Config>().unwrap();
assert_eq!(DaEnum::Unit, config.enumeration);
}
#[test]
fn test_deserialization_enum_unit_variant_untagged() {
#[derive(Deserialize)]
struct Config {
enumeration: DaEnumUntagged,
}
let config_str = serde_json::json!({
"enumeration": null,
})
.to_string();
let root = serde_json::from_str::<Configuration>(&config_str).unwrap();
let config = root.try_convert_into::<Config>().unwrap();
assert_eq!(DaEnumUntagged::Unit, config.enumeration);
}
#[test]
fn test_deserialization_enum_newtype_variant() {
#[derive(Deserialize)]
struct Config {
enumeration: DaEnum,
}
let config_str = serde_json::json!({
"enumeration": {
"Newtype" : 42
},
})
.to_string();
let root = serde_json::from_str::<Configuration>(&config_str).unwrap();
let config = root.try_convert_into::<Config>().unwrap();
assert_eq!(DaEnum::Newtype(42i32), config.enumeration);
}
#[test]
fn test_deserialization_enum_newtype_variant_untagged() {
#[derive(Deserialize)]
struct Config {
enumeration: DaEnumUntagged,
}
let config_str = serde_json::json!({
"enumeration": 42,
})
.to_string();
let root = serde_json::from_str::<Configuration>(&config_str).unwrap();
let config = root.try_convert_into::<Config>().unwrap();
assert_eq!(DaEnumUntagged::Newtype(42f32), config.enumeration);
}
#[test]
fn test_deserialization_enum_tuple_variant() {
#[derive(Deserialize)]
struct Config {
enumeration: DaEnum,
}
let config_str = serde_json::json!({
"enumeration": {
"Tuple" : [1, 2]
},
})
.to_string();
let root = serde_json::from_str::<Configuration>(&config_str).unwrap();
let config = root.try_convert_into::<Config>().unwrap();
assert_eq!(DaEnum::Tuple(1, 2), config.enumeration);
}
#[test]
fn test_deserialization_enum_tuple_variant_untagged() {
#[derive(Deserialize)]
struct Config {
enumeration: DaEnumUntagged,
}
let config_str = serde_json::json!({
"enumeration": [1, 2],
})
.to_string();
let root = serde_json::from_str::<Configuration>(&config_str).unwrap();
let config = root.try_convert_into::<Config>().unwrap();
assert_eq!(DaEnumUntagged::Tuple(1f32, 2), config.enumeration);
}
#[test]
fn test_deserialization_enum_struct_variant() {
#[derive(Deserialize)]
struct Config {
enumeration: DaEnum,
}
let config_str = serde_json::json!({
"enumeration": {
"Structo" : {
"value" : 3
}
},
})
.to_string();
let root = serde_json::from_str::<Configuration>(&config_str).unwrap();
let config = root.try_convert_into::<Config>().unwrap();
assert_eq!(DaEnum::Structo { value: 3 }, config.enumeration);
}
#[test]
fn test_deserialization_enum_struct_variant_untagged() {
#[derive(Deserialize)]
struct Config {
enumeration: DaEnumUntagged,
}
let config_str = serde_json::json!({
"enumeration": {
"value" : 3
},
})
.to_string();
let root = serde_json::from_str::<Configuration>(&config_str).unwrap();
let config = root.try_convert_into::<Config>().unwrap();
assert_eq!(DaEnumUntagged::Structo { value: 3 }, config.enumeration);
}
#[test]
fn test_deserialization_option() {
#[derive(Deserialize)]
struct Config {
some: Option<f64>,
none: Option<i16>, // value will be null
none2: Option<i16>, // value will be missing
}
let config_str = serde_json::json!({
"some": 3,
"none": null
})
.to_string();
let root = serde_json::from_str::<Configuration>(&config_str).unwrap();
let config = root.try_convert_into::<Config>().unwrap();
assert_eq!(Some(3f64), config.some);
assert_eq!(None, config.none);
assert_eq!(None, config.none2);
}
| true |
7f2cf18f9aeba3c5f009e0fe247cb9e1ecbe918b
|
Rust
|
magurotuna/atcoder-submissions
|
/abc136/src/bin/d.rs
|
UTF-8
| 2,183 | 2.890625 | 3 |
[] |
no_license
|
use libprocon::*;
fn main() {
    input! {
        S: String,
    }
    let chars = S.chars().collect::<Vec<_>>();
    let chars_len = chars.len();
    // Run-length encode the string.
    // rl[i] := length of the i-th run: a run of 'R' when i is even, a run of
    // 'L' when i is odd (the input starts with R and ends with L, which is
    // what the even-length assert below relies on).
    let mut rl = Vec::with_capacity(chars.len());
    let mut cur_c = chars[0];
    let mut cur_len = 1;
    for c in chars.into_iter().skip(1) {
        if cur_c == c {
            cur_len += 1;
            continue;
        } else {
            cur_c = c;
            rl.push(cur_len);
            cur_len = 1;
        }
    }
    rl.push(cur_len);
    assert!(rl.len() % 2 == 0);
    // NOTE(review): leftover debug output — prints to stderr on every run.
    dbg!(&rl);
    let mut ans_vec = Vec::with_capacity(chars_len);
    // Process one R-run/L-run pair at a time; everyone in the pair ends up
    // on the two boundary cells between the runs.
    for i in (0..(rl.len() - 1)).step_by(2) {
        let r_num = rl[i];
        let l_num = rl[i + 1];
        // All R cells except the last one empty out.
        for _ in 0..(r_num - 1) {
            ans_vec.push(0);
        }
        if (r_num + l_num) % 2 == 0 {
            // Even total: the two boundary cells split the people evenly.
            ans_vec.push((r_num + l_num) / 2);
            ans_vec.push((r_num + l_num) / 2);
        } else {
            // Odd total: once the system reaches its steady state (after
            // max(r, l) - 1 moves) one boundary cell holds ceil((r + l) / 2)
            // people and the other floor((r + l) / 2); which cell gets which
            // depends on whether r or l is larger. If max(r, l) - 1 is even,
            // the counts after 10^100 moves are exactly these; if it is odd,
            // the two boundary cells have swapped.
            let ceil = (r_num + l_num + 1) / 2;
            let floor = (r_num + l_num) / 2;
            use std::cmp::max;
            if (max(r_num, l_num) - 1) % 2 == 0 {
                ans_vec.push(if r_num > l_num { ceil } else { floor });
                ans_vec.push(if r_num > l_num { floor } else { ceil });
            } else {
                ans_vec.push(if r_num > l_num { floor } else { ceil });
                ans_vec.push(if r_num > l_num { ceil } else { floor });
            }
        }
        // All L cells except the first one empty out.
        for _ in 0..(l_num - 1) {
            ans_vec.push(0);
        }
    }
    println!(
        "{}",
        ans_vec
            .into_iter()
            .map(|x| x.to_string())
            .collect::<Vec<_>>()
            .join(" ")
    );
}
| true |
a3c8a0f6116f7621f63f8a19a4560331bc2dd42b
|
Rust
|
kohbis/leetcode
|
/algorithms/1464.maximum-product-of-two-elements-in-an-array/solution.rs
|
UTF-8
| 381 | 2.859375 | 3 |
[] |
no_license
|
impl Solution {
    /// LeetCode 1464: choose indices i != j maximizing (nums[i]-1)*(nums[j]-1),
    /// i.e. the product of the two largest values, each reduced by one.
    pub fn max_product(nums: Vec<i32>) -> i32 {
        // Track the two largest values in a single pass with a fold.
        let (largest, runner_up) = nums.iter().fold((0, 0), |(hi, lo), &n| {
            if n > hi {
                (n, hi)
            } else if n > lo {
                (hi, n)
            } else {
                (hi, lo)
            }
        });
        (largest - 1) * (runner_up - 1)
    }
}
| true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.