blob_id
stringlengths 40
40
| language
stringclasses 1
value | repo_name
stringlengths 5
140
| path
stringlengths 5
183
| src_encoding
stringclasses 6
values | length_bytes
int64 12
5.32M
| score
float64 2.52
4.94
| int_score
int64 3
5
| detected_licenses
listlengths 0
47
| license_type
stringclasses 2
values | text
stringlengths 12
5.32M
| download_success
bool 1
class |
---|---|---|---|---|---|---|---|---|---|---|---|
3c3f5298bf09ebdb287e7b369527b822558198cc
|
Rust
|
wshatch/lisp_compiler
|
/src/nom_parse_funcs.rs
|
UTF-8
| 3,310 | 3.078125 | 3 |
[] |
no_license
|
#![allow(dead_code)]
use nom::{digit, multispace};
use std::rc::Rc;
use token::{Token, LispyVal, LispyRet};
/*
number : [0-9]
symbol : number | alpha | operator | comparator
comparator : < | > | == | >= | <= | != | & | \|
operator : '+' | '-' | '*' | '/'
expr : <symbol> | <operator> | <sexpr>
sexpr : '(' <expr> * ')'
qexpr : '"' <expr> * '"' //TODO: replace this with a lispier macro language
*/
// Lexes an input byte string into a flat token stream.
// many0! applies the alternatives repeatedly until none matches;
// it never fails, and any unconsumed input is returned to the caller.
// (nom pre-1.0 macro DSL.)
named!(pub tokenize< Vec<Token> >,
    many0!(
        alt!(operator | number | tokenize_beginsexpr | tokenize_endsexpr)
    )
);
// Lexes one arithmetic operator token ('+', '-', '/', '*'), skipping
// any leading whitespace. In nom's chain! DSL, `~` sequences parsers
// and the trailing closure builds the resulting Token.
named!(pub operator<&[u8], Token>, chain!(
    opt!(multispace) ~
    val: alt!(tag!("+") | tag!("-") | tag!("/") | tag!("*")),
    || {
        Token::build_operator(val)
    }
));
// Lexes one unsigned integer token (a run of ASCII digits), skipping
// any leading whitespace; the matched bytes are handed to
// Token::build_number.
named!(pub number<&[u8], Token>, chain!(
    opt!(multispace) ~
    val: digit,
    || {
        Token::build_number(val)
    }
));
// Lexes '(' as Token::BeginSexpr, skipping leading whitespace.
// NOTE(review): the `val:` binding is never used (BeginSexpr carries
// no payload) — this likely emits an unused-variable warning; confirm
// whether the binding can be dropped from the chain!.
named!(pub tokenize_beginsexpr<&[u8], Token>, chain!(
    opt!(multispace) ~
    val: tag!("("),
    || {
        Token::BeginSexpr
    }
));
// Lexes ')' as Token::EndSexpr, skipping leading whitespace.
// NOTE(review): as with tokenize_beginsexpr, the `val:` binding is
// unused.
named!(pub tokenize_endsexpr<&[u8], Token>, chain!(
    opt!(multispace) ~
    val: tag!(")"),
    || {
        Token::EndSexpr
    }
));
/*
named!(pub tokenize_comparator<&[u8], Token>, chain!(
opt!(multispace) ~
val: comparator,
|| {
Token::Comparator(val)
}
));
named!(pub terminal, alt!(digit | operation | comparator | multispace));
named!(pub operation, alt!(tag!("+") | tag!("-") | tag!("*") | tag!("/")));
named!(pub comparator, alt!(tag!("<") | tag!(">") | tag!("==") | tag!("<=") | tag!(">=") | tag!("&") | tag!("|")));
named!(string, delimited!(char!('"'), is_not!("\""), char!('"')));
*/
#[cfg(test)]
mod test{
    use nom::IResult;
    use nom::IResult::*;
    use token::Token;
    // NOTE(review): every test below is commented out. They reference
    // token shapes (Token::Operator/Number with byte-slice payloads,
    // Token::BeginSexpr/EndSexpr with payloads, Token::Comparator) that
    // do not all match the tokenizers above, so they would need updating
    // before being re-enabled — confirm against the current Token enum.
    /*
    #[test]
    fn token_operator(){
        let subject = operator(&b"+ 12"[..]);
        let expectation = IResult::Done(&b" 12"[..], Token::Operator(&b"+"[..]));
        assert_eq!(subject, expectation);
    }
    #[test]
    fn token_number(){
        let subject = number(&b"12 34"[..]);
        let expectation = IResult::Done(&b" 34"[..], Token::Number(&b"12"[..]));
        assert_eq!(subject, expectation);
    }
    #[test]
    fn test_tokenize(){
        let subject = tokenize(&b"+++"[..]);
        let plus = &b"+"[..];
        let expectation = vec![Token::Operator(plus),
            Token::Operator(plus),
            Token::Operator(plus)];
        assert_eq!(subject, IResult::Done(&b""[..], expectation));
    }
    #[test]
    fn test_tokenize_multiple(){
        let subject = tokenize(&b"+(>)"[..]);
        let plus = &b"+"[..];
        let parens = &b"("[..];
        let arrow = &b">"[..];
        let endparens = &b")"[..];
        let expectation = vec![Token::Operator(plus),
            Token::BeginSexpr(parens),
            Token::Comparator(arrow),
            Token::EndSexpr(endparens)];
        assert_eq!(subject, IResult::Done(&b""[..], expectation));
    }
    #[test]
    fn test_tokenize_numbers(){
        let subject = tokenize(&b"12 34"[..]);
        let expectation = vec![Token::Number(&b"12"[..]),
            Token::Number(&b"34"[..])
        ];
        assert_eq!(subject, IResult::Done(&b""[..], expectation));
    }
    */
}
| true |
a935c9b59cc9913920650d47ee172cfad1648d56
|
Rust
|
notdanilo/surface
|
/src/surface/manager.rs
|
UTF-8
| 2,896 | 2.984375 | 3 |
[
"Apache-2.0"
] |
permissive
|
use winit::event_loop::{EventLoop, ControlFlow};
/// Owns a winit `EventLoop` so surfaces have a single place that drives
/// event dispatch. `T` is the loop's user-event type.
pub struct SurfaceManager<T: 'static> {
    event_loop: EventLoop<T>
}
impl<T: 'static> From<EventLoop<T>> for SurfaceManager<T> {
    /// Wraps an existing event loop without touching it.
    fn from(event_loop: EventLoop<T>) -> Self {
        Self { event_loop }
    }
}
impl Default for SurfaceManager<()> {
    /// Creates a manager that owns a freshly constructed event loop
    /// (unit user-event type).
    fn default() -> Self {
        Self { event_loop: EventLoop::new() }
    }
}
impl SurfaceManager<()> {
    /// Convenience constructor; identical to `Default::default()`.
    pub fn new() -> Self {
        Self::default()
    }
}
impl<T: 'static> SurfaceManager<T> {
    /// Shared access to the wrapped event loop.
    pub fn event_loop(&self) -> &EventLoop<T> { &self.event_loop }
    /// Exclusive access to the wrapped event loop.
    pub fn event_loop_mut(&mut self) -> &mut EventLoop<T> { &mut self.event_loop }

    /// Consumes the manager and enters the winit event loop; never returns.
    ///
    /// The loop runs with `ControlFlow::Wait`: it sleeps until the OS
    /// delivers an event, which suits applications that only react to
    /// input and uses far less CPU than `ControlFlow::Poll` (the
    /// game-style continuous mode).
    pub fn run(self) -> ! {
        // let context = &mut self.context;
        // let display = &mut self.display;
        self.event_loop.run(move |event, _event_loop, control_flow| {
            // Fix: the original wrote ControlFlow::Poll here and then
            // immediately overwrote it with ControlFlow::Wait, so the
            // Poll store was dead. Only the effective assignment is kept.
            *control_flow = ControlFlow::Wait;
            match event {
                winit::event::Event::WindowEvent { event, .. } => {
                    match event {
                        // Quit when the window's close button is pressed.
                        winit::event::WindowEvent::CloseRequested => *control_flow = ControlFlow::Exit,
                        winit::event::WindowEvent::Resized(_logical_size) => {
                            // Resize handling is stubbed until a render
                            // context is wired in:
                            // let dpi_factor = context.get_hidpi_factor();
                            // let size = logical_size.to_physical(dpi_factor);
                            // context.resize(size);
                            // if let ContextDisplay::Window(window) = display {
                            //     window.set_size((size.width as usize, size.height as usize));
                            // }
                        },
                        winit::event::WindowEvent::CursorMoved {device_id:_, position:_, ..} => {
                            // let dpi_factor = context.get_hidpi_factor();
                            // let position = position.to_physical(dpi_factor);
                            // println!("Cursor {}x{}", position.x, position.y);
                        },
                        winit::event::WindowEvent::KeyboardInput {device_id:_,input,is_synthetic:_} => {
                            // NOTE(review): intentional no-op placeholder
                            // for future key handling.
                            input.virtual_keycode.map(|_vk| {
                            });
                        }
                        _ => ()
                    }
                },
                _ => ()
            }
        })
    }
}
| true |
c0dc950f38430da7fa861ec39598a1210300c15e
|
Rust
|
vandenoever/amxml
|
/src/xpath_impl/eval.rs
|
UTF-8
| 125,696 | 2.90625 | 3 |
[] |
no_license
|
//
// xpath_impl/eval.rs
//
// amxml: XML processor with XPath.
// Copyright (C) 2018 KOYAMA Hiro <[email protected]>
//
use std::collections::HashMap;
use std::cmp::Ordering;
use std::error::Error;
use std::f64;
use std::i64;
use std::str::FromStr;
use std::usize;
use dom::*;
use xmlerror::*;
use xpath_impl::lexer::*;
use xpath_impl::parser::*;
use xpath_impl::xitem::*;
use xpath_impl::xsequence::*;
use xpath_impl::func::*;
use xpath_impl::oper::*;
// ---------------------------------------------------------------------
// String-to-number conversion.
// Converts a string made of optional whitespace, an optional minus
// sign, a Number, and optional whitespace, in that order, into an
// IEEE 754 number; anything else becomes NaN.
// (Per the spec, a leading plus sign is not allowed either.)
//
// Parses a trimmed string as an f64; any failure yields NaN.
fn atof(s: &str) -> f64 {
    s.trim().parse::<f64>().unwrap_or(f64::NAN)
}
// Parses a trimmed string as an i64; any failure yields 0.
fn atoi(s: &str) -> i64 {
    s.trim().parse::<i64>().unwrap_or(0)
}
// ---------------------------------------------------------------------
//
// Widening helper used when positions/lengths feed integer arithmetic.
// NOTE(review): on 64-bit targets a usize above i64::MAX would wrap,
// but sequence lengths never approach that range here.
fn usize_to_i64(n: usize) -> i64 {
    n as i64
}
// =====================================================================
// 評価環境
//
// One variable binding: a name paired with its sequence value.
#[derive(Debug, PartialEq, Clone)]
struct VarNameValue {
    name: String,
    value: XSequence,
}
// Evaluation environment threaded through XPath evaluation.
#[derive(Debug, PartialEq, Clone)]
pub struct EvalEnv {
    position: usize,            // value of the built-in function position()
    last: usize,                // value of the built-in function last()
    var_vec: Vec<VarNameValue>, // variable table; scoping rules apply to
                                // same-named variables (innermost first)
}
// Creates a fresh environment: position/last zeroed, no variable bindings.
fn new_eval_env() -> EvalEnv {
    EvalEnv {
        position: 0,
        last: 0,
        var_vec: Vec::new(),
    }
}
impl EvalEnv {
    // -----------------------------------------------------------------
    /// Sorts nodes into document order and removes duplicates, in place.
    pub fn sort_by_doc_order(&self, node_array: &mut Vec<NodePtr>) {
        if node_array.len() <= 1 {
            return;
        }
        node_array.sort_by(|a, b| self.compare_by_doc_order(a, b));
        // After sorting, equal nodes are adjacent, so Vec::dedup is
        // equivalent to (and simpler than) the previous hand-written
        // backwards removal sweep.
        node_array.dedup();
    }
    // -----------------------------------------------------------------
    /// Compares two nodes by document order, returning
    /// Ordering::{Less, Equal, Greater}.
    pub fn compare_by_doc_order(&self, a: &NodePtr, b: &NodePtr) -> Ordering {
        a.document_order().cmp(&b.document_order())
    }
    // -----------------------------------------------------------------
    // Registers a variable binding. New bindings are inserted at the
    // front so the innermost scope shadows outer same-named bindings.
    fn set_var(&mut self, name: &str, value: &XSequence) {
        self.var_vec.insert(0, VarNameValue{
            name: String::from(name),
            value: value.clone(),
        });
    }
    // -----------------------------------------------------------------
    // Like set_var, but wraps a single item in a singleton sequence.
    fn set_var_item(&mut self, name: &str, value: &XItem) {
        self.var_vec.insert(0, VarNameValue{
            name: String::from(name),
            value: new_singleton(value),
        });
    }
    // -----------------------------------------------------------------
    // Removes the innermost binding with the given name, if any.
    // (Replaces the former usize::MAX "not found" sentinel with
    // Iterator::position, which expresses the same thing via Option.)
    fn remove_var(&mut self, name: &str) {
        if let Some(index) = self.var_vec.iter().position(|entry| entry.name == name) {
            self.var_vec.remove(index);
        }
    }
    // -----------------------------------------------------------------
    // Looks up the innermost binding with the given name.
    fn get_var(&self, name: &str) -> Option<XSequence> {
        self.var_vec.iter()
            .find(|entry| entry.name == name)
            .map(|entry| entry.value.clone())
    }
    // -----------------------------------------------------------------
    // Setters return the previous value so callers can restore it
    // after evaluating a nested context.
    fn set_position(&mut self, position: usize) -> usize {
        let old_position = self.position;
        self.position = position;
        old_position
    }
    fn set_last(&mut self, last: usize) -> usize {
        let old_last = self.last;
        self.last = last;
        old_last
    }
    // -----------------------------------------------------------------
    /// Current value of the built-in function position().
    pub fn get_position(&self) -> usize {
        self.position
    }
    /// Current value of the built-in function last().
    pub fn get_last(&self) -> usize {
        self.last
    }
}
// =====================================================================
// (EVAL)
//
/// Evaluates a parsed XPath tree against `start_node` using a fresh
/// evaluation environment, returning the resulting sequence.
pub fn match_xpath(start_node: &NodePtr, xnode: &XNodePtr) -> Result<XSequence, Box<Error>> {
    let mut eval_env = new_eval_env();
    evaluate_xnode(&new_singleton_node(start_node), xnode, &mut eval_env)
}
// ---------------------------------------------------------------------
// Applies one node of the XPath syntax tree to an XML node (sequence)
// and returns the evaluation result.
//
fn evaluate_xnode(xseq: &XSequence, xnode: &XNodePtr,
        eval_env: &mut EvalEnv) -> Result<XSequence, Box<Error>> {
    if is_nil_xnode(xnode) {
        panic!("Can't occur: evaluate_xnode, xnode is nil");
    }
    let xnode_type = get_xnode_type(&xnode);
    match xnode_type {
        XNodeType::OperatorPath => {
            // ---------------------------------------------------------
            // (1) Evaluate the left-hand side.
            //     It must be a nodes-only sequence or an error is raised
            //     (the empty sequence is allowed).
            //
            let left_xnode = get_left(xnode);
            let lhs = if ! is_nil_xnode(&left_xnode) {
                evaluate_xnode(xseq, &left_xnode, eval_env)?
            } else {
                new_xsequence()
            };
            // ---------------------------------------------------------
            // (1a) If there is no right-hand side to evaluate, return
            //      the LHS as is.
            //
            let right_xnode = get_right(xnode);
            if is_nil_xnode(&right_xnode) {
                return Ok(lhs);
            }
            // ---------------------------------------------------------
            // (1b) Error unless the LHS is a nodes-only sequence
            //      (the empty sequence is allowed).
            //
            if ! lhs.is_no_atom() {
                return Err(type_error!("Path演算子: ノード以外のアイテムがある。"));
            }
            // ---------------------------------------------------------
            // (2) Evaluate the RHS once per node of the LHS.
            //
            let mut node_exists = false;
            let mut atom_exists = false;
            let mut result_seq = new_xsequence();
            for item in lhs.iter() {
                let xseq = new_singleton(&item);
                let val_seq = evaluate_xnode(&xseq, &right_xnode, eval_env)?;
                if val_seq.is_empty() {
                    continue;
                }
                // -----------------------------------------------------
                // (2-1) Merge the per-node results, recording whether
                //       every result so far is a nodes-only sequence.
                //
                if val_seq.is_no_atom() {
                    node_exists = true;
                } else {
                    atom_exists = true;
                }
                result_seq.append(&val_seq);
                // -----------------------------------------------------
                // (2-3) Error if nodes and non-nodes are mixed.
                //
                if node_exists && atom_exists {
                    return Err(type_error!("Path演算子: ノードと非ノードが混在している。"));
                }
            }
            // ---------------------------------------------------------
            // (3) Finally, if the result is nodes-only, sort it into
            //     document order and remove duplicates.
            //
            if node_exists {
                let mut nodeset = result_seq.to_nodeset();
                eval_env.sort_by_doc_order(&mut nodeset);
                let sorted_seq = new_xsequence_from_node_array(&nodeset);
                return Ok(sorted_seq);
            } else {
                return Ok(result_seq);
            }
        },
        XNodeType::OperatorMap => {
            // ---------------------------------------------------------
            // (1) Evaluate the left-hand side.
            //
            let left_xnode = get_left(xnode);
            let lhs = if ! is_nil_xnode(&left_xnode) {
                evaluate_xnode(xseq, &left_xnode, eval_env)?
            } else {
                new_xsequence()
            };
            // ---------------------------------------------------------
            // (1a) If there is no right-hand side to evaluate, return
            //      the LHS as is.
            //
            let right_xnode = get_right(xnode);
            if is_nil_xnode(&right_xnode) {
                return Ok(lhs);
            }
            // ---------------------------------------------------------
            // (2) Evaluate the RHS for each item of the LHS and merge
            //     the results in order. Unlike the Path operator, no
            //     sorting or duplicate removal is done.
            //
            let mut result_seq = new_xsequence();
            for item in lhs.iter() {
                let xseq = new_singleton(&item);
                let val_seq = evaluate_xnode(&xseq, &right_xnode, eval_env)?;
                result_seq.append(&val_seq);
            }
            return Ok(result_seq);
        },
        // Every axis node delegates to the location-path machinery.
        XNodeType::AxisAncestor |
        XNodeType::AxisAncestorOrSelf |
        XNodeType::AxisAttribute |
        XNodeType::AxisChild |
        XNodeType::AxisDescendant |
        XNodeType::AxisDescendantOrSelf |
        XNodeType::AxisFollowing |
        XNodeType::AxisFollowingSibling |
        XNodeType::AxisParent |
        XNodeType::AxisPreceding |
        XNodeType::AxisPrecedingSibling |
        XNodeType::AxisRoot |
        XNodeType::AxisSelf => {
            return match_location_path(xseq, xnode, eval_env);
        },
        XNodeType::ContextItem => {
            // "." — the context item itself.
            return Ok(xseq.clone());
        }
        XNodeType::ApplyPredicate => {
            // Filter the LHS (PrimaryExpr) by the predicate on the RHS.
            //
            let primary_xnode = &get_left(xnode);
            let postfix_xnode = &get_right(xnode);
            let primary_expr = evaluate_xnode(xseq, primary_xnode, eval_env)?;
            return filter_by_predicate(&primary_expr,
                &get_left(&postfix_xnode), false, eval_env);
        },
        XNodeType::ApplyArgument => {
            // Treat the LHS (PrimaryExpr) as a function and apply the
            // argument list on the RHS.
            //
            let primary_xnode = &get_left(xnode);
            let postfix_xnode = &get_right(xnode);
            let primary_expr = evaluate_xnode(xseq, primary_xnode, eval_env)?;
            return apply_argument(xseq, &primary_expr, &postfix_xnode, eval_env);
        },
        XNodeType::OperatorConcatenate => {
            // Concatenate two sequences.
            //
            let lhs = evaluate_xnode(xseq, &get_left(xnode), eval_env)?;
            let rhs = evaluate_xnode(xseq, &get_right(xnode), eval_env)?;
            return op_concatenate(&vec!{lhs, rhs});
        },
        XNodeType::OperatorOr => {
            // Short-circuit: the RHS is evaluated only when the LHS's
            // effective boolean value is false.
            let lhs = evaluate_xnode(xseq, &get_left(xnode), eval_env)?;
            let lhs_b = lhs.effective_boolean_value()?;
            if lhs_b == true {
                return Ok(new_singleton_boolean(true));
            } else {
                let rhs = evaluate_xnode(xseq, &get_right(xnode), eval_env)?;
                let rhs_b = rhs.effective_boolean_value()?;
                return Ok(new_singleton_boolean(rhs_b));
            }
        },
        XNodeType::OperatorAnd => {
            // Short-circuit: the RHS is evaluated only when the LHS's
            // effective boolean value is true.
            let lhs = evaluate_xnode(xseq, &get_left(xnode), eval_env)?;
            let lhs_b = lhs.effective_boolean_value()?;
            if lhs_b == false {
                return Ok(new_singleton_boolean(false));
            } else {
                let rhs = evaluate_xnode(xseq, &get_right(xnode), eval_env)?;
                let rhs_b = rhs.effective_boolean_value()?;
                return Ok(new_singleton_boolean(rhs_b));
            }
        },
        // General comparisons (=, !=, <, <=, >, >=): both operands are
        // evaluated eagerly, then delegated to the comparison helpers.
        XNodeType::OperatorGeneralEQ => {
            let lhs = evaluate_xnode(xseq, &get_left(xnode), eval_env)?;
            let rhs = evaluate_xnode(xseq, &get_right(xnode), eval_env)?;
            return general_compare_eq(&lhs, &rhs);
        },
        XNodeType::OperatorGeneralNE => {
            let lhs = evaluate_xnode(xseq, &get_left(xnode), eval_env)?;
            let rhs = evaluate_xnode(xseq, &get_right(xnode), eval_env)?;
            return general_compare_ne(&lhs, &rhs);
        },
        XNodeType::OperatorGeneralLT => {
            let lhs = evaluate_xnode(xseq, &get_left(xnode), eval_env)?;
            let rhs = evaluate_xnode(xseq, &get_right(xnode), eval_env)?;
            return general_compare_lt(&lhs, &rhs);
        },
        XNodeType::OperatorGeneralLE => {
            let lhs = evaluate_xnode(xseq, &get_left(xnode), eval_env)?;
            let rhs = evaluate_xnode(xseq, &get_right(xnode), eval_env)?;
            return general_compare_le(&lhs, &rhs);
        },
        XNodeType::OperatorGeneralGT => {
            let lhs = evaluate_xnode(xseq, &get_left(xnode), eval_env)?;
            let rhs = evaluate_xnode(xseq, &get_right(xnode), eval_env)?;
            return general_compare_gt(&lhs, &rhs);
        },
        XNodeType::OperatorGeneralGE => {
            let lhs = evaluate_xnode(xseq, &get_left(xnode), eval_env)?;
            let rhs = evaluate_xnode(xseq, &get_right(xnode), eval_env)?;
            return general_compare_ge(&lhs, &rhs);
        },
        // Value comparisons (eq, ne, lt, le, gt, ge).
        XNodeType::OperatorValueEQ => {
            let lhs = evaluate_xnode(xseq, &get_left(xnode), eval_env)?;
            let rhs = evaluate_xnode(xseq, &get_right(xnode), eval_env)?;
            return value_compare_eq(&lhs, &rhs);
        },
        XNodeType::OperatorValueNE => {
            let lhs = evaluate_xnode(xseq, &get_left(xnode), eval_env)?;
            let rhs = evaluate_xnode(xseq, &get_right(xnode), eval_env)?;
            return value_compare_ne(&lhs, &rhs);
        },
        XNodeType::OperatorValueLT => {
            let lhs = evaluate_xnode(xseq, &get_left(xnode), eval_env)?;
            let rhs = evaluate_xnode(xseq, &get_right(xnode), eval_env)?;
            return value_compare_lt(&lhs, &rhs);
        },
        XNodeType::OperatorValueLE => {
            let lhs = evaluate_xnode(xseq, &get_left(xnode), eval_env)?;
            let rhs = evaluate_xnode(xseq, &get_right(xnode), eval_env)?;
            return value_compare_le(&lhs, &rhs);
        },
        XNodeType::OperatorValueGT => {
            let lhs = evaluate_xnode(xseq, &get_left(xnode), eval_env)?;
            let rhs = evaluate_xnode(xseq, &get_right(xnode), eval_env)?;
            return value_compare_gt(&lhs, &rhs);
        },
        XNodeType::OperatorValueGE => {
            let lhs = evaluate_xnode(xseq, &get_left(xnode), eval_env)?;
            let rhs = evaluate_xnode(xseq, &get_right(xnode), eval_env)?;
            return value_compare_ge(&lhs, &rhs);
        },
        // Arithmetic operators delegate to the op_numeric_* helpers.
        XNodeType::OperatorAdd => {
            let lhs = evaluate_xnode(xseq, &get_left(xnode), eval_env)?;
            let rhs = evaluate_xnode(xseq, &get_right(xnode), eval_env)?;
            return op_numeric_add(&vec!{lhs, rhs});
        },
        XNodeType::OperatorSubtract => {
            let lhs = evaluate_xnode(xseq, &get_left(xnode), eval_env)?;
            let rhs = evaluate_xnode(xseq, &get_right(xnode), eval_env)?;
            return op_numeric_subtract(&vec!{lhs, rhs});
        },
        // Unary +/- only have a right operand.
        XNodeType::OperatorUnaryPlus => {
            let rhs = evaluate_xnode(xseq, &get_right(xnode), eval_env)?;
            return op_numeric_unary_plus(&vec!{rhs});
        },
        XNodeType::OperatorUnaryMinus => {
            let rhs = evaluate_xnode(xseq, &get_right(xnode), eval_env)?;
            return op_numeric_unary_minus(&vec!{rhs});
        },
        XNodeType::OperatorMultiply => {
            let lhs = evaluate_xnode(xseq, &get_left(xnode), eval_env)?;
            let rhs = evaluate_xnode(xseq, &get_right(xnode), eval_env)?;
            return op_numeric_multiply(&vec!{lhs, rhs});
        },
        XNodeType::OperatorDiv => {
            let lhs = evaluate_xnode(xseq, &get_left(xnode), eval_env)?;
            let rhs = evaluate_xnode(xseq, &get_right(xnode), eval_env)?;
            return op_numeric_divide(&vec!{lhs, rhs});
        },
        XNodeType::OperatorIDiv => {
            let lhs = evaluate_xnode(xseq, &get_left(xnode), eval_env)?;
            let rhs = evaluate_xnode(xseq, &get_right(xnode), eval_env)?;
            return op_numeric_integer_divide(&vec!{lhs, rhs});
        },
        XNodeType::OperatorMod => {
            let lhs = evaluate_xnode(xseq, &get_left(xnode), eval_env)?;
            let rhs = evaluate_xnode(xseq, &get_right(xnode), eval_env)?;
            return op_numeric_mod(&vec!{lhs, rhs});
        },
        // String concatenation operator "||".
        XNodeType::OperatorConcat => {
            let lhs = evaluate_xnode(xseq, &get_left(xnode), eval_env)?;
            let rhs = evaluate_xnode(xseq, &get_right(xnode), eval_env)?;
            return fn_concat(&vec!{&lhs, &rhs});
        },
        // Set operators on node sequences; these need eval_env for
        // document-order handling.
        XNodeType::OperatorUnion => {
            let lhs = evaluate_xnode(xseq, &get_left(xnode), eval_env)?;
            let rhs = evaluate_xnode(xseq, &get_right(xnode), eval_env)?;
            return op_union(&vec!{lhs, rhs}, eval_env);
        },
        XNodeType::OperatorIntersect => {
            let lhs = evaluate_xnode(xseq, &get_left(xnode), eval_env)?;
            let rhs = evaluate_xnode(xseq, &get_right(xnode), eval_env)?;
            return op_intersect(&vec!{lhs, rhs}, eval_env);
        },
        XNodeType::OperatorExcept => {
            let lhs = evaluate_xnode(xseq, &get_left(xnode), eval_env)?;
            let rhs = evaluate_xnode(xseq, &get_right(xnode), eval_env)?;
            return op_except(&vec!{lhs, rhs}, eval_env);
        },
        // Range operator "to".
        XNodeType::OperatorTo => {
            let lhs = evaluate_xnode(xseq, &get_left(xnode), eval_env)?;
            let rhs = evaluate_xnode(xseq, &get_right(xnode), eval_env)?;
            return op_to(&vec!{lhs, rhs});
        },
        // Node comparisons: "is", "<<", ">>".
        XNodeType::OperatorIsSameNode => {
            let lhs = evaluate_xnode(xseq, &get_left(xnode), eval_env)?;
            let rhs = evaluate_xnode(xseq, &get_right(xnode), eval_env)?;
            return op_is_same_node(&vec!{lhs, rhs}, eval_env);
        },
        XNodeType::OperatorNodeBefore => {
            let lhs = evaluate_xnode(xseq, &get_left(xnode), eval_env)?;
            let rhs = evaluate_xnode(xseq, &get_right(xnode), eval_env)?;
            return op_node_before(&vec!{lhs, rhs}, eval_env);
        },
        XNodeType::OperatorNodeAfter => {
            let lhs = evaluate_xnode(xseq, &get_left(xnode), eval_env)?;
            let rhs = evaluate_xnode(xseq, &get_right(xnode), eval_env)?;
            return op_node_after(&vec!{lhs, rhs}, eval_env);
        },
        XNodeType::IfExpr => {
            // left = condition; right = an IfThenElse node whose left is
            // the "then" branch and whose right is the "else" branch.
            let cond = evaluate_xnode(xseq, &get_left(xnode), eval_env)?;
            let xnode_if_then_else = get_right(xnode);
            if get_xnode_type(&xnode_if_then_else) != XNodeType::IfThenElse {
                return Err(cant_occur!("IfExpr: rightがIfThenElseでない。"));
            }
            if cond.effective_boolean_value()? == true {
                let value = evaluate_xnode(xseq, &get_left(&xnode_if_then_else), eval_env)?;
                return Ok(value);
            } else {
                let value = evaluate_xnode(xseq, &get_right(&xnode_if_then_else), eval_env)?;
                return Ok(value);
            }
        },
        XNodeType::ForExpr => {
            // The variable bindings hang off the right-hand chain.
            return evaluate_xnode(xseq, &get_right(xnode), eval_env);
        },
        XNodeType::ForVarBind => {
            // Bind the variable to each item of the range in turn and
            // concatenate the per-iteration results.
            let var_name = get_xnode_name(&xnode);
            let range = evaluate_xnode(xseq, &get_left(xnode), eval_env)?;
            let mut result = new_xsequence();
            for xitem in range.iter() {
                eval_env.set_var_item(var_name.as_str(), xitem);
                let rhs = evaluate_xnode(xseq, &get_right(xnode), eval_env)?;
                result.append(&rhs);
                eval_env.remove_var(var_name.as_str());
            }
            return Ok(result);
        },
        XNodeType::LetExpr => {
            return evaluate_xnode(xseq, &get_right(xnode), eval_env);
        },
        XNodeType::LetVarBind => {
            // -----------------------------------------------------
            // Evaluate the LHS, register it as a variable binding,
            // then evaluate the RHS under that binding.
            //
            let var_value = evaluate_xnode(xseq, &get_left(xnode), eval_env)?;
            let var_name = get_xnode_name(&xnode);
            eval_env.set_var(var_name.as_str(), &var_value);
            let rhs = evaluate_xnode(xseq, &get_right(xnode), eval_env)?;
            eval_env.remove_var(var_name.as_str());
            return Ok(rhs);
        },
        XNodeType::SomeExpr => {
            return evaluate_xnode(xseq, &get_right(xnode), eval_env);
        },
        XNodeType::SomeVarBind => {
            // Existential quantifier: true as soon as one binding makes
            // the RHS true.
            let var_name = get_xnode_name(&xnode);
            let range = evaluate_xnode(xseq, &get_left(xnode), eval_env)?;
            for xitem in range.iter() {
                eval_env.set_var_item(var_name.as_str(), xitem);
                let rhs = evaluate_xnode(xseq, &get_right(xnode), eval_env)?;
                if rhs.effective_boolean_value()? == true {
                    return Ok(new_singleton_boolean(true));
                }
                eval_env.remove_var(var_name.as_str());
            }
            return Ok(new_singleton_boolean(false));
        },
        XNodeType::EveryExpr => {
            return evaluate_xnode(xseq, &get_right(xnode), eval_env);
        },
        XNodeType::EveryVarBind => {
            // Universal quantifier: false as soon as one binding makes
            // the RHS false.
            let var_name = get_xnode_name(&xnode);
            let range = evaluate_xnode(xseq, &get_left(xnode), eval_env)?;
            for xitem in range.iter() {
                eval_env.set_var_item(var_name.as_str(), xitem);
                let rhs = evaluate_xnode(xseq, &get_right(xnode), eval_env)?;
                if rhs.effective_boolean_value()? == false {
                    return Ok(new_singleton_boolean(false));
                }
                eval_env.remove_var(var_name.as_str());
            }
            return Ok(new_singleton_boolean(true));
        },
        XNodeType::OperatorInstanceOf => {
            let expr_xseq = evaluate_xnode(xseq, &get_left(xnode), eval_env)?;
            let sequence_type_xnode = get_right(xnode);
            let b = match_sequence_type(&expr_xseq, &sequence_type_xnode)?;
            return Ok(new_singleton_boolean(b));
        },
        XNodeType::OperatorCastableAs => {
            // right = SingleType node; its left holds the type name.
            let value = evaluate_xnode(xseq, &get_left(xnode), eval_env)?;
            let single_type_xnode = get_right(xnode);
            let type_name_xnode = get_left(&single_type_xnode);
            let type_name = get_xnode_name(&type_name_xnode);
            return Ok(new_singleton_boolean(value.castable_as(&type_name)));
        }
        XNodeType::OperatorCastAs => {
            let value = evaluate_xnode(xseq, &get_left(xnode), eval_env)?;
            let single_type_xnode = get_right(xnode);
            let type_name_xnode = get_left(&single_type_xnode);
            let type_name = get_xnode_name(&type_name_xnode);
            return value.cast_as(&type_name);
        }
        XNodeType::FunctionCall => {
            // Evaluate the expressions under the ArgumentTop nodes
            // chained on the right to build args_array (the argument
            // list), then call the named function with those arguments.
            //
            let mut args_array: Vec<XSequence> = vec!{};
            let mut curr_xnode = get_right(&xnode);
            while ! is_nil_xnode(&curr_xnode) {
                match get_xnode_type(&curr_xnode) {
                    XNodeType::ArgumentTop => {
                        let arg = evaluate_xnode(xseq,
                            &get_left(&curr_xnode), eval_env)?;
                        args_array.push(arg);
                    },
                    _ => {
                        return Err(cant_occur!("FunctionCall: rightがArgumentTopでない。"));
                    },
                }
                curr_xnode = get_right(&curr_xnode);
            }
            return evaluate_function(&get_xnode_name(&xnode),
                &mut args_array, xseq, eval_env);
        },
        // Literals: the lexeme is stored as the xnode's name.
        XNodeType::StringLiteral => {
            return Ok(new_singleton_string(&get_xnode_name(&xnode)));
        },
        XNodeType::IntegerLiteral => {
            return Ok(new_singleton_integer(atoi(&get_xnode_name(&xnode))));
        },
        XNodeType::DecimalLiteral => {
            return Ok(new_singleton_decimal(atof(&get_xnode_name(&xnode))));
        },
        XNodeType::DoubleLiteral => {
            return Ok(new_singleton_double(atof(&get_xnode_name(&xnode))));
        },
        XNodeType::InlineFunction |
        XNodeType::NamedFunctionRef |
        XNodeType::PartialFunctionCall => {
            // Inline function / named function reference:
            // not evaluated at this point; wrap the xnode itself as a
            // singleton and return it.
            return Ok(new_singleton_xnodeptr(&xnode));
        },
        XNodeType::Map |
        XNodeType::SquareArray |
        XNodeType::CurlyArray => {
            // Map / array (also treated as a kind of function).
            let xitem = convert_xnode_to_map_array(&xnode, &xseq, eval_env)?;
            return Ok(new_singleton(&xitem));
        },
        XNodeType::UnaryLookupByWildcard => {
            // "?*": all values of a map, or all members of an array.
            if let Ok(xitem_map) = xseq.get_singleton_map() {
                let mut result = new_xsequence();
                for key in xitem_map.map_keys().iter() {
                    result.append(&xitem_map.map_get(key).unwrap());
                }
                return Ok(result);
            } else if let Ok(xitem_array) = xseq.get_singleton_array() {
                let size = xitem_array.array_size();
                let mut result = new_xsequence();
                // Array indices are 1-based in XPath.
                for i in 1 ..= size {
                    let index = new_xitem_integer(i as i64);
                    result.append(&xitem_array.array_get(&index).unwrap());
                }
                return Ok(result);
            } else {
                return Err(type_error!("lookup: マップ/配列でない。"));
            }
        },
        XNodeType::UnaryLookupByExpr => {
            // "?(expr)": look up map values / array members by the
            // evaluated keys; missing keys are silently skipped.
            let expr = evaluate_xnode(xseq, &get_left(xnode), eval_env)?;
            if let Ok(xitem_map) = xseq.get_singleton_map() {
                let mut result = new_xsequence();
                for key in expr.iter() {
                    if let Some(v) = xitem_map.map_get(key) {
                        result.append(&v);
                    } else {
                    }
                }
                return Ok(result);
            } else if let Ok(xitem_array) = xseq.get_singleton_array() {
                let mut result = new_xsequence();
                for index in expr.iter() {
                    if let Some(v) = xitem_array.array_get(index) {
                        result.append(&v);
                    } else {
                    }
                }
                return Ok(result);
            } else {
                return Err(type_error!("lookup: マップ/配列でない。"));
            }
        },
        XNodeType::VarRef => {
            // An unbound variable evaluates to the empty sequence.
            let var_name = get_xnode_name(&xnode);
            if let Some(xseq) = eval_env.get_var(var_name.as_str()) {
                return Ok(xseq);
            } else {
                return Ok(new_xsequence());
            }
        },
        XNodeType::ParenthesizedExpr => {
            // "()" evaluates to the empty sequence.
            let lhs_xnode = get_left(xnode);
            if ! is_nil_xnode(&lhs_xnode) {
                return evaluate_xnode(xseq, &lhs_xnode, eval_env);
            } else {
                return Ok(new_xsequence());
            }
        },
        _ => {
            return Err(cant_occur!(
                "evaluate_xnode: xnode_type = {:?}", xnode_type));
        }
    }
}
// ---------------------------------------------------------------------
// XSequence中の各ノードに対し、xnodeで示されるLocStepを適用して
// 合致するノード集合を取得し、その合併をXSequenceとして返す。
//
// Applies the location step to every node of the input sequence and
// returns the concatenation of the matching node sets as a sequence.
fn match_location_path(xseq: &XSequence, xnode: &XNodePtr,
        eval_env: &mut EvalEnv) -> Result<XSequence, Box<Error>> {
    let mut collected: Vec<NodePtr> = vec!{};
    for node in xseq.to_nodeset().iter() {
        collected.append(&mut match_loc_step(node, xnode, eval_env)?.to_nodeset());
    }
    Ok(new_xsequence_from_node_array(&collected))
}
// ---------------------------------------------------------------------
// XML木のあるノードを起点として、
// xNodeで示されるLocStep (例: 「child::foo[@attr='at']」) に、
// 軸、ノード・テスト、述語が合致するノード集合をXSequenceの形で返す。
//
// Starting from one XML node, returns (as an XSequence) the nodes that
// match the LocStep described by `xnode` (e.g. "child::foo[@attr='at']"):
// axis, node test, and predicates.
fn match_loc_step(node: &NodePtr, xnode: &XNodePtr,
        eval_env: &mut EvalEnv) -> Result<XSequence, Box<Error>> {
    let mut node_array: Vec<NodePtr> = vec!{};
    // Dispatch on the axis; most axes share match_along_axis with a
    // candidate-enumeration function.
    match get_xnode_type(&xnode) {
        XNodeType::AxisAncestor => {
            node_array = match_along_axis(node, xnode, array_ancestor);
        },
        XNodeType::AxisAncestorOrSelf => {
            node_array = match_along_axis(node, xnode, array_ancestor_or_self);
        },
        XNodeType::AxisAttribute => {
            node_array = match_along_axis(node, xnode, NodePtr::attributes);
        },
        XNodeType::AxisChild => {
            node_array = match_along_axis(node, xnode, NodePtr::children);
        },
        XNodeType::AxisDescendant => {
            node_array = match_along_axis(node, xnode, array_descendant);
        },
        XNodeType::AxisDescendantOrSelf => {
            node_array = match_along_axis(node, xnode, array_descendant_or_self);
        },
        XNodeType::AxisFollowing => {
            node_array = match_along_axis(node, xnode, array_following);
        },
        XNodeType::AxisFollowingSibling => {
            node_array = match_along_axis(node, xnode, array_following_sibling);
        },
        XNodeType::AxisParent => {
            // The parent axis has at most one candidate; test it directly.
            if let Some(parent) = node.parent() {
                if match_node_test(&parent, xnode) {
                    node_array.push(parent.rc_clone());
                }
            }
        },
        XNodeType::AxisPreceding => {
            node_array = match_along_axis(node, xnode, array_preceding);
        },
        XNodeType::AxisPrecedingSibling => {
            node_array = match_along_axis(node, xnode, array_preceding_sibling);
        },
        XNodeType::AxisRoot => {
            // The root axis matches unconditionally (no node test).
            node_array.push(node.root().rc_clone());
        },
        XNodeType::AxisSelf => {
            if match_node_test(&node, xnode) {
                node_array.push(node.rc_clone());
            }
        },
        _ => {
            return Err(cant_occur!("match_loc_step: xnode_type: {:?}",
                get_xnode_type(&xnode)));
        },
    }
    // Narrow down by the predicates hanging off the right, if any.
    let rhs = get_right(&xnode);
    if ! is_nil_xnode(&rhs) {
        let result = filter_by_predicates(
            &new_xsequence_from_node_array(&node_array), &rhs, eval_env)?;
        return Ok(result);
    } else {
        return Ok(new_xsequence_from_node_array(&node_array));
    }
}
// ---------------------------------------------------------------------
// 函数 along_axis_func を適用して得たノード配列から、match_node_test() に
// 合格したノードのみ集めて返す。
//
// From the candidate nodes produced by along_axis_func, keeps only
// those that pass match_node_test().
fn match_along_axis<F>(node: &NodePtr, xnode: &XNodePtr,
        mut along_axis_func: F) -> Vec<NodePtr>
        where F: FnMut(&NodePtr) -> Vec<NodePtr> {
    along_axis_func(&node)
        .iter()
        .filter(|n| match_node_test(*n, xnode))
        .map(|n| n.rc_clone())
        .collect()
}
// ---------------------------------------------------------------------
// ancestor軸で合致する候補ノード。
//
// Candidate nodes on the ancestor axis, outermost (root) first:
// recurse to the grandparents before pushing the parent itself.
fn array_ancestor(node: &NodePtr) -> Vec<NodePtr> {
    match node.parent() {
        Some(parent) => {
            let mut node_array = array_ancestor(&parent);
            node_array.push(parent.rc_clone());
            node_array
        },
        None => vec!{},
    }
}
// ---------------------------------------------------------------------
// ancestor-or-self軸で合致する候補ノード。
//
// Candidate nodes on the ancestor-or-self axis: the ancestors
// (root first) followed by the node itself.
pub fn array_ancestor_or_self(node: &NodePtr) -> Vec<NodePtr> {
    let mut node_array = array_ancestor(node);
    node_array.push(node.rc_clone());
    node_array
}
// ---------------------------------------------------------------------
// descendant軸で合致する候補ノード。
//
// Candidate nodes on the descendant axis, in document order:
// each child is pushed before its own subtree.
fn array_descendant(node: &NodePtr) -> Vec<NodePtr> {
    let mut node_array: Vec<NodePtr> = vec!{};
    for ch in node.children().iter() {
        node_array.push(ch.rc_clone());
        node_array.extend(array_descendant(ch));
    }
    node_array
}
// ---------------------------------------------------------------------
// descendant-or-self軸で合致する候補ノード。
//
// Candidate nodes on the descendant-or-self axis: the node itself,
// then its subtree in document order.
fn array_descendant_or_self(node: &NodePtr) -> Vec<NodePtr> {
    let mut node_array = vec!{node.rc_clone()};
    node_array.extend(array_descendant(node));
    node_array
}
// ---------------------------------------------------------------------
// following軸で合致する候補ノード。
//
// Candidate nodes on the following axis: nodes that come after `node`
// in document order, excluding `node`'s own descendants.
// Attribute nodes have no following axis.
fn array_following(node: &NodePtr) -> Vec<NodePtr> {
    let mut node_array: Vec<NodePtr> = vec!{};
    if node.node_type() != NodeType::Attribute {
        // Walk the whole document in document order, flipping a flag
        // once `node` itself is seen; everything after it that is not
        // in node's own subtree belongs to the following axis.
        let all_nodes = array_descendant_or_self(&node.root());
        let descendant_or_self_nodes = array_descendant_or_self(&node);
        let mut self_occured = false;
        for ch in all_nodes.iter() {
            // NOTE(review): contains() makes this O(n*m) over the
            // document; fine for small trees, revisit if profiling
            // flags it.
            if self_occured && ! descendant_or_self_nodes.contains(ch) {
                node_array.push(ch.rc_clone());
            }
            if ch == node {
                self_occured = true;
            }
        }
    }
    return node_array;
}
// ---------------------------------------------------------------------
// following-sibling軸で合致する候補ノード。
//
// Candidate nodes on the following-sibling axis: the siblings after
// `node` among its parent's children. Attribute nodes have none.
fn array_following_sibling(node: &NodePtr) -> Vec<NodePtr> {
    let mut node_array: Vec<NodePtr> = vec!{};
    if node.node_type() != NodeType::Attribute {
        if let Some(parent) = node.parent() {
            // Skip up to and including `node` itself, keep the rest.
            node_array = parent.children().iter()
                .skip_while(|ch| *ch != node)
                .skip(1)
                .map(|ch| ch.rc_clone())
                .collect();
        }
    }
    node_array
}
// ---------------------------------------------------------------------
// preceding軸で合致する候補ノード。
//
// Candidate nodes on the preceding axis: nodes that come before `node`
// in document order, excluding `node`'s ancestors.
// Attribute nodes have no preceding axis.
fn array_preceding(node: &NodePtr) -> Vec<NodePtr> {
    let mut node_array: Vec<NodePtr> = vec!{};
    if node.node_type() != NodeType::Attribute {
        // Walk the whole document in document order; everything seen
        // before `node` that is not one of its ancestors belongs to
        // the preceding axis.
        let all_nodes = array_descendant_or_self(&node.root());
        let ancestor_nodes = array_ancestor(&node);
        let mut self_occured = false;
        for ch in all_nodes.iter() {
            if ch == node {
                self_occured = true;
            }
            // NOTE(review): contains() makes this O(n*m) over the
            // document; fine for small trees, revisit if profiling
            // flags it.
            if ! self_occured && ! ancestor_nodes.contains(ch) {
                node_array.push(ch.rc_clone());
            }
        }
    }
    return node_array;
}
// ---------------------------------------------------------------------
// preceding-sibling軸で合致する候補ノード。
//
/// Candidate nodes on the `preceding-sibling` axis: siblings that appear
/// before `node` under the same parent.
/// Attribute nodes have an empty preceding-sibling axis.
fn array_preceding_sibling(node: &NodePtr) -> Vec<NodePtr> {
    let mut result: Vec<NodePtr> = vec!{};
    if node.node_type() != NodeType::Attribute {
        if let Some(parent) = node.parent() {
            for ch in parent.children().iter() {
                if ch == node {
                    // Stop once we reach `node` itself.
                    break;
                }
                result.push(ch.rc_clone());
            }
        }
    }
    return result;
}
// ---------------------------------------------------------------------
// ノード・テスト (名前テストまたは種類テスト)。
//
/// Node test for a step: a name test or a kind test.
///
/// For an AxisNNNN xnode:
/// - left child nil => NameTest; the pattern is in the xnode's name.
/// - left child is XNodeType::KindTest => delegate to the kind test
///   rooted at that child.
fn match_node_test(node: &NodePtr, xnode: &XNodePtr) -> bool {
    let kind_test_xnode = get_left(xnode);
    if is_nil_xnode(&kind_test_xnode) {
        match_name_test(node, xnode)
    } else {
        match_kind_test(node, &kind_test_xnode)
    }
}
// ---------------------------------------------------------------------
// ノードの名前テスト。
//
/// Name test for a node, e.g. the "para" of "child::para".
///
/// The abbreviations "//" (descendant-or-self::node()) and ".."
/// (parent::node()) are represented as NameTests carrying the pattern
/// "node()", which matches anything.
fn match_name_test(node: &NodePtr, xnode: &XNodePtr) -> bool {
    let pattern = get_xnode_name(xnode);
    // "node()" matches any node (see note above).
    if pattern.as_str() == "node()" {
        return true;
    }
    // The principal node type implied by the axis must agree with the
    // actual node type:
    //   attribute axis => attribute; everything else => element.
    // (A namespace axis would imply namespace, but is not handled here.)
    let expected_type = match get_xnode_type(xnode) {
        XNodeType::AxisAttribute => NodeType::Attribute,
        _ => NodeType::Element,
    };
    if node.node_type() != expected_type {
        return false;
    }
    // Exact name match, or the "*" wildcard.
    if pattern == node.name() || pattern.as_str() == "*" {
        return true;
    }
    // "prefix:*" — match on the namespace prefix only.
    let parts: Vec<&str> = pattern.splitn(2, ":").collect();
    parts.len() == 2 && parts[1] == "*" && node.space_name() == parts[0]
}
// ---------------------------------------------------------------------
// ノードの種類テスト
//
// DocumentTest // ☆
// SchemaElementTest // ☆
// SchemaAttributeTest // ☆
// NamespaceNodeTest // ☆
// ☆ 未実装 (構文解析のみ)
//
// 2.5.1 Predefined Schema Types
// - 未検証の要素ノードについては、型註釈が「xs:untyped」になる。
//
// 2.5.5.3 Element Test
// - element()、element(*): 任意の要素ノードに合致。
// - element(ElementName): 要素名が一致。
// - element(ElementName, TypeName):
// 要素名が一致し、derives-from(xs:untyped, TypeName) が true、
// かつ、nilledプロパティーがfalse (i.e. 属性 "xsi:nil" の値が "true" でない)。
// - element(ElementName, TypeName?):
// 要素名が一致し、derives-from(xs:untyped, TypeName) が true。
// nilledプロパティーはtrueでもfalseでもよい。
// - element(*, TypeName):
// derives-from(xs:untyped, TypeName) が true、
// かつ、nilledプロパティーがfalse (i.e. 属性 "xsi:nil" の値が "true" でない)。
// - element(*, TypeName?):
// derives-from(xs:untyped, TypeName) が true。
// nilledプロパティーはtrueでもfalseでもよい。
//
// 2.5.5.5 Attribute Test
// - attribute()、attribute(*): 任意の属性ノードに合致。
// - attribute(AttributeName): 属性名が一致。
// - attribute(AttributeName, TypeName):
// 属性名が一致し、derives-from(xs:untypedAtomic, TypeName) が true。
// - attribute(*, TypeName):
// derives-from(xs:untypedAtomic, TypeName) が true。
//
/// Node kind test: checks `node` against the KindTest rooted at `xnode`
/// (the concrete test is the left child).
/// DocumentTest, SchemaElementTest, SchemaAttributeTest and
/// NamespaceNodeTest are parsed but not implemented: those branches fall
/// through and the function returns false.
fn match_kind_test(node: &NodePtr, xnode: &XNodePtr) -> bool {
    // assert:: get_xnode_type(&kind_test_xnode) == XNodeType::KindTest
    let node_type = node.node_type();
    let test_xnode = get_left(xnode);
    match get_xnode_type(&test_xnode) {
        XNodeType::DocumentTest => {
            // DocumentTest: not implemented.
        },
        XNodeType::ElementTest => {
            if node_type != NodeType::Element {
                return false;
            }
            let element_name = get_xnode_name(&test_xnode);
            // Default when no name is given explicitly: *
            if element_name != "*" && element_name != node.name() {
                return false;
            }
            let type_name_xnode = get_left(&test_xnode);
            let type_name_ex = get_xnode_name(&type_name_xnode);
            // Default when no type is given explicitly: xs:anyType?
            // NOTE(review): trim_right_matches is deprecated; behaves
            // identically to trim_end_matches.
            let type_name = type_name_ex.trim_right_matches('?');
            let with_q = type_name_ex.ends_with("?");
            if ! derives_from("xs:untyped", &type_name) {
                return false;
            }
            if ! with_q {
                // Without a trailing "?", the nilled property must be
                // false, i.e. attribute "xsi:nil" must not equal "true".
                if let Some(p) = node.attribute_value("xsi:nil") {
                    if p == "true" {
                        return false;
                    }
                }
            }
            return true;
        },
        XNodeType::AttributeTest => {
            if node_type != NodeType::Attribute {
                return false;
            }
            let attribute_name = get_xnode_name(&test_xnode);
            // Default when no name is given explicitly: *
            if attribute_name != "*" && attribute_name != node.name() {
                return false;
            }
            let type_name_xnode = get_left(&test_xnode);
            let type_name = get_xnode_name(&type_name_xnode);
            // Default when no type is given explicitly: xs:anyType
            if ! derives_from("xs:untypedAtomic", &type_name) {
                return false;
            }
            return true;
        },
        XNodeType::SchemaElementTest => {
            // SchemaElementTest: not implemented.
        },
        XNodeType::SchemaAttributeTest => {
            // SchemaAttributeTest: not implemented.
        },
        XNodeType::PITest => {
            // processing-instruction() or processing-instruction(name):
            // an empty argument matches any instruction target.
            let arg = get_xnode_name(&test_xnode);
            return node_type == NodeType::Instruction &&
                   (arg == "" || arg == node.name());
        },
        XNodeType::CommentTest => {
            return node_type == NodeType::Comment;
        },
        XNodeType::TextTest => {
            return node_type == NodeType::Text;
        },
        XNodeType::NamespaceNodeTest => {
            // NamespaceNodeTest: not implemented.
        },
        XNodeType::AnyKindTest => {
            return true;
        },
        _ => {},
    }
    return false;
}
// ---------------------------------------------------------------------
// Location Stepで得たシーケンスに対して、述語を順次適用してしぼり込み、
// 最終的に得られるシーケンスを返す。
//
// apply_postfix() の処理の一部といった恰好だが、構文上、述語のみ
// 並んでいる状況に対応する。
//
// xseq: AxisNNNNNNNN (Location Step) を評価して得たシーケンス (ノード集合)。
// xnode: AxisNNNNNNNNの右。
// rightをたどったノードはすべてPredicate{Rev}Topであり、
// そのleft以下に述語式の構文木がある。
// 各ノードに対して述語を適用し、trueであったもののみにしぼり込む。
//
// AxisNNNNNNNN --- Predicate{Rev}Top --- Predicate{Rev}Top ---...
// (NameTest) | |
// (Expr) (Expr)
//
/// Applies the chain of predicates attached to a location step, narrowing
/// the step's result sequence one predicate at a time.
///
/// `xnode` is the right sibling of the AxisNNNNNNNN node; following the
/// `right` links yields only Predicate{Rev}Top nodes, each holding the
/// predicate expression in its left subtree:
///
///   AxisNNNNNNNN --- Predicate{Rev}Top --- Predicate{Rev}Top --- ...
///    (NameTest)            |                     |
///                        (Expr)               (Expr)
fn filter_by_predicates(xseq: &XSequence, xnode: &XNodePtr,
                    eval_env: &mut EvalEnv) -> Result<XSequence, Box<Error>> {
    let mut seq = xseq.clone();
    let mut pred = xnode.clone();
    while ! is_nil_xnode(&pred) {
        // Determine the evaluation order from the node type; anything
        // other than a predicate node is a parser-level impossibility.
        let reverse = match get_xnode_type(&pred) {
            XNodeType::PredicateTop => false,
            XNodeType::PredicateRevTop => true,
            _ => {
                return Err(cant_occur!(
                    "filter_by_predicates: 述語以外のノード: {}",
                    get_xnode_type(&pred).to_string()));
            },
        };
        seq = filter_by_predicate(&seq, &get_left(&pred), reverse, eval_env)?;
        pred = get_right(&pred);
    }
    return Ok(seq);
}
// ---------------------------------------------------------------------
// シーケンスに属する個々のアイテムに対して、ある (ひとつの) 述語を
// 適用してしぼり込み、新しいシーケンスを返す。
//
/// Applies one predicate expression (`xnode`) to every item of `xseq` and
/// returns the sub-sequence of items for which the predicate holds.
///
/// The predicate's value decides membership:
/// - integer => keep the item whose (1-based) context position equals it;
/// - node    => keep the item (a node result counts as true);
/// - boolean => keep the item iff the value is true;
/// - anything else (including non-singleton results) => drop the item.
///
/// `reverse_order` is set for reverse axes: positions then count from the
/// end of the sequence.
fn filter_by_predicate(xseq: &XSequence, xnode: &XNodePtr,
                    reverse_order: bool, eval_env: &mut EvalEnv)
                    -> Result<XSequence, Box<Error>> {
    if is_nil_xnode(xnode) {
        return Err(cant_occur!("filter_by_predicate: xnode is nil"));
    }
    let mut result = new_xsequence();
    // The context size never changes per item: hoist it out of the loop.
    let last = xseq.len();
    for (i, xitem) in xseq.iter().enumerate() {
        // Install the context position/size; positions are 1-based.
        let position = if ! reverse_order { i + 1 } else { last - i };
        let old_position = eval_env.set_position(position);
        let old_last = eval_env.set_last(last);
        // Evaluate the predicate with the i-th item as the context item.
        let val = evaluate_xnode(&new_singleton(xitem), xnode, eval_env)?;
        // Restore the evaluation environment.
        eval_env.set_last(old_last);
        eval_env.set_position(old_position);
        // Decide from the predicate value whether to keep this item.
        let mut do_push = false;
        match val.get_singleton_item() {
            Ok(XItem::XIInteger{value}) => {
                do_push = value == usize_to_i64(position);
            },
            Ok(XItem::XINode{value: _}) => {
                do_push = true;
            },
            Ok(XItem::XIBoolean{value}) => {
                do_push = value;
            },
            _ => {},
        }
        if do_push {
            // push() takes the item by reference; the previous
            // `&xitem.clone()` allocated a clone only to borrow it.
            result.push(xitem);
        }
    }
    return Ok(result);
}
// ---------------------------------------------------------------------
// インライン函数/マップ/配列に、引数を適用する。
// xseq: 引数の値を評価する際、対象とするシーケンス (文脈ノード)。
// curr_xseq: シングルトン。
// XItem: 典型的にはInlineFunction型のXNodePtr。
// また、MapやSquareArrayも函数であって、キーや指標を
// 引数として渡し、値を取り出すことができる。
// このXNodePtrを取り出し、インライン函数として実行する。
// InlineFunction --- ReturnType ------- Param ------- Param ---...
// | | (varname) (varname)
// | | | |
// | (SequenceType) (SequenceType)(SequenceType)
// |
// Expr (FunctionBody) ---...
// |
// ...
// ☆戻り値型も照合すること。
//
// curr_xnode: ArgumentListTop型のXNodePtr。インライン函数に渡す引数並び。
// ArgumentListTop
// |
// ArgumentTop --- ArgumentTop ---...
// | | 第2引数
// | OpLiteral
// | 第1引数
// OpEQ --- (rhs)
// |
// (lhs)
//
// eval_env: 評価環境。
//
/// Applies an argument list to a function-typed singleton: an inline
/// function, a map (key lookup), or an array (index lookup).
///
/// xseq      - context sequence against which argument expressions are
///             evaluated.
/// curr_xseq - singleton holding the callable (InlineFunction xnode,
///             map, or array).
/// curr_xnode - ArgumentListTop xnode; its left chain holds one
///             ArgumentTop per actual argument.
fn apply_argument(xseq: &XSequence, curr_xseq: &XSequence,
                    curr_xnode: &XNodePtr,
                    eval_env: &mut EvalEnv) -> Result<XSequence, Box<Error>> {
    // Evaluate each argument expression against the context sequence.
    let arg_xnode = get_left(&curr_xnode);
    let mut argument_xseq: Vec<XSequence> = vec!{};
    let mut curr_arg_top = arg_xnode.clone();
    while ! is_nil_xnode(&curr_arg_top) {
        let arg_expr = get_left(&curr_arg_top);
        let val = evaluate_xnode(xseq, &arg_expr, eval_env)?;
        argument_xseq.push(val);
        curr_arg_top = get_right(&curr_arg_top);
    }
    // -----------------------------------------------------------------
    // Inline function.
    //
    if let Ok(inline_func_xnode) = curr_xseq.get_singleton_xnodeptr() {
        match get_xnode_type(&inline_func_xnode) {
            XNodeType::InlineFunction => {
                return call_inline_func(&inline_func_xnode,
                            argument_xseq, xseq, eval_env);
            },
            _ => {}
        }
    }
    // -----------------------------------------------------------------
    // Map: the single argument is the key.
    //
    if let Ok(map_item) = curr_xseq.get_singleton_map() {
        let key = argument_xseq[0].get_singleton_item()?;
        if let Some(v) = map_item.map_get(&key) {
            return Ok(v);
        } else {
            return Err(dynamic_error!(
                "map_lookup: key = {}: 値が見つからない。", key));
        }
    }
    // -----------------------------------------------------------------
    // Array: the single argument is the (1-based) index.
    //
    if let Ok(array_item) = curr_xseq.get_singleton_array() {
        let index_item = argument_xseq[0].get_singleton_item()?;
        if let Some(v) = array_item.array_get(&index_item) {
            return Ok(v);
        } else {
            return Err(dynamic_error!(
                "Array index ({}) out of bounds.", index_item));
        }
    }
    return Err(cant_occur!(
        "apply_argument: インライン函数/マップ/配列でない。"));
}
// ---------------------------------------------------------------------
//
/// Executes an inline function: binds the actual arguments to the formal
/// parameters (checking each argument's sequence type), evaluates the
/// function body against `context_xseq`, then removes the bindings.
///
/// TODO: the declared return type (right child of the xnode) is collected
/// but not yet checked against the result.
fn call_inline_func(inline_func_xnode: &XNodePtr,
                argument_xseq: Vec<XSequence>,
                context_xseq: &XSequence,
                eval_env: &mut EvalEnv) -> Result<XSequence, Box<Error>> {
    // -----------------------------------------------------------------
    // Function body (left child of the InlineFunction xnode).
    //
    let func_body_xnode = get_left(&inline_func_xnode);
    // -----------------------------------------------------------------
    // Collect the formal parameter names and their declared types.
    //
    let mut param_names: Vec<String> = vec!{};
    let mut sequence_types: Vec<XNodePtr> = vec!{};
    let return_type = get_right(&inline_func_xnode);
    let mut param_xnode = get_right(&return_type);
    while ! is_nil_xnode(&param_xnode) {
        param_names.push(get_xnode_name(&param_xnode));
        sequence_types.push(get_left(&param_xnode));
        param_xnode = get_right(&param_xnode);
    }
    // -----------------------------------------------------------------
    // Arity check (previously a TODO: a mismatch used to panic on the
    // out-of-bounds `sequence_types[i]` index below).
    //
    if argument_xseq.len() != param_names.len() {
        return Err(type_error!(
            "インライン函数: 引数の個数が合致しない: {}。",
            argument_xseq.len()));
    }
    // -----------------------------------------------------------------
    // Bind each actual argument to its formal parameter, type-checking
    // against the declared sequence type first.
    //
    for (i, val) in argument_xseq.iter().enumerate() {
        if match_sequence_type(&val, &sequence_types[i])? == false {
            return Err(type_error!(
                "インライン函数: 引数の型が合致していない: {}。",
                val.to_string()));
        }
        eval_env.set_var(&param_names[i], &val);
    }
    // -----------------------------------------------------------------
    // Run the function body.
    //
    let value = evaluate_xnode(context_xseq, &func_body_xnode, eval_env)?;
    // -----------------------------------------------------------------
    // Unbind the parameters, innermost (last-bound) first.
    //
    for name in param_names.iter().rev() {
        eval_env.remove_var(name);
    }
    return Ok(value);
}
// ---------------------------------------------------------------------
// 名前付き函数参照を、引数を渡して呼び出す。
//
/// Invokes a named function reference. The xnode name has the form
/// "func_name#arity"; the encoded arity must match the number of
/// arguments actually supplied.
fn call_named_func(func_xnode: &XNodePtr,
                argument_xseq: Vec<XSequence>,
                context_xseq: &XSequence,
                eval_env: &mut EvalEnv) -> Result<XSequence, Box<Error>> {
    // Split "name#arity" into its two components.
    let func_name = get_xnode_name(&func_xnode);
    let parts: Vec<&str> = func_name.split("#").collect();
    let expected_arity = atoi(parts[1]) as usize;
    if argument_xseq.len() != expected_arity {
        return Err(type_error!(
            "名前付き函数参照 ({}): 引数の個数が合致しない: {}。",
            func_name, argument_xseq.len()));
    }
    return evaluate_function(&parts[0], &argument_xseq, context_xseq, eval_env);
}
// ---------------------------------------------------------------------
// 静的函数の部分函数呼び出し。
//
// func_xnode: 部分函数呼び出しの構文木
// 引数のうちArgumentTopは、普通の静的函数と同様、
// 文脈ノードに対してleft以下を評価し、引数として函数に渡す。
// ArgumentPlaceholderである場合はargument_xseq[i]を
// 引数として函数に渡す。
//
// PartialFunctionCall --- ArgumentTop --- ArgumentPlaceholder ---...
// (func_name) | (第2引数)
// ... (第1引数)
//
// argument_xseq: 長さはArgumentPlaceholderの個数と同じ。
// argument_xseq[i]をArgumentPlaceholder部分の引数として
// 函数に渡す。
//
// context_xseq: 文脈シーケンス。
// ArgumentTopである引数は、文脈シーケンスに対して評価する。
// 関数の評価も文脈シーケンスに対しておこなう。
//
// eval_env: 評価環境 (変数の値など)。
//
/// Invokes a partial function call: fixed arguments (ArgumentTop) are
/// evaluated against the context sequence, while each placeholder "?"
/// (ArgumentPlaceholder) consumes the next element of `argument_xseq`,
/// in order.
fn call_partial_func(func_xnode: &XNodePtr,
                argument_xseq: Vec<XSequence>,
                context_xseq: &XSequence,
                eval_env: &mut EvalEnv) -> Result<XSequence, Box<Error>> {
    let mut args_array: Vec<XSequence> = vec!{};
    let mut curr_xnode = get_right(&func_xnode);
    let mut i = 0;
    while ! is_nil_xnode(&curr_xnode) {
        match get_xnode_type(&curr_xnode) {
            XNodeType::ArgumentTop => {
                // Fixed argument: evaluate its expression now.
                let arg = evaluate_xnode(context_xseq, &get_left(&curr_xnode), eval_env)?;
                args_array.push(arg);
            },
            XNodeType::ArgumentPlaceholder => {
                // Placeholder: take the next caller-supplied argument.
                if i >= argument_xseq.len() {
                    return Err(dynamic_error!("函数呼び出しの「?」に渡すべき引数が足りない。"));
                }
                args_array.push(argument_xseq[i].clone());
                i += 1;
            },
            _ => {
                // (message grammar fixed: 「でもでない」 → 「でもない」)
                return Err(cant_occur!("call_partial_func: ArgumentTopでもArgumentPlaceholderでもない。"));
            }
        }
        curr_xnode = get_right(&curr_xnode);
    }
    // evaluate_function only reads the arguments; an immutable borrow
    // suffices (the previous `&mut` was unnecessary).
    return evaluate_function(&get_xnode_name(&func_xnode),
            &args_array, context_xseq, eval_env);
}
// ---------------------------------------------------------------------
// 函数呼び出し。
// インライン函数、名前付き函数参照、部分函数。
//
/// Dispatches a function call to the appropriate handler depending on the
/// callable's xnode type: inline function, named function reference, or
/// partial function call.
pub fn call_function(func_xnode: &XNodePtr,
                argument_xseq: Vec<XSequence>,
                context_xseq: &XSequence,
                eval_env: &mut EvalEnv) -> Result<XSequence, Box<Error>> {
    match get_xnode_type(&func_xnode) {
        XNodeType::InlineFunction => {
            return call_inline_func(func_xnode, argument_xseq, context_xseq, eval_env);
        },
        XNodeType::NamedFunctionRef => {
            return call_named_func(func_xnode, argument_xseq, context_xseq, eval_env);
        },
        XNodeType::PartialFunctionCall => {
            return call_partial_func(func_xnode, argument_xseq, context_xseq, eval_env);
        },
        _ => {
            // Fixed: the diagnostic previously named a nonexistent
            // function "call_h_function".
            return Err(cant_occur!("call_function: XNodeType = {:?}",
                get_xnode_type(&func_xnode)));
        },
    }
}
// ---------------------------------------------------------------------
// シーケンスの型を、
// シーケンス型の定義 (XNodeType::SequenceTypeであるxnodeで表現) と照合する。
//
// [ 79] SequenceType ::= ("empty-sequence" "(" ")")
// | (ItemType OccurenceIndicator?)
// [ 80] OccurrenceIndicator ::= "?" | "*" | "+"
// [ 81] ItemType ::= KindTest
// | ("item" "(" ")")
// | FunctionTest
// | MapTest
// | ArrayTest
// | AtomicOrUnionType
// | ParenthesizedItemType ☆
// [ 82] AtomicOrUnionType ::= EQName
// [102] FunctionTest ::= AnyFunctionTest
// | TypedFunctionTest
//
/// Matches a sequence against a sequence-type definition (an xnode of
/// XNodeType::SequenceType): first the occurrence indicator, then the
/// ItemType, dispatching on the kind of item test.
fn match_sequence_type(xseq: &XSequence, xnode: &XNodePtr) -> Result<bool, Box<Error>> {
    if get_xnode_type(xnode) != XNodeType::SequenceType {
        return Err(cant_occur!(
            "match_sequence_type: xnodeがSequenceTypeでない: {:?}。",
            get_xnode_type(xnode)));
    }
    let type_xnode = get_left(xnode);
    // -----------------------------------------------------------------
    // empty-sequence(): only checks that xseq is empty.
    //
    if get_xnode_type(&type_xnode) == XNodeType::EmptySequenceTest {
        return Ok(xseq.is_empty());
    }
    // -----------------------------------------------------------------
    // Otherwise first check the occurrence indicator (?, *, +, or none);
    // if the item count does not satisfy it, fail immediately.
    //
    if match_occurence(xseq, &get_xnode_name(xnode))? == false {
        return Ok(false);
    }
    // -----------------------------------------------------------------
    // Check each item of the sequence according to the ItemType.
    //
    match get_xnode_type(&type_xnode) {
        XNodeType::KindTest => {                    // e.g. element(name)
            return Ok(match_sequence_kind_test(xseq, &type_xnode));
        },
        XNodeType::ItemTest => {                    // item()
            return Ok(match_sequence_item_test(xseq));
        },
        XNodeType::AnyFunctionTest => {             // function(*)
            return Ok(match_sequence_any_function_test(xseq));
        },
        XNodeType::TypedFunctionTest => {
            return match_sequence_typed_function_test(xseq, &type_xnode);
        },
        XNodeType::ArrayTest => {
            return match_sequence_array_test(xseq, &type_xnode);
        },
        XNodeType::MapTest => {
            return match_sequence_map_test(xseq, &type_xnode);
        },
        XNodeType::AtomicOrUnionType => {
            return Ok(match_sequence_atomic_or_union_type(xseq, &type_xnode));
        },
        XNodeType::ParenthesizedItemType => {
            // Not implemented: always fails.
            return Ok(false);
        },
        _ => {
            return Err(cant_occur!(
                "match_sequence_type: xnodeの左辺値のxnode_typeが想定外: {:?}",
                type_xnode));
        },
    }
}
// ---------------------------------------------------------------------
// シーケンス xseq の型を、シーケンス型の定義 type_xnode と照合する。
// type_xnode が XNodeType::KindTest である場合。
//
// xseq のアイテムすべてについて、NodePtrであり、かつ、そのノード型が
// 指定どおりであることを確かめる。
//
/// KindTest variant of sequence-type matching: every item of `xseq` must
/// be a node, and each node must satisfy the kind test in `type_xnode`.
fn match_sequence_kind_test(xseq: &XSequence, type_xnode: &XNodePtr) -> bool {
    return xseq.iter().all(|xitem| {
        match xitem.as_nodeptr() {
            Some(node) => match_kind_test(&node, type_xnode),
            None => false,
        }
    });
}
// ---------------------------------------------------------------------
// シーケンス xseq の型を、シーケンス型の定義 type_xnode と照合する。
// type_xnode が XNodeType::ItemTest である場合。
//
// xseq の要素すべてについて、アイテムであることを確かめる。
//
/// item() variant of sequence-type matching: every element of `xseq`
/// must be an item.
fn match_sequence_item_test(xseq: &XSequence) -> bool {
    return xseq.iter().all(|xitem| xitem.is_item());
}
// ---------------------------------------------------------------------
// シーケンス xseq の型を、シーケンス型の定義 type_xnode と照合する。
// type_xnode が XNodeType::AnyFunctionTest である場合。
//
// xseq の要素すべてについて、函数 (マップ/配列を含む) であることを確かめる。
//
/// function(*) variant of sequence-type matching: every item must be
/// function-like — an inline function, a named function reference, a
/// partial function call, or (since maps and arrays are functions too)
/// a map or an array.
fn match_sequence_any_function_test(xseq: &XSequence) -> bool {
    for xitem in xseq.iter() {
        let is_func_node = match xitem.get_as_raw_xnodeptr() {
            Ok(xnode) => {
                match get_xnode_type(&xnode) {
                    XNodeType::InlineFunction |
                    XNodeType::NamedFunctionRef |
                    XNodeType::PartialFunctionCall => true,
                    _ => false,
                }
            },
            Err(_) => false,
        };
        let is_map_or_array = xitem.get_as_raw_array().is_ok()
                           || xitem.get_as_raw_map().is_ok();
        if ! (is_func_node || is_map_or_array) {
            return false;
        }
    }
    return true;
}
// ---------------------------------------------------------------------
// シーケンス xseq の型を、シーケンス型の定義 type_xnode と照合する。
// type_xnode が XNodeType::TypedFunctionTest である場合。
//
/// TypedFunctionTest variant of sequence-type matching: every item must
/// be a function (inline function, named function reference, partial
/// function call, map, or array) whose signature is compatible with the
/// declared one — covariant in the return type, contravariant in the
/// parameter types.
fn match_sequence_typed_function_test(xseq: &XSequence, type_xnode: &XNodePtr) -> Result<bool, Box<Error>> {
    // -----------------------------------------------------------------
    // Extract the declared return and parameter types.
    //   signature_xnode[0]: declared return type.
    //   signature_xnode[n]: declared type of the n-th parameter.
    //   signature_xnode.len() - 1: declared number of parameters.
    //
    let mut signature_xnode: Vec<XNodePtr> = vec!{};
    let mut curr = get_right(&type_xnode);
    while ! is_nil_xnode(&curr) {
        signature_xnode.push(get_left(&curr));
        curr = get_right(&curr);
    }
    // -----------------------------------------------------------------
    // Check each item of the sequence.
    //
    for xitem in xseq.iter() {
        let mut is_function = false;
        // -------------------------------------------------------------
        // Function-typed xnode items.
        //
        if let Ok(xnode) = xitem.get_as_raw_xnodeptr() {
            let sig_xnode: XNodePtr;
            match get_xnode_type(&xnode) {
                // -----------------------------------------------------
                // Named function reference: look up the textual
                // signature in the signature table and parse it into a
                // syntax tree.
                //
                XNodeType::NamedFunctionRef => {
                    let func_name = get_xnode_name(&xnode);
                    let signature = get_function_signature(func_name.as_str());
                    if signature == "" {
                        return Ok(false);
                    }
                    let mut lex = Lexer::new(&signature)?;
                    sig_xnode = parse_function_test(&mut lex)?;
                },
                // -----------------------------------------------------
                // Inline function: the xnode itself carries the
                // signature.
                //
                XNodeType::InlineFunction => {
                    sig_xnode = xnode.clone();
                },
                XNodeType::PartialFunctionCall => {
                    sig_xnode = xnode.clone();
                },
                _ => {
                    return Ok(false);
                },
            }
            // Walk the actual signature alongside the declared one:
            // slot 0 is the return type (covariant), slots 1.. are the
            // parameter types (contravariant).
            let mut i = 0;
            let mut curr = get_right(&sig_xnode);
            while ! is_nil_xnode(&curr) {
                if signature_xnode.len() <= i { // too many parameters
                    return Ok(false);
                }
                let type_xnode = get_left(&curr);
                if i == 0 { // return type
                    if ! subtype(&type_xnode, &signature_xnode[i]) {
                        return Ok(false);
                    }
                } else { // parameter type
                    if ! subtype(&signature_xnode[i], &type_xnode) {
                        return Ok(false);
                    }
                }
                curr = get_right(&curr);
                i += 1;
            }
            if signature_xnode.len() != i { // too few parameters
                return Ok(false);
            }
            // -------------------------------------------------
            //
            is_function = true;
        }
        // -------------------------------------------------------------
        // Map items: check the key and value types.
        //
        if let Ok(xseq_map) = xitem.get_as_raw_map() {
            if signature_xnode.len() != 2 {
                return Ok(false);
            }
            let key_type = get_xnode_name(&get_left(&signature_xnode[1]));
            if ! match_map_sequence_type(&xseq_map, &key_type, &signature_xnode[0])? {
                return Ok(false);
            }
            is_function = true;
        }
        // -------------------------------------------------------------
        // Array items: the single parameter must accept an integer,
        // and every element must match the declared return type.
        //
        if let Ok(xseq_array) = xitem.get_as_raw_array() {
            if signature_xnode.len() != 2 {
                return Ok(false);
            }
            let xseq_int_1 = new_singleton_integer(1);
            if ! match_sequence_type(&xseq_int_1, &signature_xnode[1])? {
                return Ok(false);
            }
            if ! match_array_sequence_type(&xseq_array, &signature_xnode[0])? {
                return Ok(false);
            }
            is_function = true;
        }
        // -------------------------------------------------------------
        // None of the cases above applied: the item is not a (typed)
        // function.
        //
        if ! is_function {
            return Ok(false);
        }
    }
    return Ok(true);
}
// ---------------------------------------------------------------------
// シーケンス xseq の型を、シーケンス型の定義 type_xnode と照合する。
// type_xnode が XNodeType::ArrayTest である場合。
//
// xseq の要素すべてについて、配列であって、その要素のシーケンス型も
// 定義に合致していることを確かめる。
//
/// ArrayTest variant of sequence-type matching: every item must be an
/// array whose elements all match the declared element type (left child
/// of `type_xnode`).
fn match_sequence_array_test(xseq: &XSequence, type_xnode: &XNodePtr) -> Result<bool, Box<Error>> {
    let element_type = get_left(type_xnode);
    for xitem in xseq.iter() {
        let xseq_array = match xitem.get_as_raw_array() {
            Ok(a) => a,
            Err(_) => return Ok(false),     // not an array at all
        };
        if ! match_array_sequence_type(&xseq_array, &element_type)? {
            return Ok(false);
        }
    }
    return Ok(true);
}
// ---------------------------------------------------------------------
// 配列 (XSeqArray) の各要素の型が、element_type に合致するか否かを判定する。
//
/// Checks that every element of the array matches the sequence type
/// `element_type`. Array indices are 1-based.
fn match_array_sequence_type(xseq_array: &XSeqArray, element_type: &XNodePtr) -> Result<bool, Box<Error>> {
    let size = xseq_array.array_size();
    for i in 1 ..= size {
        let index = new_xitem_integer(i as i64);
        match xseq_array.array_get(&index) {
            Some(item) => {
                if ! match_sequence_type(&item, element_type)? {
                    return Ok(false);
                }
            },
            None => return Ok(false),
        }
    }
    return Ok(true);
}
// ---------------------------------------------------------------------
// シーケンス xseq の型を、シーケンス型の定義 type_xnode と照合する。
// type_xnode が XNodeType::MapTest である場合。
//
// xseq の要素すべてについて、マップであって、そのキーおよび値のシーケンス型も
// 定義に合致していることを確かめる。
//
/// MapTest variant of sequence-type matching: every item must be a map
/// whose keys match the declared key type (name of the left child) and
/// whose values match the declared value type (right child).
fn match_sequence_map_test(xseq: &XSequence, type_xnode: &XNodePtr) -> Result<bool, Box<Error>> {
    let key_type = get_xnode_name(&get_left(type_xnode));
    let value_type = get_right(type_xnode);
    for xitem in xseq.iter() {
        let xseq_map = match xitem.get_as_raw_map() {
            Ok(m) => m,
            Err(_) => return Ok(false),     // not a map at all
        };
        if ! match_map_sequence_type(&xseq_map, &key_type, &value_type)? {
            return Ok(false);
        }
    }
    return Ok(true);
}
// ---------------------------------------------------------------------
// マップ (XSeqMap) のキーと値の型が、key_type、value_type に
// 合致するか否かを判定する。
//
/// Checks every entry of `xseq_map`: each key's atomic type must derive
/// from `key_type`, and each value must match the sequence type
/// `value_type`.
fn match_map_sequence_type(xseq_map: &XSeqMap,
                    key_type: &str,
                    value_type: &XNodePtr) -> Result<bool, Box<Error>> {
    for key in xseq_map.map_keys().iter() {
        if ! derives_from(key.xs_type().as_str(), key_type) {
            return Ok(false);
        }
        let val = match xseq_map.map_get(&key) {
            Some(v) => v,
            None => return Ok(false),
        };
        if ! match_sequence_type(&val, value_type)? {
            return Ok(false);
        }
    }
    return Ok(true);
}
// ---------------------------------------------------------------------
// シーケンス xseq の型を、シーケンス型の定義 type_xnode と照合する。
// type_xnode が XNodeType::AtomicOrUnionType である場合。
//
/// AtomicOrUnionType variant of sequence-type matching: every item's
/// atomic type must derive from the named type.
fn match_sequence_atomic_or_union_type(xseq: &XSequence, type_xnode: &XNodePtr) -> bool {
    let type_name = get_xnode_name(type_xnode);
    return xseq.iter().all(
        |xitem| derives_from(xitem.xs_type().as_str(), &type_name));
}
// ---------------------------------------------------------------------
// シーケンスに含まれるアイテムの個数 (OccurenceIndicator) を照合する。
// indicator: ? | * | + | ""
//
/// Checks the item count of the sequence against an OccurrenceIndicator:
/// "?" (zero or one), "*" (any number), "+" (one or more),
/// "" (exactly one).
fn match_occurence(xseq: &XSequence, indicator: &str) -> Result<bool, Box<Error>> {
    let len = xseq.len();
    match indicator {
        "?" => Ok(len <= 1),
        "*" => Ok(true),
        "+" => Ok(len >= 1),
        ""  => Ok(len == 1),
        _   => Err(cant_occur!(
                "match_occurence: bad indicator \"{}\".",
                indicator)),
    }
}
// ---------------------------------------------------------------------
// 2.5.6.1 The judgement subtype(A, B)
//
// SequenceType A が、SequenceType B のサブタイプであるか否か判定する。
//
// Row/column indices into SUBTYPE_TBL, derived from a SequenceType by
// subtype_entry():
const S_EMPTY_SEQ: usize = 0;   // empty-sequence() (also xs:error? / xs:error*)
const S_OCCUR_Q: usize = 1;     // occurrence indicator "?"
const S_OCCUR_A: usize = 2;     // occurrence indicator "*"
const S_OCCUR_1: usize = 3;     // no indicator (exactly one)
const S_OCCUR_P: usize = 4;     // occurrence indicator "+"
const S_XS_ERROR: usize = 5;    // xs:error with indicator "" or "+"
// SUBTYPE_TBL[subtype_entry(A)][subtype_entry(B)]:
//   0 => A is not a subtype of B,
//   1 => A is a subtype of B,
//   9 => decided by comparing the ItemTypes (subtype_itemtype).
const SUBTYPE_TBL: [ [i64; 6]; 6 ] = [
    [ 1, 1, 1, 0, 0, 0 ],
    [ 0, 9, 9, 0, 0, 0 ],
    [ 0, 0, 9, 0, 0, 0 ],
    [ 0, 9, 9, 9, 9, 0 ],
    [ 0, 0, 9, 0, 9, 0 ],
    [ 1, 1, 1, 1, 1, 1 ],
];
/// The judgement subtype(A, B) (spec 2.5.6.1): is SequenceType A a
/// subtype of SequenceType B? Table entries 0/1 decide directly; entry 9
/// defers to the ItemType comparison.
fn subtype(a: &XNodePtr, b: &XNodePtr) -> bool {
    let decision = SUBTYPE_TBL[subtype_entry(a)][subtype_entry(b)];
    match decision {
        0 => false,
        1 => true,
        _ => subtype_itemtype(&get_left(a), &get_left(b)),
    }
}
fn subtype_entry(xnode: &XNodePtr) -> usize {
// assert: get_xnode_type(xnode) == XNodeType::SequenceType
let type_xnode = get_left(xnode);
if get_xnode_type(&type_xnode) == XNodeType::EmptySequenceTest {
return S_EMPTY_SEQ;
}
if get_xnode_type(&type_xnode) == XNodeType::AtomicOrUnionType &&
get_xnode_name(&type_xnode) == "xs:error" {
match get_xnode_name(xnode).as_str() {
"?" => return S_EMPTY_SEQ,
"*" => return S_EMPTY_SEQ,
"" => return S_XS_ERROR,
"+" => return S_XS_ERROR,
_ => return S_XS_ERROR,
}
} else {
match get_xnode_name(xnode).as_str() {
"?" => return S_OCCUR_Q,
"*" => return S_OCCUR_A,
"" => return S_OCCUR_1,
"+" => return S_OCCUR_P,
_ => return S_OCCUR_1,
}
}
}
// ---------------------------------------------------------------------
// 2.5.6.2 The judgement subtype-itemtype(Ai, Bi)
//
// ItemType Ai が、ItemType Bi のサブタイプであるか否か判定する。
//
// ai, bi: xnode (XNodeType::SequenceType) の左辺値であるXNodePtr。
//
fn subtype_itemtype(ai: &XNodePtr, bi: &XNodePtr) -> bool {
// -----------------------------------------------------------------
// 1. Ai and Bi are AtomicOrUnionTypes, and derives-from(Ai, Bi) returns true.
// 2. Ai is a pure union type, and every type t in the transitive membership of Ai satisfies subtype-itemType(t, Bi).
// 3. Ai is xs:error and Bi is a generalized atomic type.
//
if get_xnode_type(ai) == XNodeType::AtomicOrUnionType &&
get_xnode_type(bi) == XNodeType::AtomicOrUnionType {
if derives_from(&get_xnode_name(ai), &get_xnode_name(bi)) { // 1. 2.
return true;
}
if get_xnode_name(ai) == "xs:error" { // 3.
return true;
}
}
// -----------------------------------------------------------------
// 4. Bi is item().
//
if get_xnode_type(bi) == XNodeType::ItemTest {
return true;
}
// -----------------------------------------------------------------
// BiがKindTestのとき:
// 5. Bi is node() and Ai is a KindTest.
// 6. Bi is text() and Ai is also text().
// 7. Bi is comment() and Ai is also comment().
// 8. Bi is namespace-node() and Ai is also namespace-node().
// 9. Bi is processing-instruction() and
// Ai is either processing-instruction() or
// processing-instruction(N) for any name N.
// 10. Bi is processing-instruction(Bn), and
// Ai is also processing-instruction(Bn).
//
// --------------------------------------------------------------------- ☆
// 11. Bi is document-node() and Ai is either document-node() or document-node(E) for any ElementTest E.
// 12. Bi is document-node(Be) and Ai is document-node(Ae), and subtype-itemtype(Ae, Be).
// 13. Bi is either element() or element(*), and Ai is an ElementTest.
// 14. Bi is either element(Bn) or element(Bn, xs:anyType?),
// the expanded QName of An equals the expanded QName of Bn,
// and Ai is either element(An) or element(An, T) or element(An, T?) for any type T.
// 15. Bi is element(Bn, Bt), the expanded QName of An equals the expanded QName of Bn, Ai is element(An, At), and derives-from(At, Bt) returns true.
// 16. Bi is element(Bn, Bt?), the expanded QName of An equals the expanded QName of Bn, Ai is either element(An, At) or element(An, At?), and derives-from(At, Bt) returns true.
// 17. Bi is element(*, Bt), Ai is either element(*, At) or element(N, At) for any name N, and derives-from(At, Bt) returns true.
// 18. Bi is element(*, Bt?), Ai is either element(*, At), element(*, At?), element(N, At), or element(N, At?) for any name N, and derives-from(At, Bt) returns true.
// 19. Bi is schema-element(Bn), Ai is schema-element(An), and every element declaration that is an actual member of the substitution group of An is also an actual member of the substitution group of Bn.
// Note:
// The fact that P is a member of the substitution group of Q does not mean that every element declaration in the substitution group of P is also in the substitution group of Q. For example, Q might block substitution of elements whose type is derived by extension, while P does not.
// 20. Bi is either attribute() or attribute(*), and Ai is an AttributeTest.
// 21. Bi is either attribute(Bn) or attribute(Bn, xs:anyType), the expanded QName of An equals the expanded QName of Bn, and Ai is either attribute(An), or attribute(An, T) for any type T.
// 22. Bi is attribute(Bn, Bt), the expanded QName of An equals the expanded QName of Bn, Ai is attribute(An, At), and derives-from(At, Bt) returns true.
// 23. Bi is attribute(*, Bt), Ai is either attribute(*, At), or attribute(N, At) for any name N, and derives-from(At, Bt) returns true.
// 24. Bi is schema-attribute(Bn), the expanded QName of An equals the expanded QName of Bn, and Ai is schema-attribute(An).
//
if get_xnode_type(bi) == XNodeType::KindTest {
match get_xnode_type(&get_left(bi)) {
XNodeType::AnyKindTest => { // 5.
if get_xnode_type(ai) == XNodeType::KindTest {
return true;
}
},
XNodeType::TextTest => { // 6.
if get_xnode_type(ai) == XNodeType::KindTest &&
get_xnode_type(&get_left(ai)) == XNodeType::TextTest {
return true;
}
},
XNodeType::CommentTest => { // 7.
if get_xnode_type(ai) == XNodeType::KindTest &&
get_xnode_type(&get_left(ai)) == XNodeType::CommentTest {
return true;
}
},
XNodeType::NamespaceNodeTest => { // 8.
if get_xnode_type(ai) == XNodeType::KindTest &&
get_xnode_type(&get_left(ai)) == XNodeType::NamespaceNodeTest {
return true;
}
},
XNodeType::PITest => { // 9. 10.
let pi_name = get_xnode_name(bi);
if pi_name == "" {
if get_xnode_type(ai) == XNodeType::KindTest &&
get_xnode_type(&get_left(ai)) == XNodeType::PITest {
return true;
}
} else {
if get_xnode_type(ai) == XNodeType::KindTest &&
get_xnode_type(&get_left(ai)) == XNodeType::PITest &&
get_xnode_name(&get_left(ai)) == pi_name {
return true;
}
}
},
_ => {},
}
}
// -----------------------------------------------------------------
// 25. Bi is function(*), Ai is a FunctionTest.
//
if get_xnode_type(bi) == XNodeType::AnyFunctionTest {
match get_xnode_type(ai) {
XNodeType::AnyFunctionTest |
XNodeType::TypedFunctionTest => {
return true;
},
_ => {},
}
}
// -----------------------------------------------------------------
// 26. Bi is function(Ba_1, Ba_2, ... Ba_N) as Br,
// Ai is function(Aa_1, Aa_2, ... Aa_M) as Ar,
// where
// N (arity of Bi) equals M (arity of Ai);
// subtype(Ar, Br);
// and for values of I between 1 and N, subtype(Ba_I, Aa_I).
//
if get_xnode_type(bi) == XNodeType::TypedFunctionTest &&
get_xnode_type(ai) == XNodeType::TypedFunctionTest {
let mut bi_signature_xnode: Vec<XNodePtr> = vec!{};
let mut curr = get_right(&bi);
while ! is_nil_xnode(&curr) {
bi_signature_xnode.push(get_left(&curr));
curr = get_right(&curr);
}
let mut ai_signature_xnode: Vec<XNodePtr> = vec!{};
let mut curr = get_right(&ai);
while ! is_nil_xnode(&curr) {
ai_signature_xnode.push(get_left(&curr));
curr = get_right(&curr);
}
if bi_signature_xnode.len() == ai_signature_xnode.len() {
let mut is_fail = false;
if ! subtype(&ai_signature_xnode[0], &bi_signature_xnode[0]) {
is_fail = true;
}
for i in 1 .. bi_signature_xnode.len() {
if ! subtype(&bi_signature_xnode[i], &ai_signature_xnode[i]) {
is_fail = true;
}
}
if ! is_fail {
return true;
}
}
}
// -----------------------------------------------------------------
// 27. Ai is map(K, V), for any K and V and Bi is map(*).
// 28. Ai is map(Ka, Va) and Bi is map(Kb, Vb),
// where subtype-itemtype(Ka, Kb) and subtype(Va, Vb).
//
// 27のBiは、map(xs:anyAtomicType, item()*) と同等として
// 構文木が作られているので、28.の場合のみ考えればよい。
// AiとBiそれぞれについて、キーと値のxnodeを取り出し、subtype関係を調べる。
//
// MapTest ------- SequenceType ({ai,bi}_val)
// | ...
// | ({ai,bi}_key)
// AtomicOrUnionType
// (...)
//
if get_xnode_type(ai) == XNodeType::MapTest &&
get_xnode_type(bi) == XNodeType::MapTest {
let ai_key = get_left(&ai);
let ai_val = get_right(&ai);
let bi_key = get_left(&bi);
let bi_val = get_right(&bi);
if subtype_itemtype(&ai_key, &bi_key) &&
subtype(&ai_val, &bi_val) {
return true;
}
}
// -----------------------------------------------------------------
// 29. Ai is map(*)
// (or, because of the transitivity rules, any other map type),
// and Bi is function(*).
//
if get_xnode_type(ai) == XNodeType::MapTest &&
get_xnode_type(bi) == XNodeType::AnyFunctionTest {
return true;
}
// -----------------------------------------------------------------
// 30. Ai is map(*)
// (or, because of the transitivity rules, any other map type),
// and Bi is function(xs:anyAtomicType) as item()*.
// 35. Ai is map(K, V), and Bi is function(xs:anyAtomicType) as V?.
//
// 30のAiがmap(*)の場合、map(xs:anyAtomicType, item()*) と同等として
// 構文木が作られているので、特別扱いする必要はない。
// AiとBiそれぞれについて、キーと値のxnodeを取り出し、subtype関係を調べる。
//
// MapTest ------- SequenceType (ai_val)
// | ...
// | (ai_key)
// AtomicOrUnionType
// (...)
//
// TypedFunctionTest --- ReturnType ---------------- Param
// | |
// SequenceType (bi_val) SequenceType
// |
// | (bi_key)
// AtomicOrUnionType
// (...)
//
if get_xnode_type(ai) == XNodeType::MapTest &&
get_xnode_type(bi) == XNodeType::TypedFunctionTest {
let ai_key = get_left(&ai); // AtomicOrUnionType
let ai_val = get_right(&ai); // SequenceType
let ret_xnode = get_right(&bi); // ReturnType
let bi_val = get_left(&ret_xnode); // SequenceType
let param_xnode = get_right(&ret_xnode); // Param
let seq_xnode = get_left(¶m_xnode); // SequenceType
let bi_key = get_left(&seq_xnode); // AtomicOrUnionType
if subtype_itemtype(&ai_key, &bi_key) &&
subtype(&ai_val, &bi_val) {
return true;
}
}
// -----------------------------------------------------------------
// 31. Ai is array(X) and Bi is array(*).
// 32. Ai is array(X) and Bi is array(Y), and subtype(X, Y) is true.
//
// 31.のarray(*)はarray(item()*) と同等として
// 構文木が作られているので、32の場合のみ考えればよい。
// AiとBiそれぞれについて、要素のxnodeを取り出し、subtype関係を調べる。
//
// ArrayTest
// |
// | ({ai,bi}_elem)
// SequenceType
// ...
//
if get_xnode_type(ai) == XNodeType::ArrayTest &&
get_xnode_type(bi) == XNodeType::ArrayTest {
let ai_elem = get_left(&ai);
let bi_elem = get_left(&bi);
if subtype(&ai_elem, &bi_elem) {
return true;
}
}
// -----------------------------------------------------------------
// 33. Ai is array(*)
// (or, because of the transitivity rules, any other array type)
// and Bi is function(*).
//
if get_xnode_type(ai) == XNodeType::ArrayTest &&
get_xnode_type(bi) == XNodeType::AnyFunctionTest {
return true;
}
// -----------------------------------------------------------------
// 34. Ai is array(*)
// (or, because of the transitivity rules, any other array type)
// and Bi is function(xs:integer) as item()*.
// 36. Ai is array(X) and Bi is function(xs:integer) as X.
//
// 34のAiがarray(*)の場合、array(item()*) と同等として
// 構文木が作られているので、特別扱いする必要はない。
// AiとBiそれぞれについて、要素のxnodeを取り出し、subtype関係を調べる。
//
if get_xnode_type(ai) == XNodeType::ArrayTest &&
get_xnode_type(bi) == XNodeType::TypedFunctionTest {
let ai_elem = get_left(&ai); // SequenceType
let ret_xnode = get_right(&bi); // ReturnType
let bi_elem = get_left(&ret_xnode); // SequenceType
let param_xnode = get_right(&ret_xnode); // Param
let seq_xnode = get_left(¶m_xnode); // SequenceType
let bi_arg = get_left(&seq_xnode); // AtomicOrUnitonType
if derives_from("xs:integer", &get_xnode_name(&bi_arg)) &&
subtype(&ai_elem, &bi_elem) {
return true;
}
}
// -----------------------------------------------------------------
// 以上のどの条件も満たさない場合: サブタイプではない。
//
return false;
}
// ---------------------------------------------------------------------
//
fn derives_from(ai: &str, bi: &str) -> bool {
let derives_from_map: HashMap<&str, &str> = [
( "xs:integer", "xs:decimal" ),
( "xs:decimal", "xs:numeric" ),
( "xs:double", "xs:numeric" ),
// numericは、実際には union {decimal, float, double}
( "xs:numeric", "xs:anyAtomicType" ),
( "xs:string", "xs:anyAtomicType" ),
( "xs:anyURI", "xs:string" ),
// anyURIは常にstringに昇格可能
( "xs:boolean", "xs:anyAtomicType" ),
( "xs:untypedAtomic", "xs:anyAtomicType" ),
( "xs:anyAtomicType", "xs:anySimpleType" ),
( "xs:anySimpleType", "xs:anyType" ),
( "xs:untyped", "xs:anyType" ),
].iter().cloned().collect();
let mut t_type = String::from(ai);
loop {
if t_type.as_str() == bi {
return true;
}
match derives_from_map.get(t_type.as_str()) {
Some(s) => t_type = String::from(*s),
None => return false,
}
}
}
// ---------------------------------------------------------------------
// XNodeType::{Map,SquareArray,CurlyArray} が指す内容を
// XItem::{XIMap,XIArray} に変換する。
//
fn convert_xnode_to_map_array(xnode: &XNodePtr,
context_xseq: &XSequence,
eval_env: &mut EvalEnv) -> Result<XItem, Box<Error>> {
match get_xnode_type(&xnode) {
XNodeType::Map => {
// ---------------------------------------------------------
// マップの実体を取り出す。
// Map
// |
// MapConsruct -------------- MapConstruct ---...
// | |
// MapEntry --- (value) MapEntry --- (value)
// | |
// (key) (key)
//
let map_construct_xnode = get_left(&xnode);
let mut curr = map_construct_xnode.clone();
let mut vec_item: Vec<(XItem, XSequence)> = vec!{};
while ! is_nil_xnode(&curr) {
if get_xnode_type(&curr) != XNodeType::MapConstruct {
return Err(cant_occur!(
"convert_xnode_to_map_array[Map]: xnode = {}, not MapConstruct",
get_xnode_type(&curr)));
}
let map_entry_xnode = get_left(&curr);
if get_xnode_type(&map_entry_xnode) != XNodeType::MapEntry {
return Err(cant_occur!(
"convert_xnode_to_map_array[Map]: xnode = {}, not MapEntry",
get_xnode_type(&map_entry_xnode)));
}
let map_key_xnode = get_left(&map_entry_xnode);
let key = evaluate_xnode(context_xseq, &map_key_xnode, eval_env)?.get_singleton_item()?;
let map_value_xnode = get_right(&map_entry_xnode);
let val = evaluate_xnode(context_xseq, &map_value_xnode, eval_env)?;
vec_item.push((key, val));
curr = get_right(&curr);
}
return Ok(new_xitem_map(&vec_item));
},
XNodeType::SquareArray => {
// ---------------------------------------------------------
// 配列の実体を取り出す。
// SquareArray
// |
// ArrayEntry --- ArrayEntry ---...
// | |
// (expr) (expr)
//
let array_entry_xnode = get_left(&xnode);
let mut curr = array_entry_xnode.clone();
let mut vec_item: Vec<XSequence> = vec!{};
while ! is_nil_xnode(&curr) {
if get_xnode_type(&curr) != XNodeType::ArrayEntry {
return Err(cant_occur!(
"convert_xnode_to_map_array[SquareArray]: xnode = {}, not ArrayEntry",
get_xnode_type(&curr)));
}
let value_xnode = get_left(&curr);
let val = evaluate_xnode(context_xseq, &value_xnode, eval_env)?;
vec_item.push(val);
curr = get_right(&curr);
}
return Ok(new_xitem_array(&vec_item));
},
XNodeType::CurlyArray => {
// ---------------------------------------------------------
// 配列の実体を取り出す。
//
let array_entry_xnode = get_left(&xnode);
let val_xseq = evaluate_xnode(context_xseq, &array_entry_xnode, eval_env)?;
let mut vec_item: Vec<XSequence> = vec!{};
for item in val_xseq.iter() {
vec_item.push(new_singleton(item));
}
return Ok(new_xitem_array(&vec_item));
},
_ => {
return Err(cant_occur!(
"convert_xnode_to_map_array: xnode = {}",
get_xnode_type(&xnode)));
},
}
}
// =====================================================================
//
#[cfg(test)]
mod test {
// use super::*;
use xpath_impl::helpers::compress_spaces;
use xpath_impl::helpers::subtest_eval_xpath;
use xpath_impl::helpers::subtest_xpath;
// -----------------------------------------------------------------
// Comma
//
#[test]
fn test_comma() {
let xml = compress_spaces(r#"
<root img="basic" base="base">
<a img="a" />
<b img="b1" />
<b img="b2" />
<c img="c" />
<d img="d" />
</root>
"#);
subtest_eval_xpath("comma", &xml, &[
( "//a, //c", r#"(<a img="a">, <c img="c">)"# ),
( "2, 3", r#"(2, 3)"# ),
( "(2, 3)", r#"(2, 3)"# ),
( "2, 1 + 3", r#"(2, 4)"# ),
( "(2, (3, 4))", r#"(2, 3, 4)"# ),
]);
}
// -----------------------------------------------------------------
// if ( Expr ) then ExprSingle else ExprSingle
//
#[test]
fn test_if_expr() {
let xml = compress_spaces(r#"
<root base="base">
<prod discount="discount">
<wholesale id="wa">wholesaled apple</wholesale>
<wholesale id="wb">wholesaled banana</wholesale>
<retail id="ra">retailed apple</retail>
<retail id="rb">retailed banana</retail>
</prod>
<item>
<wholesale id="wa">wholesaled apple</wholesale>
<wholesale id="wb">wholesaled banana</wholesale>
<retail id="ra">retailed apple</retail>
<retail id="rb">retailed banana</retail>
</item>
</root>
"#);
subtest_eval_xpath("if_expr", &xml, &[
( "if (1 = 1) then 3 else 5", "3" ),
( "if (1 = 9) then 3 else 5", "5" ),
( "if (prod/@discount) then prod/wholesale else prod/retail",
r#"(<wholesale id="wa">, <wholesale id="wb">)"# ),
( "if (item/@discount) then item/wholesale else item/retail",
r#"(<retail id="ra">, <retail id="rb">)"# ),
]);
}
// -----------------------------------------------------------------
// for $VarName in ExprSingle return ExprSingle
//
#[test]
fn test_for_expr() {
let xml = compress_spaces(r#"
<root base="base">
<a v="x"/>
<a v="y"/>
<a v="z"/>
</root>
"#);
subtest_eval_xpath("for_expr", &xml, &[
( "for $x in 3 to 5 return $x * 2", "(6, 8, 10)" ),
( "for $x in 3 to 5, $y in 2 to 3 return $x * $y", "(6, 9, 8, 12, 10, 15)" ),
( "/root/a/@v", r#"(v="x", v="y", v="z")"# ),
( "for $aa in /root/a return $aa", r#"(<a v="x">, <a v="y">, <a v="z">)"# ),
( "for $aa in /root/a return $aa/@v", r#"(v="x", v="y", v="z")"# ),
]);
}
// -----------------------------------------------------------------
// some $VarName in ExprSingle satisfies ExprSingle
//
#[test]
fn test_some_expr() {
let xml = compress_spaces(r#"
<root base="base">
<a v="x"/>
<a v="y"/>
<a v="z"/>
</root>
"#);
subtest_eval_xpath("some_expr", &xml, &[
( "some $x in 3 to 5 satisfies $x mod 2 = 0", "true" ),
( "some $x in 3 to 5 satisfies $x mod 6 = 0", "false" ),
( "some $x in 1 to 2, $y in 2 to 3 satisfies $x + $y = 5", "true" ),
( "some $x in 1 to 2, $y in 2 to 3 satisfies $x + $y = 7", "false" ),
( r#"some $a in /root/a satisfies $a/@v = "y""#, "true" ),
( r#"some $a in /root/a satisfies $a/@v = "w""#, "false" ),
]);
}
// -----------------------------------------------------------------
// every $VarName in ExprSingle satisfies ExprSingle
//
#[test]
fn test_every_expr() {
let xml = compress_spaces(r#"
<root base="base">
<a v="x"/>
<a v="y"/>
<a v="z"/>
</root>
"#);
subtest_eval_xpath("every_expr", &xml, &[
( "every $x in 3 to 5 satisfies $x > 2", "true" ),
( "every $x in 3 to 5 satisfies $x > 3", "false" ),
( "every $x in 1 to 2, $y in 2 to 3 satisfies $x + $y > 2", "true" ),
( "every $x in 1 to 2, $y in 2 to 3 satisfies $x + $y > 4", "false" ),
( r#"every $a in /root/a satisfies $a/@v != "w""#, "true" ),
( r#"every $a in /root/a satisfies $a/@v = "y""#, "false" ),
]);
}
// -----------------------------------------------------------------
// castable as
//
#[test]
fn test_castable_as() {
let xml = compress_spaces(r#"
<root base="base">
<a v="x"/>
<a v="y"/>
<a v="z"/>
</root>
"#);
subtest_eval_xpath("castable_as", &xml, &[
( "100 castable as string", "true" ),
( "100 castable as string?", "true" ),
( r#"/root/empty castable as string"#, "false" ),
( r#"/root/empty castable as string?"#, "true" ),
( r#"/root/a[@v="x"] castable as string"#, "true" ),
( r#"/root/a[@v="x"] castable as string?"#, "true" ),
( r#"/root/a castable as string"#, "false" ),
( r#"/root/a castable as string?"#, "false" ),
]);
}
// -----------------------------------------------------------------
// cast as
//
#[test]
fn test_cast_as() {
let xml = compress_spaces(r#"
<root base="base">
<a v="x"/>
<a v="y"/>
<a v="z"/>
</root>
"#);
subtest_eval_xpath("cast_as", &xml, &[
( r#"/root/empty cast as string?"#, "()" ),
( r#"/root/a[@v="x"] castable as string"#, "true" ),
]);
}
// -----------------------------------------------------------------
// 軸: following
//
#[test]
fn test_axis_following() {
let xml = compress_spaces(r#"
<?xml version='1.0' encoding='UTF-8'?>
<root>
<foo img="上">
<foo img="甲"/>
<baa img="乙"/>
<foo img="上上" base="base">
<foo img="丙"/>
<baa img="丁"/>
</foo>
<foo img="戊"/>
</foo>
<foo img="下">
<baa img="己"/>
<foo img="庚"/>
<foo img="下下">
<baa img="辛"/>
<foo img="壬"/>
</foo>
<baa img="癸"/>
</foo>
</root>
"#);
subtest_xpath("axis_following", &xml, false, &[
( "following::*", "戊下己庚下下辛壬癸" ),
( "following::foo", "戊下庚下下壬" ),
( "following::foo[1]", "戊" ),
( "following::baa", "己辛癸" ),
( "following::baa[1]", "己" ),
]);
}
// -----------------------------------------------------------------
// 軸: preceding
//
#[test]
fn test_axis_preceding() {
let xml = compress_spaces(r#"
<?xml version='1.0' encoding='UTF-8'?>
<root>
<foo img="上">
<foo img="甲"/>
<baa img="乙"/>
<foo img="上上">
<foo img="丙"/>
<baa img="丁"/>
</foo>
<foo img="戊"/>
</foo>
<foo img="下">
<baa img="己"/>
<foo img="庚" base="base"/>
<baa img="辛"/>
<foo img="壬"/>
<baa img="癸"/>
</foo>
</root>
"#);
subtest_xpath("axis_preceding", &xml, false, &[
( "preceding::*", "上甲乙上上丙丁戊己" ),
( "preceding::foo", "上甲上上丙戊" ),
( "preceding::foo[1]", "戊" ),
( "preceding::baa", "乙丁己" ),
( "preceding::baa[1]", "己" ),
]);
}
// -----------------------------------------------------------------
// element() | element(*) | element(sel)
// element(sel, type_anno) | element(sel, type_anno?)
//
#[test]
fn test_kind_test_element() {
let xml = compress_spaces(r#"
<root>
<a base="base">
<sel img="z0"/>
<sel img="z1"/>
<sel img="z2" xsi:nil="true" />
<alt img="a0"/>
<alt img="a1"/>
</a>
</root>
"#);
subtest_eval_xpath("kind_test_element", &xml, &[
( "count(child::element())", "5" ),
( "count(child::element(*))", "5" ),
( "count(child::element(sel))", "3" ),
( "count(child::element(sel, anyType))", "2" ),
( "count(child::element(sel, anyType?))", "3" ),
( "count(child::element(sel, bad))", "0" ),
( "count(child::element(sel, bad?))", "0" ),
]);
}
// -----------------------------------------------------------------
// attribute() | attribute(*) | attribute(a)
// attribute(sel, type_anno)
//
#[test]
fn test_kind_test_attribute() {
let xml = compress_spaces(r#"
<root>
<a base="base">
<sel a="1" b="2"/>
</a>
</root>
"#);
subtest_eval_xpath("kind_test_attribute", &xml, &[
( "sel/attribute::attribute()", r#"(a="1", b="2")"# ),
( "sel/attribute::attribute(*)", r#"(a="1", b="2")"# ),
( "sel/attribute::attribute(a)", r#"a="1""# ),
( "sel/attribute::attribute(a, anyType)", r#"a="1""# ),
( "sel/attribute::attribute(a, BAD)", r#"()"# ),
]);
}
// -----------------------------------------------------------------
// processing-instruction()
//
#[test]
fn test_kind_test_processing_instruction() {
let xml = compress_spaces(r#"
<?xml version='1.0' encoding='UTF-8'?>
<?style-sheet alt="1" src="sample.css"?>
<?style-sheet alt="2" src="default.css"?>
<?pseudo-style-sheet src="sample.css"?>
<xroot>
<a base="base">
<sel img="z0" ans="0" />
<sel img="z1" ans="1" />
<sel img="z2" ans="2" />
<sel img="z3" ans="3" />
<sel img="z4" ans="4" />
</a>
</xroot>
"#);
subtest_eval_xpath("kind_test_processing_instruction", &xml, &[
( "count(/child::processing-instruction())", "3" ),
( "count(/child::processing-instruction('style-sheet'))", "2" ),
]);
}
// -----------------------------------------------------------------
// ContextItemExpr
//
#[test]
fn test_context_item() {
let xml = compress_spaces(r#"
<root>
<a base="base">
<b id="b"/>
</a>
</root>
"#);
subtest_eval_xpath("context_item", &xml, &[
( ".", r#"<a base="base">"# ),
( "./b", r#"<b id="b">"# ),
( "self::a", r#"<a base="base">"# ),
( r#"self::a[@base="base"]"#, r#"<a base="base">"# ),
( "self::b", "()" ),
// 「self」と明記した場合はAxisSelfであり、
// NodeTestを記述できる。
( ".::a", "Syntax Error in XPath" ),
( ".a", "Syntax Error in XPath" ),
// 「.」と書き、さらにNodeTestを記述する構文はない。
( r#".[name()="a"]"#, r#"<a base="base">"# ),
( r#".[@base="base"]"#, r#"<a base="base">"# ),
// しかし述語は記述できる。
( "(1 to 20)[. mod 5 eq 0]", "(5, 10, 15, 20)" ),
]);
}
// -----------------------------------------------------------------
// OperatorConcat
//
#[test]
fn test_operator_concat() {
let xml = compress_spaces(r#"
<root>
<a base="base">
<b id="b"/>
</a>
</root>
"#);
subtest_eval_xpath("operator_concat", &xml, &[
( r#" "あい" || "うえ" "#, r#""あいうえ""# ),
( r#" 123 || 456 || 789 "#, r#""123456789""# ),
]);
}
// -----------------------------------------------------------------
// OperatorMap
//
#[test]
fn test_operator_map() {
let xml = compress_spaces(r#"
<root>
<z>
<a base="base">
<b>b1</b>
<b>b2</b>
</a>
</z>
</root>
"#);
subtest_eval_xpath("operator_map", &xml, &[
( r#"sum((1, 3, 5)!(.*.)) "#, r#"35"# ),
( r#"string-join((1 to 4) ! "*") "#, r#""****""# ),
( r#"string-join((1 to 4) ! "*", ".") "#, r#""*.*.*.*""# ),
( r#"child::b/string()!concat("id-", .)"#, r#"("id-b1", "id-b2")"# ),
( r#"string-join(ancestor::*!name(), '/')"#, r#""root/z""# ),
]);
}
// -----------------------------------------------------------------
// ArrowExpr
//
#[test]
fn test_arrow_expr() {
let xml = compress_spaces(r#"
<root>
</root>
"#);
subtest_eval_xpath("arrow_expr", &xml, &[
( r#" 'aBcDe' => upper-case() => substring(2, 3)"#, r#""BCD""# ),
( "let $f := function($a) { $a * $a } return 5 => $f() ", "25" ),
]);
}
// -----------------------------------------------------------------
// LetExpr
//
#[test]
fn test_let_expr() {
let xml = compress_spaces(r#"
<root>
<z>
<a base="base">
<b>b1</b>
<b>b2</b>
</a>
</z>
</root>
"#);
subtest_eval_xpath("let_expr", &xml, &[
( r#"let $x := 4, $y := 3 return $x + $y"#, r#"7"# ),
( r#"let $x := 4, $y := $x * 2 return $x + $y"#, r#"12"# ),
]);
}
// -----------------------------------------------------------------
// InlineFunction
//
#[test]
fn test_inline_function() {
let xml = compress_spaces(r#"
<root>
</root>
"#);
subtest_eval_xpath("inline_function", &xml, &[
( "let $f := function() { 4 } return $f() ", "4" ),
( "let $f := function($n as xs:integer) { $n * 3 } return $f(5) ", "15" ),
( r#"let $x := function ($m as integer, $n as integer) { ($m + $n) * 3 } return $x(2, 4) "#, r#"18"# ),
( "for-each(1 to 4, function($x as xs:integer) { $x * $x })", "(1, 4, 9, 16)" ),
( "for-each(1 to 4, function($x as node()) { $x })", "Type Error" ),
]);
}
// -----------------------------------------------------------------
// NamedFunctionRef
//
#[test]
fn test_named_function_ref() {
let xml = compress_spaces(r#"
<root>
</root>
"#);
subtest_eval_xpath("named_function_ref", &xml, &[
( r#"for-each(("john", "jane"), fn:string-to-codepoints#1)"#,
"(106, 111, 104, 110, 106, 97, 110, 101)" ),
]);
}
// -----------------------------------------------------------------
// PartialFunctionCall / ArgumentPlaceholder
//
#[test]
fn test_partial_function_call() {
let xml = compress_spaces(r#"
<root>
</root>
"#);
subtest_eval_xpath("partial_function_call", &xml, &[
( r#"for-each(("a", "b"), fn:starts-with(?, "a")) "#,
"(true, false)" ),
]);
}
// -----------------------------------------------------------------
// Map
//
#[test]
fn test_map_lookup() {
let xml = compress_spaces(r#"
<root>
</root>
"#);
subtest_eval_xpath("map_lookup", &xml, &[
( r#"
let $week := map {
"Su" : "Sunday",
"Mo" : "Monday"
} return $week("Su")
"#, r#""Sunday""# ),
( r#"
let $bk := map {
"a" : map {
"a1" : "A1",
"a2" : "A2"
},
"b" : map {
"b1" : "B1",
"b2" : "B2"
}
} return $bk("a")("a2")
"#, r#""A2""# ),
]);
}
// -----------------------------------------------------------------
// Array
//
#[test]
fn test_array_lookup() {
let xml = compress_spaces(r#"
<root>
</root>
"#);
subtest_eval_xpath("array_lookup", &xml, &[
( r#"[ 1, 3, 5, 7 ](4)"#, "7" ),
( r#"[ [1, 2, 3], [4, 5, 6]](2)"#, "[4, 5, 6]" ),
( r#"[ [1, 2, 3], [4, 5, 6]](2)(2)"#, "5" ),
( r#"array{ (1), (2, 3), (4, 5) }(4)"#, "4" ),
]);
}
// -----------------------------------------------------------------
// UnaryLookup
//
#[test]
fn test_unary_lookup() {
let xml = compress_spaces(r#"
<root>
</root>
"#);
subtest_eval_xpath("unary_lookup", &xml, &[
// NCName
( r#"
map {
"Su" : "Sunday",
"Mo" : "Monday"
}[.("Su") = "Sunday"]
"#, r#"{"Su" => "Sunday", "Mo" => "Monday"}"# ),
( r#"
map {
"Su" : "Sunday",
"Mo" : "Monday"
}[?Su = "Sunday"]
"#, r#"{"Su" => "Sunday", "Mo" => "Monday"}"# ),
// NCName
( r#"
map {
"Su" : "Sunday",
"Mo" : "Monday"
} ! ?Su = "Sunday"
"#, r#"true"# ),
( r#"
map {
"Su" : "Sunday",
"Mo" : "Monday"
} ! ?Su = "Monday"
"#, r#"false"# ),
// Wildcard
( r#"
map {
"Su" : "Sunday",
"Mo" : "Monday"
} ! (for $k in map:keys(.) return .($k))
"#, r#"("Sunday", "Monday")"# ),
( r#"
map {
"Su" : "Sunday",
"Mo" : "Monday"
} ! ?*
"#, r#"("Sunday", "Monday")"# ),
// map {...} ! ?*
// ParenthesizedExpr
( r#"
map {
"Su" : "Sunday",
"Mo" : "Monday"
} ! ?("Mo", "Su")
"#, r#"("Monday", "Sunday")"# ),
// IntegerLiteral
( r#"[ 1, 3, 5, 7 ][?3 = 5]"#, "[1, 3, 5, 7]" ),
( r#"[ 1, 3, 5, 7 ][?3 = 10]"#, "()" ),
// Wildcard
( r#"[ 1, 3, 5, 7 ] ! ?*"#, "(1, 3, 5, 7)" ),
// ParenthesizedExpr
( r#"[ 1, 3, 5, 7 ] ! ?(2 to 4)"#, "(3, 5, 7)" ),
]);
}
// -----------------------------------------------------------------
// Postfix Lookup
//
#[test]
fn test_postfix_lookup() {
let xml = compress_spaces(r#"
<root>
</root>
"#);
subtest_eval_xpath("postfix_lookup", &xml, &[
( r#"map { "Su" : "Sunday", "Mo" : "Monday" }?Su"#, r#""Sunday""# ),
( r#"map { 0: "F", 1: "T" }?1 "#, r#""T""# ),
( r#"[4, 5, 6]?2"#, r#"5"# ),
( r#"[4, 5, 6]?*"#, r#"(4, 5, 6)"# ),
( r#" ([1, 2, 3], [4, 5, 6])?2 "#, "(2, 5)" ),
]);
}
// -----------------------------------------------------------------
// instance of
//
#[test]
fn test_instance_of() {
let xml = compress_spaces(r#"
<root>
<elem base="base"/>
</root>
"#);
subtest_eval_xpath("instance_of", &xml, &[
( r#"() instance of empty-sequence() "#, "true" ),
( r#"7 instance of empty-sequence() "#, "false" ),
// AtomicOrUnionType
( r#"5 instance of xs:integer "#, "true" ),
( r#"(5, 7) instance of xs:integer+ "#, "true" ),
( r#"(5, 7.3) instance of xs:decimal+ "#, "true" ),
( r#"(5, 7) instance of xs:numeric+ "#, "true" ),
( r#"(5, "ss") instance of xs:anyAtomicType+ "#, "true" ),
( r#" . instance of element() + "#, "true" ),
// ArrayTest
( r#" [1, 2] instance of array(*) "#, "true" ),
( r#" [1, 2] instance of array(xs:integer) "#, "true" ),
( r#" [1, 2] instance of array(xs:string) "#, "false" ),
( r#" [(1, 2), 2] instance of array(xs:integer) "#, "false" ),
( r#" [(1, 2), 2] instance of array(xs:integer+) "#, "true" ),
( r#" [[1, 2], [2]] instance of array(array(xs:integer)) "#, "true" ),
// MapTest
( r#" map{"a": 1, "b": "x"} instance of map(*) "#, "true" ),
( r#" map{"a": 1, "b": 2} instance of map(string, integer) "#, "true" ),
( r#" map{"a": 1, "b": 2} instance of map(string, string) "#, "false" ),
( r#" map{"a": [1], "b": [2]} instance of map(string, array(integer)) "#, "true" ),
]);
}
// -----------------------------------------------------------------
// instance of function()
//
#[test]
fn test_instance_of_function() {
let xml = compress_spaces(r#"
<root>
<elem base="base"/>
</root>
"#);
subtest_eval_xpath("instance_of_function", &xml, &[
// AnyFunctionTest
( "7 instance of function(*) ", "false" ),
( "[1, 2] instance of function(*) ", "true" ),
( r#"map{"a": 1} instance of function(*) "#, "true" ),
( "fn:string-to-codepoints#1 instance of function(*)", "true" ),
( "function($n as xs:integer) { $n * 3 } instance of function(*) ", "true" ),
( "let $f := function($n as xs:integer) { $n * 3 } return $f instance of function(*) ", "true" ),
( r#"fn:starts-with(?, "a") instance of function(*) "#, "true" ),
// TypedFunctionTest
( "7 instance of function(integer) as integer", "false" ),
( "[1, 2] instance of function(integer) as integer", "true" ),
( "[1, 2] instance of function(integer) as string", "false" ),
( r#"map{"a": 1} instance of function(string) as integer"#, "true" ),
( r#"map{"a": 1} instance of function(string) as string"#, "false" ),
( r#"map{"a": 1} instance of function(integer) as integer"#, "false" ),
// TypedFunctionTest (InlineFunction)
( "function($n as xs:integer) as xs:integer { $n * 3 } instance of function(xs:integer) as xs:integer", "true" ),
( "function($n as xs:integer) as xs:integer { $n * 3 } instance of function(xs:integer) as xs:anyAtomicType", "true" ),
// TypedFunctionTest (InlineFunction): 引数の不一致
( "function($n as xs:numeric) as xs:numeric { $n * 3 } instance of function(xs:anyAtomicType) as xs:integer", "false" ),
// TypedFunctionTest (InlineFunction): 引数の個数の不一致
( "function($n as xs:numeric) as xs:numeric { $n * 3 } instance of function(xs:integer, xs:integer) as xs:integer", "false" ),
( "function($n as xs:numeric) as xs:numeric { $n * 3 } instance of function() as xs:integer", "false" ),
// TypedFunctionTest (NamedFunctionRef):
( "fn:abs#1 instance of function(numeric) as numeric?", "true" ),
( "fn:abs#1 instance of function(integer) as numeric?", "true" ),
( "fn:abs#1 instance of function(numeric) as integer?", "false" ),
// TypedFunctionTest (NamedFunctionRef): FunctionTest
( "fn:filter#2 instance of function(item()*, function(item()) as boolean) as item()*", "true" ),
( "fn:filter#2 instance of function(integer*, function(item()) as boolean) as item()*", "true" ),
( "fn:filter#2 instance of function(item()*, function(integer) as integer) as item()*", "false" ),
// TypedFunctionTest (NamedFunctionRef): MapTest
( "map:size#1 instance of function(map(*)) as integer", "true" ),
( "map:size#1 instance of function(map(string, integer)) as integer", "true" ),
// TypedFunctionTest (NamedFunctionRef): ArrayTest
( "array:size#1 instance of function(array(*)) as integer", "true" ),
]);
}
// -----------------------------------------------------------------
// subtype_itemtype (map)
//
#[test]
fn test_subtype_itemtype_map() {
let xml = compress_spaces(r#"
<root>
<elem base="base"/>
</root>
"#);
subtest_eval_xpath("subtype_itemtype_map", &xml, &[
// 27. Ai is map(K, V), for any K and V and Bi is map(*).
( r#"function() as map(*) { "a" }
instance of
function() as map(*)"#, "true" ),
( r#"function() as map(string, string) { "a" }
instance of
function() as map(*)"#, "true" ),
// 28. Ai is map(Ka, Va) and Bi is map(Kb, Vb),
// where subtype-itemtype(Ka, Kb) and subtype(Va, Vb).
( r#"function() as map(string, integer) { "a" }
instance of
function() as map(anyAtomicType, decimal)"#, "true" ),
( r#"function() as map(anyAtomicType, decimal) { "a" }
instance of
function() as map(string, integer)"#, "false" ),
// 29. Ai is map(*) (or any other map type),
// and Bi is function(*).
( r#"function() as map(*) { "a" }
instance of
function() as function(*)"#, "true" ),
( r#"function() as map(string, integer) { "a" }
instance of
function() as function(*)"#, "true" ),
// 30. Ai is map(*) (or any other map type),
// and Bi is function(xs:anyAtomicType) as item()*.
( r#"function() as map(*) { "a" }
instance of
function() as function(xs:anyAtomicType) as item()*"#, "true" ),
( r#"function() as map(string, string) { "a" }
instance of
function() as function(xs:anyAtomicType) as item()*"#, "true" ),
( r#"function() as map(string, string) { "a" }
instance of
function() as function(string) as string"#, "true" ),
// 35. Ai is map(K, V), and Bi is function(xs:anyAtomicType) as V?.
( r#"function() as map(integer, string) { "a" }
instance of
function() as function(xs:anyAtomicType) as string?"#, "true" ),
]);
}
// -----------------------------------------------------------------
// subtype_itemtype (array)
//
#[test]
fn test_subtype_itemtype_array() {
let xml = compress_spaces(r#"
<root>
<elem base="base"/>
</root>
"#);
subtest_eval_xpath("subtype_itemtype_array", &xml, &[
// 31. Ai is array(X) and Bi is array(*).
( r#"function() as array(integer) { "a" }
instance of
function() as array(*)"#, "true" ),
// 32. Ai is array(X) and Bi is array(Y), and subtype(X, Y) is true.
( r#"function() as array(integer) { "a" }
instance of
function() as array(decimal)"#, "true" ),
// 33. Ai is array(*) (or any other array type),
// and Bi is function(*).
( r#"function() as array(*) { "a" }
instance of
function() as function(*)"#, "true" ),
// 34. Ai is array(*) (or any other array type)
// and Bi is function(xs:integer) as item()*.
// 36. Ai is array(X) and Bi is function(xs:integer) as X.
( r#"function() as array(*) { "a" }
instance of
function() as function(integer) as item()*"#, "true" ),
( r#"function() as array(string) { "a" }
instance of
function() as function(integer) as string"#, "true" ),
]);
}
// -----------------------------------------------------------------
// instance of ( ParenthesizedItemType )
//
#[test]
fn test_instance_of_parenthesized_item_type() {
let xml = compress_spaces(r#"
<root>
<elem base="base"/>
</root>
"#);
subtest_eval_xpath("instance_of_parenthesized_item_type", &xml, &[
// ( " (1) instance of (xs:integer) ", "true" ),
]);
}
}
| true |
88552dde71303fcd7d94b0a06d29380801d414be
|
Rust
|
brunoczim/mursic
|
/src/player.rs
|
UTF-8
| 646 | 2.609375 | 3 |
[
"MIT"
] |
permissive
|
mod rodio;
use crate::source::Source;
#[derive(Debug)]
pub struct Player {
#[cfg(target_arch = "wasm32")]
backend: (),
#[cfg(not(target_arch = "wasm32"))]
backend: rodio::Rodio,
}
impl Player {
pub fn new() -> Option<Self> {
Backend::new().map(|backend| Player { backend })
}
pub fn play<S>(&self, stream: S)
where
S: Source + 'static,
{
self.backend.play(stream)
}
pub fn wait(&self) {
self.backend.wait()
}
}
trait Backend: Sized {
fn new() -> Option<Self>;
fn play<S>(&self, stream: S)
where
S: Source + 'static;
fn wait(&self);
}
| true |
0008de0a1f776dc3e7bd965adba59b9e855cb228
|
Rust
|
JiahaiHu/kv-server
|
/src/store/engine/mod.rs
|
UTF-8
| 3,339 | 3.15625 | 3 |
[] |
no_license
|
pub mod log;
use std::collections::{BTreeMap, HashMap};
use std::fs::OpenOptions;
use std::io::BufRead;
use std::io::BufReader;
use std::sync::Arc;
use std::sync::Mutex;
use super::{Engine, Key, Value};
use log::{Log, LogType};
const LOG_PATH: &'static str = "kv.log";
#[derive(Clone)]
pub struct Kvdb {
pub db: Arc<Mutex<BTreeMap<Key, Value>>>,
log: Arc<Mutex<Log>>,
}
impl Kvdb {
pub fn new() -> Self {
let mut kvdb = Kvdb {
db: Arc::new(Mutex::new(BTreeMap::new())),
log: Arc::new(Mutex::new(Log::new(LOG_PATH))),
};
kvdb.recover();
kvdb
}
pub fn recover(&mut self) {
println!("recovering...");
let path = &self.log.lock().unwrap().path;
let file = OpenOptions::new().read(true).open(path);
match file {
Err(_) => println!("log file not found"),
Ok(f) => {
let reader = BufReader::new(f);
let iter = reader.lines().map(|l| l.unwrap());; // BufRead Trait
let mut map = self.db.lock().unwrap();
for line in iter {
let v: Vec<&str> = line.split_whitespace().collect();
let log_type = v[0];
match log_type {
"0" => map.insert(v[1].to_owned(), v[2].to_owned()),
"1" => map.remove(v[1]),
_ => None,
};
}
}
}
println!("recovery finished!");
}
pub fn flush(&self) {
self.log.lock().unwrap().flush();
}
}
impl Engine for Kvdb {
    /// Returns a copy of the value stored under `key`, or `Ok(None)` if absent.
    fn get(&self, key: &Key) -> Result<Option<Value>, ()> {
        let map = self.db.lock().unwrap();
        Ok(map.get(key).cloned())
    }

    /// Records the write in the log, then inserts into the map.
    /// If the map did not have this key present, Ok(None) is returned.
    /// If the map did have this key present, the value is updated, and OK(Some(old_value)) is returned.
    fn put(&mut self, key: &Key, value: &Value) -> Result<Option<Value>, ()> {
        self.log.lock().unwrap().record(LogType::Put, key, value);
        let mut map = self.db.lock().unwrap();
        // `insert` already hands back the old owned value; the extra
        // `Some(value.clone())` in the original was a redundant clone.
        Ok(map.insert(key.clone(), value.clone()))
    }

    /// Delete a key from the map, returning Ok(Some(value)) if the key was previously in the map.
    fn delete(&mut self, key: &Key) -> Result<Option<Value>, ()> {
        self.log.lock().unwrap().record(LogType::Delete, key, Value::new().as_str());
        let mut map = self.db.lock().unwrap();
        // `remove` returns the owned value directly; no clone needed.
        Ok(map.remove(key))
    }

    /// Collects all pairs with `key_start <= key < key_end`.
    /// Returns `Ok(None)` when the range is empty.
    fn scan(&self, key_start: &Key, key_end: &Key) -> Result<Option<HashMap<Key, Value>>, ()> {
        let map = self.db.lock().unwrap();
        let kvs: HashMap<Key, Value> = map
            .range(key_start.clone()..key_end.clone())
            .map(|(k, v)| (k.clone(), v.clone()))
            .collect();
        if kvs.is_empty() {
            Ok(None)
        } else {
            Ok(Some(kvs))
        }
    }
}
| true |
e53a324244371ea6ca689ee16a189f7c623d6de5
|
Rust
|
mikialex/rendiation
|
/scene/raytracing/src/sampling/sampler.rs
|
UTF-8
| 4,437 | 2.6875 | 3 |
[] |
no_license
|
use std::sync::Arc;
use rand::{prelude::SliceRandom, rngs::ThreadRng, Rng};
use rendiation_algebra::Vec2;
/// https://www.pbr-book.org/3ed-2018/Sampling_and_Reconstruction/Sampling_Interface#fragment-SamplerInterface-2
/// Because sample values must be strictly less than 1,
/// OneMinusEpsilon, that represents the largest representable floating-point constant that is less
/// than 1. Later, we will clamp sample vector values to be no larger than this value.
// const ONE_MINUS_EPSILON: f32 = 0x1.ffffffep - 1;
/// The task of a Sampler is to generate a sequence of -dimensional samples in
/// [0, 1) ^ d
/// The task of a Sampler is to generate a sequence of -dimensional samples in
/// [0, 1) ^ d
pub trait Sampler {
  /// Restarts the per-dimension cursors and selects which sample
  /// (by index) subsequent `next`/`next_2d` calls draw from.
  fn reset(&mut self, next_sampling_index: usize);

  /// Returns the next 1D sample value.
  fn next(&mut self) -> f32;

  /// While a 2D sample value could be constructed by using values returned by a pair of calls to
  /// sample(), some samplers can generate better point distributions if they know that two
  /// dimensions will be used together.
  fn next_2d(&mut self) -> (f32, f32);

  /// Convenience wrapper returning the next 2D sample as a `Vec2`.
  fn next_2d_vec(&mut self) -> Vec2<f32> {
    Vec2::from(self.next_2d())
  }
}
/// Precomputed sample values: one array of samples per dimension,
/// indexed as `arrays[dimension][sample_index]`.
#[derive(Clone)]
pub struct SampleStorage {
  samples_1d_arrays: Vec<Vec<f32>>,
  samples_2d_arrays: Vec<Vec<(f32, f32)>>,
}
impl SampleStorage {
  /// Randomly permutes every per-dimension sample array in place.
  pub fn shuffle(&mut self) {
    let mut rng = ThreadRng::default();
    for samples in self.samples_1d_arrays.iter_mut() {
      samples.shuffle(&mut rng);
    }
    for samples in self.samples_2d_arrays.iter_mut() {
      samples.shuffle(&mut rng);
    }
  }
}
/// Describes how much sample data to precompute.
pub struct SamplePrecomputedRequest {
  // Minimum samples per pixel; a generator may round this up.
  pub min_spp: usize,
  // Number of 1D dimensions to precompute.
  pub max_1d_dimension: usize,
  // Number of 2D dimensions to precompute.
  pub max_2d_dimension: usize,
}
/// Source of raw sample values used to fill a `SampleStorage`.
pub trait SampleGenerator: Default {
  /// Lets the generator adjust the requested samples-per-pixel
  /// (e.g. round up to a count it can generate well).
  fn override_ssp(&self, ssp: usize) -> usize;
  /// Generates the `index`-th 1D sample.
  fn gen_1d(&self, index: usize) -> f32;
  /// Generates the `index`-th 2D sample.
  fn gen_2d(&self, index: usize) -> (f32, f32);
}
impl SampleStorage {
  /// Precomputes `spp` samples for every requested dimension, then shuffles
  /// each per-dimension array once.
  pub fn generate<G: SampleGenerator>(request: SamplePrecomputedRequest) -> Self {
    let gen = G::default();
    let spp = gen.override_ssp(request.min_spp);

    let samples_1d_arrays = (0..request.max_1d_dimension)
      .map(|_| (0..spp).map(|i| gen.gen_1d(i)).collect())
      .collect();

    let samples_2d_arrays = (0..request.max_2d_dimension)
      .map(|_| (0..spp).map(|i| gen.gen_2d(i)).collect())
      .collect();

    let mut storage = Self {
      samples_1d_arrays,
      samples_2d_arrays,
    };
    // Reuse `shuffle` instead of duplicating its shuffling loops here.
    storage.shuffle();
    storage
  }
}
/// Cursor state for walking a `SampleStorage`.
pub struct SamplingStorageState {
  // Which precomputed sample (column) is being read.
  current_sampling_index: usize,
  // Next 1D dimension (row) to consume.
  current_1d_index: usize,
  // Next 2D dimension (row) to consume.
  current_2d_index: usize,
}
/// Sampler that reads shared precomputed samples, falling back to plain
/// random sampling when the table runs out of dimensions or samples.
pub struct PrecomputedSampler {
  storage: Arc<SampleStorage>,
  state: SamplingStorageState,
  backup: RngSampler,
}
impl PrecomputedSampler {
  /// Builds a sampler that draws from `source`, with all cursors at zero.
  pub fn new(source: &Arc<SampleStorage>) -> Self {
    let state = SamplingStorageState {
      current_sampling_index: 0,
      current_1d_index: 0,
      current_2d_index: 0,
    };
    Self {
      storage: source.clone(),
      state,
      backup: Default::default(),
    }
  }
}
impl Sampler for PrecomputedSampler {
  /// Rewinds both dimension cursors and jumps to the given sample index.
  fn reset(&mut self, next_sampling_index: usize) {
    self.state.current_sampling_index = next_sampling_index;
    self.state.current_1d_index = 0;
    self.state.current_2d_index = 0;
  }

  fn next(&mut self) -> f32 {
    let dim = self.state.current_1d_index;
    // Look up the table for this dimension; the cursor advances whenever
    // the dimension exists, even if the sample index overruns the array.
    let stored = match self.storage.samples_1d_arrays.get(dim) {
      Some(samples) => {
        self.state.current_1d_index = dim + 1;
        samples.get(self.state.current_sampling_index).copied()
      }
      None => None,
    };
    match stored {
      Some(sample) => sample,
      None => self.backup.next(),
    }
  }

  fn next_2d(&mut self) -> (f32, f32) {
    let dim = self.state.current_2d_index;
    let stored = match self.storage.samples_2d_arrays.get(dim) {
      Some(samples) => {
        self.state.current_2d_index = dim + 1;
        samples.get(self.state.current_sampling_index).copied()
      }
      None => None,
    };
    match stored {
      Some(sample) => sample,
      None => self.backup.next_2d(),
    }
  }
}
/// Fallback sampler producing uncorrelated uniform random values.
#[derive(Default)]
pub struct RngSampler {
  rng: ThreadRng,
}
impl Sampler for RngSampler {
  /// Pure random sampling has no precomputed state to rewind.
  fn reset(&mut self, _next_sampling_index: usize) {}

  fn next(&mut self) -> f32 {
    self.rng.gen()
  }

  fn next_2d(&mut self) -> (f32, f32) {
    // Draw in left-to-right order, matching tuple evaluation.
    let a = self.rng.gen();
    let b = self.rng.gen();
    (a, b)
  }
}
| true |
9fc6ed07ed54529b7e3d496885acfa6aaddc8b78
|
Rust
|
abhijat/jumbo
|
/src/main.rs
|
UTF-8
| 1,759 | 3.171875 | 3 |
[] |
no_license
|
use std::env;
use std::fs;
use std::io;
use std::path;
fn cumulative_size(path: &path::Path, mut dir_sizes: &mut Vec<(String, u64)>) -> io::Result<u64> {
if path.is_dir() {
let mut total_size: u64 = 0;
let entries = path.read_dir();
if entries.is_err() {
println!("failed to read path {:?}", path);
return Ok(0);
}
let entries = entries.unwrap();
for e in entries {
let e = e?;
if e.path().is_file() {
total_size += e.path().metadata()?.len();
} else {
total_size += cumulative_size(&e.path(), &mut dir_sizes)?;
}
}
dir_sizes.push((path.to_str().unwrap().to_owned(), total_size));
Ok(total_size)
} else if path.is_file() {
let size = fs::metadata(path)?.len();
dir_sizes.push((path.to_str().unwrap().to_owned(), size));
Ok(size)
} else {
Ok(0)
}
}
/// Converts a byte count to whole mebibytes, truncating the remainder.
fn bytes_to_mb(size: u64) -> u64 {
    size >> 20
}
fn main() {
let args: Vec<String> = env::args().collect();
if args.len() != 2 {
println!("usage: {} <path to analyze>", args[0]);
::std::process::exit(1);
}
let mut dir_sizes = Vec::<(String, u64)>::new();
let path = path::Path::new(&args[1]);
let size = cumulative_size(&path, &mut dir_sizes);
match size {
Ok(_) => {
dir_sizes.sort_by(|a, b| {
b.1.cmp(&a.1)
});
for pair in dir_sizes.iter().take(100) {
let size = format!("{}K", pair.1);
eprintln!("{:20} \t {}", size, pair.0);
}
}
Err(e) => {
eprintln!("e = {:#?}", e);
}
}
}
| true |
b1646a282684d468446442f6b12240ba058dbd80
|
Rust
|
deliveroo/prost
|
/prost-derive/src/options.rs
|
UTF-8
| 2,389 | 2.796875 | 3 |
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use anyhow::{bail, Error};
use syn::{Attribute, Lit, Meta, MetaNameValue};
use crate::field::{bool_attr, prost_attrs, set_option};
/// Derive options parsed from `#[prost(...)]` attributes on a message type.
pub struct Options {
    // Generate a Debug impl (default true).
    pub debug: bool,
    // Generate a Default impl (default true).
    pub default: bool,
    // Generate merge support (default true).
    pub merge: bool,
    // Protobuf syntax version; defaults to proto2.
    pub proto: ProtoVersion,
}
/// Protobuf syntax version declared via `#[prost(proto = "...")]`.
#[derive(Debug, PartialEq)]
pub enum ProtoVersion {
    Proto2,
    Proto3,
}
impl Options {
    /// Parses `#[prost(...)]` attributes into an `Options`, rejecting
    /// duplicate settings and any attribute it does not recognize.
    pub fn new(attrs: Vec<Attribute>) -> Result<Self, Error> {
        let mut debug = None;
        let mut default = None;
        let mut merge = None;
        let mut proto = None;
        let mut unrecognized = Vec::new();

        for attr in prost_attrs(attrs) {
            if let Some(value) = bool_attr("debug", &attr)? {
                set_option(&mut debug, value, "duplicate debug attribute")?;
            } else if let Some(value) = bool_attr("default", &attr)? {
                set_option(&mut default, value, "duplicate default attribute")?;
            } else if let Some(value) = bool_attr("merge", &attr)? {
                set_option(&mut merge, value, "duplicate merge attribute")?;
            } else if let Some(version) = ProtoVersion::new(&attr)? {
                set_option(&mut proto, version, "duplicate proto attribute")?;
            } else {
                unrecognized.push(attr);
            }
        }

        // Report unknown attributes, singular or plural, as an error.
        if unrecognized.len() == 1 {
            bail!("unknown attribute: {:?}", unrecognized[0]);
        } else if !unrecognized.is_empty() {
            bail!("unknown attributes: {:?}", unrecognized);
        }

        Ok(Options {
            debug: debug.unwrap_or(true),
            default: default.unwrap_or(true),
            merge: merge.unwrap_or(true),
            proto: proto.unwrap_or(ProtoVersion::Proto2),
        })
    }
}
impl ProtoVersion {
    /// Parses a `proto = "proto2" | "proto3"` attribute.
    /// Returns `Ok(None)` when the attribute is not named `proto` at all,
    /// and an error for a `proto` attribute with an unexpected shape/value.
    pub fn new(attr: &Meta) -> Result<Option<Self>, Error> {
        if !attr.path().is_ident("proto") {
            return Ok(None);
        }
        let lit = match attr {
            Meta::NameValue(MetaNameValue {
                lit: Lit::Str(lit), ..
            }) => lit,
            _ => bail!("invalid proto attribute: {:?}", attr),
        };
        match lit.value().as_str() {
            "proto2" => Ok(Some(ProtoVersion::Proto2)),
            "proto3" => Ok(Some(ProtoVersion::Proto3)),
            _ => bail!("invalid proto attribute: {:?}", lit),
        }
    }

    /// True when the parsed version is proto3.
    pub fn is_proto3(&self) -> bool {
        *self == ProtoVersion::Proto3
    }
}
| true |
c8afa84d8679b84cd30d59942711ecee6c12d0aa
|
Rust
|
VanillaBrooks/args_into
|
/args_into/src/generics.rs
|
UTF-8
| 5,184 | 3.015625 | 3 |
[] |
no_license
|
use syn::punctuated::Punctuated;
use syn::FnArg;
use syn::GenericArgument;
use syn::GenericParam;
use syn::Ident;
use syn::Path;
use syn::PathSegment;
use syn::TypeParam;
use syn::TypeParamBound;
/// Find all of the types in the function arguments and return them as a GenericArgument type
/// to be used as `T` in `Into<T>`
/// Find all of the non-`self` argument types in the function arguments and
/// return them as `GenericArgument`s to be used as the `T` in `Into<T>`.
pub(crate) fn get_arg_generic_types<T>(
    args: Punctuated<FnArg, T>,
) -> impl Iterator<Item = GenericArgument> {
    // `filter_map` replaces the original filter + panic-on-mismatch map:
    // receivers (`self`) are skipped, typed arguments are converted.
    args.into_iter().filter_map(|arg| match arg {
        FnArg::Typed(pat_type) => Some(GenericArgument::Type(*pat_type.ty)),
        FnArg::Receiver(_) => None,
    })
}
/// Wraps each generic argument `T` into an `Into<T>` path segment.
pub(crate) fn make_path_segments(
    generic_arg_iter: impl Iterator<Item = GenericArgument>,
) -> impl Iterator<Item = PathSegment> {
    generic_arg_iter.map(|generic_arg| {
        let mut args = Punctuated::new();
        args.push(generic_arg);
        syn::PathSegment {
            ident: proc_macro2::Ident::new("Into", proc_macro2::Span::call_site()),
            arguments: syn::PathArguments::AngleBracketed(syn::AngleBracketedGenericArguments {
                colon2_token: None,
                lt_token: syn::token::Lt(proc_macro2::Span::call_site()),
                gt_token: syn::token::Gt(proc_macro2::Span::call_site()),
                args,
            }),
        }
    })
}
/// Turns each `Into<T>` segment into a complete single-segment `Path`.
pub(crate) fn make_paths(
    segments: impl Iterator<Item = PathSegment>,
) -> impl Iterator<Item = Path> {
    segments.map(|segment| {
        // Punctuate the lone segment, then wrap it in a path.
        let mut punctuated = Punctuated::new();
        punctuated.push(segment);
        Path {
            leading_colon: None,
            segments: punctuated,
        }
    })
}
type TraitBound = Punctuated<TypeParamBound, syn::token::Add>;

/// Converts each `Into<T>` path into a single-element trait-bound list.
pub(crate) fn make_trait_bounds(
    paths: impl Iterator<Item = Path>,
) -> impl Iterator<Item = TraitBound> {
    paths.map(|path| {
        let bound = syn::TypeParamBound::Trait(syn::TraitBound {
            paren_token: None,
            modifier: syn::TraitBoundModifier::None,
            lifetimes: None,
            path,
        });
        let mut bounds = Punctuated::new();
        bounds.push(bound);
        bounds
    })
}
pub(crate) fn make_generic_parameter(
trait_bounds: impl Iterator<Item = TraitBound>,
types: impl Iterator<Item = Ident>,
) -> impl Iterator<Item = GenericParam> {
types
.zip(trait_bounds)
.map(|(generic_type, trait_bound)| syn::TypeParam {
attrs: vec![],
ident: generic_type,
colon_token: None,
bounds: trait_bound,
eq_token: None,
default: None,
})
.map(|type_param: TypeParam| GenericParam::Type(type_param))
}
/// Rewrites every non-`self` argument's type to a fresh generic identifier
/// (`__NAME`, upper-cased from the argument name), and returns those
/// identifiers in argument order.
pub(crate) fn make_arguments_generic<T: Clone>(
    arguments: &mut Punctuated<FnArg, T>,
) -> impl Iterator<Item = Ident> {
    // Derive the synthetic identifiers from the argument names.
    // (A leftover debug `println!` was removed here.)
    let new_idents = arguments
        .clone()
        .into_iter()
        .filter_map(|arg| match arg {
            FnArg::Typed(pat_type) => Some(pat_type),
            FnArg::Receiver(_) => None,
        })
        .map(|pat: syn::PatType| match *pat.pat {
            syn::Pat::Ident(pat_ident) => pat_ident.ident,
            _ => panic!("Argument variable was not an ident"),
        })
        .map(|ident: Ident| {
            // `foo` -> `__FOO`: the prefix avoids clashes with user generics.
            let mut prefixed = ident.to_string();
            prefixed.insert_str(0, "__");
            Ident::new(&prefixed.to_uppercase(), ident.span())
        });

    // Replace each typed argument's type with its synthetic identifier.
    arguments
        .iter_mut()
        .filter_map(|arg| match arg {
            FnArg::Typed(pat_type) => Some(pat_type),
            FnArg::Receiver(_) => None,
        })
        .zip(new_idents.clone())
        .for_each(|(pat, new_ident)| {
            pat.ty = Box::new(new_type(new_ident));
        });

    new_idents
}
/// Builds a bare `syn::Type` that refers to the given identifier.
fn new_type(ident: Ident) -> syn::Type {
    let segment = PathSegment {
        ident,
        arguments: syn::PathArguments::None,
    };
    let mut segments = Punctuated::new();
    segments.push(segment);
    syn::Type::Path(syn::TypePath {
        qself: None,
        path: Path {
            leading_colon: None,
            segments,
        },
    })
}
| true |
b22907704b59d4004a2ae4cdbfef0bdcd28ed3e4
|
Rust
|
ohmountain/blog-cli
|
/src/arg/mod.rs
|
UTF-8
| 1,936 | 3.421875 | 3 |
[] |
no_license
|
use std::env;
/// Sub-command selected by the first CLI argument.
pub enum Method {
    CreateType,
    CreatePost,
    Edit,
    Show,
    Delete,
    Search,
    // Unrecognized or missing sub-command; usage is printed by get_method.
    Error
}
/// Returns the command-line arguments with the binary name stripped.
pub fn get_args() -> Vec<String> {
    // The original chained a no-op `.map(|x| x)`; collect directly.
    env::args().skip(1).collect()
}
/// Prints usage information (partly in Chinese) to stdout.
pub fn print_help() {
    println!(" ");
    println!("blog-cli usage");
    println!(" ");
    println!("  blog-cli create-type --title title --sort sort     create a blog type with sort from 1 to 255");
    println!("  blog-cli create-post --title title --type type     create a blog with title of type");
    println!(" ");
    println!("  blog-cli edit title                                edit a blog in emacs, auto save when quit emacs");
    println!("  blog-cli show title                                show a blog in vmd");
    println!(" ");
    println!("  blog-cli delete title                              delete a blog");
    println!("  blog-cli search title                              search a blog");
    println!(" ");
    println!("  标题长度不超过10个中文字或30个英文字");
    println!(" ");
}
pub fn get_method() -> Method {
let args = get_args();
if args.len() < 2 {
print_help();
return Method::Error
}
let mut method = Method::Error;
if args[0] == String::from("create-type") {
method = Method::CreateType;
}
if args[0] == String::from("create-post") {
method = Method::CreatePost;
}
if args[0] == String::from("edit") {
method = Method::Edit;
}
if args[0] == String::from("show") {
method = Method::Show;
}
if args[0] == String::from("delete") {
method = Method::Delete;
}
if args[0] == String::from("search") {
method = Method::Search;
}
match method {
Method::Error => print_help(),
_ => {},
}
method
}
| true |
fff49377df33f2e2f343f338137f8d4e0a71b8af
|
Rust
|
gdepuydt/fuzzoz
|
/src/core_requirements.rs
|
UTF-8
| 3,826 | 3.03125 | 3 |
[] |
no_license
|
// was temporarily removed
/// Internal (non-exported) forward byte copy using `rep movsb`.
///
/// # Safety
/// `dest` and `src` must each be valid for `n` bytes and must not overlap.
#[inline(always)]
#[cfg(target_arch = "x86_64")]
unsafe fn memcpy_int(dest: *mut u8, src: *const u8, n: usize) -> *mut u8 {
    // rcx = count, rdi = destination, rsi = source; all clobbered.
    asm!("rep movsb",
        inout("rcx") n => _,
        inout("rdi") dest => _,
        inout("rsi") src => _);
    dest
}
/// C-ABI `memcpy`: forward byte copy via `rep movsb`.
///
/// # Safety
/// Standard `memcpy` contract: both pointers valid for `n` bytes and the
/// regions must not overlap (use `memmove` when they may).
#[no_mangle]
#[cfg(target_arch = "x86_64")]
unsafe extern "C" fn memcpy(dest: *mut u8, src: *const u8, n: usize) -> *mut u8 {
    asm!("rep movsb",
        inout("rcx") n => _,
        inout("rdi") dest => _,
        inout("rsi") src => _);
    dest
}
/// Portable C-ABI `memcpy` fallback: byte-by-byte forward copy.
///
/// # Safety
/// Both pointers must be valid for `n` bytes; regions must not overlap.
#[no_mangle]
#[cfg(not(target_arch = "x86_64"))]
unsafe extern "C" fn memcpy(dest: *mut u8, src: *const u8, n: usize) -> *mut u8 {
    for off in 0..n {
        core::ptr::write(dest.add(off), core::ptr::read(src.add(off)));
    }
    dest
}
/// C-ABI `memmove`: copy that tolerates overlapping source/destination.
///
/// # Safety
/// Both pointers must be valid for `n` bytes.
#[no_mangle]
unsafe extern "C" fn memmove(dest: *mut u8, src: *const u8, mut n: usize) -> *mut u8 {
    // Determine if the dest comes after the src and if there's overlap
    if (dest as usize) > (src as usize) && (src as usize).wrapping_add(n) > (dest as usize) {
        // There is at least one byte of overlap and the src is prior
        // to the dest
        // Compute the delta between the source and the dest
        let delta = (dest as usize) - (src as usize);
        // if the delta is small, copy in reverse (tail-first) so the
        // overlapping bytes are read before they are overwritten
        if delta < 64 {
            // 8 byte align dest with one byte copies
            while n != 0 && (dest as usize).wrapping_add(n) & 0x7 != 0 {
                n = n.wrapping_sub(1);
                core::ptr::write(dest.add(n), core::ptr::read(src.add(n)));
            }
            // when the dest is aligned, do a reverse copy 8-bytes at a time
            while n >= 8 {
                n = n.wrapping_sub(8);
                // Read the value to copy
                let val = core::ptr::read_unaligned(src.add(n) as *const u64);
                // Write out the value
                core::ptr::write(dest.add(n) as *mut u64, val);
            }
            // Copy the remainder
            while n != 0 {
                n = n.wrapping_sub(1);
                core::ptr::write(dest.add(n), core::ptr::read(src.add(n)));
            }
            return dest;
        }
        // Copy the non-overlapping tail parts while there are overhang
        // sized chunks; each memcpy stays within a region that does not
        // overlap because the chunk size equals the pointer delta
        while n >= delta {
            // Update the length remaining
            n = n.wrapping_sub(delta);
            let src = src.add(n);
            let dest = dest.add(n);
            memcpy(dest, src, delta);
        }
        // check if we copied everything
        if n == 0 {
            return dest;
        }
        // At this point n < delta so we are in a non-overlapping region
    }
    // Just copy the remaining bytes forward one by one
    memcpy(dest, src, n)
}
// Fill memory with a constant
/// C-ABI `memset`: stores the low byte of `c` into `n` bytes via `rep stosb`.
///
/// # Safety
/// `s` must be valid for `n` writable bytes.
#[no_mangle]
#[cfg(target_arch = "x86_64")]
unsafe extern "C" fn memset(s: *mut u8, c: i32, n: usize) -> *mut u8 {
    asm!("rep stosb",
        inout("rcx") n => _,
        inout("rdi") s => _,
        in("eax") c as u32
    );
    s
}
// Fill memory with a constant
/// Portable C-ABI `memset` fallback: writes the low byte of `c` to each
/// of the `n` positions.
///
/// # Safety
/// `s` must be valid for `n` writable bytes.
#[no_mangle]
#[cfg(not(target_arch = "x86_64"))]
unsafe extern "C" fn memset(s: *mut u8, c: i32, n: usize) -> *mut u8 {
    for off in 0..n {
        core::ptr::write(s.add(off), c as u8);
    }
    s
}
/// C-ABI `memcmp`: lexicographic byte comparison of two regions.
/// Returns the difference of the first mismatching byte pair
/// (negative, zero, or positive per the libc contract).
///
/// # Safety
/// Both pointers must be valid for `n` readable bytes.
#[no_mangle]
unsafe extern "C" fn memcmp(s1: *const u8, s2: *const u8, n: usize) -> i32 {
    for off in 0..n {
        let lhs = core::ptr::read(s1.add(off));
        let rhs = core::ptr::read(s2.add(off));
        if lhs != rhs {
            // Bytes fit in i32, so the subtraction cannot actually wrap.
            return i32::from(lhs).wrapping_sub(i32::from(rhs));
        }
    }
    0
}
| true |
1d6bb7b4ee9d6d0c1c65d894f7d94fc8346cff4d
|
Rust
|
timsims1717/RustADE
|
/src/game/gamestate.rs
|
UTF-8
| 4,052 | 3.1875 | 3 |
[] |
no_license
|
use obj::DamageType;
use obj::item::Item;
use obj::location::Location;
use obj::player::Player;
use parsing::grammar::ItemNode;
use parsing::DirectionType;
/// Top-level mutable world state: the player plus every location.
pub struct GameState {
    // Set externally to break the main game loop — confirm with callers.
    pub break_control: bool,
    player: Player,
    locations: Vec<Location>,
}
impl GameState {
    /// Creates a GameState instance with the player starting at location
    /// index `l` and the given set of world locations.
    pub fn new(l: usize, loc: Vec<Location>) -> GameState {
        GameState {
            break_control: false,
            player: Player::new(l),
            locations: loc,
        }
    }

    /// Updates every inventory item and every location, collecting any text
    /// they emit into one display string.
    /// todo: make it so only the current locations display strings
    pub fn update(&mut self) -> String {
        let mut display = String::new();
        for item in &mut self.player.inventory {
            item.update(&mut display);
        }
        for location in &mut self.locations {
            location.update(&mut display);
        }
        display
    }

    /// Returns true when the player's inventory contains the "idol" item.
    // NOTE(review): presumably a win/progress check — confirm with callers.
    pub fn update_player(&self) -> bool {
        let idol = ItemNode {
            subject: "idol".to_string(),
            subject_lexeme: "idol".to_string(),
        };
        match self.player.find_item(&idol) {
            Some(_) => true,
            None => false,
        }
    }

    /// Returns a pointer to the player's current location.
    pub fn current_location(&self) -> &Location {
        &self.locations[self.player.location]
    }

    /// Returns a pointer to the player.
    pub fn player(&self) -> &Player {
        &self.player
    }

    /// Attempts to change the location of the player, returns the old
    /// location index if successful, otherwise returns None.
    pub fn move_player(&mut self, d: DirectionType) -> Option<usize> {
        match self.locations[self.player.location].find_exit(d) {
            Some(e) => {
                let old_loc = self.player.location;
                self.player.location = e;
                Some(old_loc)
            },
            None => None,
        }
    }

    /// Looks the item up in the player's inventory first, then in the
    /// current location.
    pub fn has_item(&self, i_node: &ItemNode) -> Option<Item> {
        match self.player.find_item(i_node) {
            Some(i) => Some(i),
            None => self.locations[self.player.location].find_item(i_node),
        }
    }

    /// Attempts to move an item from the current location to the player's
    /// inventory. If the item is not fixed, it succeeds. The Location.remove_item()
    /// method does not remove it from it's list if the item is fixed.
    pub fn get_item(&mut self, i_node: &ItemNode) -> Option<Item> {
        match self.locations[self.player.location].remove_item(&i_node) {
            Some(i) => {
                if !i.is_fixed {
                    self.player.add_item(i.clone());
                } else {
                    // Fixed items are put straight back where they were.
                    self.locations[self.player.location].add_item(i.clone());
                }
                Some(i)
            },
            None => None,
        }
    }

    /// Attempts to move an item from the player's inventory to the current
    /// location.
    pub fn drop_item(&mut self, i_node: &ItemNode) -> Option<Item> {
        match self.player.remove_item(&i_node) {
            Some(i) => {
                self.locations[self.player.location].add_item(i.clone());
                Some(i)
            },
            None => None,
        }
    }

    /// Applies one point of damage to the first item in the current location
    /// matching `damage_type`, returning the damaged item.
    /// todo: add damage_amount as a third parameter
    pub fn damage_first_item(&mut self, damage_type: Option<DamageType>) -> Option<Item> {
        match damage_type {
            Some(dt) => match self.locations[self.player.location].remove_first_item_by_damage_type(dt) {
                Some(mut item) => {
                    // Remove, mutate, then re-add so the location's list
                    // holds the damaged copy.
                    item.damage(1);
                    self.locations[self.player.location].add_item(item.clone());
                    Some(item)
                },
                None => None,
            },
            None => None,
        }
    }

    /// Attaches `sub_item` (taken from the player's inventory) to the first
    /// attachable item in the current location, if `sub_item` allows it.
    pub fn attach_first_item(&mut self, i_node: &ItemNode, sub_item: Item) -> Option<Item> {
        if sub_item.can_attach {
            match self.locations[self.player.location].remove_first_attachable_item() {
                Some(mut item) => {
                    item.attach_item(sub_item);
                    self.locations[self.player.location].add_item(item.clone());
                    // Drop the attached item from the player's inventory.
                    self.player.remove_item(i_node);
                    Some(item)
                },
                None => None,
            }
        } else {
            None
        }
    }

    /// Toggles the named item on, checking the player's inventory first and
    /// the current location second; the item is removed, toggled, and
    /// re-added so the containing list holds the updated state.
    pub fn turn_on_item(&mut self, i_node: &ItemNode) -> Option<bool> {
        match self.player.remove_item(&i_node) {
            Some(mut item) => {
                let result = item.toggle_on();
                self.player.add_item(item);
                result
            },
            None => match self.locations[self.player.location].remove_item(&i_node) {
                Some(mut item) => {
                    let result = item.toggle_on();
                    self.locations[self.player.location].add_item(item);
                    result
                },
                None => None,
            },
        }
    }
}
| true |
69d98c2516904166cb6108e7ea54f2ce717adce4
|
Rust
|
harrybrwn/calc
|
/src/main.rs
|
UTF-8
| 900 | 3.15625 | 3 |
[
"Apache-2.0"
] |
permissive
|
use std::env;
use std::io::{self, Error, ErrorKind, Write};
use calc::exec;
/// Runs a read-eval-print loop on stdin until the user quits (any line
/// starting with 'q', or exactly "quit"/"exit") or stdin reaches EOF.
fn interpreter() -> Result<(), Error> {
    let stdin = io::stdin();
    let mut stdout = io::stdout();
    let mut s = String::new();
    loop {
        print!(">>> ");
        stdout.flush()?;
        s.clear();
        // Fix: at EOF `read_line` returns 0 and leaves `s` empty, which made
        // the original `s.as_bytes()[0]` index panic; exit cleanly instead.
        if stdin.read_line(&mut s)? == 0 {
            return Ok(());
        }
        // Fix: the trailing newline meant `s == "quit"`/`"exit"` never
        // matched in the original; compare against the trimmed line.
        let trimmed = s.trim_end();
        if trimmed.starts_with('q') || trimmed == "quit" || trimmed == "exit" {
            return Ok(());
        }
        match exec(s.as_str()) {
            Ok(res) => println!("{}", res),
            Err(msg) => println!("Error: {}", msg),
        }
    }
}
/// With no arguments, starts the interactive interpreter; otherwise
/// evaluates the last CLI argument as a single expression.
fn main() -> Result<(), Error> {
    let args = env::args();
    if args.len() == 1 {
        return interpreter();
    }
    let expression = args.last().unwrap();
    match exec(expression.as_str()) {
        Ok(result) => {
            println!("{}", result);
            Ok(())
        }
        Err(message) => Err(Error::new(ErrorKind::Other, message)),
    }
}
| true |
93c431fc1fbc18e322bde3be87ad88d82a7a4a61
|
Rust
|
pkafma-aon/LeetCode-Rust
|
/src/excel_sheet_column_title.rs
|
UTF-8
| 560 | 3.375 | 3 |
[
"WTFPL"
] |
permissive
|
pub struct Solution;

impl Solution {
    /// Converts a positive column number into its Excel-style title
    /// (bijective base 26): 1 -> "A", 28 -> "AB", 701 -> "ZY".
    pub fn convert_to_title(mut n: i32) -> String {
        let mut letters: Vec<u8> = Vec::new();
        while n != 0 {
            // Shift to 0-based so 26 maps to 'Z' rather than rolling over.
            let digit = (n - 1) % 26;
            letters.push(b'A' + digit as u8);
            n = (n - 1) / 26;
        }
        letters.reverse();
        String::from_utf8(letters).unwrap()
    }
}
// Known-answer tests for the bijective base-26 conversion.
#[cfg(test)]
mod tests {
    use super::Solution;
    #[test]
    fn test() {
        assert_eq!(Solution::convert_to_title(1), "A");
        assert_eq!(Solution::convert_to_title(28), "AB");
        assert_eq!(Solution::convert_to_title(701), "ZY");
    }
}
| true |
d86abbf3e2f7a9341eec7d16f66ddd0eb5a58371
|
Rust
|
edmccard/machine_int
|
/src/lib.rs
|
UTF-8
| 21,243 | 2.671875 | 3 |
[
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0",
"MIT"
] |
permissive
|
// Copyright 2018 Ed McCardell
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![cfg_attr(feature = "cargo-clippy", feature(tool_lints))]
#![cfg_attr(feature = "cargo-clippy", allow(clippy::cast_lossless))]
use std::cmp::Ordering;
use std::fmt;
use std::ops::{
Add, AddAssign, BitAnd, BitAndAssign, BitOr, BitOrAssign, BitXor,
BitXorAssign, Div, DivAssign, Mul, MulAssign, Neg, Not, Rem, RemAssign,
Shl, ShlAssign, Shr, ShrAssign, Sub, SubAssign,
};
/// Transparent wrapper giving integer types "machine" semantics:
/// arithmetic wraps on overflow instead of panicking.
#[derive(Clone, Copy, Default)]
#[repr(C)]
pub struct MachineInt<T>(pub T);

/// Conversion mirroring an `as` cast: always succeeds, and like `x as T`
/// may truncate or reinterpret the sign.
pub trait AsFrom<T>: Sized {
    fn as_from(_: T) -> Self;
}
// Trait<T> for MachineInt<T>
// Trait<MachineInt<T>> for <T>
// For each listed type $t, implements $trait in three directions —
// MachineInt<$t> op $t, $t op MachineInt<$t>, and MachineInt op MachineInt —
// always delegating to the method named by $fn (wrapping_* for arithmetic,
// the plain method for div/rem/bitops). Recurses through the type tail.
macro_rules! lit_impl {
    ($trait:ident, $meth:ident, $fn:ident) => {};
    ($trait:ident, $meth:ident, $fn:ident, $t:ty) => {
        impl $trait<$t> for MachineInt<$t> {
            type Output = Self;
            #[inline]
            fn $meth(self, rhs: $t) -> Self {
                MachineInt((self.0).$fn(rhs))
            }
        }
        impl $trait<MachineInt<$t>> for $t {
            type Output = MachineInt<$t>;
            #[inline]
            fn $meth(self, rhs: Self::Output) -> Self::Output {
                MachineInt(self.$fn(rhs.0))
            }
        }
        impl $trait<MachineInt<$t>> for MachineInt<$t> {
            type Output = MachineInt<$t>;
            #[inline]
            fn $meth(self, rhs: Self) -> Self {
                MachineInt((self.0).$fn(rhs.0))
            }
        }
    };
    ($trait:ident, $meth:ident, $fn:ident, $t:ty, $($tail:tt)*) => {
        lit_impl!($trait, $meth, $fn, $t);
        lit_impl!($trait, $meth, $fn, $($tail)*);
    };
}
// Arithmetic wraps; div/rem use the plain (possibly panicking) methods,
// and the bitwise ops are only provided for unsigned types here.
lit_impl!(Add, add, wrapping_add, i8, u8, i16, u16, i32, u32, i64, u64);
lit_impl!(Sub, sub, wrapping_sub, i8, u8, i16, u16, i32, u32, i64, u64);
lit_impl!(Mul, mul, wrapping_mul, i8, u8, i16, u16, i32, u32, i64, u64);
lit_impl!(Div, div, div, i8, u8, i16, u16, i32, u32, i64, u64);
lit_impl!(Rem, rem, rem, i8, u8, i16, u16, i32, u32, i64, u64);
lit_impl!(BitAnd, bitand, bitand, u8, u16, u32, u64);
lit_impl!(BitOr, bitor, bitor, u8, u16, u32, u64);
lit_impl!(BitXor, bitxor, bitxor, u8, u16, u32, u64);
// Compound-assignment counterpart of lit_impl: implements $trait (e.g.
// AddAssign) for MachineInt<$t> against both the bare type and itself.
macro_rules! lit_assign_impl {
    ($trait:ident, $meth:ident, $fn:ident) => {};
    ($trait:ident, $meth:ident, $fn:ident, $t:ty) => {
        impl $trait<$t> for MachineInt<$t> {
            #[inline]
            fn $meth(&mut self, rhs: $t) {
                self.0 = (self.0).$fn(rhs);
            }
        }
        impl $trait<MachineInt<$t>> for MachineInt<$t> {
            #[inline]
            fn $meth(&mut self, rhs: Self) {
                self.0 = (self.0).$fn(rhs.0);
            }
        }
    };
    ($trait:ident, $meth:ident, $fn:ident, $t:ty, $($tail:tt)*) => {
        lit_assign_impl!($trait, $meth, $fn, $t);
        lit_assign_impl!($trait, $meth, $fn, $($tail)*);
    };
}
#[cfg_attr(rustfmt, rustfmt_skip)]
lit_assign_impl!(AddAssign, add_assign, wrapping_add,
                 i8, u8, i16, u16, i32, u32, i64, u64);
#[cfg_attr(rustfmt, rustfmt_skip)]
lit_assign_impl!(SubAssign, sub_assign, wrapping_sub,
                 i8, u8, i16, u16, i32, u32, i64, u64);
#[cfg_attr(rustfmt, rustfmt_skip)]
lit_assign_impl!(MulAssign, mul_assign, wrapping_mul,
                 i8, u8, i16, u16, i32, u32, i64, u64);
#[cfg_attr(rustfmt, rustfmt_skip)]
lit_assign_impl!(DivAssign, div_assign, div,
                 i8, u8, i16, u16, i32, u32, i64, u64);
#[cfg_attr(rustfmt, rustfmt_skip)]
lit_assign_impl!(RemAssign, rem_assign, rem,
                 i8, u8, i16, u16, i32, u32, i64, u64);
lit_assign_impl!(BitAndAssign, bitand_assign, bitand, u8, u16, u32, u64);
lit_assign_impl!(BitOrAssign, bitor_assign, bitor, u8, u16, u32, u64);
lit_assign_impl!(BitXorAssign, bitxor_assign, bitxor, u8, u16, u32, u64);
// Trait<T> for MachineInt<U>
// Trait<MachineInt<U> for T
// Trait<MachineInt<T>> for MachineInt<T>
// where T is the unsigned counterpart of U.
// Bitwise ops between a signed MachineInt ($lhs) and its unsigned
// counterpart ($rhs): the unsigned operand is reinterpreted via `as $lhs`
// so the bit pattern is preserved. Consumes (lhs, rhs) pairs from the tail.
macro_rules! ibit_lit_impl {
    ($trait:ident, $meth:ident, $fn:ident, $lhs:ty, $rhs:ty) => {
        impl $trait<$rhs> for MachineInt<$lhs> {
            type Output = MachineInt<$lhs>;
            #[inline]
            fn $meth(self, rhs: $rhs) -> Self::Output {
                MachineInt((self.0).$fn(rhs as $lhs))
            }
        }
        impl $trait<MachineInt<$lhs>> for $rhs {
            type Output = MachineInt<$lhs>;
            #[inline]
            fn $meth(self, rhs: MachineInt<$lhs>) -> Self::Output {
                MachineInt((self as $lhs).$fn(rhs.0))
            }
        }
        impl $trait<MachineInt<$lhs>> for MachineInt<$lhs> {
            type Output = MachineInt<$lhs>;
            #[inline]
            fn $meth(self, rhs: MachineInt<$lhs>) -> Self::Output {
                MachineInt((self.0).$fn(rhs.0))
            }
        }
    };
    ($trait:ident, $meth:ident, $fn:ident, $lhs:ty, $rhs:ty, $($tail:tt)*) => {
        ibit_lit_impl!($trait, $meth, $fn, $lhs, $rhs);
        ibit_lit_impl!($trait, $meth, $fn, $($tail)*);
    };
}
// Pairs are (signed, unsigned): (i64,u64), (i32,u32), (i16,u16), (i8,u8).
ibit_lit_impl!(BitAnd, bitand, bitand, i64, u64, i32, u32, i16, u16, i8, u8);
ibit_lit_impl!(BitOr, bitor, bitor, i64, u64, i32, u32, i16, u16, i8, u8);
ibit_lit_impl!(BitXor, bitxor, bitxor, i64, u64, i32, u32, i16, u16, i8, u8);
// Compound-assignment counterpart of ibit_lit_impl: signed MachineInt
// accepting the bare unsigned type, MachineInt of the unsigned type, and
// itself, reinterpreting the right-hand side's bits via `as $lhs`.
macro_rules! ibit_lit_assign_impl {
    ($trait:ident, $meth:ident, $fn:ident, $lhs:ty, $rhs:ty) => {
        impl $trait<$rhs> for MachineInt<$lhs> {
            #[inline]
            fn $meth(&mut self, rhs: $rhs) {
                self.0 = (self.0).$fn(rhs as $lhs);
            }
        }
        impl $trait<MachineInt<$rhs>> for MachineInt<$lhs> {
            #[inline]
            fn $meth(&mut self, rhs: MachineInt<$rhs>) {
                self.0 = (self.0).$fn(rhs.0 as $lhs)
            }
        }
        impl $trait<MachineInt<$lhs>> for MachineInt<$lhs> {
            #[inline]
            fn $meth(&mut self, rhs: MachineInt<$lhs>) {
                self.0 = (self.0).$fn(rhs.0)
            }
        }
    };
    ($trait:ident, $meth:ident, $fn:ident, $lhs:ty, $rhs:ty, $($tail:tt)*) => {
        ibit_lit_assign_impl!($trait, $meth, $fn, $lhs, $rhs);
        ibit_lit_assign_impl!($trait, $meth, $fn, $($tail)*);
    };
}
#[cfg_attr(rustfmt, rustfmt_skip)]
ibit_lit_assign_impl!(BitAndAssign, bitand_assign, bitand,
                      i64, u64, i32, u32, i16, u16, i8, u8);
#[cfg_attr(rustfmt, rustfmt_skip)]
ibit_lit_assign_impl!(BitOrAssign, bitor_assign, bitor,
                      i64, u64, i32, u32, i16, u16, i8, u8);
#[cfg_attr(rustfmt, rustfmt_skip)]
ibit_lit_assign_impl!(BitXorAssign, bitxor_assign, bitxor,
                      i64, u64, i32, u32, i16, u16, i8, u8);
// Neg, Not, Eq, Ord for MachineInt<T>
// PartialEq<T> for MachineInt<T>, PartialEq<MachineInt<T>> for T
// PartialOrd<T> for MachineInt<T>, PartialOrd<MachineInt<T>> for T
// For every listed type: wrapping Neg, Not, total ordering, mixed
// (in)equality with the bare type, AsFrom/From conversions, and shifts
// that wrap the shift amount (wrapping_shl/shr) instead of panicking.
macro_rules! misc_impl {
    ($($t:ty)*) => ($(
        // Unary operators: negation wraps (e.g. -i8::MIN stays i8::MIN).
        impl Neg for MachineInt<$t> {
            type Output = Self;
            #[inline]
            fn neg(self) -> Self {
                MachineInt(self.0.wrapping_neg())
            }
        }
        impl Not for MachineInt<$t> {
            type Output = Self;
            #[inline]
            fn not(self) -> Self {
                MachineInt(!self.0)
            }
        }
        // Total ordering delegates to the inner integer.
        impl Eq for MachineInt<$t> {}
        impl Ord for MachineInt<$t> {
            #[inline]
            fn cmp(&self, rhs: &Self) -> Ordering {
                self.0.cmp(&rhs.0)
            }
        }
        // Mixed comparisons with the bare literal type, both directions.
        impl PartialEq<$t> for MachineInt<$t> {
            #[inline]
            fn eq(&self, rhs: &$t) -> bool {
                self.0 == *rhs
            }
        }
        impl PartialEq<MachineInt<$t>> for $t {
            #[inline]
            fn eq(&self, rhs: &MachineInt<$t>) -> bool {
                *self == rhs.0
            }
        }
        impl PartialOrd<$t> for MachineInt<$t> {
            #[inline]
            fn partial_cmp(&self, rhs: &$t) -> Option<Ordering> {
                Some(self.0.cmp(&*rhs))
            }
        }
        impl PartialOrd<MachineInt<$t>> for $t {
            #[inline]
            fn partial_cmp(&self, rhs: &MachineInt<$t>) -> Option<Ordering> {
                Some(self.cmp(&rhs.0))
            }
        }
        // Identity conversions (same-width); cross-width casts come from
        // the other macros / impls elsewhere in the crate.
        impl AsFrom<$t> for MachineInt<$t> {
            #[inline]
            fn as_from(val: $t) -> Self {
                MachineInt(val)
            }
        }
        impl AsFrom<MachineInt<$t>> for MachineInt<$t> {
            #[inline]
            fn as_from(val: MachineInt<$t>) -> Self {
                val
            }
        }
        impl From<$t> for MachineInt<$t> {
            #[inline]
            fn from(val: $t) -> Self {
                MachineInt(val)
            }
        }
        impl From<MachineInt<$t>> for $t {
            #[inline]
            fn from(val: MachineInt<$t>) -> Self {
                val.0
            }
        }
        // Shifts use wrapping_shl/shr: the shift count is taken modulo the
        // bit width rather than panicking on overflow.
        impl Shl<u32> for MachineInt<$t> {
            type Output = Self;
            #[inline]
            fn shl(self, rhs: u32) -> Self {
                MachineInt(self.0.wrapping_shl(rhs))
            }
        }
        impl Shr<u32> for MachineInt<$t> {
            type Output = Self;
            #[inline]
            fn shr(self, rhs: u32) -> Self {
                MachineInt(self.0.wrapping_shr(rhs))
            }
        }
        impl ShlAssign<u32> for MachineInt<$t> {
            #[inline]
            fn shl_assign(&mut self, rhs: u32) {
                self.0 = self.0.wrapping_shl(rhs)
            }
        }
        impl ShrAssign<u32> for MachineInt<$t> {
            #[inline]
            fn shr_assign(&mut self, rhs: u32) {
                self.0 = self.0.wrapping_shr(rhs)
            }
        }
    )*)
}
misc_impl!(i8 u8 i16 u16 i32 u32 i64 u64);
// Trait<MachineInt<T>> for MachineInt<U>
// where T has the same signedness as U
// for PartialEq, PartialOrd
// Same-signedness cross-width comparisons: the right-hand side is widened
// (or reinterpreted) with `as $lhs` before comparing, so each invocation
// line pairs one lhs type with every same-signed rhs type.
macro_rules! cmp_impl {
    ($lhs:ty; $rhs:ty) => {
        impl PartialEq<MachineInt<$rhs>> for MachineInt<$lhs> {
            #[inline]
            fn eq(&self, rhs: &MachineInt<$rhs>) -> bool {
                self.0 == rhs.0 as $lhs
            }
        }
        impl PartialOrd<MachineInt<$rhs>> for MachineInt<$lhs> {
            #[inline]
            fn partial_cmp(&self, rhs: &MachineInt<$rhs>) -> Option<Ordering> {
                Some(self.0.cmp(&(rhs.0 as $lhs)))
            }
        }
    };
    ($lhs:ty; $rhs:ty, $($tail:tt)*) => {
        cmp_impl!($lhs; $rhs);
        cmp_impl!($lhs; $($tail)*);
    };
}
cmp_impl!(u64; u64, u32, u16, u8);
cmp_impl!(i64; i64, i32, i16, i8);
cmp_impl!(u32; u64, u32, u16, u8);
cmp_impl!(i32; i64, i32, i16, i8);
cmp_impl!(u16; u64, u32, u16, u8);
cmp_impl!(i16; i64, i32, i16, i8);
cmp_impl!(u8; u64, u32, u16, u8);
cmp_impl!(i8; i64, i32, i16, i8);
// PartialEq<MachineInt<T>> for MachineInt<U>
// PartialOrd<MachineInt<T>> for MachineInt<U>
// where P: From<T> and P: From<U>
// Generates mixed-signedness `PartialEq`/`PartialOrd` by first promoting both
// operands to a common signed type `$p` that can represent every value of
// both `$lhs` and `$rhs` losslessly, then comparing in `$p`.
macro_rules! icmp_impl {
    () => {};
    ($lhs:ty, $rhs:ty, $p:ty) => {
        impl PartialEq<MachineInt<$rhs>> for MachineInt<$lhs> {
            #[inline]
            fn eq(&self, rhs: &MachineInt<$rhs>) -> bool {
                (self.0 as $p) == (rhs.0 as $p)
            }
        }
        impl PartialOrd<MachineInt<$rhs>> for MachineInt<$lhs> {
            #[inline]
            fn partial_cmp(&self, rhs: &MachineInt<$rhs>) -> Option<Ordering> {
                Some((self.0 as $p).cmp(&(rhs.0 as $p)))
            }
        }
    };
    // Recursive arm: consume one `(lhs, rhs, promoted)` triple per step.
    ($lhs:ty, $rhs:ty, $p:ty; $($tail:tt)*) => {
        icmp_impl!($lhs, $rhs, $p);
        icmp_impl!($($tail)*);
    };
}
// TODO: u64/ixx
#[cfg_attr(rustfmt, rustfmt_skip)]
icmp_impl!(u32, i32, i64; i32, u32, i64;
           u32, i16, i64; i16, u32, i64;
           u32, i8, i64; i8, u32, i64);
#[cfg_attr(rustfmt, rustfmt_skip)]
icmp_impl!(i32, u16, i32; u16, i32, i32;
           i32, u8, i32; u8, i32, i32;
           u16, i16, i32; i16, u16, i32;
           u16, i8, i32; i8, u16, i32);
#[cfg_attr(rustfmt, rustfmt_skip)]
icmp_impl!(i16, u8, i16; u8, i16, i16;
           u8, i8, i16; i8, u8, i16);
// Generates wrapping mixed-width `Add`/`Sub` (and the assign forms) between a
// "big" type `$b` and a "small" type `$s`; the result is always the big type.
// The `rhs.0 as $b` cast sign-extends when `$s` is signed and zero-extends
// when it is unsigned (standard Rust `as` widening semantics), after which
// the arithmetic wraps in `$b`.
macro_rules! add_impl {
    () => {};
    ($b:ty, $s:ty) => {
        impl Add<MachineInt<$s>> for MachineInt<$b> {
            type Output = MachineInt<$b>;
            #[inline]
            fn add(self, rhs: MachineInt<$s>) -> Self::Output {
                MachineInt((self.0).wrapping_add(rhs.0 as $b))
            }
        }
        impl Add<MachineInt<$b>> for MachineInt<$s> {
            type Output = MachineInt<$b>;
            #[inline]
            fn add(self, rhs: MachineInt<$b>) -> Self::Output {
                MachineInt((self.0 as $b).wrapping_add(rhs.0))
            }
        }
        impl AddAssign<MachineInt<$s>> for MachineInt<$b> {
            #[inline]
            fn add_assign(&mut self, rhs: MachineInt<$s>) {
                self.0 = self.0.wrapping_add(rhs.0 as $b);
            }
        }
        impl Sub<MachineInt<$s>> for MachineInt<$b> {
            type Output = MachineInt<$b>;
            #[inline]
            fn sub(self, rhs: MachineInt<$s>) -> Self::Output {
                MachineInt((self.0).wrapping_sub(rhs.0 as $b))
            }
        }
        impl Sub<MachineInt<$b>> for MachineInt<$s> {
            type Output = MachineInt<$b>;
            #[inline]
            fn sub(self, rhs: MachineInt<$b>) -> Self::Output {
                MachineInt((self.0 as $b).wrapping_sub(rhs.0))
            }
        }
        impl SubAssign<MachineInt<$s>> for MachineInt<$b> {
            #[inline]
            fn sub_assign(&mut self, rhs: MachineInt<$s>) {
                self.0 = self.0.wrapping_sub(rhs.0 as $b);
            }
        }
    };
    // Recursive arm: one `(big, small)` pair per step.
    ($b:ty, $s:ty; $($tail:tt)*) => {
        add_impl!($b, $s);
        add_impl!($($tail)*);
    };
}
#[cfg_attr(rustfmt, rustfmt_skip)]
add_impl!(u64, u32; u64, u16; u64, u8; u64, i32; u64, i16; u64, i8;
          i64, u32; i64, u16; i64, u8; i64, i32; i64, i16; i64, i8;
          u32, u16; u32, u8; u32, i16; u32, i8;
          i32, u16; i32, u8; i32, i16; i32, i8;
          u16, u8; u16, i8;
          i16, u8; i16, i8);
// Generates mixed-width bitwise `BitAnd`/`BitOr`/`BitXor` (and assign forms)
// between a big type `$b` and a small type `$s`; the result is the big type.
// The small operand is cast through `$p` — the *unsigned* type of the small
// width — before widening (`as $p as $b`), so a negative signed value is
// zero-extended rather than sign-extended: only its original bit pattern
// participates, without smearing 1-bits into the high half of `$b`.
macro_rules! bit_impl {
    () => {};
    ($b:ty, $s:ty, $p:ty) => {
        impl BitAnd<MachineInt<$s>> for MachineInt<$b> {
            type Output = MachineInt<$b>;
            #[inline]
            fn bitand(self, rhs: MachineInt<$s>) -> Self::Output {
                MachineInt(self.0 & (rhs.0 as $p as $b))
            }
        }
        impl BitAnd<MachineInt<$b>> for MachineInt<$s> {
            type Output = MachineInt<$b>;
            #[inline]
            fn bitand(self, rhs: MachineInt<$b>) -> Self::Output {
                MachineInt((self.0 as $p as $b) & rhs.0)
            }
        }
        impl BitAndAssign<MachineInt<$s>> for MachineInt<$b> {
            #[inline]
            fn bitand_assign(&mut self, rhs: MachineInt<$s>) {
                self.0 = self.0 & (rhs.0 as $p as $b);
            }
        }
        impl BitOr<MachineInt<$s>> for MachineInt<$b> {
            type Output = MachineInt<$b>;
            #[inline]
            fn bitor(self, rhs: MachineInt<$s>) -> Self::Output {
                MachineInt(self.0 | (rhs.0 as $p as $b))
            }
        }
        impl BitOr<MachineInt<$b>> for MachineInt<$s> {
            type Output = MachineInt<$b>;
            #[inline]
            fn bitor(self, rhs: MachineInt<$b>) -> Self::Output {
                MachineInt((self.0 as $p as $b) | rhs.0)
            }
        }
        impl BitOrAssign<MachineInt<$s>> for MachineInt<$b> {
            #[inline]
            fn bitor_assign(&mut self, rhs: MachineInt<$s>) {
                self.0 = self.0 | (rhs.0 as $p as $b);
            }
        }
        impl BitXor<MachineInt<$s>> for MachineInt<$b> {
            type Output = MachineInt<$b>;
            #[inline]
            fn bitxor(self, rhs: MachineInt<$s>) -> Self::Output {
                MachineInt(self.0 ^ (rhs.0 as $p as $b))
            }
        }
        impl BitXor<MachineInt<$b>> for MachineInt<$s> {
            type Output = MachineInt<$b>;
            #[inline]
            fn bitxor(self, rhs: MachineInt<$b>) -> Self::Output {
                MachineInt((self.0 as $p as $b) ^ rhs.0)
            }
        }
        impl BitXorAssign<MachineInt<$s>> for MachineInt<$b> {
            #[inline]
            fn bitxor_assign(&mut self, rhs: MachineInt<$s>) {
                self.0 = self.0 ^ (rhs.0 as $p as $b);
            }
        }
    };
    // Recursive arm: one `(big, small, unsigned-small)` triple per step.
    ($b:ty, $s:ty, $p:ty; $($tail:tt)*) => {
        bit_impl!($b, $s, $p);
        bit_impl!($($tail)*);
    };
}
#[cfg_attr(rustfmt, rustfmt_skip)]
bit_impl!(u64, u32, u32; u64, u16, u16; u64, u8, u8;
          u64, i32, u32; u64, i16, u16; u64, i8, u8;
          i64, u32, u32; i64, u16, u16; i64, u8, u8;
          i64, i32, u32; i64, i16, u16; i64, i8, u8;
          u32, u16, u16; u32, u8, u8;
          u32, i16, u16; u32, i8, u8;
          i32, u16, u16; i32, u8, u8;
          i32, i16, u16; i32, i8, u8;
          u16, u8, u8; u16, i8, u8;
          i16, u8, u8; i16, i8, u8);
// Generates lossless `From` conversions: wrapper-to-wrapper, primitive into
// wrapper, and wrapper back to primitive. Every `($lhs, $rhs)` pair invoked
// below is a widening of the same-signedness (or unsigned-into-wider-signed)
// kind, so the `as $lhs` cast cannot lose information.
macro_rules! from_impl {
    ($lhs:ty, $rhs:ty) => {
        impl From<MachineInt<$rhs>> for MachineInt<$lhs> {
            #[inline]
            fn from(val: MachineInt<$rhs>) -> Self {
                MachineInt(val.0 as $lhs)
            }
        }
        impl From<$rhs> for MachineInt<$lhs> {
            #[inline]
            fn from(val: $rhs) -> Self {
                MachineInt(val as $lhs)
            }
        }
        impl From<MachineInt<$rhs>> for $lhs {
            #[inline]
            fn from(val: MachineInt<$rhs>) -> Self {
                val.0 as $lhs
            }
        }
    };
    // Recursive arm: one `(wide, narrow)` pair per step.
    ($lhs:ty, $rhs:ty; $($tail:tt)*) => {
        from_impl!($lhs, $rhs);
        from_impl!($($tail)*);
    };
}
from_impl!(u64, u32; u64, u16; u64, u8);
from_impl!(i64, u32; i64, u16; i64, u8; i64, i32; i64, i16; i64, i8);
from_impl!(u32, u16; u32, u8);
from_impl!(i32, u16; i32, u8; i32, i16; i32, i8);
from_impl!(u16, u8);
from_impl!(i16, u8; i16, i8);
// usize/isize conversions depend on pointer width: only widths that fit are
// generated for each target.
#[cfg(target_pointer_width = "64")]
from_impl!(usize, u64; usize, u32; usize, u16; usize, u8);
#[cfg(target_pointer_width = "64")]
from_impl!(isize, i64; isize, i32; isize, i16; isize, i8);
#[cfg(target_pointer_width = "32")]
from_impl!(usize, u32; usize, u16; usize, u8);
#[cfg(target_pointer_width = "32")]
from_impl!(isize, i32; isize, i16; isize, i8);
#[cfg(target_pointer_width = "16")]
from_impl!(usize, u16; usize, u8);
#[cfg(target_pointer_width = "16")]
from_impl!(isize, i16; isize, i8);
// Generates explicit, possibly-lossy `AsFrom` conversions between every pair
// of integer wrappers. Unlike `From`, these use raw `as`-cast semantics
// (truncation / sign reinterpretation), which the `AsFrom` trait makes an
// opt-in, named operation instead of a silent cast.
macro_rules! as_from_impl {
    ($lhs:ty; $rhs:ty) => {
        impl AsFrom<MachineInt<$rhs>> for MachineInt<$lhs> {
            #[inline]
            fn as_from(val: MachineInt<$rhs>) -> Self {
                MachineInt(val.0 as $lhs)
            }
        }
        impl AsFrom<$rhs> for MachineInt<$lhs> {
            #[inline]
            fn as_from(val: $rhs) -> Self {
                MachineInt(val as $lhs)
            }
        }
    };
    // Recursive arm: peel one source type off the comma-separated tail.
    ($lhs:ty; $rhs:ty, $($tail:tt)*) => {
        as_from_impl!($lhs; $rhs);
        as_from_impl!($lhs; $($tail)*);
    };
}
#[cfg_attr(rustfmt, rustfmt_skip)]
as_from_impl!(u64; i64, u32, i32, u16, i16, u8, i8);
as_from_impl!(i64; u64, u32, i32, u16, i16, u8, i8);
as_from_impl!(u32; u64, i64, i32, u16, i16, u8, i8);
as_from_impl!(i32; u64, i64, u32, u16, i16, u8, i8);
as_from_impl!(u16; u64, i64, u32, i32, i16, u8, i8);
as_from_impl!(i16; u64, i64, u32, i32, u16, u8, i8);
as_from_impl!(u8; u64, i64, u32, i32, u16, i16, i8);
as_from_impl!(i8; u64, i64, u32, i32, u16, i16, u8);
// Formatting traits all delegate straight to the wrapped integer, so a
// `MachineInt<T>` prints exactly like a bare `T` in every format.
impl<T: fmt::Display> fmt::Display for MachineInt<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Display::fmt(&self.0, f)
    }
}
impl<T: fmt::Debug> fmt::Debug for MachineInt<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Debug::fmt(&self.0, f)
    }
}
impl<T: fmt::LowerHex> fmt::LowerHex for MachineInt<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::LowerHex::fmt(&self.0, f)
    }
}
impl<T: fmt::UpperHex> fmt::UpperHex for MachineInt<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::UpperHex::fmt(&self.0, f)
    }
}
impl<T: fmt::Binary> fmt::Binary for MachineInt<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Binary::fmt(&self.0, f)
    }
}
impl<T: fmt::Octal> fmt::Octal for MachineInt<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Octal::fmt(&self.0, f)
    }
}
// Generates bit-rotation helpers, delegating to the std `rotate_left` /
// `rotate_right` intrinsics. Only the unsigned wrappers get these (see the
// invocation below).
macro_rules! rotate_impl {
    ($($t:ty)*) => ($(
        impl MachineInt<$t> {
            #[inline]
            pub fn rotate_left(self, n: u32) -> Self {
                MachineInt(self.0.rotate_left(n))
            }
            #[inline]
            pub fn rotate_right(self, n: u32) -> Self {
                MachineInt(self.0.rotate_right(n))
            }
        }
    )*)
}
rotate_impl!(u8 u16 u32 u64);
| true |
83bedf602e22b99703501ffc735fb58adf1f14b4
|
Rust
|
hamadakafu/kyopro
|
/src/bin/typical90_34.rs
|
UTF-8
| 1,234 | 2.890625 | 3 |
[] |
no_license
|
use std::cmp::Reverse;
use std::cmp::{max, min};
use std::collections::{HashMap, HashSet};
use itertools::Itertools;
use whiteread::parse_line;
const ten97: usize = 1000_000_007;
/// 2の逆元 mod ten97.割りたいときに使う
const inv2ten97: u128 = 500_000_004;
/// Two-pointer ("shakutori" / sliding-window) solution: reads `n`, `k` and an
/// array `aa` from stdin, then prints the length of the longest contiguous
/// subarray of `aa` containing at most `k` distinct values.
fn main() {
    let (n, k): (usize, usize) = parse_line().unwrap();
    let aa: Vec<usize> = parse_line().unwrap();
    // Window is aa[start..=end]; `now_map` counts occurrences of each value
    // currently inside it, so `now_map.len()` is the distinct count.
    let mut start = 0;
    let mut end = 0;
    let mut ans = 1;
    let mut now_map: HashMap<usize, usize> = HashMap::new();
    *now_map.entry(aa[start]).or_default() += 1;
    while start != n - 1 {
        // Extend to the right while the next element either keeps the
        // distinct count under k or is already present in the window;
        // otherwise shrink from the left.
        if end < n - 1 && (now_map.len() < k || now_map.contains_key(&aa[end + 1])) {
            end += 1;
            *now_map.entry(aa[end]).or_default() += 1;
        } else {
            if *now_map.get(&aa[start]).unwrap() == 1 {
                now_map.remove(&aa[start]);
            } else {
                *now_map.get_mut(&aa[start]).unwrap() -= 1;
            }
            start += 1;
        }
        ans = max(ans, end + 1 - start);
    }
    println!("{}", ans);
}
| true |
7af0f66dbcd71cc1316ecf58eb3f389dad7f0661
|
Rust
|
kenchan/competitive_programming
|
/atcoder/abc276/src/bin/a.rs
|
UTF-8
| 243 | 2.671875 | 3 |
[] |
no_license
|
use proconio::input;
/// Prints the 1-based position of the last 'a' in `s` counted from the
/// front, or -1 if `s` contains no 'a'.
fn main() {
    input! {
        s: String,
    }
    // Distance of the last 'a' from the end of the string, if any.
    match s.chars().rev().position(|c| c == 'a') {
        Some(i) => println!("{}", s.len() - i),
        None => println!("-1"),
    }
}
| true |
113393c7070b983d73d73ae8c6b3d5606c131d2c
|
Rust
|
imos/icfpc2021
|
/src/ugougo.rs
|
UTF-8
| 3,891 | 2.640625 | 3 |
[
"MIT"
] |
permissive
|
use std::iter::*;
use crate::*;
use rand;
use rand::*;
// The four unit moves (right, up, left, down) a vertex may take in one
// hill-climbing step.
const DIRS: [Point; 4] = [
    P::<i64>(1, 0),
    P::<i64>(0, 1),
    P::<i64>(-1, 0),
    P::<i64>(0, -1),
];
/// Local-search ("wiggle") optimizer: for `cycles` iterations, picks a random
/// vertex and a random unit direction, moves the vertex by one step, and
/// keeps the move only if it does not worsen the constraint violations and
/// does not increase the dislike score (vertices landing on bonus positions
/// are rewarded by subtracting them from the score).
///
/// Returns the improved pose together with the number of accepted moves.
pub fn ugougo(problem: &Input, pose: &Output, cycles: i32) -> (Output, i32) {
    let Input {
        hole,
        figure: Figure {
            edges,
            vertices: original_vertices,
        },
        epsilon,
        bonuses,
        ..
    } = problem;
    let Output {
        mut vertices,
        bonuses: use_bonuses,
    } = pose.clone();
    // The Globalist bonus relaxes per-edge stretch limits to a global budget.
    let globalist = use_bonuses
        .iter()
        .find(|&b| b.bonus == BonusType::Globalist)
        .is_some();
    let n = original_vertices.len();
    assert_eq!(n, vertices.len());
    // Target squared length for every edge, taken from the original figure.
    let dist2: Vec<_> = edges
        .iter()
        .map(|&(a, b)| (original_vertices[a] - original_vertices[b]).abs2())
        .collect();
    // assert!(vertices
    //     .iter()
    //     .all(|&v| !P::contains_p(hole, v).is_negative()));
    // assert!(edges
    //     .iter()
    //     .map(|&(a, b)| (vertices[a], vertices[b]))
    //     .enumerate()
    //     .all(|(i, d)| P::contains_s(hole, d)
    //         && stretch_within((d.0 - d.1).abs2(), dist2[i], *epsilon) == Ordering::Equal));
    // Adjacency list: for each vertex, the (neighbor, edge-index) pairs.
    let mut adj = vec![vec![]; n];
    for i in 0..edges.len() {
        let (a, b) = edges[i];
        adj[a].push((b, i));
        adj[b].push((a, i));
    }
    let bonus_set = std::collections::BTreeSet::from_iter(bonuses.iter().map(|b| b.position));
    // Score to minimize: dislikes minus one point per vertex sitting on a bonus.
    let mut dislikes = compute_dislikes(problem, &pose)
        - vertices.iter().filter(|&v| bonus_set.contains(v)).count() as i64;
    let mut rng = rand::thread_rng();
    let mut k = 0;
    for _ in 0..cycles {
        let a = rng.gen_range(0..n);
        let d = DIRS[rng.gen_range(0..4)];
        // Penalty *before* the move: 0 when the vertex already satisfies all
        // constraints; otherwise a soft score we must not increase.
        let penalty = if check_constraints_around_vertex(
            hole, edges, &vertices, &dist2, a, &adj[a], *epsilon, globalist,
        ) {
            0
        } else {
            calculate_penalty(
                hole, edges, &vertices, &dist2, a, &adj[a], *epsilon, globalist,
            )
        };
        vertices[a] += d; // destructive
        // Accept only if the move keeps constraints satisfied (penalty == 0)
        // or at least does not increase the violation score.
        let ok = if penalty == 0 {
            check_constraints_around_vertex(
                hole, edges, &vertices, &dist2, a, &adj[a], *epsilon, globalist,
            )
        } else {
            calculate_penalty(
                hole, edges, &vertices, &dist2, a, &adj[a], *epsilon, globalist,
            ) <= penalty
        };
        if ok {
            let new_dislikes = compute_dislikes(
                problem,
                &Output {
                    vertices: vertices.clone(),
                    bonuses: Vec::new(),
                },
            ) - vertices.iter().filter(|&v| bonus_set.contains(v)).count()
                as i64;
            if new_dislikes <= dislikes {
                dislikes = new_dislikes;
                k += 1;
                continue; // accept change
            }
        }
        vertices[a] -= d; // revert
    }
    (
        Output {
            vertices,
            bonuses: use_bonuses,
        },
        k,
    )
}
/// Hard-constraint check for vertex `a`: true iff the vertex lies inside the
/// hole (a negative `contains_p` result appears to mean "outside" — confirm)
/// and its incident edges satisfy the stretch rules.
///
/// With the Globalist bonus, the *sum* of relative stretch deviations over
/// all edges must fit the global epsilon budget (the per-edge terms are put
/// over the common denominator `prod` to stay in integer arithmetic);
/// otherwise every incident edge must individually stay inside the hole and
/// within the per-edge epsilon band.
fn check_constraints_around_vertex(
    hole: &Vec<Point>,
    edges: &Vec<(usize, usize)>,
    vertices: &Vec<Point>,
    dist2: &Vec<i64>,
    a: usize,
    adj: &[(usize, usize)],
    epsilon: i64,
    globalist: bool,
) -> bool {
    !P::contains_p(hole, vertices[a]).is_negative()
        && if globalist {
            let prod: i64 = dist2.iter().product();
            assert!(prod > 0);
            // NOTE(review): `prod` is a product over *all* edges and can
            // overflow i64 for large figures — verify input bounds.
            let prod_sum: i64 = edges
                .iter()
                .enumerate()
                .map(|(i, &(a, b))| {
                    prod / dist2[i] * ((vertices[a] - vertices[b]).abs2() - dist2[i])
                })
                .sum();
            1000000 * prod_sum <= prod * edges.len() as i64 * epsilon
        } else {
            adj.iter()
                .map(|&(b, i)| (i, (vertices[a], vertices[b])))
                .all(|(i, d)| {
                    P::contains_s(hole, d)
                        && stretch_within((d.0 - d.1).abs2(), dist2[i], epsilon) == Ordering::Equal
                })
        }
}
/// Soft-constraint score for vertex `a`: 0 means all local constraints hold,
/// larger values mean worse violations. Used by `ugougo` to allow moves that
/// strictly reduce (or keep) the violation level.
///
/// Penalty components:
/// * +1000 if the vertex lies outside the hole,
/// * +1000 for each incident edge that leaves the hole,
/// * for each incident edge, how far its squared length deviates from the
///   allowed epsilon band (clamped at zero when within tolerance).
fn calculate_penalty(
    hole: &Vec<Point>,
    _edges: &Vec<(usize, usize)>,
    vertices: &Vec<Point>,
    dist2: &Vec<i64>,
    a: usize,
    adj: &[(usize, usize)],
    epsilon: i64,
    _globalist: bool,
) -> i64 {
    let mut penalty = 0;
    if P::contains_p(hole, vertices[a]).is_negative() {
        penalty += 1000;
    }
    for &(b, i) in adj {
        let d = (vertices[a], vertices[b]);
        if !P::contains_s(hole, d) {
            penalty += 1000;
        }
        penalty += std::cmp::max(
            0,
            ((d.0 - d.1).abs2() - dist2[i]).abs() * 1000000 - epsilon * dist2[i],
        );
    }
    penalty
}
| true |
63fc5f4811fff91e877649278ff6a241d7304bd3
|
Rust
|
metacall/core
|
/source/loaders/rs_loader/rust/compiler/src/memory.rs
|
UTF-8
| 1,654 | 2.53125 | 3 |
[
"Python-2.0",
"GPL-2.0-or-later",
"MPL-1.1",
"NCSA",
"LicenseRef-scancode-proprietary-license",
"GPL-1.0-or-later",
"BSD-3-Clause",
"MPL-2.0",
"Ruby",
"BSD-2-Clause",
"MIT",
"Apache-2.0"
] |
permissive
|
use crate::{compile, CompilerState, RegistrationError, Source};
use std::{ffi::c_void};
use crate::{registrator, DlopenLibrary};
#[derive(Debug)]
/// A Rust script compiled from an in-memory source string, together with the
/// dlopen handle of the produced shared library.
pub struct MemoryRegistration {
    // Logical name of the in-memory "file".
    pub name: String,
    // Compiler output state (includes the path of the produced artifact).
    pub state: CompilerState,
    // Handle to the compiled library; `None` if it has been released.
    pub dlopen: Option<DlopenLibrary>,
}
impl MemoryRegistration {
    /// Compiles `code` as an in-memory source named `name`, dlopens the
    /// resulting library, removes the temporary build directory, and returns
    /// the registration.
    ///
    /// # Errors
    /// `CompilationError` with the compiler's combined diagnostics, or
    /// `DlopenError` when the produced library cannot be loaded.
    pub fn new(name: String, code: String) -> Result<MemoryRegistration, RegistrationError> {
        let state = match compile(Source::new(Source::Memory {
            name: name.clone(),
            code,
        })) {
            Ok(state) => state,
            Err(error) => {
                return Err(RegistrationError::CompilationError(String::from(format!(
                    "{}\n{}\n{}",
                    error.err, error.errors, error.diagnostics
                ))))
            }
        };
        let dlopen = match DlopenLibrary::new(&state.output) {
            Ok(instance) => instance,
            Err(error) => return Err(RegistrationError::DlopenError(error)),
        };
        // cleanup temp dir
        // NOTE(review): this panics (expect) if removal fails, even though
        // compilation and loading already succeeded — consider logging instead.
        let mut destination = state.output.clone();
        destination.pop();
        std::fs::remove_dir_all(destination).expect("Unable to cleanup tempdir");
        Ok(MemoryRegistration {
            name,
            state,
            dlopen: Some(dlopen),
        })
    }
    /// Registers the compiled functions with the loader, exposing them under
    /// `ctx`. Fails if the library handle has already been dropped.
    pub fn discover(&self, loader_impl: *mut c_void, ctx: *mut c_void) -> Result<(), String> {
        match &self.dlopen {
            Some(dl) => {
                registrator::register(&self.state, &dl, loader_impl, ctx);
                Ok(())
            }
            None => Err(String::from("The dlopen_lib is None")),
        }
    }
}
| true |
8e64f65274c749732dfb9fff9faf2abc43b5ff85
|
Rust
|
AntonGepting/tmux-interface-rs
|
/src/commands/windows_and_panes/last_window_tests.rs
|
UTF-8
| 797 | 2.875 | 3 |
[
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] |
permissive
|
#[test]
// Verifies that the builder produces exactly the argv the tmux man page
// documents, honoring the `cmd_alias` feature (full name vs. `last`) and the
// `tmux_0_8` feature (the `-t target-session` flag).
fn last_window() {
    use crate::LastWindow;
    use std::borrow::Cow;
    // Select the last (previously selected) window
    //
    // # Manual
    //
    // tmux ^0.8:
    // ```text
    // last-window [-t target-session]
    // (alias: last)
    // ```
    let last_window = LastWindow::new();
    #[cfg(feature = "tmux_0_8")]
    let last_window = last_window.target_session("1");
    // Build the expected argv under the same feature flags as the builder.
    #[cfg(not(feature = "cmd_alias"))]
    let cmd = "last-window";
    #[cfg(feature = "cmd_alias")]
    let cmd = "last";
    let mut s = Vec::new();
    s.push(cmd);
    #[cfg(feature = "tmux_0_8")]
    s.extend_from_slice(&["-t", "1"]);
    let s: Vec<Cow<str>> = s.into_iter().map(|a| a.into()).collect();
    let last_window = last_window.build().to_vec();
    assert_eq!(last_window, s);
}
| true |
943fe5719ea5500b749aa9d3f216d74e77553546
|
Rust
|
divagant-martian/lighthouse
|
/slasher/tests/wrap_around.rs
|
UTF-8
| 2,592 | 2.609375 | 3 |
[
"Apache-2.0"
] |
permissive
|
use slasher::{
test_utils::{indexed_att, logger},
Config, Error, Slasher,
};
use tempdir::TempDir;
use types::Epoch;
#[test]
// Regression test: after the epoch counter wraps past `history_length`, the
// chunk index arithmetic (modulo `history_length`) must not report an old,
// already-pruned attestation as surrounding a new one.
fn attestation_pruning_empty_wrap_around() {
    let tempdir = TempDir::new("slasher").unwrap();
    let mut config = Config::new(tempdir.path().into());
    config.validator_chunk_size = 1;
    config.chunk_size = 16;
    config.history_length = 16;
    let slasher = Slasher::open(config.clone(), logger()).unwrap();
    let v = vec![0];
    let history_length = config.history_length as u64;
    let mut current_epoch = Epoch::new(history_length - 1);
    // Seed one attestation at the very edge of the history window.
    slasher.accept_attestation(indexed_att(v.clone(), 0, history_length - 1, 0));
    slasher.process_queued(current_epoch).unwrap();
    slasher.prune_database(current_epoch).unwrap();
    // Delete the previous attestation
    current_epoch = Epoch::new(2 * history_length + 2);
    slasher.prune_database(current_epoch).unwrap();
    // Add an attestation that would be surrounded with the modulo considered
    slasher.accept_attestation(indexed_att(
        v.clone(),
        2 * history_length - 3,
        2 * history_length - 2,
        1,
    ));
    // Must not flag a (false) surround slashing against the pruned record.
    slasher.process_queued(current_epoch).unwrap();
}
// Test that pruning can recover from a `MapFull` error
#[test]
fn pruning_with_map_full() {
    let tempdir = TempDir::new("slasher").unwrap();
    let mut config = Config::new(tempdir.path().into());
    config.validator_chunk_size = 1;
    config.chunk_size = 16;
    config.history_length = 1024;
    // Deliberately tiny database so inserts eventually hit lmdb MapFull.
    config.max_db_size_mbs = 1;
    let slasher = Slasher::open(config.clone(), logger()).unwrap();
    let v = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12];
    let mut current_epoch = Epoch::new(0);
    // Phase 1: insert attestations until the database reports MapFull.
    loop {
        slasher.accept_attestation(indexed_att(
            v.clone(),
            (current_epoch - 1).as_u64(),
            current_epoch.as_u64(),
            0,
        ));
        if let Err(Error::DatabaseError(lmdb::Error::MapFull)) =
            slasher.process_queued(current_epoch)
        {
            break;
        }
        current_epoch += 1;
    }
    // Phase 2: pruning must free enough space for processing to succeed
    // again; any error other than another MapFull is a test failure.
    loop {
        slasher.prune_database(current_epoch).unwrap();
        slasher.accept_attestation(indexed_att(
            v.clone(),
            (current_epoch - 1).as_u64(),
            current_epoch.as_u64(),
            0,
        ));
        match slasher.process_queued(current_epoch) {
            Ok(()) => break,
            Err(Error::DatabaseError(lmdb::Error::MapFull)) => {
                current_epoch += 1;
            }
            Err(e) => panic!("{:?}", e),
        }
    }
}
| true |
c4d3e49da23f22d9f3288af28c2fa89da3aa537d
|
Rust
|
0x192/iced_aw
|
/src/native/wrap.rs
|
UTF-8
| 15,772 | 3.21875 | 3 |
[
"MIT"
] |
permissive
|
//! A widget that displays its children in multiple horizontal or vertical runs.
//!
//! *This API requires the following crate features to be activated: `wrap`*
use std::marker::PhantomData;
use iced_native::{
event,
layout::{self, Limits, Node},
overlay, Align, Clipboard, Element, Event, Hasher, Layout, Length, Point, Rectangle, Size,
Widget,
};
/// A container that distributes its contents in multiple horizontal or
/// vertical runs, wrapping to a new run when a line fills up. The run axis is
/// selected by the `Direction` type parameter.
#[allow(missing_debug_implementations)]
pub struct Wrap<'a, Message, Renderer, Direction> {
    /// The elements to distribute.
    pub elements: Vec<Element<'a, Message, Renderer>>,
    /// The alignment of the [`Wrap`](Wrap).
    pub alignment: Align,
    /// The width of the [`Wrap`](Wrap).
    pub width: Length,
    /// The height of the [`Wrap`](Wrap).
    pub height: Length,
    /// The maximum width of the [`Wrap`](Wrap).
    pub max_width: u32,
    /// The maximum height of the [`Wrap`](Wrap).
    pub max_height: u32,
    /// The padding of each element of the [`Wrap`](Wrap).
    pub padding: u16,
    /// The spacing between each element of the [`Wrap`](Wrap).
    pub spacing: u16,
    /// The spacing between each line of the [`Wrap`](Wrap).
    pub line_spacing: u16,
    /// The minimal length of each line of the [`Wrap`](Wrap).
    pub line_minimal_length: u32,
    // Zero-sized marker carrying the layout direction at the type level.
    #[allow(clippy::missing_docs_in_private_items)]
    _direction: PhantomData<Direction>,
}
// Constructors for the horizontal variant (elements flow left-to-right,
// wrapping to new rows).
impl<'a, Message, Renderer> Wrap<'a, Message, Renderer, direction::Horizontal> {
    /// Creates an empty horizontal [`Wrap`](Wrap).
    #[must_use]
    pub fn new() -> Self {
        Self::with_elements(Vec::new())
    }
    /// Creates a [`Wrap`](Wrap) with the given elements.
    ///
    /// It expects:
    /// * the vector containing the [`Element`](iced_native::Element)s for this [`Wrap`](Wrap).
    #[must_use]
    pub fn with_elements(elements: Vec<Element<'a, Message, Renderer>>) -> Self {
        Self {
            elements,
            ..Wrap::default()
        }
    }
}
// Constructors for the vertical variant (elements flow top-to-bottom,
// wrapping to new columns).
impl<'a, Message, Renderer> Wrap<'a, Message, Renderer, direction::Vertical> {
    /// Creates an empty vertical [`Wrap`](Wrap).
    #[must_use]
    pub fn new_vertical() -> Self {
        Self::with_elements_vertical(Vec::new())
    }
    /// Creates a [`Wrap`](Wrap) with the given elements.
    ///
    /// It expects:
    /// * the vector containing the [`Element`](iced_native::Element)s for this [`Wrap`](Wrap).
    #[must_use]
    pub fn with_elements_vertical(elements: Vec<Element<'a, Message, Renderer>>) -> Self {
        Self {
            elements,
            ..Wrap::default()
        }
    }
}
// Chainable builder-style setters, shared by both directions. Each consumes
// `self` and returns it, so they compose fluently.
impl<'a, Message, Renderer, Direction> Wrap<'a, Message, Renderer, Direction> {
    /// Sets the spacing of the [`Wrap`](Wrap).
    #[must_use]
    pub const fn spacing(mut self, units: u16) -> Self {
        self.spacing = units;
        self
    }
    /// Sets the spacing of the lines of the [`Wrap`](Wrap).
    #[must_use]
    pub const fn line_spacing(mut self, units: u16) -> Self {
        self.line_spacing = units;
        self
    }
    /// Sets the minimal length of the lines of the [`Wrap`](Wrap).
    #[must_use]
    pub const fn line_minimal_length(mut self, units: u32) -> Self {
        self.line_minimal_length = units;
        self
    }
    /// Sets the padding of the elements in the [`Wrap`](Wrap).
    #[must_use]
    pub const fn padding(mut self, units: u16) -> Self {
        self.padding = units;
        self
    }
    /// Sets the width of the [`Wrap`](Wrap).
    #[must_use]
    pub const fn width_items(mut self, width: Length) -> Self {
        self.width = width;
        self
    }
    /// Sets the height of the [`Wrap`](Wrap).
    #[must_use]
    pub const fn height_items(mut self, height: Length) -> Self {
        self.height = height;
        self
    }
    /// Sets the maximum width of the [`Wrap`](Wrap).
    #[must_use]
    pub const fn max_width(mut self, max_width: u32) -> Self {
        self.max_width = max_width;
        self
    }
    /// Sets the maximum height of the [`Wrap`](Wrap).
    #[must_use]
    pub const fn max_height(mut self, max_height: u32) -> Self {
        self.max_height = max_height;
        self
    }
    /// Sets the alignment of the [`Wrap`](Wrap).
    #[must_use]
    pub const fn align_items(mut self, align: Align) -> Self {
        self.alignment = align;
        self
    }
    /// Pushes an [`Element`](iced_native::Element) to the [`Wrap`](Wrap).
    pub fn push<E>(mut self, element: E) -> Self
    where
        E: Into<Element<'a, Message, Renderer>>,
    {
        self.elements.push(element.into());
        self
    }
}
// `Widget` glue: layout and draw are delegated to the direction-specific
// `WrapLayout` implementation; events and overlays are forwarded to children.
impl<'a, Message, Renderer, Direction> Widget<Message, Renderer>
    for Wrap<'a, Message, Renderer, Direction>
where
    Self: WrapLayout<Renderer>,
    Renderer: iced_native::Renderer,
{
    fn width(&self) -> Length {
        self.width
    }
    fn height(&self) -> Length {
        self.height
    }
    fn layout(&self, renderer: &Renderer, limits: &layout::Limits) -> layout::Node {
        self.inner_layout(renderer, limits)
    }
    // Hashes everything that influences layout so iced can cache layout nodes.
    fn hash_layout(&self, state: &mut Hasher) {
        use std::hash::Hash;
        // Private marker type whose TypeId distinguishes Wrap from other
        // widgets with otherwise-identical field hashes.
        #[allow(clippy::missing_docs_in_private_items)]
        struct Marker;
        std::any::TypeId::of::<Marker>().hash(state);
        self.alignment.hash(state);
        self.line_minimal_length.hash(state);
        self.width.hash(state);
        self.height.hash(state);
        self.max_width.hash(state);
        self.max_height.hash(state);
        self.line_spacing.hash(state);
        self.padding.hash(state);
        self.spacing.hash(state);
        for elem in &self.elements {
            elem.hash_layout(state)
        }
    }
    // Forwards the event to every child and merges the resulting statuses
    // (Captured wins over Ignored).
    fn on_event(
        &mut self,
        event: Event,
        layout: Layout<'_>,
        cursor_position: Point,
        renderer: &Renderer,
        clipboard: &mut dyn Clipboard,
        messages: &mut Vec<Message>,
    ) -> event::Status {
        self.elements
            .iter_mut()
            .zip(layout.children())
            .map(|(child, layout)| {
                child.on_event(
                    event.clone(),
                    layout,
                    cursor_position,
                    renderer,
                    clipboard,
                    messages,
                )
            })
            .fold(event::Status::Ignored, event::Status::merge)
    }
    // Returns the first child overlay, if any child produces one.
    fn overlay(&mut self, layout: Layout<'_>) -> Option<overlay::Element<'_, Message, Renderer>> {
        self.elements
            .iter_mut()
            .zip(layout.children())
            .find_map(|(child, layout)| child.overlay(layout))
    }
    fn draw(
        &self,
        renderer: &mut Renderer,
        defaults: &Renderer::Defaults,
        layout: Layout<'_>,
        cursor_position: Point,
        viewport: &Rectangle,
    ) -> Renderer::Output {
        self.inner_draw(renderer, defaults, layout, cursor_position, viewport)
    }
}
// Allows either Wrap variant to be used anywhere an `Element` is expected.
impl<'a, Message, Renderer> From<Wrap<'a, Message, Renderer, direction::Horizontal>>
    for Element<'a, Message, Renderer>
where
    Renderer: 'a + iced_native::row::Renderer,
    Message: 'a,
{
    fn from(
        wrap: Wrap<'a, Message, Renderer, direction::Horizontal>,
    ) -> Element<'a, Message, Renderer> {
        Element::new(wrap)
    }
}
impl<'a, Message, Renderer> From<Wrap<'a, Message, Renderer, direction::Vertical>>
    for Element<'a, Message, Renderer>
where
    Renderer: 'a + iced_native::column::Renderer,
    Message: 'a,
{
    fn from(
        wrap: Wrap<'a, Message, Renderer, direction::Vertical>,
    ) -> Element<'a, Message, Renderer> {
        Element::new(wrap)
    }
}
impl<'a, Message, Renderer, Direction> Default for Wrap<'a, Message, Renderer, Direction> {
    // Empty wrap that shrinks to its content, with no padding/spacing and a
    // minimal line length of 10.
    fn default() -> Self {
        Self {
            elements: vec![],
            alignment: Align::Start,
            width: Length::Shrink,
            height: Length::Shrink,
            max_width: u32::MAX,
            max_height: u32::MAX,
            padding: 0,
            spacing: 0,
            line_spacing: 0,
            line_minimal_length: 10,
            _direction: PhantomData::default(),
        }
    }
}
/// Direction-specific layout/draw strategy of the [`Wrap`](Wrap); implemented
/// once for horizontal and once for vertical wrapping.
pub trait WrapLayout<Renderer>
where
    Renderer: iced_native::Renderer,
{
    /// Computes the layout node tree for the wrap's children.
    fn inner_layout(&self, renderer: &Renderer, limits: &layout::Limits) -> layout::Node;
    /// Draws the wrap's children via the direction-appropriate renderer.
    fn inner_draw(
        &self,
        renderer: &mut Renderer,
        defaults: &Renderer::Defaults,
        layout: Layout<'_>,
        cursor_position: Point,
        viewport: &Rectangle,
    ) -> Renderer::Output;
}
// Horizontal wrapping: children flow left-to-right; when the next child would
// exceed the available width, a new row is started below the current one.
impl<'a, Message, Renderer> WrapLayout<Renderer>
    for Wrap<'a, Message, Renderer, direction::Horizontal>
where
    Renderer: iced_native::row::Renderer + 'a,
{
    #[allow(clippy::inline_always)]
    #[inline(always)]
    fn inner_layout(&self, renderer: &Renderer, limits: &Limits) -> Node {
        let padding = f32::from(self.padding);
        let spacing = f32::from(self.spacing);
        let line_spacing = f32::from(self.line_spacing);
        #[allow(clippy::cast_precision_loss)] // TODO: possible precision loss
        let line_minimal_length = self.line_minimal_length as f32;
        let limits = limits
            .pad(padding)
            .width(self.width)
            .height(self.height)
            .max_width(self.max_width)
            .max_height(self.max_height);
        let max_width = limits.max().width;
        // `curse` is the x cursor within the current row; `deep_curse` is the
        // y offset of the current row; `current_line_height` tracks the
        // tallest child seen in the row so far.
        let mut curse = padding;
        let mut deep_curse = padding;
        let mut current_line_height = line_minimal_length;
        let mut max_main = curse;
        // Per-row (child index range, row height) pairs, used afterwards to
        // align children vertically within their row.
        let mut align = vec![];
        let mut start = 0;
        let mut end = 0;
        let mut nodes: Vec<Node> = self
            .elements
            .iter()
            .map(|elem| {
                let node_limit = Limits::new(
                    Size::new(limits.min().width, line_minimal_length),
                    limits.max(),
                );
                let mut node = elem.layout(renderer, &node_limit);
                let size = node.size();
                let offset_init = size.width + spacing;
                let offset = curse + offset_init;
                if offset > max_width {
                    // Row overflow: close the current row and place this
                    // child as the first one of a new row below it.
                    deep_curse += current_line_height + line_spacing;
                    align.push((start..end, current_line_height));
                    start = end;
                    end += 1;
                    current_line_height = line_minimal_length;
                    node.move_to(Point::new(padding, deep_curse));
                    curse = offset_init + padding;
                } else {
                    node.move_to(Point::new(curse, deep_curse));
                    curse = offset;
                    end += 1;
                }
                current_line_height = current_line_height.max(size.height);
                max_main = max_main.max(curse);
                node
            })
            .collect();
        // Close the last (possibly partial) row.
        if end != start {
            align.push((start..end, current_line_height));
        }
        // Second pass: align every child on the cross axis within its row.
        align.into_iter().for_each(|(range, max_length)| {
            nodes[range].iter_mut().for_each(|node| {
                let size = node.size();
                let space = Size::new(size.width, max_length);
                node.align(Align::Start, self.alignment, space);
            });
        });
        let (width, height) = (
            max_main - padding,
            deep_curse - padding + current_line_height,
        );
        let size = limits.resolve(Size::new(width, height));
        Node::with_children(size.pad(padding), nodes)
    }
    #[allow(clippy::inline_always)]
    #[inline(always)]
    fn inner_draw(
        &self,
        renderer: &mut Renderer,
        defaults: &Renderer::Defaults,
        layout: Layout<'_>,
        cursor_position: Point,
        viewport: &Rectangle,
    ) -> Renderer::Output {
        renderer.draw(defaults, &self.elements, layout, cursor_position, viewport)
    }
}
// Vertical wrapping: mirror image of the horizontal case — children flow
// top-to-bottom and wrap into a new column to the right when the available
// height is exhausted.
impl<'a, Message, Renderer> WrapLayout<Renderer>
    for Wrap<'a, Message, Renderer, direction::Vertical>
where
    Renderer: iced_native::column::Renderer + 'a,
{
    #[allow(clippy::inline_always)]
    #[inline(always)]
    fn inner_layout(&self, renderer: &Renderer, limits: &Limits) -> Node {
        let padding = f32::from(self.padding);
        let spacing = f32::from(self.spacing);
        let line_spacing = f32::from(self.line_spacing);
        #[allow(clippy::cast_precision_loss)] // TODO: possible precision loss
        let line_minimal_length = self.line_minimal_length as f32;
        let limits = limits
            .pad(padding)
            .width(self.width)
            .height(self.height)
            .max_width(self.max_width)
            .max_height(self.max_height);
        let max_height = limits.max().height;
        // `curse` is the y cursor within the current column; `wide_curse` is
        // the x offset of the current column; `current_line_width` tracks the
        // widest child seen in the column so far.
        let mut curse = padding;
        let mut wide_curse = padding;
        let mut current_line_width = line_minimal_length;
        let mut max_main = curse;
        // Per-column (child index range, column width) pairs for the
        // cross-axis alignment pass below.
        let mut align = vec![];
        let mut start = 0;
        let mut end = 0;
        let mut nodes: Vec<Node> = self
            .elements
            .iter()
            .map(|elem| {
                let node_limit = Limits::new(
                    Size::new(line_minimal_length, limits.min().height),
                    limits.max(),
                );
                let mut node = elem.layout(renderer, &node_limit);
                let size = node.size();
                let offset_init = size.height + spacing;
                let offset = curse + offset_init;
                if offset > max_height {
                    // Column overflow: close the current column and place
                    // this child at the top of a new column to the right.
                    wide_curse += current_line_width + line_spacing;
                    align.push((start..end, current_line_width));
                    start = end;
                    end += 1;
                    current_line_width = line_minimal_length;
                    node.move_to(Point::new(wide_curse, padding));
                    curse = offset_init + padding;
                } else {
                    node.move_to(Point::new(wide_curse, curse));
                    end += 1;
                    curse = offset;
                }
                current_line_width = current_line_width.max(size.width);
                max_main = max_main.max(curse);
                node
            })
            .collect();
        // Close the last (possibly partial) column.
        if end != start {
            align.push((start..end, current_line_width));
        }
        // Second pass: align every child on the cross axis within its column.
        align.into_iter().for_each(|(range, max_length)| {
            nodes[range].iter_mut().for_each(|node| {
                let size = node.size();
                let space = Size::new(max_length, size.height);
                node.align(self.alignment, Align::Start, space);
            });
        });
        let (width, height) = (
            wide_curse - padding + current_line_width,
            max_main - padding,
        );
        let size = limits.resolve(Size::new(width, height));
        Node::with_children(size.pad(padding), nodes)
    }
    #[allow(clippy::inline_always)]
    #[inline(always)]
    fn inner_draw(
        &self,
        renderer: &mut Renderer,
        defaults: &Renderer::Defaults,
        layout: Layout<'_>,
        cursor_position: Point,
        viewport: &Rectangle,
    ) -> Renderer::Output {
        renderer.draw(defaults, &self.elements, layout, cursor_position, viewport)
    }
}
/// Type-level direction markers selecting the [`Wrap`](Wrap) layout strategy;
/// they carry no data and exist only as generic parameters.
pub mod direction {
    /// An vertical direction of the [`Wrap`](Wrap).
    #[derive(Debug)]
    pub struct Vertical;
    /// An horizontal direction of the [`Wrap`](Wrap).
    #[derive(Debug)]
    pub struct Horizontal;
}
| true |
9e27664cf1ecfd1f645abe52fed3f5973257ea5e
|
Rust
|
agmcleod/adventofcode-2018
|
/22/src/tile_type.rs
|
UTF-8
| 344 | 3.484375 | 3 |
[
"MIT"
] |
permissive
|
/// Terrain classification of a cave region.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum TileType {
    Rocky,
    Wet,
    Narrow,
}

impl TileType {
    /// Maps a numeric code (e.g. an erosion level taken modulo 3) to its
    /// tile type: 0 → Rocky, 1 → Wet, 2 → Narrow.
    ///
    /// # Panics
    /// Panics if `num` is not 0, 1 or 2, reporting the offending value.
    pub fn from_num(num: usize) -> TileType {
        match num {
            0 => TileType::Rocky,
            1 => TileType::Wet,
            2 => TileType::Narrow,
            n => panic!("Invalid number: {}", n),
        }
    }
}
| true |
4f47226a8459dee5b7572955216cfbd5fe175443
|
Rust
|
liljencrantz/crush
|
/src/lang/data/struct.rs
|
UTF-8
| 8,550 | 2.546875 | 3 |
[
"MIT"
] |
permissive
|
use crate::lang::errors::{error, CrushError, CrushResult};
use crate::lang::pipe::CrushStream;
use crate::lang::data::table::ColumnType;
use crate::lang::data::table::Row;
use crate::lang::value::Value;
use crate::lang::value::ValueType;
use crate::util::identity_arc::Identity;
use crate::util::replace::Replace;
use chrono::Duration;
use lazy_static::lazy_static;
use ordered_map::OrderedMap;
use std::cmp::Ordering;
use std::collections::HashSet;
use std::hash::{Hash, Hasher};
use std::sync::{Arc, Mutex};
use std::fmt::{Formatter, Display};
use std::ops::Deref;
lazy_static! {
    // Column layout used when a Struct is streamed as a (name, value) table.
    pub static ref STRUCT_STREAM_TYPE: Vec<ColumnType> = vec![
        ColumnType::new("name", ValueType::String),
        ColumnType::new("value", ValueType::Any),
    ];
}
#[derive(Clone)]
// Interior state of a Struct: field values in insertion order plus a
// name -> index lookup, and an optional parent for prototype-style chaining.
struct StructData {
    parent: Option<Struct>,
    // Maps field name to its index in `cells`.
    lookup: OrderedMap<String, usize>,
    // Field values, indexed by `lookup`.
    cells: Vec<Value>,
}
#[derive(Clone)]
// Shared, mutable struct value: clones are shallow handles onto the same
// `StructData` behind an `Arc<Mutex<_>>`.
pub struct Struct {
    data: Arc<Mutex<StructData>>,
}
impl Identity for Struct {
    // Identity is the allocation identity of the shared data, so two handles
    // to the same underlying struct compare as identical.
    fn id(&self) -> u64 {
        self.data.id()
    }
}
impl Hash for Struct {
    fn hash<H: Hasher>(&self, state: &mut H) {
        let data = self.data.lock().unwrap();
        // Hash only the cell values (field names are not hashed — keep in
        // sync with the PartialEq impl, which does compare names).
        data.cells.iter().for_each(|value| {
            value.hash(state);
        });
        // Clone the parent handle and release the lock *before* hashing it —
        // presumably to avoid holding this mutex while recursing into the
        // parent (deadlock risk on self-referential chains); confirm.
        let p = data.parent.clone();
        drop(data);
        p.hash(state);
    }
}
impl PartialEq for Struct {
    // Equal when the local cells match pairwise and the name -> index
    // mappings agree. NOTE(review): parents are not compared, so inherited
    // fields are ignored by equality — confirm this is intended.
    fn eq(&self, other: &Self) -> bool {
        let us = self.data.lock().unwrap().clone();
        let them = other.data.lock().unwrap().clone();
        if us.cells.len() != them.cells.len() {
            return false;
        }
        for (v1, v2) in us.cells.iter().zip(them.cells.iter()) {
            if !v1.eq(v2) {
                return false;
            }
        }
        for (name, idx) in us.lookup.iter() {
            match them.lookup.get(name) {
                None => return false,
                Some(idx2) => {
                    if !idx.eq(idx2) {
                        return false;
                    }
                }
            }
        }
        true
    }
}
impl PartialOrd for Struct {
    // Structs have no natural ordering; every comparison yields None.
    fn partial_cmp(&self, _other: &Self) -> Option<Ordering> {
        None
    }
}
impl Struct {
    /// Creates a struct with no local fields, optionally inheriting from
    /// `parent`.
    pub fn empty(parent: Option<Struct>) -> Struct {
        let v:Vec<(String, Value)> = Vec::new();
        Struct::new(v, parent)
    }
    /// Builds a struct from (name, value) pairs, preserving field order.
    pub fn new(mut vec: Vec<(impl Into<String>, Value)>, parent: Option<Struct>) -> Struct {
        let mut lookup = OrderedMap::new();
        let mut cells = Vec::new();
        vec.drain(..).for_each(|(key, value)| {
            lookup.insert(key.into(), cells.len());
            cells.push(value);
        });
        Struct {
            data: Arc::new(Mutex::new(StructData {
                parent,
                cells,
                lookup,
            })),
        }
    }
    /// Builds a parentless struct whose field names come from `types` and
    /// whose values come from `values`, zipped positionally.
    pub fn from_vec(mut values: Vec<Value>, types: Vec<ColumnType>) -> Struct {
        let mut lookup = OrderedMap::new();
        let mut cells = Vec::new();
        values.drain(..).zip(types).for_each(|(value, column)| {
            lookup.insert(column.name, cells.len());
            cells.push(value);
        });
        Struct {
            data: Arc::new(Mutex::new(StructData {
                parent: None,
                lookup,
                cells,
            })),
        }
    }
    /// Returns the column types of the fields stored directly on this
    /// struct, in cell order. Inherited fields are not included.
    pub fn local_signature(&self) -> Vec<ColumnType> {
        let mut res = Vec::new();
        let data = self.data.lock().unwrap();
        // Invert name -> index so names can be emitted in cell order.
        let mut reverse_lookup = OrderedMap::new();
        for (key, value) in &data.lookup {
            reverse_lookup.insert(value.clone(), key);
        }
        for (idx, value) in data.cells.iter().enumerate() {
            res.push(ColumnType::new(
                reverse_lookup.get(&idx).unwrap().deref(),
                value.value_type(),
            ));
        }
        res
    }
    /// Returns the (name, value) pairs stored directly on this struct, in
    /// cell order. Inherited fields are not included.
    pub fn local_elements(&self) -> Vec<(String, Value)> {
        let mut reverse_lookup = OrderedMap::new();
        let data = self.data.lock().unwrap();
        for (key, value) in &data.lookup {
            reverse_lookup.insert(value.clone(), key);
        }
        data.cells
            .iter()
            .enumerate()
            .map(|(idx, v)| (reverse_lookup[&idx].to_string(), v.clone()))
            .collect()
    }
    /// Copies this struct's local cell values into a table row.
    pub fn to_row(&self) -> Row {
        Row::new(self.data.lock().unwrap().cells.clone())
    }
    /// Looks up a field by name, falling back to the parent chain when it
    /// is not defined locally.
    pub fn get(&self, name: &str) -> Option<Value> {
        let data = self.data.lock().unwrap();
        match data.lookup.get(name) {
            None => {
                // Release our lock before recursing into the parent.
                let p = data.parent.clone();
                drop(data);
                match p {
                    None => None,
                    Some(parent) => parent.get(name),
                }
            }
            Some(idx) => Some(data.cells[*idx].clone()),
        }
    }
    /// Returns all field names, including inherited ones, deduplicated and
    /// in no guaranteed order.
    pub fn keys(&self) -> Vec<String> {
        let mut fields = HashSet::new();
        self.fill_keys(&mut fields);
        fields.drain().collect()
    }
    // Accumulates local names, then recurses up the parent chain (lock is
    // released before recursing).
    fn fill_keys(&self, dest: &mut HashSet<String>) {
        let data = self.data.lock().unwrap();
        data.lookup.keys().for_each(|name| {
            dest.insert(name.clone());
        });
        let parent = data.parent.clone();
        drop(data);
        if let Some(p) = parent {
            p.fill_keys(dest);
        }
    }
    /// Returns all fields as a name -> value map. Local fields shadow
    /// inherited ones with the same name.
    pub fn map(&self) -> OrderedMap<String, Value> {
        let mut map = OrderedMap::new();
        self.fill_map(&mut map);
        map
    }
    // Inserts local fields first (so they win), then walks the parents.
    fn fill_map(&self, dest: &mut OrderedMap<String, Value>) {
        let data = self.data.lock().unwrap();
        data.lookup.iter().for_each(|(name, idx)| {
            if !dest.contains_key(name) {
                dest.insert(name.clone(), data.cells[*idx].clone());
            }
        });
        let parent = data.parent.clone();
        drop(data);
        if let Some(p) = parent {
            p.fill_map(dest);
        }
    }
    /// Sets a local field, appending it when new. Returns the previous
    /// value when the field already existed locally.
    pub fn set(&self, name: &str, value: Value) -> Option<Value> {
        let mut data = self.data.lock().unwrap();
        match data.lookup.get(name).cloned() {
            None => {
                let idx = data.lookup.len();
                data.lookup.insert(name.to_string(), idx);
                data.cells.push(value);
                None
            }
            Some(idx) => Some(data.cells.replace(idx, value)),
        }
    }
    /// Returns a copy of this struct whose cells have each been
    /// `materialize()`d; fails if any cell's materialization fails.
    pub fn materialize(&self) -> CrushResult<Struct> {
        let data = self.data.lock().unwrap();
        Ok(Struct {
            data: Arc::new(Mutex::new(StructData {
                parent: data.parent.clone(),
                lookup: data.lookup.clone(),
                cells: data
                    .cells
                    .iter()
                    .map(|value| value.clone().materialize())
                    .collect::<CrushResult<Vec<_>>>()?,
            })),
        })
    }
    /// Replaces (or clears) this struct's parent.
    pub fn set_parent(&self, parent: Option<Struct>) {
        self.data.lock().unwrap().parent = parent;
    }
}
impl Display for Struct {
    // Renders as `data parent=(...) name=(value) name=(value) ...`, with
    // the parent section present only when a parent exists.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        // Snapshot the local fields before taking the lock below;
        // local_elements() locks internally.
        let elements = self.local_elements();
        let data = self.data.lock().unwrap();
        f.write_str("data")?;
        if let Some(parent) = data.parent.clone() {
            f.write_str(" parent=(")?;
            parent.fmt(f)?;
            f.write_str(")")?;
        }
        for (name, value) in elements.iter() {
            f.write_str(" ")?;
            name.fmt(f)?;
            f.write_str("=(")?;
            value.fmt(f)?;
            f.write_str(")")?;
        }
        Ok(())
    }
}
/// Streams the fields of a [`Struct`] as (name, value) rows.
pub struct StructReader {
    idx: usize,                  // next row to emit
    rows: Vec<(String, Value)>,  // snapshot of the struct's field map
}
impl StructReader {
    /// Snapshots the struct's full field map (including inherited fields,
    /// via `Struct::map`) into rows to be streamed.
    pub fn new(s: Struct) -> StructReader {
        StructReader {
            idx: 0,
            rows: s.map().drain().collect(),
        }
    }
}
impl CrushStream for StructReader {
    /// Emits one (name, value) row per field; returns an "EOF" error once
    /// all fields have been produced.
    fn read(&mut self) -> Result<Row, CrushError> {
        if self.idx >= self.rows.len() {
            return error("EOF");
        }
        self.idx += 1;
        // Move the row out, leaving a cheap placeholder behind, so the
        // value is not cloned.
        let (k, v) = self
            .rows
            .replace(self.idx - 1, ("".to_string(), Value::Empty));
        Ok(Row::new(vec![Value::from(k), v]))
    }
    /// Timeout variant; this reader never blocks, so any failure from
    /// `read` is reported as a disconnect.
    fn read_timeout(
        &mut self,
        _timeout: Duration,
    ) -> Result<Row, crate::lang::pipe::RecvTimeoutError> {
        match self.read() {
            Ok(r) => Ok(r),
            Err(_) => Err(crate::lang::pipe::RecvTimeoutError::Disconnected),
        }
    }
    fn types(&self) -> &[ColumnType] {
        &STRUCT_STREAM_TYPE
    }
}
| true |
189b06a184e8e3510da4fce898dd9ddd750de484
|
Rust
|
galenelias/AdventOfCode_2017
|
/src/main.rs
|
UTF-8
| 2,037 | 2.953125 | 3 |
[] |
no_license
|
extern crate clap;
extern crate regex;
extern crate serde_json;
extern crate chrono;
#[macro_use]
extern crate serde_derive;
use clap::{Arg,Command};
mod stats;
mod day1;
mod day2;
mod day3;
mod day4;
mod day5;
mod day6;
mod day7;
mod day8;
mod day9;
mod day10;
mod day11;
mod day12;
mod day13;
mod day14;
mod day15;
mod day16;
mod day17;
mod day18;
mod day19;
mod day20;
mod day21;
mod day22;
mod day23;
mod day24;
mod day25;
/// Entry point: validates the `day` argument (and the optional `--stats`
/// flag) and dispatches to the matching day's solver.
fn main() {
    let matches = Command::new("Advent of Code")
        .author("Galen Elias, [email protected]")
        .version("0.1.0")
        .about("Advent of code solutions in Rust")
        .arg(
            Arg::new("day")
                .required(true)
                .index(1)
                .help("specifies which day's challenge to run")
                // NOTE(review): the range check accepts 0 while the error
                // message promises 1-25 — confirm which is intended.
                .validator(|str|
                    str.parse::<u32>()
                        .or(Err("day must be an integer".to_owned()))
                        .and_then(|v| match v {
                            0..=25 => Ok(()),
                            _ => Err("day must be between 1 and 25".to_owned())
                        })))
        .arg(
            Arg::new("stats")
                .long("stats")
                .help("Parses leaderboard JSON into a readable format"))
        .after_help("Longer explaination to appear after the options when \
                     displaying the help information from --help or -h")
        .get_matches();
    // Stats mode short-circuits the solver dispatch entirely.
    if matches.is_present("stats") {
        stats::show_stats(matches.value_of("day").unwrap_or("0").parse::<u32>().unwrap());
        return;
    }
    let day = matches.value_of("day").unwrap().parse::<u32>().unwrap();
    match day {
        1 => day1::solve(),
        2 => day2::solve(),
        3 => day3::solve(),
        4 => day4::solve(),
        5 => day5::solve(),
        6 => day6::solve(),
        7 => day7::solve(),
        8 => day8::solve(),
        9 => day9::solve(),
        10 => day10::solve(),
        11 => day11::solve(),
        12 => day12::solve(),
        13 => day13::solve(),
        14 => day14::solve(),
        15 => day15::solve(),
        16 => day16::solve(),
        17 => day17::solve(),
        18 => day18::solve(),
        19 => day19::solve(),
        20 => day20::solve(),
        21 => day21::solve(),
        22 => day22::solve(),
        23 => day23::solve(),
        24 => day24::solve(),
        25 => day25::solve(),
        _ => println!("Oops! Day {} isn't implemented yet!", day)
    }
}
| true |
3e7f83af021774511ac98cce857ecbae31a20782
|
Rust
|
skrap/aoc_rust_2018
|
/src/bin/8.rs
|
UTF-8
| 1,651 | 3.140625 | 3 |
[] |
no_license
|
extern crate regex;
/// A tree node holding child nodes plus a list of metadata entries.
struct Node {
    children: Vec<Node>,
    meta: Vec<i32>,
}
impl Node {
    /// Creates an empty node with no children and no metadata.
    fn new() -> Node {
        Node {
            children: Vec::new(),
            meta: Vec::new(),
        }
    }
    /// Fills this node (and, recursively, its children) from a token
    /// stream laid out as: child count, meta count, children..., meta...
    fn parse(&mut self, tokens: &mut impl Iterator<Item = i32>)
    {
        let child_count = tokens.next().unwrap();
        let meta_count = tokens.next().unwrap();
        for _ in 0..child_count {
            let mut child = Node::new();
            child.parse(tokens);
            self.children.push(child);
        }
        self.meta
            .extend((0..meta_count).map(|_| tokens.next().unwrap()));
    }
    /// Sum of every metadata entry in this subtree.
    fn sum_meta(&self) -> i32 {
        let from_children: i32 = self.children.iter().map(Node::sum_meta).sum();
        from_children + self.meta.iter().sum::<i32>()
    }
    /// Node "value": the metadata sum for a leaf; for an inner node, the
    /// sum of the values of the children selected by the 1-based metadata
    /// entries (out-of-range entries are skipped). Prints the same debug
    /// trace to stdout as before.
    fn value(&self) -> i32 {
        let result = if self.children.is_empty() {
            print!("leaf ");
            self.meta.iter().sum()
        } else {
            let mut total = 0i32;
            for &index in &self.meta {
                if let Some(child) = self.children.get((index - 1) as usize) {
                    total += child.value();
                }
            }
            print!("parent ");
            total
        };
        println!("node value {}", result);
        result
    }
}
/// Parses the tree encoded in the bundled input file and prints the
/// metadata sum (part 1) and the root node's value (part 2).
fn main() {
    // Input is compiled into the binary at build time.
    let input = include_str!("8_input");
    // Extract every integer token from the whitespace-separated input.
    let re = regex::Regex::new(r"\d+").unwrap();
    let mut tokens = re.find_iter(input).map(|e| e.as_str().parse::<i32>().unwrap());
    let mut node = Node::new();
    node.parse(&mut tokens);
    println!("meta sum {}", node.sum_meta());
    println!("value {}", node.value());
}
| true |
14273eceea1ad2c6ee810c4f1ce8956a215d64aa
|
Rust
|
derekam/tag-simulator
|
/src/simulation.rs
|
UTF-8
| 6,548 | 3.203125 | 3 |
[] |
no_license
|
use crate::environment::Environment;
use crate::parameters::{TagParams};
use crate::tag_environment::TagEnvironment;
use dashmap::DashMap;
use crate::action::Action;
use crate::controls::Controls;
use iced::{Application, Settings, window};
use crate::agents::agent::Agent;
use std::collections::HashSet;
/// The main tag simulation instance.
///
/// # Examples
/// ## Standalone (no UI)
/// ```
/// let simulation: Simulation = Simulation::new(DEFAULT_PARAMS);
/// simulation.run_headless(Option::from(500));
/// ```
///
/// ## WIth Iced GUI
/// ```
/// Simulation::run_gui(DEFAULT_PARAMS);
/// ```
pub struct Simulation<X>
where
    X: Agent + 'static
{
    pub(crate) parameters: TagParams,          // simulation configuration
    pub(crate) environment: TagEnvironment<X>, // world state and agents
    pub(crate) is_running: bool,               // true while a headless run is active
    pub(crate) controls: Controls,             // UI control state
}
impl<X: Agent + 'static> Simulation<X> {
    /// Builds a simulation from `parameters` and resets the environment to
    /// its starting state.
    pub fn new(parameters: TagParams) -> Self {
        let mut sim = Simulation {
            parameters,
            environment: TagEnvironment {
                // NOTE(review): the map capacity is taken from `speed`;
                // `num_players` looks like the intended value — confirm.
                agents: DashMap::with_capacity(parameters.speed as usize),
                width: parameters.width as f32,
                height: parameters.height as f32,
                it: HashSet::new(),
                show_numbers: parameters.numbered,
            },
            is_running: false,
            controls: Controls::default(),
        };
        sim.environment.reset(parameters);
        sim
    }
    /// Launches the Iced GUI in a fixed-size, non-resizable window.
    pub fn run_gui(parameters: TagParams) {
        let window = window::Settings {
            size: (parameters.width as u32, parameters.height as u32),
            resizable: false,
            decorations: true
        };
        let settings = Settings {
            window,
            flags: parameters,
            default_font: None,
            antialiasing: true
        };
        Simulation::<X>::run(settings);
    }
    /// Runs without a UI: forever when `num_steps` is None (until
    /// `is_running` is cleared), otherwise for exactly `num_steps` steps.
    pub fn run_headless(&mut self, num_steps: Option<u128>) {
        match num_steps {
            None => {
                self.is_running = true;
                while self.is_running {
                    self.step();
                }
            }
            Some(steps) => {
                self.is_running = true;
                for _ in 0..steps {
                    self.step();
                }
                self.is_running = false;
            }
        }
    }
    /// Clears the running flag, ending an unbounded headless run.
    pub fn stop(&mut self) {
        self.is_running = false;
    }
    /// Advances one tick: every agent picks an action against the current
    /// environment, then all actions are applied together.
    pub(crate) fn step(&mut self) {
        // TODO something like a countdown latch here or Rayon iters, or abandon turn-based altogether and have agents in their own threads.
        let mut actions: Vec<Action> = Vec::with_capacity(self.environment.agents.len());
        for agent in 0..self.environment.agents.len() {
            actions.insert(agent, self.environment.agents.get(&agent).unwrap().act(&self.environment));
        }
        // NOTE(review): the leading `&` borrows the call's unit result and
        // is a no-op.
        &self.environment.step_all(actions);
    }
}
// Smoke test plus single-step throughput benchmarks at varying population
// sizes. The benchmarks require the nightly `test` crate (`Bencher`).
#[cfg(test)]
mod tests {
    use crate::parameters::TagParams;
    use crate::simulation::Simulation;
    use crate::agents::agent_type::AgentType;
    use crate::agents::agent::{Player};
    use crate::agents::basic_directional::DirectionalAgent;
    use test::Bencher;
    #[test]
    fn test_basic_functionality() {
        let params: TagParams = TagParams {
            speed: 10.0,
            proximity: 2.0,
            width: 100,
            height: 100,
            num_players: 5,
            agent_type: AgentType::Default,
            numbered: false,
            num_it: 1
        };
        let mut sim: Simulation<Player> = Simulation::new(params);
        assert_eq!(sim.is_running, false);
        assert_eq!(sim.environment.height, 100.);
        assert_eq!(sim.environment.width, 100.);
        assert_eq!(sim.environment.agents.len(), 5);
        let agent = sim.environment.agents.get(&0).unwrap().value().clone();
        sim.run_headless(Option::from(10));
        // Agent 0 must have moved after ten steps.
        assert_ne!(agent.position, sim.environment.agents.get(&0).unwrap().position);
    }
    #[bench]
    fn bench_headless_500_directional(b: &mut Bencher) {
        let params: TagParams = TagParams {
            speed: 5.0,
            proximity: 2.0,
            width: 1000,
            height: 600,
            num_players: 500,
            agent_type: AgentType::BasicDirectional,
            numbered: false,
            num_it: 1
        };
        let mut sim: Simulation<DirectionalAgent> = Simulation::new(params);
        b.iter(|| {
            sim.step();
        });
    }
    #[bench]
    fn bench_headless_5000_directional(b: &mut Bencher) {
        let params: TagParams = TagParams {
            speed: 5.0,
            proximity: 2.0,
            width: 1000,
            height: 600,
            num_players: 5000,
            agent_type: AgentType::BasicDirectional,
            numbered: false,
            num_it: 1
        };
        let mut sim: Simulation<DirectionalAgent> = Simulation::new(params);
        b.iter(|| {
            sim.step();
        });
    }
    #[bench]
    fn bench_headless_500_default(b: &mut Bencher) {
        let params: TagParams = TagParams {
            speed: 5.0,
            proximity: 2.0,
            width: 1000,
            height: 600,
            num_players: 500,
            agent_type: AgentType::Default,
            numbered: false,
            num_it: 1
        };
        let mut sim: Simulation<Player> = Simulation::new(params);
        b.iter(|| {
            sim.step();
        });
    }
    #[bench]
    fn bench_headless_5000_default(b: &mut Bencher) {
        let params: TagParams = TagParams {
            speed: 5.0,
            proximity: 2.0,
            width: 1000,
            height: 600,
            num_players: 5000,
            agent_type: AgentType::Default,
            numbered: false,
            num_it: 1
        };
        let mut sim: Simulation<Player> = Simulation::new(params);
        b.iter(|| {
            sim.step();
        });
    }
    #[bench]
    fn bench_headless_50000_default(b: &mut Bencher) {
        let params: TagParams = TagParams {
            speed: 5.0,
            proximity: 2.0,
            width: 1000,
            height: 600,
            num_players: 50000,
            agent_type: AgentType::Default,
            numbered: false,
            num_it: 1
        };
        let mut sim: Simulation<Player> = Simulation::new(params);
        b.iter(|| {
            sim.step();
        });
    }
}
| true |
2f21c986bfd1d5459c41b96c2883f449b1d2be71
|
Rust
|
lazareviczoran/advent-of-code-2020
|
/day-06/src/main.rs
|
UTF-8
| 2,012 | 3.65625 | 4 |
[] |
no_license
|
use std::collections::HashSet;
use std::fs::File;
use std::io::prelude::*;
/// Solves both parts of the puzzle using the answer groups in input.txt.
fn main() {
    let data = read("input.txt");
    println!("part1 solution: {}", count_all_unique_yes_answers(&data));
    println!("part2 solution: {}", count_unique_yes_answers_by_all(&data));
}
/// Part 1: for each group, counts the distinct questions answered "yes"
/// by anyone in the group, then sums those counts over all groups.
fn count_all_unique_yes_answers(data: &[Vec<String>]) -> usize {
    data.iter()
        .map(|answers| {
            answers
                .iter()
                .flat_map(|ans| ans.chars())
                .collect::<HashSet<char>>()
                .len()
        })
        .sum()
}
/// Part 2: for each group, counts the questions answered "yes" by every
/// member (set intersection across the group), then sums over all groups.
fn count_unique_yes_answers_by_all(data: &[Vec<String>]) -> usize {
    data.iter()
        .map(|answers| {
            let initial: HashSet<char> = answers[0].chars().collect();
            answers[1..]
                .iter()
                .fold(initial, |common, ans| {
                    let member: HashSet<char> = ans.chars().collect();
                    common.intersection(&member).copied().collect()
                })
                .len()
        })
        .sum()
}
/// Reads `filename` and splits it into answer groups: each group is the
/// list of per-person answer lines, groups separated by blank lines.
///
/// # Panics
/// Panics if the file cannot be opened or read.
fn read(filename: &str) -> Vec<Vec<String>> {
    let mut file = File::open(filename).expect("File not found");
    let mut content = String::new();
    file.read_to_string(&mut content)
        .expect("Failed to read file content");
    let mut acc = Vec::new();
    let mut answers = Vec::new();
    for line in content.lines() {
        if line.is_empty() {
            if !answers.is_empty() {
                // mem::take moves the group out without cloning.
                acc.push(std::mem::take(&mut answers));
            }
        } else {
            answers.push(line.to_string());
        }
    }
    // Fix: only keep the final group when it is non-empty, so an input
    // ending in blank lines no longer yields a spurious empty group.
    if !answers.is_empty() {
        acc.push(answers);
    }
    acc
}
// Both tests use the sample groups in test-input.txt, which must be
// present in the working directory when the tests run.
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn part1_test() {
        let data = read("test-input.txt");
        assert_eq!(count_all_unique_yes_answers(&data), 11);
    }
    #[test]
    fn part2_test() {
        let data = read("test-input.txt");
        assert_eq!(count_unique_yes_answers_by_all(&data), 6);
    }
}
| true |
133f3fd3381db53c511d388a689ee516d9bf984a
|
Rust
|
entropitor/cpl-window-manager
|
/assignment/src/fixed_window_manager.rs
|
UTF-8
| 587 | 2.734375 | 3 |
[] |
no_license
|
//! This module adds some missing methods to properly implement all WindowManagers
use cplwm_api::types::{Window, WindowWithInfo};
use cplwm_api::wm::WindowManager;
/// Extension of [`WindowManager`] that lets wrappers access the full
/// `WindowWithInfo` for a window, not just its tiled representation.
pub trait RealWindowInfo: WindowManager {
    /// Get the real window info. Equal to `get_window_info` unless the
    /// window is tiled; in that case the geometry should equal the
    /// geometry the window would have after `toggle_floating()`.
    fn get_real_window_info(&self, window: Window) -> Result<WindowWithInfo, Self::Error>;
}
| true |
29e103702281d7dba5a7029626b12fa02a086a04
|
Rust
|
opensource-assist/fuschia
|
/src/sys/test_adapters/rust/src/main.rs
|
UTF-8
| 2,862 | 2.578125 | 3 |
[
"BSD-3-Clause"
] |
permissive
|
// Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
mod rust_test_adapter;
use {
anyhow::{format_err, Context as _, Error},
fidl_fuchsia_test as ftest, fuchsia_async as fasync,
fuchsia_component::server::ServiceFs,
fuchsia_syslog::fx_log_info,
futures::prelude::*,
rust_test_adapter::{RustTestAdapter, TestInfo},
std::env,
};
/// Splits the raw argument vector (program name, test path, then any test
/// arguments) into a `TestInfo`, rejecting invocations without a test path.
fn consume_args(args: Vec<String>) -> Result<TestInfo, Error> {
    // Skip argv[0] (the program name) and consume the rest by value.
    let mut parts = args.into_iter().skip(1);
    let test_path = match parts.next() {
        Some(path) => path,
        None => return Err(format_err!("Usage: rust_test_adapter <test path in pkg>")),
    };
    let test_args: Vec<String> = parts.collect();
    Ok(TestInfo { test_path, test_args })
}
/// Serves the `fuchsia.test.Suite` FIDL protocol: each incoming connection
/// spawns a local task that runs the adapted Rust test binary.
fn main() -> Result<(), Error> {
    fuchsia_syslog::init_with_tags(&["rust_test_adapter"])?;
    fx_log_info!("adapter started")?;
    let mut executor = fasync::Executor::new().context("Error creating executor")?;
    let mut fs = ServiceFs::new_local();
    fs.dir("svc").add_fidl_service(move |stream: ftest::SuiteRequestStream| {
        fasync::spawn_local(async move {
            // The process arguments are re-parsed for every connection.
            let test_info = consume_args(env::args().collect()).unwrap();
            let adapter = RustTestAdapter::new(test_info).expect("Failed to create adapter");
            adapter.run_test_suite(stream).await.expect("Failed to run test suite");
        });
    });
    fs.take_and_serve_directory_handle()?;
    executor.run_singlethreaded(fs.collect::<()>());
    Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;
    // consume_args drops the leading program name and splits the rest into
    // the test path plus pass-through arguments.
    #[test]
    fn parse_no_args() {
        let args = vec![String::from("my_package"), String::from("my_test")];
        let expected_info = TestInfo { test_path: String::from("my_test"), test_args: vec![] };
        let actual_info = consume_args(args).unwrap();
        assert_eq!(expected_info, actual_info);
    }
    #[test]
    fn parse_one_arg() {
        let args = vec![String::from("my_package"), String::from("my_test"), String::from("one")];
        let expected_info =
            TestInfo { test_path: String::from("my_test"), test_args: vec![String::from("one")] };
        let actual_info = consume_args(args).unwrap();
        assert_eq!(expected_info, actual_info);
    }
    #[test]
    fn parse_multiple_args() {
        let args = vec![
            String::from("my_package"),
            String::from("my_test"),
            String::from("one"),
            String::from("two"),
        ];
        let expected_info = TestInfo {
            test_path: String::from("my_test"),
            test_args: vec![String::from("one"), String::from("two")],
        };
        let actual_info = consume_args(args).unwrap();
        assert_eq!(expected_info, actual_info);
    }
}
| true |
d87e9ec463a00286a097a3738c52d157ca5b661f
|
Rust
|
Patryk27/cr8r
|
/apps/controller/src/system/modules/attachment/actor.rs
|
UTF-8
| 2,028 | 2.703125 | 3 |
[
"MIT"
] |
permissive
|
use std::path::PathBuf;
use std::sync::atomic::Ordering;
use chrono::{DateTime, Utc};
use log::*;
use tokio::fs::{File, remove_file};
use tokio::stream::StreamExt;
use tokio::time::{delay_for, Duration};
use lib_core_channel::URx;
use lib_interop::models::{DAttachmentId, DAttachmentName};
use lib_interop::proto::models::PAttachmentSize;
use super::{AttachmentMsg, AttachmentStatus};
/// Actor owning the lifecycle of a single uploaded attachment: pending
/// upload, then message processing, then cleanup of the backing file.
pub struct AttachmentActor {
    pub id: DAttachmentId,
    pub name: DAttachmentName,
    pub size: PAttachmentSize,
    pub path: PathBuf,           // location of the backing file on disk
    pub created_at: DateTime<Utc>,
    pub status: AttachmentStatus,
}
impl AttachmentActor {
    /// Runs the actor: marks the attachment as pending on `file`, processes
    /// mailbox messages until one signals the actor to stop (or the mailbox
    /// closes), then removes the backing file.
    pub async fn start(mut self, file: File, mut mailbox: URx<AttachmentMsg>) {
        trace!("Actor started");
        trace!("-> id = {}", self.id);
        trace!("-> name = {}", self.name);
        trace!("-> size = {}", self.size);
        trace!("-> path = {}", self.path.display());
        self.status = AttachmentStatus::Pending {
            file,
            uploaded_bytes: 0,
        };
        while let Some(msg) = mailbox.next().await {
            if msg.handle(&mut self).await.actor_should_stop() {
                break;
            }
        }
        trace!("Actor is halting");
        self.clean_up().await;
        trace!("Actor halted");
    }
    /// Waits for all outstanding download tokens to be returned, then
    /// deletes the attachment's file from disk (logging on failure).
    async fn clean_up(&mut self) {
        if let AttachmentStatus::Ready { active_download_tokens } = &self.status {
            // As long as someone holds a download token, we cannot remove attachment from the filesystem - so let's
            // just wait until all tokens are back.
            //
            // We could use some more sophisticated mechanism (e.g. a condvar), but polling is good enough.
            while active_download_tokens.load(Ordering::SeqCst) != 0 {
                delay_for(Duration::from_secs(1)).await;
            }
        }
        if let Err(err) = remove_file(&self.path).await {
            error!("Could not remove file for attachment [id={}]: {:?}", self.id, err);
        }
    }
}
| true |
ca5f9b08097d6be397cc1049c4dcea34ba062429
|
Rust
|
inda21plusplus/mathm-chess
|
/chess-engine/src/tests.rs
|
UTF-8
| 10,188 | 3.046875 | 3 |
[] |
no_license
|
use std::collections::HashSet;
use crate::*;
#[test]
fn arabic_parsing() {
    // Each pair maps algebraic coordinate notation to the expected
    // (from, to[, promotion]) move built from (file, rank) indices.
    for (input, output) in [
        ("a4a5", Move::from(((0, 4).into(), (0, 3).into()))),
        ("h5h1", ((7, 3).into(), (7, 7).into()).into()),
        ("a1h8", ((0, 7).into(), (7, 0).into()).into()),
        ("b5a7", ((1, 3).into(), (0, 1).into()).into()),
        (
            "a2h8q",
            ((0, 6).into(), (7, 0).into(), piece::Kind::Queen).into(),
        ),
        (
            "e7e8n",
            ((4, 1).into(), (4, 0).into(), piece::Kind::Knight).into(),
        ),
    ] {
        assert_eq!(Move::arabic(input), Ok(output), "at: {}", input);
    }
}
#[test]
fn queen_cant_threaten_king_through_own_pieces() {
    // A white pawn stands between the white queen and the square the black
    // king wants to step to, so the king's move must remain legal.
    let game = Game::new(Board::from_fen("7K/8/8/4P1Q/1k/8/8/8 b - - 0 0").unwrap());
    assert!(game
        .all_legal_moves()
        .any(|m| m == Move::arabic("b4b5").unwrap()));
}
#[test]
fn pawn_checkmate() {
    // March the white c-pawn up the board until it captures on d7; black's
    // only legal replies should be the four ways of taking that pawn.
    let setup_moves = [
        Move::arabic("c2c4").unwrap(), // white
        Move::arabic("h7h6").unwrap(),
        Move::arabic("c4c5").unwrap(), // white
        Move::arabic("h6h5").unwrap(),
        Move::arabic("c5c6").unwrap(), // white
        Move::arabic("h5h4").unwrap(),
        Move::arabic("c6d7").unwrap(), // white
    ];
    let mut game = Game::new(Board::default());
    for m in setup_moves {
        assert_eq!(Ok(GameState::Ongoing), game.make_move(m));
    }
    let expected = [
        Move::arabic("b8d7").unwrap(),
        Move::arabic("c8d7").unwrap(),
        Move::arabic("d8d7").unwrap(),
        Move::arabic("e8d7").unwrap(),
    ];
    let actual = game.all_legal_moves().collect::<Vec<Move>>();
    // Compare as sets; the failure message lists both move lists.
    assert_eq!(
        expected.iter().copied().collect::<HashSet<Move>>(),
        actual.iter().copied().collect::<HashSet<Move>>(),
        "\n{}\n{}",
        expected
            .iter()
            .fold(String::new(), |acc, m| acc + " " + &m.as_arabic()),
        actual
            .iter()
            .fold(String::new(), |acc, m| acc + " " + &m.as_arabic())
    )
}
#[test]
fn arabic_parsing_fails() {
    // Malformed coordinates must be rejected: truncated input, files
    // outside a-h, surrounding whitespace, and unknown promotion letters.
    assert!(matches!(Move::arabic("a4a"), Err(Error::ParsingError)));
    assert!(matches!(Move::arabic("i2a3"), Err(Error::ParsingError)));
    assert!(matches!(Move::arabic("a2u3"), Err(Error::ParsingError)));
    assert!(matches!(Move::arabic("a4a4 "), Err(Error::ParsingError)));
    assert!(matches!(Move::arabic(" a4a4"), Err(Error::ParsingError)));
    assert!(matches!(Move::arabic("a4a4l"), Err(Error::ParsingError)));
}
#[test]
fn piece_parsing() {
    // Letter case encodes color: uppercase letters parse as white pieces,
    // lowercase as black.
    for color in [Color::White, Color::Black] {
        use piece::Kind::*;
        for (c, kind) in [('p', Pawn), ('r', Rook), ('b', Bishop)] {
            assert_eq!(
                Piece::from_name(if color == Color::Black {
                    c
                } else {
                    c.to_ascii_uppercase()
                }),
                Ok(Piece::new(color, kind)),
            );
        }
    }
}
#[test]
fn piece_parsing_fail() {
    // Every letter that is not a piece name (in either case) is rejected.
    for c in "acdefghijlmostuvwxyzACDEFGHIJLMOSTUVWXYZ".chars() {
        assert!(Piece::from_name(c).is_err())
    }
}
// Counts the leaf nodes of the legal-move tree to `depth`, expanding each
// move that requires a promotion into the four possible promotion pieces.
fn perft(game: Game, depth: usize) -> usize {
    if depth == 0 {
        return 1;
    }
    let mut ans = 0;
    for mut move_ in game.all_legal_moves() {
        if game.missing_promotion(move_) {
            for kind in [
                piece::Kind::Bishop,
                piece::Kind::Knight,
                piece::Kind::Queen,
                piece::Kind::Rook,
            ] {
                move_.promotion = Some(kind);
                let mut g = Game::new(game.board().clone());
                g.make_move(move_).unwrap();
                ans += perft(g, depth - 1);
            }
        } else {
            let mut g = Game::new(game.board().clone());
            g.make_move(move_).unwrap();
            ans += perft(g, depth - 1);
        }
    }
    ans
}
// Perft node counts for well-known test positions; greater depths are
// commented out to keep the test runtime reasonable.
#[test]
fn perft_1() {
    let game = Game::new(
        Board::from_fen("rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 0 1").unwrap(),
    );
    assert_eq!(20, perft(game.clone(), 1));
    assert_eq!(400, perft(game.clone(), 2));
    assert_eq!(8902, perft(game.clone(), 3));
    // assert_eq!(197281, perft(game.clone(), 4));
}
#[test]
fn perft_2() {
    let game = Game::new(
        Board::from_fen("r3k2r/p1ppqpb1/bn2pnp1/3PN3/1p2P3/2N2Q1p/PPPBBPPP/R3K2R w KQkq - 0 1")
            .unwrap(),
    );
    assert_eq!(48, perft(game.clone(), 1));
    assert_eq!(2039, perft(game.clone(), 2));
    // assert_eq!(97862, perft(game.clone(), 3));
    // assert_eq!(4085603, perft(game.clone(), 4));
}
#[test]
fn perft_3() {
    let game = Game::new(Board::from_fen("8/2p5/3p4/KP5r/1R3p1k/8/4P1P1/8 w - - 0 1").unwrap());
    assert_eq!(14, perft(game.clone(), 1));
    // assert_eq!(191, perft(game.clone(), 2));
    // assert_eq!(2812, perft(game.clone(), 3));
}
#[test]
fn perft_4() {
    let game = Game::new(
        Board::from_fen("r3k2r/Pppp1ppp/1b3nbN/nP6/BBP1P3/q4N2/Pp1P2PP/R2Q1RK1 w kq - 0 1")
            .unwrap(),
    );
    assert_eq!(6, perft(game.clone(), 1));
    assert_eq!(264, perft(game.clone(), 2));
    // assert_eq!(9467, perft(game.clone(), 3));
}
#[test]
fn perft_5() {
    let game = Game::new(
        Board::from_fen("rnbq1k1r/pp1Pbppp/2p5/8/2B5/8/PPP1NnPP/RNBQK2R w KQ - 1 8").unwrap(),
    );
    assert_eq!(44, perft(game.clone(), 1));
    assert_eq!(1486, perft(game.clone(), 2));
    // assert_eq!(62379, perft(game.clone(), 3));
}
#[test]
fn perft_6() {
    let game = Game::new(
        Board::from_fen("r4rk1/1pp1qppp/p1np1n2/2b1p1B1/2B1P1b1/P1NP1N2/1PP1QPPP/R4RK1 w - - 0 10")
            .unwrap(),
    );
    assert_eq!(46, perft(game.clone(), 1));
    assert_eq!(2079, perft(game.clone(), 2));
    // assert_eq!(89890, perft(game.clone(), 3));
}
#[test]
fn few_simple_moves() {
    // FEN snapshots after each move verify side to move, en-passant
    // square, castling rights and the move counters.
    let mut game = Game::new(Board::default());
    assert_eq!(
        "rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 0 1",
        game.board().to_fen(),
    );
    assert_eq!(
        Ok(GameState::Ongoing),
        game.make_move(Move::arabic("e2e4").unwrap())
    );
    assert_eq!(
        "rnbqkbnr/pppppppp/8/8/4P3/8/PPPP1PPP/RNBQKBNR b KQkq e3 0 1",
        game.board().to_fen(),
    );
    assert_eq!(
        Ok(GameState::Ongoing),
        game.make_move(Move::arabic("c7c5").unwrap())
    );
    assert_eq!(
        "rnbqkbnr/pp1ppppp/8/2p5/4P3/8/PPPP1PPP/RNBQKBNR w KQkq c6 0 2",
        game.board().to_fen(),
    );
    assert_eq!(
        Ok(GameState::Ongoing),
        game.make_move(Move::arabic("g1f3").unwrap())
    );
    assert_eq!(
        "rnbqkbnr/pp1ppppp/8/2p5/4P3/5N2/PPPP1PPP/RNBQKB1R b KQkq - 1 2",
        game.board().to_fen(),
    );
}
#[test]
fn default_board() {
    // The freshly constructed board must match the standard starting
    // position: full back ranks, pawn rows, all castling rights, white to
    // move, no en-passant square.
    assert_eq!(
        Board::default(),
        Board {
            tiles: [
                [
                    Some(Piece {
                        color: Color::Black,
                        kind: crate::piece::Kind::Rook,
                    }),
                    Some(Piece {
                        color: Color::Black,
                        kind: crate::piece::Kind::Knight,
                    }),
                    Some(Piece {
                        color: Color::Black,
                        kind: crate::piece::Kind::Bishop,
                    }),
                    Some(Piece {
                        color: Color::Black,
                        kind: crate::piece::Kind::Queen,
                    }),
                    Some(Piece {
                        color: Color::Black,
                        kind: crate::piece::Kind::King,
                    }),
                    Some(Piece {
                        color: Color::Black,
                        kind: crate::piece::Kind::Bishop,
                    }),
                    Some(Piece {
                        color: Color::Black,
                        kind: crate::piece::Kind::Knight,
                    }),
                    Some(Piece {
                        color: Color::Black,
                        kind: crate::piece::Kind::Rook,
                    }),
                ],
                [Some(Piece {
                    color: Color::Black,
                    kind: crate::piece::Kind::Pawn,
                }); 8],
                [None; 8],
                [None; 8],
                [None; 8],
                [None; 8],
                [Some(Piece {
                    color: Color::White,
                    kind: crate::piece::Kind::Pawn,
                }); 8],
                [
                    Some(Piece {
                        color: Color::White,
                        kind: crate::piece::Kind::Rook,
                    }),
                    Some(Piece {
                        color: Color::White,
                        kind: crate::piece::Kind::Knight,
                    }),
                    Some(Piece {
                        color: Color::White,
                        kind: crate::piece::Kind::Bishop,
                    }),
                    Some(Piece {
                        color: Color::White,
                        kind: crate::piece::Kind::Queen,
                    }),
                    Some(Piece {
                        color: Color::White,
                        kind: crate::piece::Kind::King,
                    }),
                    Some(Piece {
                        color: Color::White,
                        kind: crate::piece::Kind::Bishop,
                    }),
                    Some(Piece {
                        color: Color::White,
                        kind: crate::piece::Kind::Knight,
                    }),
                    Some(Piece {
                        color: Color::White,
                        kind: crate::piece::Kind::Rook,
                    }),
                ],
            ],
            next_to_move: Color::White,
            can_castle_white_kingside: true,
            can_castle_white_queenside: true,
            can_castle_black_kingside: true,
            can_castle_black_queenside: true,
            en_passant_square: None,
            halfmove_counter: 0,
            move_number: 1,
        }
    );
}
| true |
3c0ecf018d7b15505df8e93aa72f5547ad635e54
|
Rust
|
abusch/rtiow
|
/src/hitable/boxx.rs
|
UTF-8
| 2,145 | 2.734375 | 3 |
[] |
no_license
|
use std::sync::Arc;
use crate::aabb::Aabb;
use crate::hitable::{FlipNormals, HitRecord, Hitable, XYRect, XZRect, YZRect};
use crate::material::Material;
use crate::ray::Ray;
use crate::Vec3;
/// An axis-aligned box assembled from six axis-aligned rectangles.
pub struct Boxx {
    pmin: Vec3,                        // minimum corner
    pmax: Vec3,                        // maximum corner
    list_ptr: Vec<Arc<dyn Hitable>>,   // the six face rectangles
}
impl Boxx {
    /// Builds a box spanning `p0` (min corner) to `p1` (max corner) from
    /// six face rectangles, all sharing the material `mat`. The three
    /// min-side faces are wrapped in `FlipNormals`.
    pub fn new(p0: Vec3, p1: Vec3, mat: Arc<dyn Material>) -> Boxx {
        let list: Vec<Arc<dyn Hitable>> = vec![
            Arc::new(XYRect::new(
                p0.x(),
                p1.x(),
                p0.y(),
                p1.y(),
                p1.z(),
                mat.clone(),
            )),
            Arc::new(FlipNormals::new(Arc::new(XYRect::new(
                p0.x(),
                p1.x(),
                p0.y(),
                p1.y(),
                p0.z(),
                mat.clone(),
            )))),
            Arc::new(XZRect::new(
                p0.x(),
                p1.x(),
                p0.z(),
                p1.z(),
                p1.y(),
                mat.clone(),
            )),
            Arc::new(FlipNormals::new(Arc::new(XZRect::new(
                p0.x(),
                p1.x(),
                p0.z(),
                p1.z(),
                p0.y(),
                mat.clone(),
            )))),
            Arc::new(YZRect::new(
                p0.y(),
                p1.y(),
                p0.z(),
                p1.z(),
                p1.x(),
                mat.clone(),
            )),
            Arc::new(FlipNormals::new(Arc::new(YZRect::new(
                p0.y(),
                p1.y(),
                p0.z(),
                p1.z(),
                p0.x(),
                // Last use of `mat`: move it instead of cloning.
                mat,
            )))),
        ];
        Boxx {
            pmin: p0,
            pmax: p1,
            list_ptr: list,
        }
    }
}
impl Hitable for Boxx {
    /// Delegates the intersection test to the slice of six face rectangles.
    fn hit(&self, r: &Ray, t_min: f32, t_max: f32, rec: &mut HitRecord) -> bool {
        let list = &self.list_ptr[..];
        list.hit(r, t_min, t_max, rec)
    }
    /// The box's corners are its own bounding volume.
    fn bounding_box(&self, _t0: f32, _t1: f32, aabb: &mut Aabb) -> bool {
        *aabb = Aabb::new(&self.pmin, &self.pmax);
        true
    }
}
| true |
94d4c31ff3da9825f6159573f3ecfdfc1d92a260
|
Rust
|
erikjara/rut-lib
|
/examples/range.rs
|
UTF-8
| 324 | 3.234375 | 3 |
[
"MIT"
] |
permissive
|
use rut_lib::Rut;
/// Demonstrates building a `Rut` from a bare number and printing its parts.
fn main() {
    let number = 24136773;
    match Rut::from_number(number) {
        Err(error) => println!("Error: {:#}", error),
        Ok(rut) => {
            println!("Number: {:#}", rut.number());
            println!("DV: {:#}", rut.dv());
            println!("RUT: {:#}", rut)
        }
    }
}
| true |
364aed03d5089708574be73fafd57d6d80137a7c
|
Rust
|
noshi91/n91lib_rs
|
/src/algorithm/determinant.rs
|
UTF-8
| 1,386 | 3.203125 | 3 |
[
"CC0-1.0"
] |
permissive
|
/*
Description

T: a field
a: an n × n matrix over T

Computes the determinant of a.

Time complexity: Θ(n^3) arithmetic operations, including Θ(n) divisions.

A standard Gaussian-elimination approach.
*/
use crate::other::algebraic::{one, zero, Field};
/// Computes the determinant of the square matrix `a` over the field `T`
/// by Gaussian elimination; returns zero for singular matrices.
pub fn determinant<T>(mut a: Vec<Vec<T>>) -> T
where
    T: Field + Clone,
{
    let n = a.len();
    // The input must be square.
    for a in &a {
        assert_eq!(a.len(), n);
    }
    let mut res: T = one();
    for col in 0..n {
        // Find a pivot row with a non-zero entry in this column; if none
        // exists the matrix is singular and the determinant is zero.
        match (col..n).find(|&row| !a[row][col].is_zero()) {
            None => return zero(),
            Some(row) => {
                if row != col {
                    // Swapping two rows negates the determinant.
                    a.swap(col, row);
                    res = -res;
                }
            }
        }
        {
            // Normalize the pivot row so the pivot becomes one, folding the
            // extracted factor into the result.
            let c = a[col][col].clone();
            let inv_c = T::one() / c.clone();
            for a in &mut a[col][col..] {
                *a *= inv_c.clone();
            }
            res *= c;
        }
        // Eliminate the entries below the pivot.
        let (p, r) = a.split_at_mut(col + 1);
        let p = p.last().unwrap();
        for r in r {
            let c = r[col].clone();
            for (p, r) in p[col..].iter().zip(&mut r[col..]) {
                *r -= c.clone() * p.clone();
            }
        }
    }
    res
}
#[test]
fn test_determinant() {
    use crate::other::Fp;
    // det([[5, 2], [3, 4]]) = 5*4 - 2*3 = 14, computed over Fp.
    let a = vec![vec![Fp(5), Fp(2)], vec![Fp(3), Fp(4)]];
    assert_eq!(determinant(a), Fp(14));
}
| true |
573ef9b2e886fef0f4683eb3db18cbb9fa08e27e
|
Rust
|
LeJane/rust
|
/v1/src/postgresql_db_contexts/equipments.rs
|
UTF-8
| 3,934 | 2.53125 | 3 |
[] |
no_license
|
use crate::models::equipments::Equipment;
use crate::schema::equipments;
use crate::{utils::binary_read_helper::*, BinaryDecode, BinaryEncode};
use crate::{
FrontDisplayMetaVersion, FrontDisplayMetaVersionRelation, MetadataInstance, MetadataTypeEnum,
TableIdEnum,
};
use anyhow::Result;
use diesel::prelude::*;
use std::io::Cursor;
impl Equipment {
    /// Fetches every equipment row.
    pub fn get_equipment_list(conn: &PgConnection) -> QueryResult<Vec<Equipment>> {
        equipments::table.get_results(conn)
    }
    /// Fetches a page (`limit`/`offset`) of equipment with the given `kid`.
    pub fn get_equipment_list_by_kid_page(
        conn: &PgConnection,
        kid: i64,
        limit: i64,
        offset: i64,
    ) -> QueryResult<Vec<Self>> {
        equipments::table
            .filter(equipments::kid.eq(kid))
            .limit(limit)
            .offset(offset)
            .get_results(conn)
    }
    /// Fetches a single equipment row by its primary key `eid`.
    pub fn get_equipment_data_by_id(conn: &PgConnection, eid: i64) -> QueryResult<Equipment> {
        equipments::table.find(eid).first(conn)
    }
    /// Returns the ids of all rows whose `is_default` flag equals 2.
    pub fn get_default_equipment_id_list(conn: &PgConnection) -> QueryResult<Vec<i64>> {
        equipments::table
            .filter(equipments::is_default.eq(2))
            .select(equipments::eid)
            .get_results(conn)
    }
}
impl MetadataInstance for Equipment {
    /// Table identifier tagging equipment metadata records.
    fn get_table_id() -> Result<i32> {
        Ok(TableIdEnum::Equipment.to_i32())
    }
    /// Loads one equipment row by id, wrapped in the metadata enum.
    fn get_single_instance(conn: &PgConnection, id: i64) -> Result<MetadataTypeEnum> {
        let equipment = Equipment::get_equipment_data_by_id(conn, id)?;
        Ok(MetadataTypeEnum::Equipment(equipment))
    }
    /// Loads every equipment row and packages the set as a metadata
    /// version payload (`update_type` = 2, `action_type` = 0 per record).
    fn get_instance_list(conn: &PgConnection) -> Result<FrontDisplayMetaVersion> {
        let rows = Equipment::get_equipment_list(conn)?;
        let table_id = Self::get_table_id()?;
        let mut data_list = Vec::with_capacity(rows.len());
        for row in rows {
            data_list.push(FrontDisplayMetaVersionRelation {
                action_type: 0,
                table_id,
                data: MetadataTypeEnum::Equipment(row),
            });
        }
        Ok(FrontDisplayMetaVersion {
            update_type: 2,
            data_list,
        })
    }
}
impl BinaryEncode for Equipment {
    /// Serializes this record, writing each field in a fixed order that
    /// the `BinaryDecode` impl below must read back identically.
    fn encode(&self) -> Result<Vec<u8>> {
        let mut encoded = Vec::new();
        binary_write_i64(&mut encoded, self.eid)?;
        binary_write_i64(&mut encoded, self.kid)?;
        binary_write_string(&mut encoded, self.name.as_str())?;
        binary_write_string(&mut encoded, self.thumbnail.as_str())?;
        binary_write_i32(&mut encoded, self.price)?;
        binary_write_i32(&mut encoded, self.hp)?;
        binary_write_f32(&mut encoded, self.multiplier)?;
        binary_write_i16(&mut encoded, self.kind)?;
        binary_write_i16(&mut encoded, self.is_default)?;
        binary_write_time(&mut encoded, self.modify_time)?;
        binary_write_time(&mut encoded, self.created_time)?;
        //set item length
        // Final framing is delegated to the buffer's own `encode` impl,
        // which per the comment above appears to apply the item length —
        // confirm against the `BinaryEncode` impl for `Vec<u8>`.
        encoded.encode()
    }
}
impl<'a> BinaryDecode<'a> for Equipment {
    /// Deserializes an `Equipment`, reading fields in exactly the order
    /// `BinaryEncode::encode` writes them. `bytes` is the full backing
    /// buffer used by the string/time readers.
    fn decode(cursor: &mut Cursor<&'a [u8]>, bytes: &'a [u8]) -> Result<Equipment> {
        let eid = binary_read_i64(cursor)?;
        let kid = binary_read_i64(cursor)?;
        let name = binary_read_string(cursor, bytes)?;
        let thumbnail = binary_read_string(cursor, bytes)?;
        let price = binary_read_i32(cursor)?;
        let hp = binary_read_i32(cursor)?;
        let multiplier = binary_read_f32(cursor)?;
        let kind = binary_read_i16(cursor)?;
        let is_default = binary_read_i16(cursor)?;
        let modify_time = binary_read_time(cursor, bytes)?;
        let created_time = binary_read_time(cursor, bytes)?;
        let data = Equipment {
            eid,
            kid,
            name,
            thumbnail,
            price,
            hp,
            multiplier,
            kind,
            is_default,
            modify_time,
            created_time,
        };
        Ok(data)
    }
}
| true |
2119ae5e289577ce9467819168bd073c0d1adc03
|
Rust
|
netvipec/AoC2017
|
/Rust/day24/src/main.rs
|
UTF-8
| 4,200 | 3.171875 | 3 |
[] |
no_license
|
use std::collections::HashMap;
use std::io::{self, BufRead};
type InputT = Vec<(usize, usize)>;
type OutputT = usize;
/// Reads `a/b` component descriptions from stdin, normalizing each pair
/// so the smaller port comes first.
fn read_input() -> InputT {
    let stdin = io::stdin();
    stdin
        .lock()
        .lines()
        .map(|line| {
            let line = line.unwrap();
            let mut parts = line.split('/');
            let a = parts.next().unwrap().parse::<usize>().unwrap();
            let b = parts.next().unwrap().parse::<usize>().unwrap();
            if a <= b {
                (a, b)
            } else {
                (b, a)
            }
        })
        .collect()
}
fn solve1(
input: &InputT,
map_index: &HashMap<usize, Vec<usize>>,
origin: usize,
used_bridges: &mut Vec<bool>,
strength: usize,
) -> usize {
match map_index.get(&origin) {
Some(x) => {
let mut max_s = strength;
x.iter().for_each(|e| {
if !used_bridges[*e] {
let other = if input[*e].0 == origin {
input[*e].1
} else {
input[*e].0
};
used_bridges[*e] = true;
let s = solve1(
&input,
&map_index,
other,
used_bridges,
strength + input[*e].0 + input[*e].1,
);
if s > max_s {
max_s = s;
}
used_bridges[*e] = false;
}
});
return max_s;
}
None => return strength,
};
}
/// Part 1: strongest bridge that can be built starting from port 0.
fn part1(input: &InputT) -> OutputT {
    // Index: port value -> indices of components exposing that port.
    let mut map_index: HashMap<usize, Vec<usize>> = HashMap::new();
    for (idx, &(a, b)) in input.iter().enumerate() {
        map_index.entry(a).or_insert_with(Vec::new).push(idx);
        if a != b {
            map_index.entry(b).or_insert_with(Vec::new).push(idx);
        }
    }
    let mut used_bridges = vec![false; input.len()];
    solve1(input, &map_index, 0, &mut used_bridges, 0)
}
fn solve2(
input: &InputT,
map_index: &HashMap<usize, Vec<usize>>,
origin: usize,
used_bridges: &mut Vec<bool>,
length: usize,
strength: usize,
) -> (usize, usize) {
match map_index.get(&origin) {
Some(x) => {
let mut max_l = length;
let mut max_s = strength;
x.iter().for_each(|e| {
if !used_bridges[*e] {
let other = if input[*e].0 == origin {
input[*e].1
} else {
input[*e].0
};
used_bridges[*e] = true;
let s = solve2(
&input,
&map_index,
other,
used_bridges,
length + 1,
strength + input[*e].0 + input[*e].1,
);
if s.0 > max_l {
max_l = s.0;
max_s = s.1;
} else if s.0 == max_l && s.1 > max_s {
max_s = s.1;
}
used_bridges[*e] = false;
}
});
return (max_l, max_s);
}
None => return (length, strength),
};
}
/// Part 2: strength of the longest (tie-broken by strength) bridge.
fn part2(input: &InputT) -> OutputT {
    // Index: port value -> indices of components exposing that port.
    let mut map_index: HashMap<usize, Vec<usize>> = HashMap::new();
    for (idx, &(a, b)) in input.iter().enumerate() {
        map_index.entry(a).or_insert_with(Vec::new).push(idx);
        if a != b {
            map_index.entry(b).or_insert_with(Vec::new).push(idx);
        }
    }
    let mut used_bridges = vec![false; input.len()];
    solve2(input, &map_index, 0, &mut used_bridges, 0, 0).1
}
fn main() {
    let input = read_input();
    println!("Part1: {}", part1(&input));
    println!("Part2: {}", part2(&input));
}
| true |
47b3e1dbd71d5efaccef803126b7bc2440730fe7
|
Rust
|
dstaatz/kdtree-na
|
/benches/bench.rs
|
UTF-8
| 2,245 | 2.71875 | 3 |
[
"Apache-2.0",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
#![feature(test)]
extern crate kdtree_na;
extern crate nalgebra;
extern crate rand;
extern crate test;
use kdtree_na::norm::EuclideanNormSquared;
use kdtree_na::KdTree;
use nalgebra::{DVector, Vector3};
use test::Bencher;
/// Produces one random statically-sized 3D point with a random payload.
fn rand_data_static() -> (Vector3<f64>, f64) {
    let point = Vector3::new_random();
    let value = rand::random();
    (point, value)
}
#[bench]
fn bench_add_to_kdtree_with_1k_3d_points_static(b: &mut Bencher) {
    // Pre-populate a tree with 1k random static 3D points, then measure
    // the cost of one additional insertion.
    let len = 1000usize;
    let point = rand_data_static();
    // Pre-size the buffer and iterate directly instead of indexing
    // (avoids per-access bounds checks and reallocation).
    let mut points = Vec::with_capacity(len);
    for _ in 0..len {
        points.push(rand_data_static());
    }
    let mut kdtree = KdTree::with_capacity_static(16);
    for (p, value) in &points {
        kdtree.add(p, *value).unwrap();
    }
    b.iter(|| kdtree.add(&point.0, point.1).unwrap());
}
#[bench]
fn bench_nearest_from_kdtree_with_1k_3d_points_static(b: &mut Bencher) {
    // Pre-populate a tree with 1k random static 3D points, then measure
    // an 8-nearest-neighbour query under the squared Euclidean norm.
    let len = 1000usize;
    let point = rand_data_static();
    // Pre-size the buffer and iterate directly instead of indexing.
    let mut points = Vec::with_capacity(len);
    for _ in 0..len {
        points.push(rand_data_static());
    }
    let mut kdtree = KdTree::with_capacity_static(16);
    for (p, value) in &points {
        kdtree.add(p, *value).unwrap();
    }
    b.iter(|| kdtree.nearest(&point.0, 8, &EuclideanNormSquared).unwrap());
}
/// Produces one random dynamically-sized 3D point with a random payload.
fn rand_data_dynamic() -> (DVector<f64>, f64) {
    let point = DVector::new_random(3);
    let value = rand::random();
    (point, value)
}
#[bench]
fn bench_add_to_kdtree_with_1k_3d_points_dynamic(b: &mut Bencher) {
    // Same as the static benchmark, but with heap-allocated DVector points.
    let len = 1000usize;
    let point = rand_data_dynamic();
    // Pre-size the buffer and iterate directly instead of indexing.
    let mut points = Vec::with_capacity(len);
    for _ in 0..len {
        points.push(rand_data_dynamic());
    }
    let mut kdtree = KdTree::with_capacity_dynamic(3, 16);
    for (p, value) in &points {
        kdtree.add(p, *value).unwrap();
    }
    b.iter(|| kdtree.add(&point.0, point.1).unwrap());
}
#[bench]
fn bench_nearest_from_kdtree_with_1k_3d_points_dynamic(b: &mut Bencher) {
    // 8-nearest-neighbour query benchmark over dynamically-sized points.
    let len = 1000usize;
    let point = rand_data_dynamic();
    // Pre-size the buffer and iterate directly instead of indexing.
    let mut points = Vec::with_capacity(len);
    for _ in 0..len {
        points.push(rand_data_dynamic());
    }
    let mut kdtree = KdTree::with_capacity_dynamic(3, 16);
    for (p, value) in &points {
        kdtree.add(p, *value).unwrap();
    }
    b.iter(|| kdtree.nearest(&point.0, 8, &EuclideanNormSquared).unwrap());
}
| true |
07e53b5f85aab1250d0679a6955c0cc201426c4a
|
Rust
|
GaiaWorld/pi_lib
|
/hash/src/lib.rs
|
UTF-8
| 2,617 | 2.953125 | 3 |
[
"Apache-2.0",
"MIT"
] |
permissive
|
//! This module provides the `XHashMap` and `XHashSet` containers as
//! replacements for the standard library's `HashMap` and `HashSet`.
//! Libraries are encouraged to use these aliases instead of choosing a
//! concrete map type themselves: the library author does not need to pick a
//! hash algorithm — the final application selects one via cargo features.
//!
//! Example:
//! A library named `gui` uses this module's `XHashMap`.
//!
//! Another crate, `gui_web`, wraps `gui` for compilation to asm/wasm for the
//! web. In asm, 64-bit integer arithmetic is noticeably slower than 32-bit,
//! so a 32-bit hash is preferable there; for the typical key lengths of
//! `gui`'s maps, xxhash is also faster than the alternatives (different hash
//! algorithms win in different scenarios). `gui_web` therefore only needs to
//! enable the `xxhash` feature in its Cargo.toml to switch the hasher used
//! by `gui`'s maps to xxhash. The word size need not be chosen by hand —
//! this module picks it from the compile target, so building for wasm/asm
//! automatically selects the 32-bit variant. And if xxhash later turns out
//! to be a poor fit, switching algorithms is cheap: just change the feature.
//!
//! Currently the set of supported hash algorithms is limited to xxhash and
//! fxhash.
extern crate fxhash;
extern crate twox_hash;
use std::hash::{BuildHasherDefault};
use std::collections::{HashMap, HashSet};
// On 32-bit targets, without the "xxhash" feature, default to FxHasher32.
#[cfg(all( not(feature = "xxhash") , target_pointer_width = "32"))]
pub type DefaultHasher = fxhash::FxHasher32;
// On 64-bit targets, without the "xxhash" feature, default to FxHasher64.
#[cfg(all( not(feature = "xxhash") , target_pointer_width = "64"))]
pub type DefaultHasher = fxhash::FxHasher64;
// On 32-bit targets with the "xxhash" feature, use XxHash32.
#[cfg(all(feature = "xxhash", target_pointer_width = "32"))]
pub type DefaultHasher = twox_hash::XxHash32;
// On 64-bit targets with the "xxhash" feature, use XxHash64.
#[cfg(all(feature = "xxhash", target_pointer_width = "64"))]
pub type DefaultHasher = twox_hash::XxHash64;
// The default HashMap/HashSet aliases, keyed off `DefaultHasher` (chosen
// above from the target word size and enabled features).
pub type XHashMap<K, V> = HashMap<K, V, BuildHasherDefault<DefaultHasher>>;
pub type XHashSet<K> = HashSet<K, BuildHasherDefault<DefaultHasher>>;
| true |
a1d16944982e36f7001f041c0519e6bee07b7ea7
|
Rust
|
seritools/rust
|
/src/tools/clippy/clippy_lints/src/returns.rs
|
UTF-8
| 9,788 | 2.890625 | 3 |
[
"Apache-2.0",
"MIT",
"LicenseRef-scancode-other-permissive",
"BSD-3-Clause",
"BSD-2-Clause",
"NCSA"
] |
permissive
|
use if_chain::if_chain;
use rustc_ast::ast;
use rustc_ast::visit::FnKind;
use rustc_errors::Applicability;
use rustc_lint::{EarlyContext, EarlyLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::source_map::Span;
use rustc_span::BytePos;
use crate::utils::{snippet_opt, span_lint_and_sugg, span_lint_and_then};
declare_clippy_lint! {
    /// **What it does:** Checks for return statements at the end of a block.
    /// Function bodies, closures, nested blocks, `if`/`else` branches and
    /// `match` arms are all checked.
    ///
    /// **Why is this bad?** Removing the `return` and semicolon will make the code
    /// more rusty.
    ///
    /// **Known problems:** If the computation returning the value borrows a local
    /// variable, removing the `return` may run afoul of the borrow checker.
    ///
    /// **Example:**
    /// ```rust
    /// fn foo(x: usize) -> usize {
    ///     return x;
    /// }
    /// ```
    /// simplify to
    /// ```rust
    /// fn foo(x: usize) -> usize {
    ///     x
    /// }
    /// ```
    pub NEEDLESS_RETURN,
    style,
    "using a return statement like `return expr;` where an expression would suffice"
}
declare_clippy_lint! {
    /// **What it does:** Checks for unit (`()`) expressions that can be removed.
    /// This covers trailing `()` expressions, `-> ()` return types,
    /// `return ()` / `break ()` payloads and `Fn() -> ()` style bounds.
    ///
    /// **Why is this bad?** Such expressions add no value, but can make the code
    /// less readable. Depending on formatting they can make a `break` or `return`
    /// statement look like a function call.
    ///
    /// **Known problems:** The lint currently misses unit return types in types,
    /// e.g., the `F` in `fn generic_unit<F: Fn() -> ()>(f: F) { .. }`.
    ///
    /// **Example:**
    /// ```rust
    /// fn return_unit() -> () {
    ///     ()
    /// }
    /// ```
    pub UNUSED_UNIT,
    style,
    "needless unit expression"
}
// How an unneeded `return` should be replaced in the suggestion.
#[derive(PartialEq, Eq, Copy, Clone)]
enum RetReplacement {
    // Remove the `return` entirely (ordinary tail position).
    Empty,
    // Replace the `return` with an empty block `{}` (match-arm position,
    // where an arm body must remain syntactically present).
    Block,
}
declare_lint_pass!(Return => [NEEDLESS_RETURN, UNUSED_UNIT]);
impl Return {
    // Check the final stmt or expr in a block for unnecessary return.
    fn check_block_return(&mut self, cx: &EarlyContext<'_>, block: &ast::Block) {
        if let Some(stmt) = block.stmts.last() {
            match stmt.kind {
                // Both `return x` (expr) and `return x;` (semi) count.
                ast::StmtKind::Expr(ref expr) | ast::StmtKind::Semi(ref expr) => {
                    self.check_final_expr(cx, expr, Some(stmt.span), RetReplacement::Empty);
                },
                _ => (),
            }
        }
    }

    // Check the final expression in a block if it's a return.
    // Recurses through nested blocks, if/else branches and match arms so a
    // trailing `return` is found in every tail position.
    fn check_final_expr(
        &mut self,
        cx: &EarlyContext<'_>,
        expr: &ast::Expr,
        span: Option<Span>,
        replacement: RetReplacement,
    ) {
        match expr.kind {
            // simple return is always "bad"
            ast::ExprKind::Ret(ref inner) => {
                // allow `#[cfg(a)] return a; #[cfg(b)] return b;`
                if !expr.attrs.iter().any(attr_is_cfg) {
                    Self::emit_return_lint(
                        cx,
                        span.expect("`else return` is not possible"),
                        inner.as_ref().map(|i| i.span),
                        replacement,
                    );
                }
            },
            // a whole block? check it!
            ast::ExprKind::Block(ref block, _) => {
                self.check_block_return(cx, block);
            },
            // an if/if let expr, check both exprs
            // note, if without else is going to be a type checking error anyways
            // (except for unit type functions) so we don't match it
            ast::ExprKind::If(_, ref ifblock, Some(ref elsexpr)) => {
                self.check_block_return(cx, ifblock);
                self.check_final_expr(cx, elsexpr, None, RetReplacement::Empty);
            },
            // a match expr, check all arms
            ast::ExprKind::Match(_, ref arms) => {
                for arm in arms {
                    // A bare `return` arm body must become `{}` to stay valid.
                    self.check_final_expr(cx, &arm.body, Some(arm.body.span), RetReplacement::Block);
                }
            },
            _ => (),
        }
    }

    // Emit the NEEDLESS_RETURN diagnostic. With an inner expression the
    // suggestion keeps the expression (minus `return`); a bare `return` is
    // removed or replaced by `{}` according to `replacement`.
    fn emit_return_lint(cx: &EarlyContext<'_>, ret_span: Span, inner_span: Option<Span>, replacement: RetReplacement) {
        match inner_span {
            Some(inner_span) => {
                // Never suggest edits inside macro expansions.
                if in_external_macro(cx.sess(), inner_span) || inner_span.from_expansion() {
                    return;
                }

                span_lint_and_then(cx, NEEDLESS_RETURN, ret_span, "unneeded `return` statement", |diag| {
                    if let Some(snippet) = snippet_opt(cx, inner_span) {
                        diag.span_suggestion(ret_span, "remove `return`", snippet, Applicability::MachineApplicable);
                    }
                })
            },
            None => match replacement {
                RetReplacement::Empty => {
                    span_lint_and_sugg(
                        cx,
                        NEEDLESS_RETURN,
                        ret_span,
                        "unneeded `return` statement",
                        "remove `return`",
                        String::new(),
                        Applicability::MachineApplicable,
                    );
                },
                RetReplacement::Block => {
                    span_lint_and_sugg(
                        cx,
                        NEEDLESS_RETURN,
                        ret_span,
                        "unneeded `return` statement",
                        "replace `return` with an empty block",
                        "{}".to_string(),
                        Applicability::MachineApplicable,
                    );
                },
            },
        }
    }
}
impl EarlyLintPass for Return {
    // Lints function/closure bodies (NEEDLESS_RETURN) and `-> ()` return
    // type annotations on the signature itself (UNUSED_UNIT).
    fn check_fn(&mut self, cx: &EarlyContext<'_>, kind: FnKind<'_>, span: Span, _: ast::NodeId) {
        match kind {
            FnKind::Fn(.., Some(block)) => self.check_block_return(cx, block),
            FnKind::Closure(_, body) => self.check_final_expr(cx, body, Some(body.span), RetReplacement::Empty),
            // Body-less fn (e.g. trait method declaration): nothing to check.
            FnKind::Fn(.., None) => {},
        }
        // `-> ()` written out on the signature; the `get_def` comparison
        // skips cases where fn and type come from different expansions.
        if_chain! {
            if let ast::FnRetTy::Ty(ref ty) = kind.decl().output;
            if let ast::TyKind::Tup(ref vals) = ty.kind;
            if vals.is_empty() && !ty.span.from_expansion() && get_def(span) == get_def(ty.span);
            then {
                lint_unneeded_unit_return(cx, ty, span);
            }
        }
    }

    // A block whose final statement is a needless literal `()` expression.
    fn check_block(&mut self, cx: &EarlyContext<'_>, block: &ast::Block) {
        if_chain! {
            if let Some(ref stmt) = block.stmts.last();
            if let ast::StmtKind::Expr(ref expr) = stmt.kind;
            if is_unit_expr(expr) && !stmt.span.from_expansion();
            then {
                let sp = expr.span;
                span_lint_and_sugg(
                    cx,
                    UNUSED_UNIT,
                    sp,
                    "unneeded unit expression",
                    "remove the final `()`",
                    String::new(),
                    Applicability::MachineApplicable,
                );
            }
        }
    }

    // `return ()` / `break ()` carrying an explicit unit payload.
    fn check_expr(&mut self, cx: &EarlyContext<'_>, e: &ast::Expr) {
        match e.kind {
            ast::ExprKind::Ret(Some(ref expr)) | ast::ExprKind::Break(_, Some(ref expr)) => {
                if is_unit_expr(expr) && !expr.span.from_expansion() {
                    span_lint_and_sugg(
                        cx,
                        UNUSED_UNIT,
                        expr.span,
                        "unneeded `()`",
                        "remove the `()`",
                        String::new(),
                        Applicability::MachineApplicable,
                    );
                }
            },
            _ => (),
        }
    }

    // `Fn() -> ()` / `FnMut() -> ()` / `FnOnce() -> ()` trait bounds.
    fn check_poly_trait_ref(&mut self, cx: &EarlyContext<'_>, poly: &ast::PolyTraitRef, _: &ast::TraitBoundModifier) {
        let segments = &poly.trait_ref.path.segments;

        if_chain! {
            if segments.len() == 1;
            if ["Fn", "FnMut", "FnOnce"].contains(&&*segments[0].ident.name.as_str());
            if let Some(args) = &segments[0].args;
            if let ast::GenericArgs::Parenthesized(generic_args) = &**args;
            if let ast::FnRetTy::Ty(ty) = &generic_args.output;
            if ty.kind.is_unit();
            then {
                lint_unneeded_unit_return(cx, ty, generic_args.span);
            }
        }
    }
}
// Returns true for list-style `#[cfg(...)]` attributes.
fn attr_is_cfg(attr: &ast::Attribute) -> bool {
    attr.meta_item_list().is_some() && attr.has_name(sym!(cfg))
}
// Returns the macro definition site for spans that come from an
// expansion; `None` for spans written directly in source.
#[must_use]
fn get_def(span: Span) -> Option<Span> {
    if !span.from_expansion() {
        return None;
    }
    Some(span.ctxt().outer_expn_data().def_site)
}
// Returns true when `expr` is the literal unit value `()`, i.e. an
// empty tuple expression.
fn is_unit_expr(expr: &ast::Expr) -> bool {
    matches!(expr.kind, ast::ExprKind::Tup(ref vals) if vals.is_empty())
}
// Suggests removing a unit return type annotation (`-> ()`).
fn lint_unneeded_unit_return(cx: &EarlyContext<'_>, ty: &ast::Ty, span: Span) {
    // When the source snippet is available, extend the suggestion span
    // back to the `->` token so the arrow is removed together with `()`;
    // otherwise fall back to the type span with a weaker applicability.
    let (ret_span, appl) = if let Ok(fn_source) = cx.sess().source_map().span_to_snippet(span.with_hi(ty.span.hi())) {
        fn_source
            .rfind("->")
            .map_or((ty.span, Applicability::MaybeIncorrect), |rpos| {
                (
                    #[allow(clippy::cast_possible_truncation)]
                    ty.span.with_lo(BytePos(span.lo().0 + rpos as u32)),
                    Applicability::MachineApplicable,
                )
            })
    } else {
        (ty.span, Applicability::MaybeIncorrect)
    };
    span_lint_and_sugg(
        cx,
        UNUSED_UNIT,
        ret_span,
        "unneeded unit return type",
        "remove the `-> ()`",
        String::new(),
        appl,
    );
}
| true |
f65db069c084b4f14ac3b8aea903544b2ec19151
|
Rust
|
GavinRay97/reaper-rs
|
/main/medium/src/reaper_pointer.rs
|
UTF-8
| 3,082 | 2.84375 | 3 |
[
"MIT"
] |
permissive
|
use super::{MediaItem, MediaItemTake, MediaTrack, ReaProject, TrackEnvelope};
use crate::{concat_reaper_strs, PcmSource, ReaperStr, ReaperStringArg};
use std::borrow::Cow;
use std::os::raw::c_void;
/// Validatable REAPER pointer.
///
/// Each variant wraps one of the known REAPER pointer newtypes; `Custom`
/// covers any pointer type not modeled here.
#[derive(Clone, Eq, PartialEq, Hash, Debug)]
pub enum ReaperPointer<'a> {
    MediaTrack(MediaTrack),
    ReaProject(ReaProject),
    MediaItem(MediaItem),
    MediaItemTake(MediaItemTake),
    TrackEnvelope(TrackEnvelope),
    PcmSource(PcmSource),
    /// If a variant is missing in this enum, you can use this custom one as a resort.
    ///
    /// Use [`custom()`] to create this variant.
    ///
    /// [`custom()`]: #method.custom
    Custom {
        // Type name WITHOUT the trailing `*`; it is appended on demand.
        type_name: Cow<'a, ReaperStr>,
        pointer: *mut c_void,
    },
}
impl<'a> ReaperPointer<'a> {
    /// Convenience function for creating a [`Custom`] pointer.
    ///
    /// **Don't** include the trailing asterisk (`*`)! It will be added automatically.
    ///
    /// [`Custom`]: #variant.Custom
    pub fn custom(
        pointer: *mut c_void,
        type_name: impl Into<ReaperStringArg<'a>>,
    ) -> ReaperPointer<'a> {
        ReaperPointer::Custom {
            pointer,
            type_name: type_name.into().into_inner(),
        }
    }

    // Returns the REAPER type-name key for this pointer kind (e.g.
    // `"MediaTrack*"`). For `Custom`, the trailing `*` is appended to the
    // stored name here.
    pub(crate) fn key_into_raw(self) -> Cow<'a, ReaperStr> {
        use ReaperPointer::*;
        match self {
            MediaTrack(_) => reaper_str!("MediaTrack*").into(),
            ReaProject(_) => reaper_str!("ReaProject*").into(),
            MediaItem(_) => reaper_str!("MediaItem*").into(),
            MediaItemTake(_) => reaper_str!("MediaItem_Take*").into(),
            TrackEnvelope(_) => reaper_str!("TrackEnvelope*").into(),
            PcmSource(_) => reaper_str!("PCM_source*").into(),
            Custom {
                pointer: _,
                type_name,
            } => concat_reaper_strs(type_name.as_ref(), reaper_str!("*")).into(),
        }
    }

    // Returns the wrapped pointer erased to an untyped `*mut c_void`.
    pub(crate) fn ptr_as_void(&self) -> *mut c_void {
        use ReaperPointer::*;
        match self {
            MediaTrack(p) => p.as_ptr() as *mut _,
            ReaProject(p) => p.as_ptr() as *mut _,
            MediaItem(p) => p.as_ptr() as *mut _,
            MediaItemTake(p) => p.as_ptr() as *mut _,
            TrackEnvelope(p) => p.as_ptr() as *mut _,
            PcmSource(p) => p.to_raw() as *mut _,
            Custom { pointer, .. } => *pointer,
        }
    }
}
/// For just having to pass a NonNull pointer to `validate_ptr_2`. Very convenient!
// Generates `impl From<$struct_type> for ReaperPointer`, mapping each
// wrapped pointer type onto its matching enum variant.
macro_rules! impl_from_ptr_to_variant {
    ($struct_type: ty, $enum_name: ident) => {
        impl<'a> From<$struct_type> for ReaperPointer<'a> {
            fn from(p: $struct_type) -> Self {
                ReaperPointer::$enum_name(p)
            }
        }
    };
}

impl_from_ptr_to_variant!(MediaTrack, MediaTrack);
impl_from_ptr_to_variant!(ReaProject, ReaProject);
impl_from_ptr_to_variant!(MediaItem, MediaItem);
impl_from_ptr_to_variant!(MediaItemTake, MediaItemTake);
impl_from_ptr_to_variant!(TrackEnvelope, TrackEnvelope);
impl_from_ptr_to_variant!(PcmSource, PcmSource);
| true |
00a1c1d7c95863ee33307775bc8c27225665b4c2
|
Rust
|
ericdke/NatoPhone-Rust
|
/src/decode.rs
|
UTF-8
| 1,462 | 3.296875 | 3 |
[] |
no_license
|
use std::collections::HashMap;
/// Decodes a sequence of NATO-alphabet words into a string; `yell`
/// upper-cases the result.
pub fn translated(words: Vec<String>, yell: bool) -> String {
    let decoded = translate(words).concat();
    if yell {
        decoded.to_uppercase()
    } else {
        decoded
    }
}
/// Maps each NATO word to its single-character string; words not in the
/// dictionary decode to a space.
fn translate(words: Vec<String>) -> Vec<String> {
    let dictionary = get_map();
    words
        .into_iter()
        .map(|word| {
            let lowered = word.to_lowercase();
            dictionary
                .get(lowered.as_str())
                .cloned()
                .unwrap_or(" ")
                .to_string()
        })
        .collect()
}
/// Builds the decoding table: NATO phonetic alphabet, spelled-out digits,
/// and "stop" as the sentence terminator.
fn get_map<'a>() -> HashMap<&'a str, &'a str> {
    let pairs = [
        ("alpha", "a"),
        ("bravo", "b"),
        ("charlie", "c"),
        ("delta", "d"),
        ("echo", "e"),
        ("foxtrot", "f"),
        ("golf", "g"),
        ("hotel", "h"),
        ("india", "i"),
        ("juliet", "j"),
        ("kilo", "k"),
        ("lima", "l"),
        ("mike", "m"),
        ("november", "n"),
        ("oscar", "o"),
        ("papa", "p"),
        ("quebec", "q"),
        ("romeo", "r"),
        ("sierra", "s"),
        ("tango", "t"),
        ("uniform", "u"),
        ("victor", "v"),
        ("whiskey", "w"),
        ("x-ray", "x"),
        ("yankee", "y"),
        ("zulu", "z"),
        ("zero", "0"),
        ("one", "1"),
        ("two", "2"),
        ("three", "3"),
        ("four", "4"),
        ("five", "5"),
        ("six", "6"),
        ("seven", "7"),
        ("eight", "8"),
        ("niner", "9"),
        ("stop", "."),
    ];
    let mut map = HashMap::new();
    for &(word, symbol) in pairs.iter() {
        map.insert(word, symbol);
    }
    map
}
| true |
d30d530dd86afec040e11dbb690718e8aecebd6a
|
Rust
|
sum12/rustbook
|
/strings/string_ownsership.rs
|
UTF-8
| 317 | 3.546875 | 4 |
[] |
no_license
|
fn main() {
    let message = String::from("get length of this string");
    // Borrow: `message` stays usable afterwards.
    println!("{}", calc_len(&message));
    // Move in, get ownership back through the returned tuple.
    let (length, message) = ret_tuple(message);
    println!("length from tuple {}", length);
    println!("{}", message);
}
/// Returns the length of `s` in bytes.
///
/// Takes `&str` rather than `&String` (clippy `ptr_arg`): existing
/// `calc_len(&some_string)` call sites keep working via deref coercion,
/// and string literals/slices can now be passed too.
fn calc_len(s: &str) -> usize {
    s.len()
}
/// Returns the string's byte length together with ownership of the
/// string itself, handing both back to the caller.
fn ret_tuple(x: String) -> (usize, String) {
    let length = x.len();
    (length, x)
}
| true |
35c23187d42026bf72387b1c80a01c7a82155372
|
Rust
|
colin353/rules_rust
|
/tools/rustdoc/rustdoc_test_writer.rs
|
UTF-8
| 6,234 | 3.09375 | 3 |
[
"Apache-2.0"
] |
permissive
|
//! A utility for writing scripts for use as test executables intended to match the
//! subcommands of Bazel build actions so `rustdoc --test`, which builds and tests
//! code in a single call, can be run as a test target in a hermetic manner.
use std::cmp::Reverse;
use std::collections::{BTreeMap, BTreeSet};
use std::env;
use std::fs;
use std::path::{Path, PathBuf};
// Parsed writer configuration; see `parse_args` for the flag syntax.
#[derive(Debug)]
struct Options {
    /// A list of environment variable keys to parse from the build action env.
    env_keys: BTreeSet<String>,
    /// A list of substrings to strip from [Options::action_argv].
    strip_substrings: Vec<String>,
    /// The path where the script should be written.
    output: PathBuf,
    /// The `argv` of the configured rustdoc build action.
    action_argv: Vec<String>,
}
/// Parse command line arguments
/// Parse command line arguments.
///
/// Everything before the `--` separator configures this writer
/// (`--output=`, `--strip_substring=`, `--action_env=`); everything after
/// it is the rustdoc build action's argv, captured verbatim.
///
/// Panics if the `--` separator or the required `--output=` flag is
/// missing.
fn parse_args() -> Options {
    // `env::args()` is already an iterator; no `.into_iter()` needed.
    let args: Vec<String> = env::args().collect();

    let (writer_args, action_args) = {
        let split = args
            .iter()
            .position(|arg| arg == "--")
            .expect("Unable to find split identifier `--`");
        // Converting each set into a vector makes them easier to parse in
        // the absence of nightly features
        let (writer, action) = args.split_at(split);
        (writer.to_vec(), action.to_vec())
    };

    // Remove the leading `--` which is expected to be the first
    // item in `action_args`
    debug_assert_eq!(action_args[0], "--");
    let action_argv = action_args[1..].to_vec();

    // `--output=<path>`: where the generated script is written.
    let output = writer_args
        .iter()
        .find(|arg| arg.starts_with("--output="))
        .and_then(|arg| arg.splitn(2, '=').last())
        .map(PathBuf::from)
        .expect("Missing `--output` argument");

    // Split off all `--strip_substring=` flags from the remaining args.
    let (strip_substring_args, writer_args): (Vec<String>, Vec<String>) = writer_args
        .into_iter()
        .partition(|arg| arg.starts_with("--strip_substring="));

    let mut strip_substrings: Vec<String> = strip_substring_args
        .into_iter()
        .map(|arg| {
            arg.splitn(2, '=')
                .last()
                .expect("--strip_substring arguments must have assignments using `=`")
                .to_owned()
        })
        .collect();

    // Strip substrings should always be in reverse order of the length of each
    // string so when filtering we know that the longer strings are checked
    // first in order to avoid cases where shorter strings might match longer ones.
    strip_substrings.sort_by_key(|b| Reverse(b.len()));
    strip_substrings.dedup();

    // `--action_env=<KEY>`: environment variables to forward to the script.
    let env_keys = writer_args
        .into_iter()
        .filter(|arg| arg.starts_with("--action_env="))
        .map(|arg| {
            arg.splitn(2, '=')
                .last()
                .expect("--env arguments must have assignments using `=`")
                .to_owned()
        })
        .collect();

    Options {
        env_keys,
        strip_substrings,
        output,
        action_argv,
    }
}
/// Write a unix compatible test runner
fn write_test_runner_unix(
path: &Path,
env: &BTreeMap<String, String>,
argv: &[String],
strip_substrings: &[String],
) {
let mut content = vec![
"#!/usr/bin/env bash".to_owned(),
"".to_owned(),
"exec env - \\".to_owned(),
];
content.extend(env.iter().map(|(key, val)| format!("{}='{}' \\", key, val)));
let argv_str = argv
.iter()
// Remove any substrings found in the argument
.map(|arg| {
let mut stripped_arg = arg.to_owned();
strip_substrings
.iter()
.for_each(|substring| stripped_arg = stripped_arg.replace(substring, ""));
stripped_arg
})
.map(|arg| format!("'{}'", arg))
.collect::<Vec<String>>()
.join(" ");
content.extend(vec![argv_str, "".to_owned()]);
fs::write(path, content.join("\n")).expect("Failed to write test runner");
}
/// Write a windows compatible test runner
fn write_test_runner_windows(
path: &Path,
env: &BTreeMap<String, String>,
argv: &[String],
strip_substrings: &[String],
) {
let env_str = env
.iter()
.map(|(key, val)| format!("$env:{}='{}'", key, val))
.collect::<Vec<String>>()
.join(" ; ");
let argv_str = argv
.iter()
// Remove any substrings found in the argument
.map(|arg| {
let mut stripped_arg = arg.to_owned();
strip_substrings
.iter()
.for_each(|substring| stripped_arg = stripped_arg.replace(substring, ""));
stripped_arg
})
.map(|arg| format!("'{}'", arg))
.collect::<Vec<String>>()
.join(" ");
let content = vec![
"@ECHO OFF".to_owned(),
"".to_owned(),
format!("powershell.exe -c \"{} ; & {}\"", env_str, argv_str),
"".to_owned(),
];
fs::write(path, content.join("\n")).expect("Failed to write test runner");
}
#[cfg(target_family = "unix")]
fn set_executable(path: &Path) {
use std::os::unix::prelude::PermissionsExt;
let mut perm = fs::metadata(path)
.expect("Failed to get test runner metadata")
.permissions();
perm.set_mode(0o755);
fs::set_permissions(path, perm).expect("Failed to set permissions on test runner");
}
#[cfg(target_family = "windows")]
fn set_executable(_path: &Path) {
// Windows determines whether or not a file is executable via the PATHEXT
// environment variable. This function is a no-op for this platform.
}
// Writes the platform-appropriate runner script and marks it executable
// (a no-op on Windows, where PATHEXT governs executability).
fn write_test_runner(
    path: &Path,
    env: &BTreeMap<String, String>,
    argv: &[String],
    strip_substrings: &[String],
) {
    if cfg!(target_family = "unix") {
        write_test_runner_unix(path, env, argv, strip_substrings);
    } else if cfg!(target_family = "windows") {
        write_test_runner_windows(path, env, argv, strip_substrings);
    }
    set_executable(path);
}
fn main() {
    let opt = parse_args();
    // Capture only the allow-listed environment variables from the current
    // process environment. `env::vars()` is already an iterator, and
    // `BTreeSet::contains` replaces the linear `any` scan.
    let env: BTreeMap<String, String> = env::vars()
        .filter(|(key, _)| opt.env_keys.contains(key))
        .collect();
    write_test_runner(&opt.output, &env, &opt.action_argv, &opt.strip_substrings);
}
| true |
10648413b854c575bbf9978f02bf9de86ba80ba8
|
Rust
|
GrayChrysTea/mtdalgos
|
/src/macros/errconvert.rs
|
UTF-8
| 2,120 | 3.40625 | 3 |
[
"MIT"
] |
permissive
|
//! This module contains macro definitions for handling [`Result`]s and
//! [`Options`].
/// Destructures an [`Option`] and sends the item contained within back out
/// if the [`Option`] destructures to [`Some(..)`], else a [`std::io::Error`]
/// is propagated from the function.
///
/// The enclosing function must return `Result<_, std::io::Error>`.
#[macro_export]
macro_rules! unwrapoption {
    ($result: expr) => {
        match $result {
            Some(res) => res,
            // A missing value is reported as `ErrorKind::NotFound`.
            None => {
                return Err(std::io::Error::new(
                    std::io::ErrorKind::NotFound,
                    "Could not find requested item.",
                ))
            }
        }
    };
}
/// Unwraps a request to lock a [`std::sync::Mutex`] but propagates the error
/// as a [`std::io::Error`] instead of panicking.
///
/// The enclosing function must return `Result<_, std::io::Error>`.
#[macro_export]
macro_rules! unwrapmutex {
    ($result: expr) => {{
        match $result {
            Ok(lock) => lock,
            // A poisoned lock is reported as `ErrorKind::PermissionDenied`.
            Err(_error) => {
                return Err(std::io::Error::new(
                    std::io::ErrorKind::PermissionDenied,
                    "The Mutex was poisoned.",
                ))
            }
        }
    }};
}
/// Unwraps a request to read a [`std::sync::mpsc::Receiver`] but propagates
/// the error as a [`std::io::Error`] instead of panicking.
///
/// The enclosing function must return `Result<_, std::io::Error>`.
#[macro_export]
macro_rules! unwrapreceiver {
    ($result: expr) => {{
        match $result {
            Ok(message) => message,
            // A dropped transmitter is reported as `ConnectionAborted`.
            Err(_error) => {
                return Err(std::io::Error::new(
                    std::io::ErrorKind::ConnectionAborted,
                    "Transmitter was dropped.",
                ))
            }
        }
    }};
}
/// Unwraps a request to write to a [`std::sync::mpsc::Sender`] but propagates
/// the error as a [`std::io::Error`] instead of panicking.
///
/// The enclosing function must return `Result<_, std::io::Error>`.
#[macro_export]
macro_rules! unwrapsender {
    ($result: expr) => {{
        match $result {
            Ok(res) => res,
            // A dropped receiver is reported as `ConnectionAborted`.
            Err(_error) => {
                return Err(std::io::Error::new(
                    std::io::ErrorKind::ConnectionAborted,
                    "Receiver was dropped.",
                ))
            }
        }
    }};
}
| true |
414df50556c4c2dba797be85a47cecd9bfddcb0f
|
Rust
|
dbrgn/atat
|
/atat/src/client.rs
|
UTF-8
| 18,633 | 2.71875 | 3 |
[
"MIT",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use bbqueue::framed::FrameConsumer;
use embedded_hal_nb::{nb, serial};
use fugit::ExtU32;
use crate::error::{Error, Response};
use crate::helpers::LossyStr;
use crate::traits::{AtatClient, AtatCmd, AtatUrc};
use crate::Config;
// Internal request/response state of the client.
#[derive(Debug, PartialEq)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
enum ClientState {
    // No command in flight; ready to transmit.
    Idle,
    // A command has been written out; waiting for its response frame.
    AwaitingResponse,
}
/// Whether the AT client should block while waiting responses or return early.
///
/// Selected through [`Config`] and consulted by [`Client`] on each send.
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum Mode {
    /// The function call will wait as long as necessary to complete the operation
    Blocking,
    /// The function call will not wait at all to complete the operation, and only do what it can.
    NonBlocking,
    /// The function call will wait only up the max timeout of each command to complete the operation.
    Timeout,
}
/// Client responsible for handling send, receive and timeout from the
/// userfacing side. The client is decoupled from the ingress-manager through
/// some spsc queue consumers, where any received responses can be dequeued. The
/// Client also has an spsc producer, to allow signaling commands like
/// `reset` to the ingress-manager.
pub struct Client<
    Tx,
    CLK,
    const TIMER_HZ: u32,
    const RES_CAPACITY: usize,
    const URC_CAPACITY: usize,
> where
    Tx: serial::Write<u8>,
    CLK: fugit_timer::Timer<TIMER_HZ>,
{
    /// Serial writer
    tx: Tx,
    /// The response consumer receives responses from the ingress manager
    res_c: FrameConsumer<'static, RES_CAPACITY>,
    /// The URC consumer receives URCs from the ingress manager
    urc_c: FrameConsumer<'static, URC_CAPACITY>,
    /// Whether a command is currently awaiting its response.
    state: ClientState,
    /// Timer used for the inter-command cooldown and, in `Mode::Timeout`,
    /// the per-command timeout.
    timer: CLK,
    /// Client configuration (mode, command cooldown, ...).
    config: Config,
}
impl<Tx, CLK, const TIMER_HZ: u32, const RES_CAPACITY: usize, const URC_CAPACITY: usize>
    Client<Tx, CLK, TIMER_HZ, RES_CAPACITY, URC_CAPACITY>
where
    Tx: serial::Write<u8>,
    CLK: fugit_timer::Timer<TIMER_HZ>,
{
    /// Creates a new client from a serial writer, the ingress-manager's
    /// response/URC frame consumers, a timer and a configuration.
    ///
    /// The cooldown timer is started immediately so the very first `send`
    /// already honours `config.cmd_cooldown`.
    pub fn new(
        tx: Tx,
        res_c: FrameConsumer<'static, RES_CAPACITY>,
        urc_c: FrameConsumer<'static, URC_CAPACITY>,
        mut timer: CLK,
        config: Config,
    ) -> Self {
        timer.start(config.cmd_cooldown.millis()).ok();
        Self {
            tx,
            res_c,
            urc_c,
            state: ClientState::Idle,
            config,
            timer,
        }
    }
}
impl<Tx, CLK, const TIMER_HZ: u32, const RES_CAPACITY: usize, const URC_CAPACITY: usize> AtatClient
    for Client<Tx, CLK, TIMER_HZ, RES_CAPACITY, URC_CAPACITY>
where
    Tx: serial::Write<u8>,
    CLK: fugit_timer::Timer<TIMER_HZ>,
{
    /// Transmits `cmd` when idle, then polls for its response according to
    /// the configured [`Mode`].
    fn send<A: AtatCmd<LEN>, const LEN: usize>(
        &mut self,
        cmd: &A,
    ) -> nb::Result<A::Response, Error> {
        if self.state == ClientState::Idle {
            // compare the time of the last response or URC and ensure at least
            // `self.config.cmd_cooldown` ms have passed before sending a new
            // command
            nb::block!(self.timer.wait()).ok();
            let cmd_buf = cmd.as_bytes();

            // Long payloads are not logged in full.
            if cmd_buf.len() < 50 {
                debug!("Sending command: \"{:?}\"", LossyStr(&cmd_buf));
            } else {
                debug!(
                    "Sending command with too long payload ({} bytes) to log!",
                    cmd_buf.len(),
                );
            }

            // Write the complete command, blocking on the serial writer.
            for c in cmd_buf {
                nb::block!(self.tx.write(c)).map_err(|_e| Error::Write)?;
            }
            nb::block!(self.tx.flush()).map_err(|_e| Error::Write)?;
            self.state = ClientState::AwaitingResponse;
        }

        // Commands without a response code complete right away.
        if !A::EXPECTS_RESPONSE_CODE {
            self.state = ClientState::Idle;
            return cmd.parse(Ok(&[])).map_err(nb::Error::Other);
        }

        match self.config.mode {
            Mode::Blocking => Ok(nb::block!(self.check_response(cmd))?),
            Mode::NonBlocking => self.check_response(cmd),
            Mode::Timeout => {
                // Arm the timer with this command's max timeout; expiry is
                // detected inside `check_response`.
                self.timer.start(A::MAX_TIMEOUT_MS.millis()).ok();
                Ok(nb::block!(self.check_response(cmd))?)
            }
        }
    }

    /// Reads at most one pending URC frame; `f` decides whether the URC is
    /// consumed (`true`) or left queued for a later peek (`false`).
    /// Unparseable frames are logged and dropped.
    fn peek_urc_with<URC: AtatUrc, F: FnOnce(URC::Response) -> bool>(&mut self, f: F) {
        if let Some(urc_grant) = self.urc_c.read() {
            // A URC counts as line activity: restart the cooldown timer.
            self.timer.start(self.config.cmd_cooldown.millis()).ok();
            if let Some(urc) = URC::parse(urc_grant.as_ref()) {
                if !f(urc) {
                    // Caller declined: return without releasing the grant so
                    // the frame stays available.
                    return;
                }
            } else {
                error!("Parsing URC FAILED: {:?}", LossyStr(urc_grant.as_ref()))
;
            }
            urc_grant.release();
        }
    }

    /// Polls the response queue for the reply to `cmd`. Returns
    /// `WouldBlock` while nothing has arrived; in `Mode::Timeout`,
    /// converts timer expiry into `Error::Timeout`.
    fn check_response<A: AtatCmd<LEN>, const LEN: usize>(
        &mut self,
        cmd: &A,
    ) -> nb::Result<A::Response, Error> {
        if let Some(mut res_grant) = self.res_c.read() {
            res_grant.auto_release(true);

            // A prompt frame is handed to the parser as an empty payload.
            let res = match Response::from(res_grant.as_ref()) {
                Response::Result(r) => r,
                Response::Prompt(_) => Ok(&[][..]),
            };

            // Either way the command cycle ends here: restart the cooldown
            // and return to Idle before propagating the parse outcome.
            return cmd
                .parse(res)
                .map_err(nb::Error::from)
                .and_then(|r| {
                    if self.state == ClientState::AwaitingResponse {
                        self.timer.start(self.config.cmd_cooldown.millis()).ok();
                        self.state = ClientState::Idle;
                        Ok(r)
                    } else {
                        Err(nb::Error::WouldBlock)
                    }
                })
                .map_err(|e| {
                    self.timer.start(self.config.cmd_cooldown.millis()).ok();
                    self.state = ClientState::Idle;
                    e
                });
        } else if self.config.mode == Mode::Timeout && self.timer.wait().is_ok() {
            // Timer expired with no response: give up on this command.
            self.state = ClientState::Idle;
            return Err(nb::Error::Other(Error::Timeout));
        }
        Err(nb::Error::WouldBlock)
    }

    /// Returns the configured operating mode.
    fn get_mode(&self) -> Mode {
        self.config.mode
    }

    /// Drains and discards all queued response and URC frames.
    fn reset(&mut self) {
        while let Some(grant) = self.res_c.read() {
            grant.release();
        }
        while let Some(grant) = self.urc_c.read() {
            grant.release();
        }
    }
}
/// Unit tests for the blocking/non-blocking/timeout client state machine,
/// driven through mock serial, clock, and bbqueue producer halves.
#[cfg(test)]
mod test {
    use super::*;
    use crate::{self as atat, InternalError};
    use crate::{
        atat_derive::{AtatCmd, AtatEnum, AtatResp, AtatUrc},
        clock::Clock,
    };
    use bbqueue::framed::FrameProducer;
    use bbqueue::BBBuffer;
    use heapless::String;
    // Shared sizing for the test response/URC queues.
    const TEST_RX_BUF_LEN: usize = 256;
    const TEST_RES_CAPACITY: usize = 3 * TEST_RX_BUF_LEN;
    const TEST_URC_CAPACITY: usize = 3 * TEST_RX_BUF_LEN;
    const TIMER_HZ: u32 = 1000;
    /// No-op clock mock: time is frozen at tick 0 and every wait completes
    /// immediately, so cooldowns/timeouts never actually block the tests.
    struct CdMock<const TIMER_HZ: u32>;
    impl<const TIMER_HZ: u32> Clock<TIMER_HZ> for CdMock<TIMER_HZ> {
        type Error = core::convert::Infallible;
        /// Return current time `Instant`
        fn now(&mut self) -> fugit::TimerInstantU32<TIMER_HZ> {
            fugit::TimerInstantU32::from_ticks(0)
        }
        /// Start countdown with a `duration`
        fn start(
            &mut self,
            _duration: fugit::TimerDurationU32<TIMER_HZ>,
        ) -> Result<(), Self::Error> {
            Ok(())
        }
        /// Stop timer
        fn cancel(&mut self) -> Result<(), Self::Error> {
            Ok(())
        }
        /// Wait until countdown `duration` set with the `fn start` has expired
        fn wait(&mut self) -> nb::Result<(), Self::Error> {
            Ok(())
        }
    }
    #[derive(Debug)]
    pub enum SerialError {}
    impl serial::Error for SerialError {
        fn kind(&self) -> serial::ErrorKind {
            serial::ErrorKind::Other
        }
    }
    /// Serial-write mock that records every written byte into a string so
    /// tests can assert the exact encoded command.
    struct TxMock {
        s: String<64>,
    }
    impl TxMock {
        fn new(s: String<64>) -> Self {
            TxMock { s }
        }
    }
    impl serial::ErrorType for TxMock {
        type Error = serial::ErrorKind;
    }
    impl serial::Write<u8> for TxMock {
        fn write(&mut self, c: u8) -> nb::Result<(), Self::Error> {
            self.s
                .push(c as char)
                .map_err(|_| nb::Error::Other(serial::ErrorKind::Other))
        }
        fn flush(&mut self) -> nb::Result<(), Self::Error> {
            Ok(())
        }
    }
    #[derive(Debug, PartialEq, Eq)]
    pub enum InnerError {
        Test,
    }
    impl core::str::FromStr for InnerError {
        // This error will always get mapped to `atat::Error::Parse`
        type Err = ();
        fn from_str(_s: &str) -> Result<Self, Self::Err> {
            Ok(Self::Test)
        }
    }
    #[derive(Debug, PartialEq, AtatCmd)]
    #[at_cmd("+CFUN", NoResponse, error = "InnerError")]
    struct ErrorTester {
        x: u8,
    }
    #[derive(Clone, AtatCmd)]
    #[at_cmd("+CFUN", NoResponse, timeout_ms = 180000)]
    pub struct SetModuleFunctionality {
        #[at_arg(position = 0)]
        pub fun: Functionality,
        #[at_arg(position = 1)]
        pub rst: Option<ResetMode>,
    }
    #[derive(Clone, AtatCmd)]
    #[at_cmd("+FUN", NoResponse, timeout_ms = 180000)]
    pub struct Test2Cmd {
        #[at_arg(position = 1)]
        pub fun: Functionality,
        #[at_arg(position = 0)]
        pub rst: Option<ResetMode>,
    }
    #[derive(Clone, AtatCmd)]
    #[at_cmd("+CUN", TestResponseString, timeout_ms = 180000)]
    pub struct TestRespStringCmd {
        #[at_arg(position = 0)]
        pub fun: Functionality,
        #[at_arg(position = 1)]
        pub rst: Option<ResetMode>,
    }
    #[derive(Clone, AtatCmd)]
    #[at_cmd("+CUN", TestResponseStringMixed, timeout_ms = 180000, attempts = 1)]
    pub struct TestRespStringMixCmd {
        #[at_arg(position = 1)]
        pub fun: Functionality,
        #[at_arg(position = 0)]
        pub rst: Option<ResetMode>,
    }
    // #[derive(Clone, AtatCmd)]
    // #[at_cmd("+CUN", TestResponseStringMixed, timeout_ms = 180000)]
    // pub struct TestUnnamedStruct(Functionality, Option<ResetMode>);
    #[derive(Clone, PartialEq, AtatEnum)]
    #[at_enum(u8)]
    pub enum Functionality {
        #[at_arg(value = 0)]
        Min,
        #[at_arg(value = 1)]
        Full,
        #[at_arg(value = 4)]
        APM,
        #[at_arg(value = 6)]
        DM,
    }
    #[derive(Clone, PartialEq, AtatEnum)]
    #[at_enum(u8)]
    pub enum ResetMode {
        #[at_arg(value = 0)]
        DontReset,
        #[at_arg(value = 1)]
        Reset,
    }
    #[derive(Clone, AtatResp, PartialEq, Debug)]
    pub struct NoResponse;
    #[derive(Clone, AtatResp, PartialEq, Debug)]
    pub struct TestResponseString {
        #[at_arg(position = 0)]
        pub socket: u8,
        #[at_arg(position = 1)]
        pub length: usize,
        #[at_arg(position = 2)]
        pub data: String<64>,
    }
    #[derive(Clone, AtatResp, PartialEq, Debug)]
    pub struct TestResponseStringMixed {
        #[at_arg(position = 1)]
        pub socket: u8,
        #[at_arg(position = 2)]
        pub length: usize,
        #[at_arg(position = 0)]
        pub data: String<64>,
    }
    #[derive(Clone, AtatResp)]
    pub struct MessageWaitingIndication {
        #[at_arg(position = 0)]
        pub status: u8,
        #[at_arg(position = 1)]
        pub code: u8,
    }
    #[derive(Clone, AtatUrc)]
    pub enum Urc {
        #[at_urc(b"+UMWI")]
        MessageWaitingIndication(MessageWaitingIndication),
    }
    // Builds a client plus the producer halves of the response/URC queues.
    // The queues live in `static mut` BBBuffers because the framed split
    // requires a 'static backing store.
    macro_rules! setup {
        ($config:expr) => {{
            static mut RES_Q: BBBuffer<TEST_RES_CAPACITY> = BBBuffer::new();
            let (res_p, res_c) = unsafe { RES_Q.try_split_framed().unwrap() };
            static mut URC_Q: BBBuffer<TEST_URC_CAPACITY> = BBBuffer::new();
            let (urc_p, urc_c) = unsafe { URC_Q.try_split_framed().unwrap() };
            let tx_mock = TxMock::new(String::new());
            let client: Client<
                TxMock,
                CdMock<TIMER_HZ>,
                TIMER_HZ,
                TEST_RES_CAPACITY,
                TEST_URC_CAPACITY,
            > = Client::new(tx_mock, res_c, urc_c, CdMock, $config);
            (client, res_p, urc_p)
        }};
    }
    /// Encodes `res` the way the ingress manager would (header byte plus
    /// optional payload) and commits it to the response queue.
    pub fn enqueue_res(
        producer: &mut FrameProducer<'static, TEST_RES_CAPACITY>,
        res: Result<&[u8], InternalError>,
    ) {
        let header: crate::error::Encoded = res.into();
        let mut grant = producer.grant(header.len()).unwrap();
        match header {
            crate::error::Encoded::Simple(h) => grant[..1].copy_from_slice(&[h]),
            crate::error::Encoded::Nested(h, b) => {
                grant[..1].copy_from_slice(&[h]);
                grant[1..2].copy_from_slice(&[b]);
            }
            crate::error::Encoded::Array(h, b) => {
                grant[..1].copy_from_slice(&[h]);
                grant[1..header.len()].copy_from_slice(&b);
            }
            crate::error::Encoded::Slice(h, b) => {
                grant[..1].copy_from_slice(&[h]);
                grant[1..header.len()].copy_from_slice(b);
            }
        };
        grant.commit(header.len());
    }
    #[test]
    fn error_response() {
        let (mut client, mut p, _) = setup!(Config::new(Mode::Blocking));
        let cmd = ErrorTester { x: 7 };
        enqueue_res(&mut p, Err(InternalError::Error));
        assert_eq!(client.state, ClientState::Idle);
        assert_eq!(nb::block!(client.send(&cmd)), Err(Error::Error));
        assert_eq!(client.state, ClientState::Idle);
    }
    #[test]
    fn generic_error_response() {
        let (mut client, mut p, _) = setup!(Config::new(Mode::Blocking));
        let cmd = SetModuleFunctionality {
            fun: Functionality::APM,
            rst: Some(ResetMode::DontReset),
        };
        enqueue_res(&mut p, Err(InternalError::Error));
        assert_eq!(client.state, ClientState::Idle);
        assert_eq!(nb::block!(client.send(&cmd)), Err(Error::Error));
        assert_eq!(client.state, ClientState::Idle);
    }
    #[test]
    fn string_sent() {
        let (mut client, mut p, _) = setup!(Config::new(Mode::Blocking));
        let cmd = SetModuleFunctionality {
            fun: Functionality::APM,
            rst: Some(ResetMode::DontReset),
        };
        enqueue_res(&mut p, Ok(&[]));
        assert_eq!(client.state, ClientState::Idle);
        assert_eq!(client.send(&cmd), Ok(NoResponse));
        assert_eq!(client.state, ClientState::Idle);
        assert_eq!(
            client.tx.s,
            String::<32>::from("AT+CFUN=4,0\r\n"),
            "Wrong encoding of string"
        );
        enqueue_res(&mut p, Ok(&[]));
        let cmd = Test2Cmd {
            fun: Functionality::DM,
            rst: Some(ResetMode::Reset),
        };
        assert_eq!(client.send(&cmd), Ok(NoResponse));
        assert_eq!(
            client.tx.s,
            String::<32>::from("AT+CFUN=4,0\r\nAT+FUN=1,6\r\n"),
            "Reverse order string did not match"
        );
    }
    #[test]
    fn blocking() {
        let (mut client, mut p, _) = setup!(Config::new(Mode::Blocking));
        let cmd = SetModuleFunctionality {
            fun: Functionality::APM,
            rst: Some(ResetMode::DontReset),
        };
        enqueue_res(&mut p, Ok(&[]));
        assert_eq!(client.state, ClientState::Idle);
        assert_eq!(client.send(&cmd), Ok(NoResponse));
        assert_eq!(client.state, ClientState::Idle);
        assert_eq!(client.tx.s, String::<32>::from("AT+CFUN=4,0\r\n"));
    }
    #[test]
    fn non_blocking() {
        let (mut client, mut p, _) = setup!(Config::new(Mode::NonBlocking));
        let cmd = SetModuleFunctionality {
            fun: Functionality::APM,
            rst: Some(ResetMode::DontReset),
        };
        assert_eq!(client.state, ClientState::Idle);
        assert_eq!(client.send(&cmd), Err(nb::Error::WouldBlock));
        assert_eq!(client.state, ClientState::AwaitingResponse);
        assert_eq!(client.check_response(&cmd), Err(nb::Error::WouldBlock));
        enqueue_res(&mut p, Ok(&[]));
        assert_eq!(client.state, ClientState::AwaitingResponse);
        assert_eq!(client.check_response(&cmd), Ok(NoResponse));
        assert_eq!(client.state, ClientState::Idle);
    }
    // Test response containing string
    #[test]
    fn response_string() {
        let (mut client, mut p, _) = setup!(Config::new(Mode::Blocking));
        // String last
        let cmd = TestRespStringCmd {
            fun: Functionality::APM,
            rst: Some(ResetMode::DontReset),
        };
        let response = b"+CUN: 22,16,\"0123456789012345\"";
        enqueue_res(&mut p, Ok(response));
        assert_eq!(client.state, ClientState::Idle);
        assert_eq!(
            client.send(&cmd),
            Ok(TestResponseString {
                socket: 22,
                length: 16,
                data: String::<64>::from("0123456789012345")
            })
        );
        assert_eq!(client.state, ClientState::Idle);
        // Mixed order for string
        let cmd = TestRespStringMixCmd {
            fun: Functionality::APM,
            rst: Some(ResetMode::DontReset),
        };
        let response = b"+CUN: \"0123456789012345\",22,16";
        enqueue_res(&mut p, Ok(response));
        assert_eq!(
            client.send(&cmd),
            Ok(TestResponseStringMixed {
                socket: 22,
                length: 16,
                data: String::<64>::from("0123456789012345")
            })
        );
        assert_eq!(client.state, ClientState::Idle);
    }
    #[test]
    fn urc() {
        let (mut client, _, mut urc_p) = setup!(Config::new(Mode::NonBlocking));
        let response = b"+UMWI: 0, 1";
        let mut grant = urc_p.grant(response.len()).unwrap();
        grant.copy_from_slice(response.as_ref());
        grant.commit(response.len());
        assert_eq!(client.state, ClientState::Idle);
        assert!(client.check_urc::<Urc>().is_some());
        assert_eq!(client.state, ClientState::Idle);
    }
    #[test]
    fn invalid_response() {
        let (mut client, mut p, _) = setup!(Config::new(Mode::Blocking));
        // String last
        let cmd = TestRespStringCmd {
            fun: Functionality::APM,
            rst: Some(ResetMode::DontReset),
        };
        let response = b"+CUN: 22,16,22";
        enqueue_res(&mut p, Ok(response));
        assert_eq!(client.state, ClientState::Idle);
        assert_eq!(client.send(&cmd), Err(nb::Error::Other(Error::Parse)));
        assert_eq!(client.state, ClientState::Idle);
    }
}
| true |
f77d8841c0cce0eee793808b4064175c40d4ee93
|
Rust
|
PartyLich/tetrs
|
/cell_engine/src/menu/mod.rs
|
UTF-8
| 1,625 | 3.328125 | 3 |
[] |
no_license
|
use std::fmt;
/// A selectable item in a `Menu`
#[derive(Debug, PartialEq)]
pub struct MenuItem<T>
where
    T: fmt::Debug + PartialEq + Clone,
{
    pub label: &'static str,
    pub event: T,
}
impl<T> MenuItem<T>
where
    T: fmt::Debug + PartialEq + Clone,
{
    /// Create a new `MenuItem` from a display label and the domain event it fires.
    pub fn new(label: &'static str, event: T) -> Self {
        MenuItem { label, event }
    }
}
/// A user menu for selecting...things
#[derive(Debug, PartialEq)]
pub struct Menu<T>
where
    T: fmt::Debug + PartialEq + Clone,
{
    // Index of the currently highlighted item; always < menu_items.len()
    // when the menu is non-empty.
    selection: usize,
    pub menu_items: Vec<MenuItem<T>>,
}
impl<T> Menu<T>
where
    T: fmt::Debug + PartialEq + Clone,
{
    /// Create a new `Menu` containing the provided `MenuItems`
    pub fn new(menu_items: Vec<MenuItem<T>>) -> Self {
        Menu {
            menu_items,
            selection: 0,
        }
    }
    /// Get the current selection index
    pub fn selection(&self) -> usize {
        self.selection
    }
    /// Shift the selection by `change`, wrapping around at both ends.
    ///
    /// Fixes two defects of the previous implementation:
    /// - Rust's `%` truncates toward zero, so decrementing from index 0
    ///   clamped at 0 while incrementing past the end wrapped to 0 — the
    ///   behavior was asymmetric. `rem_euclid` wraps consistently both ways.
    /// - An empty menu previously panicked on `% 0`; it is now a no-op.
    fn update_selection(&mut self, change: i32) {
        let len = self.menu_items.len();
        if len == 0 {
            return; // nothing to select
        }
        let next = (self.selection as i32 + change).rem_euclid(len as i32);
        self.selection = next as usize;
    }
    /// Increment the menu selection
    pub fn inc_selection(&mut self) {
        self.update_selection(1);
    }
    /// Decrement the menu selection
    pub fn dec_selection(&mut self) {
        self.update_selection(-1);
    }
    /// Return domain event for current selection.
    /// Panics if the menu is empty (pre-existing contract).
    pub fn select_item(&self) -> &T {
        &self.menu_items.get(self.selection).unwrap().event
    }
}
| true |
f93eafb93d01cdc848bffd7e87c047b8f46d0fad
|
Rust
|
miso24/rust_brainfuck
|
/src/parser.rs
|
UTF-8
| 3,463 | 3.359375 | 3 |
[] |
no_license
|
use crate::lexer::Token;
use std::collections::HashMap;
use std::fmt;
/// A single brainfuck VM instruction. Repeatable tokens are run-length
/// encoded with a `u8` operand (e.g. `Add(3)` for `+++`).
#[derive(Debug, PartialEq)]
pub enum Instruction {
    Add(u8),
    Sub(u8),
    AddPtr(u8),
    SubPtr(u8),
    Write,
    Read,
    /// Jump to the matching `[`; the operand is the bracket-pair id.
    JumpToLBracket(usize),
    /// Jump to the matching `]`; the operand is the bracket-pair id.
    JumpToRBracket(usize),
}
/// Errors produced while parsing the token stream.
#[derive(Debug)]
pub enum ParseError {
    /// A `[` without a matching `]` — also used for a `]` without a
    /// preceding `[` (the only variant available).
    UnclosedBracket,
}
impl fmt::Display for ParseError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            ParseError::UnclosedBracket => write!(f, "Unclosed bracket"),
        }
    }
}
impl std::error::Error for ParseError {}
/// A parsed brainfuck program: the instruction list plus lookup tables
/// mapping each bracket-pair id to the instruction index of its brackets.
pub struct Program {
    pub insts: Vec<Instruction>,
    // bracket-pair id -> instruction index of the `[`
    pub lbr_pos: HashMap<usize, usize>,
    // bracket-pair id -> instruction index of the `]`
    pub rbr_pos: HashMap<usize, usize>,
}
impl Program {
    /// Creates an empty program.
    fn new() -> Self {
        Self {
            insts: Vec::new(),
            lbr_pos: HashMap::new(),
            rbr_pos: HashMap::new(),
        }
    }
    /// Appends one instruction.
    fn add_instruction(&mut self, inst: Instruction) {
        self.insts.push(inst);
    }
    /// Records the instruction index of the `]` belonging to bracket pair `id`.
    fn add_rbracket(&mut self, id: usize, pos: usize) {
        self.rbr_pos.insert(id, pos);
    }
    /// Records the instruction index of the `[` belonging to bracket pair `id`.
    fn add_lbracket(&mut self, id: usize, pos: usize) {
        self.lbr_pos.insert(id, pos);
    }
}
/// Parses the lexer's token stream into a `Program`, run-length-collapsing
/// repeated `+ - < >` tokens and assigning each bracket pair a unique id.
///
/// NOTE(review): `Token::Greater` is mapped to `SubPtr` and `Token::Less` to
/// `AddPtr`, which inverts the conventional brainfuck semantics ('>' moves
/// the data pointer right). Verify that the interpreter compensates for this
/// mapping before changing it.
pub fn parse(tokens: &Vec<Token>) -> Result<Program, ParseError> {
    let mut pos = 0;
    // Index of the next emitted instruction; every loop iteration emits
    // exactly one instruction, so this advances once per iteration.
    let mut inst_pos = 0;
    // Stack of open bracket ids, used to pair each `]` with its `[`.
    let mut bracket_stack = Vec::new();
    let mut bracket_id = 0;
    let mut program = Program::new();
    // Collapse a run of identical tokens into one instruction with a count.
    macro_rules! append_opt_instruction {
        ($tok_kind:expr, $op_kind:expr) => {{
            let (num, _pos) = consume_many_tokens(&tokens, pos, $tok_kind);
            program.add_instruction($op_kind(num));
            pos = _pos;
        }};
    }
    macro_rules! append_instruction {
        ($op_kind:expr) => {{
            program.add_instruction($op_kind);
            pos += 1;
        }};
    }
    while pos < tokens.len() {
        match tokens[pos] {
            Token::Plus => append_opt_instruction!(Token::Plus, Instruction::Add),
            Token::Minus => append_opt_instruction!(Token::Minus, Instruction::Sub),
            Token::Greater => append_opt_instruction!(Token::Greater, Instruction::SubPtr),
            Token::Less => append_opt_instruction!(Token::Less, Instruction::AddPtr),
            Token::Period => append_instruction!(Instruction::Write),
            Token::Comma => append_instruction!(Instruction::Read),
            Token::LBracket => {
                program.add_lbracket(bracket_id, inst_pos);
                bracket_stack.push(bracket_id);
                program.add_instruction(Instruction::JumpToRBracket(bracket_id));
                bracket_id += 1;
                pos += 1;
            }
            Token::RBracket => {
                match bracket_stack.pop() {
                    Some(id) => {
                        program.add_rbracket(id, inst_pos);
                        program.add_instruction(Instruction::JumpToLBracket(id));
                    }
                    // A `]` with no open `[`; reuses UnclosedBracket as there
                    // is no dedicated variant.
                    None => return Err(ParseError::UnclosedBracket),
                }
                pos += 1;
            }
        }
        inst_pos += 1;
    }
    // Any ids left open mean a `[` was never closed.
    if !bracket_stack.is_empty() {
        Err(ParseError::UnclosedBracket)
    } else {
        Ok(program)
    }
}
/// Counts the consecutive occurrences of `t` starting at `pos`, saturating at
/// 255 so the run length always fits the `u8` operand of an instruction.
/// Returns the run length and the position just past the run.
fn consume_many_tokens(tokens: &Vec<Token>, pos: usize, t: Token) -> (u8, usize) {
    let run = tokens[pos..]
        .iter()
        .take_while(|tok| **tok == t)
        .take(255)
        .count();
    (run as u8, pos + run)
}
| true |
fd87dc6b376b5432d0e264c558bc6c51252a8893
|
Rust
|
m-kann0/atcoder-rust
|
/examples/tdpc_dice.rs
|
UTF-8
| 2,451 | 3.421875 | 3 |
[] |
no_license
|
use std::io::Read;
/// Reads the whole of stdin, solves the problem, and prints the answer.
fn main() {
    let mut input = String::new();
    std::io::stdin()
        .read_to_string(&mut input)
        .unwrap();
    println!("{}", solve(&input));
}
/// Probability that the product of `n` six-sided dice is divisible by `d`,
/// returned formatted as a decimal string ("0" when impossible).
///
/// Only the exponents of the primes 2, 3 and 5 matter, since every die face
/// factors as 2^a * 3^b * 5^c. `dp[a][b][c]` is the probability of having
/// accumulated exactly those exponents after the dice rolled so far.
fn solve(input: &str) -> String {
    let mut iter = input.split_whitespace();
    let n: usize = iter.next().unwrap().parse().unwrap();
    let mut d: usize = iter.next().unwrap().parse().unwrap();
    // Exponent deltas (for primes 2, 3, 5) contributed by faces 1..=6.
    let faces = [(0, 0, 0), (1, 0, 0), (0, 1, 0), (2, 0, 0), (0, 0, 1), (1, 1, 0)];
    let mut dp: Vec<Vec<Vec<f64>>> = vec![vec![vec![0.0; 105]; 105]; 105];
    dp[0][0][0] = 1.0;
    for _ in 0..n {
        let mut next: Vec<Vec<Vec<f64>>> = vec![vec![vec![0.0; 105]; 105]; 105];
        for a in 0..100 {
            for b in 0..100 {
                for c in 0..100 {
                    // Each face occurs with probability 1/6.
                    for &(da, db, dc) in faces.iter() {
                        next[a + da][b + db][c + dc] += dp[a][b][c] / 6.0;
                    }
                }
            }
        }
        dp = next;
    }
    // Remove every factor `p` from `*d` and return how many were removed.
    fn strip(d: &mut usize, p: usize) -> usize {
        let mut count = 0;
        while *d % p == 0 {
            *d /= p;
            count += 1;
        }
        count
    }
    let need2 = strip(&mut d, 2);
    let need3 = strip(&mut d, 3);
    let need5 = strip(&mut d, 5);
    // If d has any prime factor other than 2, 3 or 5 the product can never
    // be divisible by it.
    if d != 1 {
        return format!("{}", 0.0);
    }
    // Sum the probability mass of every state meeting the requirements.
    let mut ans = 0.0;
    for a in need2..100 {
        for b in need3..100 {
            for c in need5..100 {
                ans += dp[a][b][c];
            }
        }
    }
    ans.to_string()
}
#[test]
fn test() {
    // The expected values are 9-digit decimal approximations, but `solve`
    // returns the full shortest-round-trip float representation (e.g.
    // "0.41666666666666663"), so the original exact string comparison could
    // never succeed and the test always failed. Compare numerically within
    // the 1e-6 tolerance the judge allows instead.
    let cases: Vec<(&str, &str)> = vec![
        ("2 6", "0.416666667"),
        ("3 2", "0.875000000"),
    ];
    let mut all_ok = true;
    for (i, case) in cases.iter().enumerate() {
        print!("case {} : ", i);
        let expected = case.1;
        let actual = solve(case.0);
        let expected_val: f64 = expected.parse().unwrap();
        let actual_val: f64 = actual.parse().unwrap();
        if (expected_val - actual_val).abs() < 1e-6 {
            println!("OK");
        } else {
            println!("NG");
            println!("  Expected: {}", expected);
            println!("  Actual  : {}", actual);
            all_ok = false;
        }
    }
    assert_eq!(all_ok, true);
}
| true |
26bc7933b2d99ac29b84c06cff5a99ec32f362af
|
Rust
|
b-r-u/nori
|
/src/main.rs
|
UTF-8
| 11,647 | 2.671875 | 3 |
[
"Apache-2.0",
"MIT"
] |
permissive
|
use std::fs::File;
use anyhow::Context;
use clap::{Arg, ArgGroup, App, AppSettings, SubCommand};
use geomatic::Point4326;
use serde::Serialize;
mod bounding_box;
mod compare;
mod density;
mod geojson_writer;
mod network;
mod polyline;
mod poi;
mod route;
mod routing_machine;
mod sampling;
use bounding_box::BoundingBox;
use network::Network;
use route::RouteCollectionWriter;
use routing_machine::RoutingMachine;
use sampling::Sampling;
/// Builds the clap CLI (subcommands `sample`, `routes`, `filter-poi`) and
/// dispatches the parsed arguments to [`run`].
fn main() -> anyhow::Result<()> {
    let matches = App::new("nori - naive aggregated traffic estimation")
        .version("0.1")
        .author("Johannes Hofmann <[email protected]>")
        .about("Estimate average daily traffic on a road network by sampling a distribution of shortest paths.")
        .setting(AppSettings::ArgRequiredElseHelp)
        // `sample`: generate point pairs, route them, and record the results.
        .subcommand(SubCommand::with_name("sample")
            .about("Create pairs of points and store the shortest paths between them.")
            .arg(Arg::with_name("osrm")
                .long("osrm")
                .value_name("FILE")
                .help("Sets an input *.osrm file")
                .takes_value(true)
                .required(true)
            )
            .arg(Arg::with_name("routes")
                .long("routes")
                .value_name("FILE")
                .help("Sets the output file to store the routes")
                .takes_value(true)
                .required(true)
            )
            .arg(Arg::with_name("geojson")
                .long("geojson")
                .value_name("FILE")
                .help("Sets the output GeoJSON file to store the road network with traffic counts")
                .takes_value(true)
            )
            .arg(Arg::with_name("compare")
                .long("compare")
                .value_names(&["FILE", "GEOJSON PROPERTY"])
                .help("Specify the GeoJSON file and the name of the relevant numerical property that contains empirical traffic data")
                .takes_value(true)
                .number_of_values(2),
            )
            .arg(Arg::with_name("png")
                .long("png")
                .value_name("FILE")
                .help("Sets the output PNG file to store a rendering of the road network with traffic counts")
                .takes_value(true)
            )
            .arg(Arg::with_name("number")
                .long("number")
                .short("n")
                .value_name("INT")
                .help("Sets the number of samples")
                .takes_value(true)
                .required(true)
                .validator(is_number::<u32>)
            )
            .arg(Arg::with_name("bounds")
                .long("bounds")
                .value_name("sw.lat sw.lon ne.lat ne.lon")
                .help("Sets the bounding box. Input values are the two coordinate pairs for the
south-west and the north-east corner of the bounding box")
                .takes_value(true)
                .number_of_values(4)
                .validator(is_number::<f64>)
            )
            .arg(Arg::with_name("max_dist")
                .long("max-dist")
                .value_name("METERS")
                .help("Sets the maximum distance between source and destination points in
meters.")
                .takes_value(true)
                .validator(is_number::<f64>)
            )
            .arg(Arg::with_name("uniform2d")
                .long("uniform2d")
                .help("Sample the 2D plane uniformly.")
                .requires_all(&["bounds", "max_dist"])
            )
            .arg(Arg::with_name("weighted")
                .long("weighted")
                .value_name("FILE.csv")
                .help("sample from a list of weighted points from the given CSV file.")
                .takes_value(true)
                .requires_all(&["max_dist"])
            )
            .arg(Arg::with_name("complex")
                .long("complex")
                .help("Use complex sampling that combines different density inputs.")
                .takes_value(false)
                .requires_all(&["population", "pois"])
            )
            .arg(Arg::with_name("population")
                .long("population")
                .value_name("FILE.csv")
                .help("Specify population density as weighted points from the given CSV file.")
                .takes_value(true)
            )
            .arg(Arg::with_name("pois")
                .long("pois")
                .value_name("FILE.csv")
                .help("Specify POI density as weighted points from the given CSV file.")
                .takes_value(true)
            )
            // Exactly one sampling strategy must be chosen.
            .group(ArgGroup::with_name("sampling")
                .args(&["uniform2d", "weighted", "complex"])
                .required(true))
        )
        // `routes`: inspect a previously written *.routes file.
        .subcommand(SubCommand::with_name("routes")
            .about("Read *.routes files.")
            .arg(Arg::with_name("input")
                .long("input")
                .value_name("FILE")
                .help("Sets an input *.routes file")
                .takes_value(true)
                .required(true)
            )
        )
        // `filter-poi`: extract clustered POIs from raw OSM data.
        .subcommand(SubCommand::with_name("filter-poi")
            .about("Read *.osm.pbf file with OpenStreetMap data to filter POIs and write to CSV.")
            .arg(Arg::with_name("input")
                .long("input")
                .value_name("FILE")
                .help("Sets an input *.osm.pbf file")
                .takes_value(true)
                .required(true)
            )
            .arg(Arg::with_name("output")
                .long("output")
                .value_name("FILE")
                .help("Sets an output *.csv file that includes filtered and clustered POI")
                .takes_value(true)
                .required(true)
            )
        )
        .get_matches();
    run(matches)
}
/// Executes the subcommand selected on the command line.
fn run(matches: clap::ArgMatches) -> anyhow::Result<()> {
    if let Some(matches) = matches.subcommand_matches("sample") {
        // `unwrap`/`parse().unwrap()` are safe here: the args are declared
        // required and validated as numbers by clap in `main`.
        let number_of_samples = matches.value_of("number").unwrap().parse::<u32>().unwrap();
        let osrm_path = matches.value_of("osrm").unwrap();
        let routes_path = matches.value_of("routes").unwrap();
        let bounds = if matches.is_present("bounds") {
            // clap guarantees exactly four f64-validated values
            // (number_of_values(4)); order is sw.lat sw.lon ne.lat ne.lon.
            let aabb: Vec<_> = matches.values_of("bounds").unwrap()
                .map(|s| s.parse::<f64>().unwrap()).collect();
            assert_eq!(aabb.len(), 4);
            Some(BoundingBox::new(
                Point4326::new(aabb[0], aabb[1]),
                Point4326::new(aabb[2], aabb[3]))
            )
        } else {
            None
        };
        let mut machine = RoutingMachine::new();
        machine.test_connection()
            .context("Failed to connect to routing server. Start osrm-routed like this:\
            \n    osrm-routed --algorithm mld an_example_file.osrm")?;
        println!("Read *.osrm file {:?}", osrm_path);
        let mut net = Network::from_path(osrm_path)
            .with_context(|| format!(
                "Failed to read *.osrm file {:?}", osrm_path
            ))?;
        let mut writer = RouteCollectionWriter::new(
            routes_path,
            osrm_path,
            "sample",
        )?;
        // Exactly one of these branches runs; clap enforces the "sampling"
        // group in `main`.
        if matches.is_present("uniform2d") {
            let max_dist: f64 = matches.value_of("max_dist").unwrap().parse::<f64>()?;
            let mut uni_sample = sampling::Uniform2D::new(bounds.unwrap(), max_dist);
            sample(&mut uni_sample, number_of_samples, &mut machine, &mut writer, &mut net)?;
        } else if matches.is_present("weighted") {
            let max_dist: f64 = matches.value_of("max_dist").unwrap().parse::<f64>()?;
            let csv_path = matches.value_of("weighted").unwrap();
            let mut sampl = sampling::Weighted::from_csv(csv_path, bounds, max_dist)?;
            sample(&mut sampl, number_of_samples, &mut machine, &mut writer, &mut net)?;
        } else if matches.is_present("complex") {
            let max_dist: f64 = matches.value_of("max_dist").unwrap().parse::<f64>()?;
            let population_csv = matches.value_of("population").unwrap();
            let poi_csv = matches.value_of("pois").unwrap();
            let mut sampl = sampling::Complex::from_csv(population_csv, poi_csv, bounds, max_dist)?;
            sample(&mut sampl, number_of_samples, &mut machine, &mut writer, &mut net)?;
        }
        writer.finish()?;
        // Optional outputs: GeoJSON dump, PNG rendering, comparison against
        // empirical counts.
        if let Some(geojson_path) = matches.value_of("geojson") {
            net.write_to_geojson(geojson_path)?;
        }
        if let Some(png_path) = matches.value_of("png") {
            if let Some(bounds) = bounds {
                net.write_png(png_path, bounds, 2048, 2048)?;
            } else {
                net.write_png(png_path, net.get_bounds(), 2048, 2048)?;
            }
        }
        if let Some(mut compare_args) = matches.values_of("compare") {
            let geojson_path = compare_args.next().unwrap();
            let number_property = compare_args.next().unwrap();
            compare::compare(&net, geojson_path, number_property)?;
        }
    } else if let Some(matches) = matches.subcommand_matches("routes") {
        let routes_path = matches.value_of("input").unwrap();
        let reader = route::RouteCollectionReader::new(&routes_path)
            .with_context(|| format!(
                "Failed to read the routes file {:?}", routes_path
            ))?;
        #[derive(Serialize)]
        struct CsvRecord {
            num_nodes: usize,
            distance: f64,
            distance_bee_line: f64,
        }
        let mut csv_writer = csv::Writer::from_path("distances.csv")?;
        let net = Network::from_path(&reader.header().osrm_file)?;
        for (i, route) in reader.enumerate() {
            let route = route?;
            csv_writer.serialize(CsvRecord {
                num_nodes: route.node_ids.len(),
                distance: route.distance,
                distance_bee_line: route.distance_bee_line(),
            })?;
            // Dump suspicious detours (route >10x longer than the bee line)
            // as individual GeoJSON files for inspection.
            let dist = route.distance;
            let dist_bl = route.distance_bee_line();
            if dist_bl * 10.0 < dist && dist_bl > 105.0 {
                route.write_to_geojson(format!("long_route_{}.geojson", i), &net)?;
            }
        }
        csv_writer.flush()?;
    } else if let Some(matches) = matches.subcommand_matches("filter-poi") {
        let input = matches.value_of("input").unwrap();
        let output = matches.value_of("output").unwrap();
        poi::filter_poi(input, output)?;
    }
    Ok(())
}
/// clap validator: accepts the argument when it parses as `T`.
/// The error message (shown to the user by clap) now names the offending
/// value instead of the bare, clippy-flagged `format!("need a number")`.
fn is_number<T: std::str::FromStr>(s: String) -> Result<(), String> {
    s.parse::<T>()
        .map(|_| ())
        .map_err(|_| format!("`{}` is not a number", s))
}
/// Draws `number_of_samples` source/destination pairs from `sampl`, routes
/// each pair with the routing machine, appends the route to `writer`, and
/// bumps the traversed edges' counters in `net`.
fn sample<S: Sampling>(
    sampl: &mut S,
    number_of_samples: u32,
    machine: &mut RoutingMachine,
    writer: &mut RouteCollectionWriter<File>,
    net: &mut Network,
) -> anyhow::Result<()>
{
    for i in 0..number_of_samples {
        let a;
        let b;
        // Rejection sampling: redraw the source until the sampler yields a
        // destination for it. NOTE(review): this loops forever if
        // `gen_destination` can return None for every source — confirm the
        // samplers guarantee progress.
        loop {
            let source = sampl.gen_source();
            if let Some(destination) = sampl.gen_destination(source) {
                a = source;
                b = destination;
                break;
            }
        }
        // Progress line: percentage, 1-based sample index, endpoints.
        println!("{:.2}%, {}: {} {}", (100.0 * (i + 1) as f64) / (number_of_samples as f64), i + 1, a, b);
        let res = machine.find_route(a, b)?;
        let res = writer.write_route(res)?;
        net.bump_edges(&res.node_ids);
    }
    Ok(())
}
| true |
a0ad2b870879e1529fd046682701d6dc053bf7da
|
Rust
|
isgasho/telefork
|
/examples/yoyo_client_raw.rs
|
UTF-8
| 1,913 | 3.359375 | 3 |
[] |
no_license
|
use telefork::{telefork, telepad, wait_for_exit, TeleforkLocation};
use std::net::TcpStream;
use std::os::unix::io::FromRawFd;
/// Demo client: teleforks itself onto a remote teleserver, mutates state
/// there, teleforks the resulting process back, and waits for it to exit.
fn main() {
    let args: Vec<String> = std::env::args().collect();
    let destination = args.get(1).expect("expected arg: address of teleserver");
    println!("Hello, I'm a process that's about to telefork myself onto a server!");
    let mut foo = 103;
    println!("I have a local variable that says foo={}", foo);
    let mut stream = TcpStream::connect(destination).unwrap();
    // After this call two processes exist: the original (Parent) and a copy
    // running on the server (Child, which receives a fresh fd to the stream).
    let loc = telefork(&mut stream).unwrap();
    match loc {
        TeleforkLocation::Child(fd) => {
            // SAFETY-adjacent note: `fd` is handed to us by telefork; wrapping
            // it gives the child its own TcpStream over the same connection.
            let mut stream = unsafe { TcpStream::from_raw_fd(fd) };
            println!("I'm a process that teleported itself to a different computer");
            println!(
                "My local variable says foo={} and I'm going to exit with that status",
                foo
            );
            // Do some work on the remote server
            foo = 42;
            // Telefork back to the originating machine.
            let loc = telefork(&mut stream).unwrap();
            std::mem::forget(stream); // parent drops stream not us
            match loc {
                TeleforkLocation::Child(_) => {
                    println!("Arrived back on client machine with foo={}", foo);
                }
                TeleforkLocation::Parent => println!("teleforked result process back to client!"),
            };
            std::process::exit(0);
        }
        TeleforkLocation::Parent => println!("I succesfully teleforked myself!"),
    };
    // Receive the process teleforked back from the server and reap it.
    let child = telepad(&mut stream, 0).unwrap();
    println!("got child back with pid = {}", child);
    let status = wait_for_exit(child).unwrap();
    println!("child exited with status = {}", status);
    // let mut got_back = String::new();
    // let bytes_read = stream.read_to_string(&mut got_back).unwrap();
    // println!("read {} bytes: {:?}", bytes_read, got_back);
}
| true |
4ea3393ea22cf5a8fea6efc46905d4ca865c7d8f
|
Rust
|
tatakahashi35/atcoder
|
/abs/src/bin/abc049c.rs
|
UTF-8
| 819 | 2.953125 | 3 |
[] |
no_license
|
use proconio::input;
/// Decides whether `s` can be segmented into the words
/// dream / dreamer / erase / eraser, printing YES or NO.
fn main() {
    input! {
        s: String,
    }
    // Longest-first order (identical to the original chain of checks) so the
    // greedy forward scan never commits to a short word when a longer one is
    // required, e.g. "dreamerase" = dream + erase.
    let words = ["dreameraser", "dreamerase", "dreamer", "dream", "eraser", "erase"];
    let mut rest: &str = &s;
    let mut answer = true;
    while !rest.is_empty() {
        match words.iter().find_map(|w| rest.strip_prefix(*w)) {
            Some(tail) => rest = tail,
            None => {
                answer = false;
                break;
            }
        }
    }
    println!("{}", if answer { "YES" } else { "NO" });
}
| true |
0d19f90484eecdc027559504b2be5ffa996d96d7
|
Rust
|
svmk/fund-watch-bot
|
/src/market/market_data/model/actual_chart_period.rs
|
UTF-8
| 1,351 | 2.828125 | 3 |
[] |
no_license
|
use crate::{app::model::datetime::DateTime, market::market_data::model::chart_period::ChartPeriod};
/// Cached chart period; `None` means no period has been fetched/cached yet
/// (see `new_uncached`), so nothing can be considered actual.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ActualChartPeriod {
    #[serde(rename = "period")]
    period: Option<ChartPeriod>,
}
impl ActualChartPeriod {
pub fn new_uncached() -> ActualChartPeriod {
return ActualChartPeriod {
period: None,
}
}
pub fn new(chart_period: ChartPeriod) -> ActualChartPeriod {
return ActualChartPeriod {
period: Some(chart_period),
}
}
pub fn get_period(&self) -> Option<&ChartPeriod> {
return self.period.as_ref();
}
pub fn is_actual(&self, expected_period: &ChartPeriod) -> bool {
match self.period {
Some(ref period) => {
return period.contains(expected_period);
},
None => {
return false;
},
}
}
pub fn is_actual_datetime(&self, datetime: &DateTime) -> bool {
match self.period {
Some(ref period) => {
return period.contains_datetime(datetime);
},
None => {
return false;
},
}
}
pub fn update_chart_period(&mut self, chart_period: ChartPeriod) {
self.period = Some(chart_period);
}
}
| true |
dbce77545c1ce2230e52b6431ddac22597da0015
|
Rust
|
darshanparajuli/AdventOfCode2020
|
/src/bin/day1.rs
|
UTF-8
| 880 | 3.09375 | 3 |
[] |
no_license
|
use aoc_2020::read_input;
/// Parses each input line as an unsigned integer and runs both puzzle parts.
fn main() {
    let input: Vec<u32> = read_input()
        .iter()
        .map(|line| line.parse().unwrap())
        .collect();
    part1(&input);
    part2(&input);
}
/// Prints the product of the first pair of distinct entries summing to 2020.
///
/// Fix: the original excluded candidates by VALUE (`filter(|e| *e != i)`),
/// so a valid pair of two equal values (e.g. 1010 + 1010 from two separate
/// entries) could never be found. Distinctness is now by index.
fn part1(input: &[u32]) {
    let answer = find_pair_product(input, 2020).unwrap_or(0);
    println!("part 1: {}", answer);
}

/// Returns the product of the first pair of entries at distinct indices whose
/// sum equals `target`, or `None` if no such pair exists.
fn find_pair_product(input: &[u32], target: u32) -> Option<u32> {
    for (i, &a) in input.iter().enumerate() {
        for &b in &input[i + 1..] {
            if a + b == target {
                return Some(a * b);
            }
        }
    }
    None
}
/// Prints the product of the first triple of distinct entries summing to 2020.
///
/// Fix: the original's innermost filter only excluded `j`'s VALUE, so `k`
/// could be the same element as `i` — e.g. for input `[1005, 10]` it
/// "found" 1005 + 10 + 1005 = 2020 and printed a bogus product. Value-based
/// filtering also broke triples containing repeated values. Distinctness is
/// now enforced by index.
fn part2(input: &[u32]) {
    let answer = find_triple_product(input, 2020).unwrap_or(0);
    println!("part 2: {}", answer);
}

/// Returns the product of the first triple of entries at pairwise-distinct
/// indices whose sum equals `target`, or `None` if no such triple exists.
fn find_triple_product(input: &[u32], target: u32) -> Option<u32> {
    for i in 0..input.len() {
        for j in i + 1..input.len() {
            for k in j + 1..input.len() {
                if input[i] + input[j] + input[k] == target {
                    return Some(input[i] * input[j] * input[k]);
                }
            }
        }
    }
    None
}
| true |
cfbde0d718d53a9c6d2251e8d010649d81bbec79
|
Rust
|
shadow/shadow
|
/src/main/utility/synchronization/count_down_latch.rs
|
UTF-8
| 12,128 | 3.40625 | 3 |
[
"LicenseRef-scancode-public-domain"
] |
permissive
|
use std::sync::{Arc, Condvar, Mutex};
/// A latch counter.
///
/// If a counter is cloned, it will inherit the counter's state for the current generation. For
/// example if a counter is cloned after it has already counted down, then the new counter will also
/// be treated as if it had already counted down in the current generation. If a counter is cloned
/// before it has counted down, then the new counter will also need to count down in the current
/// generation.
#[derive(Debug)]
pub struct LatchCounter {
    inner: Arc<LatchInner>,
    /// An ID for this counter's count-down round. Wraps on overflow and is
    /// only ever compared for equality with the latch's generation.
    generation: usize,
}
/// A latch waiter.
///
/// If a waiter is cloned, it will inherit the waiter's state for the current generation. For
/// example if a waiter is cloned after it has already waited, then the new waiter will also be
/// treated as if it had already waited in the current generation. If a waiter is cloned before it
/// has waited, then the new waiter will also need to wait in the current generation.
#[derive(Debug)]
pub struct LatchWaiter {
    inner: Arc<LatchInner>,
    /// An ID for this waiter's count-down round. Wraps on overflow and is
    /// only ever compared for equality with the latch's generation.
    generation: usize,
}
/// State shared by all counters and waiters: the mutex-protected counts plus
/// the condvar that waiters block on.
#[derive(Debug)]
struct LatchInner {
    lock: Mutex<LatchState>,
    cond: Condvar,
}
#[derive(Debug)]
struct LatchState {
    /// The current latch "round". Wraps on overflow (equality-compared only).
    generation: usize,
    /// Number of counters remaining.
    counters: usize,
    /// Number of waiters remaining.
    waiters: usize,
    /// Total number of counters.
    total_counters: usize,
    /// Total number of waiters.
    total_waiters: usize,
}
/// Build a latch counter and waiter. The counter and waiter can be cloned to create new counters
/// and waiters.
pub fn build_count_down_latch() -> (LatchCounter, LatchWaiter) {
    // One counter and one waiter exist at generation 0; cloning adjusts the totals.
    let state = LatchState {
        generation: 0,
        counters: 1,
        waiters: 1,
        total_counters: 1,
        total_waiters: 1,
    };
    let inner = Arc::new(LatchInner {
        lock: Mutex::new(state),
        cond: Condvar::new(),
    });
    (
        LatchCounter {
            inner: Arc::clone(&inner),
            generation: 0,
        },
        LatchWaiter {
            inner,
            generation: 0,
        },
    )
}
impl LatchState {
    /// Reset per-generation counts from the totals and move to the next generation.
    /// Must only run once both remaining counts have reached zero.
    pub fn advance_generation(&mut self) {
        debug_assert_eq!(self.counters, 0);
        debug_assert_eq!(self.waiters, 0);
        self.counters = self.total_counters;
        self.waiters = self.total_waiters;
        // wrapping_add: generations are only compared for equality, so wraparound is harmless
        self.generation = self.generation.wrapping_add(1);
    }
}
impl LatchCounter {
    /// Decrement the latch count and wake the waiters if the count reaches 0. This must not be
    /// called more than once per generation (must not be called again until all of the waiters have
    /// returned from their [`LatchWaiter::wait()`] calls), otherwise it will panic.
    pub fn count_down(&mut self) {
        let counters;
        {
            let mut lock = self.inner.lock.lock().unwrap();
            if self.generation != lock.generation {
                let latch_gen = lock.generation;
                // drop the lock before panicking so the mutex is not poisoned
                std::mem::drop(lock);
                panic!(
                    "Counter generation does not match latch generation ({} != {})",
                    self.generation, latch_gen
                );
            }
            // checked_sub panics if this counter round was already completed
            lock.counters = lock.counters.checked_sub(1).unwrap();
            counters = lock.counters;
        }
        // if this is the last counter, notify the waiters
        // (notification happens outside the lock scope above)
        if counters == 0 {
            self.inner.cond.notify_all();
        }
        self.generation = self.generation.wrapping_add(1);
    }
}
impl LatchWaiter {
    /// Wait for the latch count to reach 0. If the latch count has already reached 0 for the
    /// current generation, this will return immediately.
    pub fn wait(&mut self) {
        {
            let lock = self.inner.lock.lock().unwrap();
            let mut lock = self
                .inner
                .cond
                // wait until we're in the active generation and all counters have counted down
                .wait_while(lock, |x| self.generation != x.generation || x.counters > 0)
                .unwrap();
            // checked_sub panics if this waiter round was already completed
            lock.waiters = lock.waiters.checked_sub(1).unwrap();
            // if this is the last waiter (and we already know that there are no more counters), start
            // the next generation
            if lock.waiters == 0 {
                lock.advance_generation();
            }
        }
        self.generation = self.generation.wrapping_add(1);
    }
}
impl Clone for LatchCounter {
    /// The clone shares the same latch and inherits this counter's per-generation state.
    fn clone(&self) -> Self {
        let mut lock = self.inner.lock.lock().unwrap();
        lock.total_counters = lock.total_counters.checked_add(1).unwrap();
        // if we haven't already counted down during the current generation
        if self.generation == lock.generation {
            lock.counters = lock.counters.checked_add(1).unwrap();
        }
        LatchCounter {
            inner: Arc::clone(&self.inner),
            generation: self.generation,
        }
    }
}
impl Clone for LatchWaiter {
    /// The clone shares the same latch and inherits this waiter's per-generation state.
    fn clone(&self) -> Self {
        let mut lock = self.inner.lock.lock().unwrap();
        lock.total_waiters = lock.total_waiters.checked_add(1).unwrap();
        // if we haven't already waited during the current generation
        if self.generation == lock.generation {
            lock.waiters = lock.waiters.checked_add(1).unwrap();
        }
        LatchWaiter {
            inner: Arc::clone(&self.inner),
            generation: self.generation,
        }
    }
}
impl std::ops::Drop for LatchCounter {
    /// Removes this counter from the latch; if it was the last outstanding counter
    /// of the current generation, the waiters are woken as if it had counted down.
    fn drop(&mut self) {
        let mut lock = self.inner.lock.lock().unwrap();
        lock.total_counters = lock.total_counters.checked_sub(1).unwrap();
        // if we haven't already counted down during the current generation
        if self.generation == lock.generation {
            lock.counters = lock.counters.checked_sub(1).unwrap();
        }
        // if this is the last counter, notify the waiters
        if lock.counters == 0 {
            self.inner.cond.notify_all();
        }
    }
}
impl std::ops::Drop for LatchWaiter {
    /// Removes this waiter from the latch; if it was the last outstanding waiter
    /// and all counters have counted down, the next generation is started.
    fn drop(&mut self) {
        let mut lock = self.inner.lock.lock().unwrap();
        lock.total_waiters = lock.total_waiters.checked_sub(1).unwrap();
        // if we haven't already waited during the current generation
        if self.generation == lock.generation {
            lock.waiters = lock.waiters.checked_sub(1).unwrap();
        }
        // if this is the last waiter and there are no more counters, start the next generation
        if lock.waiters == 0 && lock.counters == 0 {
            lock.advance_generation();
        }
    }
}
// Tests cover cloning before/after count-down, double count-down panics, and
// alternating multi-threaded access guarded by the latch.
#[cfg(test)]
mod tests {
    use std::time::Duration;
    use atomic_refcell::AtomicRefCell;
    use rand::{Rng, SeedableRng};
    use super::*;
    #[test]
    fn test_clone() {
        let (mut counter, mut waiter) = build_count_down_latch();
        let (mut counter_clone, mut waiter_clone) = (counter.clone(), waiter.clone());
        counter.count_down();
        counter_clone.count_down();
        waiter.wait();
        waiter_clone.wait();
    }
    #[test]
    fn test_clone_before_countdown() {
        let (mut counter, mut waiter) = build_count_down_latch();
        // the cloned counter will also need to count down for the current generation
        let mut counter_clone = counter.clone();
        counter.count_down();
        counter_clone.count_down();
        waiter.wait();
        counter.count_down();
        counter_clone.count_down();
        waiter.wait();
        let (mut counter, mut waiter) = build_count_down_latch();
        // the cloned waiter will also need to wait for the current generation
        let mut waiter_clone = waiter.clone();
        counter.count_down();
        waiter.wait();
        waiter_clone.wait();
        counter.count_down();
        waiter.wait();
        waiter_clone.wait();
    }
    #[test]
    fn test_clone_after_countdown() {
        let (mut counter, mut waiter) = build_count_down_latch();
        counter.count_down();
        // the cloned counter will also be considered "counted down" for the current generation
        let mut counter_clone = counter.clone();
        // if the cloned counter did count down here, it would panic
        waiter.wait();
        counter.count_down();
        counter_clone.count_down();
        waiter.wait();
        let (mut counter, mut waiter) = build_count_down_latch();
        let mut waiter_clone = waiter.clone();
        counter.count_down();
        waiter.wait();
        // the cloned waiter will also be considered "waited" for the current generation
        let mut waiter_clone_2 = waiter.clone();
        // if the cloned waiter did wait here, it would be waiting for the next generation
        waiter_clone.wait();
        counter.count_down();
        waiter.wait();
        waiter_clone.wait();
        waiter_clone_2.wait();
    }
    #[test]
    #[should_panic]
    fn test_double_count() {
        let (mut counter, mut _waiter) = build_count_down_latch();
        counter.count_down();
        counter.count_down();
    }
    #[test]
    fn test_single_thread() {
        let (mut counter, mut waiter) = build_count_down_latch();
        counter.count_down();
        waiter.wait();
        counter.count_down();
        waiter.wait();
        counter.count_down();
        waiter.wait();
        let mut waiter_clone = waiter.clone();
        counter.count_down();
        waiter.wait();
        waiter_clone.wait();
        counter.count_down();
        waiter.wait();
        waiter_clone.wait();
    }
    #[test]
    fn test_multi_thread() {
        let (mut exclusive_counter, mut exclusive_waiter) = build_count_down_latch();
        let (mut shared_counter, mut shared_waiter) = build_count_down_latch();
        let repeat = 30;
        let lock = Arc::new(AtomicRefCell::new(()));
        let lock_clone = Arc::clone(&lock);
        // The goal of this test is to make sure that the new threads alternate with the main thread
        // to access the atomic refcell. The new threads each hold on to a shared borrow of the
        // atomic refcell for ~5 ms, then the main thread gets an exclusive borrow for ~5 ms,
        // repeating. If these time slices ever overlap, then either a shared or exclusive borrow
        // will cause a panic and the test will fail. Randomness is added to the sleeps to vary the
        // order in which threads wait and count down, to try to cover more edge cases.
        let thread_fn = move |seed| {
            let mut rng = rand::rngs::StdRng::seed_from_u64(seed);
            for _ in 0..repeat {
                // wait for the main thread to be done with its exclusive borrow
                std::thread::sleep(Duration::from_millis(5));
                exclusive_waiter.wait();
                {
                    // a shared borrow for a duration in the range of 0-10 ms
                    let _x = lock_clone.borrow();
                    std::thread::sleep(Duration::from_millis(rng.gen_range(0..10)));
                }
                shared_counter.count_down();
            }
        };
        // start 5 threads
        let handles: Vec<_> = (0..5)
            .map(|seed| {
                let mut f = thread_fn.clone();
                std::thread::spawn(move || f(seed))
            })
            .collect();
        std::mem::drop(thread_fn);
        let mut rng = rand::rngs::StdRng::seed_from_u64(100);
        for _ in 0..repeat {
            {
                // an exclusive borrow for a duration in the range of 0-10 ms
                let _x = lock.borrow_mut();
                std::thread::sleep(Duration::from_millis(rng.gen_range(0..10)));
            }
            exclusive_counter.count_down();
            // wait for the other threads to be done with their shared borrow
            std::thread::sleep(Duration::from_millis(5));
            shared_waiter.wait();
        }
        for h in handles {
            h.join().unwrap();
        }
    }
}
| true |
28666df7d43a7578b7bac1722959cd5232222940
|
Rust
|
Josh7GAS/Oficina
|
/Rust/test/web_crawler/src/main.rs
|
UTF-8
| 1,749 | 3.09375 | 3 |
[] |
no_license
|
use html5ever::tokenizer::{
BufferQueue,
Tag,
TagKind,
TagToken,
Token,
TokenSink,
TokenSinkResult,
Tokenizer,
TokenizerOpts,
};
use std::borrow::Borrow;
use url::{ ParseError, Url};
/// Accumulates the `href` targets found while tokenizing an HTML page.
#[derive(Default, Debug)]
struct LinkQueue{
    links: Vec<String>,
}
impl TokenSink for &mut LinkQueue{
type Handle = ();
//<a href="link">some text</a>
fn process_token(
&mut self,
token:Token,
line_number: u64
) -> match token {
TagToken(
ref tag @ Tag {
kind: TagKind::StartTag,
..
},
) => {if tag.name.as_ref() = "head"{
for attributes in tag.attrs.iter(){
if attributes.name.local.as_ref() = "href" {
let url_str: &[u8] = attribute.value.borrow();
self.links
.push(String::from_utf8_lossy(url_str).into_owned());
}
}
}
}
_ => {}
}TokenSinkResult<Self::Handle>{
match token {
}
TokenSinkResult::Continue
}
}
/// Tokenize `page` and return every hyperlink as an absolute `Url`,
/// resolving relative links against the domain of `url`.
///
/// # Panics
/// Panics if a link is present but cannot be parsed or joined.
pub fn get_links(url: &Url, page: String) -> Vec<Url> {
    // Reduce the page URL to its origin so relative links resolve from the
    // domain root. The original called `set_path(None)`, which does not
    // typecheck (`set_path` takes `&str`); `set_query(None)` is presumably
    // what was intended — TODO confirm.
    let mut domain_url = url.clone();
    domain_url.set_path("");
    domain_url.set_query(None);

    let mut queue = LinkQueue::default();
    let mut tokenizer = Tokenizer::new(&mut queue, TokenizerOpts::default());
    let mut buffer = BufferQueue::new();
    buffer.push_back(page.into());
    let _ = tokenizer.feed(&mut buffer);

    queue
        .links
        .iter()
        .map(|link| match Url::parse(link) {
            // Relative links are joined onto the page's domain.
            // (Variant name fixed: `RelativeUrlWithoutBase` was misspelled.)
            Err(ParseError::RelativeUrlWithoutBase) => domain_url.join(link).unwrap(),
            Err(_) => panic!("Malformed link found: {}", link),
            Ok(url) => url,
        })
        .collect()
}
/// Entry point; crawling is not wired up yet — this only prints a greeting.
fn main(){
    println!("Hello World");
}
| true |
a0cdacc0595aee426e1edf768473c4e75c435dba
|
Rust
|
jblindsay/whitebox-tools
|
/whitebox-common/src/algorithms/delaunay_triangulation.rs
|
UTF-8
| 18,923 | 3.6875 | 4 |
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
/*!
The following code has been modified from the original delaunator-rs project:
https://github.com/mourner/delaunator-rs
For a description of the data structure, including the halfedge connectivity, see:
https://mapbox.github.io/delaunator/
# Description
A very fast 2D [Delaunay Triangulation](https://en.wikipedia.org/wiki/Delaunay_triangulation) library for Rust.
A port of [Delaunator](https://github.com/mapbox/delaunator).
A triangle edge may be shared with another triangle. Instead of thinking about each edge A↔︎B, we will use two half-edges A→B and B→A. Having two half-edges is the key to everything this library provides.
Half-edges e are the indices into both of delaunator’s outputs:
delaunay.triangles[e] returns the point id where the half-edge starts
delaunay.halfedges[e] returns the opposite half-edge in the adjacent triangle, or -1 if there is no adjacent triangle
Triangle ids and half-edge ids are related.
The half-edges of triangle t are 3*t, 3*t + 1, and 3*t + 2.
The triangle of half-edge id e is floor(e/3).
# Example
```rust
use delaunator::triangulate;
use structures::Point2D;
let points = vec![
Point2D { x: 0., y: 0. },
Point2D { x: 1., y: 0. },
Point2D { x: 1., y: 1. },
Point2D { x: 0., y: 1. },
];
let result = triangulate(&points).expect("No triangulation exists.");
println!("{:?}", result.triangles); // [0, 2, 1, 0, 3, 2]
```
*/
use crate::structures::Point2D;
use std::collections::HashSet;
use std::f64;
/// Represents the area outside of the triangulation.
/// Halfedges on the convex hull (which don't have an adjacent halfedge)
/// will have this value.
// `usize::MAX` can never be a valid index, so it is safe as a sentinel.
pub const EMPTY: usize = usize::max_value();
/// A data structure used to perform Delaunay triangulation on
/// a set of input vector points. Connectivity between points,
/// triangles, and halfedges is as follows:
///
/// - edge → edges: next_halfedge, prevHalfedge, halfedges[]
/// - edge → points: triangles[]
/// - edge → triangle: triangle_of_edge
/// - triangle → edges: edges_of_triangle
/// - triangle → points: points_of_triangle
/// - triangle → triangles: triangles_adjacent_to_triangle
/// - point → incoming edges: edges_around_point
/// - point → outgoing edges: edges_around_point + halfedge[]
/// - point → points: edges_around_point + triangles[]
/// - point → triangles: edges_around_point + triangle_of_edge
pub struct Triangulation {
    /// A vector of point indices where each triple represents a Delaunay triangle.
    /// All triangles are directed counter-clockwise.
    pub triangles: Vec<usize>,
    /// A vector of adjacent halfedge indices that allows traversing the triangulation graph.
    ///
    /// `i`-th half-edge in the array corresponds to vertex `triangles[i]`
    /// the half-edge is coming from. `halfedges[i]` is the index of a twin half-edge
    /// in an adjacent triangle (or `EMPTY` for outer half-edges on the convex hull).
    pub halfedges: Vec<usize>,
    /// A vector of indices that reference points on the convex hull of the triangulation,
    /// counter-clockwise.
    pub hull: Vec<usize>,
}
impl Triangulation {
    /// Constructs a new *Triangulation*.
    // `2n - 5` is the planar upper bound on triangle count; underflows for
    // n < 3, but callers only construct this after a seed triangle is found.
    fn new(n: usize) -> Self {
        let max_triangles = 2 * n - 5;
        Self {
            triangles: Vec::with_capacity(max_triangles * 3),
            halfedges: Vec::with_capacity(max_triangles * 3),
            hull: Vec::new(),
        }
    }
    /// The number of triangles in the triangulation.
    pub fn len(&self) -> usize {
        self.triangles.len() / 3
    }
    /// Next halfedge in a triangle.
    pub fn next_halfedge(&self, edge: usize) -> usize {
        if edge % 3 == 2 {
            edge - 2
        } else {
            edge + 1
        }
    }
    /// Previous halfedge in a triangle.
    pub fn prev_halfedge(&self, edge: usize) -> usize {
        if edge % 3 == 0 {
            edge + 2
        } else {
            edge - 1
        }
    }
    /// Returns the triangle of an edge.
    pub fn triangle_of_edge(&self, edge: usize) -> usize {
        edge / 3
    }
    /// Returns the edges of a triangle.
    pub fn edges_of_triangle(&self, triangle: usize) -> [usize; 3] {
        [3 * triangle, 3 * triangle + 1, 3 * triangle + 2]
    }
    /// Returns the points of a triangle.
    pub fn points_of_triangle(&self, triangle: usize) -> [usize; 3] {
        // self.edges_of_triangle(t)
        //     .into_iter()
        //     .map(|e| self.triangles[*e])
        //     .collect()
        let e = self.edges_of_triangle(triangle);
        [
            self.triangles[e[0]],
            self.triangles[e[1]],
            self.triangles[e[2]],
        ]
    }
    /// Triangle circumcenter.
    pub fn triangle_center(&self, points: &[Point2D], triangle: usize) -> Point2D {
        let p = self.points_of_triangle(triangle);
        points[p[0]].circumcenter(&points[p[1]], &points[p[2]])
    }
    /// Returns the edges around a point connected to halfedge '*start*'.
    pub fn edges_around_point(&self, start: usize) -> Vec<usize> {
        let mut result = vec![];
        let mut incoming = start;
        let mut outgoing: usize;
        // let mut i = 0;
        loop {
            // guard against revisiting an edge (can occur on hull boundaries)
            if result.contains(&incoming) {
                break;
            }
            result.push(incoming);
            outgoing = self.next_halfedge(incoming);
            incoming = self.halfedges[outgoing];
            if incoming == EMPTY {
                break;
            } else if incoming == start {
                result.push(incoming);
                break;
            }
            // i += 1;
            // if i > 100 {
            //     // println!("{} {} {}", outgoing, incoming, start);
            //     break;
            // }
        }
        result
    }
    /// Point indices of the natural neighbours reachable by walking the
    /// halfedges around the point that edge '*start*' points into.
    pub fn natural_neighbours_from_incoming_edge(&self, start: usize) -> Vec<usize> {
        let mut result = vec![];
        //result.push(self.triangles[self.next_halfedge(start)]);
        let mut incoming = start;
        let mut outgoing: usize;
        loop {
            result.push(self.triangles[incoming]);
            outgoing = self.next_halfedge(incoming);
            incoming = self.halfedges[outgoing];
            if incoming == EMPTY {
                break;
            } else if incoming == start {
                break;
            }
        }
        result
    }
    /// Like `natural_neighbours_from_incoming_edge`, but also collects the
    /// neighbours-of-neighbours (second-order ring). Order is unspecified
    /// because the result passes through a `HashSet`.
    pub fn natural_neighbours_2nd_order(&self, start: usize) -> Vec<usize> {
        let mut set = HashSet::new();
        let mut edges = vec![];
        // result.push(self.triangles[self.next_halfedge(start)]);
        // set.insert(self.triangles[self.next_halfedge(start)]);
        let mut incoming = start;
        let mut outgoing: usize;
        loop {
            set.insert(self.triangles[incoming]);
            outgoing = self.next_halfedge(incoming);
            incoming = self.halfedges[outgoing];
            edges.push(outgoing);
            if incoming == EMPTY {
                break;
            } else if incoming == start {
                break;
            }
        }
        for start in edges {
            incoming = start;
            loop {
                set.insert(self.triangles[incoming]);
                outgoing = self.next_halfedge(incoming);
                incoming = self.halfedges[outgoing];
                if incoming == EMPTY {
                    break;
                } else if incoming == start {
                    break;
                }
            }
        }
        set.into_iter().map(|i| i).collect()
    }
    /// Returns the indices of the adjacent triangles to a triangle.
    pub fn triangles_adjacent_to_triangle(&self, triangle: usize) -> Vec<usize> {
        let mut adjacent_triangles: Vec<usize> = vec![];
        let mut opposite: usize;
        for e in self.edges_of_triangle(triangle).iter() {
            opposite = self.halfedges[*e];
            if opposite != EMPTY {
                adjacent_triangles.push(self.triangle_of_edge(opposite));
            }
        }
        adjacent_triangles
    }
    /// Appends triangle (i0, i1, i2) and links its three halfedges to the
    /// opposite halfedges `a`, `b`, `c` (or `EMPTY`). Returns the index of
    /// the triangle's first halfedge.
    fn add_triangle(
        &mut self,
        i0: usize,
        i1: usize,
        i2: usize,
        a: usize,
        b: usize,
        c: usize,
    ) -> usize {
        let t = self.triangles.len();
        self.triangles.push(i0);
        self.triangles.push(i1);
        self.triangles.push(i2);
        self.halfedges.push(a);
        self.halfedges.push(b);
        self.halfedges.push(c);
        if a != EMPTY {
            self.halfedges[a] = t;
        }
        if b != EMPTY {
            self.halfedges[b] = t + 1;
        }
        if c != EMPTY {
            self.halfedges[c] = t + 2;
        }
        t
    }
    /// Restores the Delaunay condition around halfedge `a` by recursively
    /// flipping illegal triangle pairs; returns a halfedge on the boundary
    /// of the flipped region.
    fn legalize(&mut self, a: usize, points: &[Point2D], hull: &mut Hull) -> usize {
        let b = self.halfedges[a];
        // if the pair of triangles doesn't satisfy the Delaunay condition
        // (p1 is inside the circumcircle of [p0, pl, pr]), flip them,
        // then do the same check/flip recursively for the new pair of triangles
        //
        //           pl                    pl
        //          /||\                  /  \
        //       al/ || \bl            al/    \a
        //        /  ||  \              /      \
        //       /  a||b  \    flip    /___ar___\
        //     p0\   ||   /p1   =>   p0\---bl---/p1
        //        \  ||  /              \      /
        //       ar\ || /br             b\    /br
        //          \||/                  \  /
        //           pr                    pr
        //
        let ar = self.prev_halfedge(a);
        if b == EMPTY {
            return ar;
        }
        let al = self.next_halfedge(a);
        let bl = self.prev_halfedge(b);
        let p0 = self.triangles[ar];
        let pr = self.triangles[a];
        let pl = self.triangles[al];
        let p1 = self.triangles[bl];
        let illegal = (&points[p0]).in_circle(&points[pr], &points[pl], &points[p1]);
        if illegal {
            self.triangles[a] = p1;
            self.triangles[b] = p0;
            let hbl = self.halfedges[bl];
            let har = self.halfedges[ar];
            // edge swapped on the other side of the hull (rare); fix the halfedge reference
            if hbl == EMPTY {
                let mut e = hull.start;
                loop {
                    if hull.tri[e] == bl {
                        hull.tri[e] = a;
                        break;
                    }
                    e = hull.next[e];
                    if e == hull.start || e == EMPTY {
                        // notice, I added the || e == EMPTY after
                        // finding a bug. I don't know about this.
                        break;
                    }
                }
            }
            self.halfedges[a] = hbl;
            self.halfedges[b] = har;
            self.halfedges[ar] = bl;
            if hbl != EMPTY {
                self.halfedges[hbl] = a;
            }
            if har != EMPTY {
                self.halfedges[har] = b;
            }
            if bl != EMPTY {
                self.halfedges[bl] = ar;
            }
            let br = self.next_halfedge(b);
            self.legalize(a, points, hull);
            return self.legalize(br, points, hull);
        }
        ar
    }
}
// data structure for tracking the edges of the advancing convex hull
struct Hull {
    prev: Vec<usize>,  // edge to previous hull edge
    next: Vec<usize>,  // edge to next hull edge
    tri: Vec<usize>,   // edge to adjacent halfedge in the triangulation
    hash: Vec<usize>,  // angular hash bucket -> hull edge (for fast lookup)
    start: usize,      // an arbitrary edge on the current hull
    center: Point2D,   // circumcenter of the seed triangle (hash origin)
}
impl Hull {
    /// Builds the initial three-edge hull from the seed triangle (i0, i1, i2).
    fn new(n: usize, center: Point2D, i0: usize, i1: usize, i2: usize, points: &[Point2D]) -> Self {
        let hash_len = (n as f64).sqrt() as usize;
        let mut hull = Self {
            prev: vec![0; n],            // edge to prev edge
            next: vec![0; n],            // edge to next edge
            tri: vec![0; n],             // edge to adjacent halfedge
            hash: vec![EMPTY; hash_len], // angular edge hash
            start: i0,
            center,
        };
        hull.next[i0] = i1;
        hull.prev[i2] = i1;
        hull.next[i1] = i2;
        hull.prev[i0] = i2;
        hull.next[i2] = i0;
        hull.prev[i1] = i0;
        hull.tri[i0] = 0;
        hull.tri[i1] = 1;
        hull.tri[i2] = 2;
        hull.hash_edge(&points[i0], i0);
        hull.hash_edge(&points[i1], i1);
        hull.hash_edge(&points[i2], i2);
        hull
    }
    /// Pseudo-angle hash of `p` relative to the hull center, mapped to a bucket.
    fn hash_key(&self, p: &Point2D) -> usize {
        let dx = p.x - self.center.x;
        let dy = p.y - self.center.y;
        let p = dx / (dx.abs() + dy.abs());
        let a = (if dy > 0.0 { 3.0 - p } else { 1.0 + p }) / 4.0; // [0..1]
        let len = self.hash.len();
        (((len as f64) * a).floor() as usize) % len
    }
    /// Record hull edge `i` in the angular hash bucket of point `p`.
    fn hash_edge(&mut self, p: &Point2D, i: usize) {
        let key = self.hash_key(p);
        self.hash[key] = i;
    }
    /// Find a hull edge visible from `p`; returns `(EMPTY, false)` when none is
    /// (likely a near-duplicate point), and a flag for whether a backward walk is needed.
    fn find_visible_edge(&self, p: &Point2D, points: &[Point2D]) -> (usize, bool) {
        let mut start: usize = 0;
        let key = self.hash_key(p);
        let len = self.hash.len();
        // probe hash buckets starting from p's angle until a live edge is found
        for j in 0..len {
            start = self.hash[(key + j) % len];
            if start != EMPTY && self.next[start] != EMPTY {
                break;
            }
        }
        start = self.prev[start];
        let mut e = start;
        while !p.orient(&points[e], &points[self.next[e]]) {
            e = self.next[e];
            if e == start {
                return (EMPTY, false);
            }
        }
        (e, e == start)
    }
}
/// Returns the center of the axis-aligned bounding box enclosing `points`.
fn calc_bbox_center(points: &[Point2D]) -> Point2D {
    let (mut min_x, mut max_x) = (f64::INFINITY, f64::NEG_INFINITY);
    let (mut min_y, mut max_y) = (f64::INFINITY, f64::NEG_INFINITY);
    for pt in points.iter() {
        min_x = min_x.min(pt.x);
        max_x = max_x.max(pt.x);
        min_y = min_y.min(pt.y);
        max_y = max_y.max(pt.y);
    }
    Point2D {
        x: (min_x + max_x) / 2.0,
        y: (min_y + max_y) / 2.0,
    }
}
/// Index of the point nearest to `p0`, ignoring zero-distance duplicates of
/// `p0` itself. Returns `None` when no point lies at a positive distance.
fn find_closest_point(points: &[Point2D], p0: &Point2D) -> Option<usize> {
    let mut best: Option<(usize, f64)> = None;
    for (i, p) in points.iter().enumerate() {
        let d = p0.distance_squared(p);
        // d > 0.0 also rejects NaN distances; ties keep the earliest index
        if d > 0.0 {
            match best {
                Some((_, best_d)) if best_d <= d => {}
                _ => best = Some((i, d)),
            }
        }
    }
    best.map(|(i, _)| i)
}
/// Picks the initial (seed) triangle for the triangulation; returns `None`
/// for degenerate input (fewer than 3 distinct points, or all collinear).
fn find_seed_triangle(points: &[Point2D]) -> Option<(usize, usize, usize)> {
    // pick a seed point close to the center
    let bbox_center = calc_bbox_center(points);
    let i0 = find_closest_point(points, &bbox_center)?;
    let p0 = &points[i0];
    // find the point closest to the seed
    let i1 = find_closest_point(points, p0)?;
    let p1 = &points[i1];
    // find the third point which forms the smallest circumcircle with the first two
    let mut min_radius = f64::INFINITY;
    let mut i2: usize = 0;
    for (i, p) in points.iter().enumerate() {
        if i == i0 || i == i1 {
            continue;
        }
        let r = p0.circumradius2(p1, p);
        if r < min_radius {
            i2 = i;
            min_radius = r;
        }
    }
    // min_radius stays infinite when all candidates are collinear with (p0, p1)
    if min_radius == f64::INFINITY {
        None
    } else {
        // swap the order of the seed points for counter-clockwise orientation
        Some(if p0.orient(p1, &points[i2]) {
            (i0, i2, i1)
        } else {
            (i0, i1, i2)
        })
    }
}
/// Triangulate a set of 2D points.
/// Returns `None` if no triangulation exists for the input (e.g. all points are collinear).
pub fn triangulate(points: &[Point2D]) -> Option<Triangulation> {
    let n = points.len();
    let (i0, i1, i2) = find_seed_triangle(points)?;
    let center = (&points[i0]).circumcenter(&points[i1], &points[i2]);
    let mut triangulation = Triangulation::new(n);
    triangulation.add_triangle(i0, i1, i2, EMPTY, EMPTY, EMPTY);
    // sort the points by distance from the seed triangle circumcenter
    let mut dists: Vec<_> = points
        .iter()
        .enumerate()
        .map(|(i, point)| (i, center.distance_squared(point)))
        .collect();
    dists.sort_unstable_by(|&(_, da), &(_, db)| da.partial_cmp(&db).unwrap());
    let mut hull = Hull::new(n, center, i0, i1, i2, points);
    // incrementally insert each remaining point, growing the hull outward
    for (k, &(i, _)) in dists.iter().enumerate() {
        let p = &points[i];
        // skip near-duplicates
        if k > 0 && p.nearly_equals(&points[dists[k - 1].0]) {
            continue;
        }
        // skip seed triangle points
        if i == i0 || i == i1 || i == i2 {
            continue;
        }
        // find a visible edge on the convex hull using edge hash
        let (mut e, walk_back) = hull.find_visible_edge(p, points);
        if e == EMPTY {
            continue; // likely a near-duplicate point; skip it
        }
        // add the first triangle from the point
        let t = triangulation.add_triangle(e, i, hull.next[e], EMPTY, EMPTY, hull.tri[e]);
        // recursively flip triangles from the point until they satisfy the Delaunay condition
        hull.tri[i] = triangulation.legalize(t + 2, points, &mut hull);
        hull.tri[e] = t; // keep track of boundary triangles on the hull
        // walk forward through the hull, adding more triangles and flipping recursively
        let mut n = hull.next[e];
        loop {
            let q = hull.next[n];
            if !p.orient(&points[n], &points[q]) {
                break;
            }
            let t = triangulation.add_triangle(n, i, q, hull.tri[i], EMPTY, hull.tri[n]);
            hull.tri[i] = triangulation.legalize(t + 2, points, &mut hull);
            hull.next[n] = EMPTY; // mark as removed
            n = q;
        }
        // walk backward from the other side, adding more triangles and flipping
        if walk_back {
            loop {
                let q = hull.prev[e];
                if !p.orient(&points[q], &points[e]) {
                    break;
                }
                let t = triangulation.add_triangle(q, i, e, EMPTY, hull.tri[e], hull.tri[q]);
                triangulation.legalize(t + 2, points, &mut hull);
                hull.tri[q] = t;
                hull.next[e] = EMPTY; // mark as removed
                e = q;
            }
        }
        // update the hull indices
        hull.prev[i] = e;
        hull.next[i] = n;
        hull.prev[n] = i;
        hull.next[e] = i;
        hull.start = e;
        // save the two new edges in the hash table
        hull.hash_edge(p, i);
        hull.hash_edge(&points[e], e);
    }
    // expose hull as a vector of point indices
    let mut e = hull.start;
    loop {
        triangulation.hull.push(e);
        e = hull.next[e];
        if e == hull.start {
            break;
        }
    }
    triangulation.triangles.shrink_to_fit();
    triangulation.halfedges.shrink_to_fit();
    Some(triangulation)
}
| true |
3a2c6344f90793b610cc37eec41071107b72ea91
|
Rust
|
iCodeIN/edu-json-parser
|
/edu-json-parser/src/lib.rs
|
UTF-8
| 10,203 | 2.53125 | 3 |
[] |
no_license
|
#[macro_use]
extern crate compre_combinee;
extern crate combine;
mod errors;
mod details;
mod traits;
mod stop_watch;
use std::collections::{HashMap};
use combine::{parser, eof, satisfy, choice, attempt};
use combine::parser::range::{take_while1};
use combine::parser::char::*;
use combine::{Parser, many, optional, skip_many, sep_by, between};
pub use crate::errors::ErrorCause;
pub use crate::details::Node;
pub use crate::traits::*;
use std::{f64, mem, str};
use std::convert::TryFrom;
use smol_str::SmolStr;
/// Parses one hexadecimal digit (`0-9`, `a-f`, `A-F`) into its numeric value.
///
/// Replaces the hand-rolled range checks and arithmetic of the original with
/// std's `is_ascii_hexdigit` / `to_digit(16)`, which accept exactly the same
/// character set and produce identical values.
fn parse_hex<'a>() -> impl Parser<&'a str, Output = u32> {
    satisfy(|c: char| c.is_ascii_hexdigit())
        .map(|c: char| c.to_digit(16).expect("satisfy guarantees a hex digit"))
}
/// Parses a JSON `\uXXXX` escape into the corresponding character.
/// Returns `None` for code points that are not valid `char`s (e.g. lone surrogates).
fn unicode_char<'a>() -> impl Parser<&'a str, Output = Option<char>> {
    c_hx_do!{
        __ <- string(r#"\u"#),
        d3 <- parse_hex(),
        d2 <- parse_hex(),
        d1 <- parse_hex(),
        d0 <- parse_hex();
        {
            // reassemble the four nibbles, most significant first
            let unicode = d0 +
                0x10 * d1 +
                0x100 * d2 +
                0x1000 * d3;
            char::try_from(unicode).ok()
        }
    }
}
/// One fragment of a parsed JSON string: either a borrowed run of plain
/// characters or a single decoded escape (None if the escape was invalid).
#[derive(PartialEq)]
enum StringPiece<'a >
{
    Ref(&'a str),
    Char(Option<char>)
}
/// Runs `p` between the bracket parsers `pbl` and `pbr`, allowing whitespace
/// just inside each bracket.
fn braced_parser<'a, PBL, P, PBR, O>(pbl: PBL, p: P, pbr: PBR) -> impl Parser<&'a str, Output = O>
    where
        PBL: Parser<&'a str>,
        PBR: Parser<&'a str>,
        P: Parser<&'a str, Output = O>
{
    between(
        c_compre![c; c <- pbl, __ <- skip_many(space())],
        c_compre![c; __ <- skip_many(space()), c <- pbr],
        p
    )
}
/// Parses the body of a JSON string (without the surrounding quotes) into
/// fragments: runs of plain characters borrowed from the input, plus decoded
/// escape sequences.
fn string_part<'a>() -> impl Parser<&'a str, Output = Vec<StringPiece<'a >>> {
    many(
        choice(
            (
                // longest-first: a run of ordinary characters, then each escape
                attempt(take_while1(|c: char| c != '\\' && c != '"' && c != '\n' && c != '\r' && c != '\t')
                    .map(|chars: &str| StringPiece::Ref(chars))),
                attempt(string("\\\"").map(|_|StringPiece::Ref("\""))),
                attempt(string("\\\\").map(|_|StringPiece::Ref("\\"))),
                attempt(string("\\n").map(|_|StringPiece::Ref("\n"))),
                attempt(string("\\t").map(|_|StringPiece::Ref("\t"))),
                attempt(string("\\/").map(|_|StringPiece::Ref("/"))),
                attempt(string("\\r").map(|_|StringPiece::Ref("\r"))),
                attempt(string("\\f").map(|_|StringPiece::Ref("\u{000c}"))),
                attempt(string("\\b").map(|_|StringPiece::Ref("\u{0008}"))),
                attempt(unicode_char().map(|s|StringPiece::Char(s))),
            )
        )
    )
}
/// Parses a double-quoted JSON string into a `SmolStr`, assembling the
/// fragments from `string_part`. Short results (<= 22 bytes, SmolStr's inline
/// capacity) are built on the stack to avoid a heap allocation.
fn string_parser_inner<'a>() -> impl Parser<&'a str, Output = SmolStr> {
    c_hx_do! {
        x <- between(char('"'), char('"'), string_part());
        {
            // total byte length of the assembled string
            let cap = x.iter().fold(0, |acc, s|
                acc +
                    match s {
                        StringPiece::Ref(strref) => strref.len(),
                        StringPiece::Char(c) => c.map(|c_inner| c_inner.len_utf8()).unwrap_or(0)
                    }
            );
            if cap <= 22 {
                let mut buf: [u8; 22] = [0; 22];
                let mut offset = 0;
                for s in x.iter() {
                    match s {
                        StringPiece::Ref(strref) => {
                            for &b in strref.as_bytes() {
                                buf[offset] = b;
                                offset += 1;
                            }
                        },
                        StringPiece::Char(c) => {
                            if let Some(chr) = c {
                                chr.encode_utf8(&mut buf[offset..]);
                                offset += chr.len_utf8();
                            }
                        }
                    }
                }
                // SAFETY: buf[0..cap] was filled only with bytes copied from
                // valid &str slices and with complete `encode_utf8` sequences,
                // so it is valid UTF-8.
                return unsafe {
                    SmolStr::new(str::from_utf8_unchecked(&buf[0..cap]))
                };
            }
            let mut str = String::with_capacity(cap);
            for s in x.iter() {
                match s {
                    StringPiece::Ref(strref) => str.push_str(strref),
                    StringPiece::Char(c) => if let Some(chr) = c { str.push(*chr); }
                }
            }
            SmolStr::new(str)
        }
    }
}
/// Parses a JSON string literal and wraps it in a `Node::String`.
fn string_parser<'a>() -> impl Parser<&'a str, Output = Node> {
    string_parser_inner().map(Node::String)
}
/// Consumes one or more ASCII digits and returns them as a borrowed slice.
fn digit_sequence<'a>() -> impl Parser<&'a str, Output = &'a str> {
    take_while1(|c: char| c.is_ascii_digit())
}
/// Raises `lhs` to the power `rhs`; thin wrapper over `f64::powf`.
#[inline(always)]
fn power(lhs: f64, rhs: f64) -> f64 {
    f64::powf(lhs, rhs)
}
/// Parses the fractional part of a number: a `.` followed by digits,
/// returning only the digits.
fn trailing_digit_sequence<'a>() -> impl Parser<&'a str, Output = &'a str> {
    c_hx_do! {
        __ <- char('.'),
        rest <- digit_sequence();
        rest
    }
}
/// Parses a number exponent (`e`/`E`, optional sign, digits) and returns the
/// multiplier `10^(sign * digits)`.
fn exponent_parser<'a>() -> impl Parser<&'a str, Output = f64> {
    c_hx_do!{
        __ <- satisfy(|c: char| c == 'e' || c == 'E'),
        sign_char <- optional(satisfy(|c: char| c == '+' || c == '-')),
        digits <- digit_sequence();
        {
            let sign = match sign_char {
                Some('-') => -1.0,
                _ => 1.0
            };
            // accumulate the decimal exponent value
            let mut acc = 0;
            for c in digits.as_bytes() {
                acc = acc * 10 + (c - b'0') as u64;
            }
            power(10.0, sign * acc as f64)
        }
    }
}
/// Integer part of a number literal: either a lone `0`, or a leading
/// non-zero digit followed by the remaining digits.
#[derive(PartialEq, Copy, Clone)]
enum NumberPrefix<'a >
{
    LeadingZero,
    Digits(char, &'a str)
}
/// Recognizes a literal `0` integer part.
fn leading_zero_parser<'a>() -> impl Parser<&'a str, Output = NumberPrefix<'a>> {
    char('0').map(|_zero| NumberPrefix::LeadingZero)
}
/// Recognizes an integer part starting with `1`-`9`, optionally followed by
/// more digits.
fn leading_digits_parser <'a>() -> impl Parser<&'a str, Output = NumberPrefix<'a >> {
    c_hx_do! {
        leading_digit <- satisfy(|c: char| c >= '1' && c <= '9'),
        digs <- optional(digit_sequence());
        NumberPrefix::Digits(leading_digit, digs.unwrap_or(""))
    }
}
/// Integer part of a number: non-zero digits first, otherwise a lone `0`
/// (JSON forbids other leading zeros).
fn leading_parser <'a>() -> impl Parser<&'a str, Output = NumberPrefix<'a >> {
    choice((
        attempt(leading_digits_parser()),
        attempt(leading_zero_parser()),
    ))
}
/// Parses a JSON number (optional sign, integer part, optional fraction and
/// exponent) into a `Node::Number` backed by `f64`.
fn number_parser<'a>() -> impl Parser<&'a str, Output = Node> {
    c_hx_do! {
        minus_sign <- optional(char('-')),
        leading <- leading_parser(),
        trail <- optional(trailing_digit_sequence()),
        exp <- optional(exponent_parser());
        {
            Node::Number({
                // integer part
                let mut acc = match leading {
                    NumberPrefix::LeadingZero => 0.0,
                    NumberPrefix::Digits(leading_digit, l_digs) => {
                        let mut l = (leading_digit as u8 - b'0') as u64;
                        for c in l_digs.as_bytes() {
                            l = l * 10 + (c - b'0') as u64;
                        }
                        l as f64
                    }
                };
                // fractional part: each digit contributes digit / 10^position
                if let Some(t_digs) = trail {
                    let mut divider = 1.0;
                    for c in t_digs.as_bytes() {
                        divider /= 10.0;
                        acc += (c - b'0') as f64 * divider;
                    }
                }
                if let Some(exponent) = exp {
                    acc *= exponent;
                }
                if let Some(_) = minus_sign {
                    -acc
                } else {
                    acc
                }
            })
        }
    }
}
/// Parses the literals `true` / `false` into a `Node::Boolean`.
fn bool_parser<'a>() -> impl Parser<&'a str, Output = Node> {
    c_hx_do!{
        word <- string("true").or(string("false"));
        match word {
            "true" => Node::Boolean(true),
            _ => Node::Boolean(false)
        }
    }
}
/// Parses the literal `null` into `Node::Null`.
fn null_parser<'a>() -> impl Parser<&'a str, Output = Node> {
    c_hx_do!{
        _word <- string("null");
        Node::Null
    }
}
// Wraps a parser-returning function in `parser(...)` so it can be referenced
// recursively (arrays/objects contain values, which contain arrays/objects).
macro_rules! ref_parser {
    ($parser_fn:ident) => {
        parser(|input| {
            let _: &mut &str = input;
            $parser_fn().parse_stream(input).into_result()
        })
    }
}
/// Parses any JSON value, allowing surrounding whitespace. Array and object
/// parsers are referenced indirectly (via `ref_parser!`) to allow recursion.
fn primitive_parser<'a>() -> impl Parser<&'a str, Output = Node> {
    let possible_parser = bool_parser()
        .or(number_parser())
        .or(string_parser())
        .or(null_parser())
        .or(ref_parser!(array_parser))
        .or(ref_parser!(dictionary_parser));
    c_hx_do! {
        __ <- skip_many(space()),
        pars <- possible_parser,
        ___ <- skip_many(space());
        pars
    }
}
/// Parses a JSON array (`[v1, v2, ...]`) into a `Node::Array`.
fn array_parser<'a>() -> impl Parser<&'a str, Output = Node> {
    let elements = sep_by(primitive_parser(), char(','));
    braced_parser(char('['), elements, char(']')).map(Node::Array)
}
/// Parses one `"key": value` pair of a JSON object. The `Option` wrapper is
/// always `Some`; it only exists so the object parser can move the pair out.
fn pair_parser<'a>() -> impl Parser<&'a str, Output = Option<(SmolStr, Node)>> {
    let str_parser = c_hx_do!{
        __ <- skip_many(space()),
        stp <- string_parser_inner(),
        ___ <- skip_many(space());
        stp
    };
    c_hx_do!{
        l <- str_parser,
        __ <- char(':'),
        r <- primitive_parser();
        Some((l, r))
    }
}
/// Parses a JSON object (`{"key": value, ...}`) into a `Node::Object`.
/// Duplicate keys keep the last value, as with any `HashMap` insert.
///
/// Replaces the original's `mem::replace(&mut pair, None).unwrap()` dance
/// (and the unused `mut` bindings) with consuming iteration: `into_iter`
/// yields the pairs by value and `flatten` unwraps the always-`Some` options.
fn dictionary_parser<'a>() -> impl Parser<&'a str, Output = Node> {
    braced_parser(
        char('{'),
        sep_by(pair_parser(), char(',')),
        char('}')
    ).map(|pairs: Vec<Option<(SmolStr, Node)>>| {
        let mut dict = HashMap::with_capacity(pairs.len());
        for (key, value) in pairs.into_iter().flatten() {
            dict.insert(key, value);
        }
        Node::Object(dict)
    })
}
fn json_parser<'a>() -> impl Parser<&'a str, Output = Node> {
null_parser()
.or(bool_parser())
.or(number_parser())
.or(string_parser())
.or(array_parser())
.or(dictionary_parser())
}
/// Parses `content` as a complete JSON document.
///
/// Leading and trailing whitespace is permitted, but any trailing non-whitespace
/// input is rejected (`eof`). Returns the parsed tree, or the parse error
/// rendered as a `String`.
pub fn parse_json(content: &str) -> Result<Node, String> {
    let mut parser = c_hx_do!{
        __ <- skip_many(space()),
        json <- json_parser(),
        ___ <- skip_many(space()),
        ____ <- eof();
        json
    };
    match parser.parse(content) {
        Ok((node, _)) => Ok(node),
        // `to_string()` already renders the error; the original's extra
        // `format!("{}", ...)` was a redundant second formatting pass.
        Err(e) => Err(e.to_string()),
    }
}
| true |
650bf934fa78786cb7ce1f0594ecc24b64b5ae75
|
Rust
|
jasbok/rxr
|
/src/dosbox_config.rs
|
UTF-8
| 3,918 | 3.1875 | 3 |
[] |
no_license
|
use std::collections::HashMap;
use std::error::Error;
use std::io::BufReader;
use std::fs::File;
use std::io::prelude::*;
use std::path::PathBuf;
use regex::Regex;
#[derive(Default, Deserialize, Debug, Clone, PartialEq)]
pub struct DosboxConfig {
    // Commands from the [autoexec] section, in file order.
    pub autoexec: Vec<String>,
    // section name -> (key -> value) for every non-autoexec section.
    pub settings: HashMap<String, HashMap<String, String>>,
}
impl DosboxConfig {
    /// Reads and parses a DOSBox config file from `path`.
    /// Unreadable lines are silently dropped by `filter_map`.
    pub fn read(path: &PathBuf) -> Result<DosboxConfig, Box<Error>> {
        let lines: Vec<String> = BufReader::new(File::open(&path)?)
            .lines()
            .filter_map(|line| line.ok())
            .collect();
        DosboxConfig::parse(lines.as_slice())
    }
    /// Parses config lines: `[autoexec]` entries are collected in order
    /// (with `\` normalised to `/`), every other `[section]` contributes
    /// `key = value` settings. Blank lines, `#` comments and anything
    /// before the first section header are ignored.
    pub fn parse<T>(lines: &[T]) -> Result<DosboxConfig, Box<Error>>
    where
        T: AsRef<str>,
    {
        lazy_static! {
            static ref SECTION_RE : Regex = Regex::new("\\[(.+)\\]").unwrap();
            static ref COMMENT_RE : Regex = Regex::new("#(.*)").unwrap();
        }
        let filtered = lines
            .iter()
            .map(|line| line.as_ref().trim())
            .filter(|line| !line.is_empty())
            .filter(|line| !COMMENT_RE.is_match(line));
        // Currently open section; `None` until the first header is seen.
        let mut section: Option<String> = None;
        let mut autoexec: Vec<String> = Vec::new();
        let mut settings: HashMap<String, HashMap<String, String>> = HashMap::new();
        for line in filtered {
            if let Some(caps) = SECTION_RE.captures(line) {
                section = Some(String::from(caps.get(1).unwrap().as_str()));
            } else if let Some(section) = section.as_ref() {
                match section.as_str() {
                    "autoexec" => autoexec.push(DosboxConfig::command_from(line)),
                    // Any other section: store the key/value pair under it.
                    section => {
                        if let Some((key, val)) = DosboxConfig::setting_from(line) {
                            settings
                                .entry(String::from(section)) // TODO: Avoid multiple clones.
                                .or_insert_with(HashMap::new)
                                .insert(key, val);
                        }
                    }
                }
            }
        }
        Ok(DosboxConfig {
            autoexec: autoexec,
            settings: settings,
        })
    }
    /// Normalises Windows path separators in an autoexec command.
    fn command_from(line: &str) -> String {
        line.replace("\\", "/")
    }
    /// Splits a `key = value` line (whitespace around `=` allowed);
    /// returns `None` when the line does not match.
    fn setting_from(line: &str) -> Option<(String, String)> {
        lazy_static! {
            static ref KEY_VAL_RE : Regex = Regex::new("(.+?)\\s*=\\s*(.+)").unwrap();
        }
        if let Some(caps) = KEY_VAL_RE.captures(line) {
            return Some((
                String::from(caps.get(1).unwrap().as_str()),
                String::from(caps.get(2).unwrap().as_str()),
            ));
        }
        None
    }
    /// Returns `self` overlaid with `config_b`: autoexec lines are
    /// appended, settings merged with `config_b` winning on key clashes.
    pub fn merge(&self, config_b: &DosboxConfig) -> DosboxConfig {
        let mut merged = self.clone();
        for line in &config_b.autoexec {
            merged.autoexec.push(line.clone());
        }
        for (section, settings) in &config_b.settings {
            if let Some(msettings) = merged.settings.get_mut(section) {
                for (key, val) in settings {
                    msettings.insert(key.clone(), val.clone());
                }
                continue;
            }
            merged.settings.insert(section.clone(), settings.clone());
        }
        merged
    }
    /// Writes the config back out: settings sections first (HashMap
    /// iteration order, i.e. unspecified), then `[autoexec]`.
    pub fn write(&self, path: &PathBuf) -> Result<(), Box<Error>> {
        let mut file = File::create(path)?;
        for (section, settings) in &self.settings {
            file.write_all(format!("[{}]\n", section).as_bytes())?;
            for (key, val) in settings {
                file.write_all(format!("{}={}\n", key, val).as_bytes())?;
            }
        }
        file.write_all(b"[autoexec]\n")?;
        for line in &self.autoexec {
            file.write_all(format!("{}\n", line).as_bytes())?;
        }
        file.flush()?;
        Ok(())
    }
}
| true |
38d74aee11840118cae1af177080609957e29f83
|
Rust
|
kalleakerblom/AdventOfCode2019
|
/src/day11.rs
|
UTF-8
| 4,209 | 3.296875 | 3 |
[] |
no_license
|
use super::int_code::Program;
use std::collections::HashMap;
/// Hull-painting robot: position, heading and its Intcode brain.
struct Robot {
    pos: (i32, i32),
    dir: (i32, i32),
    program: Program,
}
/// Relative turn emitted by the Intcode program (0 = left, 1 = right).
enum Turn {
    Left,
    Right,
}
/// Panel colour (0 = black, 1 = white in program I/O).
#[derive(Clone, Copy)]
enum Color {
    Black,
    White,
}
impl Robot {
    /// New robot at the origin, facing "up" (+y).
    fn new(program: Program) -> Self {
        Robot { dir: (0, 1), pos: (0, 0), program }
    }
    /// Rotates the heading 90 degrees in place.
    fn turn(&mut self, turn: Turn) {
        match turn {
            // (0,1) (-1,0) (0,-1) (1,0) (0,1)
            Turn::Left => self.dir = (-self.dir.1, self.dir.0),
            Turn::Right => self.dir = (self.dir.1, -self.dir.0),
        }
    }
    /// Feeds the camera colour to the program; returns the colour to paint
    /// (and applies the program's turn), or `None` once the program halts.
    /// Panics on output codes other than 0/1.
    fn paint(&mut self, camera_input: Color) -> Option<Color> {
        let input = match camera_input {
            Color::Black => 0,
            Color::White => 1,
        };
        let paint = match self.program.run_input(Some(input))? {
            0 => Color::Black,
            1 => Color::White,
            _ => panic!("invalid paint code"),
        };
        let turn = match self.program.run_input(None)? {
            0 => Turn::Left,
            1 => Turn::Right,
            _ => panic!("invalid turn code"),
        };
        self.turn(turn);
        Some(paint)
    }
    /// Advances one panel in the current heading.
    fn step(&mut self) {
        self.pos = (self.pos.0 + self.dir.0, self.pos.1 + self.dir.1);
    }
}
/// Renders the painted panels as ASCII art ('#' = white, '.' = black or
/// unpainted), rows printed top to bottom with y increasing upwards.
fn print_paint(paint_map: &HashMap<(i32, i32), Color>) {
    // Bounding box of all painted panels, folded in one pass. An empty
    // map leaves the ranges inverted, so nothing gets printed.
    let seed = (
        i32::max_value(),
        i32::min_value(),
        i32::max_value(),
        i32::min_value(),
    );
    let (min_x, max_x, min_y, max_y) = paint_map
        .keys()
        .fold(seed, |(lx, hx, ly, hy), &(x, y)| {
            (lx.min(x), hx.max(x), ly.min(y), hy.max(y))
        });
    for row in (min_y..=max_y).rev() {
        for col in min_x..=max_x {
            match paint_map.get(&(col, row)) {
                Some(Color::White) => print!("#"),
                _ => print!("."),
            }
        }
        println!();
    }
}
#[cfg(test)]
mod tests {
    use super::print_paint;
    use super::Color;
    use super::Program;
    use super::Robot;
    use super::Turn;
    use std::collections::HashMap;
    use std::fs;
    use std::iter;
    // The heading should cycle through all four cardinal directions.
    #[test]
    fn robot_turns() {
        let mut robot = Robot::new(Program::new(Vec::new()));
        assert_eq!(robot.dir, (0, 1));
        robot.turn(Turn::Left);
        assert_eq!(robot.dir, (-1, 0));
        robot.turn(Turn::Left);
        assert_eq!(robot.dir, (0, -1));
        robot.turn(Turn::Left);
        assert_eq!(robot.dir, (1, 0));
        robot.turn(Turn::Right);
        assert_eq!(robot.dir, (0, -1));
        robot.turn(Turn::Right);
        assert_eq!(robot.dir, (-1, 0));
        robot.turn(Turn::Right);
        assert_eq!(robot.dir, (0, 1));
    }
    // Part 1: starting on black, count panels painted at least once.
    #[test]
    fn day11_part1() {
        let mut code: Vec<i64> = fs::read_to_string("input/day11")
            .unwrap()
            .trim()
            .split(',')
            .map(|s| s.parse().unwrap())
            .collect();
        // Extra zeroed memory for the Intcode program's data section.
        code.extend(iter::repeat(0).take(1000));
        let prog = Program::new(code);
        let mut robot = Robot::new(prog);
        let mut painted = HashMap::new();
        let mut camera_input = Color::Black;
        while let Some(paint_color) = robot.paint(camera_input) {
            painted.insert(robot.pos, paint_color);
            robot.step();
            camera_input = *painted.get(&robot.pos).unwrap_or(&Color::Black);
        }
        assert_eq!(painted.len(), 2343);
    }
    // Part 2: starting on white, render the registration identifier.
    #[test]
    fn day11_part2() {
        let mut code: Vec<i64> = fs::read_to_string("input/day11")
            .unwrap()
            .trim()
            .split(',')
            .map(|s| s.parse().unwrap())
            .collect();
        code.extend(iter::repeat(0).take(1000));
        let prog = Program::new(code);
        let mut robot = Robot::new(prog);
        let mut painted = HashMap::new();
        let mut camera_input = Color::White;
        while let Some(paint_color) = robot.paint(camera_input) {
            painted.insert(robot.pos, paint_color);
            robot.step();
            camera_input = *painted.get(&robot.pos).unwrap_or(&Color::Black);
        }
        print_paint(&painted);
    }
}
| true |
817fcf404a1ba5629e7cca0c791d8682595e5536
|
Rust
|
Matthew-Maclean/stag
|
/src/rgb.rs
|
UTF-8
| 6,839 | 2.609375 | 3 |
[
"MIT"
] |
permissive
|
use image::RgbImage;
use rand::Rng;
use itertools::Itertools;
use utils::*;
use codec::Codec;
/// LSB steganography codec for 8-bit RGB images: one payload bit per
/// colour channel, 8 pixels (24 channels) per 3 payload bytes.
pub struct RgbCodec;
impl Codec for RgbCodec
{
type Input = RgbImage;
type Mode = RgbMode;
    /// Writes `payload` into the least-significant bits of `source`'s
    /// colour channels. Channel i (row-major over pixels) carries bit
    /// i % 8 of payload byte i / 8, so 8 pixels hold 3 bytes.
    ///
    /// Encoding stops at the first short chunk, so a trailing partial
    /// group (fewer than 8 pixels or 3 bytes) is silently dropped.
    fn encode<R: Rng>(
        source: &mut RgbImage,
        payload: &[u8],
        _mode: RgbMode,
        mut rng: R)
    {
        // iterate over pixels mutably
        for (mut pixels, bytes) in source.pixels_mut()
            // ~2.7 pixels per byte, so work in 8-pixel (24-channel)
            // and 3-byte chunks
            .chunks(8).into_iter()
            // make them vectors so we can check their lengths
            .map(|ch| ch.collect::<Vec<_>>())
            // give each chunk 3 bytes
            .zip(payload.iter().map(|x| *x)
                .chunks(3).into_iter()
                // make them also vectors
                .map(|ch| ch.collect::<Vec<_>>()))
        {
            // if any of the chunks are short, abort
            if pixels.len() != 8 || bytes.len() != 3
            {
                return;
            }
            let rng = &mut rng;
            // One call per channel: fix_u8 forces the channel's LSB to the
            // requested bit. i looked for a better way...
            fix_u8(&mut pixels[0].data[0], get_bit(bytes[0], 0), rng);
            fix_u8(&mut pixels[0].data[1], get_bit(bytes[0], 1), rng);
            fix_u8(&mut pixels[0].data[2], get_bit(bytes[0], 2), rng);
            fix_u8(&mut pixels[1].data[0], get_bit(bytes[0], 3), rng);
            fix_u8(&mut pixels[1].data[1], get_bit(bytes[0], 4), rng);
            fix_u8(&mut pixels[1].data[2], get_bit(bytes[0], 5), rng);
            fix_u8(&mut pixels[2].data[0], get_bit(bytes[0], 6), rng);
            fix_u8(&mut pixels[2].data[1], get_bit(bytes[0], 7), rng);
            fix_u8(&mut pixels[2].data[2], get_bit(bytes[1], 0), rng);
            fix_u8(&mut pixels[3].data[0], get_bit(bytes[1], 1), rng);
            fix_u8(&mut pixels[3].data[1], get_bit(bytes[1], 2), rng);
            fix_u8(&mut pixels[3].data[2], get_bit(bytes[1], 3), rng);
            fix_u8(&mut pixels[4].data[0], get_bit(bytes[1], 4), rng);
            fix_u8(&mut pixels[4].data[1], get_bit(bytes[1], 5), rng);
            fix_u8(&mut pixels[4].data[2], get_bit(bytes[1], 6), rng);
            fix_u8(&mut pixels[5].data[0], get_bit(bytes[1], 7), rng);
            fix_u8(&mut pixels[5].data[1], get_bit(bytes[2], 0), rng);
            fix_u8(&mut pixels[5].data[2], get_bit(bytes[2], 1), rng);
            fix_u8(&mut pixels[6].data[0], get_bit(bytes[2], 2), rng);
            fix_u8(&mut pixels[6].data[1], get_bit(bytes[2], 3), rng);
            fix_u8(&mut pixels[6].data[2], get_bit(bytes[2], 4), rng);
            fix_u8(&mut pixels[7].data[0], get_bit(bytes[2], 5), rng);
            fix_u8(&mut pixels[7].data[1], get_bit(bytes[2], 6), rng);
            fix_u8(&mut pixels[7].data[2], get_bit(bytes[2], 7), rng);
        }
    }
fn decode(
source: &RgbImage,
payload: &mut [u8],
len: usize,
_mode: RgbMode)
{
assert!(len <= payload.len());
let exlen = len + (len % 3);
// iterate over the pixels
for (index, pixels) in source.pixels()
// grab them in chunks of 8
.chunks(8).into_iter()
// make them vectors
.map(|ch| ch.collect::<Vec<_>>())
.take(exlen / 3) // exlen is always exactly divisible by 3
.enumerate()
{
if pixels.len() != 8
{
return;
}
let mut byte = 0u8;
byte = set_bit(byte, 0, pixels[0].data[0] % 2 == 1);
byte = set_bit(byte, 1, pixels[0].data[1] % 2 == 1);
byte = set_bit(byte, 2, pixels[0].data[2] % 2 == 1);
byte = set_bit(byte, 3, pixels[1].data[0] % 2 == 1);
byte = set_bit(byte, 4, pixels[1].data[1] % 2 == 1);
byte = set_bit(byte, 5, pixels[1].data[2] % 2 == 1);
byte = set_bit(byte, 6, pixels[2].data[0] % 2 == 1);
byte = set_bit(byte, 7, pixels[2].data[1] % 2 == 1);
if index * 3 < len
{
payload[index * 3] = byte;
}
byte = set_bit(byte, 0, pixels[2].data[2] % 2 == 1);
byte = set_bit(byte, 1, pixels[3].data[0] % 2 == 1);
byte = set_bit(byte, 2, pixels[3].data[1] % 2 == 1);
byte = set_bit(byte, 3, pixels[3].data[2] % 2 == 1);
byte = set_bit(byte, 4, pixels[4].data[0] % 2 == 1);
byte = set_bit(byte, 5, pixels[4].data[1] % 2 == 1);
byte = set_bit(byte, 6, pixels[4].data[2] % 2 == 1);
byte = set_bit(byte, 7, pixels[5].data[0] % 2 == 1);
if index * 3 + 1 < len
{
payload[index * 3 + 1] = byte;
}
byte = set_bit(byte, 0, pixels[5].data[1] % 2 == 1);
byte = set_bit(byte, 1, pixels[5].data[2] % 2 == 1);
byte = set_bit(byte, 2, pixels[6].data[0] % 2 == 1);
byte = set_bit(byte, 3, pixels[6].data[1] % 2 == 1);
byte = set_bit(byte, 4, pixels[6].data[2] % 2 == 1);
byte = set_bit(byte, 5, pixels[7].data[0] % 2 == 1);
byte = set_bit(byte, 6, pixels[7].data[1] % 2 == 1);
byte = set_bit(byte, 7, pixels[7].data[2] % 2 == 1);
if index * 3 + 2 < len
{
payload[index * 3 + 2] = byte;
}
}
}
    /// Guess at the payload capacity (in bytes) of `source` under `mode`.
    ///
    /// NOTE(review): `encode` stores 3 bytes per 8 pixels (3/8 byte per
    /// pixel), so `pixels / 3` under-reports the true capacity —
    /// presumably a deliberately conservative estimate; confirm against
    /// callers before changing.
    fn estimate(
        source: &RgbImage,
        mode: RgbMode) -> Option<usize>
    {
        Some(match mode
        {
            RgbMode::All =>
                source.width() as usize * source.height() as usize / 3
        })
    }
}
/// Encoding mode selector; only one mode exists today.
#[derive(Copy, Clone)]
pub enum RgbMode
{
    // Use every pixel of the image.
    All,
}
use std::default::Default;
// `All` is the only mode, so it is the natural default.
impl Default for RgbMode
{
    fn default() -> RgbMode
    {
        RgbMode::All
    }
}
use std::str::FromStr;
/// Parses the CLI mode string; only "all" is recognised.
impl FromStr for RgbMode
{
    type Err = ();
    fn from_str(s: &str) -> Result<RgbMode, ()>
    {
        // Exactly one accepted spelling; everything else is an error.
        match s
        {
            "all" => Ok(RgbMode::All),
            _ => Err(()),
        }
    }
}
#[cfg(test)]
mod test
{
    use image::{ImageBuffer, Rgb};
    use rand::StdRng;
    use codec::Codec;
    use super::*;
    // Round-trip: 30 bytes through a 30x8 image (240 pixels = 10 full
    // 8-pixel groups = exactly 30 bytes of capacity).
    #[test]
    fn all()
    {
        let mut image = ImageBuffer::from_pixel(
            30,
            8,
            Rgb
            {
                data: [127u8; 3],
            });
        let payload = vec![
            1,2, 3, 4, 5, 6, 7, 8, 9, 10,
            11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
            21, 22, 23, 24, 25, 26, 27, 28, 29, 30
        ];
        let mut buf = vec![0; 30];
        let rng = StdRng::new().unwrap();
        RgbCodec::encode(&mut image, &payload, RgbMode::All, rng);
        RgbCodec::decode(&image, &mut buf, 30, RgbMode::All);
        assert_eq!(payload, buf);
    }
}
| true |
2c57a05c7cfa7e8a66b402bf9a350f94387afaab
|
Rust
|
kulicuu/Peregrine
|
/src/bin/VulkanoPeregrine/utils.rs
|
UTF-8
| 6,612 | 2.859375 | 3 |
[] |
no_license
|
use vulkano::buffer::{BufferUsage, CpuAccessibleBuffer, ImmutableBuffer};
use vulkano::device::{Device, DeviceExtensions};
use std::sync::Arc;
use std::str::FromStr;
use std::io::prelude::*;
use std::path::Path;
/// Parses one `u32` per line of `input`, skipping lines that fail to
/// parse (each failure is reported on stdout, matching the old
/// behaviour) and returning the successfully parsed values in order.
pub fn process_str_ints(input : &str) -> Vec<u32> {
    input
        .lines()
        .filter_map(|line| match u32::from_str(line) {
            Ok(value) => Some(value),
            Err(_) => {
                // Same diagnostic as before; the offending line is skipped.
                println!("error on index parse with");
                None
            }
        })
        .collect()
}
/// Splits `input` on single spaces and parses exactly three `f64`s.
///
/// Returns `None` when there are not exactly three fields or when any
/// field fails to parse. (Previously the middle field's parse result was
/// never checked — the `is_ok` test for the third field was duplicated —
/// so a bad middle value panicked on `unwrap`.)
pub fn find_three_floats(input : &str) -> Option<Vec<f64>> {
    let fields: Vec<&str> = input.split(" ").collect();
    if fields.len() != 3 {
        return None;
    }
    let a = f64::from_str(fields[0]).ok()?;
    let b = f64::from_str(fields[1]).ok()?;
    let c = f64::from_str(fields[2]).ok()?;
    Some(vec![a, b, c])
}
pub fn process_str_floats(input: &str) -> Vec<Vec<f64>> {
let start = String::from(input);
let mut lines = start.lines();
let mut condition = true;
let mut ret_vec : Vec<Vec<f64>> = Vec::new();
while condition == true {
let cursor = lines.next();
if cursor == None {
condition = false;
} else {
// println!("The line: {:?}", cursor.unwrap());
let x200 = find_three_floats(&cursor.unwrap());
if x200 != None {
ret_vec.push(x200.unwrap());
}
}
}
ret_vec
}
// Position-only vertex attribute for the vulkano pipeline.
#[derive(Default, Copy, Clone)]
pub struct Vertex {
    pub position: (f32, f32, f32)
}
vulkano::impl_vertex!(Vertex, position);
// Per-vertex normal attribute.
#[derive(Default, Copy, Clone)]
pub struct Normal {
    pub normal: (f32, f32, f32)
}
vulkano::impl_vertex!(Normal, normal);
// GPU-ready buffers for one mesh: flat f32 positions/normals + indices.
pub struct Package {
    pub vertex_buffer: std::sync::Arc<CpuAccessibleBuffer::<[f32]>>,
    pub normals_buffer: std::sync::Arc<CpuAccessibleBuffer::<[f32]>>,
    pub index_buffer: std::sync::Arc<CpuAccessibleBuffer::<[u32]>>
}
// April-28: Can we start one process that loads terrain, keep it
// alive while we restart another compiled process separate, then share
// data on the heap via an <Arc> ?
// That would be a great way to have terrain already loaded and waiting.
// Not such a bad development line, as although it doesn't directly address
// the simulation requirements, it does support development velocity, so offers
// perhaps the best long-term utility on immediate work.
// Anyway, there is something slightly amiss in our terrain generation algo, because
// there are degenerate triangles.
// I don't want the terrain build process to bottleneck the main development process, as it takes quite a bit of time.
// Maybe this could be a separate process, which sends the data over networked.
// Begs the question of how to transmit the data over network. Loading the data
// into a CpuAccessibleBuffer is a non-starter as those are not available to transfer between processes, or they may be? Can two applications share a Vulkan instance in some way I wonder. This could be a research angle.
// Also, Todo, there were some degenerate triangles in this generated mesh, will need to fix the algo.
/// Loads the hand-rolled terrain mesh text format (sections delimited by
/// the literal headers "Vertices:", "Indices:" and "Normals:") and
/// uploads positions, normals and indices into CPU-accessible vulkano
/// buffers. Panics if the file is missing or a section header is absent.
pub fn build_terrain(device: Arc<Device>) -> (Arc<CpuAccessibleBuffer::<[Vertex]>>, Arc<CpuAccessibleBuffer::<[Normal]>>, Arc<CpuAccessibleBuffer::<[u32]>>) {
    let mut terrain_f = std::fs::File::open("models_materials/terrain_mesh_003.txt").unwrap();
    let mut terrain_buffer = String::new();
    terrain_f.read_to_string(&mut terrain_buffer).unwrap();
    // The poor naming was a stub to move quickly, can refactor
    // with integration of some of these lines, and improve naming.
    let x99 : Vec<&str> = terrain_buffer.split("Vertices:").collect();
    let x100 = String::from(x99[1]);
    let x101 : Vec<&str> = x100.split("Indices:").collect();
    let x102 = String::from(x101[0]); // This should just mostly be vertices with maybe a blank line and the title line "Vertices:"
    let x103 = String::from(x101[1]); // This should have indices and normals
    let x104 : Vec<&str> = x103.split("Normals:").collect();
    let x160 = String::from(x104[0]); // This should be indices
    let x105 = String::from(x104[1]); // This should be normals
    let x106 = process_str_floats(&x102); // This should be a vector that we can turn into a positions buffer vertex_buffer
    // Repack parsed f64 triples as f32 Vertex attributes.
    let mut x200 : Vec<Vertex> = Vec::new();
    for (idx, item) in x106.iter().enumerate() {
        x200.push( Vertex { position: (item[0] as f32, item[1] as f32, item[2] as f32)} );
    }
    let vertex_buffer_terrain = CpuAccessibleBuffer::from_iter(device.clone(), BufferUsage::all(), false, x200.iter().cloned()).unwrap();
    // Same repacking for normals.
    let x107 = process_str_floats(&x105);
    let mut x300 : Vec<Normal> = Vec::new();
    for (idx, item) in x107.iter().enumerate() {
        x300.push( Normal { normal: (item[0] as f32, item[1] as f32, item[2] as f32)} );
    }
    let normals_buffer_terrain = CpuAccessibleBuffer::from_iter(device.clone(), BufferUsage::all(), false, x300.iter().cloned()).unwrap();
    // Indices are one u32 per line.
    let x161 = process_str_ints(&x160);
    let index_buffer_terrain = CpuAccessibleBuffer::from_iter(device.clone(), BufferUsage::all(), false, x161.iter().cloned()).unwrap();
    (vertex_buffer_terrain, normals_buffer_terrain, index_buffer_terrain)
}
/// Loads `lear_300.obj` via tobj and uploads each mesh's positions,
/// normals and indices into CPU-accessible vulkano buffers, returning
/// one `Package` per mesh. Panics if the OBJ cannot be loaded.
pub fn build_lear(device: Arc<Device>) -> Vec<Package> {
    let loaded = tobj::load_obj(&Path::new("models_materials/lear_300.obj"));
    // Materials are not used here; only the mesh geometry matters.
    let (models, _materials) = loaded.unwrap();
    let mut packages: Vec<Package> = Vec::new();
    for model in &models {
        let mesh = &model.mesh;
        packages.push(Package {
            vertex_buffer: CpuAccessibleBuffer::from_iter(device.clone(), BufferUsage::all(), false, mesh.positions.iter().cloned()).unwrap(),
            normals_buffer: CpuAccessibleBuffer::from_iter(device.clone(), BufferUsage::all(), false, mesh.normals.iter().cloned()).unwrap(),
            index_buffer: CpuAccessibleBuffer::from_iter(device.clone(), BufferUsage::all(), false, mesh.indices.iter().cloned()).unwrap()
        });
    }
    packages
}
| true |
ebfeabc1c2a26a2a8b8400c3c0d36c55dc042ec0
|
Rust
|
ajruckman/libvirt-grpc-api
|
/src/thread_safe_virt_conn.rs
|
UTF-8
| 488 | 2.578125 | 3 |
[] |
no_license
|
use std::sync::{Mutex, MutexGuard};
use virt::connect::Connect;
/// A `virt::Connect` behind a mutex so one libvirt connection can be
/// shared between threads.
pub struct ThreadSafeVirtConn {
    conn: Mutex<Connect>,
}
impl ThreadSafeVirtConn {
    /// Opens a libvirt connection for `uri`.
    ///
    /// Panics if the connection cannot be opened.
    pub fn new(uri: &str) -> ThreadSafeVirtConn {
        ThreadSafeVirtConn {
            conn: Mutex::new(Connect::open(uri).unwrap()),
        }
    }
    /// Locks and returns the underlying connection.
    ///
    /// Panics if the mutex is poisoned.
    pub fn lock(&self) -> MutexGuard<'_, Connect> {
        self.conn.lock().unwrap()
    }
}
// SAFETY(review): `Connect` is presumably not auto-Send/Sync because it
// wraps a raw libvirt handle. These impls assert that all access goes
// through the mutex above — confirm libvirt permits using a connection
// from a thread other than the one that opened it before relying on this.
unsafe impl Send for ThreadSafeVirtConn {}
unsafe impl Sync for ThreadSafeVirtConn {}
| true |
8bfbb60dd4fd86439ed6765abefe0d41f16615b8
|
Rust
|
wasmbet/wasmbet-contract
|
/tests/integration.rs
|
UTF-8
| 4,752 | 2.765625 | 3 |
[
"Apache-2.0"
] |
permissive
|
//! This integration test tries to run and call the generated wasm.
//! It depends on a Wasm build being available, which you can create with `cargo wasm`.
//! Then running `cargo integration-test` will validate we can properly call into that generated Wasm.
//!
//! You can easily convert unit tests to integration tests.
//! 1. First copy them over verbatim,
//! 2. Then change
//! let mut deps = mock_dependencies(20, &[]);
//! to
//! let mut deps = mock_instance(WASM, &[]);
//! 3. If you access raw storage, where ever you see something like:
//! deps.storage.get(CONFIG_KEY).expect("no data stored");
//! replace it with:
//! deps.with_storage(|store| {
//! let data = store.get(CONFIG_KEY).expect("no data stored");
//! //...
//! });
//! 4. Anywhere you see query(&deps, ...) you must replace it with query(&mut deps, ...)
use cosmwasm_std::{ HumanAddr,coins, from_binary, HandleResponse, HandleResult, InitResponse, StdError,Uint128};
use cosmwasm_vm::testing::{handle, init, mock_env, mock_instance, query};
use wasmbet_contract_dice::msg::{RoomStateResponse,StateResponse, HandleMsg, InitMsg, QueryMsg};
// This line will test the output of cargo wasm
static WASM: &[u8] = include_bytes!("../target/wasm32-unknown-unknown/release/wasmbet_contract_dice.wasm");
// You can uncomment this line instead to test productionified build from rust-optimizer
// static WASM: &[u8] = include_bytes!("../contract.wasm");
/// End-to-end smoke test against the compiled Wasm: init the contract,
/// query the config, play two dice rooms and query their state.
#[test]
fn proper_initialization() {
    let mut deps = mock_instance(WASM, &[]);
    let seed = String::from("Hello, world!");
    let fee = 15000 as u64;
    let msg = InitMsg {
        seed: seed,
        min_credit: Uint128::from(1000000u128),
        max_credit: Uint128::from(10000000u128),
        house_fee: fee,
    };
    let env = mock_env("creator", &coins(10000000000000000, "ukrw"));
    // we can just call .unwrap() to assert this was a success
    let res: InitResponse = init(&mut deps, env, msg).unwrap();
    assert_eq!(res.messages.len(), 0);
    // it worked, let's query the state
    let res = query(&mut deps, QueryMsg::Getstate{}).unwrap();
    let value: StateResponse = from_binary(&res).unwrap();
    //assert_eq!(value.contract_owner, "creator" );
    //assert_eq!(value.pot_pool, 100000000000000 );
    //assert_eq!(value.seed, "Hello, world!" );
    assert_eq!(value.min_credit, 1000000 );
    assert_eq!(value.max_credit, 10000000 );
    //assert_eq!(value.house_fee, 1 );
    // Two players each bet 2_000_000 ukrw at a fixed block height/time so
    // the derived lucky number is deterministic.
    let mut env = mock_env("creator", &coins(2000000, "ukrw"));
    env.block.height = 2;
    env.block.time = 1232342344153;
    let env2 = mock_env("creator2", &coins(2000000, "ukrw"));
    let seed = String::from("Hello, world!123312");
    let seed2 = String::from("Hello, world!2");
    let under = String::from("under");
    // NOTE(review): despite the name, `over` is also "under" — presumably
    // a copy/paste slip in the fixture; confirm intended position.
    let over = String::from("under");
    let ruler = HandleMsg::Ruler {
        phrase: seed,
        prediction_number:50,
        position: under,
        bet_amount: Uint128::from(2000000u128),
    };
    let ruler2 = HandleMsg::Ruler {
        phrase: seed2,
        prediction_number:50,
        position: over,
        bet_amount: Uint128::from(2000000u128),
    };
    let try_ruler_response: HandleResponse = handle(&mut deps, env, ruler).unwrap();
    assert_eq!(try_ruler_response.messages.len(), 0);
    let try_ruler_response2: HandleResponse = handle(&mut deps, env2, ruler2).unwrap();
    assert_eq!(try_ruler_response2.messages.len(), 0);
    // it worked, let's query the state
    let addres = HumanAddr("creator".to_string());
    let addres2 = HumanAddr("creator2".to_string());
    let res = query(&mut deps, QueryMsg::GetMyRoomState{address:addres}).unwrap();
    let value: RoomStateResponse = from_binary(&res).unwrap();
    let res2 = query(&mut deps, QueryMsg::GetMyRoomState{address:addres2}).unwrap();
    let value2: RoomStateResponse = from_binary(&res2).unwrap();
    let res3 = query(&mut deps, QueryMsg::Getstate{}).unwrap();
    let value3: StateResponse = from_binary(&res3).unwrap();
    //assert_eq!(value.contract_owner, "creator" );
    assert_eq!(value.lucky_number, 99999998079400 );
    //assert_eq!(value2.results, 99999998079400 );
    //assert_eq!(value3.pot_pool, 99999998079400 );
    //assert_eq!(value.seed, "Hello, world!" );
    //assert_eq!(value3.min_credit, 1000000 );
    //assert_eq!(value3.max_credit, 10000000 );
    //assert_eq!(value3.house_fee, 1 );
    //assert_eq!(value.prediction_number, 50 );
    //assert_eq!(value.position, "under" );
    //assert_eq!(value.bet_amount, 2000000 );
    //assert_eq!(value2.prediction_number, 50 );
    //assert_eq!(value2.position, "over" );
    //assert_eq!(value2.bet_amount, 2000000 );
}
| true |
ae8d1ed9c8e1fa1b1131b02ec283f19557f16712
|
Rust
|
Emulator000/telegram-bot
|
/raw/src/requests/forward_message.rs
|
UTF-8
| 1,544 | 3.15625 | 3 |
[
"MIT"
] |
permissive
|
use std::ops::Not;
use requests::*;
use types::*;
/// Use this method to forward messages of any kind.
/// Use this method to forward messages of any kind.
#[derive(Debug, Clone, PartialEq, PartialOrd, Serialize)]
#[must_use = "requests do nothing unless sent"]
pub struct ForwardMessage {
    chat_id: ChatRef,
    from_chat_id: ChatRef,
    // Omitted from the JSON payload when false (the API default).
    #[serde(skip_serializing_if = "Not::not")]
    disable_notification: bool,
    message_id: MessageId,
}
impl Request for ForwardMessage {
    type Type = JsonRequestType<Self>;
    type Response = JsonIdResponse<Message>;
    /// Serialises this request as the Bot API `forwardMessage` method.
    fn serialize(&self) -> Result<HttpRequest, Error> {
        Self::Type::serialize(RequestUrl::method("forwardMessage"), self)
    }
}
impl ForwardMessage {
    /// Forwards `message` from chat `from` to chat `to`, with
    /// notifications enabled by default.
    pub fn new<M, F, T>(message: M, from: F, to: T) -> Self
    where
        M: ToMessageId,
        F: ToChatRef,
        T: ToChatRef,
    {
        ForwardMessage {
            chat_id: to.to_chat_ref(),
            from_chat_id: from.to_chat_ref(),
            disable_notification: false,
            message_id: message.to_message_id(),
        }
    }
    /// Sends the forwarded message silently (no recipient notification).
    pub fn disable_notification(&mut self) -> &mut Self {
        self.disable_notification = true;
        self
    }
}
/// Forward message.
pub trait CanForwardMessage {
    /// Builds a `ForwardMessage` sending this message to chat `to`.
    fn forward<T>(&self, to: T) -> ForwardMessage
    where
        T: ToChatRef;
}
// Blanket impl: anything that knows its message id and source chat can
// be forwarded.
impl<M> CanForwardMessage for M
where
    M: ToMessageId + ToSourceChat,
{
    fn forward<T>(&self, to: T) -> ForwardMessage
    where
        T: ToChatRef,
    {
        ForwardMessage::new(self.to_message_id(), self.to_source_chat(), to)
    }
}
| true |
d5662113de65a5fc04153d5b15b61f9204d9a302
|
Rust
|
ruza-net/eru
|
/src/components/general/main_layout.rs
|
UTF-8
| 9,164 | 2.53125 | 3 |
[] |
no_license
|
use crate::styles::container::PADDING;
use crate::components::{
opetope,
app::{ Error, GlobalMessage, Data },
pop_up::{ self, PopUp, Form },
general::Sidebar,
};
/// Messages emitted by the main layout's pop-up forms.
#[derive(Debug, Clone)]
pub enum Message {
    // Name text field edited.
    UpdatedName(String),
    // First wrap text field edited.
    UpdatedFirstWrap(String),
    // Second wrap text field edited (used by the split form).
    UpdatedSecondWrap(String),
    // Cancel the current pop-up.
    ExitPopUp,
    // Confirm the current pop-up.
    ConfirmPopUp,
}
// Number of ticks an error banner stays visible (see `Layout::tick`).
const ERR_DURATION: u64 = 5;
/// Text-input widget state together with its current contents.
#[derive(Default, Debug, Clone)]
pub struct NameSlot {
    pub state: iced::text_input::State,
    pub value: String,
}
impl From<String> for NameSlot {
    /// Pre-fills the slot with `value` and fresh widget state.
    fn from(value: String) -> Self {
        Self {
            value,
            ..fill![]
        }
    }
}
/// Which pop-up workflow (if any) the layout is currently running.
#[derive(Debug, Clone)]
pub enum State {
    // No pop-up; the diagram is interactive.
    Default,
    // Renaming cells one at a time: `remaining` still to edit (the last
    // slot is the one shown), `renamed` collected so far.
    Rename {
        pop_up: pop_up::State,
        remaining: Vec<NameSlot>,
        renamed: Vec<String>,
    },
    // Collecting a group name and wrap name for an extrusion.
    ProvideExtrude {
        pop_up: pop_up::State,
        name: NameSlot,
        wrap: NameSlot,
    },
    // Collecting end/wrap names while sprouting: `ends` are the cells
    // being sprouted, `wraps` the wrap names confirmed so far (the cell
    // at index `wraps.len()` is the one currently prompted for).
    ProvideSprout {
        pop_up: pop_up::State,
        last_end: NameSlot,
        last_wrap: NameSlot,
        wraps: Vec<String>,
        ends: Vec<(opetope::ViewIndex, opetope::Cell<Data>, Option<String>)>,
    },
    // Collecting a group name plus top/bottom wrap names for a split.
    ProvideSplit {
        pop_up: pop_up::State,
        name: NameSlot,
        wrap_top: NameSlot,
        wrap_bot: NameSlot,
    },
    // Collecting wrap names for passing groups, one group at a time
    // (the last element of `groups_left` is prompted for).
    ProvidePass {
        pop_up: pop_up::State,
        last: NameSlot,
        wraps: Vec<opetope::MetaCell<Data>>,
        groups_left: Vec<(opetope::Face, opetope::MetaCell<Data>)>,
    },
}
impl Default for State {
    /// No pop-up open.
    fn default() -> Self {
        Self::Default
    }
}
impl State {
    /// Fresh pop-up state for an extrude operation.
    pub fn extrude() -> Self {
        Self::ProvideExtrude { pop_up: fill![], name: fill![], wrap: fill![] }
    }
    /// Fresh pop-up state for a sprout operation.
    pub fn sprout() -> Self {
        Self::ProvideSprout { pop_up: fill![], last_end: fill![], last_wrap: fill![], ends: fill![], wraps: fill![] }
    }
    /// Fresh pop-up state for a split operation.
    pub fn split() -> Self {
        Self::ProvideSplit { pop_up: fill![], name: fill![], wrap_top: fill![], wrap_bot: fill![] }
    }
    /// Fresh pop-up state for passing the given groups.
    pub fn pass(groups_left: Vec<(opetope::Face, opetope::MetaCell<Data>)>) -> Self {
        Self::ProvidePass { pop_up: fill![], last: fill![], wraps: fill![], groups_left }
    }
    /// Rename workflow over `names`, one pre-filled slot per name.
    pub fn rename(names: Vec<String>) -> Self {
        let remaining = names.into_iter().map(Into::into).collect();
        Self::Rename { pop_up: fill![], remaining, renamed: vec![] }
    }
    /// Swaps `self` for `State::Default` and returns the previous state.
    pub fn take(&mut self) -> Self {
        let mut ret = None;
        take_mut::take(self, |this| {
            ret = Some(this);
            Self::default()
        });
        ret.unwrap()
    }
}
/// Top-level UI state: sidebar, current pop-up workflow, and the
/// transient error banner.
#[derive(Default)]
pub struct Layout {
    // Ticks left before the error banner disappears.
    error_countdown: u64,
    error_pop_up: pop_up::State,
    // Most recent error; shown while the countdown is positive.
    error: Option<Error>,
    pub state: State,
    sidebar: Sidebar,
}
impl Layout {
    /// Shows `e` in the error banner for the next `ERR_DURATION` ticks.
    pub fn error(&mut self, e: Error) {
        self.error = Some(e);
        self.error_countdown = ERR_DURATION;
    }
    /// Advances the error-banner timer by one tick.
    pub fn tick(&mut self) {
        if self.error_countdown > 0 {
            self.error_countdown -= 1;
        }
    }
    /// Builds the whole UI: sidebar + opetope diagram, plus the error
    /// banner and/or the pop-up form matching the current `State`.
    pub fn view<'app>(&'app mut self, opetope: &'app mut opetope::Diagram<Data>) -> iced::Element<'app, GlobalMessage> {
        // The diagram is only interactive while no pop-up is open.
        let interact =
            match self.state {
                State::Default => crate::model::Render::Interactive,
                _ => crate::model::Render::Static,
            };
        let sidebar = self.sidebar.view(interact).map(GlobalMessage::Sidebar);// TODO: Max height or portion
        let opetope = opetope.view(interact).map(GlobalMessage::Opetope);
        let opetope =
            iced::Container::new(opetope)
                .padding(PADDING);
        let mut main =
            iced::Row::new()
                .push(sidebar)
                .push(opetope)
                .into();
        // Overlay the error banner while its countdown is running.
        if self.error_countdown > 0 {
            let err_text = self.error.as_ref().unwrap().to_string();
            let err_msg = Form::error(err_text);
            main =
                PopUp::new(main, err_msg)
                    .location(pop_up::Location::Top)
                    .view(&mut self.error_pop_up)
        }
        match &mut self.state {
            State::Default =>
                main,
            // Rename: one input, bound to the last remaining slot.
            State::Rename { pop_up, remaining, .. } =>
                PopUp::new(
                    main,
                    Form::new(GlobalMessage::Layout(Message::ExitPopUp), GlobalMessage::Layout(Message::ConfirmPopUp))
                        .push({
                            let last = remaining.last_mut().unwrap();
                            iced::TextInput::new(
                                &mut last.state,
                                "Cell name",
                                &last.value,
                                |s| GlobalMessage::Layout(Message::UpdatedName(s)),
                            ).padding(PADDING)
                        }),
                ).view(pop_up),
            // Extrude: group name + wrap name.
            State::ProvideExtrude { pop_up, name, wrap } =>
                PopUp::new(
                    main,
                    Form::new(GlobalMessage::Layout(Message::ExitPopUp), GlobalMessage::Layout(Message::ConfirmPopUp))
                        .push(
                            iced::TextInput::new(
                                &mut name.state,
                                "Group name",
                                &name.value,
                                |s| GlobalMessage::Layout(Message::UpdatedName(s)),
                            ).padding(PADDING)
                        )
                        .push(
                            iced::TextInput::new(
                                &mut wrap.state,
                                "Group wrap",
                                &wrap.value,
                                |s| GlobalMessage::Layout(Message::UpdatedFirstWrap(s)),
                            ).padding(PADDING)
                        ),
                ).view(pop_up),
            // Sprout: name for the cell currently being sprouted
            // (`ends[wraps.len()]`) plus its wrap name.
            State::ProvideSprout { pop_up, last_end, last_wrap, wraps, ends } =>
                PopUp::new(
                    main,
                    Form::new(GlobalMessage::Layout(Message::ExitPopUp), GlobalMessage::Layout(Message::ConfirmPopUp))
                        .push(
                            iced::TextInput::new(
                                &mut last_end.state,
                                &format!["{} sprout's name", ends[wraps.len()].1.data().inner()],
                                &last_end.value,
                                |s| GlobalMessage::Layout(Message::UpdatedName(s)),
                            ).padding(PADDING)
                        )
                        .push(
                            iced::TextInput::new(
                                &mut last_wrap.state,
                                "Wrap's name",
                                &last_wrap.value,
                                |s| GlobalMessage::Layout(Message::UpdatedFirstWrap(s)),
                            ).padding(PADDING)
                        ),
                ).view(pop_up),
            // Split: group name + top and bottom wrap names.
            State::ProvideSplit { pop_up, name, wrap_top, wrap_bot } =>
                PopUp::new(
                    main,
                    Form::new(GlobalMessage::Layout(Message::ExitPopUp), GlobalMessage::Layout(Message::ConfirmPopUp))
                        .push(
                            iced::TextInput::new(
                                &mut name.state,
                                "Group name",
                                &name.value,
                                |s| GlobalMessage::Layout(Message::UpdatedName(s)),
                            ).padding(PADDING)
                        )
                        .push(
                            iced::TextInput::new(
                                &mut wrap_top.state,
                                "Top part wrap",
                                &wrap_top.value,
                                |s| GlobalMessage::Layout(Message::UpdatedFirstWrap(s)),
                            ).padding(PADDING)
                        )
                        .push(
                            iced::TextInput::new(
                                &mut wrap_bot.state,
                                "Bottom part wrap",
                                &wrap_bot.value,
                                |s| GlobalMessage::Layout(Message::UpdatedSecondWrap(s)),
                            ).padding(PADDING)
                        ),
                ).view(pop_up),
            // Pass: wrap name for the last remaining group.
            State::ProvidePass { pop_up, groups_left, last, .. } =>
                PopUp::new(
                    main,
                    Form::new(GlobalMessage::Layout(Message::ExitPopUp), GlobalMessage::Layout(Message::ConfirmPopUp))
                        .push(
                            iced::TextInput::new(
                                &mut last.state,
                                &format!["{} wrap's name", groups_left.last().unwrap().1.data().inner()],
                                &last.value,
                                |s| GlobalMessage::Layout(Message::UpdatedFirstWrap(s)),
                            ).padding(PADDING)
                        ),
                ).view(pop_up),
        }
    }
}
| true |
3847def25254f011edbd3bfbf3743e8e9175100e
|
Rust
|
kohbis/leetcode
|
/algorithms/1403.minimum-subsequence-in-non-increasing-order/solution.rs
|
UTF-8
| 444 | 2.75 | 3 |
[] |
no_license
|
impl Solution {
    /// LeetCode 1403: the shortest, largest-sum subsequence whose sum is
    /// strictly greater than the sum of the remaining elements, returned
    /// in non-increasing order.
    ///
    /// Greedy: sort descending once, then take elements until the taken
    /// sum strictly exceeds the untaken sum. This is O(n log n) instead
    /// of the previous O(n^2) repeated `Vec::remove(0)`, and an empty
    /// input now yields an empty result instead of panicking on `nums[0]`.
    pub fn min_subsequence(nums: Vec<i32>) -> Vec<i32> {
        let mut sorted = nums;
        sorted.sort_unstable_by(|a, b| b.cmp(a));
        let mut remaining: i32 = sorted.iter().sum();
        let mut taken = 0;
        let mut sub = Vec::new();
        for x in sorted {
            // Stop once the subsequence strictly outweighs the rest.
            if taken > remaining {
                break;
            }
            sub.push(x);
            taken += x;
            remaining -= x;
        }
        sub
    }
}
| true |
d7677f017cdc09c25c96c240f8f7620f1a9cbb8d
|
Rust
|
xarvic/ui_system
|
/src/renderer/renderer.rs
|
UTF-8
| 8,167 | 2.671875 | 3 |
[] |
no_license
|
use glium::backend::{Facade, Context};
use glium::{Program, DrawParameters, Blend, Frame, Surface, ProgramCreationError};
use std::fs::{read_to_string, File};
use std::rc::Rc;
use crate::renderer::{load_texture, CommandBuffer, Builder};
use image::ImageFormat;
use glium::texture::{texture2d::Texture2d};
use std::path::Path;
use std::io::{BufReader, BufRead};
use std::mem::replace;
use std::fmt::{Formatter, Debug};
use glium::index::PrimitiveType;
use crate::component::Component;
// Which GLSL stage a `#vertex` / `#geometry` / `#fragment` marker opens.
#[derive(Copy, Clone)]
enum ShaderType {
    Vertex,
    Geometry,
    // NOTE(review): typo for "Fragment"; renaming would touch uses
    // outside this block, so it is only flagged here.
    Framgent,
}
/// Failures that can occur while loading and compiling a shader file.
pub enum ShaderError {
    // glium rejected the assembled program.
    CompilationError(ProgramCreationError),
    // The shader source file could not be read.
    ReadingError(std::io::Error),
    // The same stage appeared more than once in one file.
    MultipleSourceError(String),
    // A stage required by the program was absent; message explains which.
    MissingSource(String),
}
impl Debug for ShaderError {
    /// Delegates to the wrapped error's `Debug` where one exists,
    /// otherwise writes the stored message.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        match self{
            ShaderError::CompilationError(error) => {Debug::fmt(error, f)},
            ShaderError::ReadingError(error) => {Debug::fmt(error, f)},
            ShaderError::MultipleSourceError(error) => {writeln!(f, "{}", error)},
            ShaderError::MissingSource(error) => {writeln!(f, "{}", error)},
        }
    }
}
// Conversions so `?` can be used on glium compilation results and on file
// I/O inside the shader-loading functions below.
impl From<ProgramCreationError> for ShaderError {
    fn from(error: ProgramCreationError) -> Self {
        ShaderError::CompilationError(error)
    }
}
impl From<std::io::Error> for ShaderError {
    fn from(error: std::io::Error) -> Self {
        ShaderError::ReadingError(error)
    }
}
/// Reads a combined shader file in which `#vertex`, `#geometry` and
/// `#fragment` marker lines separate the stage sources, and compiles the
/// stages into one glium `Program`.
///
/// Errors if a stage appears twice, if the vertex or fragment stage is
/// missing, if the file cannot be read, or if compilation fails.
/// The geometry stage is optional.
pub fn make_shader_single_file(path: &str, facade: &dyn Facade) -> Result<Program, ShaderError> {
    let reader = BufReader::new(File::open(Path::new(path))?);
    let mut vertex_shader: Option<String> = None;
    let mut geometry_shader: Option<String> = None;
    let mut fragment_shader: Option<String> = None;
    // Closure that files a finished section under its stage slot; it
    // mutably borrows all three Options above, which is why the slots are
    // only read again after the last call to it.
    let mut set_shaders = |
        s_type: ShaderType,
        current_source: String
    | {
        match s_type {
            ShaderType::Vertex => {
                if let Some(_) = vertex_shader {
                    return Err(ShaderError::MultipleSourceError(format!("Multiple Vertex Sources for Shader {}", path)));
                } else {
                    vertex_shader = Some(current_source);
                }
            }
            ShaderType::Geometry => {
                if let Some(_) = geometry_shader {
                    return Err(ShaderError::MultipleSourceError(format!("Multiple Geometry Sources for Shader {}", path)));
                } else {
                    geometry_shader = Some(current_source);
                }
            }
            ShaderType::Framgent => {
                if let Some(_) = fragment_shader {
                    return Err(ShaderError::MultipleSourceError(format!("Multiple Fragment Sources for Shader {}", path)));
                } else {
                    fragment_shader = Some(current_source);
                }
            }
        }
        Ok(())
    };
    // Scan the file line by line, accumulating the current section until the
    // next stage marker is hit.
    let mut current_type: Option<ShaderType> = None;
    let mut current_source = String::new();
    for (_line_number, line) in reader.lines().enumerate() {
        let line = line?;
        let line = line.trim();
        if line.eq("#vertex") || line.eq("#fragment") || line.eq("#geometry") {
            if let Some(type_) = current_type {
                // Close out the previous section; `replace` hands over the
                // buffer and leaves a fresh one for the next section.
                set_shaders(type_, replace(&mut current_source, String::new()))?;
            } else {
                // NOTE(review): any text appearing before the first marker
                // is discarded here — presumably intentional; confirm.
                current_source = String::new();
            }
            current_type = match line {
                "#vertex" => {
                    Some(ShaderType::Vertex)
                }
                "#geometry" => {
                    Some(ShaderType::Geometry)
                }
                "#fragment" => {
                    Some(ShaderType::Framgent)
                }
                _ => {
                    // Guarded by the `if` above, so no other value can occur.
                    unreachable!()
                }
            };
        } else {
            current_source += "\n";
            current_source += line;
        }
    }
    // Flush the final section (files with no marker at all fall through
    // with `current_type == None`).
    if let Some(type_) = current_type {
        set_shaders(type_, current_source)?;
    }
    // Geometry is optional: glium takes an Option<&str> for it.
    let geometry_shader = match geometry_shader {
        Some(ref source) => Some(source.as_str()),
        None => None,
    };
    let vertex_shader = match vertex_shader {
        Some(vs) => vs,
        None => return Err(ShaderError::MissingSource(format!("No Vertex Source for Shader {}", path))),
    };
    let fragment_shader = match fragment_shader {
        Some(fs) => fs,
        None => return Err(ShaderError::MissingSource(format!("No Fragment Source for Shader {}", path))),
    };
    Ok(Program::from_source(facade,
        vertex_shader.as_str(),
        fragment_shader.as_str(),
        geometry_shader)?)
}
/// Owns the compiled shader programs, the glyph atlas texture and the GL
/// context used by `render`.
pub struct Renderer{
    /// Program for rounded/colored rectangles ("shaders/rounded.glsl").
    color_rect_program: Program,
    /// Program for text glyphs ("shaders/glyph.glsl").
    glyph_program: Program,
    /// Program for line/border primitives ("shaders/border.glsl").
    line_program: Program,
    /// Glyph atlas loaded from "data/font2.png".
    font_buffer: Texture2d,
    /// Shared GL context used to create vertex buffers each frame.
    context: Rc<Context>,
}
impl Renderer{
    /// Compiles the three built-in shader programs and loads the glyph
    /// atlas, using hard-coded paths relative to the working directory.
    pub fn new(context: &impl Facade) -> Result<Renderer, ShaderError> {
        let context = context.get_context();
        Ok(Renderer::from(make_shader_single_file("shaders/rounded.glsl", context)?,
            make_shader_single_file("shaders/glyph.glsl", context)?,
            make_shader_single_file("shaders/border.glsl", context)?,
            load_texture("data/font2.png", ImageFormat::Png, context),
            context
        ))
    }
    /// Assembles a Renderer from already-built programs and texture.
    pub fn from(color_rect_program: Program, glyph_program: Program, line_program: Program, glyph_texture: Texture2d, context: &Rc<Context>) -> Renderer{
        Renderer{
            color_rect_program,
            glyph_program,
            line_program,
            font_buffer: glyph_texture,
            context: context.clone()
        }
    }
    /// Draws one frame: clears to white, then draws color rects, glyphs and
    /// lines in that order (later passes paint over earlier ones).
    pub fn render(&mut self, buffer: &mut CommandBuffer, frame: &mut Frame){
        //------------------------------------------General--------------------------------------------
        let mut draw_params = DrawParameters::default();
        draw_params.blend = Blend::alpha_blending();
        draw_params.multisampling = false;
        draw_params.dithering = false;
        // building the index buffer
        let index_buffer = glium::index::NoIndices(glium::index::PrimitiveType::TrianglesList);
        // Framebuffer size in pixels, passed to every shader as `display`
        // so vertices can be mapped to clip space.
        let size = frame.get_dimensions();
        let data: [f32; 2] = [size.0 as f32, size.1 as f32];
        frame.clear_color(1.0, 1.0, 1.0, 1.0);
        //-----------------------------------------ColorRects------------------------------------------
        {
            // NOTE(review): vertex buffers are re-created every frame from
            // the CommandBuffer contents.
            let verticies =
                glium::VertexBuffer::new(&self.context, buffer.color_rects.as_mut()).unwrap();
            let uniforms = uniform! {
                display: data,
            };
            frame.draw(&verticies, &index_buffer, &self.color_rect_program, &uniforms, &draw_params).unwrap();
        }
        //-----------------------------------------Glyphs----------------------------------------------
        {
            let verticies = glium::VertexBuffer::new(&self.context, buffer.glyphs.as_mut()).unwrap();
            let uniforms = uniform! {
                display: data,
                tex: &self.font_buffer,
            };
            frame.draw(&verticies, &index_buffer, &self.glyph_program, &uniforms, &draw_params).expect("Cant render on surface!");
        }
        //----------------------------------------Lines------------------------------------------------
        {
            let verticies = glium::VertexBuffer::new(&self.context, buffer.line_elements.as_mut()).unwrap();
            let uniforms = uniform! {
                display: data,
            };
            // Lines use a strip topology instead of the triangle list above.
            let index_buffer = glium::index::NoIndices(PrimitiveType::LineStrip);
            frame.draw(&verticies, &index_buffer, &self.line_program, &uniforms, &draw_params).expect("Cant render on surface!");
        }
    }
    /// Builds a fresh CommandBuffer from `component` and renders it,
    /// finishing (and thereby presenting) the frame.
    pub fn render_screen(&mut self, component: &mut dyn Component, mut frame: Frame){
        let mut buffer = CommandBuffer::new();
        component.build(Builder::create_with(&mut buffer));
        self.render(&mut buffer, &mut frame);
        frame.finish().unwrap();
    }
}
| true |
8f6bbac0115577873a87248b77bdf9f6c60bb3b0
|
Rust
|
mmmpa/snippets
|
/Rust/file.rs
|
UTF-8
| 1,581 | 3.125 | 3 |
[] |
no_license
|
#[test]
fn test_simple_file() {
    let path = "./tmp/test_simple_file.txt";
    // Ensure the scratch directory exists and fail loudly on setup errors:
    // the original silently dropped these Results, which turned setup
    // failures into confusing assertion failures later on.
    fs::create_dir_all("./tmp").unwrap();
    fs::write(path, "a\nb\nc").unwrap();
    // Raw fs calls issue a syscall per instruction.
    // Prefer BufReader/BufWriter in real code for performance.
    let mut f = OpenOptions::new()
        .read(true)
        .write(true)
        .open(path)
        .unwrap();
    // The file cursor moves after each read/write, so rewind before reading.
    let mut s = String::new();
    f.seek(SeekFrom::Start(0)).unwrap();
    f.read_to_string(&mut s).unwrap();
    assert_eq!(s, "a\nb\nc");
    let mut v = vec![];
    f.seek(SeekFrom::Start(0)).unwrap();
    f.read_to_end(&mut v).unwrap();
    assert_eq!(v, b"a\nb\nc");
    let mut s = String::new();
    f.seek(SeekFrom::Start(0)).unwrap();
    // write_all instead of write: guarantees the whole buffer is written.
    f.write_all(b"z").unwrap();
    f.seek(SeekFrom::Start(0)).unwrap();
    f.read_to_string(&mut s).unwrap();
    // Overwriting one byte at position 0 turns "a\nb\nc" into "z\nb\nc".
    assert_eq!(s, "z\nb\nc");
}
/// Demonstrates BufReader/BufWriter layered over ONE shared File handle.
/// NOTE(review): many Results below (fs::write, seek, read_*) are silently
/// dropped; a failure would surface only as a confusing assert mismatch.
#[test]
fn test_buf_file() {
    let path = "./tmp/test_buf_file.txt";
    fs::write(path, "a\nb\nc");
    let mut f = OpenOptions::new()
        .read(true)
        .write(true)
        .open(path)
        .unwrap();
    // Both wrappers borrow the SAME File, so they share one OS file cursor;
    // a seek through either side repositions the other as well.
    let mut reader = BufReader::new(&f);
    let mut writer = BufWriter::new(&f);
    // position moves after each instruction.
    let mut s = String::new();
    reader.read_to_string(&mut s);
    assert_eq!(s, "a\nb\nc");
    let mut v = vec![];
    // BufReader::seek discards its internal buffer before seeking.
    reader.seek(SeekFrom::Start(0));
    reader.read_to_end(&mut v);
    assert_eq!(v, b"a\nb\nc");
    let mut s = String::new();
    // BufWriter::seek flushes pending output before moving the cursor.
    writer.seek(SeekFrom::Start(0));
    writer.write(b"z").unwrap();
    writer.seek(SeekFrom::Start(0));
    // NOTE(review): this read only works because the writer's second seek
    // flushed "z" and moved the shared cursor to 0 while the reader's
    // buffer was already empty — fragile; verify before relying on it.
    reader.read_to_string(&mut s);
    assert_eq!(s, "z\nb\nc");
}
| true |
21f386139f26a1ccee2a78f643097e025f952b34
|
Rust
|
heavypackets/skr
|
/src/types.rs
|
UTF-8
| 1,007 | 2.6875 | 3 |
[] |
no_license
|
use std::collections::{HashMap, HashSet};
/// Name of a type as written in the source schema.
pub type TypeName = String;
/// Key of a free-form attribute attached to a type.
pub type TypeAttributeKey = String;
/// Value of a free-form attribute attached to a type.
pub type TypeAtrributeValue = String;
/// All attributes of one type, keyed by attribute name.
pub type TypeAttributes = HashMap<TypeAttributeKey, TypeAtrributeValue>;
/// A set of fully-defined types, keyed by name.
pub type TypeCollection = HashMap<TypeName, Type>;
/// A set of type names (no definitions).
pub type Types = HashSet<TypeName>;
/// The variant names of an enum type.
pub type EnumVariants = HashSet<TypeName>;
/// What kind of type a `Type` is.
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
pub enum TypeClass {
    /// One of the built-in primitives (see `NativeType`).
    Native(NativeType),
    /// Refers to another named type — presumably an alias; confirm.
    Derived(TypeName),
    /// Enumeration over a fixed set of variant names.
    Enum(EnumVariants),
    /// A structured type with named subtypes and referenced derived types.
    Composite {
        derived_types: Types,
        subtypes: TypeCollection,
    },
    /// Homogeneous list whose elements have the named type.
    List(TypeName),
    /// Placeholder for a type that has not been resolved yet.
    Unknown,
}
// TODO - Store span of where type was defined
/// A named type definition plus its optional attributes.
#[derive(Serialize, Deserialize, PartialEq, Clone, Debug)]
pub struct Type {
    pub name: TypeName,
    pub attributes: Option<TypeAttributes>,
    pub type_class: TypeClass,
}
/// Built-in primitive types.
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone, Copy)]
pub enum NativeType {
    Number,
    Float,
    String,
    Boolean,
    Date,
    Time,
}
| true |
3c9c044676b3f94baaca9039fff142e95340db79
|
Rust
|
urielvan/leetcode-rust
|
/src/p0001_two_sum.rs
|
UTF-8
| 664 | 3.5625 | 4 |
[
"MIT"
] |
permissive
|
use std::collections::HashMap;
/// LeetCode 1: returns the indices of the two numbers in `nums` that sum to
/// `target`, or an empty vector if no such pair exists.
///
/// Single pass: for each element, look up its complement among the elements
/// already seen. O(n) time, O(n) extra space.
pub fn two_sum(nums: Vec<i32>, target: i32) -> Vec<i32> {
    // i32 is Copy, so owning the keys is free; the original's `&i32` keys
    // borrowed from `nums` for no benefit.
    let mut seen: HashMap<i32, i32> = HashMap::with_capacity(nums.len());
    for (i, &n) in nums.iter().enumerate() {
        // Single lookup replaces the original contains_key + index pair;
        // the stored value is already i32, so no cast is needed.
        if let Some(&j) = seen.get(&(target - n)) {
            return vec![j, i as i32];
        }
        seen.insert(n, i as i32);
    }
    vec![]
}
#[cfg(test)]
mod test {
    use super::two_sum;
    #[test]
    fn test_two_sum() {
        // Pair found: 2 + 7 == 9, at indices 1 and 3.
        assert_eq!(two_sum(vec![1, 2, 3, 7], 9), vec![1, 3]);
        // No pair possible: an empty vector signals "not found".
        assert_eq!(two_sum(vec![1], 9), vec![]);
    }
}
| true |
79b450bcd2e8fe03231977316db7cb9c5415b7f2
|
Rust
|
ritsz/rust-programming
|
/clone_trait.rs
|
UTF-8
| 2,030 | 4.21875 | 4 |
[] |
no_license
|
use std::fmt;
/* Copy can be derived only by data that can implement implicit copy.
    Can't have String, we used &str here.
    That results in another issue then. Since str is a reference and we don't own it,
    we need to tell how long the reference needs to stay around.
    We use 'a to specify that reference stays till the lifetime of the struct.
    This is called lifetime specifier.
*/
/// Demo type for the Copy/Clone discussion: holds only Copy-able data
/// (a borrowed str and an i32), so `Copy` can be derived.
#[derive(Copy)]
struct Data<'a> {
    name : &'a str,
    age : i32,
}
/*
    Lifetime elision allows lifetimes to be calculated implicitly.
    eg:
        1. impl Reader for BufReader instead of impl<'a> Reader for BufReader<'a>
        2. fn get_str(s: &str) -> &str instead of fn get_str<'a>(s: &'a str') -> &'a str;
    Since Data needs to have lifetime 'a, we need to specify impl lifetime as 'a as well
    or else we get :=> use of undeclared lifetime name `'a` for Data
*/
impl<'a> Clone for Data<'a> {
    fn clone(&self) -> Data<'a>
    {
        // Data is Copy, so cloning is just a dereference-copy.
        *self
    }
}
impl<'a> fmt::Debug for Data<'a> {
fn fmt(& self, f: &mut fmt::Formatter) -> fmt::Result
{
write!(f, "Name {} of age {}", self.name, self.age)
}
}
/// Used only by the commented-out lifetime demo at the bottom of `main`.
struct Foo<'a> {
    x: &'a i32,
}
/// Demonstrates cloning an owned String, copying/cloning a `Copy` struct,
/// and (in the commented block) why a lifetime error occurs.
fn main()
{
    // String is heap-owned: clone makes an independent deep copy.
    let string = "Hello World".to_string();
    let string2 = string.clone();
    println!("{} and {}", string , string2 );
    let obj = Data{name: "John", age : 42};
    // Mutating the clone leaves the original untouched, as the output shows.
    let mut cloned_obj = obj.clone();
    cloned_obj.name = "Jane";
    cloned_obj.age = 43;
    println!("{:?} \n{:?}", obj, cloned_obj);
    /* LIFETIMES:
    let x;                    // -+ `x` comes into scope.
                              //  |
    {                         //  |
        let y = &5;           // ---+ `y` comes into scope.
        let f = Foo { x: y }; // ---+ `f` comes into scope.
        x = &f.x;             //  | | This causes an error.
    }                         // ---+ `f` and y go out of scope.
                              //  |
    // Error: borrowed value does not live long enough. Lifetime of f.x is much smaller that x
    println!("{}", x);
    */
}
| true |
39e9487d828b748c4c77205565fb44c4eea9157a
|
Rust
|
cuviper/debug-helper
|
/examples/fake_struct.rs
|
UTF-8
| 535 | 3.109375 | 3 |
[
"MIT"
] |
permissive
|
#[macro_use(impl_debug_for_struct)]
extern crate debug_helper;
use std::fmt::{self, Debug, Formatter};
/// Tuple struct whose Debug output presents the fields under custom names.
pub struct A(pub u8, pub i16, pub f64);
impl Debug for A {
    fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), fmt::Error> {
        // The debug_helper macro expands to a struct-style Debug body that
        // labels the tuple fields f1..f3 (see the expected output in main).
        impl_debug_for_struct!(A, f, self, let .f1 = self.0, let .f2 = self.1, let .f3 = self.2);
    }
}
fn main() {
    let a = A(1, 2, std::f64::consts::PI);
    // Pretty-printed Debug; the trailing comment shows the expected output.
    println!("{:#?}", a);
    /*
        A {
            f1: 1,
            f2: 2,
            f3: 3.141592653589793,
        }
    */
}
| true |
95f31a1cf5c61a776589cc856bea1b3b39af69b8
|
Rust
|
kosslab-kr/rust
|
/src/test/compile-fail/issue-21221-1.rs
|
UTF-8
| 2,510 | 3.203125 | 3 |
[
"MIT",
"Apache-2.0",
"NCSA",
"ISC",
"LicenseRef-scancode-public-domain",
"BSD-3-Clause",
"BSD-2-Clause",
"Unlicense",
"LicenseRef-scancode-other-permissive"
] |
permissive
|
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// NOTE(review): this is a rustc `compile-fail` test — the code below is
// intentionally broken, and the `//~^ ERROR` / `//~| HELP` comments encode
// the diagnostics compiletest expects. Do NOT "fix" the errors, and do not
// insert lines between an annotated item and its `//~` comments: the
// annotations are line-relative.
mod mul1 {
    pub trait Mul {}
}
mod mul2 {
    pub trait Mul {}
}
mod mul3 {
    enum Mul {
        Yes,
        No
    }
}
mod mul4 {
    type Mul = String;
}
mod mul5 {
    struct Mul{
        left_term: u32,
        right_term: u32
    }
}
#[derive(Debug)]
struct Foo;
// When we comment the next line:
//use mul1::Mul;
// BEFORE, we got the following error for the `impl` below:
//   error: use of undeclared trait name `Mul` [E0405]
// AFTER, we get this message:
//   error: trait `Mul` is not in scope.
//   help: ...
//   help: you can import several candidates into scope (`use ...;`):
//   help:   `mul1::Mul`
//   help:   `mul2::Mul`
//   help:   `std::ops::Mul`
impl Mul for Foo {
//~^ ERROR trait `Mul` is not in scope
//~| HELP `mul1::Mul`
//~| HELP `mul2::Mul`
//~| HELP `std::ops::Mul`
//~| HELP you can import several candidates into scope (`use ...;`):
}
// BEFORE, we got:
//   error: use of undeclared type name `Mul` [E0412]
// AFTER, we get:
//   error: type name `Mul` is not in scope. Maybe you meant:
//   help: ...
//   help: you can import several candidates into scope (`use ...;`):
//   help:   `mul1::Mul`
//   help:   `mul2::Mul`
//   help:   `mul3::Mul`
//   help:   `mul4::Mul`
//   help: and 2 other candidates
fn getMul() -> Mul {
//~^ ERROR type name `Mul` is undefined or not in scope
//~| HELP `mul1::Mul`
//~| HELP `mul2::Mul`
//~| HELP `mul3::Mul`
//~| HELP `mul4::Mul`
//~| HELP and 2 other candidates
//~| HELP you can import several candidates into scope (`use ...;`):
}
// Let's also test what happens if the trait doesn't exist:
impl ThisTraitReallyDoesntExistInAnyModuleReally for Foo {
//~^ ERROR trait `ThisTraitReallyDoesntExistInAnyModuleReally` is not in scope
//~| HELP no candidates by the name of `ThisTraitReallyDoesntExistInAnyModuleReally` found
}
// Let's also test what happens if there's just one alternative:
impl Div for Foo {
//~^ ERROR trait `Div` is not in scope
//~| HELP `use std::ops::Div;`
}
fn main() {
    let foo = Foo();
    println!("Hello, {:?}!", foo);
}
| true |
6b7c87659543277166b010d78dd9671ee9d8ae44
|
Rust
|
tauri-apps/tauri
|
/core/tauri-utils/src/resources.rs
|
UTF-8
| 4,188 | 2.9375 | 3 |
[
"Apache-2.0",
"CC0-1.0",
"MIT",
"CC-BY-NC-ND-4.0"
] |
permissive
|
// Copyright 2019-2023 Tauri Programme within The Commons Conservancy
// SPDX-License-Identifier: Apache-2.0
// SPDX-License-Identifier: MIT
use std::path::{Component, Path, PathBuf};
/// Given a path (absolute or relative) to a resource file, returns the
/// relative path from the bundle resources directory where that resource
/// should be stored.
///
/// Prefix and `.` components are dropped; the root becomes `_root_` and
/// every `..` becomes `_up_`.
pub fn resource_relpath(path: &Path) -> PathBuf {
    path
        .components()
        .filter_map(|component| match component {
            // Windows drive prefixes and `.` contribute nothing.
            Component::Prefix(_) | Component::CurDir => None,
            Component::RootDir => Some(std::ffi::OsStr::new("_root_")),
            Component::ParentDir => Some(std::ffi::OsStr::new("_up_")),
            Component::Normal(name) => Some(name),
        })
        .collect()
}
/// Parses the external binaries to bundle, adding the target triple suffix
/// (and `.exe` for Windows targets) to each of them.
pub fn external_binaries(external_binaries: &[String], target_triple: &str) -> Vec<String> {
    // The extension depends only on the target triple, so compute it once
    // instead of re-testing the triple for every binary.
    let extension = if target_triple.contains("windows") {
        ".exe"
    } else {
        ""
    };
    // map/collect replaces the original manual push loop and sizes the
    // output vector from the iterator in one go.
    external_binaries
        .iter()
        .map(|path| format!("{}-{}{}", path, target_triple, extension))
        .collect()
}
/// A helper to iterate through resources.
pub struct ResourcePaths<'a> {
    /// the patterns to iterate.
    pattern_iter: std::slice::Iter<'a, String>,
    /// the glob iterator if the path from the current iteration is a glob pattern.
    glob_iter: Option<glob::Paths>,
    /// the walkdir iterator if the path from the current iteration is a directory.
    walk_iter: Option<walkdir::IntoIter>,
    /// whether the resource paths allows directories or not.
    allow_walk: bool,
    /// the pattern of the current iteration.
    current_pattern: Option<String>,
    /// whether the current pattern is valid or not.
    /// (Set once the current pattern has produced at least one file; used to
    /// report patterns that matched nothing.)
    current_pattern_is_valid: bool,
}
impl<'a> ResourcePaths<'a> {
    /// Creates a new ResourcePaths from a slice of patterns to iterate.
    ///
    /// `allow_walk` controls whether patterns matching directories are
    /// recursed into (true) or reported as an error (false).
    pub fn new(patterns: &'a [String], allow_walk: bool) -> ResourcePaths<'a> {
        ResourcePaths {
            pattern_iter: patterns.iter(),
            glob_iter: None,
            walk_iter: None,
            allow_walk,
            current_pattern: None,
            current_pattern_is_valid: false,
        }
    }
}
impl<'a> Iterator for ResourcePaths<'a> {
    type Item = crate::Result<PathBuf>;
    /// Yields the next matched file path (never a directory).
    ///
    /// State machine per loop pass: drain the active walkdir iterator first,
    /// then the active glob iterator (recursing into directories when
    /// `allow_walk`), and finally advance to the next pattern. A pattern
    /// that produced no file at all yields a `GlobPathNotFound` error.
    fn next(&mut self) -> Option<crate::Result<PathBuf>> {
        loop {
            // 1) Currently walking a directory tree: emit its files.
            if let Some(ref mut walk_entries) = self.walk_iter {
                if let Some(entry) = walk_entries.next() {
                    let entry = match entry {
                        Ok(entry) => entry,
                        Err(error) => return Some(Err(crate::Error::from(error))),
                    };
                    let path = entry.path();
                    // Directories themselves are skipped; only files count.
                    if path.is_dir() {
                        continue;
                    }
                    self.current_pattern_is_valid = true;
                    return Some(Ok(path.to_path_buf()));
                }
            }
            self.walk_iter = None;
            // 2) Currently expanding a glob pattern.
            if let Some(ref mut glob_paths) = self.glob_iter {
                if let Some(glob_result) = glob_paths.next() {
                    let path = match glob_result {
                        Ok(path) => path,
                        Err(error) => return Some(Err(error.into())),
                    };
                    if path.is_dir() {
                        if self.allow_walk {
                            // Recurse: switch to a walkdir iterator and loop.
                            let walk = walkdir::WalkDir::new(path);
                            self.walk_iter = Some(walk.into_iter());
                            continue;
                        } else {
                            return Some(Err(crate::Error::NotAllowedToWalkDir(path)));
                        }
                    }
                    self.current_pattern_is_valid = true;
                    return Some(Ok(path));
                } else if let Some(current_path) = &self.current_pattern {
                    // Glob exhausted without a single match: report it.
                    if !self.current_pattern_is_valid {
                        self.glob_iter = None;
                        return Some(Err(crate::Error::GlobPathNotFound(current_path.clone())));
                    }
                }
            }
            self.glob_iter = None;
            // 3) Move on to the next pattern, if any.
            if let Some(pattern) = self.pattern_iter.next() {
                self.current_pattern = Some(pattern.to_string());
                self.current_pattern_is_valid = false;
                let glob = match glob::glob(pattern) {
                    Ok(glob) => glob,
                    Err(error) => return Some(Err(error.into())),
                };
                self.glob_iter = Some(glob);
                continue;
            }
            // All patterns exhausted.
            return None;
        }
    }
}
| true |
810c14193d78ada07b84a94b6a842dbbf5e56e03
|
Rust
|
korrat/fw
|
/src/projectile/mod.rs
|
UTF-8
| 3,101 | 2.703125 | 3 |
[
"WTFPL"
] |
permissive
|
use crate::config::Config;
use crate::errors::AppError;
use regex::Regex;
use slog::debug;
use slog::Logger;
use std::borrow::ToOwned;
use std::fs;
use std::io;
use std::io::Write;
use std::path::PathBuf;
/// Writes every configured project path into Emacs projectile's bookmark
/// file (`~/.emacs.d/projectile-bookmarks.eld`).
pub fn projectile(maybe_config: Result<Config, AppError>, logger: &Logger) -> Result<(), AppError> {
    let config: Config = maybe_config?;
    // NOTE(review): the clone is needed because `into_iter` consumes the
    // projects while `config` is still used in the closure — cloning just
    // `config.projects` would presumably suffice; confirm.
    let projects_paths: Vec<PathBuf> = config
        .clone()
        .projects
        .into_iter()
        .map(|(_, p)| config.actual_path_to_project(&p, logger))
        .collect();
    let home_dir: PathBuf = dirs::home_dir().ok_or_else(|| AppError::UserError("$HOME not set".to_owned()))?;
    let mut projectile_bookmarks: PathBuf = home_dir.clone();
    projectile_bookmarks.push(".emacs.d");
    projectile_bookmarks.push("projectile-bookmarks.eld");
    // Truncates/creates the bookmark file, then delegates the formatting.
    let writer = fs::File::create(projectile_bookmarks)?;
    persist(logger, &home_dir, writer, projects_paths)
}
/// Serialises `paths` into projectile's bookmark format:
/// `("~/p1/" "~/p2/" )` — each path quoted, slash-terminated, home-dir
/// prefix replaced by `~`.
fn persist<W>(logger: &Logger, home_dir: &PathBuf, writer: W, paths: Vec<PathBuf>) -> Result<(), AppError>
where
    W: io::Write,
{
    // Paths that are not valid UTF-8 are silently dropped by flat_map.
    let paths: Vec<String> = paths.into_iter().flat_map(|path_buf| path_buf.to_str().map(ToOwned::to_owned)).collect();
    let mut buffer = io::BufWriter::new(writer);
    buffer.write_all(b"(")?;
    for path in paths {
        // Fall back to the untranslated path if tilde substitution fails.
        let path = replace_path_with_tilde(&path, home_dir.clone()).unwrap_or(path);
        debug!(logger, "Writing projectile entry"; "entry" => &path);
        buffer.write_all(format!("\"{}/\"", path).as_bytes())?;
        buffer.write_all(b" ")?;
    }
    buffer.write_all(b")")?;
    // NOTE(review): BufWriter only flushes on drop, where errors are
    // swallowed — an explicit `buffer.flush()?` here would surface them.
    Ok(())
}
/// Replaces a leading occurrence of `path_to_replace` in `path` with `~`.
fn replace_path_with_tilde(path: &str, path_to_replace: PathBuf) -> Result<String, AppError> {
    let replace_string = path_to_replace.into_os_string().into_string().expect("path should be a valid string");
    // Anchor at the start so only the prefix is rewritten.
    // NOTE(review): the path is interpolated into the regex unescaped, so a
    // home directory containing regex metacharacters would misbehave —
    // `regex::escape` would make this robust.
    let mut pattern: String = "^".to_string();
    pattern.push_str(&replace_string);
    let regex = Regex::new(&pattern)?;
    Ok(regex.replace_all(path, "~").into_owned())
}
#[cfg(test)]
mod tests {
    use super::*;
    use slog::o;
    use spectral::prelude::*;
    use std::path::Path;
    /// persist() should emit each path quoted, slash-terminated, inside
    /// a single parenthesised list.
    #[test]
    fn test_persists_projectile_config() {
        use std::io::Cursor;
        use std::str;
        let mut buffer = Cursor::new(vec![0; 61]);
        let logger = a_logger();
        let paths = vec![PathBuf::from("/home/mriehl/test"), PathBuf::from("/home/mriehl/go/src/github.com/test2")];
        // Home dir deliberately does not prefix the paths, so no tilde
        // substitution happens in this case.
        let home_dir = Path::new("/home/blubb").to_path_buf();
        persist(&logger, &home_dir, &mut buffer, paths).unwrap();
        assert_that(&str::from_utf8(buffer.get_ref()).unwrap()).is_equal_to("(\"/home/mriehl/test/\" \"/home/mriehl/go/src/github.com/test2/\" )");
    }
    /// Only the LEADING occurrence of the home dir is replaced by `~`.
    #[test]
    fn test_replace_path_with_tilde() {
        let home_dir = Path::new("/home/blubb").to_path_buf();
        let replaced_string = replace_path_with_tilde("/home/blubb/moep/home/blubb/test.txt", home_dir).expect("should succeed");
        assert_that(&replaced_string).is_equal_to("~/moep/home/blubb/test.txt".to_string());
    }
    /// Builds a plain stdout logger for the tests above.
    fn a_logger() -> Logger {
        use slog::Drain;
        let plain = slog_term::PlainSyncDecorator::new(std::io::stdout());
        let drain = slog_term::FullFormat::new(plain).build().fuse();
        Logger::root(drain, o!())
    }
}
| true |
e6489d045c451693057846c4c4ae0f75b06614ad
|
Rust
|
thepowersgang/equation-editor
|
/src/expression.rs
|
UTF-8
| 9,109 | 3.59375 | 4 |
[] |
no_license
|
//!
//! Expression type
//!
/// Operator class shared by all operands chained inside one `ExprNode`.
#[derive(Debug,PartialEq,Eq,Copy,Clone)]
pub enum Op
{
    Equality,
    AddSub, // Note: Subtract is handled with `negated`
    MulDiv,
    ExpRoot, // NOTE: Root doesn't actually exist
}
/// A parsed expression tree node.
#[derive(Debug,Clone,PartialEq)]
pub enum Expression
{
    /// Negate the inner value
    Negative(Box<Expression>),
    /// Group of like operators
    SubNode(ExprNode),
    /// Literal value
    Literal(f32),
    /// Variable name
    Variable(String),
}
/// One operand inside an `ExprNode` chain.
#[derive(Debug,Clone,PartialEq)]
pub struct SubExpression
{
    /// Indicates subtract/divide instead of add/multiply
    pub inverse: bool,
    pub val: Expression,
}
/// Representation of a chained set of expressions with the same precedence (e.g. `a + b - c` or `a / b * c`)
#[derive(Debug,Clone,PartialEq)]
pub struct ExprNode
{
    /// Operator class shared by all operands in `values`.
    pub operation: Op,
    /// The chained operands; Display assumes at least two of them.
    pub values: Vec<SubExpression>,
}
/// Errors produced while lexing/parsing an expression string.
#[derive(Debug)]
pub enum ParseError {
    /// The input contained no expression (e.g. only a comment).
    Empty,
    /// An unexpected token was met (debug-formatted into the String).
    Unexpected(String),
    /// A character sequence that does not form any token.
    BadToken(String),
}
/// Binding strength used to decide parenthesisation; the derived `Ord`
/// follows declaration order, so later variants bind tighter.
#[derive(PartialEq,PartialOrd,Eq,Ord)]
enum Precedence
{
    Equality,
    AddSub,
    MulDiv,
    Exp,
    Lit,
}
impl Precedence
{
fn of_op(op: Op) -> Precedence
{
match op
{
Op::Equality => Precedence::Equality,
Op::AddSub => Precedence::AddSub,
Op::MulDiv => Precedence::MulDiv,
Op::ExpRoot => Precedence::Exp,
}
}
fn of_expr(e: &Expression) -> Precedence
{
match e
{
Expression::SubNode(sn) => Precedence::of_op(sn.operation),
_ => Precedence::Lit,
}
}
}
impl Expression {
    /// True when `self`, rendered as an operand of `op`, must be wrapped in
    /// parentheses — i.e. its precedence does not exceed the operator's.
    pub fn needs_parens(&self, op: Op) -> bool {
        Precedence::of_expr(self) <= Precedence::of_op(op)
    }
}
impl std::str::FromStr for Expression
{
    type Err = ParseError;
    /// Parses `s`, discarding any trailing `#` comment.
    fn from_str(s: &str) -> Result<Expression, ParseError> {
        Self::parse_from_str_with_comment(s).map(|v| v.0)
    }
}
impl std::fmt::Display for Expression
{
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        match self
        {
            // Negation only needs parens around compound operands.
            Expression::Negative(sn) =>
                match **sn
                {
                    Expression::Literal(_) => write!(f, "-{}", sn),
                    Expression::Variable(_) => write!(f, "-{}", sn),
                    _ => write!(f, "-({})", sn),
                },
            Expression::SubNode(sn) => std::fmt::Display::fmt(sn, f),
            Expression::Literal(v) => std::fmt::Display::fmt(&v, f),
            Expression::Variable(n) => std::fmt::Display::fmt(&n[..], f),
        }
    }
}
impl std::fmt::Display for ExprNode
{
    /// Prints the chain as `v0 <op> v1 <op> v2 ...`, choosing `-`/`/` for
    /// inverse operands and adding parens per `needs_parens`.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        use std::fmt::Write;
        // A node with fewer than two operands is malformed; this panics on
        // it rather than printing something misleading.
        assert!(self.values.len() > 1);
        let emit_with_parens = |v: &Expression, f: &mut std::fmt::Formatter| {
            let needs_parens = v.needs_parens(self.operation);
            if needs_parens {
                f.write_char('(')?;
            }
            std::fmt::Display::fmt(v, f)?;
            if needs_parens {
                f.write_char(')')?;
            }
            Ok( () )
        };
        for (i,v) in self.values.iter().enumerate()
        {
            // No operator before the first operand.
            if i == 0 {
            }
            else {
                match self.operation
                {
                    Op::AddSub => if v.inverse {
                            f.write_char('-')?;
                        }
                        else {
                            f.write_char('+')?;
                        },
                    Op::MulDiv => if v.inverse {
                            f.write_char('/')?;
                        }
                        else {
                            f.write_char('*')?;
                        },
                    // Exponent chains never carry an inverse operand.
                    Op::ExpRoot => { assert!(!v.inverse); f.write_char('^')? },
                    Op::Equality => f.write_char('=')?,
                }
            }
            emit_with_parens(&v.val, f)?;
        }
        Ok( () )
    }
}
/// Lexer output; `Whitespace` and `Comment` are filtered/handled before
/// the parser sees a stream.
#[derive(Debug,Copy,Clone,PartialEq)]
enum Token<'a> {
    Eof,
    Whitespace,
    Comment(&'a str),
    Ident(&'a str),
    Literal(f32),
    Op(char),
    ParenOpen,
    ParenClose,
}
// plex generates `lex_next_token`, which scans one token from the front of
// the input and returns it together with the remaining text (None once the
// input is exhausted — see Lexer::consume).
::plex::lexer! {
    fn lex_next_token(text: 'a) -> Result<Token<'a>, ParseError>;
    r#"[ \t\r\n]+"# => Ok(Token::Whitespace),
    r#"#.*"# => Ok(Token::Comment(text)),
    r#"[0-9]+(\.[0-9]*)?"# =>
        if let Ok(i) = text.parse() {
            Ok(Token::Literal(i))
        } else {
            Err(ParseError::BadToken(text.to_owned()))
        },
    r#"[a-zA-Z][a-zA-Z0-9_']*"# => Ok(Token::Ident(text)),
    r#"\+"# => Ok(Token::Op('+')),
    r#"-"# => Ok(Token::Op('-')),
    r#"\*"# => Ok(Token::Op('*')),
    r#"/"# => Ok(Token::Op('/')),
    r#"\^"# => Ok(Token::Op('^')),
    r#"="# => Ok(Token::Op('=')),
    r#"\("# => Ok(Token::ParenOpen),
    r#"\)"# => Ok(Token::ParenClose),
    r"." => Err(ParseError::BadToken(text.to_owned())),
}
/// One-token-lookahead lexer over the input string.
struct Lexer<'a>
{
    //base: &'a str,
    /// Input not yet tokenised.
    remaining: &'a str,
    /// The lookahead token (Eof once the input is exhausted).
    cur_token: Token<'a>,
}
impl<'a> Lexer<'a>
{
    /// Builds a lexer and primes the lookahead with the first token.
    fn new(s: &'a str) -> Result<Lexer<'a>, ParseError> {
        let mut rv = Lexer {
            //base: s,
            remaining: s,
            cur_token: Token::Eof,
            };
        rv.consume()?;
        Ok(rv)
    }
    /// Advances past the current token, skipping whitespace, and returns
    /// the PREVIOUS lookahead token (note the `mem::replace`).
    pub fn consume(&mut self) -> Result<Token<'a>, ParseError> {
        let mut t;
        loop
        {
            t = if let Some((tok_res, new_rem)) = lex_next_token(self.remaining) {
                    self.remaining = new_rem;
                    tok_res?
                }
                else {
                    // No more input: the lookahead becomes Eof.
                    Token::Eof
                };
            if t == Token::Whitespace {
                continue ;
            }
            break;
        }
        Ok( ::std::mem::replace(&mut self.cur_token, t) )
    }
    /// Peeks at the lookahead without advancing.
    pub fn cur(&self) -> Token<'a> {
        self.cur_token
    }
    /// Advances only if the lookahead equals `t`; returns whether it did.
    pub fn consume_if(&mut self, t: Token<'_>) -> Result<bool,ParseError> {
        Ok(if self.cur_token == t {
                self.consume()?;
                true
            }
            else {
                false
            })
    }
}
// Recursive-descent parser. Levels, loosest to tightest binding:
// parse_0 equality, parse_1 add/sub, parse_2 mul/div, parse_3 unary minus,
// parse_4 exponent, parse_5/parse_value atoms and parenthesised groups.
impl Expression
{
    /// Parses `s` into an optional expression plus any trailing `#` comment.
    /// Returns `None` for the expression when the input is only a comment.
    pub fn opt_parse_from_str_with_comment(s: &str) -> Result< (Option<Expression>, String), ParseError > {
        let mut l = Lexer::new(s)?;
        let rv = if let Token::Comment(_) = l.cur() {
                None
            }
            else {
                Some( Self::parse_root(&mut l)? )
            };
        let c = if let Token::Comment(_) = l.cur() {
                match l.consume()? { Token::Comment(c) => c.to_owned(), _ => panic!(""), }
            }
            else {
                String::new()
            };
        // Anything left after expression + comment is a parse error.
        if l.cur() != Token::Eof {
            return Err( ParseError::Unexpected( format!("{:?}", l.cur()) ) );
        }
        Ok( (rv, c,) )
    }
    /// Like `opt_parse_from_str_with_comment` but comment-only input is an
    /// `Empty` error instead of `None`.
    pub fn parse_from_str_with_comment(s: &str) -> Result< (Expression, String), ParseError > {
        match Self::opt_parse_from_str_with_comment(s)
        {
        Err(v) => Err(v),
        Ok( (None, _) ) => Err(ParseError::Empty),
        Ok( (Some(e), c) ) => Ok( (e, c) ),
        }
    }
    fn parse_root(lexer: &mut Lexer) -> Result<Expression,ParseError> {
        Self::parse_0(lexer)
    }
    /// Equality level: `a = b = c` becomes one Equality ExprNode.
    fn parse_0(lexer: &mut Lexer) -> Result<Expression,ParseError> {
        let v = Self::parse_1(lexer)?;
        if let Token::Op('=') = lexer.cur() {
            let mut values = vec![SubExpression { inverse: false, val: v }];
            while lexer.consume_if( Token::Op('=') )? {
                values.push(SubExpression { inverse: false, val: Self::parse_1(lexer)? });
            }
            Ok(Expression::SubNode(ExprNode {
                operation: Op::Equality,
                values: values,
                }))
        }
        else {
            Ok(v)
        }
    }
    // Add/Subtract
    /// Chains `+`/`-` operands; `inverse` marks the subtracted ones.
    /// A lone operand is returned unwrapped (no single-element node).
    fn parse_1(lexer: &mut Lexer) -> Result<Expression,ParseError> {
        let mut v = Self::parse_2(lexer)?;
        let mut values = vec![];
        let mut is_neg = false;
        loop
        {
            // The sign applies to the NEXT operand, so the previous one is
            // pushed with the sign that preceded it.
            let new_is_neg = if lexer.consume_if(Token::Op('-'))? {
                    true
                }
                else if lexer.consume_if(Token::Op('+'))? {
                    false
                }
                else {
                    break;
                };
            values.push(SubExpression { inverse: is_neg, val: v });
            is_neg = new_is_neg;
            v = Self::parse_2(lexer)?;
        }
        if values.len() > 0
        {
            values.push(SubExpression { inverse: is_neg, val: v });
            Ok( Expression::SubNode(ExprNode {
                operation: Op::AddSub,
                values: values,
                }) )
        }
        else
        {
            Ok(v)
        }
    }
    // Multiply / Divide
    /// Same structure as parse_1, for `*`/`/`.
    fn parse_2(lexer: &mut Lexer) -> Result<Expression,ParseError> {
        let mut v = Self::parse_3(lexer)?;
        let mut values = vec![];
        let mut is_div = false;
        loop
        {
            let new_is_div = if lexer.consume_if(Token::Op('/'))? {
                    true
                }
                else if lexer.consume_if(Token::Op('*'))? {
                    false
                }
                else {
                    break;
                };
            values.push(SubExpression { inverse: is_div, val: v });
            is_div = new_is_div;
            v = Self::parse_3(lexer)?;
        }
        if values.len() > 0
        {
            values.push(SubExpression { inverse: is_div, val: v });
            Ok( Expression::SubNode(ExprNode {
                operation: Op::MulDiv,
                values: values,
                }) )
        }
        else
        {
            Ok(v)
        }
    }
    // Unary Negation
    fn parse_3(lexer: &mut Lexer) -> Result<Expression,ParseError> {
        if lexer.consume_if(Token::Op('-'))?
        {
            let v = Self::parse_4(lexer)?;
            Ok(Expression::Negative( Box::new(v) ))
        }
        else
        {
            Self::parse_4(lexer)
        }
    }
    // Exponent
    /// Chains `^`; exponent operands never carry `inverse`.
    fn parse_4(lexer: &mut Lexer) -> Result<Expression,ParseError> {
        let mut v = Self::parse_5(lexer)?;
        let mut values = vec![];
        loop
        {
            // NOTE(review): the `false` value here is discarded — the `if`
            // only exists to `break` when no `^` follows.
            if lexer.consume_if(Token::Op('^'))? {
                false
            }
            else {
                break;
            };
            values.push(SubExpression { inverse: false, val: v });
            v = Self::parse_5(lexer)?;
        }
        if values.len() > 0
        {
            values.push(SubExpression { inverse: false, val: v });
            Ok( Expression::SubNode(ExprNode {
                operation: Op::ExpRoot,
                values: values,
                }) )
        }
        else
        {
            Ok(v)
        }
    }
    fn parse_5(lexer: &mut Lexer) -> Result<Expression,ParseError> {
        Self::parse_value(lexer)
    }
    /// Atoms: literal, variable, or a parenthesised group.
    fn parse_value(lexer: &mut Lexer) -> Result<Expression,ParseError> {
        Ok(match lexer.cur()
        {
        Token::Literal(v) => {
            lexer.consume()?;
            Expression::Literal(v)
            },
        Token::Ident(i) => {
            lexer.consume()?;
            Expression::Variable(i.to_owned())
            },
        Token::ParenOpen => {
            lexer.consume()?;
            // NOTE(review): this calls parse_1, not parse_0, so `=` is not
            // accepted inside parentheses — possibly intentional; confirm.
            let rv = Self::parse_1(lexer)?;
            if !lexer.consume_if(Token::ParenClose)? {
                return Err(ParseError::Unexpected( format!("{:?}", lexer.cur()) ));
            }
            return Ok(rv)
            },
        _ => return Err(ParseError::Unexpected( format!("{:?}", lexer.cur()) )),
        })
    }
}
| true |
5a2e48d557896ffd19340ba3d558d1ee61bb36a2
|
Rust
|
BarePotato/gooey
|
/core/src/styles/font_size.rs
|
UTF-8
| 748 | 3.28125 | 3 |
[
"Apache-2.0",
"MIT"
] |
permissive
|
use figures::Figure;
use stylecs::StyleComponent;
use crate::Scaled;
/// The font size for drawing text.
#[derive(Debug, Copy, Clone)]
pub struct FontSize(pub Figure<f32, Scaled>);
impl Default for FontSize {
    /// Defaults to 14 scaled units.
    fn default() -> Self {
        Self::new(14.)
    }
}
impl FontSize {
    /// Creates a new `FontSize` using `value` in `Unit`.
    #[must_use]
    pub const fn new(value: f32) -> Self {
        Self(Figure::new(value))
    }
    /// Returns the raw font size value.
    #[must_use]
    pub fn get(self) -> f32 {
        self.0.get()
    }
    /// Returns the font size as a type-safe measurement.
    #[must_use]
    pub const fn length(self) -> Figure<f32, Scaled> {
        self.0
    }
}
// Marker impl: lets FontSize participate in the stylecs style system.
impl StyleComponent for FontSize {}
| true |
4d5ba681bff5e2295afc920658f10adac1038ef3
|
Rust
|
CSML-by-Clevy/csml-engine
|
/csml_interpreter/src/data/error_info.rs
|
UTF-8
| 3,186 | 2.875 | 3 |
[
"Apache-2.0"
] |
permissive
|
use crate::data::{
literal::{create_error_info, Literal},
position::Position,
};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
////////////////////////////////////////////////////////////////////////////////
// DATA STRUCTURE
////////////////////////////////////////////////////////////////////////////////
/// An interpreter error: where it happened, what went wrong, and optional
/// structured context attached via `add_info` / `add_info_block`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ErrorInfo {
    /// Flow name and interval at which the error was raised.
    pub position: Position,
    /// Human-readable description.
    pub message: String,
    /// Extra key/value context; `None` until something is attached.
    pub additional_info: Option<HashMap<String, Literal>>,
}
////////////////////////////////////////////////////////////////////////////////
// STATIC FUNCTIONS
////////////////////////////////////////////////////////////////////////////////
impl ErrorInfo {
pub fn new(position: Position, message: String) -> Self {
let error_info = create_error_info(&message, position.interval);
Self {
position,
message,
additional_info: Some(error_info),
}
}
pub fn add_info(&mut self, key: &str, value: Literal) {
match self.additional_info {
Some(ref mut map) => {
map.insert(key.to_owned(), value);
}
None => {
let mut info = HashMap::new();
info.insert(key.to_owned(), value);
self.additional_info = Some(info);
}
}
}
pub fn add_info_block(&mut self, info: HashMap<String, Literal>) {
match self.additional_info {
Some(ref mut map) => {
for (key, value) in info {
map.insert(key, value);
}
}
None => {
self.additional_info = Some(info);
}
}
}
}
////////////////////////////////////////////////////////////////////////////////
// METHOD FUNCTIONS
////////////////////////////////////////////////////////////////////////////////
impl ErrorInfo {
    /// Renders `"<message> at line L, column C at flow [name]"` for logs
    /// and user-facing output.
    pub fn format_error(&self) -> String {
        format!(
            "{} at line {}, column {} at flow [{}]",
            self.message,
            self.position.interval.start_line,
            self.position.interval.start_column,
            self.position.flow,
        )
    }
}
impl From<std::io::Error> for ErrorInfo {
fn from(e: std::io::Error) -> Self {
Self {
position: Position::default(),
message: e.to_string(),
additional_info: None,
}
}
}
impl From<serde_json::Error> for ErrorInfo {
fn from(e: serde_json::Error) -> Self {
Self {
position: Position::default(),
message: e.to_string(),
additional_info: None,
}
}
}
impl From<uuid::Error> for ErrorInfo {
fn from(e: uuid::Error) -> Self {
Self {
position: Position::default(),
message: e.to_string(),
additional_info: None,
}
}
}
impl From<std::time::SystemTimeError> for ErrorInfo {
fn from(e: std::time::SystemTimeError) -> Self {
Self {
position: Position::default(),
message: e.to_string(),
additional_info: None,
}
}
}
| true |
fdd1997599e293c4c2cbeccc5ac068e55d5356f3
|
Rust
|
teromene/activityrust
|
/src/entities/link.rs
|
UTF-8
| 3,938 | 2.65625 | 3 |
[] |
no_license
|
use crate::content::*;
use crate::entities::entity::{
ActivityStreamEntity, ActivityStreamEntityType, BoxedActivityStreamEntity,
};
use crate::traits::properties::*;
use crate::MaybeOptional;
use serde::{Deserialize, Serialize};
use url::Url;
// Property accessors for `ActivityStreamLink`; getters borrow the stored
// `Option`s, setters accept anything convertible via `MaybeOptional`.
impl ActivityStreamLinkProperties for ActivityStreamLink {
    fn get_id(&self) -> &Option<Url> {
        &self.id
    }

    fn set_id<T: MaybeOptional<Url>>(&mut self, id: T) {
        self.id = id.get_optional();
    }

    // Adds a JSON-LD @context entry, deduplicating against those already
    // registered and creating the list on first use.
    fn register_context<T>(&mut self, new_context: T)
    where
        ActivityStreamContext: From<T>,
    {
        let new_context: ActivityStreamContext = ActivityStreamContext::from(new_context);
        if let Some(ref mut context) = self.context {
            if !context.contains(&new_context) {
                context.push(new_context);
            }
        } else {
            self.context = Some(vec![new_context]);
        }
    }

    fn get_href(&self) -> &Option<Url> {
        &self.href
    }

    fn set_href<T: MaybeOptional<Url>>(&mut self, href: T) {
        self.href = href.get_optional();
    }

    fn get_hreflang(&self) -> &Option<String> {
        &self.hreflang
    }

    fn set_hreflang<T: MaybeOptional<String>>(&mut self, hreflang: T) {
        self.hreflang = hreflang.get_optional();
    }

    fn get_media_type(&self) -> &Option<String> {
        &self.mediaType
    }

    fn set_media_type<T: MaybeOptional<String>>(&mut self, media_type: T) {
        self.mediaType = media_type.get_optional();
    }

    fn get_name(&self) -> &Option<ActivityStreamMultilangString> {
        &self.name
    }

    // NOTE(review): unlike the other setters, passing `None` here leaves the
    // previous `name` in place instead of clearing it — confirm intended.
    fn set_name<S, T: MaybeOptional<S>>(&mut self, name: T)
    where
        ActivityStreamMultilangString: From<S>,
    {
        if let Some(name) = name.get_optional() {
            self.name = Some(ActivityStreamMultilangString::from(name));
        }
    }

    fn get_height(&self) -> &Option<usize> {
        &self.height
    }

    fn set_height<T: MaybeOptional<usize>>(&mut self, height: T) {
        self.height = height.get_optional();
    }

    fn get_width(&self) -> &Option<usize> {
        &self.width
    }

    fn set_width<T: MaybeOptional<usize>>(&mut self, width: T) {
        self.width = width.get_optional();
    }

    fn get_preview(&self) -> &Option<BoxedActivityStreamEntity> {
        &self.preview
    }

    // NOTE(review): same asymmetry as `set_name` — `None` does not clear.
    fn set_preview<S, T: MaybeOptional<S>>(&mut self, preview: T)
    where
        ActivityStreamEntity: From<S>,
    {
        if let Some(preview) = preview.get_optional() {
            self.preview = Some(Box::new(ActivityStreamEntity::from(preview)));
        }
    }
}
generate_basics!(ActivityStreamLink, ActivityStreamEntityType::Link);
// ActivityStreams 2.0 `Link` object. Field names mirror the vocabulary
// (hence `non_snake_case`); all fields are optional and omitted from the
// serialized form when `None`.
#[allow(non_snake_case)]
#[derive(Debug, Default, Serialize, Deserialize, PartialEq)]
pub struct ActivityStreamLink {
    #[serde(skip_serializing_if = "Option::is_none", default)]
    id: Option<Url>,
    // Entity type; deserialized through a custom validator.
    #[serde(skip_serializing_if = "Option::is_none", default)]
    #[serde(deserialize_with = "ActivityStreamLink::deserialize_type")]
    r#type: Option<ActivityStreamEntityType>,
    // JSON-LD @context list; (de)serialized via a one-or-many adapter.
    #[serde(skip_serializing_if = "Option::is_none", default)]
    #[serde(rename = "@context", with = "crate::traits::vecserializer")]
    context: Option<Vec<ActivityStreamContext>>,
    #[serde(skip_serializing_if = "Option::is_none", default)]
    href: Option<Url>,
    #[serde(skip_serializing_if = "Option::is_none", default)]
    hreflang: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none", default)]
    mediaType: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none", default)]
    name: Option<ActivityStreamMultilangString>,
    #[serde(skip_serializing_if = "Option::is_none", default)]
    height: Option<usize>,
    #[serde(skip_serializing_if = "Option::is_none", default)]
    width: Option<usize>,
    #[serde(skip_serializing_if = "Option::is_none", default)]
    preview: Option<BoxedActivityStreamEntity>,
}
| true |
1d46c6bf9541d17c0d24d80bdd475b714c5e116d
|
Rust
|
xujihui1985/learningrust
|
/adder/src/lib.rs
|
UTF-8
| 685 | 3.671875 | 4 |
[] |
no_license
|
/// An axis-aligned rectangle used to demonstrate unit tests.
#[derive(Debug)]
pub struct Rectangle {
    length: u32,
    width: u32,
}

impl Rectangle {
    /// Returns `true` when `other` fits strictly inside `self` on both axes.
    pub fn can_hold(&self, other: &Rectangle) -> bool {
        other.length < self.length && other.width < self.width
    }

    /// Unconditionally panics; exists to exercise `#[should_panic]` tests.
    pub fn panic() {
        panic!("panic");
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn larger_can_hold() {
        let larger = Rectangle {
            length: 8,
            width: 7,
        };
        let smaller = Rectangle {
            length: 7,
            width: 6,
        };
        // `assert!` is the idiomatic form for boolean conditions and gives
        // clearer failure output than `assert_eq!(true, ..)`.
        assert!(larger.can_hold(&smaller));
        // The containment check is not symmetric.
        assert!(!smaller.can_hold(&larger));
    }

    #[test]
    #[should_panic]
    fn should_panic() {
        Rectangle::panic();
    }
}
| true |
1c2ab600812b849caa9c88c61ddaabf0c8d48808
|
Rust
|
KappaDistributive/aoc2015rust
|
/day10/src/main.rs
|
UTF-8
| 663 | 3.328125 | 3 |
[] |
no_license
|
//const INPUT: Vec<u32> = [1,1,1,3,2,2,2,1,1,3].to_vec();
/// Applies one round of the "look and say" transformation: each maximal run
/// of `k` equal digits `d` in `input` is emitted as the pair `k, d`.
///
/// Takes `&[usize]` rather than `&Vec<usize>` (more general; existing
/// `&Vec` callers keep working via deref coercion). An empty input yields
/// an empty result.
fn look_and_say(input: &[usize]) -> Vec<usize> {
    let mut result: Vec<usize> = Vec::new();
    let mut i: usize = 0;
    while i < input.len() {
        let digit = input[i];
        // Count the length of the run of `digit` starting at `i`.
        let mut counter: usize = 0;
        while i < input.len() && input[i] == digit {
            counter += 1;
            i += 1;
        }
        result.push(counter);
        result.push(digit);
    }
    result
}
fn main() {
let mut input: Vec<usize> = [1,1,1,3,2,2,2,1,1,3].to_vec();
for _ in 0..50 {
input = look_and_say(&input);
}
println!("Answer part 1:{}", input.len());
}
| true |
74b15a21b0d61ba4be805fa5b2b54e7a06e7f30d
|
Rust
|
nghenglim/sqlink
|
/sqlink/src/postgres/op.rs
|
UTF-8
| 1,017 | 2.578125 | 3 |
[
"MIT"
] |
permissive
|
use crate::postgres::query_field::{ParameterValueAsRef};
use crate::postgres::query_token::{format_query, TmpQueryTokens};
pub fn eq<S: Into<String>>(field: S, arg: ParameterValueAsRef) -> (TmpQueryTokens, Vec<ParameterValueAsRef>) {
format_query(format!("{} = {{}}", field.into()), vec![arg])
}
pub fn lt<S: Into<String>>(field: S, arg: ParameterValueAsRef) -> (TmpQueryTokens, Vec<ParameterValueAsRef>) {
format_query(format!("{} < {{}}", field.into()), vec![arg])
}
pub fn lte<S: Into<String>>(field: S, arg: ParameterValueAsRef) -> (TmpQueryTokens, Vec<ParameterValueAsRef>) {
format_query(format!("{} <= {{}}", field.into()), vec![arg])
}
pub fn gt<S: Into<String>>(field: S, arg: ParameterValueAsRef) -> (TmpQueryTokens, Vec<ParameterValueAsRef>) {
format_query(format!("{} > {{}}", field.into()), vec![arg])
}
pub fn gte<S: Into<String>>(field: S, arg: ParameterValueAsRef) -> (TmpQueryTokens, Vec<ParameterValueAsRef>) {
format_query(format!("{} >= {{}}", field.into()), vec![arg])
}
| true |
e68c172731503e411982046e15ed0545ef409a07
|
Rust
|
Nugine/nuclear
|
/src/router.rs
|
UTF-8
| 9,247 | 2.90625 | 3 |
[
"MIT"
] |
permissive
|
use crate::error::StatusError;
use crate::http::Method;
use crate::internal_prelude::*;
use std::ops::{Range, RangeFrom};
use std::str::FromStr;
use smallvec::SmallVec;
/// A method-aware path router dispatching to boxed [`Handler`]s.
#[derive(Default)]
pub struct SimpleRouter {
    // Pattern matcher; stores indices into `effects`.
    router: Router,
    // Registered handlers, indexed by the route's `data_index`.
    effects: Vec<Box<dyn Handler>>,
    // Fallback handler used when no route matches.
    default: Option<Box<dyn Handler>>,
}
/// Path captures bundled with an owned copy of the matched path, so the
/// byte ranges in `captures` remain valid after routing completes.
pub struct CaptureOwned {
    path: Box<str>,
    captures: Captures,
}

impl CaptureOwned {
    // Resolves a named capture to the slice of the stored path it matched.
    fn get_param(&self, name: &str) -> Option<&str> {
        self.captures.get_param(self.path.as_ref(), name)
    }
}
/// Convenience accessors for route captures attached to a request.
pub trait SimpleRouterExt {
    /// Returns the captures recorded for this request, if any.
    fn capture(&self) -> Option<&CaptureOwned>;

    /// Looks up a named path parameter.
    fn param(&self, name: &str) -> Option<&str> {
        self.capture().and_then(|c| c.get_param(name))
    }

    /// Like [`SimpleRouterExt::param`], but panics when the captures or the
    /// parameter are absent. (The `panic!` calls stay directly in this body
    /// so `#[track_caller]` reports the caller's location.)
    #[track_caller]
    fn expect_param(&self, name: &str) -> &str {
        match self.capture() {
            None => panic!("capture not found"),
            Some(c) => match c.get_param(name) {
                None => panic!("param {:?} not found", name),
                Some(s) => s,
            },
        }
    }

    /// Parses a named path parameter into `T`.
    fn parse_param<T: FromStr>(&self, name: &str) -> Option<Result<T, T::Err>> {
        self.param(name).map(|s| s.parse())
    }
}
impl SimpleRouterExt for Request {
    // Captures are stashed in the request's extensions by the router
    // (see `Handler::handle` for `SimpleRouter`).
    fn capture(&self) -> Option<&CaptureOwned> {
        self.extensions().get::<CaptureOwned>()
    }
}
impl SimpleRouter {
    /// Creates an empty router with no routes and no fallback handler.
    pub fn new() -> Self {
        Self::default()
    }

    /// Installs the fallback handler used when no route matches.
    pub fn set_default(&mut self, h: Box<dyn Handler>) {
        self.default = Some(h);
    }

    /// Starts registering handlers for `pattern` (see [`RouteSetter`]).
    pub fn at(&mut self, pattern: &'static str) -> RouteSetter<'_> {
        RouteSetter { router: self, pattern }
    }

    /// Registers `h` for `pattern` on the given methods.
    pub fn add_route(&mut self, methods: &[Method], pattern: &'static str, h: Box<dyn Handler>) {
        // The handler's slot in `effects` doubles as the route's data index.
        let idx = self.effects.len();
        self.router.add_route(methods, pattern, idx, true);
        self.effects.push(h);
    }

    /// Resolves `method` + `path` to a handler and its path captures.
    pub fn find(&self, method: &Method, path: &str) -> Option<(&dyn Handler, CaptureOwned)> {
        let mut captures = Captures::empty();
        let idx = self.router.find(&mut captures, method, path)?;
        let owned = CaptureOwned {
            path: path.into(),
            captures,
        };
        Some((&*self.effects[idx], owned))
    }
}
impl Handler for SimpleRouter {
    // Routing entry point: resolves the request's method + path, stores the
    // captures in the request's extensions, and delegates to the matched
    // handler — or to the default handler, or a 404 when neither exists.
    fn handle<'t, 'a>(&'t self, mut req: Request) -> BoxFuture<'a, Result<Response>>
    where
        't: 'a,
        Self: 'a,
    {
        Box::pin(async move {
            let hreq = &mut *req;
            let method = hreq.method();
            let path = hreq.uri().path();
            match self.find(method, path) {
                Some((h, capture)) => {
                    // Expose captures to the handler via `SimpleRouterExt`.
                    let _ = hreq.extensions_mut().insert(capture);
                    h.handle(req).await
                }
                None => match self.default.as_ref() {
                    Some(h) => h.handle(req).await,
                    None => Ok(StatusError::NOT_FOUND.into()),
                },
            }
        })
    }
}
/// Builder returned by [`SimpleRouter::at`]: registers handlers for one
/// pattern, one HTTP verb at a time.
pub struct RouteSetter<'r> {
    router: &'r mut SimpleRouter,
    pattern: &'static str,
}

// Generates one registration method per HTTP verb; each forwards to
// `SimpleRouter::add_route` with a single-method slice and returns `self`
// for chaining.
macro_rules! define_method {
    {$name:ident, $method:expr} => {
        pub fn $name(&mut self, h: Box<dyn Handler>) -> &mut Self {
            self.router.add_route(&[$method], self.pattern, h);
            self
        }
    };
}

impl RouteSetter<'_> {
    define_method! {get, Method::GET}
    define_method! {post, Method::POST}
    define_method! {put, Method::PUT}
    define_method! {delete, Method::DELETE}
    define_method! {head, Method::HEAD}
    define_method! {options, Method::OPTIONS}
    define_method! {connect, Method::CONNECT}
    define_method! {patch, Method::PATCH}
    define_method! {trace, Method::TRACE}
}
/// Pattern matcher: a flat list of routes scanned in registration order.
#[derive(Default)]
struct Router {
    routes: Vec<Route>,
}

/// One registered pattern plus the methods it accepts.
struct Route {
    // Pattern split on '/'; each piece is a literal or a named capture.
    segments: Box<[Segment]>,
    // True when the pattern ended with a `**` tail wildcard.
    catch_tail: bool,
    // Index of the associated handler (e.g. in `SimpleRouter::effects`).
    data_index: usize,
    // Bitmask over `METHODS` of the accepted HTTP methods.
    method_mask: u16,
}

/// A single path segment of a route pattern.
enum Segment {
    // Must equal the request segment exactly.
    Static(&'static str),
    // Matches any segment and records it under the given name.
    Capture(&'static str),
}

/// Parameter and tail captures recorded during a successful match;
/// the ranges index into the matched path string.
#[derive(Debug)]
struct Captures {
    params: Option<Vec<(&'static str, Range<usize>)>>,
    tail: Option<RangeFrom<usize>>,
}
/// All HTTP methods the router can dispatch on; a method's position in this
/// table is its bit index in a route's `method_mask`.
const METHODS: [Method; 9] = [
    Method::GET,
    Method::POST,
    Method::PUT,
    Method::DELETE,
    Method::HEAD,
    Method::OPTIONS,
    Method::CONNECT,
    Method::PATCH,
    Method::TRACE,
];

/// Maps a method to its bit index in `METHODS`; panics on anything else.
fn to_index(method: &Method) -> u8 {
    match METHODS.iter().position(|m| m == method) {
        Some(i) => i as u8,
        None => panic!("unsupported method: {:?}", method),
    }
}
impl Route {
    // Attempts to match `parts` (the '/'-split request path) against this
    // route. On success, records parameter captures into `captures` and
    // returns the route's handler index; on mismatch, any params pushed for
    // this route are rolled back and `None` is returned.
    fn try_match<'p>(
        &self,
        captures: &mut Captures,
        path: &'p str,
        parts: &[&'p str],
    ) -> Option<usize> {
        // A tail wildcard requires at least one extra request segment.
        let segment_num: usize = self.segments.len() + self.catch_tail as usize;
        if segment_num > parts.len() {
            return None;
        }
        // Without a tail wildcard, segment counts must match exactly.
        if self.segments.len() < parts.len() && !self.catch_tail {
            return None;
        }
        let params = captures.params.get_or_insert_with(Vec::new);
        // Remember the length so a failed match can undo its pushes.
        let origin_len = params.len();
        let iter = self.segments.iter().zip(parts.iter());
        // Offset of the wildcard tail: advanced past each consumed segment
        // plus one byte for its separator.
        let mut tail_start: usize = 0;
        for (segment, &part) in iter {
            tail_start += part.len() + 1;
            match *segment {
                Segment::Static(s) => {
                    if s != part {
                        // Roll back captures recorded for this route only.
                        params.truncate(origin_len);
                        return None;
                    }
                }
                Segment::Capture(name) => {
                    let range = calc_range(path, part);
                    params.push((name, range));
                }
            }
        }
        captures.tail = if self.catch_tail {
            Some(tail_start..)
        } else {
            None
        };
        Some(self.data_index)
    }
}
/// Computes the byte range that `part` occupies within `base`.
///
/// `part` must be a subslice of `base`: the offset is derived from the two
/// slices' addresses.
fn calc_range(base: &str, part: &str) -> Range<usize> {
    let offset = (part.as_ptr() as usize) - (base.as_ptr() as usize);
    offset..offset + part.len()
}
impl Router {
    /// Creates a router with no registered routes.
    fn new() -> Self {
        Self { routes: Vec::new() }
    }

    // Scans routes in registration order; first match wins. Returns the
    // matched route's handler index, recording captures as a side effect.
    fn find(&self, captures: &mut Captures, method: &Method, path: &str) -> Option<usize> {
        assert!(path.starts_with('/'));
        let parts: SmallVec<[&str; 4]> = path.split('/').skip(1).collect();
        let mask: u16 = 1_u16 << to_index(method);
        for route in self.routes.iter() {
            // Cheap method-mask filter before the segment-wise match.
            if route.method_mask & mask == 0 {
                continue;
            }
            if let Some(index) = route.try_match(captures, path, &parts) {
                return Some(index);
            }
        }
        None
    }

    // Parses `pattern` into segments and registers it; `data_index` links
    // the route back to its handler. Panics when `pattern` ends in the `**`
    // tail wildcard while `allow_tail` is false.
    fn add_route(
        &mut self,
        methods: &[Method],
        pattern: &'static str,
        data_index: usize,
        allow_tail: bool,
    ) {
        assert!(pattern.starts_with('/'))
;
        let mut segments: Vec<&str> = pattern.split('/').skip(1).collect();
        // A trailing "**" marks a catch-all tail and is not a real segment.
        let catch_tail = if *segments.last().unwrap() == "**" {
            if !allow_tail {
                panic!("pattern {:?} can not contain tail wildcard", pattern);
            }
            segments.pop();
            true
        } else {
            false
        };
        // Segments starting with ':' become named captures.
        let segments: Box<[Segment]> = segments
            .into_iter()
            .map(|s| match s.as_bytes() {
                [b':', ..] => Segment::Capture(&s[1..]),
                _ => Segment::Static(s),
            })
            .collect();
        // Fold the accepted methods into a bitmask over `METHODS`.
        let method_mask = methods
            .iter()
            .fold(0_u16, |acc, m| acc | (1_u16 << to_index(m)));
        let route: Route = Route {
            segments,
            catch_tail,
            data_index,
            method_mask,
        };
        self.routes.push(route);
    }
}
impl Captures {
fn empty() -> Self {
Self {
params: None,
tail: None,
}
}
fn get_param<'p>(&self, path: &'p str, name: &str) -> Option<&'p str> {
let params = self.params.as_deref()?;
for &(n, ref range) in params.iter() {
if n == name {
return Some(&path[range.clone()]);
}
}
None
}
}
// Exercises method filtering, named captures, and the `**` tail wildcard
// against a small route table.
#[test]
fn simple_router() {
    let mut router = Router::new();
    const GET: Method = Method::GET;
    const POST: Method = Method::POST;
    router.add_route(&[POST], "/posts", 1, true);
    router.add_route(&[GET, POST], "/posts/:pid", 2, true);
    router.add_route(&[GET], "/static/**", 3, true);
    let mut captures = Captures::empty();
    assert_eq!(router.find(&mut captures, &GET, "/posts/asd"), Some(2));
    assert_eq!(router.find(&mut captures, &POST, "/posts/asd"), Some(2));
    // A trailing slash yields an empty final segment, which `:pid` captures.
    assert_eq!(router.find(&mut captures, &GET, "/posts/"), Some(2));
    assert_eq!(router.find(&mut captures, &POST, "/posts/"), Some(2));
    // "/posts" is only registered for POST.
    assert_eq!(router.find(&mut captures, &GET, "/posts"), None);
    assert_eq!(router.find(&mut captures, &POST, "/posts"), Some(1));
    // The tail wildcard requires at least one segment after "/static".
    assert_eq!(router.find(&mut captures, &GET, "/static"), None);
    assert_eq!(router.find(&mut captures, &GET, "/static/"), Some(3));
    assert_eq!(router.find(&mut captures, &GET, "/static/asd"), Some(3));
    dbg!(&captures);
}
| true |
17a830df99974e06d59ceb4357b01904a2a58800
|
Rust
|
filipeamoreira/playground
|
/languages/rust/playground.rs
|
UTF-8
| 384 | 3.40625 | 3 |
[] |
no_license
|
struct Number {
odd: bool,
value: i32
}
trait Signed {
fn is_negative(self) -> bool;
}
impl Signed for Number {
fn is_negative(self) -> bool {
self.value < 0
}
}
fn main() {
let odd = Number { odd: false, value: -40 };
let even = Number { odd: true, value: 0 };
println!("{}", odd.is_negative());
println!("{}", even.is_negative());
}
| true |
0c73e7540d93d1d6838f99d85b4f562a40724122
|
Rust
|
radhakrishnang/goko
|
/serve_goko/src/errors.rs
|
UTF-8
| 4,490 | 2.8125 | 3 |
[
"Apache-2.0"
] |
permissive
|
use std::error::Error;
use std::fmt;
use goko::errors::GokoError;
use tokio::sync::oneshot;
/// Failures internal to the service plumbing: a wrapped goko error plus the
/// various channel/communication breakdowns (messages per `Display` below).
pub enum InternalServiceError {
    /// An error bubbled up from the goko library.
    Other(GokoError),
    /// "Send Failed".
    FailedSend,
    /// "Recv Failed".
    FailedRecv,
    /// "Unable to Respond, client hung up."
    FailedRespSend,
    /// "Attempted to read a message twice".
    DoubleRead,
    /// "Client Dropped".
    ClientDropped,
}

// A dropped oneshot sender surfaces as a receive failure.
impl From<oneshot::error::RecvError> for InternalServiceError {
    fn from(_e: oneshot::error::RecvError) -> InternalServiceError {
        InternalServiceError::FailedRecv
    }
}

impl From<GokoError> for InternalServiceError {
    fn from(e: GokoError) -> InternalServiceError {
        InternalServiceError::Other(e)
    }
}
impl fmt::Display for InternalServiceError {
    /// Human-readable description; `Other` defers to the goko error itself.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        use InternalServiceError::*;
        match self {
            Other(se) => fmt::Display::fmt(se, f),
            FailedSend => f.pad("Send Failed"),
            FailedRecv => f.pad("Recv Failed"),
            FailedRespSend => f.pad("Unable to Respond, client hung up."),
            DoubleRead => f.pad("Attempted to read a message twice"),
            ClientDropped => f.pad("Client Dropped"),
        }
    }
}
// Hand-written Debug: variant names without spaces, `Other` shows the
// wrapped error's Debug form.
impl fmt::Debug for InternalServiceError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        use InternalServiceError::*;
        match *self {
            FailedSend => f.pad("SendFailed"),
            Other(ref se) => write!(f, "Other({:?})", se),
            FailedRecv => f.pad("RecvFailed"),
            FailedRespSend => f.pad("FailedRespSend"),
            DoubleRead => f.pad("DoubleRead"),
            ClientDropped => f.pad("ClientDropped"),
        }
    }
}
impl Error for InternalServiceError {
    fn source(&self) -> Option<&(dyn Error + 'static)> {
        use InternalServiceError::*;
        match self {
            // Only the wrapped goko error can carry a deeper cause.
            Other(e) => e.source(),
            FailedSend | FailedRecv | FailedRespSend | DoubleRead | ClientDropped => None,
        }
    }
}
//use serde::{Deserialize, Serialize};
//
/// Errors surfaced to API clients.
pub enum GokoClientError {
    /// An internal service failure (see [`InternalServiceError`]).
    Underlying(InternalServiceError),
    /// The query was rejected; the message explains why.
    MalformedQuery(&'static str),
    /// A transport-level hyper error.
    Http(hyper::Error),
    /// The request payload could not be parsed.
    Parse(Box<dyn std::error::Error + Send + Sync>),
    /// "Body Missing" — a body was required but absent.
    MissingBody,
}

impl GokoClientError {
    /// Wraps an arbitrary boxed error as a parse failure.
    pub fn parse(err: Box<dyn std::error::Error + Send + Sync>) -> Self {
        GokoClientError::Parse(err)
    }
}

impl From<GokoError> for GokoClientError {
    fn from(e: GokoError) -> GokoClientError {
        GokoClientError::Underlying(InternalServiceError::Other(e))
    }
}

// A dropped oneshot sender surfaces as an internal receive failure.
impl From<oneshot::error::RecvError> for GokoClientError {
    fn from(_e: oneshot::error::RecvError) -> GokoClientError {
        GokoClientError::Underlying(InternalServiceError::FailedRecv)
    }
}

impl From<InternalServiceError> for GokoClientError {
    fn from(e: InternalServiceError) -> GokoClientError {
        GokoClientError::Underlying(e)
    }
}

impl From<hyper::Error> for GokoClientError {
    fn from(e: hyper::Error) -> GokoClientError {
        GokoClientError::Http(e)
    }
}
// Display defers to the wrapped error for every variant that has one.
impl fmt::Display for GokoClientError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match *self {
            GokoClientError::Underlying(ref se) => fmt::Display::fmt(se, f),
            GokoClientError::MalformedQuery(ref se) => fmt::Display::fmt(se, f),
            GokoClientError::Http(ref se) => fmt::Display::fmt(se, f),
            GokoClientError::Parse(ref se) => fmt::Display::fmt(se, f),
            GokoClientError::MissingBody => f.pad("Body Missing"),
        }
    }
}
// Hand-written Debug: `Variant(inner-debug)` for payload-carrying variants.
impl fmt::Debug for GokoClientError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match *self {
            GokoClientError::Underlying(ref se) => write!(f, "Underlying({:?})", se),
            GokoClientError::MalformedQuery(ref se) => write!(f, "MalformedQuery({:?})", se),
            GokoClientError::Http(ref se) => write!(f, "Http({:?})", se),
            // Bug fix: this previously printed "Underlying(..)" for the
            // Parse variant, making it indistinguishable from Underlying
            // in logs.
            GokoClientError::Parse(ref se) => write!(f, "Parse({:?})", se),
            GokoClientError::MissingBody => f.pad("MissingBody"),
        }
    }
}
impl Error for GokoClientError {
    fn source(&self) -> Option<&(dyn Error + 'static)> {
        match *self {
            GokoClientError::Underlying(ref se) => Some(se),
            GokoClientError::Http(ref se) => Some(se),
            // The boxed error is `dyn Error + Send + Sync`, which can't be
            // returned directly as `&dyn Error` (trait-object upcasting),
            // so this skips one level and reports the box's own source.
            GokoClientError::Parse(ref se) => se.source(),
            GokoClientError::MalformedQuery(_) => None,
            GokoClientError::MissingBody => None,
        }
    }
}
| true |
eea57ab70b6af15e1d1db11a3838545eaeafd746
|
Rust
|
aptend/leetcode-rua
|
/Rust/src/n1201_ugly_number_iii.rs
|
UTF-8
| 1,002 | 3.453125 | 3 |
[] |
no_license
|
pub fn nth_ugly_number(n: i32, a: i32, b: i32, c: i32) -> i32 {
fn gcd(mut x: i64, mut y: i64) -> i64 {
while y > 0 {
let tmp = x;
x = y;
y = tmp % y;
}
x
}
let (n, a, b, c) = (i64::from(n), i64::from(a), i64::from(b), i64::from(c));
let ab = a * b / gcd(a, b);
let ac = a * c / gcd(a, c);
let bc = b * c / gcd(b, c);
let abc = ab * c / gcd(ab, c);
let small_cnt = |x| x / a + x / b + x / c - x / ab - x / ac - x / bc + x / abc;
let mut lo = 1;
let mut hi = 2e9 as i32 + 1;
while lo <= hi {
let mid = lo + (hi - lo) / 2;
if small_cnt(i64::from(mid)) >= n {
hi = mid - 1;
} else {
lo = mid + 1;
}
}
lo
}
#[test]
fn test_1201() {
assert_eq!(4, nth_ugly_number(3, 2, 3, 5));
assert_eq!(9, nth_ugly_number(3, 3, 3, 3));
assert_eq!(
1999999984,
nth_ugly_number(1000000000, 2, 217983653, 336916467)
);
}
| true |
adb83166f42f0b1fe1a51953a4b07c40b6ab68bb
|
Rust
|
plorefice/tracy
|
/tracy/src/rendering/pattern.rs
|
UTF-8
| 6,157 | 3.28125 | 3 |
[] |
no_license
|
use crate::math::{Matrix, Point3};
use super::Color;
/// A nestable, colored pattern.
#[cfg_attr(
feature = "serde-support",
derive(serde::Serialize, serde::Deserialize)
)]
#[derive(Debug, Clone, PartialEq)]
pub struct Pattern {
kind: PatternKind,
#[cfg_attr(feature = "serde-support", serde(default))]
transform: Matrix,
}
/// Different kinds of patterns.
#[cfg_attr(
feature = "serde-support",
derive(serde::Serialize, serde::Deserialize),
serde(rename_all = "snake_case")
)]
#[derive(Debug, Clone, PartialEq)]
pub enum PatternKind {
/// A single solid color.
Solid(Color),
/// Two repeating, equally spaced pattern stripes.
///
/// The pattern is constant in the `y` and `z` coordinates, and alternates at each integer unit
/// of the `x` coordinate.
Stripes(Box<Pattern>, Box<Pattern>),
/// Two repeating, equally spaced pattern rings.
///
/// The pattern is constant in the `y` coordinate, and alternates at each integer concentric
/// ring on the `xz` plane.
Rings(Box<Pattern>, Box<Pattern>),
/// Alternating cubes in two patterns.
Checkers(Box<Pattern>, Box<Pattern>),
/// Average of two patterns.
Blended(Box<Pattern>, Box<Pattern>),
/// Linear gradient between two colors.
///
/// The pattern is constant in the `y` and `z` coordinates, with gradient stops at each integer
/// unit of the `x` coordinate.
LinearGradient(Color, Color),
/// Radial gradient between two colors.
///
/// The pattern is constant in the `y` coordinate, with gradient stops at each integer
/// concentric ring on the `xz` plane.
RadialGradient(Color, Color),
/// Test pattern that returns a color with the same coordinate of the point hit.
Test,
}
impl From<Color> for PatternKind {
fn from(c: Color) -> Self {
Self::Solid(c)
}
}
impl Pattern {
/// Create a new pattern with an identity trasformation applied.
pub fn new(kind: PatternKind) -> Self {
Self::new_with_transform(kind, Matrix::identity(4))
}
/// Creates a new pattern with an applied transformation.
pub fn new_with_transform(kind: PatternKind, transform: Matrix) -> Self {
Self { kind, transform }
}
/// Returns the pattern kind of `self`.
pub fn kind(&self) -> &PatternKind {
&self.kind
}
/// Returns the transformation applied to `self`.
pub fn transform(&self) -> &Matrix {
&self.transform
}
/// Returns the color of `self` at object-space coordinates `p`.
pub fn color_at(&self, p: &Point3) -> Color {
let p = self.transform.inverse().unwrap() * p;
match &self.kind {
&PatternKind::Solid(c) => c,
PatternKind::Stripes(a, b) => {
if (p.x.floor() as i32) % 2 == 0 {
a.color_at(&p)
} else {
b.color_at(&p)
}
}
PatternKind::Rings(a, b) => {
if (p.x.powi(2) + p.z.powi(2)).sqrt().floor() as i32 % 2 == 0 {
a.color_at(&p)
} else {
b.color_at(&p)
}
}
PatternKind::Checkers(a, b) => {
if (p.x.floor() + p.y.floor() + p.z.floor()) as i32 % 2 == 0 {
a.color_at(&p)
} else {
b.color_at(&p)
}
}
PatternKind::Blended(a, b) => (a.color_at(&p) + b.color_at(&p)) / 2.0,
PatternKind::LinearGradient(a, b) => a + (b - a) * (p.x - p.x.floor()),
PatternKind::RadialGradient(a, b) => {
let dist = (p.x.powi(2) + p.z.powi(2)).sqrt();
a + (b - a) * (dist - dist.floor())
}
PatternKind::Test => Color::new(p.x, p.y, p.z),
}
}
}
#[cfg(all(feature = "serde-support", test))]
mod tests {
use serde_test::{assert_de_tokens, Token};
use super::*;
#[test]
fn deserialize_complex_pattern() {
let p = Pattern::new(PatternKind::Blended(
Box::new(Pattern::new(Color::WHITE.into())),
Box::new(Pattern::new(Color::BLACK.into())),
));
/*
pattern:
kind:
blended:
- kind:
solid: [1, 1, 1]
- kind:
solid: [0, 0, 0]
*/
assert_de_tokens(
&p,
&[
// outer pattern
Token::Struct {
name: "Pattern",
len: 1,
},
Token::Str("kind"),
// outer blended pattern
Token::Enum {
name: "PatternKind",
},
Token::Str("blended"),
Token::Seq { len: Some(2) },
// first Solid start
Token::Struct {
name: "Pattern",
len: 1,
},
Token::Str("kind"),
Token::Enum {
name: "PatternKind",
},
Token::Str("solid"),
Token::Seq { len: Some(2) },
Token::F32(1.0),
Token::F32(1.0),
Token::F32(1.0),
Token::SeqEnd,
Token::StructEnd,
// first Solid end
// second Solid start
Token::Struct {
name: "Pattern",
len: 1,
},
Token::Str("kind"),
Token::Enum {
name: "PatternKind",
},
Token::Str("solid"),
Token::Seq { len: Some(2) },
Token::F32(0.0),
Token::F32(0.0),
Token::F32(0.0),
Token::SeqEnd,
Token::StructEnd,
// second Solid end
Token::SeqEnd,
// outer Blended end
Token::StructEnd,
],
);
}
}
| true |
e4ed91b9d24bf1a2e0a57f8f9205b93212ad148a
|
Rust
|
StoneDot/regtail
|
/tests/utils/mod.rs
|
UTF-8
| 5,529 | 2.5625 | 3 |
[
"Apache-2.0"
] |
permissive
|
/*
* Copyright 2019 StoneDot (Hiroaki Goto)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use std::fs::{self, File, OpenOptions};
use std::io::{Read, Write};
use std::path::PathBuf;
use std::process::{Child, Command, Stdio};
/// Prepares a fresh scratch directory under `integration_tests/` and a
/// `Command` pointing at the `regtail` binary built next to the test
/// executable.
pub fn setup(test_directory: &str) -> (WorkingDir, Command) {
    let dir = PathBuf::from(format!("integration_tests/{}", test_directory));
    // Start from a clean slate; ignore the error if the dir did not exist.
    let _ = fs::remove_dir_all(&dir);
    fs::create_dir_all(&dir).unwrap();
    // Test executables live in target/<profile>/deps; the binary under test
    // sits one directory up, in target/<profile>.
    let test_exec_path = std::env::current_exe().unwrap();
    let exec_dir = test_exec_path.parent().unwrap().parent().unwrap();
    let mut exec_path = exec_dir.to_path_buf();
    exec_path.push("regtail");
    // Leftover `dbg!(exec_path)` removed: it polluted test stderr.
    let mut command = Command::new(exec_path);
    command.stdout(Stdio::piped());
    let working_dir = WorkingDir::create(dir);
    (working_dir, command)
}
/// Handle to a per-test scratch directory with file-manipulation helpers.
pub struct WorkingDir {
    // Root of the scratch directory; all helper paths are relative to it.
    parent_path: PathBuf,
}
impl WorkingDir {
    /// Wraps an existing directory path as the root of a working dir.
    pub fn create(working_directory: PathBuf) -> Self {
        WorkingDir {
            parent_path: working_directory,
        }
    }

    /// Creates (or truncates) `relative_path` with `content`, creating any
    /// missing parent directories, and syncs the file to disk.
    #[allow(dead_code)]
    pub fn put_file<T: AsRef<[u8]>>(&self, relative_path: &str, content: T) {
        let mut new_file_path = self.parent_path.clone();
        new_file_path.push(relative_path);
        if let Some(parent_dir) = new_file_path.parent() {
            let _ = fs::create_dir_all(parent_dir);
        }
        let file_path_str = new_file_path.display().to_string();
        // `unwrap_or_else` builds the panic message lazily; the previous
        // `expect(format!(..).as_ref())` formatted it even on success.
        let mut fh = File::create(new_file_path)
            .unwrap_or_else(|e| panic!("Failed to open '{}': {:?}", file_path_str, e));
        fh.write_all(content.as_ref()).expect("Cannot put file");
        fh.sync_all().expect("Failed to sync");
    }

    /// Appends `content` to an existing file and syncs it.
    #[allow(dead_code)]
    pub fn append_file(&self, relative_path: &str, content: &str) {
        let mut append_file_path = self.parent_path.clone();
        append_file_path.push(relative_path);
        let file_path_str = append_file_path.display().to_string();
        let mut fh = OpenOptions::new()
            .append(true)
            .open(append_file_path)
            .unwrap_or_else(|e| {
                panic!("Failed to open '{}' with append mode: {:?}", file_path_str, e)
            });
        fh.write_all(content.as_bytes())
            .expect("Cannot append file");
        fh.sync_all().expect("Failed to sync");
    }

    /// Deletes a file inside the working directory.
    #[allow(dead_code)]
    pub fn remove_file(&self, relative_path: &str) {
        let mut remove_file_path = self.parent_path.clone();
        remove_file_path.push(relative_path);
        fs::remove_file(remove_file_path).expect("Cannot remove file");
    }

    /// Renames a file within the working directory.
    #[allow(dead_code)]
    pub fn rename_file(&self, src_relative_path: &str, dest_relative_path: &str) {
        let mut src_file_path = self.parent_path.clone();
        src_file_path.push(src_relative_path);
        let mut dest_file_path = self.parent_path.clone();
        dest_file_path.push(dest_relative_path);
        fs::rename(src_file_path, dest_file_path).expect("Cannot rename file");
    }

    /// Truncates a file to zero length and syncs it.
    #[allow(dead_code)]
    pub fn shrink_file(&self, relative_path: &str) {
        let mut shrink_file_path = self.parent_path.clone();
        shrink_file_path.push(relative_path);
        let file_path_str = shrink_file_path.display().to_string();
        // Bug fix: `write(true)` alone does NOT truncate, so the previous
        // empty `write_all(b"")` left the file contents intact. `truncate`
        // actually shrinks the file on open.
        let fh = OpenOptions::new()
            .write(true)
            .truncate(true)
            .open(shrink_file_path)
            .unwrap_or_else(|e| {
                panic!("Failed to open '{}' with write mode: {:?}", file_path_str, e)
            });
        fh.sync_all().expect("Failed to sync");
    }

    /// Creates a symlink `relative_dest` pointing at the canonicalized
    /// `relative_src`. Linux only.
    #[allow(dead_code)]
    #[cfg(target_os = "linux")]
    pub fn symlink(&self, relative_src: &str, relative_dest: &str) {
        let mut src_path = self.parent_path.clone();
        src_path.push(relative_src);
        src_path = src_path
            .canonicalize()
            .expect("Specified path cannot canonicalize");
        let mut dest_path = self.parent_path.clone();
        dest_path.push(relative_dest);
        std::os::unix::fs::symlink(src_path, dest_path).expect("Failed to make symbolic link");
    }

    /// Displayable form of the working-directory root.
    pub fn display(&self) -> std::path::Display {
        self.parent_path.display()
    }

    /// Renders the root as a `-p=<path>` CLI argument for regtail.
    pub fn path_arg(&self) -> String {
        format!("-p={}", self.display())
    }
}
/// A spawned child process under test.
pub struct RunningCommand {
    child: Child,
}

/// Outcome of asking a [`RunningCommand`] to exit.
#[derive(Debug, PartialEq)]
pub enum KillStatus {
    /// The process had already terminated before the kill was attempted.
    AlreadyExited,
    /// The kill signal terminated the process.
    Killed,
}
impl RunningCommand {
    /// Wraps an already spawned child process.
    pub fn create(child: Child) -> Self {
        RunningCommand { child }
    }

    /// Kills the child (if still running) and reaps it, reporting whether
    /// the kill signal was actually needed.
    pub fn exit(&mut self) -> KillStatus {
        let kill_result = match self.child.kill() {
            Ok(()) => KillStatus::Killed,
            Err(_) => KillStatus::AlreadyExited,
        };
        self.child.wait().unwrap();
        kill_result
    }

    /// Reads everything the child has written to its (piped) stdout.
    pub fn output(&mut self) -> String {
        let mut output = String::new();
        let stdout = self.child.stdout.as_mut().unwrap();
        let _size = stdout.read_to_string(&mut output);
        output
    }
}
| true |
d77490212377f1d42bb687825efebcf208017496
|
Rust
|
gnoliyil/fuchsia
|
/src/testing/sl4f/src/hwinfo/types.rs
|
UTF-8
| 3,439 | 2.5625 | 3 |
[
"BSD-2-Clause"
] |
permissive
|
// Copyright 2020 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use fidl_fuchsia_hwinfo::{BoardInfo, DeviceInfo, ProductInfo};
use serde::Serialize;
/// Serializable mirror of the FIDL `DeviceInfo` table.
#[derive(Clone, Debug, Serialize)]
pub struct SerializableDeviceInfo {
    pub serial_number: Option<String>,
}

/// DeviceInfo object is not serializable so serialize the object.
impl SerializableDeviceInfo {
    /// Copies the serializable fields out of a FIDL `DeviceInfo`.
    pub fn new(device: &DeviceInfo) -> Self {
        SerializableDeviceInfo { serial_number: device.serial_number.clone() }
    }
}
/// Serializable mirror of the FIDL `ProductInfo` table; nested FIDL types
/// (regulatory domain, locale list) are flattened to plain strings.
#[derive(Clone, Debug, Serialize)]
pub struct SerializableProductInfo {
    pub sku: Option<String>,
    pub language: Option<String>,
    // Just the country code of the FIDL regulatory-domain table.
    pub regulatory_domain: Option<String>,
    // Locale ids as strings.
    pub locale_list: Option<Vec<String>>,
    pub name: Option<String>,
    pub model: Option<String>,
    pub manufacturer: Option<String>,
    // Condensed "YYMMDD" form of the build timestamp.
    pub build_date: Option<String>,
    pub build_name: Option<String>,
    pub colorway: Option<String>,
    pub display: Option<String>,
    pub memory: Option<String>,
    pub nand_storage: Option<String>,
    pub emmc_storage: Option<String>,
    pub microphone: Option<String>,
    pub audio_amplifier: Option<String>,
}
/// ProductInfo object is not serializable so serialize the object.
impl SerializableProductInfo {
    /// Copies the serializable fields out of a FIDL `ProductInfo`,
    /// flattening the nested regulatory-domain and locale tables.
    pub fn new(product: &ProductInfo) -> Self {
        // Only the country code of the regulatory domain is kept.
        let regulatory_domain = match &product.regulatory_domain {
            Some(r) => r.country_code.clone(),
            None => None,
        };
        // Flatten the locale table to its string ids.
        let locale_list = product
            .locale_list
            .as_ref()
            .map(|list| list.iter().map(|locale| locale.id.to_string()).collect());
        // The syntax of build_date is "2019-10-24T04:23:49"; we want "191024".
        // `str::get` returns None instead of panicking (as `&date[2..10]`
        // did) when the string is shorter than expected.
        let build_date = product
            .build_date
            .as_ref()
            .and_then(|date| date.get(2..10))
            .map(|sub| sub.replace('-', ""));
        SerializableProductInfo {
            sku: product.sku.clone(),
            language: product.language.clone(),
            regulatory_domain,
            locale_list,
            name: product.name.clone(),
            model: product.model.clone(),
            manufacturer: product.manufacturer.clone(),
            build_date,
            build_name: product.build_name.clone(),
            colorway: product.colorway.clone(),
            display: product.display.clone(),
            memory: product.memory.clone(),
            nand_storage: product.nand_storage.clone(),
            emmc_storage: product.emmc_storage.clone(),
            microphone: product.microphone.clone(),
            audio_amplifier: product.audio_amplifier.clone(),
        }
    }
}
/// Serializable mirror of the FIDL `BoardInfo` table.
#[derive(Clone, Debug, Serialize)]
pub struct SerializableBoardInfo {
    pub name: Option<String>,
    pub revision: Option<String>,
}

/// Board object is not serializable so serialize the object.
impl SerializableBoardInfo {
    /// Copies the serializable fields out of a FIDL `BoardInfo`.
    pub fn new(board: &BoardInfo) -> Self {
        SerializableBoardInfo { name: board.name.clone(), revision: board.revision.clone() }
    }
}
| true |
5ee0f6e1a6eafe053bdefd9d0241c9079129b908
|
Rust
|
mrsekut/PracticeProgrammingLanguage
|
/Rust/src/compro.rs
|
UTF-8
| 293 | 3.359375 | 3 |
[] |
no_license
|
use std::io;
/// Reads one line from stdin, parses it as a u32 and echoes it back.
fn main() {
    let mut guess = String::new();
    io::stdin()
        .read_line(&mut guess)
        .expect("failed to read line"); // runs (and aborts) on Err
    // Shadow `guess` with the parsed number. The original was missing the
    // trailing semicolon here, which made the file fail to compile.
    let guess: u32 = guess.trim().parse().expect("please type a number");
    print!("number: {:?}", guess);
}
| true |
903a5add5dc4dd886944805deb31dc69c7f9f192
|
Rust
|
WangZehuiGit/rust-data-structrue
|
/src/bin_tree/height.rs
|
UTF-8
| 3,029 | 3 | 3 |
[
"MIT"
] |
permissive
|
use super::node::{HeightNode, Node};
use super::BinNode;
use super::InsertErr;
use super::Ptr;
use std::cmp::max;
use std::ptr::NonNull;
/// Height maintenance for height-caching binary tree nodes.
pub trait UpdateHeight<T>: HeightNode<T> {
    /// Height of the subtree rooted at `ptr`; an empty subtree has height 0.
    fn stature(ptr: Ptr<Self>) -> usize {
        if let Some(node) = ptr {
            return unsafe { node.as_ref().height() };
        }
        0
    }
    /// Recomputes `node`'s cached height as 1 + max(child heights) and
    /// returns the new value.
    fn update_height(mut node: NonNull<Self>) -> usize {
        unsafe {
            let (lc, rc) = (node.as_ref().lc(), node.as_ref().rc());
            node.as_mut()
                .set_height(&(1 + max(Self::stature(lc), Self::stature(rc))));
            node.as_ref().height()
        }
    }
    /// Recomputes the height of `node` and then of every ancestor up to the
    /// root (a structural change can only affect heights on that path).
    fn update_height_above(mut node: NonNull<Self>) {
        Self::update_height(node);
        unsafe {
            while let Some(parent) = node.as_ref().parent() {
                Self::update_height(parent);
                node = parent;
            }
        }
    }
}
/// Binary tree node that caches the height of the subtree rooted at it.
pub struct HeightBinNode<T> {
    node: BinNode<T>,
    height: usize,
}
impl<T> HeightBinNode<T> {
    // Pointer view of the wrapped BinNode.
    fn into(node: Ptr<Self>) -> Ptr<BinNode<T>> {
        if let Some(mut node) = node {
            return unsafe { NonNull::new(&mut node.as_mut().node) };
        } else {
            None
        }
    }
    // Reinterprets a BinNode pointer as a HeightBinNode pointer.
    // NOTE(review): this cast is only sound if the pointer really addresses
    // the `node` field of a HeightBinNode and that field sits at offset 0 —
    // confirm the layout guarantee (e.g. #[repr(C)]) holds.
    fn from(node: Ptr<BinNode<T>>) -> Ptr<Self> {
        if let Some(node) = node {
            return NonNull::new(node.as_ptr() as *mut HeightBinNode<T>);
        } else {
            None
        }
    }
}
impl<T> Node<T> for HeightBinNode<T>
where
    Self: UpdateHeight<T>,
{
    fn get(&mut self) -> &mut T {
        self.node.get()
    }
    fn parent(&self) -> Ptr<Self> {
        Self::from(self.node.parent())
    }
    fn lc(&self) -> Ptr<Self> {
        Self::from(self.node.lc())
    }
    fn rc(&self) -> Ptr<Self> {
        Self::from(self.node.rc())
    }
    // A freshly created node is a leaf, so its cached height starts at 1.
    fn new(value: &T, parent: Ptr<Self>) -> Self {
        Self {
            node: BinNode::new(value, Self::into(parent)),
            height: 1,
        }
    }
    // Re-parenting shrinks the old parent's subtree, so heights are
    // refreshed from the old parent upwards.
    fn set_parent(&mut self, value: &Ptr<Self>) {
        let parent = self.node.parent();
        self.node.set_parent(&Self::into(*value));
        if parent != None {
            Self::update_height_above(Self::from(parent).unwrap());
        }
    }
    // Attaching a left child may change this node's height and, transitively,
    // its ancestors' heights.
    fn set_lc(&mut self, value: &Ptr<Self>) -> Result<(), InsertErr> {
        let result = self.node.set_lc(&Self::into(*value));
        Self::update_height_above(NonNull::new(self).unwrap());
        result
    }
    // Mirror of set_lc for the right child.
    fn set_rc(&mut self, value: &Ptr<Self>) -> Result<(), InsertErr> {
        let result = self.node.set_rc(&Self::into(*value));
        Self::update_height_above(NonNull::new(self).unwrap());
        result
    }
}
impl<T> HeightNode<T> for HeightBinNode<T> {
    // Raw accessors for the cached height; recomputation logic lives in
    // the UpdateHeight trait's default methods.
    fn set_height(&mut self, value: &usize) {
        self.height = *value;
    }
    fn height(&self) -> usize {
        self.height
    }
}
// The default methods of UpdateHeight are sufficient; no overrides needed.
impl<T> UpdateHeight<T> for HeightBinNode<T> {}
| true |
468155f1bcc797a407b65bb11cd7d41a65c2664a
|
Rust
|
PushkarDureja/FizzBuzz
|
/fizz-buzz/rust.rs
|
UTF-8
| 281 | 2.9375 | 3 |
[
"Unlicense"
] |
permissive
|
use std::borrow::Cow;
/// Classic FizzBuzz over 1..=100.
fn main() {
    // `1..=100` is inclusive; the original `1..100` stopped at 99 (off-by-one
    // for the conventional 1-to-100 FizzBuzz).
    for i in 1..=100 {
        println!("{}", label(i));
    }
}

/// Returns the FizzBuzz label for `i`: multiples of 3 -> "Fizz", of 5 ->
/// "Buzz", of both -> "FizzBuzz", otherwise the number itself. `Cow` avoids
/// allocating for the three static labels.
fn label(i: u32) -> Cow<'static, str> {
    match (i % 3, i % 5) {
        (0, 0) => "FizzBuzz".into(),
        (0, _) => "Fizz".into(),
        (_, 0) => "Buzz".into(),
        _ => Cow::from(i.to_string()),
    }
}
| true |
53029769dbb6ee6ce2b8d22bca4d97e7b38398ca
|
Rust
|
afprusin/leetcode-rust
|
/src/bin/350_intersection_of_two_arrays_two.rs
|
UTF-8
| 878 | 3.25 | 3 |
[] |
no_license
|
use std::collections::{HashMap};
pub struct Solution {}
impl Solution {
    /// LeetCode 350: multiset intersection of the two arrays, in the order
    /// elements appear in `nums2`. Each element is used at most as many
    /// times as it occurs in both inputs.
    pub fn intersect(nums1: Vec<i32>, nums2: Vec<i32>) -> Vec<i32> {
        // Count how many of each value nums1 can still supply.
        let mut remaining: HashMap<i32, i32> = HashMap::new();
        for &v in &nums1 {
            *remaining.entry(v).or_insert(0) += 1;
        }
        let mut result = Vec::new();
        for &value in &nums2 {
            // get_mut decrements in place, avoiding the original's
            // separate get + insert (two hash lookups per hit).
            if let Some(count) = remaining.get_mut(&value) {
                if *count > 0 {
                    *count -= 1;
                    result.push(value);
                }
            }
        }
        result
    }
}
/// Smoke-tests `Solution::intersect` against the two LeetCode examples.
fn main() {
    let first = Solution::intersect(vec![1, 2, 2, 1], vec![2, 2]);
    assert_eq!(first, vec![2, 2]);
    let second = Solution::intersect(vec![4, 9, 5], vec![9, 4, 9, 8, 4]);
    assert_eq!(second, vec![9, 4]);
}
| true |
d83190551b9c6fa22140c5aa6f9c496b014bb192
|
Rust
|
eqlabs/pathfinder
|
/crates/storage/src/schema/revision_0032.rs
|
UTF-8
| 10,644 | 2.546875 | 3 |
[
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
use anyhow::Context;
use rusqlite::{params, Statement, Transaction};
/// Serialized to sqlite with full 32 bytes.
#[derive(Copy, Clone, serde::Deserialize)]
pub struct Felt(stark_hash::Felt);
/// Same as [Felt] but with leading zeros stripped when writing to sqlite.
#[derive(Copy, Clone, serde::Deserialize)]
pub struct CompressedFelt(stark_hash::Felt);
impl rusqlite::ToSql for Felt {
    // Stores the full 32-byte big-endian representation.
    fn to_sql(&self) -> rusqlite::Result<rusqlite::types::ToSqlOutput<'_>> {
        use rusqlite::types::{ToSqlOutput, ValueRef};
        Ok(ToSqlOutput::Borrowed(ValueRef::Blob(self.0.as_be_bytes())))
    }
}
impl rusqlite::ToSql for CompressedFelt {
    // Stores only the significant bytes: leading zero bytes of the
    // big-endian form are dropped to save space.
    fn to_sql(&self) -> rusqlite::Result<rusqlite::types::ToSqlOutput<'_>> {
        use rusqlite::types::{ToSqlOutput, ValueRef};
        let bytes = self.0.as_be_bytes();
        let num_zeroes = bytes.iter().take_while(|v| **v == 0).count();
        Ok(ToSqlOutput::Borrowed(ValueRef::Blob(&bytes[num_zeroes..])))
    }
}
/// This migration transforms state update storage from json blobs into tables containing
/// the data.
pub(crate) fn migrate(tx: &Transaction<'_>) -> anyhow::Result<()> {
    // 1. Create the three flat tables that replace the JSON blob column.
    tx.execute(
        r"-- contains all canonical deployed contracts and replaced class information
CREATE TABLE contract_updates (
    block_number INTEGER REFERENCES canonical_blocks(number) ON DELETE CASCADE,
    contract_address BLOB NOT NULL,
    class_hash BLOB NOT NULL
)",
        [],
    )
    .context("Creating contract_updates table")?;
    tx.execute(
        r"-- contains the nonce updates of all canonical blocks
CREATE TABLE nonce_updates (
    block_number INTEGER REFERENCES canonical_blocks(number) ON DELETE CASCADE,
    contract_address BLOB NOT NULL,
    nonce BLOB NOT NULL
)",
        [],
    )
    .context("Creating nonce_updates table")?;
    tx.execute(
        r"-- contains the storage updates of all of all canonical blocks
CREATE TABLE storage_updates (
    block_number INTEGER REFERENCES canonical_blocks(number) ON DELETE CASCADE,
    contract_address BLOB NOT NULL,
    storage_address BLOB NOT NULL,
    storage_value BLOB NOT NULL
)",
        [],
    )
    .context("Creating storage_updates table")?;
    // 2. Count the rows up front so progress can be reported as a percentage.
    let total: usize = tx
        .query_row("SELECT count(1) FROM starknet_state_updates", [], |row| {
            row.get(0)
        })
        .context("Counting number of rows in starknet_state_updates table")?;
    tracing::info!(rows=%total, "Flattening state updates - this may take a while, please be patient. Progress will be logged regularly.");
    let mut context = SqlContext::new(tx)?;
    // 3. Walk every stored state update in canonical block order; the row
    //    index therefore doubles as the block number.
    let mut state_update_query_stmt = tx
        .prepare(
            r"SELECT starknet_state_updates.data FROM starknet_state_updates JOIN canonical_blocks ON (starknet_state_updates.block_hash = canonical_blocks.hash) ORDER BY canonical_blocks.number ASC",
        )
        .context("Preparing state update query statement")?;
    let mut rows = state_update_query_stmt
        .query([])
        .context("Querying for state updates")?;
    let mut timer = std::time::Instant::now();
    let mut block_number = 0usize;
    while let Some(row) = rows
        .next()
        .context("Fetching next row of state update query")?
    {
        // Each row holds a zstd-compressed JSON blob of one state update.
        let state_update = row.get_ref_unwrap(0).as_blob().with_context(|| {
            format!("Getting state update bytes from database row for block {block_number}")
        })?;
        let state_update = zstd::decode_all(state_update)
            .with_context(|| format!("Decompressing state update for block {block_number}"))?;
        let state_update: types::StateUpdate = serde_json::from_slice(&state_update)
            .with_context(|| format!("Deserializing state update for block {block_number}"))?;
        migrate_state_update(block_number, state_update, &mut context)
            .with_context(|| format!("Migrating state update for block {block_number}"))?;
        block_number += 1;
        // Log progress at most every 10 seconds.
        if timer.elapsed() > std::time::Duration::from_secs(10) {
            let progress = Percentage(block_number * 100 / total);
            tracing::info!(%progress, "Flattening state updates");
            timer = std::time::Instant::now();
        }
    }
    // 4. Create the indexes only after the bulk insert, then drop the old
    //    blob table.
    tx.execute_batch(
        r"
CREATE INDEX nonce_updates_contract_address_block_number ON nonce_updates(contract_address, block_number);
CREATE INDEX contract_updates_address_block_number ON contract_updates(contract_address, block_number);
CREATE INDEX contract_updates_block_number ON contract_updates(block_number);
CREATE INDEX storage_updates_contract_address_storage_address_block_number ON storage_updates(contract_address, storage_address, block_number);
CREATE INDEX storage_updates_block_number ON storage_updates(block_number);
CREATE INDEX nonce_updates_block_number ON nonce_updates(block_number);"
    )
    .context("Creating indexes")?;
    tx.execute("DROP TABLE starknet_state_updates", [])
        .context("Dropping starknet_state_updates")?;
    Ok(())
}
/// Prepared insert statements reused for every block during the migration,
/// so each statement is compiled only once.
struct SqlContext<'tx> {
    nonce_stmt: Statement<'tx>,
    storage_stmt: Statement<'tx>,
    contract_stmt: Statement<'tx>,
}
impl<'tx> SqlContext<'tx> {
    /// Prepares the three insert statements against the migration transaction.
    fn new(tx: &'tx Transaction<'tx>) -> anyhow::Result<Self> {
        let nonce_stmt = tx
            .prepare(
                "INSERT INTO nonce_updates (block_number, contract_address, nonce) VALUES (?, ?, ?)",
            )
            .context("Preparing nonce insert statement")?;
        let storage_stmt = tx
            .prepare("INSERT INTO storage_updates (block_number, contract_address, storage_address, storage_value) VALUES (?, ?, ?, ?)")
            // Fixed copy-paste bug: this context previously said "nonce",
            // which would mislabel a failure of the storage statement.
            .context("Preparing storage insert statement")?;
        let contract_stmt = tx
            .prepare("INSERT INTO contract_updates (block_number, contract_address, class_hash) VALUES (?, ?, ?)")
            .context("Preparing contract insert statement")?;
        Ok(Self {
            nonce_stmt,
            storage_stmt,
            contract_stmt,
        })
    }
}
/// Inserts one block's state diff into the three flattened tables.
fn migrate_state_update(
    block_number: usize,
    state_update: types::StateUpdate,
    context: &mut SqlContext<'_>,
) -> anyhow::Result<()> {
    // Deployed contracts and replaced classes share the contract_updates table.
    update_contracts(
        block_number,
        &state_update.state_diff.deployed_contracts,
        &state_update.state_diff.replaced_classes,
        context,
    )
    .context("Inserting contract updates")?;
    update_nonces(block_number, &state_update.state_diff.nonces, context)
        .context("Inserting nonce updates")?;
    update_storage(
        block_number,
        &state_update.state_diff.storage_diffs,
        context,
    )
    .context("Inserting storage updates")
}
/// Writes one `contract_updates` row per deployed contract and per replaced
/// class in this block (both kinds share the same table and statement).
fn update_contracts(
    block_number: usize,
    deployed: &[types::DeployedContract],
    replaced: &[types::ReplacedClass],
    context: &mut SqlContext<'_>,
) -> anyhow::Result<()> {
    for contract in deployed {
        context
            .contract_stmt
            .execute(params![block_number, contract.address, contract.class_hash])
            .context("Inserting contract update")?;
    }
    for class in replaced {
        context
            .contract_stmt
            .execute(params![block_number, class.address, class.class_hash])
            .context("Inserting replaced class")?;
    }
    Ok(())
}
/// Writes one `nonce_updates` row per nonce change in this block.
fn update_nonces(
    block_number: usize,
    nonces: &[types::NonceUpdate],
    context: &mut SqlContext<'_>,
) -> anyhow::Result<()> {
    for update in nonces {
        context
            .nonce_stmt
            .execute(params![block_number, update.contract_address, update.nonce])
            .context("Inserting nonce update")?;
    }
    Ok(())
}
/// Writes one `storage_updates` row per storage slot change in this block.
fn update_storage(
    block_number: usize,
    storage: &[types::StorageDiff],
    context: &mut SqlContext<'_>,
) -> anyhow::Result<()> {
    for diff in storage {
        context
            .storage_stmt
            .execute(params![block_number, diff.address, diff.key, diff.value])
            .context("Inserting storage update")?;
    }
    Ok(())
}
/// Helper which displays as an integer percentage, e.g. `42%`.
struct Percentage(usize);
impl std::fmt::Display for Percentage {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}%", self.0)
    }
}
mod types {
    //! Copy of state update types for deserialization so that this migration is
    //! not coupled to any external type changes.
    use super::{CompressedFelt, Felt};
    use serde::Deserialize;
    #[derive(Deserialize)]
    #[serde(deny_unknown_fields)]
    pub struct StateUpdate {
        #[serde(default)]
        pub block_hash: Option<Felt>,
        pub new_root: Felt,
        pub old_root: Felt,
        pub state_diff: StateDiff,
    }
    #[derive(Deserialize)]
    #[serde(deny_unknown_fields)]
    pub struct StateDiff {
        pub storage_diffs: Vec<StorageDiff>,
        /// Refers to Declare V0 & V1 txns, these contain Cairo classes
        pub declared_contracts: Vec<DeclaredCairoClass>,
        /// Refers to pre-Starknet-0.11.0 Deploy txns
        pub deployed_contracts: Vec<DeployedContract>,
        #[serde(default)]
        pub nonces: Vec<NonceUpdate>,
        /// Refers to Declare V2 txns, these contain Sierra classes
        #[serde(default)]
        pub declared_sierra_classes: Vec<DeclaredSierraClass>,
        /// Replaced classes, introduced in Starknet 0.11.0
        #[serde(default)]
        pub replaced_classes: Vec<ReplacedClass>,
    }
    // Note: values/nonces use CompressedFelt (leading zeros stripped in
    // sqlite); addresses and hashes are stored as full-width Felt.
    #[derive(Deserialize)]
    #[serde(deny_unknown_fields)]
    pub struct StorageDiff {
        pub address: Felt,
        pub key: Felt,
        pub value: CompressedFelt,
    }
    #[derive(Deserialize)]
    #[serde(deny_unknown_fields)]
    pub struct DeclaredCairoClass {
        pub class_hash: Felt,
    }
    #[derive(Deserialize)]
    #[serde(deny_unknown_fields)]
    pub struct DeployedContract {
        pub address: Felt,
        pub class_hash: Felt,
    }
    #[derive(Deserialize)]
    #[serde(deny_unknown_fields)]
    pub struct DeclaredSierraClass {
        pub class_hash: Felt,
        pub compiled_class_hash: Felt,
    }
    #[derive(Deserialize)]
    #[serde(deny_unknown_fields)]
    pub struct NonceUpdate {
        pub contract_address: Felt,
        pub nonce: CompressedFelt,
    }
    #[derive(Deserialize)]
    #[serde(deny_unknown_fields)]
    pub struct ReplacedClass {
        pub address: Felt,
        pub class_hash: Felt,
    }
}
| true |
fa1a3c13eff9e8ca248b6037845b92324ff83706
|
Rust
|
dhardy/rust-mersenne-twister
|
/benches/benchmarks.rs
|
UTF-8
| 3,157 | 2.640625 | 3 |
[
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
#![feature(test)]
extern crate test;
extern crate rand;
extern crate mersenne_twister;
// Benchmarks for the 32-bit Mersenne Twister (MT19937).
mod mt19937 {
    use rand::Rng;
    use mersenne_twister::MT19937;
    // Cost of constructing an unseeded generator.
    #[bench]
    fn benchmark_seeding(b: &mut ::test::Bencher) {
        b.iter(|| MT19937::new_unseeded())
    }
    #[bench]
    fn benchmark_fill_next_state(b: &mut ::test::Bencher) {
        let mt = MT19937::new_unseeded();
        b.iter(|| {
            let mut mt = mt.clone();
            // Note that the first call to next_u32() triggers a call
            // to the fill_next_state() method, which is really what I
            // want to benchmark here.
            mt.next_u32()
        })
    }
    #[bench]
    fn benchmark_gen_624_u32(b: &mut ::test::Bencher) {
        let mut mt = MT19937::new_unseeded();
        // Note that next_u32() periodically calls fill_next_state() every 624
        // uses, hence generate this many to get an average.
        b.iter(|| {
            let mut r = 0;
            for _ in 0..624 {
                r = mt.next_u32();
            }
            r // return last value to prevent over-optimisation
        })
    }
    #[bench]
    fn benchmark_gen_312_u64(b: &mut ::test::Bencher) {
        let mut mt = MT19937::new_unseeded();
        // Note that next_u64() periodically calls fill_next_state() every 312
        // uses, hence generate this many to get an average.
        b.iter(|| {
            let mut r = 0;
            for _ in 0..312 {
                r = mt.next_u64();
            }
            r // return last value to prevent over-optimisation
        })
    }
}
// Benchmarks for the 64-bit Mersenne Twister (MT19937-64); kept parallel to
// the 32-bit module above so the numbers are comparable.
mod mt19937_64 {
    use rand::Rng;
    use mersenne_twister::MT19937_64;
    // Cost of constructing an unseeded generator.
    #[bench]
    fn benchmark_seeding(b: &mut ::test::Bencher) {
        b.iter(|| MT19937_64::new_unseeded())
    }
    #[bench]
    fn benchmark_fill_next_state(b: &mut ::test::Bencher) {
        let mt = MT19937_64::new_unseeded();
        b.iter(|| {
            let mut mt = mt.clone();
            // Note that the first call to next_u32() triggers a call
            // to the fill_next_state() method, which is really what I
            // want to benchmark here.
            mt.next_u64()
        })
    }
    #[bench]
    fn benchmark_gen_624_u32(b: &mut ::test::Bencher) {
        let mut mt = MT19937_64::new_unseeded();
        // Note that next_u32() periodically calls fill_next_state() every 312
        // uses. Use 624 to be equivalent to 32-bit implementation.
        b.iter(|| {
            let mut r = 0;
            for _ in 0..624 {
                r = mt.next_u32();
            }
            r // return last value to prevent over-optimisation
        })
    }
    #[bench]
    fn benchmark_gen_312_u64(b: &mut ::test::Bencher) {
        let mut mt = MT19937_64::new_unseeded();
        // Note that next_u64() periodically calls fill_next_state() every 312
        // uses, hence generate this many to get an average.
        b.iter(|| {
            let mut r = 0;
            for _ in 0..312 {
                r = mt.next_u64();
            }
            r // return last value to prevent over-optimisation
        })
    }
}
| true |
a462467359a0ca5f3e7a155bebeb54103b10dc73
|
Rust
|
Patryk-Kumor/University
|
/Rust/Projekt - aplikacja okienkowa/address-book/src/db.rs
|
UTF-8
| 2,293 | 2.90625 | 3 |
[] |
no_license
|
extern crate rustbreak;
extern crate failure;
use rustbreak::FileDatabase;
use rustbreak::deser::Ron;
use std::collections::HashMap;
/// A single address-book entry; all fields are stored as plain strings.
#[derive(Eq, PartialEq, Debug, Serialize, Deserialize, Clone)]
pub struct Person {
    pub name: String,
    pub surname: String,
    pub country: String,
    pub city: String,
    pub address: String,
    pub phone: String,
    pub email: String,
}
pub fn read_list(path: String) -> Result< Vec<String>, failure::Error> {
let db = FileDatabase::<HashMap<String, Person>, Ron>::from_path(path, HashMap::new())?;
db.load()?;
let mut res: Vec<String> = vec![];
db.read(|db| {
for elem in db {
res.push(elem.0.to_string());
}
})?;
res.sort();
Ok(res)
}
/// Loads the database at `path` and returns the contact stored under `search`.
///
/// NOTE(review): the `unwrap()` inside the read closure panics when `search`
/// is not present — confirm callers always pass an existing key, or consider
/// returning an error instead.
pub fn read_concact(path: String, search: String) -> Result< (Person), failure::Error> {
    let db = FileDatabase::<HashMap<String, Person>, Ron>::from_path(path, HashMap::new())?;
    db.load()?;
    // Placeholder value; overwritten inside the closure below.
    let mut res : Person = Person {
        name: String::new(),
        surname: String::new(),
        country: String::new(),
        city: String::new(),
        address: String::new(),
        phone: String::new(),
        email: String::new()};
    db.read(|db| {
        res = db.get(&search).unwrap().clone() ;
    })?;
    Ok(res.clone())
}
pub fn delete_contact(path: String, search: String) -> Result< (), failure::Error> {
let db = FileDatabase::<HashMap<String, Person>, Ron>::from_path(path, HashMap::new())?;
db.load()?;
let mut new_data : HashMap<String, Person> = HashMap::new();
db.read(|db| {
new_data = db.clone();
})?;
new_data.remove(&search);
db.put_data(new_data, true)?; db.save()?;
Ok(())
}
pub fn add_contact(path: String, search: String, person: Person) -> Result< (bool), failure::Error> {
let db = FileDatabase::<HashMap<String, Person>, Ron>::from_path(path, HashMap::new())?;
db.load()?;
let mut check : bool = true;
db.read(|db| {
for elem in db {
if elem.0.to_string() == search {
check = false;
}
}
})?;
if check {
db.write(|db| {
db.insert(search.into(), person);
})?;
db.save()?;
return Ok(true);
}
else {
return Ok(false);
}
}
| true |
39aec71c3cba3e5f856b07a4c66c51e6302544d7
|
Rust
|
faern/rips-old
|
/src/icmp/icmp_tx.rs
|
UTF-8
| 3,625 | 2.8125 | 3 |
[
"Apache-2.0",
"MIT"
] |
permissive
|
use {Tx, Payload, CustomPayload, TxResult};
use ipv4::{Ipv4Fields, IpNextHeaderProtocols};
use pnet::packet::MutablePacket;
use pnet::packet::icmp::{IcmpCode, IcmpType, MutableIcmpPacket, checksum, IcmpTypes};
use pnet::packet::icmp::echo_request::{IcmpCodes, MutableEchoRequestPacket};
/// Header parameters for an outgoing ICMP packet, plus a callback that
/// fills in the type-specific bytes of the 8-byte header.
pub struct IcmpFields {
    pub icmp_type: IcmpType,
    pub icmp_code: IcmpCode,
    pub build_header: Box<Fn(&mut MutableIcmpPacket)>,
}
impl IcmpFields {
    /// Fields for an Echo Request (ping) with identifier and sequence
    /// number both fixed to zero.
    pub fn echo_request() -> Self {
        let build_header = Box::new(|pkg: &mut MutableIcmpPacket| {
            let mut echo_pkg = MutableEchoRequestPacket::new(pkg.packet_mut()).unwrap();
            echo_pkg.set_identifier(0);
            echo_pkg.set_sequence_number(0);
        });
        IcmpFields {
            icmp_type: IcmpTypes::EchoRequest,
            icmp_code: IcmpCodes::NoCode,
            build_header: build_header,
        }
    }
}
/// ICMP transmitter layered on top of an IPv4-level `Tx`.
#[derive(Clone)]
pub struct IcmpTx<T> {
    tx: T,
}
impl<T: Tx<Ipv4Fields>> IcmpTx<T> {
    pub fn new(tx: T) -> Self {
        IcmpTx { tx: tx }
    }
    // Sends an Echo Request packet (ping) with the given payload.
    pub fn send_echo(&mut self, payload: &[u8]) -> Option<TxResult<()>> {
        let mut payload = CustomPayload::new(IcmpFields::echo_request(), payload);
        self.send(&mut payload)
    }
}
impl<T: Tx<Ipv4Fields>> Tx<IcmpFields> for IcmpTx<T> {
    // Wraps the ICMP payload in a builder that prepends the ICMP header,
    // then hands the result to the underlying IPv4 transmitter.
    fn send<'p, P>(&mut self, payload: &'p mut P) -> Option<TxResult<()>>
        where P: Payload<IcmpFields>
    {
        let mut builder = IcmpBuilder::new(payload);
        self.tx.send(&mut builder)
    }
}
/// Adapts an ICMP payload into an IPv4 payload by prepending the 8-byte
/// ICMP header and computing the checksum over the finished packet.
pub struct IcmpBuilder<'p, P: Payload<IcmpFields> + 'p> {
    payload: &'p mut P,
}
impl<'p, P: Payload<IcmpFields>> IcmpBuilder<'p, P> {
    pub fn new(payload: &'p mut P) -> Self {
        IcmpBuilder { payload: payload }
    }
}
impl<'p, P: Payload<IcmpFields>> Payload<Ipv4Fields> for IcmpBuilder<'p, P> {
    fn fields(&self) -> &Ipv4Fields {
        static FIELDS: Ipv4Fields = Ipv4Fields(IpNextHeaderProtocols::Icmp);
        &FIELDS
    }
    fn num_packets(&self) -> usize {
        self.payload.num_packets()
    }
    fn packet_size(&self) -> usize {
        // 8 header bytes precede the inner payload.
        8 + self.payload.packet_size()
    }
    fn build(&mut self, buffer: &mut [u8]) {
        let mut pkg = MutableIcmpPacket::new(buffer).unwrap();
        {
            // Fill the fixed header: type, code, then the type-specific
            // bytes via the caller-supplied callback.
            let mut header_pkg = MutableIcmpPacket::new(&mut pkg.packet_mut()[..8]).unwrap();
            header_pkg.set_icmp_type(self.payload.fields().icmp_type);
            header_pkg.set_icmp_code(self.payload.fields().icmp_code);
            (self.payload.fields().build_header)(&mut header_pkg);
        }
        self.payload.build(&mut pkg.packet_mut()[8..]);
        // The checksum covers header and payload, so compute it last.
        let checksum = checksum(&pkg.to_immutable());
        pkg.set_checksum(checksum);
    }
}
#[cfg(test)]
mod tests {
use super::*;
use icmp::{IcmpTypes, EchoCodes};
use pnet::packet::Packet;
use pnet::packet::icmp::echo_request::EchoRequestPacket;
use testing::MockTx;
#[test]
fn test_send_echo() {
let (tx, read_handle) = MockTx::new();
let mut testee = IcmpTx::new(tx);
let tx_result = testee.send_echo(&[9, 55]).unwrap();
assert!(tx_result.is_ok());
let data = read_handle.try_recv().expect("Expected echo packet");
let echo_pkg = EchoRequestPacket::new(&data).unwrap();
assert_eq!(IcmpTypes::EchoRequest, echo_pkg.get_icmp_type());
assert_eq!(EchoCodes::NoCode, echo_pkg.get_icmp_code());
assert_eq!(61128, echo_pkg.get_checksum()); // For ident&seq == 0
assert_eq!([9, 55], echo_pkg.payload());
}
}
| true |
1c7a69e36851414d15f08c10bf91f35f2662084d
|
Rust
|
naomijub/Observable-btree
|
/src/model.rs
|
UTF-8
| 7,170 | 3.5 | 4 |
[
"MIT"
] |
permissive
|
use std::{
collections::{BTreeMap, HashMap},
convert::{TryFrom, TryInto},
};
/// Write-operation kinds (currently only `Add` and `Replace` are listed).
pub enum Operation {
    Add,
    Replace,
    // ...
}
/// Available types to use as `BTree` values.
#[derive(Debug, Clone, PartialEq)]
pub enum Types {
    Char(char),
    Integer(isize),
    UInteger(usize),
    String(String),
    Float(f64),
    Boolean(bool),
    Vector(Vec<Types>),
    HashMap(HashMap<String, Types>),
    BTreeMap(BTreeMap<String, Types>),
    // A single named entry; the value is boxed because Types is recursive.
    KeyValue(String, Box<Types>),
    Nil,
}
// Ergonomic conversions from primitive and std container types into `Types`.
impl From<char> for Types {
    fn from(t: char) -> Self {
        Types::Char(t)
    }
}
impl From<isize> for Types {
    fn from(t: isize) -> Self {
        Types::Integer(t)
    }
}
impl From<i32> for Types {
    fn from(t: i32) -> Self {
        Types::Integer(t as isize)
    }
}
// NOTE(review): on 32-bit targets `i64 as isize` truncates silently —
// confirm whether values outside the isize range are possible here.
impl From<i64> for Types {
    fn from(t: i64) -> Self {
        Types::Integer(t as isize)
    }
}
impl From<usize> for Types {
    fn from(t: usize) -> Self {
        Types::UInteger(t)
    }
}
impl From<String> for Types {
    fn from(t: String) -> Self {
        Types::String(t)
    }
}
impl From<&str> for Types {
    fn from(t: &str) -> Self {
        Types::String(t.to_owned())
    }
}
impl From<f64> for Types {
    fn from(t: f64) -> Self {
        Types::Float(t)
    }
}
impl From<bool> for Types {
    fn from(t: bool) -> Self {
        Types::Boolean(t)
    }
}
// Container conversions recurse element-wise via `Into<Types>`.
impl<T: Into<Types>> From<Vec<T>> for Types {
    fn from(t: Vec<T>) -> Self {
        let aux = t.into_iter().map(|e| e.into()).collect::<Vec<Types>>();
        Types::Vector(aux)
    }
}
impl<T: Into<Types>> From<HashMap<String, T>> for Types {
    fn from(t: HashMap<String, T>) -> Self {
        let aux = t
            .into_iter()
            .map(|(k, v)| (k, v.into()))
            .collect::<HashMap<String, Types>>();
        Types::HashMap(aux)
    }
}
impl<T: Into<Types>> From<BTreeMap<String, T>> for Types {
    fn from(t: BTreeMap<String, T>) -> Self {
        let aux = t
            .into_iter()
            .map(|(k, v)| (k, v.into()))
            .collect::<BTreeMap<String, Types>>();
        Types::BTreeMap(aux)
    }
}
impl<T: Into<Types>> From<(String, T)> for Types {
    fn from(t: (String, T)) -> Self {
        let (k, v) = t;
        let (k, v): (String, Box<Types>) = (k, Box::new(v.into()));
        Types::KeyValue(k, v)
    }
}
// `None` maps to `Types::Nil`; `Some(v)` converts the inner value directly
// (i.e. there is no dedicated "Some" wrapper variant).
impl<T: Into<Types>> From<Option<T>> for Types {
    fn from(t: Option<T>) -> Self {
        match t {
            None => Types::Nil,
            Some(v) => v.into(),
        }
    }
}
// Fallible extraction of primitive values from `Types`. Each conversion
// succeeds only when the enum holds the exact matching variant; otherwise
// a descriptive error string is produced.
impl TryInto<isize> for Types {
    type Error = String;
    fn try_into(self) -> Result<isize, Self::Error> {
        if let Types::Integer(value) = self {
            Ok(value)
        } else {
            Err(format!("Could not convert {:?} to isize", self))
        }
    }
}
impl TryInto<usize> for Types {
    type Error = String;
    fn try_into(self) -> Result<usize, Self::Error> {
        if let Types::UInteger(value) = self {
            Ok(value)
        } else {
            Err(format!("Could not convert {:?} to usize", self))
        }
    }
}
impl TryInto<char> for Types {
    type Error = String;
    fn try_into(self) -> Result<char, Self::Error> {
        if let Types::Char(value) = self {
            Ok(value)
        } else {
            Err(format!("Could not convert {:?} to char", self))
        }
    }
}
impl TryInto<f64> for Types {
    type Error = String;
    fn try_into(self) -> Result<f64, Self::Error> {
        if let Types::Float(value) = self {
            Ok(value)
        } else {
            Err(format!("Could not convert {:?} to f64", self))
        }
    }
}
impl TryInto<bool> for Types {
    type Error = String;
    fn try_into(self) -> Result<bool, Self::Error> {
        if let Types::Boolean(value) = self {
            Ok(value)
        } else {
            Err(format!("Could not convert {:?} to bool", self))
        }
    }
}
impl TryInto<String> for Types {
    type Error = String;
    fn try_into(self) -> Result<String, Self::Error> {
        if let Types::String(value) = self {
            Ok(value)
        } else {
            Err(format!("Could not convert {:?} to String", self))
        }
    }
}
/// Extracts a key/value pair from a `Types::KeyValue` variant.
impl<T: TryFrom<Types>> TryInto<(String, T)> for Types {
    type Error = String;
    fn try_into(self) -> Result<(String, T), Self::Error> {
        let err = format!("Could not convert {:?} to KeyValue", self);
        match self {
            Types::KeyValue(k, v) => Ok((k, (*v).try_into().map_err(|_| err.clone())?)),
            // Reuse the precomputed message (the original re-formatted it).
            _ => Err(err),
        }
    }
}
/// Converts a `Types::Vector` into `Vec<T>`, failing on the first element
/// that cannot be converted.
impl<T: TryFrom<Types>> TryInto<Vec<T>> for Types {
    type Error = String;
    fn try_into(self) -> Result<Vec<T>, Self::Error> {
        let err = format!("Could not convert {:?} to Vec<T>", self);
        match self {
            Types::Vector(t) => t
                .into_iter()
                .map(|e| e.try_into().map_err(|_| err.clone()))
                .collect::<Result<Vec<T>, String>>(),
            _ => Err(err),
        }
    }
}
/// Converts a `Types::HashMap` into `HashMap<String, T>`. Uses a fallible
/// `collect`, which short-circuits on the first bad value instead of the
/// original fold that kept converting remaining entries after an error.
/// The returned error string is unchanged.
impl<T: TryFrom<Types>> TryInto<HashMap<String, T>> for Types {
    type Error = String;
    fn try_into(self) -> Result<HashMap<String, T>, Self::Error> {
        let err = format!("Could not convert {:?} to HashMap<String, T>", self);
        match self {
            Types::HashMap(t) => t
                .into_iter()
                .map(|(k, v)| Ok((k, v.try_into().map_err(|_| err.clone())?)))
                .collect::<Result<HashMap<String, T>, String>>(),
            _ => Err(err),
        }
    }
}
/// Converts a `Types::BTreeMap` into `BTreeMap<String, T>` using the same
/// short-circuiting fallible `collect` strategy.
impl<T: TryFrom<Types>> TryInto<BTreeMap<String, T>> for Types {
    type Error = String;
    fn try_into(self) -> Result<BTreeMap<String, T>, Self::Error> {
        let err = format!("Could not convert {:?} to BTreeMap<String, T>", self);
        match self {
            Types::BTreeMap(t) => t
                .into_iter()
                .map(|(k, v)| Ok((k, v.try_into().map_err(|_| err.clone())?)))
                .collect::<Result<BTreeMap<String, T>, String>>(),
            _ => Err(err),
        }
    }
}
| true |
91e2fec35e687ee8c956343f8914ed884a2aef95
|
Rust
|
guoonho/fftactics-arithmeticks-calculator
|
/src/main.rs
|
UTF-8
| 250 | 2.53125 | 3 |
[] |
no_license
|
use structopt::StructOpt;
mod arithmeticks;
/// Command-line arguments: a single expression/state string.
#[derive(StructOpt)]
struct Cli {
    state: String
}
/// Parses the CLI, validates the expression, then prints the computed result.
fn main() {
    let args = Cli::from_args();
    arithmeticks::validate(&args.state);
    let result = arithmeticks::calculate(&args.state);
    println!("{}", result);
}
| true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.