blob_id
stringlengths
40
40
language
stringclasses
1 value
repo_name
stringlengths
5
140
path
stringlengths
5
183
src_encoding
stringclasses
6 values
length_bytes
int64
12
5.32M
score
float64
2.52
4.94
int_score
int64
3
5
detected_licenses
listlengths
0
47
license_type
stringclasses
2 values
text
stringlengths
12
5.32M
download_success
bool
1 class
0f7a39a3022c4ddd8943a74c362e541fdb556682
Rust
across-travel/ruby-on-rust
/src/ast/node.rs
UTF-8
30,032
2.78125
3
[]
no_license
// https://raw.githubusercontent.com/whitequark/parser/2a73841d6da04a5ab9bd270561165fd766722d43/lib/parser/builders/default.rb use parser::token::Token; #[derive(Debug, PartialEq, Clone)] pub enum Node { // for rules which doesnot need to return a real node Dummy, // for rules which returns a vec of nodes Nodes(Vec<Node>), // for rules which may returns a result being `nil`, when the rule is applied so we cannot return a None, i guess. still not sure about this. Null, Nil, True, False, Int(isize), Str(String), DStr(Vec<Node>), Sym(String), Array(Vec<Node>), Pair { key: Box<Node>, value: Box<Node> }, Hash(Vec<Node>), // TODO Hash(Vec<Node::Pair>) after enum variants become types NSelf, LVar(String), Ident(String), Assign(Box<Node>, Token, Box<Node>), // TODO a dummy NodeType for builder.assign // assignable LVasgn(String, Vec<Node>), Begin(Vec<Node>), } // def unary_num(unary_t, numeric) // value, = *numeric // operator_loc = loc(unary_t) // case value(unary_t) // when '+' // value = +value // when '-' // value = -value // end // numeric.updated(nil, [ value ], // :location => // Source::Map::Operator.new( // operator_loc, // operator_loc.join(numeric.loc.expression))) // end // TODO INCOMPLETE // DUMMY ALWAYS Node::Int pub fn unary_num(t_unary: Token, n_simple_numeric: Node) -> Node { let mut numeric_value = if let Node::Int(int_value) = n_simple_numeric { int_value } else { panic!(); }; if let Token::T_UNARY_NUM(polarty) = t_unary { match polarty.as_ref() { "+" => (), "-" => { numeric_value = 0 - numeric_value; }, _ => { panic!(); } } } else { panic!(); } return Node::Int(numeric_value); } // # Strings // def string(string_t) // n(:str, [ string_value(string_t) ], // delimited_string_map(string_t)) // end // def string_internal(string_t) // n(:str, [ string_value(string_t) ], // unquoted_map(string_t)) // end // def string_compose(begin_t, parts, end_t) // if collapse_string_parts?(parts) // if begin_t.nil? && end_t.nil? 
// parts.first // else // n(:str, parts.first.children, // string_map(begin_t, parts, end_t)) // end // else // n(:dstr, [ *parts ], // string_map(begin_t, parts, end_t)) // end // end // TODO INCOMPLETE pub fn string_compose(parts: Node) -> Node { if is_collapse_string_parts(&parts) {} // TODO DUMMY if let Node::Str(string_value) = parts { return Node::Str(string_value); } if let Node::Nodes(str_nodes) = parts { if let Node::Str(str_value) = (*str_nodes.get(0).unwrap()).clone() { return Node::Str(str_value); } } panic!("string_compose UNIMPL"); } // def character(char_t) // n(:str, [ string_value(char_t) ], // prefix_string_map(char_t)) // end // def __FILE__(__FILE__t) // n0(:__FILE__, // token_map(__FILE__t)) // end // # Symbols // def symbol(symbol_t) // n(:sym, [ string_value(symbol_t).to_sym ], // prefix_string_map(symbol_t)) // end // def symbol_internal(symbol_t) // n(:sym, [ string_value(symbol_t).to_sym ], // unquoted_map(symbol_t)) // end // def symbol_compose(begin_t, parts, end_t) // if collapse_string_parts?(parts) // str = parts.first // // n(:sym, [ str.children.first.to_sym ], // collection_map(begin_t, str.loc.expression, end_t)) // elsif @parser.version == 18 && parts.empty? // diagnostic :error, :empty_symbol, nil, loc(begin_t).join(loc(end_t)) // else // n(:dsym, [ *parts ], // collection_map(begin_t, parts, end_t)) // end // end // # Executable strings // def xstring_compose(begin_t, parts, end_t) // n(:xstr, [ *parts ], // string_map(begin_t, parts, end_t)) // end // # Indented (interpolated, noninterpolated, executable) strings // def dedent_string(node, dedent_level) // if !dedent_level.nil? 
// dedenter = Lexer::Dedenter.new(dedent_level) // // if node.type == :str // str = node.children.first // dedenter.dedent(str) // elsif node.type == :dstr || node.type == :xstr // node.children.each do |str_node| // if str_node.type == :str // str = str_node.children.first // dedenter.dedent(str) // else // dedenter.interrupt // end // end // end // end // node // end // # Regular expressions // def regexp_options(regopt_t) // options = value(regopt_t). // each_char.sort.uniq. // map(&:to_sym) // // n(:regopt, options, // token_map(regopt_t)) // end // def regexp_compose(begin_t, parts, end_t, options) // begin // static_regexp(parts, options) // rescue RegexpError => e // diagnostic :error, :invalid_regexp, { :message => e.message }, // loc(begin_t).join(loc(end_t)) // end // // n(:regexp, (parts << options), // regexp_map(begin_t, end_t, options)) // end // # Arrays // def array(begin_t, elements, end_t) // n(:array, elements, // collection_map(begin_t, elements, end_t)) // end // def splat(star_t, arg=nil) // if arg.nil? // n0(:splat, // unary_op_map(star_t)) // else // n(:splat, [ arg ], // unary_op_map(star_t, arg)) // end // end // def word(parts) // if collapse_string_parts?(parts) // parts.first // else // n(:dstr, [ *parts ], // collection_map(nil, parts, nil)) // end // end // def words_compose(begin_t, parts, end_t) // n(:array, [ *parts ], // collection_map(begin_t, parts, end_t)) // end // def symbols_compose(begin_t, parts, end_t) // parts = parts.map do |part| // case part.type // when :str // value, = *part // part.updated(:sym, [ value.to_sym ]) // when :dstr // part.updated(:dsym) // else // part // end // end // n(:array, [ *parts ], // collection_map(begin_t, parts, end_t)) // end // # Hashes // def pair(key, assoc_t, value) // n(:pair, [ key, value ], // binary_op_map(key, assoc_t, value)) // end // def pair_list_18(list) // if list.size % 2 != 0 // diagnostic :error, :odd_hash, nil, list.last.loc.expression // else // list. 
// each_slice(2).map do |key, value| // n(:pair, [ key, value ], // binary_op_map(key, nil, value)) // end // end // end // def pair_keyword(key_t, value) // key_map, pair_map = pair_keyword_map(key_t, value) // key = n(:sym, [ value(key_t).to_sym ], key_map) // n(:pair, [ key, value ], pair_map) // end // def pair_quoted(begin_t, parts, end_t, value) // end_t, pair_map = pair_quoted_map(begin_t, end_t, value) // key = symbol_compose(begin_t, parts, end_t) // n(:pair, [ key, value ], pair_map) // end // def kwsplat(dstar_t, arg) // n(:kwsplat, [ arg ], // unary_op_map(dstar_t, arg)) // end // def associate(begin_t, pairs, end_t) // n(:hash, [ *pairs ], // collection_map(begin_t, pairs, end_t)) // end // # Ranges // def range_inclusive(lhs, dot2_t, rhs) // n(:irange, [ lhs, rhs ], // binary_op_map(lhs, dot2_t, rhs)) // end // def range_exclusive(lhs, dot3_t, rhs) // n(:erange, [ lhs, rhs ], // binary_op_map(lhs, dot3_t, rhs)) // end // # // # Access // # // def self(token) // n0(:self, // token_map(token)) // end // def ident(token) // n(:ident, [ value(token).to_sym ], // variable_map(token)) // end // def ivar(token) // n(:ivar, [ value(token).to_sym ], // variable_map(token)) // end // def gvar(token) // n(:gvar, [ value(token).to_sym ], // variable_map(token)) // end // def cvar(token) // n(:cvar, [ value(token).to_sym ], // variable_map(token)) // end // def back_ref(token) // n(:back_ref, [ value(token).to_sym ], // token_map(token)) // end // def nth_ref(token) // n(:nth_ref, [ value(token) ], // token_map(token)) // end // def accessible(node) // case node.type // when :__FILE__ // if @emit_file_line_as_literals // n(:str, [ node.loc.expression.source_buffer.name ], // node.loc.dup) // else // node // end // // when :__LINE__ // if @emit_file_line_as_literals // n(:int, [ node.loc.expression.line ], // node.loc.dup) // else // node // end // // when :__ENCODING__ // n(:const, [ n(:const, [ nil, :Encoding], nil), :UTF_8 ], // node.loc.dup) // // when :ident 
// name, = *node // // if @parser.static_env.declared?(name) // node.updated(:lvar) // else // name, = *node // n(:send, [ nil, name ], // var_send_map(node)) // end // // else // node // end // end // TODO INCOMPLETE pub fn accessible(node: Node) -> Node { println!("building node:accessible, node: {:?}", node); return match node { Node::Ident(n_ident_value) => { // TODO DUMMY // TODO handle static_env let node = Node::LVar(n_ident_value); return node; } _ => node }; } // def const(name_t) // n(:const, [ nil, value(name_t).to_sym ], // constant_map(nil, nil, name_t)) // end // def const_global(t_colon3, name_t) // cbase = n0(:cbase, token_map(t_colon3)) // n(:const, [ cbase, value(name_t).to_sym ], // constant_map(cbase, t_colon3, name_t)) // end // def const_fetch(scope, t_colon2, name_t) // n(:const, [ scope, value(name_t).to_sym ], // constant_map(scope, t_colon2, name_t)) // end // def __ENCODING__(__ENCODING__t) // n0(:__ENCODING__, // token_map(__ENCODING__t)) // end // # // # Assignment // # // def assignable(node) // case node.type // when :cvar // node.updated(:cvasgn) // // when :ivar // node.updated(:ivasgn) // // when :gvar // node.updated(:gvasgn) // // when :const // if @parser.in_def? 
// diagnostic :error, :dynamic_const, nil, node.loc.expression // end // // node.updated(:casgn) // // when :ident // name, = *node // @parser.static_env.declare(name) // // node.updated(:lVasgn) // // when :nil, :self, :true, :false, // :__FILE__, :__LINE__, :__ENCODING__ // diagnostic :error, :invalid_assignment, nil, node.loc.expression // // when :back_ref, :nth_ref // diagnostic :error, :backref_assignment, nil, node.loc.expression // end // end // TODO INCOMPLETE pub fn assignable(node: Node) -> Node { println!("DEBUGGING node::assignable: {:?}", node); match node { Node::Ident(ident) => { // name, = *node // @parser.static_env.declare(name) // // node.updated(:lVasgn) // TODO handle appending nodes list // TODO handle static_env return Node::LVasgn(ident, vec![]); }, _ => { panic!("node::assignable: UNIMPL branch"); } } } // def const_op_assignable(node) // node.updated(:casgn) // end // def assign(lhs, eql_t, rhs) // (lhs << rhs).updated(nil, nil, // :location => lhs.loc. // with_operator(loc(eql_t)). // with_expression(join_exprs(lhs, rhs))) // end // TODO INCOMPLETE pub fn assign(lhs_node: Node, token: Token, rhs_node: Node) -> Node { match lhs_node { Node::LVasgn(var_str, mut nodes) => { nodes.push(rhs_node); return Node::LVasgn(var_str, nodes); }, _ => { panic!("node::assign UNIMPL"); } } } // def op_assign(lhs, op_t, rhs) // case lhs.type // when :gvasgn, :ivasgn, :lVasgn, :cvasgn, :casgn, :send, :csend // operator = value(op_t)[0..-1].to_sym // source_map = lhs.loc. // with_operator(loc(op_t)). 
// with_expression(join_exprs(lhs, rhs)) // // case operator // when :'&&' // n(:and_asgn, [ lhs, rhs ], source_map) // when :'||' // n(:or_asgn, [ lhs, rhs ], source_map) // else // n(:op_asgn, [ lhs, operator, rhs ], source_map) // end // // when :back_ref, :nth_ref // diagnostic :error, :backref_assignment, nil, lhs.loc.expression // end // end // def multi_lhs(begin_t, items, end_t) // n(:mlhs, [ *items ], // collection_map(begin_t, items, end_t)) // end // def multi_assign(lhs, eql_t, rhs) // n(:masgn, [ lhs, rhs ], // binary_op_map(lhs, eql_t, rhs)) // end // # // # Class and module definition // # // def def_class(class_t, name, // lt_t, superclass, // body, end_t) // n(:class, [ name, superclass, body ], // module_definition_map(class_t, name, lt_t, end_t)) // end // def def_sclass(class_t, lshft_t, expr, // body, end_t) // n(:sclass, [ expr, body ], // module_definition_map(class_t, nil, lshft_t, end_t)) // end // def def_module(module_t, name, // body, end_t) // n(:module, [ name, body ], // module_definition_map(module_t, name, nil, end_t)) // end // # // # Method (un)definition // # // def def_method(def_t, name_t, args, // body, end_t) // n(:def, [ value(name_t).to_sym, args, body ], // definition_map(def_t, nil, name_t, end_t)) // end // def def_singleton(def_t, definee, dot_t, // name_t, args, // body, end_t) // case definee.type // when :int, :str, :dstr, :sym, :dsym, // :regexp, :array, :hash // diagnostic :error, :singleton_literal, nil, definee.loc.expression // else // n(:defs, [ definee, value(name_t).to_sym, args, body ], // definition_map(def_t, dot_t, name_t, end_t)) // end // end // def undef_method(undef_t, names) // n(:undef, [ *names ], // keyword_map(undef_t, nil, names, nil)) // end // def alias(alias_t, to, from) // n(:alias, [ to, from ], // keyword_map(alias_t, nil, [to, from], nil)) // end // # // # Formal arguments // # // def args(begin_t, args, end_t, check_args=true) // args = check_duplicate_args(args) if check_args // 
n(:args, args, // collection_map(begin_t, args, end_t)) // end // def arg(name_t) // n(:arg, [ value(name_t).to_sym ], // variable_map(name_t)) // end // def optarg(name_t, eql_t, value) // n(:optarg, [ value(name_t).to_sym, value ], // variable_map(name_t). // with_operator(loc(eql_t)). // with_expression(loc(name_t).join(value.loc.expression))) // end // def restarg(star_t, name_t=nil) // if name_t // n(:restarg, [ value(name_t).to_sym ], // arg_prefix_map(star_t, name_t)) // else // n0(:restarg, // arg_prefix_map(star_t)) // end // end // def kwarg(name_t) // n(:kwarg, [ value(name_t).to_sym ], // kwarg_map(name_t)) // end // def kwoptarg(name_t, value) // n(:kwoptarg, [ value(name_t).to_sym, value ], // kwarg_map(name_t, value)) // end // def kwrestarg(dstar_t, name_t=nil) // if name_t // n(:kwrestarg, [ value(name_t).to_sym ], // arg_prefix_map(dstar_t, name_t)) // else // n0(:kwrestarg, // arg_prefix_map(dstar_t)) // end // end // def shadowarg(name_t) // n(:shadowarg, [ value(name_t).to_sym ], // variable_map(name_t)) // end // def blockarg(amper_t, name_t) // n(:blockarg, [ value(name_t).to_sym ], // arg_prefix_map(amper_t, name_t)) // end // def procarg0(arg) // if self.class.emit_procarg0 // arg.updated(:procarg0) // else // arg // end // end // # Ruby 1.8 block arguments // def arg_expr(expr) // if expr.type == :lVasgn // expr.updated(:arg) // else // n(:arg_expr, [ expr ], // expr.loc.dup) // end // end // def restarg_expr(star_t, expr=nil) // if expr.nil? // n0(:restarg, token_map(star_t)) // elsif expr.type == :lVasgn // expr.updated(:restarg) // else // n(:restarg_expr, [ expr ], // expr.loc.dup) // end // end // def blockarg_expr(amper_t, expr) // if expr.type == :lVasgn // expr.updated(:blockarg) // else // n(:blockarg_expr, [ expr ], // expr.loc.dup) // end // end // # MacRuby Objective-C arguments // def objc_kwarg(kwname_t, assoc_t, name_t) // kwname_l = loc(kwname_t) // if assoc_t.nil? 
# a: b, not a => b // kwname_l = kwname_l.resize(kwname_l.size - 1) // operator_l = kwname_l.end.resize(1) // else // operator_l = loc(assoc_t) // end // n(:objc_kwarg, [ value(kwname_t).to_sym, value(name_t).to_sym ], // Source::Map::ObjcKwarg.new(kwname_l, operator_l, loc(name_t), // kwname_l.join(loc(name_t)))) // end // def objc_restarg(star_t, name=nil) // if name.nil? // n0(:restarg, arg_prefix_map(star_t)) // elsif name.type == :arg # regular restarg // name.updated(:restarg, nil, // { :location => name.loc.with_operator(loc(star_t)) }) // else # restarg with objc_kwarg inside // n(:objc_restarg, [ name ], // unary_op_map(star_t, name)) // end // end // # // # Method calls // # // def call_type_for_dot(dot_t) // if !dot_t.nil? && value(dot_t) == :anddot // :csend // else // # This case is a bit tricky. ruby23.y returns the token tDOT with // # the value :dot, and the token :tANDDOT with the value :anddot. // # // # But, ruby{18..22}.y (which unconditionally expect tDOT) just // # return "." there, since they are to be kept close to the corresponding // # Ruby MRI grammars. // # // # Thankfully, we don't have to care. // :send // end // end // def call_method(receiver, dot_t, selector_t, // lparen_t=nil, args=[], rparen_t=nil) // type = call_type_for_dot(dot_t) // if selector_t.nil? 
// n(type, [ receiver, :call, *args ], // send_map(receiver, dot_t, nil, lparen_t, args, rparen_t)) // else // n(type, [ receiver, value(selector_t).to_sym, *args ], // send_map(receiver, dot_t, selector_t, lparen_t, args, rparen_t)) // end // end // def call_lambda(lambda_t) // if self.class.emit_lambda // n0(:lambda, expr_map(loc(lambda_t))) // else // n(:send, [ nil, :lambda ], // send_map(nil, nil, lambda_t)) // end // end // def block(method_call, begin_t, args, body, end_t) // _receiver, _selector, *call_args = *method_call // if method_call.type == :yield // diagnostic :error, :block_given_to_yield, nil, method_call.loc.keyword, [loc(begin_t)] // end // last_arg = call_args.last // if last_arg && last_arg.type == :block_pass // diagnostic :error, :block_and_blockarg, nil, last_arg.loc.expression, [loc(begin_t)] // end // if [:send, :csend, :super, :zsuper, :lambda].include?(method_call.type) // n(:block, [ method_call, args, body ], // block_map(method_call.loc.expression, begin_t, end_t)) // else // # Code like "return foo 1 do end" is reduced in a weird sequence. // # Here, method_call is actually (return). // actual_send, = *method_call // block = // n(:block, [ actual_send, args, body ], // block_map(actual_send.loc.expression, begin_t, end_t)) // n(method_call.type, [ block ], // method_call.loc.with_expression(join_exprs(method_call, block))) // end // end // def block_pass(amper_t, arg) // n(:block_pass, [ arg ], // unary_op_map(amper_t, arg)) // end // def objc_varargs(pair, rest_of_varargs) // value, first_vararg = *pair // vararg_array = array(nil, [ first_vararg, *rest_of_varargs ], nil). // updated(:objc_varargs) // pair.updated(nil, [ value, vararg_array ], // { :location => pair.loc.with_expression( // pair.loc.expression.join(vararg_array.loc.expression)) }) // end // def attr_asgn(receiver, dot_t, selector_t) // method_name = (value(selector_t) + '=').to_sym // type = call_type_for_dot(dot_t) // # Incomplete method call. 
// n(type, [ receiver, method_name ], // send_map(receiver, dot_t, selector_t)) // end // def index(receiver, lbrack_t, indexes, rbrack_t) // n(:send, [ receiver, :[], *indexes ], // send_index_map(receiver, lbrack_t, rbrack_t)) // end // def index_asgn(receiver, lbrack_t, indexes, rbrack_t) // # Incomplete method call. // n(:send, [ receiver, :[]=, *indexes ], // send_index_map(receiver, lbrack_t, rbrack_t)) // end // def binary_op(receiver, operator_t, arg) // source_map = send_binary_op_map(receiver, operator_t, arg) // if @parser.version == 18 // operator = value(operator_t) // if operator == '!=' // method_call = n(:send, [ receiver, :==, arg ], source_map) // elsif operator == '!~' // method_call = n(:send, [ receiver, :=~, arg ], source_map) // end // if %w(!= !~).include?(operator) // return n(:not, [ method_call ], // expr_map(source_map.expression)) // end // end // n(:send, [ receiver, value(operator_t).to_sym, arg ], // source_map) // end // def match_op(receiver, match_t, arg) // source_map = send_binary_op_map(receiver, match_t, arg) // if (regexp = static_regexp_node(receiver)) // regexp.names.each do |name| // @parser.static_env.declare(name) // end // n(:match_with_lVasgn, [ receiver, arg ], // source_map) // else // n(:send, [ receiver, :=~, arg ], // source_map) // end // end // def unary_op(op_t, receiver) // case value(op_t) // when '+', '-' // method = value(op_t) + '@' // else // method = value(op_t) // end // n(:send, [ receiver, method.to_sym ], // send_unary_op_map(op_t, receiver)) // end // def not_op(not_t, begin_t=nil, receiver=nil, end_t=nil) // if @parser.version == 18 // n(:not, [ check_condition(receiver) ], // unary_op_map(not_t, receiver)) // else // if receiver.nil? // nil_node = n0(:begin, collection_map(begin_t, nil, end_t)) // n(:send, [ // nil_node, :'!' // ], send_unary_op_map(not_t, nil_node)) // else // n(:send, [ check_condition(receiver), :'!' 
], // send_map(nil, nil, not_t, begin_t, [receiver], end_t)) // end // end // end // # // # Control flow // # // # Logical operations: and, or // def logical_op(type, lhs, op_t, rhs) // n(type, [ lhs, rhs ], // binary_op_map(lhs, op_t, rhs)) // end // # Conditionals // def condition(cond_t, cond, then_t, // if_true, else_t, if_false, end_t) // n(:if, [ check_condition(cond), if_true, if_false ], // condition_map(cond_t, cond, then_t, if_true, else_t, if_false, end_t)) // end // def condition_mod(if_true, if_false, cond_t, cond) // n(:if, [ check_condition(cond), if_true, if_false ], // keyword_mod_map(if_true || if_false, cond_t, cond)) // end // def ternary(cond, question_t, if_true, colon_t, if_false) // n(:if, [ check_condition(cond), if_true, if_false ], // ternary_map(cond, question_t, if_true, colon_t, if_false)) // end // # Case matching // def when(when_t, patterns, then_t, body) // children = patterns << body // n(:when, children, // keyword_map(when_t, then_t, children, nil)) // end // def case(case_t, expr, when_bodies, else_t, else_body, end_t) // n(:case, [ expr, *(when_bodies << else_body)], // condition_map(case_t, expr, nil, nil, else_t, else_body, end_t)) // end // # Loops // def loop(type, keyword_t, cond, do_t, body, end_t) // n(type, [ check_condition(cond), body ], // keyword_map(keyword_t, do_t, nil, end_t)) // end // def loop_mod(type, body, keyword_t, cond) // if body.type == :kwbegin // type = :"#{type}_post" // end // n(type, [ check_condition(cond), body ], // keyword_mod_map(body, keyword_t, cond)) // end // def for(for_t, iterator, in_t, iteratee, // do_t, body, end_t) // n(:for, [ iterator, iteratee, body ], // for_map(for_t, in_t, do_t, end_t)) // end // # Keywords // def keyword_cmd(type, keyword_t, lparen_t=nil, args=[], rparen_t=nil) // if type == :yield && args.count > 0 // last_arg = args.last // if last_arg.type == :block_pass // diagnostic :error, :block_given_to_yield, nil, loc(keyword_t), [last_arg.loc.expression] // end // 
end // n(type, args, // keyword_map(keyword_t, lparen_t, args, rparen_t)) // end // # BEGIN, END // def preexe(preexe_t, lbrace_t, compstmt, rbrace_t) // n(:preexe, [ compstmt ], // keyword_map(preexe_t, lbrace_t, [], rbrace_t)) // end // def postexe(postexe_t, lbrace_t, compstmt, rbrace_t) // n(:postexe, [ compstmt ], // keyword_map(postexe_t, lbrace_t, [], rbrace_t)) // end // # Exception handling // def rescue_body(rescue_t, // exc_list, assoc_t, exc_var, // then_t, compound_stmt) // n(:resbody, [ exc_list, exc_var, compound_stmt ], // rescue_body_map(rescue_t, exc_list, assoc_t, // exc_var, then_t, compound_stmt)) // end // def begin_body(compound_stmt, rescue_bodies=[], // else_t=nil, else_=nil, // ensure_t=nil, ensure_=nil) // if rescue_bodies.any? // if else_t // compound_stmt = // n(:rescue, // [ compound_stmt, *(rescue_bodies + [ else_ ]) ], // eh_keyword_map(compound_stmt, nil, rescue_bodies, else_t, else_)) // else // compound_stmt = // n(:rescue, // [ compound_stmt, *(rescue_bodies + [ nil ]) ], // eh_keyword_map(compound_stmt, nil, rescue_bodies, nil, nil)) // end // elsif else_t // statements = [] // if !compound_stmt.nil? // if compound_stmt.type == :begin // statements += compound_stmt.children // else // statements.push(compound_stmt) // end // end // statements.push( // n(:begin, [ else_ ], // collection_map(else_t, [ else_ ], nil))) // compound_stmt = // n(:begin, statements, // collection_map(nil, statements, nil)) // end // if ensure_t // compound_stmt = // n(:ensure, // [ compound_stmt, ensure_ ], // eh_keyword_map(compound_stmt, ensure_t, [ ensure_ ], nil, nil)) // end // compound_stmt // end // # // # Expression grouping // # // def compstmt(statements) // case // when statements.none? // nil // when statements.one? 
// statements.first // else // n(:begin, statements, // collection_map(nil, statements, nil)) // end // end pub fn compstmt(nodes: Node) -> Node { if let Node::Nodes(extracted_nodes) = nodes { match extracted_nodes.len() { 0 => { return Node::Null; } 1 => { return extracted_nodes.get(0).unwrap().clone(); } // TODO collection_map _ => { return Node::Begin(extracted_nodes); } } } else { panic!("compstmt: should pass in a Node::Nodes") } } // def begin(begin_t, body, end_t) // if body.nil? // # A nil expression: `()'. // n0(:begin, // collection_map(begin_t, nil, end_t)) // elsif body.type == :mlhs || // (body.type == :begin && // body.loc.begin.nil? && body.loc.end.nil?) // # Synthesized (begin) from compstmt "a; b" or (mlhs) // # from multi_lhs "(a, b) = *foo". // n(body.type, body.children, // collection_map(begin_t, body.children, end_t)) // else // n(:begin, [ body ], // collection_map(begin_t, [ body ], end_t)) // end // end // def begin_keyword(begin_t, body, end_t) // if body.nil? // # A nil expression: `begin end'. // n0(:kwbegin, // collection_map(begin_t, nil, end_t)) // elsif (body.type == :begin && // body.loc.begin.nil? && body.loc.end.nil?) // # Synthesized (begin) from compstmt "a; b". // n(:kwbegin, body.children, // collection_map(begin_t, body.children, end_t)) // else // n(:kwbegin, [ body ], // collection_map(begin_t, [ body ], end_t)) // end // end // # // # HELPERS // # // # Extract a static string from e.g. a regular expression, // # honoring the fact that MRI expands interpolations like #{""} // # at parse time. // def static_string(nodes) // nodes.map do |node| // case node.type // when :str // node.children[0] // when :begin // if (string = static_string(node.children)) // string // else // return nil // end // else // return nil // end // end.join // end // def static_regexp(parts, options) // source = static_string(parts) // return nil if source.nil? 
// source = case // when options.children.include?(:u) // source.encode(Encoding::UTF_8) // when options.children.include?(:e) // source.encode(Encoding::EUC_JP) // when options.children.include?(:s) // source.encode(Encoding::WINDOWS_31J) // when options.children.include?(:n) // source.encode(Encoding::BINARY) // else // source // end // Regexp.new(source, (Regexp::EXTENDED if options.children.include?(:x))) // end // def static_regexp_node(node) // if node.type == :regexp // parts, options = node.children[0..-2], node.children[-1] // static_regexp(parts, options) // end // end // def collapse_string_parts?(parts) // parts.one? && // [:str, :dstr].include?(parts.first.type) // end fn is_collapse_string_parts(parts: &Node) -> bool { true } // def value(token) // token[0] // end // def string_value(token) // unless token[0].valid_encoding? // diagnostic(:error, :invalid_encoding, nil, token[1]) // end // token[0] // end // def loc(token) // # Pass through `nil`s and return nil for tNL. // token[1] if token && token[0] // end // def diagnostic(type, reason, arguments, location, highlights=[]) // @parser.diagnostics.process( // Diagnostic.new(type, reason, arguments, location, highlights)) // if type == :error // @parser.send :yyerror // end // end // end
true
e07ca38d20d3a044e5e45e8ea8dfcd17de8d1f29
Rust
sapsan4eg/dialog
/src/test/mod.rs
UTF-8
350
2.828125
3
[]
no_license
use log::*;
use Logger;
use Handler;

/// Minimal handler used only by the test below: echoes each record's
/// formatted message to stdout and reports the record as handled.
struct DummyHandler;

impl Handler for DummyHandler {
    fn handle(&self, record: &LogRecord) -> bool {
        let message = record.args().to_string();
        println!("{}", message);
        true
    }
}

/// Smoke test: a logger with an appended handler accepts an `info!` record
/// without panicking.
#[test]
fn test_handler() {
    let logger = Logger::new(LogLevelFilter::Info);
    logger.append(DummyHandler);
    info!("hello");
}
true
a65f38e5f09806be52c28be604ba217ba022326f
Rust
pluxtore/maze-server
/maze_gameserver/src/gamelogic/unlocks.rs
UTF-8
615
3.171875
3
[]
no_license
use serde::{Serialize, Deserialize};

/// Bit-set of unlock flags packed into a single byte; each bit index marks
/// one unlock as achieved.
#[derive(Debug, Clone, Serialize, Deserialize,Copy)]
pub struct Unlocks {
    raw: u8,
}

impl Unlocks {
    /// Creates an empty set with no unlocks achieved.
    pub fn new() -> Self {
        Self { raw: 0 }
    }

    /// Returns whether the unlock at `index` (bit position) is set.
    pub fn get(&self, index: u8) -> bool {
        (self.raw >> index) & 1 == 1
    }

    /// Returns the raw backing byte.
    pub fn get_raw(&self) -> u8 {
        self.raw
    }

    /// Counts how many of the unlock bits 0..7 are set.
    ///
    /// NOTE(review): bit 7 is never counted here even though `set(7)` can
    /// set it — confirm that only seven unlock slots exist before relying
    /// on this total.
    pub fn get_total(&self) -> i32 {
        let mut total = 0;
        for index in 0..7 {
            if self.get(index) {
                total += 1;
            }
        }
        total
    }

    /// Marks the unlock at `index` (bit position) as achieved.
    pub fn set(&mut self, index: u8) {
        self.raw |= 1 << index;
    }
}
true
847999430b4873e182a729c189d95d7139660039
Rust
cthulhua/aoc2020
/d19/src/main.rs
UTF-8
546
2.5625
3
[]
no_license
#[macro_use]
extern crate pest_derive;

use pest::Parser;
use std::error::Error;
use std::fs::File;
use std::io::{BufRead, BufReader};

/// Parser generated by pest from the grammar in `rules.pest`.
#[derive(Parser)]
#[grammar = "rules.pest"]
struct RuleParser;

/// Reads the file named by the first CLI argument and prints (via `dbg!`)
/// how many of its lines parse successfully as grammar rule `r0`.
///
/// # Errors
/// Returns any I/O error from opening the input file.
fn main() -> Result<(), Box<dyn Error>> {
    // `expect` with a usage hint instead of a bare `unwrap` so a missing
    // argument produces an actionable message.
    let filename = std::env::args()
        .nth(1)
        .expect("usage: d19 <input-file>");
    let input = File::open(filename)?;
    let buffered = BufReader::new(input);

    let v: usize = buffered
        .lines()
        .filter(|line| match line {
            Ok(text) => RuleParser::parse(Rule::r0, text).is_ok(),
            // Previously an unlabelled unwrap; keep failing loudly on a read
            // error but say why.
            Err(e) => panic!("failed to read input line: {}", e),
        })
        .count();
    dbg!(v);
    Ok(())
}
true
faef61c9743c3cef146e2739d1c4083ac0b4fa61
Rust
oshbec/grit
/tests/common/test_bed.rs
UTF-8
9,163
2.8125
3
[]
no_license
//! Integration-test harness: a throwaway directory pair in which `grit` and
//! real `git` perform parallel operations whose on-disk results are compared.

use std::{env, fs, path::PathBuf, process::Command};
use uuid::Uuid;

use grit::compression;

/// A temporary root holding two parallel directories: `workspace` (operated
/// on by `grit`) and `twin` (operated on by real `git`).
#[derive(Debug)]
pub struct TestBed {
    // Uniquely named temporary root; removed again by `teardown`.
    pub root: PathBuf,
}

#[allow(dead_code)]
impl TestBed {
    /// Creates a fresh, uniquely named test bed, makes `workspace` the CWD,
    /// and pins git author/committer identity via environment variables.
    ///
    /// NOTE(review): `set_current_dir`/`set_var` are process-global, so
    /// parallel tests in the same process can interfere — confirm the test
    /// suite runs these serially.
    pub fn setup() -> TestBed {
        let root = env::temp_dir().join(format!("grit_test/{}", Uuid::new_v4()));
        let test_bed = TestBed { root };
        fs::create_dir_all(test_bed.workspace()).expect("Couldn't create workspace directory");
        fs::create_dir_all(test_bed.twin()).expect("Couldn't create twin directory");
        env::set_current_dir(test_bed.workspace()).expect("Couldn't set workspace to CWD");
        env::set_var("GIT_AUTHOR_NAME", "Count Dracula");
        env::set_var("GIT_AUTHOR_EMAIL", "count@dracula");
        env::set_var("GIT_COMMITTER_NAME", "Count Dracula");
        env::set_var("GIT_COMMITTER_EMAIL", "count@dracula");
        test_bed
    }

    /// Removes the whole test bed from disk; panics if deletion fails.
    pub fn teardown(&self) {
        fs::remove_dir_all(&self.root).expect("Couldn't delete the test workspace");
        assert!(!&self.root.exists());
    }

    // Path to the workspace directory, where `grit` operates
    pub fn workspace(&self) -> PathBuf {
        self.root.join("workspace")
    }

    // Path to the twin directory, where `git` operates
    pub fn twin(&self) -> PathBuf {
        self.root.join("twin")
    }

    // Sugar to get both workspace and twin for iteration in less boilerplate
    fn test_parallels(&self) -> Vec<PathBuf> {
        vec![self.workspace(), self.twin()]
    }

    // Create a file to both `workspace` and twin
    pub fn create_file(&self, relative_path: &str, contents: &str) {
        for test_parallel in self.test_parallels() {
            let path = test_parallel.join(relative_path);
            simple_write_file(&path, contents);
        }
    }

    // Create a directory in both `workspace` and `twin`
    pub fn create_directory(&self, relative_path: &str) {
        for test_parallel in self.test_parallels() {
            let path = test_parallel.join(relative_path);
            fs::create_dir_all(&path).expect("Couldn't create duplicate directory");
        }
    }

    // Run a `git` command in `twin`
    // NOTE(review): only spawn failures panic; a non-zero git exit status is
    // silently ignored — confirm whether that is intentional.
    pub fn git_command(&self, args: Vec<&str>) {
        Command::new("git")
            .args(args)
            .current_dir(&self.twin())
            .output()
            .expect("Git command failed");
    }

    // Finds the twin version of a path in the TestBed
    pub fn find_twin(&self, path: &PathBuf) -> PathBuf {
        let workspace = self.workspace().to_str().unwrap().to_owned();
        let twin = self.twin().to_str().unwrap().to_owned();
        // Stringify the path, and replace the part matching the workspace with the twin
        let converted_path = path.to_str().unwrap().to_owned().replace(&workspace, &twin);
        PathBuf::from(converted_path)
    }

    /// Returns `true` when `path` (relative to the workspace) exists with
    /// identical contents in the twin: all descendant files for a directory,
    /// the single file otherwise. A path that is neither file nor directory
    /// yields `false`.
    pub fn contained_by_twin(&self, path: &str) -> bool {
        let path_in_workspace = self.workspace().join(path);
        let path_in_twin = self.find_twin(&path_in_workspace);
        if path_in_workspace.is_dir() {
            let workspace_files = descendent_files(&path_in_workspace);
            return workspace_files.iter().all(|file| {
                let twin = self.find_twin(file);
                let identical = files_are_identical(file, &twin);
                if !identical {
                    // Dump both directory trees to aid debugging the mismatch.
                    inspect_parallels(&path_in_workspace, &path_in_twin);
                }
                identical
            });
        }
        if path_in_workspace.is_file() {
            return files_are_identical(&path_in_workspace, &path_in_twin);
        }
        false
    }
}

// Recursively delve into directories and collect every file found
// (unreadable directories/entries are silently skipped).
fn descendent_files(directory: &PathBuf) -> Vec<PathBuf> {
    let mut files: Vec<PathBuf> = Vec::new();
    let dir_entries = match directory.read_dir() {
        Ok(dir_entries) => dir_entries,
        Err(_) => return vec![], // No files found due to read_dir error
    };
    for dir_entry in dir_entries {
        let dir_entry = match dir_entry {
            Ok(dir_entry) => dir_entry.path(),
            Err(_) => continue, // Skip this dir_entry
        };
        if dir_entry.is_dir() {
            let mut found_files = descendent_files(&dir_entry);
            files.append(&mut found_files);
        }
        if dir_entry.is_file() {
            files.push(dir_entry)
        }
    }
    files
}

/// Compares two files' (possibly decompressed) contents; logs a diagnostic
/// dump and returns `false` on any read failure or mismatch.
pub fn files_are_identical(first: &PathBuf, second: &PathBuf) -> bool {
    let first_file = match simple_read_file(first) {
        Ok(file) => file,
        Err(_) => {
            println!("Couldn't read file: {:?}", first);
            return false;
        }
    };
    let second_file = match simple_read_file(second) {
        Ok(file) => file,
        Err(_) => {
            println!("Couldn't read file: {:?}", second);
            return false;
        }
    };
    let they_match = first_file == second_file;
    if !they_match {
        println!(
            "Files did not match :-(\n\n{:?}\n\n{:?}\n\n{:?}\n\n{:?}",
            first, first_file, second, second_file
        );
        inspect_parallels(first, second);
    }
    they_match
}

/// Debug helper: prints both sides — file contents when both paths are files,
/// full directory listings when both are directories.
pub fn inspect_parallels(first: &PathBuf, second: &PathBuf) {
    if first.is_file() && second.is_file() {
        println!("=== COMPARING FILES ===");
        simple_print_file(first);
        println!("=== VS. ===");
        simple_print_file(second);
        println!("=== END ===");
    }
    if first.is_dir() && second.is_dir() {
        let first_files = descendent_files(first);
        let second_files = descendent_files(second);
        println!("=== FILES IN FIRST DIRECTORY ===");
        for file in first_files {
            simple_print_file(&file);
        }
        println!("=== FILES IN SECOND DIRECTORY ===");
        for file in second_files {
            simple_print_file(&file);
        }
    }
}

/// Prints a file's (lossily UTF-8 decoded) contents; panics if unreadable.
fn simple_print_file(path: &PathBuf) {
    let contents = match simple_read_file(path) {
        Ok(contents) => contents,
        _ => panic!("Couldnt read file {:?}", path),
    };
    println!(
        "File at `{:?}` contains:\n{}",
        path,
        String::from_utf8_lossy(&contents)
    );
}

// Reads a file, and returns a decompressed version if that operation is successful
// Could be very bad if a file starts with the right (wrong) bytes, might need to rethink if problems arise
fn simple_read_file(path: &PathBuf) -> Result<Vec<u8>, std::io::Error> {
    let file = fs::read(path)?;
    match compression::decompress(&file) {
        Ok(decompressed) => Ok(decompressed),
        Err(_) => Ok(file),
    }
}

// Creates a file based on path and contents
// Will create parent directory for the file if it doesn't yet exist
fn simple_write_file(path: &PathBuf, contents: &str) {
    let parent_directory = path
        .parent()
        .expect("Couldn't determine parent directory from file path");
    fs::create_dir_all(parent_directory).expect("Could not create parent directory for file");
    fs::write(path, &contents).expect("Couldn't write file");
}

#[cfg(feature = "helper_tests")]
mod tests {
    use super::*;

    #[test]
    fn writes_file_to_twin_and_workspace() {
        let test_bed = TestBed::setup();
        test_bed.create_file("README", "This is a readme");
        // NOTE(review): the result is ignored — presumably this should be
        // wrapped in an assert.
        test_bed.contained_by_twin("README");
        test_bed.teardown();
    }

    #[test]
    fn file_doesnt_match_twin_when_twin_doesnt_exist() {
        let test_bed = TestBed::setup();
        fs::write(test_bed.workspace().join("README"), "Hello, friend").unwrap();
        assert_eq!(test_bed.contained_by_twin("README"), false);
        test_bed.teardown();
    }

    #[test]
    fn file_doesnt_match_existing_twin() {
        let test_bed = TestBed::setup();
        fs::write(test_bed.workspace().join("README"), "Hello, friend").unwrap();
        fs::write(test_bed.twin().join("README"), "Goodbye, friend").unwrap();
        assert_eq!(test_bed.contained_by_twin("README"), false);
        test_bed.teardown();
    }

    #[test]
    fn workspace_directory_contents_not_found_in_twin() {
        let test_bed = TestBed::setup();
        test_bed.create_directory("something");
        fs::write(
            test_bed.workspace().join("something/README"),
            "Hello, friend",
        )
        .unwrap();
        assert_eq!(test_bed.contained_by_twin("something"), false);
        test_bed.teardown();
    }

    #[test]
    fn workspace_directory_file_contents_dont_match_in_twin() {
        let test_bed = TestBed::setup();
        test_bed.create_directory("something");
        fs::write(
            test_bed.workspace().join("something/README"),
            "Hello, friend",
        )
        .unwrap();
        fs::write(test_bed.twin().join("something/README"), "Goodbye, friend").unwrap();
        assert_eq!(test_bed.contained_by_twin("something"), false);
        test_bed.teardown();
    }

    #[test]
    fn git_commands_run_in_twin() {
        // NOTE(review): no `teardown()` here, so this test leaks its temp
        // directory — confirm whether that is intentional.
        let test_bed = TestBed::setup();
        test_bed.git_command(vec!["init"]);
        let git_path = test_bed.twin().join(".git");
        assert!(git_path.exists());
    }
}
true
a2d86fa2ee23ef6f70f8178e4186efff04b1e64f
Rust
vctibor/Zuma
/zuma/src/code_generation/mod.rs
UTF-8
2,110
2.9375
3
[]
no_license
use crate::interpretation::*; mod tests; static INDENT_SIZE: usize = 4; //static SVG_OPEN: &str = "<svg xmlns=\"http://www.w3.org/2000/svg\" width=\"500\" height=\"500\">"; static SVG_OPEN: &str = "<svg xmlns=\"http://www.w3.org/2000/svg\" width=\"1000\" height=\"1000\">"; static SVG_CLOSE: &str = "</svg>"; pub fn generate(graphics: &Graphics) -> String { let mut document = "".to_owned(); document.push_str(SVG_OPEN); document.push_str("\n"); let graphics_result = gen_graphics(graphics, 1); document.push_str(&graphics_result); document.push_str(SVG_CLOSE); document } fn gen_graphics(graphics: &Graphics, indent_level: usize) -> String { let mut document = "".to_owned(); for node in graphics.get_nodes() { let xml_element = element(node, indent_level); let indent_chars = " ".repeat(indent_level * INDENT_SIZE); document.push_str(&indent_chars); document.push_str(&xml_element); document.push_str("\n"); } document } fn element(node: &GraphicNode, indent_level: usize) -> String { let name = node.get_name(); let attrs = gen_attributes(node.get_attributes()); let mut content = content(node.get_content(), indent_level + 1); if content != "" { let indent_chars = " ".repeat(indent_level * INDENT_SIZE); content = format!("\n{}{}", content, indent_chars); } match attrs { Some(a) => format!("<{} {}>{}</{}>", name, a, content, name), None => format!("<{}>{}</{}>", name, content, name) } } fn gen_attributes(attributes: Vec<(String, String)>) -> Option<String> { if attributes.len() == 0 { return None } let mut attrs = vec!(); for attr in attributes { attrs.push(format!("{}=\"{}\"", attr.0, attr.1)); } attrs.sort(); Some(attrs.join(" ")) } fn content(content: ElementContent, indent_level: usize) -> String { use ElementContent::*; match content { Empty => "".to_owned(), Text(t) => t, Elements(e) => gen_graphics(e.as_ref(), indent_level), } }
true
4373ef9b46b634cf5f9d3b0a0a8d76233c058588
Rust
mark-i-m/os1
/kernel/fs/error.rs
UTF-8
191
2.90625
3
[ "MIT" ]
permissive
//! A simple Error object for FS errors

/// A minimal filesystem error carrying a borrowed description.
pub struct Error<'err> {
    msg: &'err str,
}

impl<'err> Error<'err> {
    /// Wraps a message in an `Error`.
    pub fn new(msg: &'err str) -> Self {
        // Field-init shorthand instead of `msg: msg`.
        Error { msg }
    }

    /// Returns the error message.
    ///
    /// Added so the otherwise write-only `msg` field is actually readable
    /// by callers (backward-compatible extension).
    pub fn msg(&self) -> &str {
        self.msg
    }
}
true
9d976e62e7fb1c937f5e58f973015cf6714ebb63
Rust
olanod/ruma
/ruma-api-macros/src/util.rs
UTF-8
15,251
2.828125
3
[ "MIT" ]
permissive
//! Functions to aid the `Api::to_tokens` method.

use std::collections::BTreeSet;

use proc_macro2::{Span, TokenStream};
use proc_macro_crate::{crate_name, FoundCrate};
use quote::quote;
use syn::{
    AngleBracketedGenericArguments, AttrStyle, Attribute, GenericArgument, Ident, Lifetime,
    ParenthesizedGenericArguments, PathArguments, Type, TypeArray, TypeBareFn, TypeGroup,
    TypeParen, TypePath, TypePtr, TypeReference, TypeSlice, TypeTuple,
};

use crate::api::{metadata::Metadata, request::Request};

/// Recursively walks `ty` and inserts every lifetime it mentions into
/// `lifetimes` (explicit reference lifetimes, generic-argument lifetimes,
/// and `for<...>` binders on bare `fn` types).
pub fn collect_lifetime_ident(lifetimes: &mut BTreeSet<Lifetime>, ty: &Type) {
    match ty {
        Type::Path(TypePath { path, .. }) => {
            for seg in &path.segments {
                match &seg.arguments {
                    // e.g. `Foo<'a, T>` — recurse into type args, record lifetime args.
                    PathArguments::AngleBracketed(AngleBracketedGenericArguments {
                        args, ..
                    }) => {
                        for gen in args {
                            if let GenericArgument::Type(ty) = gen {
                                collect_lifetime_ident(lifetimes, &ty);
                            } else if let GenericArgument::Lifetime(lt) = gen {
                                lifetimes.insert(lt.clone());
                            }
                        }
                    }
                    // e.g. `Fn(A, B)` — recurse into the input types.
                    PathArguments::Parenthesized(ParenthesizedGenericArguments {
                        inputs, ..
                    }) => {
                        for ty in inputs {
                            collect_lifetime_ident(lifetimes, ty);
                        }
                    }
                    _ => {}
                }
            }
        }
        Type::Reference(TypeReference { elem, lifetime, .. }) => {
            collect_lifetime_ident(lifetimes, &*elem);
            if let Some(lt) = lifetime {
                lifetimes.insert(lt.clone());
            }
        }
        Type::Tuple(TypeTuple { elems, .. }) => {
            for ty in elems {
                collect_lifetime_ident(lifetimes, ty);
            }
        }
        Type::Paren(TypeParen { elem, .. }) => collect_lifetime_ident(lifetimes, &*elem),
        Type::Group(TypeGroup { elem, .. }) => collect_lifetime_ident(lifetimes, &*elem),
        Type::Ptr(TypePtr { elem, .. }) => collect_lifetime_ident(lifetimes, &*elem),
        Type::Slice(TypeSlice { elem, .. }) => collect_lifetime_ident(lifetimes, &*elem),
        Type::Array(TypeArray { elem, .. }) => collect_lifetime_ident(lifetimes, &*elem),
        // `for<'a> fn(...)` — record the binder's lifetimes.
        Type::BareFn(TypeBareFn {
            lifetimes: Some(syn::BoundLifetimes { lifetimes: fn_lifetimes, .. }),
            ..
        }) => {
            for lt in fn_lifetimes {
                let syn::LifetimeDef { lifetime, .. } = lt;
                lifetimes.insert(lifetime.clone());
            }
        }
        _ => {}
    }
}

/// Generates a `TokenStream` of lifetime identifiers `<'lifetime>`.
pub fn unique_lifetimes_to_tokens<'a, I: Iterator<Item = &'a Lifetime>>(
    lifetimes: I,
) -> TokenStream {
    // BTreeSet deduplicates and gives a stable ordering.
    let lifetimes = lifetimes.collect::<BTreeSet<_>>();

    if lifetimes.is_empty() {
        TokenStream::new()
    } else {
        let lifetimes = quote! { #( #lifetimes ),* };
        quote! { < #lifetimes > }
    }
}

/// Returns `true` if `ty` mentions any lifetime, searching the same type
/// shapes `collect_lifetime_ident` walks.
pub fn has_lifetime(ty: &Type) -> bool {
    match ty {
        Type::Path(TypePath { path, .. }) => {
            let mut found = false;
            for seg in &path.segments {
                match &seg.arguments {
                    PathArguments::AngleBracketed(AngleBracketedGenericArguments {
                        args, ..
                    }) => {
                        for gen in args {
                            if let GenericArgument::Type(ty) = gen {
                                // A lifetime nested inside a type arg sets the
                                // flag; an explicit lifetime arg returns early.
                                if has_lifetime(&ty) {
                                    found = true;
                                };
                            } else if let GenericArgument::Lifetime(_) = gen {
                                return true;
                            }
                        }
                    }
                    PathArguments::Parenthesized(ParenthesizedGenericArguments {
                        inputs, ..
                    }) => {
                        for ty in inputs {
                            if has_lifetime(ty) {
                                found = true;
                            }
                        }
                    }
                    _ => {}
                }
            }
            found
        }
        Type::Reference(TypeReference { elem, lifetime, .. }) => {
            // An elided reference lifetime still requires recursing into the
            // referenced type.
            if lifetime.is_some() {
                true
            } else {
                has_lifetime(&elem)
            }
        }
        Type::Tuple(TypeTuple { elems, .. }) => {
            let mut found = false;
            for ty in elems {
                if has_lifetime(ty) {
                    found = true;
                }
            }
            found
        }
        Type::Paren(TypeParen { elem, .. }) => has_lifetime(&elem),
        Type::Group(TypeGroup { elem, .. }) => has_lifetime(&*elem),
        Type::Ptr(TypePtr { elem, .. }) => has_lifetime(&*elem),
        Type::Slice(TypeSlice { elem, .. }) => has_lifetime(&*elem),
        Type::Array(TypeArray { elem, .. }) => has_lifetime(&*elem),
        // A bare `fn` type only counts when it carries a `for<...>` binder.
        Type::BareFn(TypeBareFn {
            lifetimes: Some(syn::BoundLifetimes { .. }),
            ..
        }) => true,
        _ => false,
    }
}

/// The first item in the tuple generates code for the request path from
/// the `Metadata` and `Request` structs. The second item in the returned tuple
/// is the code to generate a Request struct field created from any segments
/// of the path that start with ":".
///
/// The first `TokenStream` returned is the constructed url path. The second `TokenStream` is
/// used for implementing `TryFrom<http::Request<Vec<u8>>>`, from path strings deserialized to Ruma
/// types.
pub(crate) fn request_path_string_and_parse(
    request: &Request,
    metadata: &Metadata,
    ruma_api: &TokenStream,
) -> (TokenStream, TokenStream) {
    let percent_encoding = quote! { #ruma_api::exports::percent_encoding };

    if request.has_path_fields() {
        let path_string = metadata.path.value();

        assert!(path_string.starts_with('/'), "path needs to start with '/'");
        assert!(
            path_string.chars().filter(|c| *c == ':').count() == request.path_field_count(),
            "number of declared path parameters needs to match amount of placeholders in path"
        );

        let format_call = {
            // Rewrite every `:var` placeholder in the path to `{}` while
            // collecting the matching percent-encoded format arguments.
            let mut format_string = path_string.clone();
            let mut format_args = Vec::new();

            while let Some(start_of_segment) = format_string.find(':') {
                // ':' should only ever appear at the start of a segment
                assert_eq!(&format_string[start_of_segment - 1..start_of_segment], "/");

                let end_of_segment = match format_string[start_of_segment..].find('/') {
                    Some(rel_pos) => start_of_segment + rel_pos,
                    None => format_string.len(),
                };

                let path_var = Ident::new(
                    &format_string[start_of_segment + 1..end_of_segment],
                    Span::call_site(),
                );
                format_args.push(quote! {
                    #percent_encoding::utf8_percent_encode(
                        &self.#path_var.to_string(),
                        #percent_encoding::NON_ALPHANUMERIC,
                    )
                });
                format_string.replace_range(start_of_segment..end_of_segment, "{}");
            }

            quote! {
                format_args!(#format_string, #(#format_args),*)
            }
        };

        // For parsing: each `:var` segment becomes a struct-field initializer
        // that percent-decodes the corresponding path segment.
        let path_fields =
            path_string[1..].split('/').enumerate().filter(|(_, s)| s.starts_with(':')).map(
                |(i, segment)| {
                    let path_var = &segment[1..];
                    let path_var_ident = Ident::new(path_var, Span::call_site());
                    quote! {
                        #path_var_ident: {
                            use #ruma_api::error::RequestDeserializationError;
                            let segment = path_segments.get(#i).unwrap().as_bytes();
                            let decoded = #ruma_api::try_deserialize!(
                                request,
                                #percent_encoding::percent_decode(segment)
                                    .decode_utf8(),
                            );
                            #ruma_api::try_deserialize!(
                                request,
                                ::std::convert::TryFrom::try_from(&*decoded),
                            )
                        }
                    }
                },
            );

        (format_call, quote! { #(#path_fields,)* })
    } else {
        (quote! { metadata.path.to_owned() }, TokenStream::new())
    }
}

/// The function determines the type of query string that needs to be built
/// and then builds it using `ruma_serde::urlencoded::to_string`.
pub(crate) fn build_query_string(request: &Request, ruma_api: &TokenStream) -> TokenStream {
    let ruma_serde = quote! { #ruma_api::exports::ruma_serde };

    if let Some(field) = request.query_map_field() {
        let field_name = field.ident.as_ref().expect("expected field to have identifier");

        quote!({
            // This function exists so that the compiler will throw an
            // error when the type of the field with the query_map
            // attribute doesn't implement IntoIterator<Item = (String, String)>
            //
            // This is necessary because the ruma_serde::urlencoded::to_string
            // call will result in a runtime error when the type cannot be
            // encoded as a list key-value pairs (?key1=value1&key2=value2)
            //
            // By asserting that it implements the iterator trait, we can
            // ensure that it won't fail.
            fn assert_trait_impl<T>(_: &T)
            where
                T: ::std::iter::IntoIterator<Item = (::std::string::String, ::std::string::String)>,
            {}

            let request_query = RequestQuery(self.#field_name);
            assert_trait_impl(&request_query.0);

            format_args!(
                "?{}",
                #ruma_serde::urlencoded::to_string(request_query)?
            )
        })
    } else if request.has_query_fields() {
        let request_query_init_fields = request.request_query_init_fields();

        quote!({
            let request_query = RequestQuery {
                #request_query_init_fields
            };

            format_args!(
                "?{}",
                #ruma_serde::urlencoded::to_string(request_query)?
            )
        })
    } else {
        quote! { "" }
    }
}

/// Deserialize the query string.
pub(crate) fn extract_request_query(request: &Request, ruma_api: &TokenStream) -> TokenStream {
    let ruma_serde = quote! { #ruma_api::exports::ruma_serde };

    if request.query_map_field().is_some() {
        // Query-map requests deserialize straight into the map type.
        quote! {
            let request_query = #ruma_api::try_deserialize!(
                request,
                #ruma_serde::urlencoded::from_str(
                    &request.uri().query().unwrap_or("")
                ),
            );
        }
    } else if request.has_query_fields() {
        // Individual query fields go through the generated `RequestQuery`
        // struct's `Incoming` form.
        quote! {
            let request_query: <RequestQuery as #ruma_serde::Outgoing>::Incoming =
                #ruma_api::try_deserialize!(
                    request,
                    #ruma_serde::urlencoded::from_str(
                        &request.uri().query().unwrap_or("")
                    ),
                );
        }
    } else {
        TokenStream::new()
    }
}

/// Generates the code to initialize a `Request`.
///
/// Used to construct an `http::Request`s body.
pub(crate) fn build_request_body(request: &Request, ruma_api: &TokenStream) -> TokenStream {
    let serde_json = quote! { #ruma_api::exports::serde_json };

    if let Some(field) = request.newtype_raw_body_field() {
        // Raw body: forwarded verbatim without serialization.
        let field_name = field.ident.as_ref().expect("expected field to have an identifier");
        quote!(self.#field_name)
    } else if request.has_body_fields() || request.newtype_body_field().is_some() {
        let request_body_initializers = if let Some(field) = request.newtype_body_field() {
            // Newtype body: `RequestBody(self.field)`.
            let field_name = field.ident.as_ref().expect("expected field to have an identifier");
            quote! { (self.#field_name) }
        } else {
            // Struct body: `RequestBody { field: ..., ... }`.
            let initializers = request.request_body_init_fields();
            quote! { { #initializers } }
        };

        quote! {
            {
                let request_body = RequestBody #request_body_initializers;
                #serde_json::to_vec(&request_body)?
            }
        }
    } else {
        quote!(Vec::new())
    }
}

/// Generates the field initializers that populate a `Request`'s body fields
/// when converting from an incoming `http::Request` (newtype body, raw body,
/// or per-field initialization).
pub(crate) fn parse_request_body(request: &Request) -> TokenStream {
    if let Some(field) = request.newtype_body_field() {
        let field_name = field.ident.as_ref().expect("expected field to have an identifier");
        quote! {
            #field_name: request_body.0,
        }
    } else if let Some(field) = request.newtype_raw_body_field() {
        let field_name = field.ident.as_ref().expect("expected field to have an identifier");
        quote! {
            #field_name: request.into_body(),
        }
    } else {
        request.request_init_body_fields()
    }
}

/// Handles a bare `#[ruma_api(body)]` / `#[ruma_api(raw_body)]` attribute
/// word on a field, enforcing that at most one newtype body field exists.
pub(crate) fn req_res_meta_word<T>(
    attr_kind: &str,
    field: &syn::Field,
    newtype_body_field: &mut Option<syn::Field>,
    body_field_kind: T,
    raw_field_kind: T,
) -> syn::Result<T> {
    if let Some(f) = &newtype_body_field {
        // Point at both the duplicate and the original field.
        let mut error = syn::Error::new_spanned(field, "There can only be one newtype body field");
        error.combine(syn::Error::new_spanned(f, "Previous newtype body field"));
        return Err(error);
    }

    *newtype_body_field = Some(field.clone());
    Ok(match attr_kind {
        "body" => body_field_kind,
        "raw_body" => raw_field_kind,
        _ => unreachable!(),
    })
}

/// Handles a `#[ruma_api(name = value)]` attribute on a field; only
/// `header = ...` is accepted.
pub(crate) fn req_res_name_value<T>(
    name: Ident,
    value: Ident,
    header: &mut Option<Ident>,
    field_kind: T,
) -> syn::Result<T> {
    if name != "header" {
        return Err(syn::Error::new_spanned(
            name,
            "Invalid #[ruma_api] argument with value, expected `header`",
        ));
    }

    *header = Some(value);
    Ok(field_kind)
}

/// `true` when every byte of `string` is printable ASCII (0x21..=0x7E),
/// i.e. no spaces or control characters.
pub(crate) fn is_valid_endpoint_path(string: &str) -> bool {
    string.as_bytes().iter().all(|b| (0x21..=0x7E).contains(b))
}

/// Resolves the token path to the `ruma-api` crate, accounting for crate
/// renames and for re-export through the `ruma` umbrella crate.
pub fn import_ruma_api() -> TokenStream {
    if let Ok(FoundCrate::Name(possibly_renamed)) = crate_name("ruma-api") {
        let import = Ident::new(&possibly_renamed, Span::call_site());
        quote! { ::#import }
    } else if let Ok(FoundCrate::Name(possibly_renamed)) = crate_name("ruma") {
        let import = Ident::new(&possibly_renamed, Span::call_site());
        quote! { ::#import::api }
    } else {
        quote! { ::ruma_api }
    }
}

/// `true` for outer `#[cfg(...)]` attributes.
pub(crate) fn is_cfg_attribute(attr: &Attribute) -> bool {
    attr.style == AttrStyle::Outer && attr.path.is_ident("cfg")
}
true
76e414c071d1ed49a9d82ba24404587f0e4707bb
Rust
wang-q/intspan
/src/libs/coverage.rs
UTF-8
3,555
3.296875
3
[ "MIT" ]
permissive
use crate::IntSpan;
use std::collections::BTreeMap;

/// Tracks how many times each position of a sequence has been covered,
/// up to a saturation depth of `max`.
///
/// Tier `i` (1..=max) holds the positions covered at least `i` times.
/// Tier 0 holds the still-uncovered positions; tier -1 is the full sequence.
#[derive(Default, Clone)]
pub struct Coverage {
    max: i32,
    tiers: BTreeMap<i32, IntSpan>,
}

impl Coverage {
    pub fn max(&self) -> &i32 {
        &self.max
    }

    pub fn tiers(&self) -> &BTreeMap<i32, IntSpan> {
        &self.tiers
    }

    /// Creates a coverage tracker with a default sequence length of 1e9.
    pub fn new(max: i32) -> Self {
        Self::new_len(max, 1_000_000_000)
    }

    /// Creates a coverage tracker for a sequence of length `len`.
    pub fn new_len(max: i32, len: i32) -> Self {
        let mut tiers: BTreeMap<i32, IntSpan> = BTreeMap::new();
        // Tier -1: the whole sequence; tier 0: not-yet-covered positions.
        tiers.insert(-1, IntSpan::from_pair(1, len));
        tiers.insert(0, IntSpan::from_pair(1, len));
        for i in 1..=max {
            tiers.insert(i, IntSpan::new());
        }

        Self { max, tiers }
    }

    // Normalizes `(begin, end)` to an ordered, 1-based pair.
    fn begin_end(begin: i32, end: i32) -> (i32, i32) {
        let mut tup = (begin.min(end), begin.max(end));
        if tup.0 == 0 {
            tup.0 = 1;
        }
        tup
    }

    /// ```
    /// # use intspan::Coverage;
    /// let mut cover = Coverage::new(1);
    /// cover.bump(1, 100);
    /// cover.bump(90, 150);
    /// assert_eq!(cover.tiers().get(&1).unwrap().to_string(), "1-150");
    /// # assert_eq!(cover.tiers().get(&0).unwrap().to_string(), "151-1000000000");
    ///
    /// let mut cover = Coverage::new_len(1, 500);
    /// cover.bump(1, 100);
    /// cover.bump(90, 150);
    /// assert_eq!(cover.tiers().get(&1).unwrap().to_string(), "1-150");
    /// # assert_eq!(cover.tiers().get(&0).unwrap().to_string(), "151-500");
    /// # assert_eq!(cover.tiers().get(&-1).unwrap().to_string(), "1-500");
    /// ```
    pub fn bump(&mut self, begin: i32, end: i32) {
        let tup = Self::begin_end(begin, end);
        let mut intspan = IntSpan::from_pair(tup.0, tup.1);

        // reach max coverage in full sequence
        if self
            .tiers
            .get(&-1)
            .unwrap()
            .equals(self.tiers.get(&self.max).unwrap())
        {
            return;
        }

        // remove intspan from uncovered regions
        self.tiers.entry(0).and_modify(|e| e.subtract(&intspan));

        // Cascade upward: positions already at tier `i` before this bump
        // are promoted to tier `i + 1`.
        for i in 1..=self.max {
            let intersect = self.tiers.get(&i).unwrap().intersect(&intspan);
            self.tiers.entry(i).and_modify(|e| e.merge(&intspan));

            if i + 1 > self.max {
                break;
            }
            intspan = intersect.copy();
        }
    }

    /// ```
    /// # use intspan::Coverage;
    /// let mut cover = Coverage::new(2);
    /// cover.bump(1, 100);
    /// cover.bump(90, 150);
    /// assert_eq!(cover.max_tier().to_string(), "90-100");
    ///
    /// let mut cover = Coverage::new(5);
    /// cover.bump(1, 100);
    /// cover.bump(90, 150);
    /// assert_eq!(cover.max_tier().to_string(), "-");
    /// ```
    pub fn max_tier(&self) -> IntSpan {
        self.tiers().get(self.max()).unwrap().copy()
    }

    /// ```
    /// # use intspan::Coverage;
    /// let mut cover = Coverage::new(2);
    /// cover.bump(1, 100);
    /// cover.bump(90, 150);
    ///
    /// assert_eq!(cover.uniq_tiers().get(&2).unwrap().to_string(), "90-100");
    ///
    /// assert_eq!(cover.tiers().get(&1).unwrap().to_string(), "1-150");
    /// assert_eq!(cover.uniq_tiers().get(&1).unwrap().to_string(), "1-89,101-150");
    /// ```
    pub fn uniq_tiers(&self) -> BTreeMap<i32, IntSpan> {
        // Make tiers disjoint: each tier keeps only positions covered
        // exactly `i` times by subtracting the tier above it.
        let mut tiers = self.tiers.clone();
        for i in 1..self.max {
            let intspan_next = tiers[&(i + 1)].copy();
            tiers.entry(i).and_modify(|e| e.subtract(&intspan_next));
        }
        tiers
    }
}
true
1bf68639991957b57718a2fcd42bac0aacb7a415
Rust
fabien-michel/advent-of-code-2020
/src/days/day01.rs
UTF-8
1,466
2.75
3
[]
no_license
// use crate::utils::read_lines; // mod utils; use crate::utils::print_day_banner; use crate::utils::read_lines; use itertools::iproduct; pub fn day01_01() { print_day_banner(1, 1); let mut expenses = load_expenses(); expenses.sort(); for (exp_1, exp_2) in iproduct!(expenses.iter(), expenses.iter()) { let (exp_sum, exp_prod) = sum_prod_expenses(&[exp_1, exp_2]); if exp_sum == 2020 { println!( "({:?} ; {:?}) : {:?} : {:?}", exp_1, exp_2, exp_sum, exp_prod ); break; } } } pub fn day01_02() { print_day_banner(1, 2); let mut expenses = load_expenses(); expenses.sort(); for (exp_1, exp_2, exp_3) in iproduct!(expenses.iter(), expenses.iter(), expenses.iter()) { let (exp_sum, exp_prod) = sum_prod_expenses(&[exp_1, exp_2, exp_3]); if exp_sum == 2020 { println!( "({:?} ; {:?} ; {:?}) : {:?}: {:?}", exp_1, exp_2, exp_3, exp_sum, exp_prod ); break; } } } fn sum_prod_expenses(expenses: &[&i64]) -> (i64, i64) { let sum: i64 = expenses.iter().copied().sum(); let prod: i64 = expenses.iter().copied().product(); return (sum, prod); } fn load_expenses() -> Vec<i64> { return read_lines("./inputs/01") .unwrap() .filter_map(Result::ok) .map(|line| line.parse::<i64>().unwrap()) .collect(); }
true
650672c0c633050ba352fc7448a27b0340102462
Rust
Pajn/wlral
/wlral/src/input/event_filter.rs
UTF-8
3,378
2.515625
3
[]
no_license
//! Event filters: ordered interception points for keyboard and pointer
//! events before they reach a client.

use crate::input::events::*;
use std::{cell::RefCell, ops::Deref, rc::Rc};
use wlroots_sys::{wlr_backend, wlr_backend_get_session, wlr_session_change_vt};
use xkbcommon::xkb;

/// Implement EventFilter to handle input events.
///
/// Each event handler return a bool to inform if it has handled
/// the event or not. EventFilters are called in order added and
/// as soon as the event is handled, the process stops. If no
/// EventFilter handles the event it will be forwarded to the
/// appropriate client.
pub trait EventFilter {
    fn handle_keyboard_event(&self, _event: &KeyboardEvent) -> bool {
        false
    }
    fn handle_pointer_motion_event(&self, _event: &MotionEvent) -> bool {
        false
    }
    fn handle_pointer_button_event(&self, _event: &ButtonEvent) -> bool {
        false
    }
    fn handle_pointer_axis_event(&self, _event: &AxisEvent) -> bool {
        false
    }
}

/// Forwards every event to the inner filter so `Rc`-shared filters can be
/// registered.
impl<T> EventFilter for Rc<T>
where
    T: EventFilter,
{
    fn handle_keyboard_event(&self, event: &KeyboardEvent) -> bool {
        Deref::deref(self).handle_keyboard_event(event)
    }
    fn handle_pointer_motion_event(&self, event: &MotionEvent) -> bool {
        Deref::deref(self).handle_pointer_motion_event(event)
    }
    fn handle_pointer_button_event(&self, event: &ButtonEvent) -> bool {
        Deref::deref(self).handle_pointer_button_event(event)
    }
    fn handle_pointer_axis_event(&self, event: &AxisEvent) -> bool {
        Deref::deref(self).handle_pointer_axis_event(event)
    }
}

/// Ordered registry of `EventFilter`s; dispatches each event to the filters
/// in registration order, stopping at the first that handles it
/// (`Iterator::any` short-circuits).
pub(crate) struct EventFilterManager {
    event_filters: RefCell<Vec<Box<dyn EventFilter>>>,
}

impl EventFilterManager {
    pub(crate) fn new() -> EventFilterManager {
        EventFilterManager {
            event_filters: RefCell::new(vec![]),
        }
    }

    /// Appends a filter; later filters only see events the earlier ones
    /// did not handle.
    pub(crate) fn add_event_filter(&self, filter: Box<dyn EventFilter>) {
        self.event_filters.borrow_mut().push(filter)
    }
}

impl EventFilter for EventFilterManager {
    fn handle_keyboard_event(&self, event: &KeyboardEvent) -> bool {
        self
            .event_filters
            .borrow()
            .iter()
            .any(|filter| filter.handle_keyboard_event(event))
    }
    fn handle_pointer_motion_event(&self, event: &MotionEvent) -> bool {
        self
            .event_filters
            .borrow()
            .iter()
            .any(|filter| filter.handle_pointer_motion_event(event))
    }
    fn handle_pointer_button_event(&self, event: &ButtonEvent) -> bool {
        self
            .event_filters
            .borrow()
            .iter()
            .any(|filter| filter.handle_pointer_button_event(event))
    }
    fn handle_pointer_axis_event(&self, event: &AxisEvent) -> bool {
        self
            .event_filters
            .borrow()
            .iter()
            .any(|filter| filter.handle_pointer_axis_event(event))
    }
}

/// Filter that switches virtual terminals when an `XF86Switch_VT_*` keysym
/// is pressed.
pub struct VtSwitchEventFilter {
    // Raw wlroots backend pointer, used to look up the active session.
    backend: *mut wlr_backend,
}

impl VtSwitchEventFilter {
    pub fn new(backend: *mut wlr_backend) -> VtSwitchEventFilter {
        VtSwitchEventFilter { backend }
    }
}

impl EventFilter for VtSwitchEventFilter {
    fn handle_keyboard_event(&self, event: &KeyboardEvent) -> bool {
        let keysym = event.get_one_sym();
        let vt_range = xkb::KEY_XF86Switch_VT_1..=xkb::KEY_XF86Switch_VT_12;

        if vt_range.contains(&keysym) {
            // SAFETY: relies on `backend` being a valid `wlr_backend` pointer
            // (supplied at construction) — TODO confirm its lifetime covers
            // this filter; a null session is checked before use.
            unsafe {
                let session = wlr_backend_get_session(self.backend);

                if !session.is_null() {
                    // VT number is the keysym's offset within the range + 1.
                    let vt = keysym - xkb::KEY_XF86Switch_VT_1 + 1;
                    wlr_session_change_vt(session, vt);
                }
            }
            true
        } else {
            false
        }
    }
}
true
edc1ad4fa4f3a430244dae233bd217ce34489159
Rust
trondhe/trace-rs
/src/camera.rs
UTF-8
2,365
2.78125
3
[]
no_license
use crate::object::HitableList;
use crate::tracer::Tracer;
use crate::types::{Frame, TraceValueType, Vec3};
use crate::viewport::Viewport;

use rayon::prelude::*;

/// Renders a scene by tracing `samples` complete frames in parallel and
/// accumulating them on the sensor as a running average.
pub struct Camera {
    vp: Viewport,
    sensor: Sensor,
    tracer: Tracer,
    samples: usize,
}

/// Construction parameters for a `Camera`.
pub struct CameraConfig {
    pub y_size: usize,
    pub x_size: usize,
    pub samples: usize,
    pub max_bounces: usize,
}

impl Camera {
    pub fn new(config: CameraConfig) -> Self {
        Self {
            vp: Viewport::new(config.x_size, config.y_size),
            sensor: Sensor::new(config.x_size, config.y_size),
            tracer: Tracer::new(config.max_bounces),
            samples: config.samples,
        }
    }

    /// Traces the scene once per configured sample and folds every finished
    /// frame into the sensor.
    pub fn capture(&mut self, hitable_list: HitableList) {
        // Render all sample frames concurrently; each closure owns its own
        // pixel buffer, so tracing needs no synchronization.
        let rendered_frames: Vec<Frame> = (0..self.samples)
            .into_par_iter()
            .map(|_| {
                let mut pixels = vec![Vec3::new(0., 0., 0.); self.vp.y_size * self.vp.x_size];
                for row in 0..self.vp.y_size {
                    for col in 0..self.vp.x_size {
                        let ray = self.vp.get_ray(col, row);
                        pixels[self.vp.x_size * row + col] =
                            self.tracer.trace(&ray, &hitable_list);
                    }
                }
                pixels
            })
            .collect();

        // Accumulation mutates the sensor, so it happens sequentially.
        for frame in &rendered_frames {
            self.sensor.store_frame(frame);
        }
    }

    /// Read-only view of the accumulated light values.
    pub fn sensor_data(&self) -> &Frame {
        &self.sensor.light_values
    }
}

/// Accumulates traced frames as a running per-pixel average.
pub struct Sensor {
    light_values: Frame,
    samples: usize,
    pub x_size: usize,
    pub y_size: usize,
}

impl Sensor {
    pub fn new(x_size: usize, y_size: usize) -> Self {
        Self {
            light_values: vec![Vec3::new(0., 0., 0.); x_size * y_size],
            samples: 0,
            x_size,
            y_size,
        }
    }

    /// Folds one frame into the running average.
    fn store_frame(&mut self, frame: &Frame) {
        assert!(self.light_values.len() == frame.len());
        // Incremental mean: with n prior samples, the new average is
        // (new + n * old) / (n + 1) — equivalent to re-averaging all frames.
        let n = self.samples as TraceValueType;
        for (accumulated, fresh) in self.light_values.iter_mut().zip(frame.iter()) {
            *accumulated = (*fresh + n * *accumulated) / (n + 1.);
        }
        self.samples += 1;
    }
}
true
4602d063dc4f66d86d4ca69a51d7e08dfe227c29
Rust
CarloMicieli/trenako
/crates/catalog/src/catalog_items/category.rs
UTF-8
3,601
2.859375
3
[ "MIT", "Apache-2.0" ]
permissive
use strum_macros; use strum_macros::{Display, EnumString}; /// The enumeration of the model categories. #[derive(Debug, Copy, Clone, PartialEq, Eq, EnumString, Display)] #[strum(serialize_all = "snake_case")] #[strum(ascii_case_insensitive)] pub enum Category { /// The steam locomotives category Locomotives, /// The train sets category TrainSets, /// The train sets category StarterSets, /// The freight cars category FreightCars, /// The passenger cars category PassengerCars, /// The electric multiple units category ElectricMultipleUnits, /// The railcars category Railcars, } /// The different kind of freight cars #[derive(Debug, Copy, Clone, PartialEq, Eq, EnumString, Display)] #[strum(serialize_all = "snake_case")] #[strum(ascii_case_insensitive)] pub enum FreightCarType { AutoTransportCars, BrakeWagon, ContainerCars, CoveredFreightCars, DumpCars, Gondola, HeavyGoodsWagons, HingedCoverWagons, HopperWagon, RefrigeratorCars, SiloContainerCars, SlideTarpaulinWagon, SlidingWallBoxcars, SpecialTransport, StakeWagons, SwingRoofWagon, TankCars, TelescopeHoodWagons, DeepWellFlatCars, } /// The different kinds of locomotives #[derive(Debug, Copy, Clone, PartialEq, Eq, EnumString, Display)] #[strum(serialize_all = "snake_case")] #[strum(ascii_case_insensitive)] pub enum LocomotiveType { /// The steam locomotives category SteamLocomotive, /// The diesel locomotives category DieselLocomotive, /// The electric locomotives category ElectricLocomotive, } #[derive(Debug, Copy, Clone, PartialEq, Eq, EnumString, Display)] #[strum(serialize_all = "snake_case")] #[strum(ascii_case_insensitive)] pub enum PassengerCarType { /// An "open coach" has a central aisle; the car's interior is often filled with row upon row of /// seats as in a passenger airliner. OpenCoach, /// "closed" coaches or "compartment" cars have a side corridor to connect individual compartments /// along the body of the train, each with two rows of seats facing each other. 
CompartmentCoach, /// A dining car (or diner) is used to serve meals to the passengers. DiningCar, /// Lounge cars carry a bar and public seating. Lounge, /// The observation car almost always operated as the last car in a passenger train, in US /// practice. Its interior could include features of a coach, lounge, diner, or sleeper. The /// main spotting feature was at the tail end of the car. Observation, ///Often called "sleepers" or "Pullman cars", these cars provide sleeping arrangements for ///passengers travelling at night. Early models were divided into sections, where coach /// seating converted at night into semi-private berths. SleepingCar, /// The baggage car is a car that was normally placed between the train's motive power and the /// remainder of the passenger train. The car's interior is normally wide open and is used to /// carry passengers' checked baggage. BaggageCar, DoubleDecker, CombineCar, DrivingTrailer, RailwayPostOffice, } #[derive(Debug, Copy, Clone, PartialEq, Eq, EnumString, Display)] #[strum(serialize_all = "snake_case")] #[strum(ascii_case_insensitive)] pub enum ElectricMultipleUnitType { PowerCar, TrailerCar, } #[derive(Debug, Copy, Clone, PartialEq, Eq, EnumString, Display)] #[strum(serialize_all = "snake_case")] #[strum(ascii_case_insensitive)] pub enum RailcarType { PowerCar, TrailerCar, }
true
d8f842315e1befcd32ca3837238d723813d78cf1
Rust
overdrivenpotato/rust
/src/tools/clippy/tests/ui/only_used_in_recursion.rs
UTF-8
2,647
3.15625
3
[ "MIT", "Apache-2.0", "LicenseRef-scancode-other-permissive", "BSD-3-Clause", "BSD-2-Clause", "NCSA" ]
permissive
#![warn(clippy::only_used_in_recursion)] fn simple(a: usize, b: usize) -> usize { if a == 0 { 1 } else { simple(a - 1, b) } } fn with_calc(a: usize, b: isize) -> usize { if a == 0 { 1 } else { with_calc(a - 1, -b + 1) } } fn tuple((a, b): (usize, usize)) -> usize { if a == 0 { 1 } else { tuple((a - 1, b + 1)) } } fn let_tuple(a: usize, b: usize) -> usize { let (c, d) = (a, b); if c == 0 { 1 } else { let_tuple(c - 1, d + 1) } } fn array([a, b]: [usize; 2]) -> usize { if a == 0 { 1 } else { array([a - 1, b + 1]) } } fn index(a: usize, mut b: &[usize], c: usize) -> usize { if a == 0 { 1 } else { index(a - 1, b, c + b[0]) } } fn break_(a: usize, mut b: usize, mut c: usize) -> usize { let c = loop { b += 1; c += 1; if c == 10 { break b; } }; if a == 0 { 1 } else { break_(a - 1, c, c) } } // this has a side effect fn mut_ref(a: usize, b: &mut usize) -> usize { *b = 1; if a == 0 { 1 } else { mut_ref(a - 1, b) } } fn mut_ref2(a: usize, b: &mut usize) -> usize { let mut c = *b; if a == 0 { 1 } else { mut_ref2(a - 1, &mut c) } } fn not_primitive(a: usize, b: String) -> usize { if a == 0 { 1 } else { not_primitive(a - 1, b) } } // this doesn't have a side effect, // but `String` is not primitive. 
fn not_primitive_op(a: usize, b: String, c: &str) -> usize { if a == 1 { 1 } else { not_primitive_op(a, b + c, c) } } struct A; impl A { fn method(a: usize, b: usize) -> usize { if a == 0 { 1 } else { A::method(a - 1, b - 1) } } fn method2(&self, a: usize, b: usize) -> usize { if a == 0 { 1 } else { self.method2(a - 1, b + 1) } } } trait B { fn hello(a: usize, b: usize) -> usize; fn hello2(&self, a: usize, b: usize) -> usize; } impl B for A { fn hello(a: usize, b: usize) -> usize { if a == 0 { 1 } else { A::hello(a - 1, b + 1) } } fn hello2(&self, a: usize, b: usize) -> usize { if a == 0 { 1 } else { self.hello2(a - 1, b + 1) } } } trait C { fn hello(a: usize, b: usize) -> usize { if a == 0 { 1 } else { Self::hello(a - 1, b + 1) } } fn hello2(&self, a: usize, b: usize) -> usize { if a == 0 { 1 } else { self.hello2(a - 1, b + 1) } } } fn ignore(a: usize, _: usize) -> usize { if a == 1 { 1 } else { ignore(a - 1, 0) } } fn ignore2(a: usize, _b: usize) -> usize { if a == 1 { 1 } else { ignore2(a - 1, _b) } } fn f1(a: u32) -> u32 { a } fn f2(a: u32) -> u32 { f1(a) } fn inner_fn(a: u32) -> u32 { fn inner_fn(a: u32) -> u32 { a } inner_fn(a) } fn main() {}
true
4331474567500d44eb14a77b486155091c2294dc
Rust
birktj/gl-canvas-rs
/examples/hello_world.rs
UTF-8
1,509
2.578125
3
[]
no_license
extern crate glium; extern crate nalgebra as na; extern crate gl_canvas_rs; use std::io::Write; use glium::Surface; use glium::glutin::{Event, self, WindowEvent}; fn main() { let mut event_loop = glutin::EventsLoop::new(); let window = glutin::WindowBuilder::new().with_dimensions((1024, 768).into()); let context = glutin::ContextBuilder::new() .with_multisampling(4); let display = glium::Display::new(window, context, &event_loop).unwrap(); let mut ctx = gl_canvas_rs::RenderContext::new(display.clone()); let mut time = std::time::Instant::now(); let mut closed = false; print!("fps: "); while !closed { print!("\rfps: {}", 1000000.0 / time.elapsed().subsec_micros() as f32); std::io::stdout().flush().unwrap(); time = std::time::Instant::now(); ctx.clear(gl_canvas_rs::Color::new(0.5, 0.5, 0.5, 1.0)); ctx.move_to(0.0, 0.0); ctx.line_to(100.0,100.0); ctx.stroke(); ctx.move_to(200.0, 200.0); ctx.line_to(200.0, 500.0); ctx.line_to(500.0, 500.0); ctx.line_to(500.0, 100.0); //ctx.line_to(700.0, 1000.0); ctx.fill_color(gl_canvas_rs::Color::new(1.0,0.0,0.0,1.0)); ctx.fill(); ctx.render(); event_loop.poll_events(|event| { match event { Event::WindowEvent {event: WindowEvent::CloseRequested, ..} => closed = true, _ => (), } }); } }
true
e2120ae1f870f13ace5884c5c95cb106b9f59c65
Rust
rleyva/ray-tracer
/src/utils.rs
UTF-8
1,749
3.171875
3
[]
no_license
// Utilities use std::fs::File; use std::io::prelude::*; use std::path::Path; // Public function used to write a PPM-formatted string to a file. pub fn write_ppm_to_file(file_path: &String, ppm_content: &String, width: usize, height: usize) { // Header given to generated PPMs. let header = "P3\n".to_string() + &width.to_string() + " " + &height.to_string() + &"\n".to_string() + &"255".to_string() + &"\n".to_string(); let file_contents = header + ppm_content; // Handle file shenanigans here. let path = Path::new(file_path); let display = path.display(); let mut file = match File::create(&path) { Err(why) => panic!("Could not create file at {}: {}", display, why), Ok(file) => file, }; match file.write_all(file_contents.as_bytes()) { Err(why) => panic!("Could not write to {}: {}", display, why), Ok(_) => println!("Successfully wrote to {}.", display), }; } // Public debug function used to create some PPM output. pub fn create_dummy_ppm_contents(width: usize, height: usize) -> String { let mut ppm_contents = String::with_capacity(width * height); for h in 0..height { for w in 0..width { let r = w as f64 / width as f64; let g = h as f64 / height as f64; let b = 0.25; // Rescale values, and store as string. let r_str = &((r * 255.0) as u8).to_string(); let g_str = &((g * 255.0) as u8).to_string(); let b_str = &((b * 255.0) as u8).to_string(); // Push to string. ppm_contents.push_str(&(r_str.to_owned() + " " + g_str + " " + b_str + "\n")); } } return ppm_contents; }
true
843360e1c18f5f1902a9bec69fd856830dc3dfbd
Rust
suhanyujie/rust-cookbook-note
/src/notes/kvs/src/kv.rs
UTF-8
25,038
2.859375
3
[]
no_license
//! 通过 [indexmap](https://github.com/bluss/indexmap) 实现简单的 KV 数据库 //! 为了防止 data race,将 IndexMap 用 Arc 进行包装 //! 具体实现可以参考:https://github.com/pingcap/talent-plan/blob/master/courses/rust/projects/project-2/src/kv.rs use super::util::HandyRwLock; use crate::{KvsError, Result}; use indexmap::IndexMap; use serde::{Deserialize, Serialize}; use serde_json::Deserializer; use std::{ collections::{BTreeMap, HashMap}, ffi::OsStr, fs::File, io::{BufReader, BufWriter, Read, Seek, SeekFrom, Write}, ops::Range, path::{Path, PathBuf}, sync::{Arc, RwLock}, }; /// 键值对存储在日志文件中 todo struct KVStore { /// 当将键设置为值时,kvs 将 set 命令写入硬盘中的有序日志中, /// 然后将该日志对应的指针(文件偏移量)指向键和内容,并存储在内存索引中。 /// 类似地,当删除一个键时,kvs 将 rm 命令写入日志,然后从内存索引中删除该键。 /// 当使用 get 命令检索键的值时,它检索索引,如果找到了,就从对应的日志指针上加载命令,执行命令并返回结果。 /// /// kvs 启动时,就会按从旧到新的顺序从日志中遍历并执行命令,内存索引也会对应的重建。 /// /// 当日志条数达到给定阈值时,kvs 会其压缩为一个新日志,删除冗余日志以回收磁盘空间。 /// /// 注意,kvs 项目既是一个无状态命令行程序,也是一个包含有状态 KVStore 类型的库: /// 对于 CLI,使用 KVStore 类型将加载索引,执行命令,然后退出;对于库使用,它将加载索引,然后执行多个命令,维护索引状态,直到它被删除。 /// ref: https://github.com/pingcap/talent-plan/blob/master/courses/rust/projects/project-2/README.md#project-spec path: PathBuf, // 数字到文件的映射 readers: HashMap<u64, BufReaderWithPos<File>>, // 当前用于写的日志文件 writer: BufWriterWithPos<File>, // 存在内存中的索引 index: BTreeMap<String, CommandPos>, // inner: Arc<RwLock<IndexMap<Vec<u8>, Vec<u8>>>>, /// 记录当前所写入的文件标号 current_gen: u64, /// 记录过期/无效的(可被删除的)值的字节数量 uncompacted: u64, } #[derive(Debug)] struct BufWriterWithPos<W: Write + Seek> { writer: BufWriter<W>, pos: u64, } impl<W: Write + Seek> BufWriterWithPos<W> { fn new(mut inner: W) -> Result<Self> { let pos = inner.seek(SeekFrom::Current(0)); Ok(BufWriterWithPos { writer: BufWriter::new(inner), pos: 0, }) } } impl<W: Write + Seek> Write for BufWriterWithPos<W> { fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> { let len = self.writer.write(buf)?; self.pos += len as u64; Ok(len) } fn flush(&mut self) -> std::io::Result<()> { self.writer.flush() } } struct BufReaderWithPos<R: Read + Seek> 
{ reader: BufReader<R>, pos: u64, } impl<R: Read + Seek> BufReaderWithPos<R> { fn new(mut inner: R) -> Result<Self> { let pos = inner.seek(SeekFrom::Current(0))?; Ok(BufReaderWithPos { reader: BufReader::new(inner), pos, }) } } // 将目录中的文件列表按名字进行排序,以便得到有序的日志文件列表 fn sorted_gen_list(path: PathBuf) -> Result<Vec<u64>> { let mut gen_list: Vec<u64> = std::fs::read_dir(&path)? .flat_map(|res| -> Result<_> { Ok(res?.path()) }) .filter(|path| path.is_file() && path.extension() == Some("log".as_ref())) .flat_map(|path| { path.file_name() .and_then(OsStr::to_str) .map(|s| s.trim_end_matches(".log")) .map(str::parse::<u64>) }) .flatten() .collect(); gen_list.sort_unstable(); Ok(gen_list) } fn log_path(dir: &Path, gen: u64) -> PathBuf { dir.join(format!("{}.log", gen)) } /// 通过文件序号,从对应的文件中读取指令并生成对应的索引加载到内存中(BTreeMap) fn load( gen: u64, reader: &mut BufReaderWithPos<File>, index: &mut BTreeMap<String, CommandPos>, ) -> Result<u64> { // 确定从文件的某个位置开始读 let mut pos = reader.seek(SeekFrom::Start(0))?; let mut stream = Deserializer::from_reader(reader).into_iter::<Command>(); // 通过压缩的手段可节省的字节数 let mut uncompacted = 0; while let Some(cmd) = stream.next() { // 匹配到下一条指令所对应的 offset let new_pos = stream.byte_offset() as u64; match cmd? { Command::Set { key, .. 
} => { if let Some(old_cmd) = index.insert(key, (gen, pos..new_pos).into()) { uncompacted += old_cmd.len; } } // 删除 Command::Remove { key } => { if let Some(old_cmd) = index.remove(&key) { uncompacted += old_cmd.len; } // 为何加上了指令的长度?todo uncompacted += new_pos - pos; } } pos = new_pos; } Ok(uncompacted) } #[derive(Debug, Deserialize, Serialize)] enum Command { Set { key: String, value: String }, Remove { key: String }, } /// 定义支持的指令/日志 impl Command { fn set(key: String, value: String) -> Self { Command::Set { key, value } } fn remove(key: String) -> Self { Command::Remove { key } } } /// 命令位置 #[derive(Debug)] struct CommandPos { /// 日志文件序号 gen: u64, /// 日志在一个文件中的偏移量 pos: u64, /// 日志的长度。一个指令就算是一条日志 len: u64, } impl From<(u64, Range<u64>)> for CommandPos { fn from((gen, range): (u64, Range<u64>)) -> Self { CommandPos { gen, pos: range.start, len: range.end - range.start, } } } impl<R: Seek + Read> Seek for BufReaderWithPos<R> { fn seek(&mut self, pos: SeekFrom) -> std::io::Result<u64> { self.pos = self.reader.seek(pos)?; Ok(self.pos) } } impl<R: Seek + Read> Read for BufReaderWithPos<R> { fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> { let len = self.reader.read(buf)?; self.pos += len as u64; Ok(len) } } impl KVStore { /// 基于一个路径启动一个 KvStore 实例。 /// 如果路径不存在,则创建 fn open(path: impl Into<PathBuf>) -> Result<Self> { // 打开目录,查看目录中的日志文件列表,将其加载进 kvs let using_path = path.into(); std::fs::create_dir_all(&using_path)?; let mut readers = HashMap::new(); // 索引以 btree map 的形式存储在内存中 let mut index: BTreeMap<String, CommandPos> = BTreeMap::new(); let gen_list = sorted_gen_list(using_path.clone())?; let mut uncompacted = 0; for &gen in &gen_list { let mut reader = BufReaderWithPos::new(File::open(log_path(&using_path, gen))?)?; uncompacted += load(gen, &mut reader, &mut index)?; readers.insert(gen, reader); } let current_gen = gen_list.last().unwrap_or(&0) + 1; let writer = new_log_file(&using_path, current_gen, &mut readers)?; Ok(KVStore { path: using_path.clone(), 
readers, writer, index, current_gen, uncompacted, }) } /// 设定键值对 /// 1.序列化指令,刷入文件中;2.索引写入内存 fn set(&mut self, k: String, v: String) -> Result<()> { let cmd = Command::set(k, v); let pos = self.writer.pos; serde_json::to_writer(&mut self.writer, &cmd)?; self.writer.flush()?; // 索引写入内存 todo if let Command::Set { key, .. } = cmd { if let Some(old_cmd) = self .index .insert(key, (self.current_gen, pos..self.writer.pos).into()) { self.uncompacted += old_cmd.len; } } Ok(()) } /// 读取值 /// 如果key存在则返回值,不存在,返回 None fn get(&mut self, k: String) -> Result<Option<String>> { if let Some(cmd_pos) = self.index.get(&k) { let reader = self .readers .get_mut(&cmd_pos.gen) .expect("Cannot find log reader"); reader.seek(SeekFrom::Start(cmd_pos.pos))?; let cmd_reader = reader.take(cmd_pos.len); if let Command::Set { value, .. } = serde_json::from_reader(cmd_reader)? { Ok(Some(value)) } else { Err(KvsError::UnsupportCmdType) } } else { Ok(None) } } /// 查询 key 是否存在,如果存在,则记录 cmd 到日志,然后删除文件中的数据,再索引索引 fn delete(&mut self, k: String) -> Result<()> { if self.index.contains_key(&k) { let rm_cmd = Command::remove(k.clone()); serde_json::to_writer(&mut self.writer, &rm_cmd)?; self.writer.flush()?; if let Command::Remove { key } = rm_cmd { let old_cmd = self.index.remove(&key).expect("rm key error."); self.uncompacted += old_cmd.len; } Ok(()) } else { Err(KvsError::KeyNotFound) } } /// 压缩过期的不必要的数据指令 fn compact(&mut self) -> Result<()> { let compaction_gen = self.current_gen + 1; self.current_gen += 2; self.writer = self.new_log_file(self.current_gen)?; let mut compaction_writer = self.new_log_file(compaction_gen)?; let mut new_pos = 0; for cmd_pos in &mut self.index.values_mut() { let reader = self .readers .get_mut(&cmd_pos.gen) .expect("cann't find log reader"); if reader.pos != cmd_pos.pos { reader.seek(SeekFrom::Start(cmd_pos.pos))?; } let mut entry_reader = reader.take(cmd_pos.len); let len = std::io::copy(&mut entry_reader, &mut compaction_writer)?; *cmd_pos = (compaction_gen, 
new_pos..new_pos + len).into(); new_pos += len; } compaction_writer.flush()?; // 删除过期的日志文件 let stale_gens: Vec<_> = self .readers .keys() .filter(|&&gen| gen < compaction_gen) .cloned() .collect(); for stale_gen in stale_gens { self.readers.remove(&stale_gen); std::fs::remove_file(log_path(&self.path, stale_gen))?; } self.uncompacted = 0; Ok(()) } fn new_log_file(&mut self, gen: u64) -> Result<BufWriterWithPos<File>> { new_log_file(&self.path, gen, &mut self.readers) } } // 读取一个目录下的文件 fn read_dir(path: &str) -> Result<Vec<String>> { // Rust 实现浏览文件 let dirs: Vec<String> = std::fs::read_dir(path)? .flat_map(|res| -> Result<_> { Ok(res?.path()) }) .filter(|path| path.is_file()) .flat_map(|path| { path.file_name() .and_then(OsStr::to_str) .map(|s| s.to_string()) }) .collect(); dbg!(&dirs); Ok(dirs) } fn create_dir(path: &str) -> Result<bool> { std::fs::create_dir_all(path)?; Ok(true) } /// 日志文件的创建 fn new_log_file( path: &Path, gen: u64, readers: &mut HashMap<u64, BufReaderWithPos<File>>, ) -> Result<BufWriterWithPos<File>> { let path = log_path(&path, gen); let writer = BufWriterWithPos::new( std::fs::OpenOptions::new() .create(true) .write(true) .append(true) .open(&path)?, )?; readers.insert(gen, BufReaderWithPos::new(File::open(&path)?)?); Ok(writer) } #[cfg(test)] mod tests { use std::{fmt::Result, str::FromStr}; use super::*; #[test] fn test_store1() { let mut st = KVStore::open("./data").expect("kvstore init error."); let cache_key: String = "org_1001_info".into(); st.set(cache_key.clone(), "hello org".to_string()); assert_eq!(st.get(cache_key.to_string()).unwrap(), Some("hello org".to_string())); } #[test] fn test_load1() { let mut st = KVStore::open("./data").expect("kvstore init error."); let cache_key: String = "org_1001_info".to_string(); dbg!(st.get(cache_key.to_string()).unwrap()); } #[test] // fn test_store_delete() { // let mut st = KVStore::new(); // let cache_key: Vec<u8> = "org_1001_info".as_bytes().into(); // st.set(cache_key.clone(), "hello 
org".as_bytes().into()); // assert_eq!(st.delete(&cache_key), Some("hello org".as_bytes().into())); // assert_eq!(st.get(&cache_key), None); // } #[test] fn test_sorted_gen_list() { let res = sorted_gen_list(PathBuf::from("./")); dbg!(&res); } #[test] fn test_serde() { // 通过 serde_json 可以实现“流”方式的贪婪匹配对象(反序列化) let data = b"[10] [1] [2]"; let de = serde_json::Deserializer::from_slice(data); let mut stream = de.into_iter::<Vec<i32>>(); dbg!(stream.byte_offset()); // 0 dbg!(stream.next()); // Some([10]) dbg!(stream.byte_offset()); // 4 dbg!(stream.next()); // Some([1]) dbg!(stream.byte_offset()); // 8 dbg!(stream.next()); // Some([2]) dbg!(stream.byte_offset()); // 12 } #[test] fn test_read_dir() { let res = read_dir("./"); assert!(res.is_ok()); } #[test] fn test_create_dir() { // 执行时,`./` 指的是项目根目录 let res = create_dir("./test-dir"); assert!(res.is_ok()); } #[test] fn test_new_log_file() { let mut hs: HashMap<u64, BufReaderWithPos<File>> = HashMap::new(); let res = new_log_file(Path::new("./data"), 0, &mut hs); dbg!(res); } #[test] fn test_command_pos() { // Into trait 的使用和了解 let c1: CommandPos = (1, 2..17).into(); dbg!(c1); } } /* >* 资料来源:https://github.com/pingcap/talent-plan/blob/master/courses/rust/projects/project-2/README.md#project-spec ### 部分 1:错误处理 在这个项目中,I/O 错误会导致代码执行失败。因此,在完全实现数据库之前,我们还需要确定一件 至关重要的事:错误处理策略。 Rust 的错误处理很强大,但需要以合适的方式使用多个样板文件,而对于这个项目,failure 库将提供便捷的错误处理工具。 failure 库的指南中描述了几种错误处理模式。 我们选择其中一种策略,然后在库中可以定义自己的错误类型,也可以导入其他 Error。这个策略对应的错误类型将会在项目中的 Result 中使用, 可以使用 `?` 操作符把其他库中的错误类型转换为自己库的错误类型。 这样,为 Result 定义一个含有错误类型的类型别名,编码时就不需要到处输入 Result<T, YourErrorType>,而可以简单的输入 Result。这是一种非常常见的 Rust 模式。 最后,使用 use 语句将这些类型导入到代码中,然后将 main 函数的签名的返回值部分修改为 `Result<()>`。 运行 `cargo check` 可以用编译器检查错误,然后修复这些错误。现在可以先使用 `panic!()` 来结束 `main` 函数,从而通过编译。 在前进之前,先确定好你的错误处理策略。 与之前的项目一样,你可以创建用于占位的数据结构和方法,以便跑通测试用例。现在你定义一个错误类型,这很简单。然后在所有需要编译测试用例的地方添加 panic(`cargo test --no-run`)。 注意:Rust 中的“错误处理”仍在发展和改进中。本课程目前使用 [`failure`](https://docs.rs/failure/0.1.5/failure/) 库定义错误类型更容易。虽然 
`failure` 设计不错,但它的使用[不是最佳实践](https://github.com/rust-lang-nursery/rust-cookbook/issues/502#issue-387418261)。Rust 专家可能会开发出更好的错误处理方式。 在后面的课程中有可能不会一直使用 `failure`。于此同时,它也是一个不错的选择,它能用于学习 Rust 错误处理的演进以及优化。 ### 部分 2:log 的作用和原理 现在我们终于要开始从磁盘读写来实现一个真正的数据库。我们将使用 [serde](https://serde.rs/) 来把 "set" 和 "rm" 指令序列化为字符串,然后用标准的文件 I/O 接口来写到硬盘上。 下面这些是 `kvs` 最基本的日志行文: * "set" * 用户调用 `kvs set mykey myvalue` * `kvs` 创建 set 指令包含的值,其中有 key 和 value * 然后,程序将指令序列化为 `String` * 然后,把序列化的指令追加到日志文件中 * 如果成功了,则以错误码 0 静默地退出 * 如果失败了,就打印错误,并返回非 0 地错误代码并退出 * "get" * 用户调用指令:`kvs get mykey` * kvs 每次读取一条指令,将相应受影响的 key 和文件偏移量记录到内存的 map 中,即 key -> 日志指针 * 然后,检查 map 中的日志指针 * 如果失败,则打印“Key not found”,并以代码 0 退出 * 如果成功 * 它将指令日志反序列化得到最后的记录中的 key 和值 * 然后将结果打印到标准输出,并以代码 0 退出 * "rm" * 用户调用指令 `kvs rm mykey` * 和 get 指令一样,kvs 读取整条日志来在内存中构建索引 * 然后,它检查 map 中是否存在给定的 key * 如果不存在,就返回“Key not found” * 如果成功,将会创建对应的 rm 指令,其中包含了 key * 然后将指令序列化后追加到日志中 * 如果成功,则以错误码 0 静默退出 日志是提交到数据库的事务记录。通过在启动时,“重建”(replaying)日志中的记录,我们就可以重现数据库在某个时间点的特定状态。 在这个迭代中,你可以将键的值直接存储在内存中(因此在重启或重建时是不会从日志中读取内容的)。在后面的迭代中,只需将日志指针(文件偏移量)存储到日志中。 ### 部分 3:log 的写入 我们将从 set 开始。接下来将会有很多步骤。但大部分都比较容易实现,你可以通过运行 `cli_*` 相关测试用例来验证你的实现。 `serde` 是一个大型库,有许多功能选项,支持多种序列化格式。基本的序列化和反序列化只需要对结构体进行合适的注解,然后调用一个函数将序列化后的内容写入 `String` 或者 `Write` 流。 你需要选择一种序列化格式。并确定你需要的属性 —— 你是否需要性能优先?你希望以纯文本形式读取日志内容吗?这都在于你如何配置,但你记得在代码中写好注释。 还有其他因素要考虑一下:系统在哪设置缓冲,以及哪些地方需要?缓冲后续的影响是什么?何时打开和关闭文件句柄?有哪些支持的命令?`KvStore` 的生命周期是什么? 
你调用的一些 api 可能会失败,并返回错误类型的 `Result`。你需要确保调用函数会返回你自己设定的错误类型的 `Result`,并用 `?` 向上传递。 类似于 rm 命令,我们希望在把命令写入日志之前,还要检查 key 是否存在。因为两种场景需要区分开,所以可以使用 enum 类型的变体来统一所有命令。`serde` 可以完美地与枚举一起使用。 你现在可以实现 set 和 rm 命令了,重点放在 set / rm 对应的测试用例上,也可以阅读下一节的 get 命令实现。记住这两个命令并加以实现,会对你很有帮助。选择权在你。 ### 部分 4:log 的读取 现在该实现 get 了。在这一部分中,你不需要把日志指针存储在索引中,而将其放到下一节进行实现。这一节我们只需在启动时,读取日志中的所有命令,执行它们将每个键值对保存在内存中。然后根据需要从内存中读取。 应该一次性把日志内容全部读取到内存中并通过 map 类型来重现数据吗;需要在某个时候读取一条日志从而重现 map 中的某条数据吗?应该在序列化、反序列化之前将其从文件系统中读取到 buffer 中吗?想想你使用内存的方式。考虑一下与内核交互是否是从 I/O 流读取数据。 记住,"get" 可能获取不到值,这种情况下,需要特殊处理。这里,我们的 API 返回 `None`,然后客户端打印一个特定的消息,并以零代码退出。 读取日志有一个复杂点,你在编写 set 时,可能已经想到了:如何区分日志中的记录?也就是说,如何终止读取,何时开始读取下一条记录?需要这样实现吗?也许 serde 将直接从 I/O 流中序列化一条记录,并在操作完后停止读取,将游标停留在正确的位置,以便读取后续的记录。也许 serde 在检查到两条背靠背(back-to-back)的记录时会报错。也许你需要插入额外的信息来区分每个记录的长度,也有可能有其他方式。 _现在要实现 “get” 了_ ### 部分 5:在索引中存储 log 的指针 此时,除压缩数据相关的测试以外,其他测试应该都是通过的。接下来的步骤是一些性能优化和存储优化。当你实现它们时,需要注意它们的意义是什么? 正如我们前面描述的那样,我们所实现的数据库是在内存中维护所有的 key 索引。这个索引映射到字符串指针(值内容),而非 key 本身的内容。 这个更改就需要我们可以从任意偏移量处读取日志。想一想,这将怎样影响我们对文件的处理。 如果在前面的步骤中,你选择将字符串直接存在内存中,那现在需要调整代码为存储日志指针的方式,并根据需要从磁盘中加载内容。 ### 部分 6:KvStore 的有状态和无状态 请记住,我们的项目不仅是一个库,也可作为命令行程序。它们有些不一样:kvs 命令行程序向磁盘提交一个更改,然后就退出了(无状态);KvStore 会将更改提交到磁盘,然后常驻内存以服务后续的查询(有状态)。 你的 KvStore 是有状态还是无状态呢? 可以让你的 KvStore 的索引常驻内存中,这样就无需在每次调用时重新执行所有的日志指令。 ### 部分 7:log 的压缩 到这里,数据库运行是正常的,但日志会无限增长。这对其他数据库可能没啥影响,但对于我们正在构建的数据库 —— 我们需要尽量减少磁盘的占用。 因此,最后一步就是压缩日志了。需要考虑到随着日志的增长,可能有多个指令日志对同一个键操作。还要考虑到,对于同一个键,只有最近一次的日志的更改才对其值有影响: 索引序号 | 指令 |:---- |:--- | | 0 | ~~Command::Set("key-1", "value-1a")~~ | | 20 | Command::Set("key-2", "value-2") | | | ... | | 100 | Command::Set("key-1", "value-1b") | 在这个例子中,索引 0 的日志很明显是冗余的,因此不需要对其存储。日志压缩其实就是重新构建日志并且消除冗余: 索引序号 | 指令 |:---- |:--- | | 0 | Command::Set("key-2", "value-2") | | | ... | | 99 | Command::Set("key-1", "value-1b") | 这是基本的压缩算法的使用: 如何重建日志取决于你。考虑一下这个问题:最原始的方法是什么?需要多少内存?压缩日志所需的最小拷贝量是多少?能实时压缩吗?如果压缩失败,怎样保证数据完整性? 
到目前为止,我们一直致力于“日志”的处理,但实际上,数据库的数据存储在多个日志文件中是很常见的。如果你将日志拆分到多个文件中,你可能会发现压缩日志更容易。 给数据库实现日志压缩。 恭喜!你已经编写了一个功能齐全的数据库了。 如果你很好奇,你可以将你实现的数据库的性能与其他数据库(如 sled、bitcask、badger 或 RicksDB)进行性能对比。你可能喜欢研究它们实现的架构,将其与你自己的架构对比,以及架构的不同对性能有何影响。接下来的几个项目将为你提供优化的机会。 写的很棒,朋友。可以休息一下了。 */
true
608671a929699fca24ae4491dce6c355f599c390
Rust
itaibn/scheme
/src/scheme.rs
UTF-8
13,140
3.109375
3
[]
no_license
// For some reason importing std::borrow::Borrow produces a name collision with // RefCell::borrow but just importing std::borrow doesn't. use std::borrow; use std::fmt; use std::iter::DoubleEndedIterator; use gc::{self, Gc, GcCell}; use num::FromPrimitive; //use crate::equality::SchemeEq; use crate::number::Number; use crate::runtime::Procedure; // TODO: Rethink derive(PartialEq) #[derive(Debug, gc::Finalize, gc::Trace)] enum SchemeData { Boolean(bool), Character(char), Null, Cons(SchemeMut, SchemeMut), ConsImm(Scheme, Scheme), Procedure(Procedure), Symbol(String), Bytevector(Vec<u8>), Number(Number), //Port(Port), String(Vec<char>), Vector(Vec<SchemeMut>), Unspecified, } /// An immutable reference to a Scheme value. In R7RS language (cf. Section /// 3.4), this stands for a location whenever the location is stored in an /// immutable object. #[derive(Clone, Debug, gc::Finalize, gc::Trace)] pub struct Scheme(Gc<SchemeData>); /// A mutable reference to a Scheme value. In R7RS language (cf. Section 3.4), /// this stands for a location whenever the location is stored in a mutable /// object. (TODO: Is this type actually necessary?) #[derive(Debug, gc::Finalize, gc::Trace)] pub struct SchemeMut(GcCell<Scheme>); // Note: I believe the above is used incorrect, especially with respect to // cloning. TODO: Review uses of SchemeMut. /// Error type for Scheme computations. Currently a stub and doesn't hold any /// information. 
#[derive(Clone, Debug)] pub struct Error; impl Scheme { fn from_data(data: SchemeData) -> Scheme { Scheme(Gc::new(data)) } pub fn as_ptr(&self) -> *const () { &*self.0 as *const _ as *const () } pub fn boolean(b: bool) -> Scheme { Scheme::from_data(SchemeData::Boolean(b)) } pub fn as_boolean(&self) -> Option<bool> { if let SchemeData::Boolean(b) = *self.0 { Some(b) } else { None } } pub fn character(c: char) -> Scheme { Scheme::from_data(SchemeData::Character(c)) } pub fn as_character(&self) -> Option<char> { if let SchemeData::Character(c) = *self.0 { Some(c) } else { None } } pub fn null() -> Scheme { Scheme::from_data(SchemeData::Null) } pub fn is_null(&self) -> bool { match *self.0 { SchemeData::Null => true, _ => false, } } pub fn cons(fst: Scheme, snd: Scheme) -> Scheme { Scheme::from_data(SchemeData::Cons(SchemeMut::new(fst), SchemeMut::new(snd))) } pub fn cons_imm(fst: Scheme, snd: Scheme) -> Scheme { Scheme::from_data(SchemeData::ConsImm(fst, snd)) } // TODO: Make this return values rather than references // ^- What does this mean? 
pub fn as_pair(&self) -> Option<(Scheme, Scheme)> { //self.as_pair_mut().map(|(x, y)| (x.into(), y.into())) match *self.0 { SchemeData::Cons(ref x, ref y) => Some((x.clone().get(), y.clone().get())), SchemeData::ConsImm(ref x, ref y) => Some((x.clone(), y.clone())), _ => None, } } pub fn as_pair_mut(&self) -> Option<(&SchemeMut, &SchemeMut)> { if let SchemeData::Cons(ref x, ref y) = *self.0 { Some((x, y)) } else { None } } pub(crate) fn procedure(procc: Procedure) -> Scheme { Scheme::from_data(SchemeData::Procedure(procc)) } pub(crate) fn as_procedure(&self) -> Option<Procedure> { if let SchemeData::Procedure(ref procc) = *self.0 { Some(procc.clone()) } else { None } } pub fn symbol<S:ToString>(s: S) -> Scheme { Scheme::from_data(SchemeData::Symbol(s.to_string())) } pub fn as_symbol(&self) -> Option<&str> { if let SchemeData::Symbol(ref s) = *self.0 { Some(&*s) } else { None } } pub fn bytevector(bvec: Vec<u8>) -> Scheme { Scheme::from_data(SchemeData::Bytevector(bvec)) } pub fn as_bytevector(&self) -> Option<&[u8]> { if let SchemeData::Bytevector(ref bvec) = *self.0 { Some(&*bvec) } else { None } } pub fn number(n: Number) -> Scheme { Scheme::from_data(SchemeData::Number(n)) } pub fn as_number(&self) -> Option<&Number> { if let SchemeData::Number(ref n) = *self.0 { Some(&n) } else { None } } pub fn int(n: i64) -> Scheme { Scheme::from_data(SchemeData::Number(Number::from_i64(n).unwrap())) } pub fn as_int(&self) -> Option<i64> { if let SchemeData::Number(ref n) = *self.0 { n.to_i64() } else { None } } pub fn string(s: Vec<char>) -> Scheme { Scheme::from_data(SchemeData::String(s)) } pub fn as_string(&self) -> Option<&[char]> { if let SchemeData::String(ref s) = *self.0 { Some(&*s) } else { None } } // TODO: Revamp interface so no copy necessary pub fn vector(vec: Vec<Scheme>) -> Scheme { let copy = vec.into_iter().map(|x| SchemeMut::new(x)).collect(); Scheme::from_data(SchemeData::Vector(copy)) } pub fn as_vector(&self) -> Option<&[SchemeMut]> { if let 
SchemeData::Vector(ref vec) = *self.0 { Some(&*vec) } else { None } } pub fn unspecified() -> Scheme { Scheme::from_data(SchemeData::Unspecified) } /// Determine whether the Scheme value is a literal i.e. whether it /// evaluates to itself. pub fn is_literal(&self) -> bool { self.as_boolean().is_some() || self.as_int().is_some() || self.as_character().is_some() || self.as_bytevector().is_some() || self.as_string().is_some() || self.as_vector().is_some() } pub fn truey(&self) -> bool { self.as_boolean() != Some(false) } // Use iterators // May get into infinite loops pub fn into_vec(&self) -> Result<Vec<Scheme>, Error> { let mut cur_elems = Vec::new(); let mut head = self.clone(); loop { if let Some((car, cdr)) = head.as_pair() { cur_elems.push(car.clone()); head = cdr; } else if head.is_null() { return Ok(cur_elems); } else { return Err(Error); } } } // mutable? pub fn list<E: borrow::Borrow<Scheme>, I: IntoIterator<Item=E>>(iter: I) -> Scheme where I::IntoIter : DoubleEndedIterator { let mut res = Scheme::null(); for elem in iter.into_iter().rev() { res = Scheme::cons(elem.borrow().clone(), res); } res } } impl fmt::Display for Scheme { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { if let Some((a, b)) = self.as_pair() { let mut head = b; let mut items = vec![a]; while let Some((a, b)) = head.as_pair() { items.push(a); head = b; } write!(f, "(")?; for (n, x) in items.into_iter().enumerate() { if n > 0 { write!(f, " ")?; } write!(f, "{}", x)?; } if head.is_null() { write!(f, ")") } else { write!(f, " . 
{})", head) } } else if self.is_null() { write!(f, "()") } else if let Some(s) = self.as_symbol() { write!(f, "{}", s) } else if let Some(b) = self.as_boolean() { let c = if b {'t'} else {'f'}; write!(f, "#{}", c) } else if let Some(n) = self.as_int() { write!(f, "{}", n) } else if let Some(c) = self.as_character() { // TODO: escaping appropriate characters write!(f, "#\\{}", c) } else if let Some(s) = self.as_string() { let to_string: String = s.iter().collect(); // TODO: Scheme-specific escaping write!(f, "{:?}", to_string) } else if let Some(vec) = self.as_vector() { write!(f, "#(")?; for (i, x) in vec.iter().enumerate() { write!(f, "{}{}", x.get(), if i < vec.len()-1 {' '} else {')'})?; } Ok(()) } else if let Some(bvec) = self.as_bytevector() { write!(f, "#u8(")?; for (i, x) in bvec.iter().enumerate() { write!(f, "{}{}", x, if i < bvec.len()-1 {' '} else {')'})?; } Ok(()) } else if let Some(procc) = self.as_procedure() { //write!(f, "<builtin at 0x{:x}>", bltin as usize) write!(f, "{:?}", procc) } else { write!(f, "<unrecognized data type>") } } } // TODO: Do I want to implement Display for SchemeMut? 
impl SchemeMut { pub fn new(x: Scheme) -> SchemeMut { SchemeMut(GcCell::new(x)) } pub fn set(&self, y: Scheme) { *self.0.borrow_mut() = y; } pub fn get(&self) -> Scheme { self.0.borrow().clone() } } impl From<SchemeMut> for Scheme { fn from(x: SchemeMut) -> Scheme { x.get() } } /* impl fmt::Debug for SchemeData { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", Scheme::from_data(self.clone())) } } */ #[cfg(test)] mod test { use crate::builtin::initial_environment; use crate::equality::SchemeEq; use crate::read::read; use super::Scheme; fn comparison(input: &str, expected: Scheme) { let expr = read(input).unwrap(); let actual = expr.eval(&initial_environment()).unwrap(); assert!(actual.equal(&expected), "Test case failed.\n Program: {}\n Expected: {:?}\n Actual: {:?}", input, &expected, &actual); } #[test] fn test_float() { use crate::number::Number; use num::FromPrimitive; comparison("1.5", Scheme::number(Number::from_f64(1.5).unwrap())); } #[test] fn test_sums() { use crate::number::Number; use num::FromPrimitive; comparison("(+ 1 5 (+ 20) 1)", Scheme::int(27)); comparison("(+ 1.5 1.5)", Scheme::number(Number::from_f64(3.0).unwrap())); } #[test] fn test_lambda_0() { comparison("((lambda (x) x) 3)", Scheme::int(3)); } #[test] fn test_lambda_1() { comparison("(((lambda (x) (lambda (y) x)) 1) 2)", Scheme::int(1)); } #[test] fn test_lambda_2() { comparison("(((lambda (y) ((lambda (x) (lambda (y) x)) y)) 1) 2)", Scheme::int(1)); } #[test] fn test_lambda_3() { comparison("((lambda (x . 
y) (cons y x)) 2 3 4)", Scheme::cons( Scheme::cons(Scheme::int(3), Scheme::cons(Scheme::int(4), Scheme::null())), Scheme::int(2))); } #[test] fn test_quote() { comparison("''foo", Scheme::cons_imm( Scheme::symbol("quote".to_string()), Scheme::cons_imm( Scheme::symbol("foo".to_string()), Scheme::null() ) )); } #[test] fn test_bool() { comparison("#TrUe", Scheme::boolean(true)); } #[test] fn test_length() { comparison("(length (cons 1 (list 2 3 4 5)))", Scheme::int(5)); } #[test] fn test_character() { comparison("#\\n", Scheme::character('n')); } #[test] fn test_pair_syntax() { comparison("'(a . b)", Scheme::cons_imm(Scheme::symbol("a"), Scheme::symbol("b"))); } #[test] fn test_product() { comparison("(* 2 2)", Scheme::int(4)); } #[test] fn test_if() { comparison("(if (= (* 2 2) 4) 3 4)", Scheme::int(3)); } #[test] fn test_call_cc() { comparison("(call-with-current-continuation (lambda (cont) (* 3 (cont\ (* 5 6)))))", Scheme::int(30)); } #[test] fn test_symbol_eq() { comparison("(symbol=? 'a 'a)", Scheme::boolean(true)); } #[test] fn test_begin() { comparison("(begin 1 2)", Scheme::int(2)); } #[test] fn test_mut_0() { comparison("((lambda (x) (begin (set! x 1) x)) 2)", Scheme::int(1)); } #[test] fn test_mut_1() { comparison("((lambda (x) (begin (set-car! x 1) (set-cdr! x 1) x)) (cons '() '()))", Scheme::cons(Scheme::int(1), Scheme::int(1))); } #[test] fn test_mut_2() { comparison(" ((lambda (x) (begin ((lambda () (set! x 2))) x)) 1) ", Scheme::int(2)); } }
true
847d5d0cff4f61cf2606fe52536459b50ac62e69
Rust
cargo-crates/orm-rs
/src/methods/table_name.rs
UTF-8
739
3
3
[]
no_license
use std::any::type_name; use inflector::{string::{demodulize, pluralize}, cases::snakecase}; pub fn table_name<T>() -> String where T: ?Sized { // eg: arel::UserTable let full_namespace = type_name::<T>(); // eg: UserTable let struct_name = demodulize::demodulize(&full_namespace); // eg: user_table let snake_struct_name = snakecase::to_snake_case(&struct_name); // eg: user_tables pluralize::to_plural(&snake_struct_name) } #[cfg(test)] mod tests { use super::*; #[test] fn it_works() { // User struct User {} assert_eq!(table_name::<User>(), "users"); // UserRole struct UserRole {} assert_eq!(table_name::<UserRole>(), "user_roles"); } }
true
dcc6135544bab3fb9b1ca0e0ad3801a52a4cef4a
Rust
jakubdabek/metaheuristic-algorithms
/list3/z3/src/board.rs
UTF-8
7,384
3.015625
3
[]
no_license
use crate::direction::{Direction, DIRECTIONS}; use crate::point::Point; use itertools::{EitherOrBoth, Itertools}; use ndarray::prelude::*; use ndarray::IntoDimension; use std::fmt; use std::io::BufRead; use std::time::Duration; #[derive(Debug, Clone, Copy, PartialOrd, PartialEq, Ord, Eq)] pub enum Field { Empty, Wall, Exit, } #[derive(Debug, Clone)] pub struct Board { pub fields: Array2<Field>, pub agent_position: Point, } impl Board { #[inline] pub fn in_bounds(&self, point: Point) -> bool { let (h, w) = self.fields.dim(); point.x < (w - 1) as _ && point.y < (h - 1) as _ && point.x > 0 && point.y > 0 } #[inline] pub fn is_next_to_edge(&self, point: Point) -> bool { let (h, w) = self.fields.dim(); point.x == 1 || point.x == (w - 2) as _ || point.y == 1 || point.y == (h - 2) as _ } #[inline] pub fn is_valid_position(&self, point: Point) -> bool { self.in_bounds(point) && !matches!(self.fields[point.into_dimension()], Field::Wall) } #[inline] pub fn is_exit(&self, point: Point) -> bool { self.fields .get(point.into_dimension()) .map_or(false, |f| matches!(f, Field::Exit)) } pub fn move_into_exit(&self, point: Point) -> Option<(Direction, Point)> { debug_assert!(self.is_valid_position(point)); for &dir in DIRECTIONS { let point = dir.move_point(point); if matches!(self.fields[point.into_dimension()], Field::Exit) { return Some((dir, point)); } } None } pub fn adjacent_positions(&self, point: Point) -> impl Iterator<Item = (Direction, Point)> { assert!( self.in_bounds(point), "only points inbounds have adjacent ones" ); DIRECTIONS.iter().map(move |&x| (x, x.move_point(point))) } pub fn adjacent(&self, point: Point) -> impl Iterator<Item = (Direction, Point, Field)> + '_ { self.adjacent_positions(point) .filter_map(move |(d, p)| self.fields.get(p.into_dimension()).map(|&f| (d, p, f))) } pub fn adjacent_in_bounds( &self, point: Point, ) -> impl Iterator<Item = (Direction, Point, Field)> + '_ { debug_assert!( self.in_bounds(point), "adjacent_in_bounds called with out of 
bounds point" ); self.adjacent_positions(point) .map(move |(d, p)| (d, p, self.fields[p.into_dimension()])) } } #[derive(Debug, Clone, PartialOrd, PartialEq)] pub enum BoardCreationError { InvalidHeader, InvalidLine, NotEnoughLines, InvalidGoal, InvalidAgent, InvalidSolution, IOError(String), } impl fmt::Display for BoardCreationError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { BoardCreationError::InvalidHeader => write!(f, "Invalid header (first line)"), BoardCreationError::InvalidLine => write!(f, "Invalid data in a line"), BoardCreationError::NotEnoughLines => write!(f, "Not enough lines"), BoardCreationError::InvalidGoal => write!(f, "Invalid goal"), BoardCreationError::InvalidAgent => write!(f, "Invalid agent"), BoardCreationError::InvalidSolution => write!(f, "Invalid solution"), BoardCreationError::IOError(e) => write!(f, "{}", e), } } } impl std::error::Error for BoardCreationError {} impl From<std::io::Error> for BoardCreationError { fn from(err: std::io::Error) -> Self { BoardCreationError::IOError(err.to_string()) } } pub type FromReadOk = (Board, Option<(u64, Vec<Vec<Direction>>)>, Duration); impl Board { pub fn try_from_read<R: BufRead>(reader: R) -> Result<FromReadOk, BoardCreationError> { use BoardCreationError::*; let mut lines = reader.lines(); let header = lines .next() .ok_or(NotEnoughLines)?? 
.split_ascii_whitespace() .map(str::parse::<u64>) .collect::<Result<Vec<_>, _>>() .map_err(|_| InvalidHeader)?; let (time, n, m, sp) = match *header.as_slice() { [time, n, m] if time > 0 => (time, n, m, None), [time, n, m, s, p] if time > 0 && p >= s => (time, n, m, Some((s, p))), _ => return Err(InvalidHeader), }; let mut agent = None; let on_horizontal_edge = |i| i == n - 1 || i == 0; let on_vertical_edge = |j| j == 0 || j == m - 1; let on_edge = |i, j| on_horizontal_edge(i) || on_vertical_edge(j); let in_corner = |i, j| on_horizontal_edge(i) && on_vertical_edge(j); let mut fields = Array2::from_elem((n as _, m as _), Field::Empty); for it in fields .outer_iter_mut() .enumerate() .rev() .zip_longest(lines.by_ref()) { let (i, mut row, line) = match it { EitherOrBoth::Both((i, row), line) => { let line = line?; if line.as_bytes().len() != m as _ { return Err(InvalidLine); } (i as _, row, line) } EitherOrBoth::Left(_) => return Err(NotEnoughLines), EitherOrBoth::Right(_) => { unreachable!("should have exited the loop after processing all rows") } }; let line = line.as_bytes(); for ((c, j), field) in line.iter().zip(0..).zip(row.iter_mut()) { use Field::*; match c { b'8' if in_corner(i, j) => return Err(InvalidGoal), b'8' => *field = Exit, b'5' if on_edge(i, j) => return Err(InvalidAgent), b'5' if agent.is_some() => return Err(InvalidAgent), b'5' => agent = Some(Point::new(j, i)), b'1' => *field = Wall, b'0' if !on_edge(i, j) => (), // *field = Empty, _ => return Err(InvalidLine), } } if i == 0 { // processed all rows break; } } let agent_position = agent.ok_or(InvalidAgent)?; let sp = sp.map(|(s, p)| { let mut solutions = Vec::with_capacity(s as _); solutions.resize_with(s as _, Vec::new); for it in solutions.iter_mut().zip_longest(lines) { let (solution, line) = match it { EitherOrBoth::Both(solution, line) => (solution, line?), EitherOrBoth::Left(_) => return Err(NotEnoughLines), EitherOrBoth::Right(_) => break, // too many lines }; 
solution.reserve_exact(line.as_bytes().len()); line.bytes().try_for_each(|c| { Direction::parse(c) .ok_or(InvalidSolution) .map(|c| solution.push(c)) })?; } Ok((p, solutions)) }); Ok(( Board { fields, agent_position, }, sp.transpose()?, Duration::from_secs(time), )) } }
true
5ea2c696b6e737ca0797f0677e1dbea03007f0ec
Rust
luisholanda/asphalt-orm
/asphalt-core/src/types/impls.rs
UTF-8
1,219
2.671875
3
[ "Apache-2.0" ]
permissive
macro_rules! __define_aliases { ($($alias_ty: ident)+, $sql_ty: ty, $name: expr) => { $( #[doc = "Alias to `"] #[doc = $name] #[doc = "`"] pub type $alias_ty = $sql_ty; )+ }; } macro_rules! define_sql_types { ($($sql_name: literal $sql_ty: ident $(aliased as $($alias_ty: ident)*)?),+,) => { $( #[doc = "The `"] #[doc = $sql_name] #[doc = "` SQL type."] pub struct $sql_ty; impl $crate::types::NotNull for $sql_ty {} $(__define_aliases!($($alias_ty)+, $sql_ty, stringify!($sql_ty));)? )* }; } define_sql_types! { "BIGINT" BigInt aliased as BigSerial, "BINARY" Binary, "BOOL" Bool, "DATE" Date, "DOUBLE" Double, "FLOAT" Float, "INTEGER" Integer aliased as Serial, "INTERVAL" Interval, "NUMERIC" Numeric aliased as Decimal, "SMALLINT" SmallInt aliased as SmallSerial, "TEXT" Text aliased as VarChar, "TIME" Time, "TIMESTAMP" Timestamp, "TINYINT" TinyInt, "TIMESTAMPTZ" TimestampTz, "UUID" Uuid, "JSON" Json, } /// The `ARRAY` SQL type. pub struct Array<SqlTy>(SqlTy);
true
b25a5b9d13971931f18e3a633bcae967824ac185
Rust
weworld/rusty-leetcode
/src/tree_tag/closest_binary_search_tree_value_270.rs
UTF-8
1,958
3.171875
3
[ "WTFPL" ]
permissive
/* * @lc app=leetcode.cn id=270 lang=rust * * [270] 最接近的二叉搜索树值 */ use crate::utils::tree::TreeNode; // @lc code=start use std::rc::Rc; use std::cell::RefCell; impl Solution { pub fn closest_value(root: Option<Rc<RefCell<TreeNode>>>, target: f64) -> i32 { Solution::closest_value_rec(&root, target).unwrap() } fn closest_value_rec(root: &Option<Rc<RefCell<TreeNode>>>, target: f64) -> Option<i32> { match root { Some(node_ref) => { let node_b = node_ref.borrow(); let mut another_val: i32 = i32::min_value(); let mut has_another = false; if target < (node_b.val as f64) { if let Some(left_max) = Solution::closest_value_rec(&node_b.left, target) { another_val = left_max; has_another = true; } } else if target > (node_b.val as f64) { if let Some(right_min) = Solution::closest_value_rec(&node_b.right, target) { another_val = right_min; has_another = true; } } if has_another { if f64::abs((another_val as f64) - target) < f64::abs((node_b.val as f64) - target) { Some(another_val) } else { Some(node_b.val) } } else { Some(node_b.val) } }, None => None } } } // @lc code=end struct Solution; #[cfg(test)] mod test { use super::*; use crate::{tree_node,tree_leaf}; #[test] fn test_closest_value_rec() { let src = tree_node!(4, tree_node!(2, tree_leaf!(1), tree_leaf!(3)), tree_leaf!(5)); assert_eq!(Solution::closest_value(src, 3.714286), 4); } }
true
85c783a89e2d4259e55f15f72fef5fa1f82fad76
Rust
aidanbabo/minesweeper
/src/main.rs
UTF-8
2,721
2.640625
3
[]
no_license
// TODO // - MAKE AN ALERT FOR JEFFERY BECAUSE YOU LOVE HIM // - BUG - when bomb is on far right side, the space directly to the left often doesn't get // calculated properly // - add smiley // - requires facial animations while clicking on flagged // - add numbers // - need to create time and mines variables that are shared // - these to have to have numeric representations in the code // like how the open spaces have representations in the code. // OR not and just store it as a number // - TODO how tf to timers work in this language, std::Duration? // - levels of difficulty // - look into the source code to see what the difficulties are // search for "Expert" // - remove now-redudant parts of setting size passing use piston::window::WindowSettings; use piston::input::{RenderEvent}; use piston::event_loop::{Events, EventSettings}; use glutin_window::GlutinWindow; use opengl_graphics::{OpenGL, GlGraphics}; mod minesweeper; mod minesweeper_controller; mod minesweeper_view; pub use crate::minesweeper::{MineSweeper, ROWS, COLS}; pub use crate::minesweeper_controller::MineSweeperController; pub use crate::minesweeper_view::{MineSweeperView, MineSweeperViewSettings}; fn main() { // initialize custom classes to handle events and the like // model let ms = MineSweeper::new(); // controller let mut ms_c = MineSweeperController::new(ms); // view let settings = MineSweeperViewSettings::new(ROWS, COLS, 2.5); let width: f64 = settings.cols as f64 * settings.square_side + 2.0 * settings.border_long; let height: f64 = settings.rows as f64 * settings.square_side + 3.0 * settings.border_long + settings.smiley_side; // create window let opengl = OpenGL::V3_2; let window_settings = WindowSettings::new("Mine Sweeper", [width, height]) .graphics_api(opengl) .resizable(false) .exit_on_esc(true); let mut window: GlutinWindow = window_settings.build() .expect("Could not create window"); let mut event_settings = EventSettings::new(); event_settings.lazy = true; let mut events = 
Events::new(event_settings); let mut gl = GlGraphics::new(opengl); let ms_v = MineSweeperView::new(settings.clone()); // event loop while let Some(e) = events.next(&mut window) { // handle input event ms_c.event(settings.clone(), &e); // handle rendering if let Some(r) = e.render_args() { gl.draw(r.viewport(), |c, g| { use graphics::clear; // clear screen and call draw function clear([0.8, 0.8, 0.8, 1.0], g); ms_v.draw(&ms_c, &c, g); }); } } }
true
226e6c88f76c46ca2d06646a826be078bb8e7b60
Rust
davechallis/ocypod
/src/application/manager.rs
UTF-8
33,269
2.5625
3
[ "Apache-2.0" ]
permissive
//! Defines most of the core queue/job application logic. //! //! Main struct provided is `RedisManager`, through which all job queue operations are exposed. //! These will typically have HTTP handlers mapped to them. use std::collections::HashMap; use std::default::Default; use log::{debug, info, warn}; use redis::{aio::ConnectionLike, AsyncCommands}; use super::{job::RedisJob, queue::RedisQueue}; use crate::models::{job, queue, DateTime, JobStats, OcyError, OcyResult, QueueInfo, ServerInfo}; use crate::redis_utils::vec_from_redis_pipe; use crate::transaction_async; /// Redis key for list of all queues. This is used for fast lookups of all queue names without a scan. const QUEUES_KEY: &str = "queues"; /// Redis key for limbo queue. This is a very short lived queue, used to keep jobs in the transition state between /// `queued` and `running`. It's mostly a workaround for not being able to atomically pop a job from a queue and /// update its metadata (stored in a separate hash) without the risk of losing some data. const LIMBO_KEY: &str = "limbo"; /// Redis key for the running job list. Jobs are moved here from their original queue (via `limbo`) when they're /// picked up by a worker. Jobs in this queue are checked for timeouts. const RUNNING_KEY: &str = "running"; /// Redis key for the failed job list. Jobs that have either timed out, or failed by worker request are moved to this /// queue. Jobs in this queue are monitored for retries. const FAILED_KEY: &str = "failed"; /// Redis key for the ended job list. Jobs are moved here then they have either successfully completed, /// or failed/timed out with no remaining retries to attempted. Jobs in this queue are monitored for expiry. const ENDED_KEY: &str = "ended"; /// Redis key for the job ID counter. This is used as a counter to generate unique IDs for each job. const JOB_ID_KEY: &str = "job_id"; /// Prefix used for queue settings keys in Redis. 
A user created queue with name "foo" have its configuration stored /// under the key "queue:foo". const QUEUE_PREFIX: &str = "queue:"; /// Prefix used for job keys in Redis. A job with the ID 123 would be stored under the key "job:123". const JOB_PREFIX: &str = "job:"; /// Suffix used with queue keys get the Redis key for queued jobs. A user created queue with name "foo" would store /// its queued jobs under the key "queue:foo:jobs"; pub const QUEUE_JOBS_SUFFIX: &str = ":jobs"; /// Prefix used for tag keys in Redis. These are used to index jobs by any tags they were given at creation time. /// A tag created with name "foo" would be stored a "tag:foo". const TAG_PREFIX: &str = "tag:"; const STAT_JOBS_CREATED_KEY: &str = "stats:jobs:num_created"; const STAT_JOBS_COMPLETED_KEY: &str = "stats:jobs:num_completed"; const STAT_JOBS_RETRIED_KEY: &str = "stats:jobs:num_retried"; const STAT_JOBS_FAILED_KEY: &str = "stats:jobs:num_failed"; const STAT_JOBS_TIMED_OUT_KEY: &str = "stats:jobs:num_timed_out"; const STAT_JOBS_CANCELLED_KEY: &str = "stats:jobs:num_cancelled"; /// Manages queues and jobs within Redis. Contains main public functions that are called by HTTP services. /// /// Internally, uses RedisJob and RedisQueue structs as convenient wrappers around interacting with jobs/queues. #[derive(Clone, Debug)] pub struct RedisManager { /// Redis key for list of all queues. This is used for fast lookups of all queue names without a scan. pub queues_key: String, /// Redis key for limbo queue. This is a very short lived queue, used to keep jobs in the transition state between /// `queued` and `running`. It's mostly a workaround for not being able to atomically pop a job from a queue and /// update its metadata (stored in a separate hash) without the risk of losing some data. pub limbo_key: String, /// Redis key for the running job list. Jobs are moved here from their original queue (via `limbo`) when they're /// picked up by a worker. 
Jobs in this queue are checked for timeouts. pub running_key: String, /// Redis key for the failed job list. Jobs that have either timed out, or failed by worker request are moved to this /// queue. Jobs in this queue are monitored for retries. pub failed_key: String, /// Redis key for the ended job list. Jobs are moved here then they have either successfully completed, /// or failed/timed out with no remaining retries to attempted. Jobs in this queue are monitored for expiry. pub ended_key: String, /// Redis key for the job ID counter. This is used as a counter to generate unique IDs for each job. pub job_id_key: String, /// Prefix used for queue settings keys in Redis. A user created queue with name "foo" have its configuration stored /// under the key "queue:foo". pub queue_prefix: String, /// Prefix used for job keys in Redis. A job with the ID 123 would be stored under the key "job:123". pub job_prefix: String, /// Prefix used for tag keys in Redis. These are used to index jobs by any tags they were given at creation time. /// A tag created with name "foo" would be stored a "tag:foo". pub tag_prefix: String, /// Prefix used for job created statistics. pub stat_jobs_created_key: String, /// Prefix used for job completed statistics. pub stat_jobs_completed_key: String, /// Prefix used for job retry statistics. pub stat_jobs_retried_key: String, /// Prefix used for job failed statistics. pub stat_jobs_failed_key: String, /// Prefix used for job timed out statistics. pub stat_jobs_timed_out_key: String, /// Prefix used for job cancelled statistics. pub stat_jobs_cancelled_key: String, } impl RedisManager { /// Creates a new RedisManager which uses the given namespace prefix for internal keys it uses. /// If the given namespace is empty, then no prefix is used. 
pub fn new(key_namespace: &str) -> Self { let ns = if key_namespace.is_empty() { "".to_owned() } else { format!("{}:", key_namespace) }; Self { queues_key: ns.clone() + QUEUES_KEY, limbo_key: ns.clone() + LIMBO_KEY, running_key: ns.clone() + RUNNING_KEY, failed_key: ns.clone() + FAILED_KEY, ended_key: ns.clone() + ENDED_KEY, job_id_key: ns.clone() + JOB_ID_KEY, queue_prefix: ns.clone() + QUEUE_PREFIX, job_prefix: ns.clone() + JOB_PREFIX, tag_prefix: ns.clone() + TAG_PREFIX, stat_jobs_created_key: ns.clone() + STAT_JOBS_CREATED_KEY, stat_jobs_completed_key: ns.clone() + STAT_JOBS_COMPLETED_KEY, stat_jobs_retried_key: ns.clone() + STAT_JOBS_RETRIED_KEY, stat_jobs_failed_key: ns.clone() + STAT_JOBS_FAILED_KEY, stat_jobs_timed_out_key: ns.clone() + STAT_JOBS_TIMED_OUT_KEY, stat_jobs_cancelled_key: ns + STAT_JOBS_CANCELLED_KEY, } } fn queue_from_string(&self, name: &str) -> OcyResult<RedisQueue> { RedisQueue::new(self, name) } fn job_from_id(&self, id: u64) -> RedisJob { RedisJob::new(self, id) } /// Create or update a queue in Redis with given name and settings. /// /// Returns true if a new queue was created, or false if an existing queue was updated. pub async fn create_or_update_queue<C: ConnectionLike>( &self, conn: &mut C, name: &str, settings: &queue::Settings, ) -> OcyResult<bool> { self.queue_from_string(name)? .create_or_update(conn, settings) .await } /// Delete queue with given name from Redis. /// /// Returns true if a queue was deleted, and false if no queue with given name was found. pub async fn delete_queue<C: ConnectionLike + Send>( &self, conn: &mut C, name: &str, ) -> OcyResult<bool> { self.queue_from_string(name)?.delete(conn).await } /// Delete a job with given ID from Redis. /// /// Returns true if a job was found and deleted, false if no job with given ID was found. 
pub async fn delete_job<C: ConnectionLike + Send>( &self, conn: &mut C, job_id: u64, ) -> OcyResult<bool> { self.job_from_id(job_id).delete(conn).await } /// Get summary of server and queue data. Currently contains: /// * count of each job's status by queue /// * total number of jobs processed and their final status pub async fn server_info<C: ConnectionLike + Send>(&self, conn: &mut C) -> OcyResult<ServerInfo> { let mut queues_info = HashMap::new(); for queue_name in self.queue_names(conn).await? { let size = match self.queue_from_string(&queue_name)?.size(conn).await { Ok(size) => size, Err(OcyError::NoSuchQueue(_)) => continue, Err(err) => return Err(err), }; queues_info.insert( queue_name, QueueInfo { queued: size, ..Default::default() }, ); } let mut pipeline = redis::pipe(); let pipe = &mut pipeline; for queue_key in &[&self.failed_key, &self.ended_key, &self.running_key] { for job_id in conn.lrange::<_, Vec<u64>>(*queue_key, 0, -1).await? { pipe.hget( self.job_from_id(job_id).key(), &[job::Field::Queue, job::Field::Status], ); } } // option used to allow for jobs being deleted between calls for (queue_name, status) in vec_from_redis_pipe::<C, (Option<String>, Option<job::Status>)>(conn, pipe).await? { let queue_name = match queue_name { Some(queue_name) => queue_name, None => continue, }; let status = match status { Some(status) => status, None => continue, }; let queue_info = queues_info .entry(queue_name) .or_insert_with(QueueInfo::default); queue_info.incr_status_count(&status); } let stats_keys = &[ &self.stat_jobs_created_key, &self.stat_jobs_completed_key, &self.stat_jobs_retried_key, &self.stat_jobs_failed_key, &self.stat_jobs_timed_out_key, &self.stat_jobs_cancelled_key, ]; let job_stats: JobStats = conn.get(stats_keys).await?; Ok(ServerInfo { queues: queues_info, statistics: job_stats, }) } /// Get one or more metadata fields from given job ID. /// /// If `None` is given as the `fields` argument, then get all fields. 
pub async fn job_fields<C: ConnectionLike>( &self, conn: &mut C, job_id: u64, fields: Option<&[job::Field]>, ) -> OcyResult<job::JobMeta> { self.job_from_id(job_id).fields(conn, fields).await } /// Update one or more job metadata fields. /// /// Only following fields can be updated in this way: /// /// * status - used to mark job as completed/failed/cancelled etc. /// * output - used to update user provided information related to this job pub async fn update_job<C: ConnectionLike + Send>( &self, conn: &mut C, job_id: u64, update_req: &job::UpdateRequest, ) -> OcyResult<()> { self.job_from_id(job_id).update(conn, update_req).await } /// Update a job's `last_heartbeat` field with the current date/time. pub async fn update_job_heartbeat<C: ConnectionLike + Send>( &self, conn: &mut C, job_id: u64, ) -> OcyResult<()> { self.job_from_id(job_id).update_heartbeat(conn).await } /// Get the `status` field of given job. pub async fn job_status<C: ConnectionLike + Send>( &self, conn: &mut C, job_id: u64, ) -> OcyResult<job::Status> { self.job_from_id(job_id).status(conn).await } /// Update a job's `status` field to the given status, if an allowed state transition. /// /// Identical to calling `update_job` and with `Some(status)` provided. pub async fn set_job_status<C: ConnectionLike + Send>( &self, conn: &mut C, job_id: u64, status: &job::Status, ) -> OcyResult<()> { self.job_from_id(job_id).set_status(conn, status).await } /// Get the `output` field of given job. pub async fn job_output<C: ConnectionLike + Send>( &self, conn: &mut C, job_id: u64, ) -> OcyResult<serde_json::Value> { self.job_from_id(job_id).output(conn).await } /// Update a job's `output` field to the given output data. /// /// Identical to calling `update_job` and with `Some(output)` provided. 
pub async fn set_job_output<C: ConnectionLike + Send>( &self, conn: &mut C, job_id: u64, value: &serde_json::Value, ) -> OcyResult<()> { self.job_from_id(job_id).set_output(conn, value).await } // TODO: add an endpoint to get fields too? /// Get a list of jobs IDs with given tag name. pub async fn tagged_job_ids<C: ConnectionLike + Send>( &self, conn: &mut C, tag: &str, ) -> OcyResult<Vec<u64>> { let key = self.build_tag_key(tag)?; let mut job_ids: Vec<u64> = conn.smembers::<_, Vec<u64>>(key).await?; job_ids.sort(); Ok(job_ids) } /// Get list of all queue names. pub async fn queue_names<C: ConnectionLike + Send>(&self, conn: &mut C) -> OcyResult<Vec<String>> { let mut names: Vec<String> = conn.smembers(&self.queues_key).await?; names.sort(); Ok(names) } /// Get given queue's current settings. pub async fn queue_settings<C: ConnectionLike + Send>( &self, conn: &mut C, queue_name: &str, ) -> OcyResult<queue::Settings> { self.queue_from_string(queue_name)? .ensure_exists(conn) .await? .settings(conn) .await } /// Get the number of queues jobs in given queue. pub async fn queue_size<C: ConnectionLike + Send>( &self, conn: &mut C, queue_name: &str, ) -> OcyResult<u64> { self.queue_from_string(queue_name)? .ensure_exists(conn) .await? .size(conn) .await } /// Get total number of running jobs across all queues. pub async fn running_queue_size<C: ConnectionLike + Send>(&self, conn: &mut C) -> OcyResult<u64> { Ok(conn.llen(&self.running_key).await?) } /// Get total number of failed jobs across all queues. pub async fn failed_queue_size<C: ConnectionLike + Send>(&self, conn: &mut C) -> OcyResult<u64> { Ok(conn.llen(&self.failed_key).await?) } /// Get total number of ended jobs across all queues. pub async fn ended_queue_size<C: ConnectionLike + Send>(&self, conn: &mut C) -> OcyResult<u64> { Ok(conn.llen(&self.ended_key).await?) } /// Get a list of job IDs that are currently in a given queue. 
pub async fn queue_job_ids<C: ConnectionLike + Send>( &self, conn: &mut C, queue_name: &str, ) -> OcyResult<HashMap<job::Status, Vec<u64>>> { // TODO: check if this needs queue existence check self.queue_from_string(queue_name)?.job_ids(conn).await } /// Check all jobs in the failed queue for retries. /// /// Any which can be retried are re-queued on the queue they were created it. /// /// Any which have no automatic retries remaining are moved to the ended queue. pub async fn check_job_retries<C: ConnectionLike + Send>(&self, conn: &mut C) -> OcyResult<Vec<u64>> { debug!("Checking for jobs to retry"); let mut requeued: Vec<u64> = Vec::new(); let mut pipeline = redis::pipe(); let pipe = &mut pipeline; for job_id in conn.lrange::<_, Vec<u64>>(&self.failed_key, 0, -1).await? { pipe.hget(self.job_from_id(job_id).key(), job::RetryMeta::fields()); } for retry_meta in vec_from_redis_pipe::<C, job::RetryMeta>(conn, pipe).await? { match retry_meta.retry_action() { job::RetryAction::Retry => { let job = self.job_from_id(retry_meta.id()); if job.apply_retries(conn).await? { requeued.push(job.id()); } } job::RetryAction::End => { let job = self.job_from_id(retry_meta.id()); job.end_failed(conn).await?; } job::RetryAction::None => (), } } Ok(requeued) } /// Check all jobs in the running queue for timeouts. /// /// Any which timeout are moved to the failed queue, where they'll eventually either be retried, or moved to the /// ended queue. pub async fn check_job_timeouts<C: ConnectionLike + Send>(&self, conn: &mut C) -> OcyResult<Vec<u64>> { debug!("Checking job timeouts"); let mut timeouts: Vec<u64> = Vec::new(); let mut pipeline = redis::pipe(); let pipe = &mut pipeline; for job_id in conn.lrange::<_, Vec<u64>>(&self.running_key, 0, -1).await? { pipe.hget(self.job_from_id(job_id).key(), job::TimeoutMeta::fields()); } for timeout_meta in vec_from_redis_pipe::<C, job::TimeoutMeta>(conn, pipe).await? 
{ if timeout_meta.has_timed_out() { let job = self.job_from_id(timeout_meta.id()); if job.apply_timeouts(conn).await? { timeouts.push(job.id()); } } } Ok(timeouts) } /// Check all jobs in the ended queue for expiry. Any expired jobs will be entirely removed from the queue system. pub async fn check_job_expiry<C: ConnectionLike + Send>(&self, conn: &mut C) -> OcyResult<Vec<u64>> { debug!("Checking for expired jobs"); let mut expired: Vec<u64> = Vec::new(); let mut pipeline = redis::pipe(); let pipe = &mut pipeline; for job_id in conn.lrange::<_, Vec<u64>>(&self.ended_key, 0, -1).await? { pipe.hget(self.job_from_id(job_id).key(), job::ExpiryMeta::fields()); } for expiry_meta in vec_from_redis_pipe::<C, job::ExpiryMeta>(conn, pipe).await? { if expiry_meta.should_expire() { let job = self.job_from_id(expiry_meta.id()); if job.apply_expiry(conn).await? { expired.push(job.id()); } } } Ok(expired) } // TODO: make available as endpoint? Or optional periodic check? /// Checks the integrity of Redis DB, e.g. checking for dangling indexes, jobs in invalid states, etc. /// /// Mostly intended for use during development, as it has a non-trivial runtime cost. pub async fn check_db_integrity<C: ConnectionLike + Send>(&self, conn: &mut C) -> OcyResult<()> { for queue_name in self.queue_names(conn).await? { let queue = self.queue_from_string(&queue_name)?; if !(queue.exists(conn).await?) { warn!( "Queue '{}' found in {}, but not as key", queue_name, &self.queues_key ); } } let mut iter: redis::AsyncIter<String> = conn.scan_match::<_, String>("queue:*").await?; let mut queues = Vec::new(); while let Some(queue_key) = iter.next_item().await { if !queue_key.ends_with(":jobs") { queues.push(queue_key); } } for queue_key in queues { if !conn .sismember::<_, _, bool>(&self.queues_key, &queue_key[6..]) .await? 
{ warn!( "Queue '{}' found as key, but not in {}", &queue_key, &self.queues_key ); } } let _: () = transaction_async!(conn, &[&self.running_key], { let mut pipe = redis::pipe(); let pipe_ref = pipe.atomic(); for job_id in conn.lrange::<_, Vec<u64>>(&self.running_key, 0, -1).await? { pipe_ref.hget( self.job_from_id(job_id).key(), &[job::Field::Id, job::Field::Status, job::Field::StartedAt], ); } let info: Vec<(Option<u64>, Option<job::Status>, Option<DateTime>)> = vec_from_redis_pipe(conn, pipe_ref).await?; for (job_id, status, started_at) in info { let job_id = match job_id { Some(job_id) => job_id, None => { warn!( "Found job in {} queue, but did not find key", &self.running_key ); continue; } }; match status { Some(job::Status::Running) => (), Some(status) => { warn!("Found status '{}' in {} queue", status, &self.running_key) } None => warn!( "Found job {} in {} queue, but did not find key", job_id, &self.running_key ), } if started_at.is_none() { warn!( "Found job {} in {} queue, but job has no started_at", job_id, &self.running_key ); } } Some(()) }); let _: () = transaction_async!(conn, &[&self.failed_key], { let mut pipe = redis::pipe(); let pipe_ref = pipe.atomic(); for job_id in conn.lrange::<_, Vec<u64>>(&self.failed_key, 0, -1).await? 
{ pipe_ref.hget( self.job_from_id(job_id).key(), &[job::Field::Id, job::Field::Status, job::Field::EndedAt], ); } let info: Vec<(Option<u64>, Option<job::Status>, Option<DateTime>)> = vec_from_redis_pipe(conn, pipe_ref).await?; for (job_id, status, ended_at) in info { let job_id = match job_id { Some(job_id) => job_id, None => { warn!( "Found job in {} queue, but did not find key", &self.failed_key ); continue; } }; match status { Some(job::Status::Failed) | Some(job::Status::TimedOut) => (), Some(status) => { warn!("Found status '{}' in {} queue", status, &self.failed_key) } None => warn!( "Found job {} in {} queue, but did not find key", job_id, &self.failed_key ), } if ended_at.is_none() { warn!( "Found job {} in {} queue, but job has no ended_at", job_id, &self.failed_key ); } } Some(()) }); let _: () = transaction_async!(conn, &[&self.ended_key], { let mut pipe = redis::pipe(); let pipe_ref = pipe.atomic(); for job_id in conn.lrange::<_, Vec<u64>>(&self.ended_key, 0, -1).await? { pipe_ref.hget( self.job_from_id(job_id).key(), &[job::Field::Id, job::Field::Status, job::Field::EndedAt], ); } let info: Vec<(Option<u64>, Option<job::Status>, Option<DateTime>)> = vec_from_redis_pipe(conn, pipe_ref).await?; for (job_id, status, ended_at) in info { let job_id = match job_id { Some(job_id) => job_id, None => { warn!( "Found job in {} queue, but did not find key", &self.ended_key ); continue; } }; match status { Some(job::Status::Failed) | Some(job::Status::TimedOut) | Some(job::Status::Completed) | Some(job::Status::Cancelled) => (), Some(status) => warn!("Found status '{}' in {} queue", status, &self.ended_key), None => warn!( "Found job {} in {} queue, but did not find key", job_id, &self.ended_key ), } if ended_at.is_none() { warn!( "Found job {} in {} queue, but job has no started_at", job_id, &self.ended_key ); } } Some(()) }); Ok(()) } /// Check connection to Redis using ping command. 
#[cfg_attr(feature = "cargo-clippy", allow(clippy::unit_arg))] pub async fn check_ping<C: ConnectionLike>(conn: &mut C) -> OcyResult<()> { Ok(redis::cmd("PING").query_async(conn).await?) } /// Fetch the next job from given queue, if any. /// /// # Returns /// /// A `job::Payload` if a job is found, or `None` if the queue is empty. pub async fn next_queued_job<C: ConnectionLike + Send>( &self, conn: &mut C, queue_name: &str, ) -> OcyResult<Option<job::Payload>> { debug!("Client requested job from queue={}", queue_name); // queue can be deleted between these two calls, but will just return no job, so harmless let queue = self.queue_from_string(queue_name)? .ensure_exists(conn) .await?; let job = match conn .rpoplpush::<_, Option<u64>>(queue.jobs_key(), &self.limbo_key) .await? { Some(job_id) => self.job_from_id(job_id), None => return Ok(None), }; debug!( "[{}{}] moved from {} -> {}", &self.job_prefix, job.id(), queue.jobs_key(), &self.limbo_key ); // if Redis goes down before the following, job will be left in limbo, requeued at startup let job_payload: job::Payload = transaction_async!(conn, &[&job.key], { let input: Option<String> = conn.hget(&job.key, job::Field::Input).await?; let payload = job::Payload::new(job.id(), input.map(|s| serde_json::from_str(&s).unwrap())); let result: Option<()> = redis::pipe() .atomic() .hset(&job.key, job::Field::Status, job::Status::Running) .hset(&job.key, job::Field::StartedAt, DateTime::now()) .lrem(&self.limbo_key, 1, job.id()) .rpush(&self.running_key, job.id()) .query_async(conn) .await?; result.map(|_| payload) }); info!("[{}{}] started", &self.job_prefix, job_payload.id()); Ok(Some(job_payload)) } /// Create a new job on given queue. pub async fn create_job<C: ConnectionLike + Send>( &self, conn: &mut C, queue_name: &str, job_req: &job::CreateRequest, ) -> OcyResult<u64> { // TODO: use transaction to ensure that queue isn't deleted partway through job creation let queue = self.queue_from_string(queue_name)? 
.ensure_exists(conn) .await?; let queue_settings = queue.settings(conn).await?; let timeout = job_req.timeout.as_ref().unwrap_or(&queue_settings.timeout); let heartbeat_timeout = job_req .heartbeat_timeout .as_ref() .unwrap_or(&queue_settings.heartbeat_timeout); let expires_after = job_req .expires_after .as_ref() .unwrap_or(&queue_settings.expires_after); let retries = job_req.retries.unwrap_or(queue_settings.retries); let retry_delays = match job_req.retry_delays.clone() { Some(rd) => rd, None => Vec::new(), }; let job = self.job_from_id(conn.incr(&self.job_id_key, 1).await?); debug!( "Creating job with job_id={} on queue={}", job.id(), &queue.name ); let mut pipeline = redis::pipe(); let pipe = pipeline .atomic() .hset(&job.key, job::Field::Id, job.id()) .hset(&job.key, job::Field::Queue, &queue.name) .hset(&job.key, job::Field::Status, job::Status::Queued) .hset(&job.key, job::Field::CreatedAt, DateTime::now()) .hset(&job.key, job::Field::Timeout, timeout) .hset(&job.key, job::Field::HeartbeatTimeout, heartbeat_timeout) .hset(&job.key, job::Field::ExpiresAfter, expires_after) .hset(&job.key, job::Field::Retries, retries) .hset(&job.key, job::Field::RetriesAttempted, 0) .incr(&self.stat_jobs_created_key, 1) .lpush(queue.jobs_key(), job.id()); if let Some(ref input) = job_req.input { pipe.hset(&job.key, job::Field::Input, input.to_string()); } if let Some(ref tags) = job_req.tags { let tags_json: serde_json::Value = tags.as_slice().into(); pipe.hset(&job.key, job::Field::Tags, tags_json.to_string()); for tag in tags { let key = format!("{}{}", &self.tag_prefix, tag); pipe.sadd(key, job.id()); } } if !retry_delays.is_empty() { let retry_delays_json: serde_json::Value = retry_delays.as_slice().into(); pipe.hset( &job.key, job::Field::RetryDelays, retry_delays_json.to_string(), ); } pipe.query_async(conn).await?; info!("[{}] [{}] created", &queue.key, &job.key); Ok(job.id()) } /// Get unique Redis key for given tag. 
pub fn build_tag_key(&self, tag: &str) -> OcyResult<String> { if !tag.is_empty() { Ok(format!("{}{}", self.tag_prefix, tag)) } else { Err(OcyError::bad_request("tags cannot be empty")) } } } #[cfg(test)] mod test { use super::*; #[test] fn prefix_configuration_empty() { let rm = RedisManager::new(""); assert_eq!(rm.queues_key, "queues"); assert_eq!(rm.limbo_key, "limbo"); assert_eq!(rm.running_key, "running"); assert_eq!(rm.failed_key, "failed"); assert_eq!(rm.ended_key, "ended"); assert_eq!(rm.job_id_key, "job_id"); assert_eq!(rm.queue_prefix, "queue:"); assert_eq!(rm.job_prefix, "job:"); assert_eq!(rm.tag_prefix, "tag:"); assert_eq!(rm.stat_jobs_created_key, "stats:jobs:num_created"); assert_eq!(rm.stat_jobs_completed_key, "stats:jobs:num_completed"); assert_eq!(rm.stat_jobs_retried_key, "stats:jobs:num_retried"); assert_eq!(rm.stat_jobs_failed_key, "stats:jobs:num_failed"); assert_eq!(rm.stat_jobs_timed_out_key, "stats:jobs:num_timed_out"); assert_eq!(rm.stat_jobs_cancelled_key, "stats:jobs:num_cancelled"); } #[test] fn prefix_configuration() { let rm = RedisManager::new("foo"); assert_eq!(rm.queues_key, "foo:queues"); assert_eq!(rm.limbo_key, "foo:limbo"); assert_eq!(rm.running_key, "foo:running"); assert_eq!(rm.failed_key, "foo:failed"); assert_eq!(rm.ended_key, "foo:ended"); assert_eq!(rm.job_id_key, "foo:job_id"); assert_eq!(rm.queue_prefix, "foo:queue:"); assert_eq!(rm.job_prefix, "foo:job:"); assert_eq!(rm.tag_prefix, "foo:tag:"); assert_eq!(rm.stat_jobs_created_key, "foo:stats:jobs:num_created"); assert_eq!(rm.stat_jobs_completed_key, "foo:stats:jobs:num_completed"); assert_eq!(rm.stat_jobs_retried_key, "foo:stats:jobs:num_retried"); assert_eq!(rm.stat_jobs_failed_key, "foo:stats:jobs:num_failed"); assert_eq!(rm.stat_jobs_timed_out_key, "foo:stats:jobs:num_timed_out"); assert_eq!(rm.stat_jobs_cancelled_key, "foo:stats:jobs:num_cancelled"); } }
true
268ecffe970214803a3e1b007a1a3f4493cd24c8
Rust
dimohy/rust-learning
/exercise1/src/ex2.rs
UTF-8
603
3.578125
4
[ "MIT" ]
permissive
use std::io; use std::io::Write; /* 2. 터미널에서 문자열을 입력 받아서 그 문자열을 역순으로 출력하세요. 예를 들어 터미널에서 "abbd" 를 입력 받았으면 "dbba"를 출력하세요. **/ #[allow(dead_code)] pub fn run() { print!("? "); io::stdout().flush().unwrap(); let mut input = String::new(); io::stdin().read_line(&mut input) .expect("Failed to read line"); input = input.trim().to_string(); println!("input = {}", input); let output: String = input.chars().rev().collect(); println!("output = {}", output); }
true
433423497ad1c0884004e235fd9007a89358a227
Rust
ericsink/rust-raytracer
/src/geometry/prims/triangle.rs
UTF-8
7,933
3.203125
3
[ "MIT" ]
permissive
#![allow(dead_code)] use crate::prelude::*; use crate::geometry::bbox::{union_point, union_points, BBox, PartialBoundingBox}; use crate::geometry::prim::Prim; use crate::material::Material; use crate::mat4::{Mat4, Transform}; use crate::raytracer::{Ray, Intersection}; use crate::vec3::Vec3; use crate::material::materials::FlatMaterial; struct UvValue { u: f64, v: f64 } impl UvValue { pub fn from_tuple(uv: (f64, f64)) -> UvValue { UvValue { u: uv.0, v: uv.1 } } fn default3() -> [UvValue; 3] { [ UvValue { u: 0.5, v: 1.0 }, UvValue { u: 0.0, v: 0.0 }, UvValue { u: 1.0, v: 0.0 }, ] } } pub struct TriangleOptions { vertices: [Vec3; 3], normals: Option<[Vec3; 3]>, texinfo: Option<[UvValue; 3]>, material: Option<Box<dyn Material+Send+Sync>>, } fn get_auto_normals(v: [Vec3; 3]) -> [Vec3; 3] { let n = (v[1] - v[0]).cross(&(v[2] - v[0])); [n, n, n] } impl TriangleOptions { pub fn new(v0: Vec3, v1: Vec3, v2: Vec3) -> TriangleOptions { TriangleOptions { vertices: [v0, v1, v2], normals: None, texinfo: None, material: None, } } /// In the default case, all three normals at vertices are perpendicular /// to the triangle plane. 
pub fn normals(&mut self, normals: [Vec3; 3]) -> &mut Self { self.normals = Some(normals); self } pub fn texinfo(&mut self, texinfo: [(f64, f64); 3]) -> &mut Self { self.texinfo = Some([ UvValue::from_tuple(texinfo[0]), UvValue::from_tuple(texinfo[1]), UvValue::from_tuple(texinfo[2]), ]); self } pub fn material(&mut self, material: Box<dyn Material+Send+Sync>) -> &mut Self { self.material = Some(material); self } pub fn build(self) -> Triangle { let normals = self.normals.unwrap_or_else(|| get_auto_normals(self.vertices)); let texinfo = self.texinfo.unwrap_or_else(UvValue::default3); let material = self.material.unwrap_or_else(|| Box::new(FlatMaterial { color: Vec3::one() })); Triangle { vertices: self.vertices, normals: normals, texinfo: texinfo, material: material, } } } pub struct Triangle { vertices: [Vec3; 3], // All the same if our triangle is ``flat''. // Values differ when we want interpolation. e.g. round things like teapot. normals: [Vec3; 3], // Used in textured triangles, can be [UvValue; 3]::default() otherwise. texinfo: [UvValue; 3], material: Box<dyn Material+Send+Sync> } impl PartialBoundingBox for Triangle { fn partial_bounding_box(&self) -> Option<BBox> { Some(union_point(&union_points(&self.vertices[0], &self.vertices[1]), &self.vertices[2])) } } impl Prim for Triangle { /// http://en.wikipedia.org/wiki/M%C3%B6ller%E2%80%93Trumbore_intersection_algorithm /// Barycentric coordinates. 
fn intersects<'a>(&'a self, ray: &Ray, t_min: f64, t_max: f64) -> Option<Intersection<'a>> { let e1 = self.vertices[1] - self.vertices[0]; let e2 = self.vertices[2] - self.vertices[0]; let p = ray.direction.cross(&e2); let det = e1.dot(&p); // if determinant is near zero, ray lies in plane of triangle if det > -::core::f64::EPSILON && det < ::core::f64::EPSILON { return None } let inv_det = 1.0 / det; let s = ray.origin - self.vertices[0]; let beta = inv_det * s.dot(&p); if beta < 0.0 || beta > 1.0 { return None } let q = s.cross(&e1); let gamma = inv_det * ray.direction.dot(&q); if gamma < 0.0 || beta + gamma > 1.0 { return None } let t = inv_det * e2.dot(&q); if t < t_min || t > t_max { None } else { let intersection_point = ray.origin + ray.direction.scale(t); let alpha = 1.0 - beta - gamma; // Interpolate normals at vertices to get normal let n = self.normals[0].scale(alpha) + self.normals[1].scale(beta) + self.normals[2].scale(gamma); // Interpolate UVs at vertices to get UV let u = self.texinfo[0].u * alpha + self.texinfo[1].u * beta + self.texinfo[2].u * gamma; let v = self.texinfo[0].v * alpha + self.texinfo[1].v * beta + self.texinfo[2].v * gamma; Some(Intersection { n: n, t: t, u: u, v: v, position: intersection_point, material: &self.material }) } } fn mut_transform(&mut self, transform: &Transform) { let v0_t = Mat4::mult_p(&transform.m, &self.vertices[0]); let v1_t = Mat4::mult_p(&transform.m, &self.vertices[1]); let v2_t = Mat4::mult_p(&transform.m, &self.vertices[2]); let n0_t = Mat4::transform_normal(&self.normals[0], &transform.m); let n1_t = Mat4::transform_normal(&self.normals[1], &transform.m); let n2_t = Mat4::transform_normal(&self.normals[2], &transform.m); self.vertices[0] = v0_t; self.vertices[1] = v1_t; self.vertices[2] = v2_t; self.normals[0] = n0_t; self.normals[1] = n1_t; self.normals[2] = n2_t; } } #[test] fn it_intersects_and_interpolates() { let mut triopts = TriangleOptions::new( Vec3 { x: -1.0, y: 0.0, z: 0.0 }, Vec3 { x: 1.0, y: 
0.0, z: 0.0 }, Vec3 { x: 0.0, y: 1.0, z: 0.0 }); triopts.normals([ Vec3 { x: -1.0, y: 0.0, z: 0.0 }, Vec3 { x: 1.0, y: 0.0, z: 0.0 }, Vec3 { x: 0.0, y: 1.0, z: 0.0 }]); triopts.texinfo([(0.0, 0.0), (1.0, 0.0), (0.0, 1.0)]); let triangle = triopts.build(); // Tests actual intersection let intersecting_ray = Ray::new(Vec3 { x: 0.0, y: 0.5, z: -1.0 }, Vec3 { x: 0.0, y: 0.0, z: 1.0 }); let intersection = triangle.intersects(&intersecting_ray, 0.0, 10.0).unwrap(); assert_eq!(intersection.position.x, 0.0); assert_eq!(intersection.position.y, 0.5); assert_eq!(intersection.position.z, 0.0); assert_eq!(intersection.u, 0.25); assert_eq!(intersection.v, 0.5); assert_eq!(intersection.n.x, 0.0); assert_eq!(intersection.n.y, 0.5); assert_eq!(intersection.n.z, 0.0); // Ray off to the sides let mut non_intersecting_ray = Ray::new(Vec3 { x: 0.0, y: 0.5, z: -1.0 }, Vec3 { x: 100.0, y: 100.0, z: 1.0 }); let mut non_intersection = triangle.intersects(&non_intersecting_ray, 0.0, 10.0); assert!(non_intersection.is_none()); non_intersecting_ray = Ray::new(Vec3 { x: 0.0, y: 0.5, z: -1.0 }, Vec3 { x: -100.0, y: -100.0, z: 1.0 }); non_intersection = triangle.intersects(&non_intersecting_ray, 0.0, 10.0); assert!(non_intersection.is_none()); // Ray in opposite direction non_intersecting_ray = Ray::new(Vec3 { x: 0.0, y: 0.5, z: -1.0 }, Vec3 { x: 0.0, y: 0.0, z: -1.0 }); non_intersection = triangle.intersects(&non_intersecting_ray, 0.0, 10.0); assert!(non_intersection.is_none()); } #[test] fn it_intersects_only_in_tmin_tmax() { let mut triopts = TriangleOptions::new( Vec3 { x: -1.0, y: 0.0, z: 0.0 }, Vec3 { x: 1.0, y: 0.0, z: 0.0 }, Vec3 { x: 0.0, y: 1.0, z: 0.0 }); triopts.normals([Vec3::zero(), Vec3::zero(), Vec3::one()]); triopts.texinfo([(0.0, 0.0), (1.0, 0.0), (0.0, 1.0)]); let triangle = triopts.build(); // Tests tmin let intersecting_ray = Ray::new(Vec3 { x: 0.0, y: 0.5, z: -1.0 }, Vec3 { x: 0.0, y: 0.0, z: 1.0 }); let mut non_intersection = triangle.intersects(&intersecting_ray, 1000.0, 
10000.0); assert!(non_intersection.is_none()); // Tests tmax non_intersection = triangle.intersects(&intersecting_ray, 0.0, 0.0001); assert!(non_intersection.is_none()); }
true
99ff915cf9433bb6def5060514afca1ef77c5d56
Rust
mesalock-linux/crates-io
/vendor/hyper-0.10.16/src/server/request.rs
UTF-8
9,476
3.15625
3
[ "Apache-2.0", "Unlicense", "BSD-3-Clause", "0BSD", "MIT" ]
permissive
//! Server Requests //! //! These are requests that a `hyper::Server` receives, and include its method, //! target URI, headers, and message body. use std::io::{self, Read}; use std::net::SocketAddr; use std::time::Duration; use buffer::BufReader; use net::NetworkStream; use version::{HttpVersion}; use method::Method; use header::{Headers, ContentLength, TransferEncoding}; use http::h1::{self, Incoming, HttpReader}; use http::h1::HttpReader::{SizedReader, ChunkedReader, EmptyReader}; use uri::RequestUri; /// A request bundles several parts of an incoming `NetworkStream`, given to a `Handler`. pub struct Request<'a, 'b: 'a> { /// The IP address of the remote connection. pub remote_addr: SocketAddr, /// The `Method`, such as `Get`, `Post`, etc. pub method: Method, /// The headers of the incoming request. pub headers: Headers, /// The target request-uri for this request. pub uri: RequestUri, /// The version of HTTP for this request. pub version: HttpVersion, body: HttpReader<&'a mut BufReader<&'b mut NetworkStream>> } impl<'a, 'b: 'a> Request<'a, 'b> { /// Create a new Request, reading the StartLine and Headers so they are /// immediately useful. pub fn new(stream: &'a mut BufReader<&'b mut NetworkStream>, addr: SocketAddr) -> ::Result<Request<'a, 'b>> { let Incoming { version, subject: (method, uri), headers } = try!(h1::parse_request(stream)); debug!("Request Line: {:?} {:?} {:?}", method, uri, version); debug!("{:?}", headers); let body = if headers.has::<ContentLength>() { match headers.get::<ContentLength>() { Some(&ContentLength(len)) => SizedReader(stream, len), None => unreachable!() } } else if headers.has::<TransferEncoding>() { todo!("check for Transfer-Encoding: chunked"); ChunkedReader(stream, None) } else { EmptyReader(stream) }; Ok(Request { remote_addr: addr, method: method, uri: uri, headers: headers, version: version, body: body }) } /// Set the read timeout of the underlying NetworkStream. 
#[inline] pub fn set_read_timeout(&self, timeout: Option<Duration>) -> io::Result<()> { self.body.get_ref().get_ref().set_read_timeout(timeout) } /// Get a reference to the underlying `NetworkStream`. #[inline] pub fn downcast_ref<T: NetworkStream>(&self) -> Option<&T> { self.body.get_ref().get_ref().downcast_ref() } /// Get a reference to the underlying Ssl stream, if connected /// over HTTPS. /// /// This is actually just an alias for `downcast_ref`. #[inline] pub fn ssl<T: NetworkStream>(&self) -> Option<&T> { self.downcast_ref() } /// Deconstruct a Request into its constituent parts. #[inline] pub fn deconstruct(self) -> (SocketAddr, Method, Headers, RequestUri, HttpVersion, HttpReader<&'a mut BufReader<&'b mut NetworkStream>>) { (self.remote_addr, self.method, self.headers, self.uri, self.version, self.body) } } impl<'a, 'b> Read for Request<'a, 'b> { #[inline] fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> { self.body.read(buf) } } #[cfg(test)] mod tests { use buffer::BufReader; use header::{Host, TransferEncoding, Encoding}; use net::NetworkStream; use mock::MockStream; use super::Request; use std::io::{self, Read}; use std::net::SocketAddr; fn sock(s: &str) -> SocketAddr { s.parse().unwrap() } fn read_to_string(mut req: Request) -> io::Result<String> { let mut s = String::new(); try!(req.read_to_string(&mut s)); Ok(s) } #[test] fn test_get_empty_body() { let mut mock = MockStream::with_input(b"\ GET / HTTP/1.1\r\n\ Host: example.domain\r\n\ \r\n\ I'm a bad request.\r\n\ "); // FIXME: Use Type ascription let mock: &mut NetworkStream = &mut mock; let mut stream = BufReader::new(mock); let req = Request::new(&mut stream, sock("127.0.0.1:80")).unwrap(); assert_eq!(read_to_string(req).unwrap(), "".to_owned()); } #[test] fn test_get_with_body() { let mut mock = MockStream::with_input(b"\ GET / HTTP/1.1\r\n\ Host: example.domain\r\n\ Content-Length: 19\r\n\ \r\n\ I'm a good request.\r\n\ "); // FIXME: Use Type ascription let mock: &mut NetworkStream = 
&mut mock; let mut stream = BufReader::new(mock); let req = Request::new(&mut stream, sock("127.0.0.1:80")).unwrap(); assert_eq!(read_to_string(req).unwrap(), "I'm a good request.".to_owned()); } #[test] fn test_head_empty_body() { let mut mock = MockStream::with_input(b"\ HEAD / HTTP/1.1\r\n\ Host: example.domain\r\n\ \r\n\ I'm a bad request.\r\n\ "); // FIXME: Use Type ascription let mock: &mut NetworkStream = &mut mock; let mut stream = BufReader::new(mock); let req = Request::new(&mut stream, sock("127.0.0.1:80")).unwrap(); assert_eq!(read_to_string(req).unwrap(), "".to_owned()); } #[test] fn test_post_empty_body() { let mut mock = MockStream::with_input(b"\ POST / HTTP/1.1\r\n\ Host: example.domain\r\n\ \r\n\ I'm a bad request.\r\n\ "); // FIXME: Use Type ascription let mock: &mut NetworkStream = &mut mock; let mut stream = BufReader::new(mock); let req = Request::new(&mut stream, sock("127.0.0.1:80")).unwrap(); assert_eq!(read_to_string(req).unwrap(), "".to_owned()); } #[test] fn test_parse_chunked_request() { let mut mock = MockStream::with_input(b"\ POST / HTTP/1.1\r\n\ Host: example.domain\r\n\ Transfer-Encoding: chunked\r\n\ \r\n\ 1\r\n\ q\r\n\ 2\r\n\ we\r\n\ 2\r\n\ rt\r\n\ 0\r\n\ \r\n" ); // FIXME: Use Type ascription let mock: &mut NetworkStream = &mut mock; let mut stream = BufReader::new(mock); let req = Request::new(&mut stream, sock("127.0.0.1:80")).unwrap(); // The headers are correct? match req.headers.get::<Host>() { Some(host) => { assert_eq!("example.domain", host.hostname); }, None => panic!("Host header expected!"), }; match req.headers.get::<TransferEncoding>() { Some(encodings) => { assert_eq!(1, encodings.len()); assert_eq!(Encoding::Chunked, encodings[0]); } None => panic!("Transfer-Encoding: chunked expected!"), }; // The content is correctly read? assert_eq!(read_to_string(req).unwrap(), "qwert".to_owned()); } /// Tests that when a chunk size is not a valid radix-16 number, an error /// is returned. 
#[test] fn test_invalid_chunk_size_not_hex_digit() { let mut mock = MockStream::with_input(b"\ POST / HTTP/1.1\r\n\ Host: example.domain\r\n\ Transfer-Encoding: chunked\r\n\ \r\n\ X\r\n\ 1\r\n\ 0\r\n\ \r\n" ); // FIXME: Use Type ascription let mock: &mut NetworkStream = &mut mock; let mut stream = BufReader::new(mock); let req = Request::new(&mut stream, sock("127.0.0.1:80")).unwrap(); assert!(read_to_string(req).is_err()); } /// Tests that when a chunk size contains an invalid extension, an error is /// returned. #[test] fn test_invalid_chunk_size_extension() { let mut mock = MockStream::with_input(b"\ POST / HTTP/1.1\r\n\ Host: example.domain\r\n\ Transfer-Encoding: chunked\r\n\ \r\n\ 1 this is an invalid extension\r\n\ 1\r\n\ 0\r\n\ \r\n" ); // FIXME: Use Type ascription let mock: &mut NetworkStream = &mut mock; let mut stream = BufReader::new(mock); let req = Request::new(&mut stream, sock("127.0.0.1:80")).unwrap(); assert!(read_to_string(req).is_err()); } /// Tests that when a valid extension that contains a digit is appended to /// the chunk size, the chunk is correctly read. #[test] fn test_chunk_size_with_extension() { let mut mock = MockStream::with_input(b"\ POST / HTTP/1.1\r\n\ Host: example.domain\r\n\ Transfer-Encoding: chunked\r\n\ \r\n\ 1;this is an extension with a digit 1\r\n\ 1\r\n\ 0\r\n\ \r\n" ); // FIXME: Use Type ascription let mock: &mut NetworkStream = &mut mock; let mut stream = BufReader::new(mock); let req = Request::new(&mut stream, sock("127.0.0.1:80")).unwrap(); assert_eq!(read_to_string(req).unwrap(), "1".to_owned()); } }
true
d8f732d1b2f5713a822773414db44c16bffb6c57
Rust
LordAro/AdventOfCode
/2016/src/bin/day6.rs
UTF-8
1,267
3.21875
3
[]
no_license
use std::collections::btree_map::BTreeMap; use std::env; use std::fs::File; use std::io::{BufRead, BufReader}; fn most_least_common(btm: BTreeMap<char, i32>) -> (char, char) { let mut count_vec: Vec<_> = btm.into_iter().collect(); // Reverse sort the vector of pairs by "value" (sorted by "key" in case of tie) count_vec.sort_by(|a, b| b.1.cmp(&a.1)); let m = count_vec.first().map(|&(k, _)| k).unwrap(); let l = count_vec.last().map(|&(k, _)| k).unwrap(); (m, l) } fn main() { if env::args().len() != 2 { panic!("Incorrect number of arguments provided\n"); } let input = BufReader::new(File::open(env::args().nth(1).unwrap()).unwrap()); let mut cols: Vec<BTreeMap<char, i32>> = vec![]; for line in input.lines() { for (i, c) in line.unwrap().chars().enumerate() { if i == cols.len() { cols.push(BTreeMap::new()); } *cols[i].entry(c).or_insert(0) += 1; } } let mut most = String::new(); let mut least = String::new(); for c in cols { let (m, l) = most_least_common(c); most.push(m); least.push(l); } println!("Most common message: {}", most); println!("Least common message: {}", least); }
true
d1174ddb309a006c75521c4466efbc4f1c40e4bb
Rust
ilkkahanninen/juhlakalu
/backend/src/errors.rs
UTF-8
4,056
2.65625
3
[ "MIT" ]
permissive
use std::io::{Error, ErrorKind}; use actix_web::{http::StatusCode, Error as ActixError, HttpResponse, ResponseError}; use config::ConfigError; use deadpool_postgres::config::ConfigError as PoolConfigError; use deadpool_postgres::PoolError; use derive_more::{Display, From}; use serde::{Deserialize, Serialize}; use tokio_pg_mapper::Error as PGMError; use tokio_postgres::Error as PGError; use ts_rs::{export, TS}; use validator::ValidationErrors; #[derive(Display, From, Debug)] pub enum JkError { NotFound, Unauthorized, AlreadyExists, ConfigError(ConfigError), PGError(PGError), PGMError(PGMError), PoolError(PoolError), PoolConfigError(PoolConfigError), ActixError(ActixError), ValidationError(ValidationErrors), } impl std::error::Error for JkError {} impl JkError { pub fn errorcode(&self) -> ErrorCode { match self { JkError::NotFound => ErrorCode::NotFound, JkError::Unauthorized => ErrorCode::Unauthorized, JkError::PGError(error) if Self::is_unique_constraint_violation(error) => { ErrorCode::AlreadyExists } JkError::ValidationError(_) => ErrorCode::ValidationError, _ => ErrorCode::Internal, } } pub fn is_unique_constraint_violation(error: &PGError) -> bool { error .to_string() .contains("duplicate key value violates unique constrain") } } impl From<JkError> for Error { fn from(error: JkError) -> Self { match error { JkError::PoolError(_) => Error::new( ErrorKind::ConnectionRefused, format!( "Could not initialize a database pool (probably invalid host or credentials, or Postgres is not running)", ), ), JkError::ConfigError(error) => Error::new( ErrorKind::InvalidData, format!("Invalid configuration: {}", error.to_string()), ), JkError::ActixError(error) => Error::new( ErrorKind::Other, format!("Actix error: {}", error.to_string()), ), _ => Error::new(ErrorKind::Other, "Unexpected error"), } } } #[derive(Serialize, TS)] pub struct ErrorMessage { status_code: u16, error: ErrorCode, message: &'static str, info: Option<String>, } #[derive(Serialize, Deserialize, TS)] pub 
enum ErrorCode { NotFound, Unauthorized, AlreadyExists, Internal, ValidationError, } impl ErrorCode { fn message(&self) -> &'static str { match self { ErrorCode::NotFound => "Not found", ErrorCode::Unauthorized => "Unauthorized", ErrorCode::AlreadyExists => "Already exists", ErrorCode::Internal => "Internal server error", ErrorCode::ValidationError => "Validation error", } } } export! { ErrorMessage => "frontend/src/rust-types/ErrorMessage.ts", ErrorCode => "frontend/src/rust-types/ErrorCode.ts", } impl ResponseError for JkError { fn status_code(&self) -> StatusCode { match self { JkError::NotFound => StatusCode::NOT_FOUND, JkError::Unauthorized => StatusCode::UNAUTHORIZED, JkError::PGError(error) if Self::is_unique_constraint_violation(error) => { StatusCode::CONFLICT } JkError::ValidationError(_) => StatusCode::BAD_REQUEST, _ => StatusCode::INTERNAL_SERVER_ERROR, } } fn error_response(&self) -> HttpResponse { let status_code = self.status_code(); let error = self.errorcode(); let message = error.message(); let info = match self { JkError::ValidationError(errors) => Some(errors.to_string()), _ => None, }; HttpResponse::build(status_code).json(ErrorMessage { status_code: status_code.as_u16(), error, message, info, }) } }
true
da480578a152c0eb2c7fa9cb59751f027be03552
Rust
x7Gv/qpasswd
/src/gen.rs
UTF-8
1,859
3.140625
3
[]
no_license
use anyhow::Result; use rand::seq::SliceRandom; use rand_core::OsRng; #[derive(Debug)] pub enum CharsetType { Lowercase, Uppercase, Symbols, Numbers, Special, } #[derive(Debug, Default)] pub struct PasswdGenBuilder { pub length: i16, pub charsets: Vec<CharsetType>, } #[derive(Debug, Default)] pub struct PasswdGen { pub length: i16, pub charsets: Vec<CharsetType>, } pub fn charset(charset: &CharsetType) -> Vec<char> { match charset { CharsetType::Lowercase => { "abcdefghijklmnopqrstuvwxyz".chars().collect() }, CharsetType::Uppercase => { "ABCDEFGHIJKLMNQRSTUVWXYZ".chars().collect() }, CharsetType::Symbols => { "_*&|!?@$#=%".chars().collect() }, CharsetType::Numbers => { "0123456789".chars().collect() } CharsetType::Special => { r###"!"#$%&'()*+,-./:;<=>?@[\]^_`{|}~"###.chars().collect() } } } impl PasswdGenBuilder { pub fn add_charset(&mut self, charset: CharsetType) -> &mut Self { self.charsets.push(charset); self } pub fn set_length(&mut self, length: i16) -> &mut Self { self.length = length; self } pub fn build(self) -> PasswdGen { PasswdGen { length: self.length, charsets: self.charsets, } } } impl PasswdGen { pub fn builder() -> PasswdGenBuilder { PasswdGenBuilder::default() } pub fn generate(&mut self) -> Result<String> { let mut s = String::new(); let mut c = Vec::<char>::new(); for chset in &self.charsets { c.append(&mut charset(chset)); } for _ in 0..self.length { s.push(*c.choose(&mut rand::thread_rng()).unwrap()); } Ok(s) } }
true
901aa46b9485a7b309c8a3461f922d9ac6573e3e
Rust
JacobVanGeffen/shuttle
/tests/basic/pct.rs
UTF-8
7,762
3.03125
3
[ "Apache-2.0" ]
permissive
use shuttle::scheduler::PctScheduler; use shuttle::sync::Mutex; use shuttle::{check_random, thread, Config, MaxSteps, Runner}; use std::sync::atomic::AtomicUsize; use std::sync::atomic::Ordering; use std::sync::Arc; use std::time::Duration; use test_env_log::test; const TEST_LENGTH: usize = 20; /// Based on Fig 5 of the PCT paper. A randomized scheduler struggles here because it must choose /// to continually schedule thread 1 until it terminates, which happens with chance 2^TEST_LENGTH. /// On the other hand, this is a bug depth of 1, so PCT should find it with p = 1/2. fn figure5() { let lock = Arc::new(Mutex::new(0usize)); let lock_clone = Arc::clone(&lock); thread::spawn(move || { for _ in 0..TEST_LENGTH { thread::sleep(Duration::from_millis(1)); } *lock_clone.lock().unwrap() = 1; }); let l = lock.lock().unwrap(); assert_ne!(*l, 1, "thread 1 ran to completion"); } #[test] fn figure5_random() { // Chance of missing the bug is (1 - 2^-20)^100 ~= 99.99%, so this should not trip the assert check_random(figure5, 100); } #[test] #[should_panic(expected = "thread 1 ran to completion")] fn figure5_pct() { // Change of hitting the bug should be 1 - (1 - 1/2)^20 > 99.9999%, so this should trip the assert let scheduler = PctScheduler::new(1, 20); let runner = Runner::new(scheduler, Default::default()); runner.run(figure5); } #[test] fn one_step() { let scheduler = PctScheduler::new(2, 100); let runner = Runner::new(scheduler, Default::default()); runner.run(|| { thread::spawn(|| {}); }); } // Check that PCT correctly deprioritizes a yielding thread. If it wasn't, there would be some // iteration of this test where the yielding thread has the highest priority and so the others // never make progress. 
fn yield_spin_loop(use_yield: bool) { const NUM_THREADS: usize = 4; let scheduler = PctScheduler::new(1, 100); let mut config = Config::new(); config.max_steps = MaxSteps::FailAfter(50); let runner = Runner::new(scheduler, config); runner.run(move || { let count = Arc::new(AtomicUsize::new(0usize)); let _thds = (0..NUM_THREADS) .map(|_| { let count = count.clone(); thread::spawn(move || { count.fetch_add(1, Ordering::SeqCst); }) }) .collect::<Vec<_>>(); while count.load(Ordering::SeqCst) < NUM_THREADS { if use_yield { thread::yield_now(); } else { thread::sleep(Duration::from_millis(1)); } } }); } #[test] fn yield_spin_loop_fair() { yield_spin_loop(true); } #[test] #[should_panic(expected = "exceeded max_steps bound")] fn yield_spin_loop_unfair() { yield_spin_loop(false); } #[test] #[should_panic(expected = "null dereference")] // Based on Fig 1(a) of the PCT paper. We model NULL pointer dereference with an Option unwrap fn figure1a_pct() { const COUNT: usize = 5usize; // n=2, d=1, so probability of finding the bug is at least 1/2 // So probability of hitting the bug in 20 iterations = 1 - (1 - 1/2)^20 > 99.9% let scheduler = PctScheduler::new(1, 20); let runner = Runner::new(scheduler, Default::default()); runner.run(|| { let t1 = Arc::new(Mutex::new(None)); let t2 = Arc::clone(&t1); thread::spawn(move || { for _ in 0..COUNT { thread::sleep(Duration::from_millis(1)); } *t1.lock().unwrap() = Some(1); for _ in 0..COUNT { thread::sleep(Duration::from_millis(1)); } }); thread::spawn(move || { for _ in 0..COUNT { thread::sleep(Duration::from_millis(1)); } let _ = t2.lock().unwrap().expect("null dereference"); for _ in 0..COUNT { thread::sleep(Duration::from_millis(1)); } }); }); } // Based on Fig 1(b) from the PCT paper. We model NULL pointer dereference with an Option unwrap. 
fn figure1b(num_threads: usize) { assert!(num_threads >= 2); let x1 = Arc::new(Mutex::new(Some(1))); let x2 = Arc::clone(&x1); // Optionally, spawn a bunch of threads that add scheduling choice points, each taking 5 steps for _ in 0..num_threads - 2 { thread::spawn(|| { for _ in 0..5 { thread::sleep(Duration::from_millis(1)); } }); } // Main worker threads take 10 steps each thread::spawn(move || { for _ in 0..5 { thread::sleep(Duration::from_millis(1)); } *x1.lock().unwrap() = None; for _ in 0..4 { thread::sleep(Duration::from_millis(1)); } }); thread::spawn(move || { for _ in 0..4 { thread::sleep(Duration::from_millis(1)); } let b = { let b = x2.lock().unwrap().is_some(); b }; if b { let _ = x2.lock().unwrap().expect("null dereference"); } for _ in 0..4 { thread::sleep(Duration::from_millis(1)); } }); } #[test] #[should_panic(expected = "null dereference")] fn figure1b_pct() { // n=2, k=20, d=2, so probability of finding the bug in one iteration is at least 1/(2*20) // So probability of hitting the bug in 300 iterations = 1 - (1 - 1/40)^300 > 99.9% let scheduler = PctScheduler::new(2, 300); let runner = Runner::new(scheduler, Default::default()); runner.run(|| { figure1b(2); }); } #[test] #[should_panic(expected = "null dereference")] fn figure1b_pct_with_many_tasks() { // Spawn 18 busy threads, each taking 5 steps, plus 2 main threads with 10 steps, so k=110 // n=50, k=110, d=2, so probability of finding the bug in one iteration is at least 1/(20*110) // So probability of hitting the bug in 16_000 iterations = 1 - (1 - 1/2200)^16_000 > 99.9% let scheduler = PctScheduler::new(2, 16_000); let runner = Runner::new(scheduler, Default::default()); runner.run(|| { figure1b(20); }); } #[test] #[should_panic(expected = "deadlock")] // Based on Fig 1(c) from the PCT paper. 
fn figure_1c() { const COUNT: usize = 4usize; // n=2, k=2*14, d=2, so probability of finding the bug is at least 1/(2*28) // So probability of hitting the bug in 400 iterations = 1 - (1 - 1/56)^400 > 99.9% let scheduler = PctScheduler::new(2, 400); let runner = Runner::new(scheduler, Default::default()); runner.run(|| { let a1 = Arc::new(Mutex::new(0)); let a2 = Arc::clone(&a1); let b1 = Arc::new(Mutex::new(0)); let b2 = Arc::clone(&b1); thread::spawn(move || { for _ in 0..COUNT { thread::sleep(Duration::from_millis(1)); } let a = a1.lock().unwrap(); for _ in 0..COUNT { thread::sleep(Duration::from_millis(1)); } let b = b1.lock().unwrap(); for _ in 0..COUNT { thread::sleep(Duration::from_millis(1)); } assert_eq!(*a + *b, 0) }); thread::spawn(move || { for _ in 0..COUNT { thread::sleep(Duration::from_millis(1)); } let b = b2.lock().unwrap(); for _ in 0..COUNT { thread::sleep(Duration::from_millis(1)); } let a = a2.lock().unwrap(); for _ in 0..COUNT { thread::sleep(Duration::from_millis(1)); } assert_eq!(*a + *b, 0); }); }); }
true
b47cf6d7421a83dfe4ae55393337ec17167b87dd
Rust
MaxOhn/Bathbot
/bathbot/src/core/events/interaction/command.rs
UTF-8
3,453
2.578125
3
[ "ISC" ]
permissive
use std::{mem, sync::Arc}; use eyre::Result; use crate::{ core::{ commands::{ checks::check_authority, interaction::{InteractionCommandKind, InteractionCommands, SlashCommand}, }, events::{EventKind, ProcessResult}, BotConfig, Context, }, util::{interaction::InteractionCommand, Authored, InteractionCommandExt}, }; pub async fn handle_command(ctx: Arc<Context>, mut command: InteractionCommand) { let name = mem::take(&mut command.data.name); EventKind::InteractionCommand .log(&ctx, &command, &name) .await; let Some(cmd) = InteractionCommands::get().command(&name) else { return error!(name, "Unknown interaction command"); }; match process_command(ctx, command, cmd).await { Ok(ProcessResult::Success) => info!(%name, "Processed interaction command"), Ok(reason) => info!(?reason, "Interaction command `{name}` was not processed"), Err(err) => error!(name, ?err, "Failed to process interaction command"), } } async fn process_command( ctx: Arc<Context>, command: InteractionCommand, cmd: InteractionCommandKind, ) -> Result<ProcessResult> { match cmd { InteractionCommandKind::Chat(cmd) => { match pre_process_command(&ctx, &command, cmd).await? { Some(result) => return Ok(result), None => { if cmd.flags.defer() { command.defer(&ctx, cmd.flags.ephemeral()).await?; } (cmd.exec)(ctx, command).await?; } } } InteractionCommandKind::Message(cmd) => { if cmd.flags.defer() { command.defer(&ctx, cmd.flags.ephemeral()).await?; } (cmd.exec)(ctx, command).await?; } } Ok(ProcessResult::Success) } async fn pre_process_command( ctx: &Context, command: &InteractionCommand, slash: &SlashCommand, ) -> Result<Option<ProcessResult>> { let user_id = command.user_id()?; // Only for owner? if slash.flags.only_owner() && user_id != BotConfig::get().owner { let content = "That command can only be used by the bot owner"; command.error_callback(ctx, content).await?; return Ok(Some(ProcessResult::NoOwner)); } // Ratelimited? 
if let Some(bucket) = slash.bucket { if let Some(cooldown) = ctx.check_ratelimit(user_id, bucket) { trace!("Ratelimiting user {user_id} on bucket `{bucket:?}` for {cooldown} seconds"); let content = format!("Command on cooldown, try again in {cooldown} seconds"); command.error_callback(ctx, content).await?; return Ok(Some(ProcessResult::Ratelimited(bucket))); } } // Only for authorities? if slash.flags.authority() { match check_authority(ctx, user_id, command.guild_id).await { Ok(None) => {} Ok(Some(content)) => { command.error_callback(ctx, content).await?; return Ok(Some(ProcessResult::NoAuthority)); } Err(err) => { let content = "Error while checking authority status"; let _ = command.error_callback(ctx, content).await; return Err(err.wrap_err("failed to check authority status")); } } } Ok(None) }
true
fd58c014e063f71b7e7d021607a11298c1298580
Rust
d2verb/rush
/src/main.rs
UTF-8
2,686
3.109375
3
[]
no_license
use nix::sys::wait::*; use nix::unistd::*; use rush::builtin; use rush::command::*; use rustyline::error::ReadlineError; use rustyline::Editor; use std::env; use std::ffi::CString; use std::path::Path; /// Find real path of given command. /// /// # Examples /// /// ```rust /// let path = find_realpath("sh"); /// assert_eq!(&path, "/bin/sh"); /// ``` fn find_realpath(cmd_name: &str) -> String { match env::var_os("PATH") { Some(paths) => { for path in env::split_paths(&paths) { let cmd_path = Path::new(&path).join(cmd_name); if cmd_path.exists() { return cmd_path.to_str().unwrap().to_string(); } } cmd_name.to_string() } None => cmd_name.to_string(), } } fn execve_wrapper(args: Vec<&str>) { let path = CString::new(find_realpath(&args[0])).unwrap(); let mut cargs = Vec::<CString>::new(); for arg in args { cargs.push(CString::new(arg).unwrap()); } let envs: Vec<CString> = env::vars() .map(|(k, v)| CString::new(format!("{}={}", k, v)).unwrap()) .collect(); execve(&path, &cargs[0..], &envs).expect(&format!("failed to execute {:?}", &cargs[0])); } fn execute(cmd: Command) { match cmd { Command::Exit => builtin::exit(), Command::Cd(args) => builtin::cd(&args), Command::Pwd => builtin::pwd(), Command::External(args) => match fork().expect("fork failed") { ForkResult::Parent { child } => { let _ = waitpid(child, None); } ForkResult::Child => execve_wrapper(args), }, } } fn main() { env::set_var("RUST_BACKTRACE", "1"); let mut rl = Editor::<()>::new(); loop { let line = rl.readline("$ "); match line { Ok(line) => { let cmd = match Command::parse(&line) { Some(cmd) => cmd, None => continue, }; execute(cmd); } Err(ReadlineError::Interrupted) => break, Err(ReadlineError::Eof) => break, Err(err) => { println!("error: {:?}", err); break; } } } } #[cfg(test)] mod tests { use super::find_realpath; #[test] fn test_find_realpath() { // found assert_eq!(find_realpath("sh"), "/bin/sh"); // not found assert_eq!( 
find_realpath("b6f57b0a02ff43a72738a2e5be2f335690925d20cf4e89bd088d7677d7e94e99"), "b6f57b0a02ff43a72738a2e5be2f335690925d20cf4e89bd088d7677d7e94e99" ); } }
true
b42024cd2107ff0026353dfbe7c479cd05c606e0
Rust
fharding1/adventofcode-2019
/day1/src/main.rs
UTF-8
667
3.375
3
[]
no_license
use std::fs::File; use std::io::{BufRead, BufReader}; fn fuel_requirement(mass: i64) -> i64 { let mut total_fuel = 0; let mut cur_mass = mass; loop { cur_mass = (cur_mass / 3) - 2; if cur_mass <= 0 { break; } total_fuel += cur_mass; } return total_fuel; } fn main() { let path = "input"; let input = File::open(path).unwrap(); let buffered = BufReader::new(input); let mut total_fuel = 0; for line in buffered.lines() { let mass: i64 = line.unwrap().parse().unwrap(); total_fuel += fuel_requirement(mass); } println!("Total Fuel: {}", total_fuel); }
true
565287223af3b3aa2caaa7c0884dad35f0d88b54
Rust
nathan-at-least/wormcode
/wormcode_inst/src/instruction/intermediate/tests.rs
UTF-8
1,259
2.6875
3
[]
no_license
use super::{Intermediate, OpCode0, OpCode1, OpCode2, OpCode3}; use crate::{Mode, Operand}; use test_case::test_case; use wormcode_bits::B; #[test] fn test_instruction_data_0xabcdef() { use crate::Instruction; use wormcode_bits::Encode; let expected = B::<28>::from(0xabcdef); let inst = Instruction::Data(B::<24>::from(0xabcdef)); let enc = Intermediate::from(inst); let b28: B<28> = enc.encode(); assert_eq!(expected, b28); } #[test_case(Intermediate::Data(B::from(0x12_34_56)))] #[test_case(Intermediate::Data(B::from(0x65_43_21)))] #[test_case(Intermediate::Nullary(OpCode0::Nop))] #[test_case(Intermediate::Unary(OpCode1::Step, Operand::new(Mode::Direct, B::from(0x2a))))] #[test_case(Intermediate::Binary( OpCode2::Inc, Operand::new(Mode::Direct, B::from(0x2a)), Operand::new(Mode::Literal, B::from(0x3f)) ))] #[test_case(Intermediate::Trinary( OpCode3::MemCpy, Operand::new(Mode::Direct, B::from(0x2a)), Operand::new(Mode::Literal, B::from(0x3f)), Operand::new(Mode::Indirect, B::from(0x29)) ))] fn test_encode_decode(enc: Intermediate) { use wormcode_bits::{Decode, Encode}; let b: B<28> = enc.encode(); let dec = Intermediate::decode_option(b); assert_eq!(Some(enc), dec); }
true
24bffb18abcd4aa2bb6f6ede5b3314bbf54da159
Rust
noxabellus/uir
/support/src/utils.rs
UTF-8
3,979
3.078125
3
[]
no_license
use std::{cell::{Ref, RefMut}, ops::{Deref, DerefMut}};

/// Convert a `Ref<Option<T>>` into an `Option<Ref<T>>`, keeping the
/// runtime-managed borrow alive when the inner value is present.
pub fn flip_ref_opt_to_opt_ref<T> (r: Ref<Option<T>>) -> Option<Ref<T>> {
    match r.deref() {
        // unwrap cannot fail: we just observed Some, and Ref::map re-reads
        // the same still-borrowed cell contents
        Some(_) => Some(Ref::map(r, |o| o.as_ref().unwrap())),
        None => None
    }
}

/// Like `Option::and_then`, but threading a `RefCell` borrow: projects a
/// `Ref<T>` through a fallible borrowing closure into a `Ref<U>`.
pub fn ref_and_then<'r, T, U: 'static, F: FnOnce (&T) -> Option<&U>> (r: Ref<'r, T>, f: F) -> Option<Ref<'r, U>> {
    match f(r.deref()) {
        Some(u) => {
            // SAFETY: we're discarding the compile-time managed borrow in the reference,
            // in favor of the runtime-managed borrow in the Ref
            let u = unsafe { std::mem::transmute::<_, &'static U>(u) };
            Some(Ref::map(r, move |_| u))
        }
        None => None
    }
}

/// Extension trait providing `and_then` / `map` projections on `Ref`.
pub trait RefAndThen<'r> {
    type Inner;

    /// Project this borrow through a fallible closure.
    fn and_then<U: 'static, F: FnOnce (&Self::Inner) -> Option<&U>> (self, f: F) -> Option<Ref<'r, U>>;

    /// Project this borrow through an infallible closure.
    fn map<U: 'static, F: FnOnce (&Self::Inner) -> &U> (self, f: F) -> Ref<'r, U>;
}

impl<'r, T> RefAndThen<'r> for Ref<'r, T> {
    type Inner = T;

    fn and_then<U: 'static, F: FnOnce (&Self::Inner) -> Option<&U>> (self, f: F) -> Option<Ref<'r, U>> {
        ref_and_then(self, f)
    }

    fn map<U: 'static, F: FnOnce (&Self::Inner) -> &U> (self, f: F) -> Ref<'r, U> {
        Ref::map(self, f)
    }
}

/// Mutable counterpart of `ref_and_then`: projects a `RefMut<T>` through a
/// fallible mutably-borrowing closure into a `RefMut<U>`.
pub fn ref_and_then_mut<'r, T, U: 'static, F: FnOnce (&mut T) -> Option<&mut U>> (mut r: RefMut<'r, T>, f: F) -> Option<RefMut<'r, U>> {
    match f(r.deref_mut()) {
        Some(u) => {
            // SAFETY: we're discarding the compile-time managed borrow in the reference,
            // in favor of the runtime-managed borrow in the Ref
            let u = unsafe { std::mem::transmute::<&mut U, &'static mut U>(u) };
            Some(RefMut::map(r, move |_| u))
        }
        None => None
    }
}

/// Extension trait providing `and_then_mut` / `map_mut` projections on `RefMut`.
pub trait RefAndThenMut<'r> {
    type Inner;

    /// Project this mutable borrow through a fallible closure.
    fn and_then_mut<U: 'static, F: FnOnce (&mut Self::Inner) -> Option<&mut U>> (self, f: F) -> Option<RefMut<'r, U>>;

    /// Project this mutable borrow through an infallible closure.
    fn map_mut<U: 'static, F: FnOnce (&mut Self::Inner) -> &mut U> (self, f: F) -> RefMut<'r, U>;
}

impl<'r, T> RefAndThenMut<'r> for RefMut<'r, T> {
    type Inner = T;

    fn and_then_mut <U: 'static, F: FnOnce (&mut Self::Inner) -> Option<&mut U>> (self, f: F) -> Option<RefMut<'r, U>> {
        ref_and_then_mut(self, f)
    }

    fn map_mut<U: 'static, F: FnOnce (&mut Self::Inner) -> &mut U> (self, f: F) -> RefMut<'r, U> {
        RefMut::map(self, f)
    }
}

/// Returns `Ok(())` when `cond` holds, otherwise `Err(err)`.
pub fn assert<E> (cond: bool, err: E) -> Result<(), E> {
    if cond { Ok(()) } else { Err(err) }
}

/// Position of the first element equal to `el`, if any.
pub fn index_of<E: PartialEq, I: Iterator<Item = E>> (mut i: I, el: E) -> Option<usize> {
    i.position(|e| e == el)
}

/// Declares each listed module and re-exports its contents.
#[macro_export]
macro_rules! re_export {
    ($($module:ident),* $(,)?) => {
        // NOTE: no separator on this repetition. The previous `),*` form
        // comma-separated the expanded *items*, producing
        // `mod a; pub use a::*;, mod b; ...`, which is a syntax error.
        $(
            mod $module;
            pub use $module::*;
        )*
    };
}

/// Build a boxed slice of `len` elements, computing each from its index.
pub fn make_buffer_with_indexed<T, F: FnMut (usize) -> T> (len: usize, f: F) -> Box<[T]> {
    (0..len).map(f).collect()
}

/// Build a boxed slice of `len` elements from a nullary producer.
pub fn make_buffer_with<T, F: FnMut () -> T> (len: usize, mut f: F) -> Box<[T]> {
    make_buffer_with_indexed(len, move |_| f())
}

/// Build a boxed slice of `len` default-valued elements.
pub fn make_buffer_default<T: Default> (len: usize) -> Box<[T]> {
    make_buffer_with(len, T::default)
}

/// Build a boxed slice of `len` clones of `init`.
pub fn make_buffer_clone<T: Clone> (len: usize, init: &T) -> Box<[T]> {
    make_buffer_with(len, move || init.clone())
}

/// Build a boxed slice of `len` copies of `init`.
pub fn make_buffer_copy<T: Copy> (len: usize, init: T) -> Box<[T]> {
    make_buffer_with(len, move || init)
}

/// Overwrite every slot of `b` with a value computed from its index.
pub fn fill_buffer_with_indexed<T, F: FnMut (usize) -> T> (b: &mut [T], mut f: F) {
    b.iter_mut().enumerate().for_each(move |(i, e)| *e = f(i))
}

/// Overwrite every slot of `b` with values from a nullary producer.
pub fn fill_buffer_with<T, F: FnMut () -> T> (b: &mut [T], mut f: F) {
    fill_buffer_with_indexed(b, move |_| f())
}

/// Overwrite every slot of `b` with the default value.
pub fn fill_buffer_default<T: Default> (b: &mut [T]) {
    fill_buffer_with(b, T::default)
}

/// Overwrite every slot of `b` with clones of `v`.
pub fn fill_buffer_clone<T: Clone> (b: &mut [T], v: &T) {
    fill_buffer_with(b, move || v.clone())
}

/// Overwrite every slot of `b` with copies of `v`.
pub fn fill_buffer_copy<T: Copy> (b: &mut [T], v: T) {
    fill_buffer_with(b, move || v)
}

use std::cmp::Ordering;

/// Clamp `x` into the inclusive range `[a, b]`.
pub fn clamp<T: Ord> (x: T, a: T, b: T) -> T {
    match (x.cmp(&a), x.cmp(&b)) {
        (Ordering::Less, _) => a,
        (_, Ordering::Greater) => b,
        _ => x,
    }
}
true
a46ae4f2faf432154999c473b3ccb80b596e20d9
Rust
FJJ-Oneday/rust-study
/smart-pointer/src/main.rs
UTF-8
1,766
3.109375
3
[]
no_license
use crate::List::{Cons, Nil};
use std::ops::Deref;
use std::rc::{Rc, Weak};
use std::cell::RefCell;

fn main() {
    // A child node starts out with no parent (an empty Weak).
    let leaf = Rc::new(Node {
        value: 3,
        parent: RefCell::new(Weak::new()),
        children: RefCell::new(vec![]),
    });

    println!("leaf parent = {:?}", leaf.parent.borrow().upgrade());

    // The parent holds a *strong* reference down to the child...
    let branch = Rc::new(Node {
        value: 5,
        parent: RefCell::new(Weak::new()),
        children: RefCell::new(vec![Rc::clone(&leaf)]),
    });

    // ...while the child points back up with a *weak* reference, so the
    // parent/child cycle cannot leak.
    *leaf.parent.borrow_mut() = Rc::downgrade(&branch);

    println!("leaf parent = {:?}", leaf.parent.borrow().upgrade());
}

/// A tree node that can reach its parent via a non-owning Weak pointer
/// and owns its children via Rc.
#[derive(Debug)]
struct Node {
    value: i32,
    parent: RefCell<Weak<Node>>,
    children: RefCell<Vec<Rc<Node>>>,
}

/// Cons list with shared, interior-mutable payloads.
#[derive(Debug)]
enum List {
    Cons(Rc<RefCell<i32>>, Rc<List>),
    Nil,
}

/// Minimal Box lookalike, kept to demonstrate implementing Deref.
struct MyBox<T>(T);

impl<T> MyBox<T> {
    fn new(value: T) -> MyBox<T> {
        MyBox(value)
    }
}

impl<T> Deref for MyBox<T> {
    type Target = T;

    fn deref(&self) -> &T {
        &self.0
    }
}

/// Cons list whose tails can be shared between lists via Rc.
enum List2 {
    Cons(i32, Rc<List2>),
    Nil,
}
true
af4e6bed615031a59b4d6e875220443d23c18b7c
Rust
rnleach/sounding-analysis
/src/layers.rs
UTF-8
4,899
3.328125
3
[ "MIT" ]
permissive
//! This module finds significant layers. //! //! Examples are the dendritic snow growth zone, the hail growth zone, and inversions. //! //! The `Layer` type also provides some methods for doing basic analysis on a given layer. //! use crate::sounding::DataRow; use metfor::{CelsiusDiff, CelsiusPKm, HectoPascal, Km, Meters, MetersPSec, WindUV}; /// A layer in the atmosphere described by the values at the top and bottom. #[derive(Debug, Clone, Copy)] pub struct Layer { /// Sounding values at the bottom of the layer. pub bottom: DataRow, /// Sounding values at the top of the layer. pub top: DataRow, } /// A list of layers. pub type Layers = Vec<Layer>; impl Layer { /// Get the average lapse rate in C/km pub fn lapse_rate(&self) -> Option<CelsiusPKm> { let top_t = self.top.temperature.into_option()?; let bottom_t = self.bottom.temperature.into_option()?; #[allow(clippy::useless_conversion)] let CelsiusDiff(dt) = CelsiusDiff::from(top_t - bottom_t); let Km(dz) = Km::from(self.height_thickness()?); Some(CelsiusPKm(dt / dz)) } /// Get the height thickness in meters pub fn height_thickness(&self) -> Option<Meters> { let top = self.top.height.into_option()?; let bottom = self.bottom.height.into_option()?; if top == bottom { None } else { Some(top - bottom) } } /// Get the pressure thickness. 
pub fn pressure_thickness(&self) -> Option<HectoPascal> { let bottom_p = self.bottom.pressure.into_option()?; let top_p = self.top.pressure.into_option()?; if bottom_p == top_p { None } else { Some(bottom_p - top_p) } } /// Get the bulk wind shear (spd kts, direction degrees) pub fn wind_shear(&self) -> Option<WindUV<MetersPSec>> { let top = WindUV::from(self.top.wind.into_option()?); let bottom = WindUV::from(self.bottom.wind.into_option()?); Some(top - bottom) } } #[cfg(test)] mod layer_tests { use super::*; use crate::sounding::DataRow; use metfor::*; use optional::some; fn make_test_layer() -> Layer { let mut bottom = DataRow::default(); bottom.pressure = some(HectoPascal(1000.0)); bottom.temperature = some(Celsius(20.0)); bottom.height = some(Meters(5.0)); bottom.wind = some(WindSpdDir::<Knots> { speed: Knots(1.0), direction: 180.0, }); let mut top = DataRow::default(); top.pressure = some(HectoPascal(700.0)); top.temperature = some(Celsius(-2.0)); top.height = some(Meters(3012.0)); top.wind = some(WindSpdDir::<Knots> { speed: Knots(1.0), direction: 90.0, }); Layer { bottom, top } } fn approx_eq(a: f64, b: f64, tol: f64) -> bool { (a - b).abs() <= tol } #[test] fn test_height_thickness() { let lyr = make_test_layer(); println!("{:#?}", lyr); assert!(lyr .height_thickness() .unwrap() .approx_eq(Meters(3007.0), Meters(std::f64::EPSILON))); } #[test] fn test_pressure_thickness() { let lyr = make_test_layer(); println!("{:#?}", lyr); assert!(lyr .pressure_thickness() .unwrap() .approx_eq(HectoPascal(300.0), HectoPascal(std::f64::EPSILON))); } #[test] fn test_lapse_rate() { let lyr = make_test_layer(); println!( "{:#?}\n\n -- \n\n Lapse Rate = {:#?} \n\n --", lyr, lyr.lapse_rate().unwrap() ); assert!(lyr .lapse_rate() .unwrap() .approx_eq(CelsiusPKm(-7.31626), CelsiusPKm(1.0e-5))); } #[test] fn test_wind_shear() { let lyr = make_test_layer(); println!( "{:#?}\n\n -- \n\n {:#?} \n\n --", lyr, lyr.wind_shear().unwrap() ); let shear = 
WindSpdDir::<Knots>::from(lyr.wind_shear().unwrap()); let speed_shear = shear.abs(); let WindSpdDir { direction: direction_shear, .. } = shear; assert!(speed_shear.approx_eq(Knots(::std::f64::consts::SQRT_2), Knots(1.0e-5))); assert!(approx_eq(direction_shear, 45.0, 1.0e-5)); } } mod temperature_layers; pub use temperature_layers::{ cold_surface_temperature_layer, dendritic_snow_zone, hail_growth_zone, melting_freezing_energy_area, warm_surface_temperature_layer, warm_temperature_layer_aloft, warm_wet_bulb_layer_aloft, }; mod height_pressure; pub use height_pressure::{layer_agl, pressure_layer}; mod inversions; pub use inversions::{inversions, sfc_based_inversion}; mod convective; pub use convective::effective_inflow_layer;
true
6434ff7f02dad725a8d8dc14d00ba061f3d6f2a8
Rust
HerringtonDarkholme/leetcode
/src/reverse_k_group.rs
UTF-8
937
3.09375
3
[]
no_license
use crate::util::linked_list::ListNode; pub struct Solution; impl Solution { pub fn reverse_k_group(mut node: Option<Box<ListNode>>, k: i32) -> Option<Box<ListNode>> { None // I don't think rust is capable of doing below... or I don't have time to do // it can be done // https://github.com/aylei/leetcode-rust/blob/4fb84c1a62264f0dda3a28934d3b12aa7cc49616/src/n0025_reverse_nodes_in_k_group.rs } } /* var reverseKGroup = function(node, k) { let count = k let n = node if (!node) { return node } while (n && count > 0) { n = n.next count -= 1 } if (count !== 0) { return node } let last = n let current = node while (count < k) { let next = current.next current.next = last last = current current = next count += 1 } node.next = reverseKGroup(node.next, k) return last }; */
true
fc0a745254d42b9fc0a3d260765b1126b82479ed
Rust
williewillus/advent_of_code_2017
/src/day21.rs
UTF-8
3,042
3.078125
3
[]
no_license
use std::collections::HashMap; use std::fs::File; use std::io::BufRead; use std::io::BufReader; use pathfinding::Matrix; use itertools::Itertools; use itertools::iterate; fn to_matrix(side: &str) -> Matrix<bool> { Matrix::square_from_vec( side.bytes() .filter(|b| *b != b'/') .map(|b| b == b'#') .collect()) } fn to_rules(line: &str) -> Vec<(Matrix<bool>, Matrix<bool>)> { // todo can I use a fixed-size array? it messes with flat_map below let (left, right) = line.split(" => ").next_tuple().unwrap(); let pat = to_matrix(left); let result = to_matrix(right); vec![ (pat.flipped_lr(), result.clone()), (pat.flipped_lr().rotated_cw(1), result.clone()), (pat.flipped_lr().rotated_cw(2), result.clone()), (pat.flipped_lr().rotated_cw(3), result.clone()), (pat.flipped_ud(), result.clone()), (pat.flipped_ud().rotated_cw(1), result.clone()), (pat.flipped_ud().rotated_cw(2), result.clone()), (pat.flipped_ud().rotated_cw(3), result.clone()), (pat.rotated_cw(3), result.clone()), (pat.rotated_cw(2), result.clone()), (pat.rotated_cw(1), result.clone()), (pat, result), ] } fn enhance(old: &Matrix<bool>, rules: &HashMap<Matrix<bool>, Matrix<bool>>) -> Matrix<bool> { if old.rows % 2 == 0 { let old_chunks = old.rows / 2; let new_grid_size = old_chunks * 3; let mut new_grid = Matrix::new_square(new_grid_size, false); for chunk_y in 0..old_chunks { for chunk_x in 0..old_chunks { let old_chunk = old.slice(chunk_x*2..chunk_x*2 + 2, chunk_y*2..chunk_y*2 + 2); let res = &rules[&old_chunk]; new_grid.set_slice(&(chunk_x*3, chunk_y*3), &res); } } new_grid } else { assert_eq!(0, old.rows % 3); let old_chunks = old.rows / 3; let new_grid_size = old_chunks * 4; let mut new_grid = Matrix::new_square(new_grid_size, false); for chunk_y in 0..old_chunks { for chunk_x in 0..old_chunks { let old_chunk = old.slice(chunk_x*3..chunk_x*3 + 3, chunk_y*3..chunk_y*3 + 3); let res = &rules[&old_chunk]; new_grid.set_slice(&(chunk_x*4, chunk_y*4), &res); } } new_grid } } pub fn run() { let rules = 
BufReader::new(File::open("d21_input.txt").unwrap()).lines().filter_map(|l| l.ok()) .flat_map(|l| to_rules(&l).into_iter()) .collect::<HashMap<_, _>>(); let init = Matrix::square_from_vec(vec![false, true, false, false, false, true, true, true, true]); // todo use itertools.iterate() when this actually works let mut iter = iterate(init, |s| enhance(s, &rules)); println!("part 1: {}", iter.nth(5).unwrap().as_ref().iter().filter(|b| **b).count()); // another 12 to get the 18th iteration println!("part 2: {}", iter.nth(12).unwrap().as_ref().iter().filter(|b| **b).count()); }
true
8dab9d96b0c6e0d2739dc8007b0bde7628ae6bb1
Rust
h2gb/h2transformer
/src/lib.rs
UTF-8
62,610
3.6875
4
[ "MIT" ]
permissive
//! [![Crate](https://img.shields.io/crates/v/h2transformer.svg)](https://crates.io/crates/h2transformer) //! //! H2Transformer is a library for transforming raw data between encodings. //! //! As part of [h2gb](https://github.com/h2gb), it's common to extract a buffer //! from a binary that's encoded in some format - Base64, hex string, etc. //! //! This library can detect and transform common formats. It can also //! transform back to the original data with a constant length and without //! saving any context (while the length is constant, the data isn't always //! identical - like the case of Base32 and hex strings). If proper undo/redo //! is needed, this won't fit the bill. //! //! Check out the definition of the `H2Transformation` enum for full details on //! everything it can do! //! //! # Usage //! //! The public API is pretty straight forward. Here's an example that transforms //! then untransforms some hex data: //! //! //! ``` //! use h2transformer::H2Transformation; //! //! // Input (note that some are uppercase and some are lower - that's allowed) //! let i: Vec<u8> = b"48656c6C6F2c20776f726c64".to_vec(); //! //! // Output //! let o = H2Transformation::FromHex.transform(&i).unwrap(); //! //! // It's "Hello, world" //! assert_eq!(b"Hello, world".to_vec(), o); //! //! // Transform back to the original //! let i = H2Transformation::FromHex.untransform(&o).unwrap(); //! //! // Get the original back - note that it's the same length, but the case has //! // been normalized //! assert_eq!(b"48656c6c6f2c20776f726c64".to_vec(), i); //! 
``` use simple_error::{SimpleResult, bail}; use base64; use base32; use inflate; #[cfg(feature = "serialize")] use serde::{Serialize, Deserialize}; /// When performing an XorByConstant transformation, this represents the size #[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Copy)] #[cfg_attr(feature = "serialize", derive(Serialize, Deserialize))] pub enum XorSize { /// One byte / 8 bits - eg, `0x12` EightBit(u8), /// Two bytes / 16 bits - eg, `0x1234` SixteenBit(u16), /// Four bytes / 32 bits - eg, `0x12345678` ThirtyTwoBit(u32), /// Eight bytes / 64 bits - eg, `0x123456789abcdef0` SixtyFourBit(u64), } /// Which transformation to perform. #[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Copy)] #[cfg_attr(feature = "serialize", derive(Serialize, Deserialize))] pub enum H2Transformation { /// No transformation - simply returns the same value. Mostly here for /// testing. /// /// # Example /// /// ``` /// use h2transformer::H2Transformation; /// /// // Input: "abcdef" /// let i: Vec<u8> = b"abcdef".to_vec(); /// /// // Output: "abcdef" /// let o = H2Transformation::Null.transform(&i); /// assert_eq!(Ok(b"abcdef".to_vec()), o); /// ``` /// /// # Restrictions / errors /// /// n/a Null, /// Xor each byte / word / dword / qword by a constant. Operates on eight, /// 16, 32, or 64-bit chunks. 
/// /// # Examples /// /// ## Eight bit /// /// ``` /// use h2transformer::{H2Transformation, XorSize}; /// /// // Input: "\x00\x01\x02\x03", XorSize::EightBit(0xFF) /// let i: Vec<u8> = b"\x00\x01\x02\x03".to_vec(); /// /// // Output: "\xff\xfe\xfd\xfc" /// let o = H2Transformation::XorByConstant(XorSize::EightBit(0xFF)).transform(&i); /// assert_eq!(Ok(b"\xff\xfe\xfd\xfc".to_vec()), o); /// ``` /// /// ## Sixteen bit /// /// ``` /// use h2transformer::{H2Transformation, XorSize}; /// /// // Input: "\x00\x01\x02\x03", XorSize::SixteenBit(0xFF00) /// let i: Vec<u8> = b"\x00\x01\x02\x03".to_vec(); /// // Output: "\xFF\x01\xFD\x03" /// let o = H2Transformation::XorByConstant(XorSize::SixteenBit(0xFF00)).transform(&i); /// assert_eq!(Ok(b"\xff\x01\xfd\x03".to_vec()), o); /// ``` /// /// # Restrictions / errors /// /// The size of the input buffer must be a multiple of the XOR bit size. /// /// ``` /// use h2transformer::{H2Transformation, XorSize}; /// /// let i: Vec<u8> = b"\x00".to_vec(); /// /// // Error /// assert!(H2Transformation::XorByConstant(XorSize::SixteenBit(0xFF00)).transform(&i).is_err()); /// ``` XorByConstant(XorSize), /// Convert from standard Base64 with padding. /// /// # Example /// /// ``` /// use h2transformer::H2Transformation; /// /// // Input: "AQIDBA==" /// let i: Vec<u8> = b"AQIDBA==".to_vec(); /// /// // Output: "\x01\x02\x03\x04" /// let o = H2Transformation::FromBase64.transform(&i).unwrap(); /// /// assert_eq!(b"\x01\x02\x03\x04".to_vec(), o); /// ``` /// /// # Restrictions / errors /// /// Must be valid Base64 with correct padding and decode to full bytes. /// /// ``` /// use h2transformer::H2Transformation; /// /// let i: Vec<u8> = b"Not valid base64~".to_vec(); /// /// // Error /// assert!(H2Transformation::FromBase64.transform(&i).is_err()); /// ``` FromBase64, /// Convert from standard Base64 with NO padding. 
/// /// # Example /// /// ``` /// use h2transformer::H2Transformation; /// /// // Input: "AQIDBA" /// let i: Vec<u8> = b"AQIDBA".to_vec(); /// /// // Output: "\x01\x02\x03\x04" /// let o = H2Transformation::FromBase64NoPadding.transform(&i).unwrap(); /// /// assert_eq!(b"\x01\x02\x03\x04".to_vec(), o); /// ``` /// /// # Restrictions / errors /// /// Must be valid Base64 with NO padding whatsoever, and decode to full bytes. /// /// ``` /// use h2transformer::H2Transformation; /// /// let i: Vec<u8> = b"Not valid base64~".to_vec(); /// /// // Error /// assert!(H2Transformation::FromBase64NoPadding.transform(&i).is_err()); /// ``` FromBase64NoPadding, /// Convert from standard Base64 with optional padding, with some attempt /// to ignore problems. /// /// This is a ONE-WAY transformation! /// /// # Example /// /// ``` /// use h2transformer::H2Transformation; /// /// // Input: "AQIDBA=" /// let i: Vec<u8> = b"AQIDBA=".to_vec(); /// /// // Output: "\x01\x02\x03\x04" /// let o = H2Transformation::FromBase64Permissive.transform(&i).unwrap(); /// /// assert_eq!(b"\x01\x02\x03\x04".to_vec(), o); /// ``` /// /// # Restrictions / errors /// /// Must be valid enough Base64. /// /// ``` /// use h2transformer::H2Transformation; /// /// let i: Vec<u8> = b"Not valid base64~".to_vec(); /// /// // Error /// assert!(H2Transformation::FromBase64Permissive.transform(&i).is_err()); /// ``` FromBase64Permissive, /// Convert from URL-safe Base64 with padding - that is, `+` becomes `-` /// and `/` becomes `_`. /// /// # Example /// /// ``` /// use h2transformer::H2Transformation; /// /// // Input: "aa--_z8=" /// let i: Vec<u8> = b"aa--_z8=".to_vec(); /// /// // Output: "\x69\xaf\xbe\xff\x3f" /// let o = H2Transformation::FromBase64URL.transform(&i).unwrap(); /// /// assert_eq!(b"\x69\xaf\xbe\xff\x3f".to_vec(), o); /// ``` /// /// # Restrictions / errors /// /// Must be valid Base64 with correct padding and decode to full bytes. 
/// /// ``` /// use h2transformer::H2Transformation; /// /// let i: Vec<u8> = b"Not valid base64~".to_vec(); /// /// // Error /// assert!(H2Transformation::FromBase64URL.transform(&i).is_err()); /// ``` FromBase64URL, /// Convert from URL-safe Base64 with NO padding. /// /// # Example /// /// ``` /// use h2transformer::H2Transformation; /// /// // Input: "aa--_z8" /// let i: Vec<u8> = b"aa--_z8".to_vec(); /// /// // Output: "\x69\xaf\xbe\xff\x3f" /// let o = H2Transformation::FromBase64URLNoPadding.transform(&i).unwrap(); /// /// assert_eq!(b"\x69\xaf\xbe\xff\x3f".to_vec(), o); /// ``` /// /// # Restrictions / errors /// /// Must be valid Base64 with NO padding whatsoever, and decode to full bytes. /// /// ``` /// use h2transformer::H2Transformation; /// /// let i: Vec<u8> = b"Not valid base64~".to_vec(); /// /// // Error /// assert!(H2Transformation::FromBase64URLNoPadding.transform(&i).is_err()); /// ``` FromBase64URLNoPadding, /// Convert from URL-safe Base64URL with optional padding, with some attempt /// to ignore problems. /// /// This is a ONE-WAY transformation! /// /// # Example /// /// ``` /// use h2transformer::H2Transformation; /// /// // Input: "aa--_z8" /// let i: Vec<u8> = b"aa--_z8".to_vec(); /// /// // Output: "\x69\xaf\xbe\xff\x3f" /// let o = H2Transformation::FromBase64URLPermissive.transform(&i).unwrap(); /// /// assert_eq!(b"\x69\xaf\xbe\xff\x3f".to_vec(), o); /// ``` /// /// # Restrictions / errors /// /// Must be valid enough Base64. /// /// ``` /// use h2transformer::H2Transformation; /// /// let i: Vec<u8> = b"Not valid base64~".to_vec(); /// /// // Error /// assert!(H2Transformation::FromBase64URLPermissive.transform(&i).is_err()); /// ``` FromBase64URLPermissive, /// Convert from standard Base32 with padding. Case is ignored. 
/// /// # Example /// /// ``` /// use h2transformer::H2Transformation; /// /// // Input: "AEBAGBA=" /// let i: Vec<u8> = b"AEBAGBA=".to_vec(); /// /// // Output: "\x01\x02\x03\x04" /// let o = H2Transformation::FromBase32.transform(&i).unwrap(); /// /// assert_eq!(b"\x01\x02\x03\x04".to_vec(), o); /// ``` /// /// # Restrictions / errors /// /// Must be valid Base32 with correct padding and decode to full bytes. /// /// ``` /// use h2transformer::H2Transformation; /// /// let i: Vec<u8> = b"Not valid base32~".to_vec(); /// /// // Error /// assert!(H2Transformation::FromBase32.transform(&i).is_err()); /// ``` FromBase32, /// Convert from standard Base32 with no padding. Case is ignored. /// /// # Example /// /// ``` /// use h2transformer::H2Transformation; /// /// // Input: "AEBAGBA" /// let i: Vec<u8> = b"AEBAGBA".to_vec(); /// /// // Output: "\x01\x02\x03\x04" /// let o = H2Transformation::FromBase32NoPadding.transform(&i).unwrap(); /// /// assert_eq!(b"\x01\x02\x03\x04".to_vec(), o); /// ``` /// /// # Restrictions / errors /// /// Must be valid Base32 with no padding and decode to full bytes. /// /// ``` /// use h2transformer::H2Transformation; /// /// let i: Vec<u8> = b"Not valid base32~".to_vec(); /// /// // Error /// assert!(H2Transformation::FromBase32NoPadding.transform(&i).is_err()); /// ``` FromBase32NoPadding, /// Convert from Base32 using the Crockford alphabet, which does not allow /// padding. Case is ignored, and ambiguous letters (like i/l/L) are /// treated the same. Untransforming is possible, but will be normalized. /// /// # Example /// /// ``` /// use h2transformer::H2Transformation; /// /// // Input: "91JPRV3F" /// let i: Vec<u8> = b"91JPRV3F".to_vec(); /// /// // Output: "Hello" /// let o = H2Transformation::FromBase32Crockford.transform(&i).unwrap(); /// /// assert_eq!(b"Hello".to_vec(), o); /// ``` /// /// # Restrictions / errors /// /// Must be valid Base32 Crockford with no padding and decode to full bytes. 
/// /// ``` /// use h2transformer::H2Transformation; /// /// let i: Vec<u8> = b"Not valid base32~".to_vec(); /// /// // Error /// assert!(H2Transformation::FromBase32Crockford.transform(&i).is_err()); /// ``` FromBase32Crockford, /// Convert from standard Base32 with optional padding. Any non-Base32 /// characters are ignored and discarded. /// /// This is a ONE-WAY transformation! /// /// # Example /// /// ``` /// use h2transformer::H2Transformation; /// /// // Input: "AEBAGBA=" /// let i: Vec<u8> = b"AEBAGBA=".to_vec(); /// /// // Output: "\x01\x02\x03\x04" /// let o = H2Transformation::FromBase32.transform(&i).unwrap(); /// /// assert_eq!(b"\x01\x02\x03\x04".to_vec(), o); /// ``` /// /// # Restrictions / errors /// /// Must be close enough to Base32 and decode to full bytes. /// /// ``` /// use h2transformer::H2Transformation; /// /// let i: Vec<u8> = b"Not valid base32~0123456789".to_vec(); /// /// // Error /// assert!(H2Transformation::FromBase32Permissive.transform(&i).is_err()); /// ``` FromBase32Permissive, /// Convert from Base32 using the Crockford alphabet, but allow optional /// padding. Case is ignored, and ambiguous letters (like i/l/L) are /// treated the same. All non-Base32 characters are ignored. /// /// This is a ONE-WAY transformation! 
/// /// # Example /// /// ``` /// use h2transformer::H2Transformation; /// /// // Input: "91JPRV3F==" /// let i: Vec<u8> = b"91JPRV3F==".to_vec(); /// /// // Output: "Hello" /// let o = H2Transformation::FromBase32CrockfordPermissive.transform(&i).unwrap(); /// /// assert_eq!(b"Hello".to_vec(), o); /// ``` /// /// # Restrictions / errors /// /// Must be valid enough Base32 Crockford and decode to full bytes (the /// letter 'u', for example, is not allowed) /// /// ``` /// use h2transformer::H2Transformation; /// /// let i: Vec<u8> = b"uuuuu".to_vec(); /// /// // Error /// assert!(H2Transformation::FromBase32CrockfordPermissive.transform(&i).is_err()); /// ``` FromBase32CrockfordPermissive, /// Convert from Zlib "Deflated" format with no header. Uses the /// [inflate](https://github.com/image-rs/inflate) library. /// /// This is a ONE-WAY transformation! /// /// # Restrictions / errors /// /// Must be valid deflated data. FromDeflated, /// Convert from Zlib "Deflated" format with a header. Uses the /// [inflate](https://github.com/image-rs/inflate) library. /// /// This is a ONE-WAY transformation! /// /// # Restrictions / errors /// /// Must be valid deflated data with a valid checksum. FromDeflatedZlib, /// Convert from a hex string. Case is ignored. /// /// # Example /// /// ``` /// use h2transformer::H2Transformation; /// /// // Input: "41424344" /// let i: Vec<u8> = b"41424344".to_vec(); /// /// // Output: "ABCD" /// let o = H2Transformation::FromHex.transform(&i).unwrap(); /// /// assert_eq!(b"ABCD".to_vec(), o); /// ``` /// /// # Restrictions / errors /// /// Must be a hex string with an even length, made up of the digits 0-9 /// and a-f. FromHex, } /// A list of transformations that can automatically be detected. /// /// This is used as a basis for the `detect()` call. Many transformations /// are overly broad (such as `FromBase32Permissive`), overly useless (such as /// `Null`), or require configuration (such as `FromHex`). 
We skip those and /// only look at potentially interesting transformations. const TRANSFORMATIONS_THAT_CAN_BE_DETECTED: [H2Transformation; 10] = [ H2Transformation::FromBase64, H2Transformation::FromBase64NoPadding, H2Transformation::FromBase64URL, H2Transformation::FromBase64URLNoPadding, H2Transformation::FromBase32, H2Transformation::FromBase32NoPadding, H2Transformation::FromBase32Crockford, H2Transformation::FromDeflated, H2Transformation::FromDeflatedZlib, H2Transformation::FromHex, ]; impl H2Transformation { fn transform_null(buffer: &Vec<u8>) -> SimpleResult<Vec<u8>> { Ok(buffer.clone()) } fn untransform_null(buffer: &Vec<u8>) -> SimpleResult<Vec<u8>> { Ok(buffer.clone()) } fn check_null(_buffer: &Vec<u8>) -> bool { true } fn transform_xor(buffer: &Vec<u8>, xs: XorSize) -> SimpleResult<Vec<u8>> { if !Self::check_xor(buffer, xs) { bail!("Xor failed: Xor isn't a multiple of the buffer size"); } // Clone the buffer so we can edit in place let mut buffer = buffer.clone(); match xs { XorSize::EightBit(c) => { // Transform in-place, since we can for n in &mut buffer { *n = *n ^ c; } }, XorSize::SixteenBit(c) => { let xorer: Vec<u8> = vec![ ((c >> 8) & 0x00FF) as u8, ((c >> 0) & 0x00FF) as u8, ]; let mut xor_position: usize = 0; for n in &mut buffer { *n = *n ^ (xorer[xor_position]); xor_position = (xor_position + 1) % 2; } }, XorSize::ThirtyTwoBit(c) => { let xorer: Vec<u8> = vec![ ((c >> 24) & 0x00FF) as u8, ((c >> 16) & 0x00FF) as u8, ((c >> 8) & 0x00FF) as u8, ((c >> 0) & 0x00FF) as u8, ]; let mut xor_position: usize = 0; for n in &mut buffer { *n = *n ^ (xorer[xor_position]); xor_position = (xor_position + 1) % 4; } }, XorSize::SixtyFourBit(c) => { let xorer: Vec<u8> = vec![ ((c >> 56) & 0x00FF) as u8, ((c >> 48) & 0x00FF) as u8, ((c >> 40) & 0x00FF) as u8, ((c >> 32) & 0x00FF) as u8, ((c >> 24) & 0x00FF) as u8, ((c >> 16) & 0x00FF) as u8, ((c >> 8) & 0x00FF) as u8, ((c >> 0) & 0x00FF) as u8, ]; let mut xor_position: usize = 0; for n in &mut buffer { *n = *n 
^ (xorer[xor_position]); xor_position = (xor_position + 1) % 8; } }, }; Ok(buffer) } fn untransform_xor(buffer: &Vec<u8>, xs: XorSize) -> SimpleResult<Vec<u8>> { // Untransform is identical to transform Self::transform_xor(buffer, xs) } fn check_xor(buffer: &Vec<u8>, xs: XorSize) -> bool { match xs { XorSize::EightBit(_) => true, XorSize::SixteenBit(_) => { (buffer.len() % 2) == 0 }, XorSize::ThirtyTwoBit(_) => { (buffer.len() % 4) == 0 }, XorSize::SixtyFourBit(_) => { (buffer.len() % 8) == 0 }, } } fn transform_base64(buffer: &Vec<u8>, config: base64::Config) -> SimpleResult<Vec<u8>> { let original_length = buffer.len(); // Decode let out = match base64::decode_config(buffer, config) { Ok(r) => r, Err(e) => bail!("Couldn't decode base64: {}", e), }; // Ensure it encodes to the same length - we can't handle length changes if base64::encode_config(&out, config).len() != original_length { bail!("Base64 didn't decode correctly (the length changed with decode->encode, check padding)"); } Ok(out) } fn untransform_base64(buffer: &Vec<u8>, config: base64::Config) -> SimpleResult<Vec<u8>> { Ok(base64::encode_config(buffer, config).into_bytes()) } fn check_base64(buffer: &Vec<u8>, config: base64::Config) -> bool { // The only reasonable way to check is by just doing it (since the // config is opaque to us) Self::transform_base64(buffer, config).is_ok() } fn transform_base64_permissive(buffer: &Vec<u8>, config: base64::Config) -> SimpleResult<Vec<u8>> { // Filter out any control characters and spaces let buffer: Vec<u8> = buffer.clone().into_iter().filter(|b| { *b > 0x20 && *b < 0x80 }).collect(); // Decode let out = match base64::decode_config(buffer, config) { Ok(r) => r, Err(e) => bail!("Couldn't decode base64: {}", e), }; Ok(out) } fn check_base64_permissive(buffer: &Vec<u8>, config: base64::Config) -> bool { // The only reasonable way to check is by just doing it (since the // config is opaque to us) Self::transform_base64_permissive(buffer, config).is_ok() } fn 
transform_base32(buffer: &Vec<u8>, alphabet: base32::Alphabet) -> SimpleResult<Vec<u8>> { let original_length = buffer.len(); let s = match std::str::from_utf8(buffer) { Ok(s) => s, Err(e) => bail!("Couldn't convert the buffer into a string: {}", e), }; // Decode let out = match base32::decode(alphabet, &s) { Some(r) => r, None => bail!("Couldn't decode base32"), }; // Ensure it encodes to the same length - we can't handle length changes if base32::encode(alphabet, &out).into_bytes().len() != original_length { bail!("Base32 didn't decode correctly"); } Ok(out) } fn untransform_base32(buffer: &Vec<u8>, alphabet: base32::Alphabet) -> SimpleResult<Vec<u8>> { Ok(base32::encode(alphabet, buffer).into_bytes()) } fn check_base32(buffer: &Vec<u8>, alphabet: base32::Alphabet) -> bool { // The only reasonable way to check is by just doing it Self::transform_base32(buffer, alphabet).is_ok() } fn transform_base32_permissive(buffer: &Vec<u8>, alphabet: base32::Alphabet) -> SimpleResult<Vec<u8>> { // Filter out any obviously impossible characters let buffer: Vec<u8> = buffer.clone().into_iter().filter(|b| { (*b >= 0x30 && *b <= 0x39) || (*b >= 0x41 && *b <= 0x5a) || (*b >= 0x61 && *b <= 0x7a) }).collect(); let s = match String::from_utf8(buffer) { Ok(s) => s, Err(e) => bail!("Couldn't convert the buffer into a string: {}", e), }; // Decode match base32::decode(alphabet, &s) { Some(r) => Ok(r), None => bail!("Couldn't decode base32"), } } fn check_base32_permissive(buffer: &Vec<u8>, alphabet: base32::Alphabet) -> bool { // The only reasonable way to check is by just doing it Self::transform_base32_permissive(buffer, alphabet).is_ok() } fn transform_deflated(buffer: &Vec<u8>) -> SimpleResult<Vec<u8>> { match inflate::inflate_bytes(buffer) { Ok(b) => Ok(b), Err(e) => bail!("Couldn't inflate: {}", e), } } fn check_deflated(buffer: &Vec<u8>) -> bool { // Extra short strings kinda sorta decode, but a zero-length string is // a minimum 6 characters so just enforce that buffer.len() > 5 
&& Self::transform_deflated(buffer).is_ok() } fn transform_deflated_zlib(buffer: &Vec<u8>) -> SimpleResult<Vec<u8>> { match inflate::inflate_bytes_zlib(buffer) { Ok(b) => Ok(b), Err(e) => bail!("Couldn't inflate: {}", e), } } fn check_deflated_zlib(buffer: &Vec<u8>) -> bool { // The only reasonable way to check is by just doing it Self::transform_deflated_zlib(buffer).is_ok() } fn transform_hex(buffer: &Vec<u8>) -> SimpleResult<Vec<u8>> { let s = match std::str::from_utf8(buffer) { Ok(s) => s, Err(e) => bail!("Couldn't convert the buffer into a string: {}", e), }; match hex::decode(s) { Ok(s) => Ok(s), Err(e) => bail!("Couldn't decode hex: {}", e), } } fn untransform_hex(buffer: &Vec<u8>) -> SimpleResult<Vec<u8>> { Ok(hex::encode(buffer).into_bytes()) } fn check_hex(buffer: &Vec<u8>) -> bool { Self::transform_hex(buffer).is_ok() } // fn transform_XXX(buffer: &Vec<u8>) -> SimpleResult<Vec<u8>> { // bail!("Not implemented yet!"); // } // fn untransform_XXX(buffer: &Vec<u8>) -> SimpleResult<Vec<u8>> { // bail!("Not implemented yet!"); // } // fn check_XXX(buffer: &Vec<u8>) -> bool { // bail!("Not implemented yet!"); // } /// Transform a buffer into another buffer, without changing the original. 
pub fn transform(&self, buffer: &Vec<u8>) -> SimpleResult<Vec<u8>> { // We can never handle 0-length buffers if buffer.len() == 0 { bail!("Cannot transform 0-length buffer"); } match self { Self::Null => Self::transform_null(buffer), Self::XorByConstant(xs) => Self::transform_xor(buffer, *xs), Self::FromBase64 => Self::transform_base64(buffer, base64::STANDARD), Self::FromBase64NoPadding => Self::transform_base64(buffer, base64::STANDARD_NO_PAD), Self::FromBase64Permissive => Self::transform_base64_permissive(buffer, base64::STANDARD_NO_PAD), Self::FromBase64URL => Self::transform_base64(buffer, base64::URL_SAFE), Self::FromBase64URLNoPadding => Self::transform_base64(buffer, base64::URL_SAFE_NO_PAD), Self::FromBase64URLPermissive => Self::transform_base64_permissive(buffer, base64::URL_SAFE_NO_PAD), Self::FromBase32 => Self::transform_base32(buffer, base32::Alphabet::RFC4648 { padding: true }), Self::FromBase32NoPadding => Self::transform_base32(buffer, base32::Alphabet::RFC4648 { padding: false }), Self::FromBase32Crockford => Self::transform_base32(buffer, base32::Alphabet::Crockford), Self::FromBase32Permissive => Self::transform_base32_permissive(buffer, base32::Alphabet::RFC4648 { padding: false }), Self::FromBase32CrockfordPermissive => Self::transform_base32_permissive(buffer, base32::Alphabet::Crockford), Self::FromDeflated => Self::transform_deflated(buffer), Self::FromDeflatedZlib => Self::transform_deflated_zlib(buffer), Self::FromHex => Self::transform_hex(buffer), //Self::From => Self::transform_(buffer), } } /// Transform a buffer backwards, if possible. The length of the result will /// match the length of the original buffer, but the data may be normalized. /// The original buffer is not changed. 
pub fn untransform(&self, buffer: &Vec<u8>) -> SimpleResult<Vec<u8>> { // We can never handle 0-length buffers if buffer.len() == 0 { bail!("Cannot untransform 0-length buffer"); } match self { Self::Null => Self::untransform_null(buffer), Self::XorByConstant(xs) => Self::untransform_xor(buffer, *xs), Self::FromBase64 => Self::untransform_base64(buffer, base64::STANDARD), Self::FromBase64NoPadding => Self::untransform_base64(buffer, base64::STANDARD_NO_PAD), Self::FromBase64Permissive => bail!("Base64Permissive is one-way"), Self::FromBase64URL => Self::untransform_base64(buffer, base64::URL_SAFE), Self::FromBase64URLNoPadding => Self::untransform_base64(buffer, base64::URL_SAFE_NO_PAD), Self::FromBase64URLPermissive => bail!("Base64URLPermissive is one-way"), Self::FromBase32 => Self::untransform_base32(buffer, base32::Alphabet::RFC4648 { padding: true }), Self::FromBase32NoPadding => Self::untransform_base32(buffer, base32::Alphabet::RFC4648 { padding: false }), Self::FromBase32Crockford => Self::untransform_base32(buffer, base32::Alphabet::Crockford), Self::FromBase32Permissive => bail!("Base32Permissive is one-way"), Self::FromBase32CrockfordPermissive => bail!("Base32CrockfordPermissive is one-way"), Self::FromDeflated => bail!("Deflated is one-way"), Self::FromDeflatedZlib => bail!("DeflatedZlib is one-way"), Self::FromHex => Self::untransform_hex(buffer), //Self::From => Self::untransform_(buffer), } } /// Check whether a buffer can be transformed by this variant. /// /// Warning: This is a semi-expensive operation for most variants; unless /// the transformation is based on length or another easy-to-check factor, /// we simply clone the data and attempt to transform it. 
pub fn can_transform(&self, buffer: &Vec<u8>) -> bool { // We can never handle 0-length buffers if buffer.len() == 0 { return false; } match self { Self::Null => Self::check_null(buffer), Self::XorByConstant(xs) => Self::check_xor(buffer, *xs), Self::FromBase64 => Self::check_base64(buffer, base64::STANDARD), Self::FromBase64NoPadding => Self::check_base64(buffer, base64::STANDARD_NO_PAD), Self::FromBase64Permissive => Self::check_base64_permissive(buffer, base64::STANDARD_NO_PAD), Self::FromBase64URL => Self::check_base64(buffer, base64::URL_SAFE), Self::FromBase64URLNoPadding => Self::check_base64(buffer, base64::URL_SAFE_NO_PAD), Self::FromBase64URLPermissive => Self::check_base64_permissive(buffer, base64::URL_SAFE_NO_PAD), Self::FromBase32 => Self::check_base32(buffer, base32::Alphabet::RFC4648 { padding: true }), Self::FromBase32NoPadding => Self::check_base32(buffer, base32::Alphabet::RFC4648 { padding: false }), Self::FromBase32Crockford => Self::check_base32(buffer, base32::Alphabet::Crockford), Self::FromBase32Permissive => Self::check_base32_permissive(buffer, base32::Alphabet::RFC4648 { padding: false }), Self::FromBase32CrockfordPermissive => Self::check_base32_permissive(buffer, base32::Alphabet::Crockford), Self::FromDeflated => Self::check_deflated(buffer), Self::FromDeflatedZlib => Self::check_deflated_zlib(buffer), Self::FromHex => Self::check_hex(buffer), //Self::From => Self::check_(buffer), } } /// Determines if the transformation can be undone. /// /// Does not require a buffer, because the variant itself is enough to /// make this determination. 
pub fn is_two_way(&self) -> bool { match self { Self::Null => true, Self::XorByConstant(_) => true, Self::FromBase64 => true, Self::FromBase64NoPadding => true, Self::FromBase64URL => true, Self::FromBase64URLNoPadding => true, Self::FromBase32 => true, Self::FromBase32NoPadding => true, Self::FromBase32Crockford => true, Self::FromHex => true, Self::FromBase64Permissive => false, Self::FromBase64URLPermissive => false, Self::FromBase32Permissive => false, Self::FromBase32CrockfordPermissive => false, Self::FromDeflated => false, Self::FromDeflatedZlib => false, } } /// Returns a list of possible transformations that will work on this /// buffer. /// /// This is VERY expensive, as it attempts to transform using every /// potential variant. pub fn detect(buffer: &Vec<u8>) -> Vec<&H2Transformation> { TRANSFORMATIONS_THAT_CAN_BE_DETECTED.iter().filter(|t| { t.can_transform(buffer) }).collect() } } #[cfg(test)] mod tests { use super::*; use pretty_assertions::assert_eq; #[test] fn test_null() -> SimpleResult<()> { assert_eq!(true, H2Transformation::Null.is_two_way()); let tests: Vec<(Vec<u8>, SimpleResult<Vec<u8>>)> = vec![ (vec![1], Ok(vec![1])), (vec![1, 2, 3], Ok(vec![1, 2, 3])), (vec![1, 2, 3, 4, 5], Ok(vec![1, 2, 3, 4, 5])), ]; for (test, expected) in tests { assert!(H2Transformation::Null.can_transform(&test)); let result = H2Transformation::Null.transform(&test); assert_eq!(expected, result); let result = H2Transformation::Null.untransform(&result?); assert_eq!(Ok(test), result); } Ok(()) } #[test] fn test_xor8() -> SimpleResult<()> { assert_eq!(true, H2Transformation::XorByConstant(XorSize::EightBit(0)).is_two_way()); let tests: Vec<(u8, Vec<u8>, SimpleResult<Vec<u8>>)> = vec![ (0, vec![1], Ok(vec![1])), (0, vec![1, 2, 3], Ok(vec![1, 2, 3])), (0, vec![1, 2, 3, 4, 5], Ok(vec![1, 2, 3, 4, 5])), (1, vec![1], Ok(vec![0])), (1, vec![1, 2, 3], Ok(vec![0, 3, 2])), (1, vec![1, 2, 3, 4, 5], Ok(vec![0, 3, 2, 5, 4])), (0xFF, vec![1], Ok(vec![254])), (0xFF, vec![1, 2, 3], 
Ok(vec![254, 253, 252])), (0xFF, vec![1, 2, 3, 4, 5], Ok(vec![254, 253, 252, 251, 250])), ]; for (c, test, expected) in tests { assert!(H2Transformation::XorByConstant(XorSize::EightBit(c)).can_transform(&test)); let result = H2Transformation::XorByConstant(XorSize::EightBit(c)).transform(&test); assert_eq!(expected, result); let result = H2Transformation::XorByConstant(XorSize::EightBit(c)).untransform(&result?); assert_eq!(Ok(test), result); } Ok(()) } #[test] fn test_xor16() -> SimpleResult<()> { let t = H2Transformation::XorByConstant(XorSize::SixteenBit(0x0000)); // It can transform even-length vectors assert!(t.can_transform(&vec![0x11, 0x22])); assert!(t.can_transform(&vec![0x11, 0x22, 0x33, 0x44])); // It cannot transform odd-length vectors assert!(!t.can_transform(&vec![0x11])); assert!(!t.can_transform(&vec![0x11, 0x22, 0x33])); // Simplest examples let t = H2Transformation::XorByConstant(XorSize::SixteenBit(0x0000)); assert_eq!(vec![0x11, 0x22, 0x33, 0x44, 0x55, 0x66], t.transform(&vec![0x11, 0x22, 0x33, 0x44, 0x55, 0x66])?); let t = H2Transformation::XorByConstant(XorSize::SixteenBit(0xFFFF)); assert_eq!(vec![0xEE, 0xDD, 0xCC, 0xBB, 0xAA, 0x99], t.transform(&vec![0x11, 0x22, 0x33, 0x44, 0x55, 0x66])?); // More complex examples let t = H2Transformation::XorByConstant(XorSize::SixteenBit(0x1234)); // First byte: 0x11 & 0x12 = 0x03 // Second byte: 0x22 & 0x34 = 0x16 assert_eq!(vec![0x03, 0x16], t.transform(&vec![0x11, 0x22])?); // Third byte: 0x33 & 0x12 = 0x21 // Fourth byte: 0x44 & 0x34 = 0x70 assert_eq!(vec![0x03, 0x16, 0x21, 0x70], t.transform(&vec![0x11, 0x22, 0x33, 0x44])?); // Fail on bad strings assert!(t.transform(&vec![0x11]).is_err()); Ok(()) } #[test] fn test_xor32() -> SimpleResult<()> { let t = H2Transformation::XorByConstant(XorSize::ThirtyTwoBit(0x00000000)); // It can transform multiple-of-4 vectors assert!(t.can_transform(&vec![0x11, 0x22, 0x33, 0x44])); assert!(t.can_transform(&vec![0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88])); // 
It cannot transform odd-length vectors assert!(!t.can_transform(&vec![0x11])); assert!(!t.can_transform(&vec![0x11, 0x33])); assert!(!t.can_transform(&vec![0x11, 0x22, 0x33])); assert!(!t.can_transform(&vec![0x11, 0x22, 0x33, 0x44, 0x55])); // Simplest examples let t = H2Transformation::XorByConstant(XorSize::ThirtyTwoBit(0x00000000)); assert_eq!(vec![0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88], t.transform(&vec![0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88])?); let t = H2Transformation::XorByConstant(XorSize::ThirtyTwoBit(0xFFFFFFFF)); assert_eq!(vec![0xEE, 0xDD, 0xCC, 0xBB, 0xAA, 0x99, 0x88, 0x77], t.transform(&vec![0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88])?); // More complex examples let t = H2Transformation::XorByConstant(XorSize::ThirtyTwoBit(0x12345678)); // First byte: 0x11 & 0x12 = 0x03 // Second byte: 0x22 & 0x34 = 0x16 // Third byte: 0x33 & 0x56 = 0x65 // Fourth byte: 0x44 & 0x78 = 0x3c assert_eq!(vec![0x03, 0x16, 0x65, 0x3c], t.transform(&vec![0x11, 0x22, 0x33, 0x44])?); // Fifth byte: 0x55 & 0x12 = 0x47 // Sixth byte: 0x66 & 0x34 = 0x52 // Seventh byte: 0x77 & 0x56 = 0x21 // Eighth byte: 0x88 & 0x78 = 0xf0 assert_eq!(vec![0x03, 0x16, 0x65, 0x3c, 0x47, 0x52, 0x21, 0xf0], t.transform(&vec![0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88])?); //assert_eq!(vec![0x03, 0x16, 0x21, 0x70], t.transform(&vec![0x11, 0x22, 0x33, 0x44])?); Ok(()) } #[test] fn test_xor64() -> SimpleResult<()> { let t = H2Transformation::XorByConstant(XorSize::SixtyFourBit(0x0000000000000000)); // It can transform multiple-of-8 vectors assert!(t.can_transform(&vec![0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77])); assert!(t.can_transform(&vec![0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff])); // It cannot transform anything else assert!(!t.can_transform(&vec![0x00])); assert!(!t.can_transform(&vec![0x00, 0x11])); assert!(!t.can_transform(&vec![0x00, 0x11, 0x22])); assert!(!t.can_transform(&vec![0x00, 0x11, 0x22, 0x33])); 
assert!(!t.can_transform(&vec![0x00, 0x11, 0x22, 0x33, 0x44])); assert!(!t.can_transform(&vec![0x00, 0x11, 0x22, 0x33, 0x44, 0x55])); assert!(!t.can_transform(&vec![0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66])); assert!(!t.can_transform(&vec![0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88])); // Simplest examples let t = H2Transformation::XorByConstant(XorSize::SixtyFourBit(0x0000000000000000)); assert_eq!( vec![0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff], t.transform(&vec![0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff])? ); let t = H2Transformation::XorByConstant(XorSize::SixtyFourBit(0xFFFFFFFFFFFFFFFF)); assert_eq!( vec![0xff, 0xee, 0xdd, 0xcc, 0xbb, 0xaa, 0x99, 0x88, 0x77, 0x66, 0x55, 0x44, 0x33, 0x22, 0x11, 0x00], t.transform(&vec![0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff])? ); // // More complex examples let t = H2Transformation::XorByConstant(XorSize::SixtyFourBit(0x0123456789abcdef)); // First byte: 0x00 & 0x01 = 0x01 // Second byte: 0x11 & 0x23 = 0x32 // Third byte: 0x22 & 0x45 = 0x67 // Fourth byte: 0x33 & 0x67 = 0x54 // Fifth byte: 0x44 & 0x89 = 0xcd // Sixth byte: 0x55 & 0xab = 0xfe // Seventh byte: 0x66 & 0xcd = 0xab // Eighth byte: 0x77 & 0xef = 0x98 assert_eq!( vec![0x01, 0x32, 0x67, 0x54, 0xcd, 0xfe, 0xab, 0x98], t.transform(&vec![0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77])? ); // First byte: 0x88 & 0x01 = 0x89 // Second byte: 0x99 & 0x23 = 0xba // Third byte: 0xaa & 0x45 = 0xef // Fourth byte: 0xbb & 0x67 = 0xdc // Fifth byte: 0xcc & 0x89 = 0x45 // Sixth byte: 0xdd & 0xab = 0x76 // Seventh byte: 0xee & 0xcd = 0x23 // Eighth byte: 0xff & 0xef = 0x10 assert_eq!( vec![0x01, 0x32, 0x67, 0x54, 0xcd, 0xfe, 0xab, 0x98, 0x89, 0xba, 0xef, 0xdc, 0x45, 0x76, 0x23, 0x10], t.transform(&vec![0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff])? 
); Ok(()) } // Just a small convenience function for tests fn b(s: &[u8]) -> Vec<u8> { s.to_vec() } #[test] fn test_base64_standard() -> SimpleResult<()> { let t = H2Transformation::FromBase64; assert_eq!(true, t.is_two_way()); // Short string: "\x00" assert!(t.can_transform(&b(b"AA=="))); let result = t.transform(&b(b"AA=="))?; assert_eq!(b(b"\x00"), result); let original = t.untransform(&result)?; assert_eq!(b(b"AA=="), original); // Longer string: "\x00\x01\x02\x03\x04\x05\x06" assert!(t.can_transform(&b(b"AAECAwQFBg=="))); let result = t.transform(&b(b"AAECAwQFBg=="))?; assert_eq!(b(b"\x00\x01\x02\x03\x04\x05\x06"), result); let original = t.untransform(&result)?; assert_eq!(b(b"AAECAwQFBg=="), original); // Weird string: "\x69\xaf\xbe\xff\x3f" assert!(t.can_transform(&b(b"aa++/z8="))); let result = t.transform(&b(b"aa++/z8="))?; assert_eq!(b(b"\x69\xaf\xbe\xff\x3f"), result); let original = t.untransform(&result)?; assert_eq!(b(b"aa++/z8="), original); // Do padding wrong assert!(!t.can_transform(&b(b"AA"))); assert!(!t.can_transform(&b(b"AA="))); assert!(!t.can_transform(&b(b"AA==="))); assert!(!t.can_transform(&b(b"AA===="))); assert!(t.transform(&b(b"AA")).is_err()); assert!(t.transform(&b(b"AA=")).is_err()); assert!(t.transform(&b(b"AA===")).is_err()); assert!(t.transform(&b(b"AA====")).is_err()); // Wrong characters assert!(t.transform(&b(b"aa--_z8=")).is_err()); Ok(()) } #[test] fn test_base64_standard_no_padding() -> SimpleResult<()> { let t = H2Transformation::FromBase64NoPadding; assert_eq!(true, t.is_two_way()); // Short string: "\x00" assert!(t.can_transform(&b(b"AA"))); let result = t.transform(&b(b"AA"))?; assert_eq!(b(b"\x00"), result); let original = t.untransform(&result)?; assert_eq!(b(b"AA"), original); // Longer string: "\x00\x01\x02\x03\x04\x05\x06" assert!(t.can_transform(&b(b"AAECAwQFBg"))); let result = t.transform(&b(b"AAECAwQFBg"))?; assert_eq!(b(b"\x00\x01\x02\x03\x04\x05\x06"), result); let original = t.untransform(&result)?; 
assert_eq!(b(b"AAECAwQFBg"), original); // Weird string: "\x69\xaf\xbe\xff\x3f" let result = t.transform(&b(b"aa++/z8"))?; assert_eq!(b(b"\x69\xaf\xbe\xff\x3f"), result); let original = t.untransform(&result)?; assert_eq!(b(b"aa++/z8"), original); // Do padding wrong assert!(t.transform(&b(b"AA=")).is_err()); assert!(t.transform(&b(b"AA==")).is_err()); assert!(t.transform(&b(b"AA===")).is_err()); assert!(t.transform(&b(b"AA====")).is_err()); // Wrong characters assert!(t.transform(&b(b"aa--_z8")).is_err()); Ok(()) } #[test] fn test_base64_permissive() -> SimpleResult<()> { let t = H2Transformation::FromBase64Permissive; assert_eq!(false, t.is_two_way()); // Short string: "\x00" with various padding assert!(t.can_transform(&b(b"AA"))); assert!(t.can_transform(&b(b"AA="))); assert!(t.can_transform(&b(b"AA=="))); assert_eq!(b(b"\x00"), t.transform(&b(b"AA"))?); assert_eq!(b(b"\x00"), t.transform(&b(b"AA="))?); assert_eq!(b(b"\x00"), t.transform(&b(b"AA=="))?); // Add a bunch of control characters assert_eq!(b(b"\x00\x00\x00\x00"), t.transform(&b(b"A A\nAAA\n \t\rA=\n="))?); Ok(()) } #[test] fn test_base64_url() -> SimpleResult<()> { let t = H2Transformation::FromBase64URL; assert_eq!(true, t.is_two_way()); // Short string: "\x00" let result = t.transform(&b(b"AA=="))?; assert_eq!(b(b"\x00"), result); let original = t.untransform(&result)?; assert_eq!(b(b"AA=="), original); // Longer string: "\x00\x01\x02\x03\x04\x05\x06" let result = t.transform(&b(b"AAECAwQFBg=="))?; assert_eq!(b(b"\x00\x01\x02\x03\x04\x05\x06"), result); let original = t.untransform(&result)?; assert_eq!(b(b"AAECAwQFBg=="), original); // Weird string: "\x69\xaf\xbe\xff\x3f" let result = t.transform(&b(b"aa--_z8="))?; assert_eq!(b(b"\x69\xaf\xbe\xff\x3f"), result); let original = t.untransform(&result)?; assert!(t.can_transform(&b(b"aa--_z8="))); assert_eq!(b(b"aa--_z8="), original); // Do padding wrong assert!(t.transform(&b(b"AA")).is_err()); assert!(t.transform(&b(b"AA=")).is_err()); 
assert!(t.transform(&b(b"AA===")).is_err()); assert!(t.transform(&b(b"AA====")).is_err()); // Wrong characters assert!(!t.can_transform(&b(b"aa++/z8="))); assert!(t.transform(&b(b"aa++/z8=")).is_err()); Ok(()) } #[test] fn test_base64_standard_url_no_padding() -> SimpleResult<()> { let t = H2Transformation::FromBase64URLNoPadding; assert_eq!(true, t.is_two_way()); // Short string: "\x00" let result = t.transform(&b(b"AA"))?; assert_eq!(b(b"\x00"), result); let original = t.untransform(&result)?; assert_eq!(b(b"AA"), original); // Longer string: "\x00\x01\x02\x03\x04\x05\x06" let result = t.transform(&b(b"AAECAwQFBg"))?; assert_eq!(b(b"\x00\x01\x02\x03\x04\x05\x06"), result); let original = t.untransform(&result)?; assert_eq!(b(b"AAECAwQFBg"), original); // Weird string: "\x69\xaf\xbe\xff\x3f" let result = t.transform(&b(b"aa--_z8"))?; assert_eq!(b(b"\x69\xaf\xbe\xff\x3f"), result); let original = t.untransform(&result)?; assert_eq!(b(b"aa--_z8"), original); // Do padding wrong assert!(t.transform(&b(b"AA=")).is_err()); assert!(t.transform(&b(b"AA==")).is_err()); assert!(t.transform(&b(b"AA===")).is_err()); assert!(t.transform(&b(b"AA====")).is_err()); // Wrong characters assert!(t.transform(&b(b"aa++/z8")).is_err()); Ok(()) } #[test] fn test_base64_url_permissive() -> SimpleResult<()> { let t = H2Transformation::FromBase64URLPermissive; assert_eq!(false, t.is_two_way()); // Short string: "\x00" with various padding assert_eq!(b(b"\x00"), t.transform(&b(b"AA"))?); assert_eq!(b(b"\x00"), t.transform(&b(b"AA="))?); assert_eq!(b(b"\x00"), t.transform(&b(b"AA=="))?); // Add a bunch of control characters assert_eq!(b(b"\x00\x00\x00\x00"), t.transform(&b(b"A A\nAAA\n \t\rA=\n="))?); Ok(()) } #[test] fn test_base32_standard() -> SimpleResult<()> { let t = H2Transformation::FromBase32; assert_eq!(true, t.is_two_way()); // Short string: "\x00" let t = H2Transformation::FromBase32; let result = t.transform(&b(b"IE======"))?; assert_eq!(b(b"A"), result); let original = 
t.untransform(&result)?; assert_eq!(b(b"IE======"), original); // Longer string: "ABCDEF" let t = H2Transformation::FromBase32; let result = t.transform(&b(b"IFBEGRCFIY======"))?; assert_eq!(b(b"ABCDEF"), result); let original = t.untransform(&result)?; assert_eq!(b(b"IFBEGRCFIY======"), original); // It's okay to be case insensitive let t = H2Transformation::FromBase32; let result = t.transform(&b(b"ifbegrcfiy======"))?; assert_eq!(b(b"ABCDEF"), result); let original = t.untransform(&result)?; assert_eq!(b(b"IFBEGRCFIY======"), original); // Do padding wrong let t = H2Transformation::FromBase32; assert!(t.transform(&b(b"IE")).is_err()); assert!(t.transform(&b(b"IE=")).is_err()); assert!(t.transform(&b(b"IE==")).is_err()); assert!(t.transform(&b(b"IE===")).is_err()); assert!(t.transform(&b(b"IE====")).is_err()); assert!(t.transform(&b(b"IE=====")).is_err()); assert!(t.transform(&b(b"IE=======")).is_err()); assert!(t.transform(&b(b"IE========")).is_err()); // Wrong characters let t = H2Transformation::FromBase32; assert!(t.transform(&b(b"I.======")).is_err()); Ok(()) } #[test] fn test_base32_no_padding() -> SimpleResult<()> { let t = H2Transformation::FromBase32NoPadding; assert_eq!(true, t.is_two_way()); // Short string: "\x00" let t = H2Transformation::FromBase32NoPadding; let result = t.transform(&b(b"IE"))?; assert_eq!(b(b"A"), result); let original = t.untransform(&result)?; assert_eq!(b(b"IE"), original); // Longer string: "ABCDEF" let t = H2Transformation::FromBase32NoPadding; let result = t.transform(&b(b"IFBEGRCFIY"))?; assert_eq!(b(b"ABCDEF"), result); let original = t.untransform(&result)?; assert_eq!(b(b"IFBEGRCFIY"), original); // It's okay to be case insensitive let t = H2Transformation::FromBase32NoPadding; let result = t.transform(&b(b"ifbegrcfiy"))?; assert_eq!(b(b"ABCDEF"), result); let original = t.untransform(&result)?; assert_eq!(b(b"IFBEGRCFIY"), original); // Do padding wrong let t = H2Transformation::FromBase32NoPadding; 
assert!(t.transform(&b(b"IE=")).is_err()); assert!(t.transform(&b(b"IE==")).is_err()); assert!(t.transform(&b(b"IE===")).is_err()); assert!(t.transform(&b(b"IE====")).is_err()); assert!(t.transform(&b(b"IE=====")).is_err()); assert!(t.transform(&b(b"IE======")).is_err()); assert!(t.transform(&b(b"IE=======")).is_err()); assert!(t.transform(&b(b"IE========")).is_err()); // Wrong characters let t = H2Transformation::FromBase32NoPadding; assert!(t.transform(&b(b"A.")).is_err()); Ok(()) } #[test] fn test_base32_crockford() -> SimpleResult<()> { let t = H2Transformation::FromBase32Crockford; assert_eq!(true, t.is_two_way()); // Short string: "\x00" let t = H2Transformation::FromBase32Crockford; let result = t.transform(&b(b"84"))?; assert_eq!(b(b"A"), result); let original = t.untransform(&result)?; assert_eq!(b(b"84"), original); // Longer string: "ABCDEF" let t = H2Transformation::FromBase32Crockford; let result = t.transform(&b(b"85146H258R"))?; assert_eq!(b(b"ABCDEF"), result); let original = t.untransform(&result)?; assert_eq!(b(b"85146H258R"), original); // It's okay to be case insensitive let t = H2Transformation::FromBase32Crockford; let result = t.transform(&b(b"85146h258r"))?; assert_eq!(b(b"ABCDEF"), result); let original = t.untransform(&result)?; assert_eq!(b(b"85146H258R"), original); // Do padding wrong let t = H2Transformation::FromBase32Crockford; assert!(t.transform(&b(b"84=")).is_err()); assert!(t.transform(&b(b"84==")).is_err()); assert!(t.transform(&b(b"84===")).is_err()); assert!(t.transform(&b(b"84====")).is_err()); assert!(t.transform(&b(b"84=====")).is_err()); assert!(t.transform(&b(b"84======")).is_err()); assert!(t.transform(&b(b"84=======")).is_err()); assert!(t.transform(&b(b"84========")).is_err()); // Wrong characters let t = H2Transformation::FromBase32Crockford; assert!(t.transform(&b(b"A.")).is_err()); Ok(()) } #[test] fn test_base32_permissive() -> SimpleResult<()> { let t = H2Transformation::FromBase32Permissive; assert_eq!(false, 
t.is_two_way()); // Short string: "\x00" let t = H2Transformation::FromBase32Permissive; let result = t.transform(&b(b"IE======"))?; assert_eq!(b(b"A"), result); // Longer string: "ABCDEF" let t = H2Transformation::FromBase32Permissive; let result = t.transform(&b(b"IFBEGRCFIY======"))?; assert_eq!(b(b"ABCDEF"), result); // It's okay to be case insensitive let t = H2Transformation::FromBase32Permissive; let result = t.transform(&b(b"ifbegrcfiy======"))?; assert_eq!(b(b"ABCDEF"), result); // Do padding wrong let t = H2Transformation::FromBase32Permissive; assert_eq!(b(b"A"), t.transform(&b(b"IE"))?); assert_eq!(b(b"A"), t.transform(&b(b"IE="))?); assert_eq!(b(b"A"), t.transform(&b(b"IE=="))?); assert_eq!(b(b"A"), t.transform(&b(b"IE==="))?); assert_eq!(b(b"A"), t.transform(&b(b"IE===="))?); assert_eq!(b(b"A"), t.transform(&b(b"IE====="))?); assert_eq!(b(b"A"), t.transform(&b(b"IE============="))?); assert_eq!(b(b"A"), t.transform(&b(b"I=============E"))?); assert_eq!(b(b"A"), t.transform(&b(b"IE============="))?); assert_eq!(b(b"A"), t.transform(&b(b"I.@#$...E...======"))?); // We can still error with bad characters assert!(t.transform(&b(b"1234567890")).is_err()); Ok(()) } #[test] fn test_base32_crockford_permissive() -> SimpleResult<()> { let t = H2Transformation::FromBase32CrockfordPermissive; assert_eq!(false, t.is_two_way()); // Short string: "\x00" let t = H2Transformation::FromBase32CrockfordPermissive; let result = t.transform(&b(b"84======"))?; assert_eq!(b(b"A"), result); // Longer string: "ABCDEF" let t = H2Transformation::FromBase32CrockfordPermissive; let result = t.transform(&b(b"85146H258R======"))?; assert_eq!(b(b"ABCDEF"), result); // It's okay to be case insensitive let t = H2Transformation::FromBase32CrockfordPermissive; let result = t.transform(&b(b"85146h258r======"))?; assert_eq!(b(b"ABCDEF"), result); // Do padding wrong let t = H2Transformation::FromBase32CrockfordPermissive; assert_eq!(b(b"A"), t.transform(&b(b"84"))?); assert_eq!(b(b"A"), 
t.transform(&b(b"84="))?); assert_eq!(b(b"A"), t.transform(&b(b"84=="))?); assert_eq!(b(b"A"), t.transform(&b(b"84==="))?); assert_eq!(b(b"A"), t.transform(&b(b"84===="))?); assert_eq!(b(b"A"), t.transform(&b(b"84====="))?); assert_eq!(b(b"A"), t.transform(&b(b"84============="))?); assert_eq!(b(b"A"), t.transform(&b(b"8==---========4"))?); assert_eq!(b(b"A"), t.transform(&b(b"84============="))?); assert_eq!(b(b"A"), t.transform(&b(b"8.@#$...4...======"))?); // We can still error with bad characters assert!(t.transform(&b(b"no u")).is_err()); Ok(()) } #[test] fn test_deflate() -> SimpleResult<()> { let t = H2Transformation::FromDeflated; let result = t.transform(&b(b"\x03\x00\x00\x00\x00\x01"))?; assert_eq!(0, result.len()); let result = t.transform(&b(b"\x63\x00\x00\x00\x01\x00\x01"))?; assert_eq!(vec![0x00], result); let result = t.transform(&b(b"\x63\x60\x80\x01\x00\x00\x0a\x00\x01"))?; assert_eq!(vec![0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00], result); let result = t.transform(&b(b"\x63\x60\x64\x62\x66\x61\x65\x63\xe7\xe0\x04\x00\x00\xaf\x00\x2e"))?; assert_eq!(vec![0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09], result); // Best compression let result = t.transform(&b(b"\x73\x74\x72\x76\x01\x00\x02\x98\x01\x0b"))?; assert_eq!(vec![0x41, 0x42, 0x43, 0x44], result); // No compression let result = t.transform(&b(b"\x01\x04\x00\xfb\xff\x41\x42\x43\x44\x02\x98\x01\x0b"))?; assert_eq!(vec![0x41, 0x42, 0x43, 0x44], result); // Try an intentional error assert!(t.transform(&b(b"\xFF")).is_err()); Ok(()) } #[test] fn test_deflate_zlib() -> SimpleResult<()> { let t = H2Transformation::FromDeflatedZlib; let result = t.transform(&b(b"\x78\x9c\x03\x00\x00\x00\x00\x01"))?; assert_eq!(0, result.len()); let result = t.transform(&b(b"\x78\x9c\x63\x00\x00\x00\x01\x00\x01"))?; assert_eq!(vec![0x00], result); let result = t.transform(&b(b"\x78\x9c\x63\x60\x80\x01\x00\x00\x0a\x00\x01"))?; assert_eq!(vec![0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00], result); let result = t.transform(&b(b"\x78\x9c\x63\x60\x64\x62\x66\x61\x65\x63\xe7\xe0\x04\x00\x00\xaf\x00\x2e"))?; assert_eq!(vec![0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09], result); // Best compression let result = t.transform(&b(b"\x78\x9c\x73\x74\x72\x76\x01\x00\x02\x98\x01\x0b"))?; assert_eq!(vec![0x41, 0x42, 0x43, 0x44], result); // No compression let result = t.transform(&b(b"\x78\x01\x01\x04\x00\xfb\xff\x41\x42\x43\x44\x02\x98\x01\x0b"))?; assert_eq!(vec![0x41, 0x42, 0x43, 0x44], result); // Try an intentional error assert!(t.transform(&b(b"\xFF")).is_err()); Ok(()) } #[test] fn test_hex() -> SimpleResult<()> { let t = H2Transformation::FromHex; assert!(t.is_two_way()); assert!(t.can_transform(&b(b"00"))); assert!(t.can_transform(&b(b"0001"))); assert!(t.can_transform(&b(b"000102feff"))); assert!(!t.can_transform(&b(b"0"))); assert!(!t.can_transform(&b(b"001"))); assert!(!t.can_transform(&b(b"00102FEff"))); assert!(!t.can_transform(&b(b"fg"))); assert!(!t.can_transform(&b(b"+="))); assert_eq!(vec![0x00], t.transform(&b(b"00"))?); assert_eq!(vec![0x00, 0x01], t.transform(&b(b"0001"))?); assert_eq!(vec![0x00, 0x01, 0x02, 0xfe, 0xff], t.transform(&b(b"000102fEFf"))?); assert_eq!(b(b"00"), t.untransform(&vec![0x00])?); assert_eq!(b(b"0001"), t.untransform(&vec![0x00, 0x01])?); assert_eq!(b(b"000102feff"), t.untransform(&vec![0x00, 0x01, 0x02, 0xfe, 0xff])?); assert!(t.transform(&b(b"abababag")).is_err()); Ok(()) } #[test] fn test_detect() -> SimpleResult<()> { let tests: Vec<_> = vec![ ( "Testcase: 'A'", b(b"A"), vec![ ], ), ( "Testcase: 'AA'", b(b"AA"), vec![ &H2Transformation::FromBase64NoPadding, &H2Transformation::FromBase64URLNoPadding, &H2Transformation::FromHex, &H2Transformation::FromBase32NoPadding, &H2Transformation::FromBase32Crockford, ], ), ( "Testcase: 'AA=='", b(b"AA=="), vec![ &H2Transformation::FromBase64, &H2Transformation::FromBase64URL, ], ), ( "Testcase: '/+AAAA=='", b(b"/+AAAA=="), vec![ 
&H2Transformation::FromBase64, ], ), ( "Testcase: '-_AAAA=='", b(b"-_AAAA=="), vec![ &H2Transformation::FromBase64URL, &H2Transformation::FromDeflated, ], ), ( "Testcase: Simple deflated", b(b"\x03\x00\x00\x00\x00\x01"), vec![ &H2Transformation::FromDeflated, ] ), ( "Testcase: Zlib deflated", b(b"\x78\x9c\x03\x00\x00\x00\x00\x01"), vec![ &H2Transformation::FromDeflatedZlib, ] ), ( "Testcase: Base32", b(b"ORSXG5BRGIZSA2DFNRWG6==="), vec![ &H2Transformation::FromBase32, ] ), ( "Testcase: Base32 no padding", b(b"ORSXG5BRGIZSA2DFNRWG6"), vec![ &H2Transformation::FromBase32NoPadding, &H2Transformation::FromBase32Crockford, ] ), ( "Testcase: Base32 crockford", b(b"EHJQ6X1H68SJ0T35DHP6Y"), vec![ &H2Transformation::FromBase32Crockford, ] ), ]; // Do this in a loop since we have to sort both vectors for (desc, s, r) in tests { let mut t = H2Transformation::detect(&s); t.sort(); let mut r = r.clone(); r.sort(); assert_eq!(t, r, "{}", desc); } Ok(()) } }
true
2f2c6faabd14bd6bc9c002d940e4d648a15a05b5
Rust
gbdev/gb-asm-tutorial
/i18n-helpers/src/bin/mdbook-xgettext.rs
UTF-8
5,014
2.765625
3
[ "Apache-2.0", "MIT", "CC0-1.0", "CC-BY-SA-4.0" ]
permissive
// Copyright 2023 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. //! `xgettext` for `mdbook` //! //! This program works like `xgettext`, meaning it will extract //! translatable strings from your book. The strings are saved in a //! GNU Gettext `messages.pot` file in your build directory (typically //! `po/messages.pot`). //! //! See `TRANSLATIONS.md` in the repository root for more information. use anyhow::{anyhow, Context}; use mdbook::renderer::RenderContext; use mdbook::BookItem; use polib::catalog::Catalog; use polib::message::Message; use std::fs; use std::io; fn add_message(catalog: &mut Catalog, msgid: &str, source: &str) { let sources = match catalog.find_message(msgid) { Some(msg) => format!("{}\n{}", msg.source, source), None => String::from(source), }; let message = Message::new_singular("", &sources, "", "", msgid, ""); // Carefully update the existing message or add a new one. It's an // error to create a catalog with duplicate msgids. 
match catalog.find_message_index(msgid) { Some(&idx) => catalog.update_message_by_index(idx, message).unwrap(), None => catalog.add_message(message), } } fn create_catalog(ctx: &RenderContext) -> anyhow::Result<Catalog> { let mut catalog = Catalog::new(); if let Some(title) = &ctx.config.book.title { catalog.metadata.project_id_version = String::from(title); } if let Some(lang) = &ctx.config.book.language { catalog.metadata.language = String::from(lang); } catalog.metadata.mime_version = String::from("1.0"); catalog.metadata.content_type = String::from("text/plain; charset=UTF-8"); catalog.metadata.content_transfer_encoding = String::from("8bit"); let summary_path = ctx.config.book.src.join("SUMMARY.md"); let summary = std::fs::read_to_string(ctx.root.join(&summary_path))?; // First, add all chapter names and part titles from SUMMARY.md. // The book items are in order of the summary, so we can assign // correct line numbers for duplicate lines by tracking the index // of our last search. let mut last_idx = 0; for item in ctx.book.iter() { let line = match item { BookItem::Chapter(chapter) => &chapter.name, BookItem::PartTitle(title) => title, BookItem::Separator => continue, }; let idx = summary[last_idx..].find(line).ok_or_else(|| { anyhow!( "Could not find {line:?} in SUMMARY.md after line {} -- \ please remove any formatting from SUMMARY.md", summary[..last_idx].lines().count() ) })?; last_idx += idx; let lineno = summary[..last_idx].lines().count(); let source = format!("{}:{}", summary_path.display(), lineno); add_message(&mut catalog, line, &source); } // Next, we add the chapter contents. 
for item in ctx.book.iter() { if let BookItem::Chapter(chapter) = item { let path = match &chapter.path { Some(path) => ctx.config.book.src.join(path), None => continue, }; for (lineno, paragraph) in i18n_helpers::extract_paragraphs(&chapter.content) { let source = format!("{}:{}", path.display(), lineno); add_message(&mut catalog, paragraph, &source); } } } Ok(catalog) } fn main() -> anyhow::Result<()> { let ctx = RenderContext::from_json(&mut io::stdin()).context("Parsing stdin")?; let cfg = ctx .config .get_renderer("xgettext") .ok_or_else(|| anyhow!("Could not read output.xgettext configuration"))?; let path = cfg .get("pot-file") .ok_or_else(|| anyhow!("Missing output.xgettext.pot-file config value"))? .as_str() .ok_or_else(|| anyhow!("Expected a string for output.xgettext.pot-file"))?; fs::create_dir_all(&ctx.destination) .with_context(|| format!("Could not create {}", ctx.destination.display()))?; let output_path = ctx.destination.join(path); if output_path.exists() { fs::remove_file(&output_path) .with_context(|| format!("Removing {}", output_path.display()))? } let catalog = create_catalog(&ctx).context("Extracting messages")?; polib::po_file::write(&catalog, &output_path) .with_context(|| format!("Writing messages to {}", output_path.display()))?; Ok(()) }
true
842631f0dfbccbc600ebaa964a5a0843542615ff
Rust
Mrmaxmeier/lua-interpreter
/src/instructions/relational_and_logic.rs
UTF-8
1,768
3.234375
3
[]
no_license
use instruction::*; macro_rules! logic { ($name:ident, $op:expr) => ( #[derive(Debug, Clone, Copy, PartialEq)] pub struct $name { pub lhs: DataSource, pub rhs: DataSource, pub inverted: bool } impl LoadInstruction for $name { fn load(d: u32) -> Self { let (a, b, c) = parse_A_B_C(d); $name { lhs: b.into(), rhs: c.into(), inverted: a == 0, } } } impl InstructionOps for $name { fn exec(&self, context: &mut Context) { let lhs = self.lhs.get_from(context); let rhs = self.rhs.get_from(context); let res = $op(lhs, rhs).unwrap(); if res == self.inverted { context.ci_mut().pc += 1 } } } ) } fn attempted_to_compare(a: &Type, b: &Type) -> String { format!("attempted to compare {} with {} ({}, {})", a.as_type_str(), b.as_type_str(), a.repr(), b.repr()) } // 31: EQ A B C if ((RK(B) == RK(C)) ~= A) then pc++ logic!(Equals, |a, b| -> Result<bool, String> { Ok(a == b) }); // 32: LT A B C if ((RK(B) < RK(C)) ~= A) then pc++ logic!(LessThan, |a, b| { if let (&Type::Number(ref a_num), &Type::Number(ref b_num)) = (&a, &b) { Ok(a_num < b_num) } else { Err(attempted_to_compare(&a, &b)) } }); // 33: LE A B C if ((RK(B) <= RK(C)) ~= A) then pc++ logic!(LessThanOrEquals, |a, b| { if let (&Type::Number(ref a_num), &Type::Number(ref b_num)) = (&a, &b) { Ok(a_num <= b_num) } else { Err(attempted_to_compare(&a, &b)) } });
true
997581d5002a945f7b2da6f345b7d6b0f06c5df2
Rust
shaunstanislauslau/jormungandr
/jormungandr/src/network/p2p/policy.rs
UTF-8
2,843
2.9375
3
[ "MIT", "Apache-2.0" ]
permissive
use jormungandr_lib::time::Duration; use poldercast::{Node, PolicyReport}; use serde::{Deserialize, Serialize}; use slog::Logger; /// default quarantine duration is 30min const DEFAULT_QUARANTINE_DURATION: std::time::Duration = std::time::Duration::from_secs(1800); /// This is the P2P policy. Right now it is very similar to the default policy /// defined in `poldercast` crate. /// #[derive(Debug, Clone)] pub struct Policy { quarantine_duration: std::time::Duration, logger: Logger, } #[derive(Debug, Clone, Serialize, Deserialize)] #[serde(deny_unknown_fields, rename_all = "snake_case")] pub struct PolicyConfig { quarantine_duration: Duration, } impl Policy { pub fn new(pc: PolicyConfig, logger: Logger) -> Self { Self { quarantine_duration: pc.quarantine_duration.into(), logger, } } } impl Default for PolicyConfig { fn default() -> Self { Self { quarantine_duration: Duration::from(DEFAULT_QUARANTINE_DURATION), } } } impl poldercast::Policy for Policy { fn check(&mut self, node: &mut Node) -> PolicyReport { let id = node.id().to_string(); let logger = self.logger.new(o!("id" => id)); // if the node is already quarantined if let Some(since) = node.logs().quarantined() { let duration = since.elapsed().unwrap(); if duration < self.quarantine_duration { // the node still need to do some quarantine time PolicyReport::None } else if node.logs().last_update().elapsed().unwrap() < self.quarantine_duration { // the node has been quarantined long enough, check if it has been updated // while being quarantined (i.e. the node is still up and advertising itself // or others are still gossiping about it.) // the fact that this `Policy` does clean the records is a policy choice. // one could prefer to keep the record longers for future `check`. 
node.record_mut().clean_slate(); debug!(logger, "lifting quarantine"); PolicyReport::LiftQuarantine } else { // it appears the node was quarantine and is no longer active or gossiped // about, so we can forget it debug!(logger, "forgetting about the node"); PolicyReport::Forget } } else if node.record().is_clear() { // if the record is clear, do nothing, leave the Node in the available nodes PolicyReport::None } else { // if the record is not `clear` then we quarantine the block for some time debug!(logger, "move node to quarantine"); PolicyReport::Quarantine } } }
true
d35e3e1c5b5a3f71730dde7033aaf03dc0836fe7
Rust
aconley/Algorithms
/TAOCP/Implementations/taocp/src/backtracking/sudoku.rs
UTF-8
29,662
3.734375
4
[ "MIT" ]
permissive
// A sudoku solver using basic backtracking. // // If there is more than one solution, this will return an arbitrary one. use std::fmt; use std::mem; #[derive(Debug, PartialEq, Eq)] pub struct SudokuSolution { rows: Vec<Vec<u8>>, } impl SudokuSolution { fn create(values: &[u8]) -> Self { assert_eq!(values.len(), 81); let mut result: Vec<Vec<u8>> = Vec::with_capacity(9); for row in values.chunks(9) { result.push(row.to_vec()); } SudokuSolution { rows: result } } } impl fmt::Display for SudokuSolution { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { for row_chunk in self.rows.chunks(3) { write!(f, "+---+---+---+\n")?; for row in row_chunk { write!( f, "|{}{}{}|{}{}{}|{}{}{}|\n", row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8] )?; } } write!(f, "+---+---+---+") } } // Each box has a postion [0, 81) where the numbering starts in the top left // of the sudoku and increases along columns then rows. The positions are // therefore: // +----------+----------+----------+ // | 0 1 2 | 3 4 5 | 6 7 8 | // | 9 10 11 | 12 13 14 | 15 16 17 | // | 18 19 20 | 21 22 23 | 24 25 26 | // +----------+----------+----------+ // | 27 28 29 | 30 31 32 | 33 34 35 | // | 36 37 38 | 39 40 41 | 42 43 44 | // | 45 46 47 | 48 49 50 | 51 52 53 | // +----------+----------+----------+ // | 54 55 56 | 57 58 59 | 60 61 62 | // | 63 64 65 | 66 67 68 | 69 70 71 | // | 72 73 74 | 75 76 77 | 78 79 80 | // +----------+----------+----------+ // // The row, column, and box can be found via: // row = pos / 9 // col = pos mod 9 // box = 3 * (row / 3) + (col / 3) = 3 * (pos / 27) + (pos mod 9) / 3 #[derive(Copy, Clone, Debug, Default, PartialEq, Eq)] pub struct InitialPosition { pub row: u8, // Row number [0, 8] pub col: u8, // Col number [0, 8] pub value: u8, // Value [1, 9] } impl InitialPosition { // Create a vector of initial positions from a 81 element array that is either // 0 (indicating an unset value) or gives the value at that position. 
pub fn create_from_values(values: &[u8; 81]) -> Vec<InitialPosition> { values .iter() .enumerate() .filter(|(_, &val)| val != 0) .map(|(idx, val)| InitialPosition { row: (idx / 9) as u8, col: (idx % 9) as u8, value: *val, }) .collect() } pub fn create_from_vec(values: &Vec<u8>) -> Vec<InitialPosition> { values .iter() .enumerate() .filter(|(_, &val)| val != 0) .map(|(idx, val)| InitialPosition { row: (idx / 9) as u8, col: (idx % 9) as u8, value: *val, }) .collect() } fn box_index(&self) -> usize { (3 * (self.row / 3) + self.col / 3) as usize } } // Row, column, box position. #[derive(Copy, Clone, Debug, Default, PartialEq, Eq)] struct Square { row: u8, col: u8, r#box: u8, } impl Square { fn create(row: u8, col: u8) -> Self { Square { row: row, col: col, r#box: 3 * (row / 3) + col / 3, } } fn position(&self) -> usize { (9 * self.row + self.col) as usize } } // Bitwise encoded moves are represented as 1 << val where val is in [1, 9] #[derive(Copy, Clone, Debug, Default, PartialEq, Eq)] struct Move { current_move: u16, // Bitwise encoded current move available_moves: u16, // Bitwise or of all available moves, excluding current. square: Square, // The position in the puzzle of this move. } impl Move { // Converts the encoded current_move to the normal value [1, 9] fn value(&self) -> u8 { self.current_move.trailing_zeros() as u8 } } #[derive(Debug)] struct SolutionState { l: usize, // Current level. m: Vec<Move>, // Moves. [0, l) are settled, l is under consideration. 
initial_position: Vec<InitialPosition>, c_row: [u16; 9], // Available moves per row c_col: [u16; 9], // Available moves per col c_box: [u16; 9], // Available moves per box } #[derive(Debug, Eq, PartialEq)] struct NextMove { idx: usize, available_moves: u16, } impl SolutionState { fn create(mut initial_position: Vec<InitialPosition>) -> Option<Self> { if initial_position.len() > 81 { return None; } let mut unused = [true; 81]; let mut c_row = [0b1111111110u16; 9]; let mut c_col = c_row.clone(); let mut c_box = c_row.clone(); for pos in &initial_position { if pos.row > 8 || pos.col > 8 || pos.value < 1 || pos.value > 9 { // Invalid initial input. return None; } let b = pos.box_index(); let value = 1u16 << pos.value; if c_row[pos.row as usize] & c_col[pos.col as usize] & c_box[b] & value == 0 { // Conflict, no solution. return None; } c_row[pos.row as usize] &= !value; c_col[pos.col as usize] &= !value; c_box[b] &= !value; unused[9 * pos.row as usize + pos.col as usize] = false; } let m: Vec<Move>; if initial_position.len() == 81 { // This is a little tricky -- somebody gave us an already complete // solution. The implementation won't quite work right with that, // so we need to artifically leave one of the moves off. 
let final_move = initial_position.pop().unwrap(); let available_move = 1u16 << final_move.value; let square = Square::create(final_move.row, final_move.col); c_row[square.row as usize] = available_move; c_col[square.col as usize] = available_move; c_box[square.r#box as usize] = available_move; m = vec![Move { square: square, current_move: 0, available_moves: available_move, }]; } else { m = unused .iter() .enumerate() .filter(|(_, &v)| v) .map(|(idx, _)| { Move { square: Square::create(idx as u8 / 9, idx as u8 % 9), current_move: 0, available_moves: 0, } }) .collect(); } Some(SolutionState { l: 0, m: m, initial_position: initial_position, c_row: c_row, c_col: c_col, c_box: c_box, }) } fn to_solution(&self) -> SudokuSolution { let mut sol = [0u8; 81]; for p in &self.initial_position { sol[(9 * p.row + p.col) as usize] = p.value; } for mv in &self.m { let v = mv.value(); sol[mv.square.position()] = if v == 16 { 0 } else { v }; } SudokuSolution::create(&sol) } // Applies the move in m[l] to the state. // Assumes that self.l is in the range [0, n) and that m[l].available_moves // is not zero (that is, there is an available move). #[inline(always)] unsafe fn apply_next_move(&mut self) { // Assumed non-zero. let avail = self.m[self.l].available_moves as i16; let v = (avail & -avail) as u16; self.m[self.l].current_move = v; self.m[self.l].available_moves &= !v; let sq = self.m[self.l].square; self.c_row[sq.row as usize] &= !v; self.c_col[sq.col as usize] &= !v; self.c_box[sq.r#box as usize] &= !v; } // Undoes the move in position m[l]. Assumes self.l is in the range [0, n) #[inline(always)] unsafe fn undo_current_move(&mut self) { let sq = self.m[self.l].square; self.c_row[sq.row as usize] |= self.m[self.l].current_move; self.c_col[sq.col as usize] |= self.m[self.l].current_move; self.c_box[sq.r#box as usize] |= self.m[self.l].current_move; } // Chooses the next move and swaps it into place as m[l]. 
// Assumes that self.l is in the range [0, n) #[inline(always)] unsafe fn choose_next_move(&mut self) -> () { let next_move = self.suggest_next_move(); self.m.swap(self.l, next_move.idx); self.m[self.l].current_move = 0; self.m[self.l].available_moves = next_move.available_moves; } // Returns the next move that should be made. Assumes that self.l is in // the range [0, n) #[inline(always)] unsafe fn suggest_next_move(&self) -> NextMove { let sq = self.m[self.l].square; let mut avail_best = self.c_row[sq.row as usize] & self.c_col[sq.col as usize] & self.c_box[sq.r#box as usize]; if avail_best == 0 { return NextMove { idx: self.l, available_moves: 0, }; } let mut mrv_best = avail_best.count_ones(); let mut idx_best = self.l; for (idx, mv) in self.m.iter().enumerate().skip(self.l + 1) { let avail = self.c_row[mv.square.row as usize] & self.c_col[mv.square.col as usize] & self.c_box[mv.square.r#box as usize]; if avail == 0 { return NextMove { idx: idx, available_moves: 0, }; } let mrv = avail.count_ones(); if mrv < mrv_best { idx_best = idx; mrv_best = mrv; avail_best = avail; } } NextMove { idx: idx_best, available_moves: avail_best, } } } impl Iterator for SolutionState { type Item = SudokuSolution; fn next(&mut self) -> Option<Self::Item> { loop { // Backtrack from current position. while self.m[self.l].available_moves == 0 { if self.l == 0 { return None; } unsafe { self.undo_current_move(); } self.l -= 1; } // Undo the current move, then apply the next one. // Apply the move in position m[l] and advance l. unsafe { self.undo_current_move(); self.apply_next_move(); } self.l += 1; // Are we done? if self.l == self.m.len() { return Some(self.to_solution()); } // Chose the next move and put it in m[l]. unsafe { self.choose_next_move(); } } } } enum IteratorState { DONE, NEW(Vec<InitialPosition>), READY(SolutionState), } pub struct SudokuIterator { // Use a Box to hold the state because otherwise the structure is ~ 120 bytes. 
state: Box<IteratorState>, } impl SudokuIterator { pub fn create(input: Vec<InitialPosition>) -> Self { SudokuIterator { state: Box::new(IteratorState::NEW(input)), } } } impl Iterator for SudokuIterator { type Item = SudokuSolution; fn next(&mut self) -> Option<Self::Item> { match &mut *self.state { IteratorState::NEW(initial_position) => { // We need to take ownership of the initial position. let init_pos = mem::replace(initial_position, Vec::new()); match SolutionState::create(init_pos) { None => { *self.state = IteratorState::DONE; None } Some(mut solution_state) => { unsafe { solution_state.choose_next_move(); } let result = solution_state.next(); match result { None => { *self.state = IteratorState::DONE; } Some(ref _sol) => { *self.state = IteratorState::READY(solution_state); } } result } } } IteratorState::READY(ref mut solution_state) => { solution_state.l -= 1; let result = solution_state.next(); if result.is_none() { *self.state = IteratorState::DONE; } result } IteratorState::DONE => None, } } } #[cfg(test)] mod tests { // A fully solved sudoku puzzle. #[rustfmt::skip] const SOL: [u8; 81] = [ 5, 3, 4, 6, 7, 8, 9, 1, 2, 6, 7, 2, 1, 9, 5, 3, 4, 8, 1, 9, 8, 3, 4, 2, 5, 6, 7, 8, 5, 9, 7, 6, 1, 4, 2, 3, 4, 2, 6, 8, 5, 3, 7, 9, 1, 7, 1, 3, 9, 2, 4, 8, 5, 6, 9, 6, 1, 5, 3, 7, 2, 8, 4, 2, 8, 7, 4, 1, 9, 6, 3, 5, 3, 4, 5, 2, 8, 6, 1, 7, 9, ]; // A partially solved sudoku puzzle. 
#[rustfmt::skip] const PARTIAL: [u8; 81] = [ 0, 6, 9, 0, 1, 3, 7, 8, 0, 0, 7, 3, 0, 0, 8, 6, 0, 0, 8, 2, 0, 0, 9, 0, 3, 0, 0, 7, 0, 0, 9, 3, 1, 2, 6, 8, 1, 9, 6, 0, 8, 2, 4, 0, 3, 3, 8, 2, 4, 0, 0, 0, 0, 0, 6, 1, 7, 3, 2, 0, 8, 0, 4, 9, 3, 0, 8, 7, 0, 1, 2, 6, 2, 0, 8, 1, 0, 0, 0, 3, 7, ]; mod sudoku_solution { use super::SOL; use crate::backtracking::sudoku::SudokuSolution; use std::fmt::Write; #[test] fn formatter_produces_expected_output() { let s = SudokuSolution::create(&SOL); let mut buf = String::new(); write!(&mut buf, "{}", s).ok(); let expected = "+---+---+---+\n\ |534|678|912|\n\ |672|195|348|\n\ |198|342|567|\n\ +---+---+---+\n\ |859|761|423|\n\ |426|853|791|\n\ |713|924|856|\n\ +---+---+---+\n\ |961|537|284|\n\ |287|419|635|\n\ |345|286|179|\n\ +---+---+---+"; assert_eq!(buf, expected); } } mod solution_state { use super::{PARTIAL, SOL}; use crate::backtracking::sudoku::{InitialPosition, NextMove, SolutionState, Square}; #[test] fn invalid_input_value() { let mut bad_input = SOL; bad_input[10] = 10; assert!(SolutionState::create(InitialPosition::create_from_values(&bad_input)).is_none()); } #[test] fn conflicting_input_row() { let bad_input = vec![0, 1, 1, 2, 3, 4, 5, 6, 7]; assert!(SolutionState::create(InitialPosition::create_from_vec(&bad_input)).is_none()); } #[test] fn select_position_from_almost_full_row() { let input = vec![1, 3, 4, 5, 0, 7, 6, 8, 9]; let initial_position = InitialPosition::create_from_vec(&input); let s = SolutionState::create(initial_position).unwrap(); let next_move = unsafe { s.suggest_next_move() }; assert_eq!( next_move.available_moves, 1u16 << 2, "Unexpected available moves" ); assert_eq!( s.m[next_move.idx].square, Square { row: 0, col: 4, r#box: 1}, "Unexpected position for next move" ); } #[test] fn select_only_possible_move() { let mut almost_sol = SOL; // unset one position. almost_sol[21] = 0; // was 3. 
let s = SolutionState::create(InitialPosition::create_from_values(&almost_sol)).unwrap(); let next_move = unsafe { s.suggest_next_move() }; assert_eq!( next_move.available_moves, 1u16 << 3, "Unexpected available moves" ); assert_eq!( s.m[next_move.idx].square, Square { row: 2, col: 3, r#box: 1}, "Unexpected position for next move" ); } #[test] fn select_most_constrained_move_with_single_choice() { // The most constrained open space is the second 0 in the first row. let mut input = Vec::with_capacity(18); input.extend([0, 2, 3, 0, 5, 6, 7, 8, 9]); input.extend([0, 7, 8, 1, 2, 3, 4, 5, 0]); let initial_position = InitialPosition::create_from_vec(&input); let s = SolutionState::create(initial_position).unwrap(); let next_move = unsafe { s.suggest_next_move() }; assert_eq!( next_move.available_moves, 1u16 << 4, "Unexpected available moves" ); assert_eq!( s.m[next_move.idx].square, Square { row: 0, col: 3, r#box: 1 }, "Unexpected position for next move" ); } #[test] fn select_most_constrained_move_with_multiple_choices() { // Similar to the previous test but with more unset values with // multiple valid choices in second row. // The best choice should be the second 0 in the first row with // available values 4, 6 let mut input = Vec::with_capacity(18); input.extend([0, 2, 3, 0, 5, 0, 7, 8, 9]); input.extend([0, 0, 0, 1, 2, 0, 4, 5, 6]); let initial_position = InitialPosition::create_from_vec(&input); let s = SolutionState::create(initial_position).unwrap(); let next_move = unsafe { s.suggest_next_move() }; assert_eq!( s.m[next_move.idx].square, Square { row: 0, col: 3, r#box: 1 }, "Unexpected position for next move" ); assert_eq!( next_move.available_moves, (1u16 << 4 | 1u16 << 6), "Unexpected available moves" ); } #[test] fn select_next_move_from_partial_puzzle() { // Try selecting from a real puzzle. 
let initial_position = InitialPosition::create_from_values(&PARTIAL); let s = SolutionState::create(initial_position).unwrap(); let next_move = unsafe { s.suggest_next_move() }; assert_eq!( s.m[next_move.idx].square, Square { row: 0, col: 0, r#box: 0 }, "Unexpected position for next move" ); assert_eq!( next_move.available_moves, (1u16 << 4 | 1u16 << 5), "Unexpected available moves" ); } #[test] fn select_when_no_move() { // Test what happens when we get into a corner where no move is available. // The last element on the second row has no available value. let mut input = Vec::with_capacity(27); input.extend([1, 2, 3, 4, 5, 6, 7, 8, 9]); input.extend([4, 5, 6, 7, 8, 3, 1, 2, 0]); input.extend([7, 8, 0, 1, 2, 0, 4, 5, 6]); let initial_position = InitialPosition::create_from_vec(&input); let s = SolutionState::create(initial_position).unwrap(); let next_move = unsafe { s.suggest_next_move() }; assert_eq!(next_move.available_moves, 0, "Should be no available moves"); assert_eq!( s.m[next_move.idx].square, Square { row: 1, col: 8, r#box: 2 }, "Unexpected position for next move" ); } #[test] fn already_solved_puzzle_should_force_single_move() { let initial_position = InitialPosition::create_from_values(&SOL); match SolutionState::create(initial_position) { None => panic!("Should have been able to initialize from completed solution"), Some(state) => { assert_eq!(state.m.len(), 1, "Should be single move in m"); assert_eq!( unsafe { state.suggest_next_move() }, NextMove { idx: 0, available_moves: 1u16 << SOL[state.m[0].square.position()] } ); } } } } mod iterator { use super::{PARTIAL, SOL}; use crate::backtracking::sudoku::{ InitialPosition, IteratorState, SudokuIterator, SudokuSolution, }; #[test] fn solves_already_solved_puzzle() { let initial_position = InitialPosition::create_from_values(&SOL); let mut iterator = SudokuIterator::create(initial_position); assert!(matches!(*iterator.state, IteratorState::NEW(_))); match iterator.next() { None => panic!("Should have 
found solution"), Some(solution) => { let expected_solution = SudokuSolution::create(&SOL); assert_eq!(solution, expected_solution, "Did not get expected solution"); } } assert_eq!( iterator.next(), None, "DONE iterator should produce no more solutions" ); assert!( matches!(*iterator.state, IteratorState::DONE), "Iterator should be done after discovering there are no more solutions" ); } #[test] fn solves_using_only_forced_moves() { // Take the last row of the full solution and remove all the values // on the bottom row. As a result, all moves will be forced. let mut puzzle = SOL; for i in 72..81 { puzzle[i] = 0; } let initial_position = InitialPosition::create_from_values(&puzzle); let mut iterator = SudokuIterator::create(initial_position); assert!( matches!(*iterator.state, IteratorState::NEW(_)), "Iterator not in initial state" ); match iterator.next() { None => panic!("Should have found solution"), Some(solution) => { let expected_solution = SudokuSolution::create(&SOL); assert_eq!(solution, expected_solution, "Did not get expected solution"); } } assert_eq!( iterator.next(), None, "DONE iterator should produce no more solutions" ); assert!( matches!(*iterator.state, IteratorState::DONE), "Iterator should be done after discovering there are no more solutions" ); } #[test] fn solves_partial_solution() { #[rustfmt::skip] let expected_solution : [u8; 81] = [ 4, 6, 9, 5, 1, 3, 7, 8, 2, 5, 7, 3, 2, 4, 8, 6, 1, 9, 8, 2, 1, 6, 9, 7, 3, 4, 5, 7, 5, 4, 9, 3, 1, 2, 6, 8, 1, 9, 6, 7, 8, 2, 4, 5, 3, 3, 8, 2, 4, 5, 6, 9, 7, 1, 6, 1, 7, 3, 2, 5, 8, 9, 4, 9, 3, 5, 8, 7, 4, 1, 2, 6, 2, 4, 8, 1, 6, 9, 5, 3, 7, ]; let initial_position = InitialPosition::create_from_values(&PARTIAL); let mut iterator = SudokuIterator::create(initial_position); assert!(matches!(*iterator.state, IteratorState::NEW(_))); match iterator.next() { None => panic!("Should have found solution"), Some(solution) => { let expected_solution = SudokuSolution::create(&expected_solution); assert_eq!(solution, 
expected_solution, "Did not get expected solution"); } } assert_eq!( iterator.next(), None, "DONE iterator should produce no more solutions" ); assert!( matches!(*iterator.state, IteratorState::DONE), "Iterator should be done after discovering there are no more solutions" ); } #[test] fn solves_medium_problem() { #[rustfmt::skip] let problem : [u8; 81] = [ 0, 2, 0, 0, 6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 9, 5, 2, 9, 0, 0, 8, 5, 2, 4, 7, 0, 0, 0, 6, 4, 0, 0, 0, 0, 9, 0, 0, 0, 0, 2, 0, 8, 0, 0, 1, 0, 0, 0, 0, 8, 3, 6, 7, 0, 0, 9, 7, 3, 0, 6, 0, 0, 7, 0, 0, 0, 0, 0, 5, 9, 0, 0, 0, 0, 6, 8, 9, 7, 0, 4 ]; #[rustfmt::skip] let expected_solution : [u8; 81] = [ 5, 2, 4, 9, 6, 7, 1, 8, 3, 6, 7, 8, 3, 4, 1, 9, 5, 2, 9, 3, 1, 8, 5, 2, 4, 7, 6, 8, 5, 6, 4, 7, 3, 2, 1, 9, 3, 9, 7, 1, 2, 6, 8, 4, 5, 1, 4, 2, 5, 9, 8, 3, 6, 7, 4, 8, 9, 7, 3, 5, 6, 2, 1, 7, 6, 3, 2, 1, 4, 5, 9, 8, 2, 1, 5, 6, 8, 9, 7, 3, 4 ]; let initial_position = InitialPosition::create_from_values(&problem); let mut iterator = SudokuIterator::create(initial_position); assert!(matches!(*iterator.state, IteratorState::NEW(_))); match iterator.next() { None => panic!("Should have found solution"), Some(solution) => { let expected_solution = SudokuSolution::create(&expected_solution); assert_eq!(solution, expected_solution, "Did not get expected solution"); } } assert_eq!( iterator.next(), None, "DONE iterator should produce no more solutions" ); assert!( matches!(*iterator.state, IteratorState::DONE), "Iterator should be done after discovering there are no more solutions" ); } #[test] fn solves_extreme_problem() { #[rustfmt::skip] let problem : [u8; 81] = [ 4, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 4, 0, 2, 3, 0, 0, 8, 3, 6, 0, 1, 0, 0, 0, 0, 2, 0, 0, 0, 6, 0, 0, 5, 7, 0, 9, 0, 5, 0, 0, 6, 0, 1, 0, 0, 7, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, 6, 0, 0, 3, 7, 0, 0, 0, 0, 0, 0, 0, 0, 6, 4, 0, 0, 7, 0, 0, 0, 2 ]; #[rustfmt::skip] let expected_solution : [u8; 81] = [ 4, 2, 9, 6, 3, 8, 7, 1, 5, 1, 7, 5, 4, 9, 2, 3, 6, 8, 8, 3, 6, 7, 1, 
5, 2, 4, 9, 2, 1, 4, 8, 6, 3, 9, 5, 7, 3, 9, 8, 5, 4, 7, 6, 2, 1, 5, 6, 7, 1, 2, 9, 8, 3, 4, 9, 5, 1, 2, 8, 6, 4, 7, 3, 7, 8, 2, 3, 5, 4, 1, 9, 6, 6, 4, 3, 9, 7, 1, 5, 8, 2 ]; let initial_position = InitialPosition::create_from_values(&problem); let mut iterator = SudokuIterator::create(initial_position); assert!(matches!(*iterator.state, IteratorState::NEW(_))); match iterator.next() { None => panic!("Should have found solution"), Some(solution) => { let expected_solution = SudokuSolution::create(&expected_solution); assert_eq!(solution, expected_solution, "Did not get expected solution"); } } assert_eq!( iterator.next(), None, "DONE iterator should produce no more solutions" ); assert!( matches!(*iterator.state, IteratorState::DONE), "Iterator should be done after discovering there are no more solutions" ); } #[test] fn solves_minimum_clue_problem() { // This problem has only 17 clues, the minimum possible for the solution // to be unique. #[rustfmt::skip] let problem : [u8; 81] = [ 0, 0, 0, 0, 0, 0, 3, 0, 0, 1, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 5, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 6, 0, 0, 0, 0, 0, 0, 5, 3, 0, 0, 0, 0, 5, 0, 8, 0, 0, 0, 0, 0, 0, 0, 0, 9, 0, 0, 0, 7, 0, 0, 8, 3, 0, 0, 0, 0, 4, 0 ]; #[rustfmt::skip] let expected_solution : [u8; 81] = [ 5, 9, 7, 2, 1, 8, 3, 6, 4, 1, 3, 2, 4, 6, 5, 8, 9, 7, 8, 6, 4, 3, 7, 9, 1, 2, 5, 9, 1, 5, 6, 8, 4, 7, 3, 2, 3, 4, 8, 7, 9, 2, 6, 5, 1, 2, 7, 6, 1, 5, 3, 4, 8, 9, 6, 5, 9, 8, 4, 7, 2, 1, 3, 4, 2, 1, 9, 3, 6, 5, 7, 8, 7, 8, 3, 5, 2, 1, 9, 4, 6 ]; let initial_position = InitialPosition::create_from_values(&problem); let mut iterator = SudokuIterator::create(initial_position); assert!(matches!(*iterator.state, IteratorState::NEW(_))); match iterator.next() { None => panic!("Should have found solution"), Some(solution) => { let expected_solution = SudokuSolution::create(&expected_solution); assert_eq!(solution, expected_solution, "Did not get expected solution"); } } assert_eq!( iterator.next(), 
None, "DONE iterator should produce no more solutions" ); assert!( matches!(*iterator.state, IteratorState::DONE), "Iterator should be done after discovering there are no more solutions" ); } #[test] fn solves_non_unique_problem() { // This problem has only 16 clues, so the solution is not unique. #[rustfmt::skip] let problem : [u8; 81] = [ 0, 3, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 1, 0, 0, 0, 5, 0, 0, 0, 0, 0, 9, 0, 2, 0, 0, 0, 0, 0, 6, 0, 4, 0, 0, 0, 0, 3, 5, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 0, 0, 9, 0, 0, 0, 0, 0, 0, 0 ]; #[rustfmt::skip] let expected_solution1 : [u8; 81] = [ 9, 3, 4, 5, 1, 7, 2, 6, 8, 8, 6, 2, 4, 9, 3, 1, 7, 5, 7, 5, 1, 8, 6, 2, 4, 9, 3, 2, 7, 5, 9, 8, 1, 6, 3, 4, 6, 4, 9, 2, 3, 5, 8, 1, 7, 1, 8, 3, 7, 4, 6, 5, 2, 9, 4, 1, 7, 6, 5, 9, 3, 8, 2, 3, 2, 8, 1, 7, 4, 9, 5, 6, 5, 9, 6, 3, 2, 8, 7, 4, 1 ]; #[rustfmt::skip] let expected_solution2 : [u8; 81] = [ 9, 3, 4, 5, 1, 8, 2, 6, 7, 7, 6, 2, 4, 9, 3, 1, 8, 5, 8, 5, 1, 7, 6, 2, 4, 9, 3, 2, 8, 5, 9, 7, 1, 6, 3, 4, 6, 4, 9, 2, 3, 5, 7, 1, 8, 1, 7, 3, 8, 4, 6, 5, 2, 9, 4, 1, 8, 6, 5, 9, 3, 7, 2, 3, 2, 7, 1, 8, 4, 9, 5, 6, 5, 9, 6, 3, 2, 7, 8, 4, 1 ]; let initial_position = InitialPosition::create_from_values(&problem); let iterator = SudokuIterator::create(initial_position); assert!(matches!(*iterator.state, IteratorState::NEW(_))); let sols: Vec<SudokuSolution> = iterator.collect(); assert_eq!(sols.len(), 2); assert_eq!(sols[0], SudokuSolution::create(&expected_solution1)); assert_eq!(sols[1], SudokuSolution::create(&expected_solution2)); } #[test] fn very_hard_sudoku() { // This is the problem from Knuth 7.2.2.1.50 #[rustfmt::skip] let problem : [u8; 81] = [ 1, 2, 0, 3, 0, 0, 4, 0, 0, 4, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 5, 0, 6, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 1, 0, 0, 7, 0, 0, 0, 0, 2, 3, 0, 0, 0, 0, 0, 0, 0, 6, 0, 8, 0, 4, 0, 2, 0, 0, 0, 7, 0, 0, 0, 9, 0, 8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 0, 0, 6 ]; #[rustfmt::skip] let expected_solution : [u8; 
81] = [ 1, 2, 8, 3, 7, 9, 4, 6, 5, 4, 6, 7, 1, 5, 2, 9, 8, 3, 9, 3, 5, 8, 6, 4, 7, 2, 1, 3, 9, 4, 6, 2, 8, 5, 1, 7, 8, 7, 6, 5, 9, 1, 2, 3, 4, 2, 5, 1, 7, 4, 3, 6, 9, 8, 5, 4, 3, 2, 1, 6, 8, 7, 9, 6, 1, 9, 4, 8, 7, 3, 5, 2, 7, 8, 2, 9, 3, 5, 1, 4, 6 ]; let initial_position = InitialPosition::create_from_values(&problem); let iterator = SudokuIterator::create(initial_position); let solutions: Vec<SudokuSolution> = iterator.collect(); assert_eq!(solutions.len(), 1, "Solution should be unique"); assert_eq!(solutions[0], SudokuSolution::create(&expected_solution)); } } }
true
6d5065957a3d861aac5b914b72e36a936cbc8482
Rust
matthiasbeyer/rust-ipfs-api
/ipfs-api-examples/examples/dns.rs
UTF-8
1,277
2.625
3
[ "Apache-2.0", "LicenseRef-scancode-unknown-license-reference", "MIT" ]
permissive
// Copyright 2017 rust-ipfs-api Developers // // Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or // http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or // http://opensource.org/licenses/MIT>, at your option. This file may not be // copied, modified, or distributed except according to those terms. // use ipfs_api_examples::ipfs_api::{IpfsApi, IpfsClient}; // Creates an Ipfs client, resolves ipfs.io, and lists the contents of it. // #[ipfs_api_examples::main] async fn main() { tracing_subscriber::fmt::init(); eprintln!("connecting to localhost:5001..."); let client = IpfsClient::default(); let dns = match client.dns("ipfs.io", true).await { Ok(dns) => { eprintln!("dns resolves to ({})", &dns.path); eprintln!(); dns } Err(e) => { eprintln!("error resolving dns: {}", e); return; } }; match client.object_get(&dns.path[..]).await { Ok(contents) => { eprintln!("found contents:"); for link in contents.links.iter() { eprintln!("[{}] ({} bytes)", link.name, link.size); } } Err(e) => eprintln!("error listing path: {}", e), } }
true
9ea25155d599eda73dd2812ccf2d9912dc32c615
Rust
billnote/rust-exercism
/luhn-from/src/lib.rs
UTF-8
353
3.15625
3
[ "MIT" ]
permissive
extern crate luhn; use std::convert::From; use std::fmt::Display; pub struct Luhn<T> where T: Display, { number: T, } impl<T: Display> Luhn<T> { pub fn is_valid(&self) -> bool { luhn::is_valid(&self.number.to_string()) } } impl<T: Display> From<T> for Luhn<T> { fn from(f: T) -> Self { Luhn { number: f } } }
true
933f33c7e51a15785f3add9a10f20bcbd09a6e91
Rust
plugblockchain/plug-blockchain
/primitives/election-providers/src/lib.rs
UTF-8
9,084
2.546875
3
[ "Apache-2.0", "GPL-3.0-or-later", "Classpath-exception-2.0", "GPL-1.0-or-later", "GPL-3.0-only" ]
permissive
// This file is part of Substrate. // Copyright (C) 2020 Parity Technologies (UK) Ltd. // SPDX-License-Identifier: Apache-2.0 // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. //! Primitive traits for providing election functionality. //! //! This crate provides two traits that could interact to enable extensible election functionality //! within FRAME pallets. //! //! Something that will provide the functionality of election will implement [`ElectionProvider`], //! whilst needing an associated [`ElectionProvider::DataProvider`], which needs to be fulfilled by //! an entity implementing [`ElectionDataProvider`]. Most often, *the data provider is* the receiver //! of the election, resulting in a diagram as below: //! //! ```ignore //! ElectionDataProvider //! <------------------------------------------+ //! | | //! v | //! +-----+----+ +------+---+ //! | | | | //! pallet-do-election | | | | pallet-needs-election //! | | | | //! | | | | //! +-----+----+ +------+---+ //! | ^ //! | | //! +------------------------------------------+ //! ElectionProvider //! ``` //! //! > It could also be possible that a third party pallet (C), provides the data of election to an //! > election provider (B), which then passes the election result to another pallet (A). //! //! ## Election Types //! //! Typically, two types of elections exist: //! //! 1. **Stateless**: Election data is provided, and the election result is immediately ready. //! 2. 
**Stateful**: Election data is is queried ahead of time, and the election result might be //! ready some number of blocks in the future. //! //! To accommodate both type of elections in one trait, the traits lean toward **stateful //! election**, as it is more general than the stateless. This is why [`ElectionProvider::elect`] //! has no parameters. All value and type parameter must be provided by the [`ElectionDataProvider`] //! trait, even if the election happens immediately. //! //! ## Election Data //! //! The data associated with an election, essentially what the [`ElectionDataProvider`] must convey //! is as follows: //! //! 1. A list of voters, with their stake. //! 2. A list of targets (i.e. _candidates_). //! 3. A number of desired targets to be elected (i.e. _winners_) //! //! In addition to that, the [`ElectionDataProvider`] must also hint [`ElectionProvider`] at when //! the next election might happen ([`ElectionDataProvider::next_election_prediction`]). A stateless //! election provider would probably ignore this. A stateful election provider can use this to //! prepare the election result in advance. //! //! Nonetheless, an [`ElectionProvider`] shan't rely on this and should preferably provide some //! means of fallback election as well, in case the `elect` was called immaturely early. //! //! ## Example //! //! ```rust //! # use sp_election_providers::*; //! # use sp_npos_elections::{Support, Assignment}; //! //! type AccountId = u64; //! type Balance = u64; //! type BlockNumber = u32; //! //! mod data_provider { //! use super::*; //! //! pub trait Config: Sized { //! type ElectionProvider: ElectionProvider< //! AccountId, //! BlockNumber, //! DataProvider = Module<Self>, //! >; //! } //! //! pub struct Module<T: Config>(std::marker::PhantomData<T>); //! //! impl<T: Config> ElectionDataProvider<AccountId, BlockNumber> for Module<T> { //! fn desired_targets() -> u32 { //! 1 //! } //! fn voters() -> Vec<(AccountId, VoteWeight, Vec<AccountId>)> { //! 
Default::default() //! } //! fn targets() -> Vec<AccountId> { //! vec![10, 20, 30] //! } //! fn next_election_prediction(now: BlockNumber) -> BlockNumber { //! 0 //! } //! } //! } //! //! //! mod generic_election_provider { //! use super::*; //! //! pub struct GenericElectionProvider<T: Config>(std::marker::PhantomData<T>); //! //! pub trait Config { //! type DataProvider: ElectionDataProvider<AccountId, BlockNumber>; //! } //! //! impl<T: Config> ElectionProvider<AccountId, BlockNumber> for GenericElectionProvider<T> { //! type Error = (); //! type DataProvider = T::DataProvider; //! //! fn elect() -> Result<Supports<AccountId>, Self::Error> { //! Self::DataProvider::targets() //! .first() //! .map(|winner| vec![(*winner, Support::default())]) //! .ok_or(()) //! } //! } //! } //! //! mod runtime { //! use super::generic_election_provider; //! use super::data_provider; //! use super::AccountId; //! //! struct Runtime; //! impl generic_election_provider::Config for Runtime { //! type DataProvider = data_provider::Module<Runtime>; //! } //! //! impl data_provider::Config for Runtime { //! type ElectionProvider = generic_election_provider::GenericElectionProvider<Runtime>; //! } //! //! } //! //! # fn main() {} //! ``` #![cfg_attr(not(feature = "std"), no_std)] pub mod onchain; use sp_std::{prelude::*, fmt::Debug}; /// Re-export some type as they are used in the interface. pub use sp_arithmetic::PerThing; pub use sp_npos_elections::{Assignment, ExtendedBalance, PerThing128, Supports, VoteWeight}; /// Something that can provide the data to an [`ElectionProvider`]. pub trait ElectionDataProvider<AccountId, BlockNumber> { /// All possible targets for the election, i.e. the candidates. fn targets() -> Vec<AccountId>; /// All possible voters for the election. /// /// Note that if a notion of self-vote exists, it should be represented here. fn voters() -> Vec<(AccountId, VoteWeight, Vec<AccountId>)>; /// The number of targets to elect. 
fn desired_targets() -> u32; /// Provide a best effort prediction about when the next election is about to happen. /// /// In essence, the implementor should predict with this function when it will trigger the /// [`ElectionProvider::elect`]. /// /// This is only useful for stateful election providers. fn next_election_prediction(now: BlockNumber) -> BlockNumber; /// Utility function only to be used in benchmarking scenarios, to be implemented optionally, /// else a noop. #[cfg(any(feature = "runtime-benchmarks", test))] fn put_snapshot( _voters: Vec<(AccountId, VoteWeight, Vec<AccountId>)>, _targets: Vec<AccountId>, ) { } } #[cfg(feature = "std")] impl<AccountId, BlockNumber> ElectionDataProvider<AccountId, BlockNumber> for () { fn targets() -> Vec<AccountId> { Default::default() } fn voters() -> Vec<(AccountId, VoteWeight, Vec<AccountId>)> { Default::default() } fn desired_targets() -> u32 { Default::default() } fn next_election_prediction(now: BlockNumber) -> BlockNumber { now } } /// Something that can compute the result of an election and pass it back to the caller. /// /// This trait only provides an interface to _request_ an election, i.e. /// [`ElectionProvider::elect`]. That data required for the election need to be passed to the /// implemented of this trait through [`ElectionProvider::DataProvider`]. pub trait ElectionProvider<AccountId, BlockNumber> { /// The error type that is returned by the provider. type Error: Debug; /// The data provider of the election. type DataProvider: ElectionDataProvider<AccountId, BlockNumber>; /// Elect a new set of winners. /// /// The result is returned in a target major format, namely as vector of supports. 
fn elect() -> Result<Supports<AccountId>, Self::Error>; } #[cfg(feature = "std")] impl<AccountId, BlockNumber> ElectionProvider<AccountId, BlockNumber> for () { type Error = &'static str; type DataProvider = (); fn elect() -> Result<Supports<AccountId>, Self::Error> { Err("<() as ElectionProvider> cannot do anything.") } }
true
3a49fd8f74e420b4570558fdc5db04d27ba49364
Rust
07th-mod/python-patcher
/install_loader/build.rs
UTF-8
331
2.703125
3
[]
no_license
use std::io; #[cfg(windows)] use winres::WindowsResource; fn main() -> io::Result<()> { // At compile time this includes the .ico file in the executable so it has the correct icon. #[cfg(windows)] { WindowsResource::new() .set_icon("src/resources/icon.ico") .compile()?; } Ok(()) }
true
2474f81b2994c2a51c40fafe08e2eb8ec557cc0b
Rust
hesch/assembler-8bit
/src/microcode.rs
UTF-8
6,479
2.65625
3
[]
no_license
use crate::output_datastructures::{ ControlWord, ACCUMULATOR, AND, INSTRUCTION, LOGIC_B, LOGIC_ZERO, MEMORY, MEMORY_ADDRESS, OR, PROGRAM_COUNTER, SHIFT_LEFT, SHIFT_RIGHT, SHIFT_ZERO, UNCHANGED, XOR, }; use gen_microcode::GenMicrocode; use gen_microcode_macro::gen_microcode; use field_size_macro::FieldSize; use field_size::FieldSize; macro_rules! ctrl_vec { ( $( $x:expr ),* ) => { { vec!( ControlWord { read_from: PROGRAM_COUNTER, write_to: MEMORY_ADDRESS, ..ControlWord::empty() }, ControlWord { read_from: MEMORY, write_to: INSTRUCTION, bank_select_enable: false, program_counter_enable: true, ..ControlWord::empty() }, $( $x, )* ControlWord { step_reset: true, ..ControlWord::empty() } ) } }; } #[derive(gen_microcode)] enum Keyword { Mov(MovFrom, MovTo), Sub(GPR, GPR), Add(GPR, GPR), And(GPR, GPR), Or(GPR, GPR), Xor(GPR, GPR), Cmp(GPR, GPR), Shl(GPR), Shr(GPR), Jmp(u8), Jc(u8), Jz(u8), Hlt, Nop, } impl Keyword { fn control_words(&self) -> Vec<ControlWord> { match self { Keyword::Mov(from, to) => ctrl_vec!(ControlWord { read_from: (*from) as u8, write_to: (*to) as u8, ..ControlWord::empty() }), Keyword::Sub(op1, op2) => ctrl_vec!(ControlWord { read_from: ACCUMULATOR, write_to: (*op1) as u8, alu_left: (*op1) as u8, alu_right: (*op2) as u8, alu_subtract: true, alu_shift: UNCHANGED, alu_logic: LOGIC_B, ..ControlWord::empty() }), Keyword::Add(op1, op2) => ctrl_vec!(ControlWord { read_from: ACCUMULATOR, write_to: (*op1) as u8, alu_left: (*op1) as u8, alu_right: (*op2) as u8, alu_shift: UNCHANGED, alu_logic: LOGIC_B, ..ControlWord::empty() }), Keyword::And(op1, op2) => ctrl_vec!(ControlWord { read_from: ACCUMULATOR, write_to: (*op1) as u8, alu_left: (*op1) as u8, alu_right: (*op2) as u8, alu_logic: AND, alu_shift: SHIFT_ZERO, ..ControlWord::empty() }), Keyword::Or(op1, op2) => ctrl_vec!(ControlWord { read_from: ACCUMULATOR, write_to: (*op1) as u8, alu_left: (*op1) as u8, alu_right: (*op2) as u8, alu_logic: OR, alu_shift: SHIFT_ZERO, ..ControlWord::empty() }), 
Keyword::Xor(op1, op2) => ctrl_vec!(ControlWord { read_from: ACCUMULATOR, write_to: (*op1) as u8, alu_left: (*op1) as u8, alu_right: (*op2) as u8, alu_logic: XOR, alu_shift: SHIFT_ZERO, ..ControlWord::empty() }), Keyword::Cmp(op1, op2) => ctrl_vec!(ControlWord { alu_left: (*op1) as u8, alu_right: (*op2) as u8, alu_subtract: true, alu_shift: UNCHANGED, alu_logic: LOGIC_B, ..ControlWord::empty() }), Keyword::Shl(op1) => ctrl_vec!(ControlWord { read_from: ACCUMULATOR, write_to: (*op1) as u8, alu_left: (*op1) as u8, alu_shift: SHIFT_LEFT, alu_logic: LOGIC_ZERO, ..ControlWord::empty() }), Keyword::Shr(op1) => ctrl_vec!(ControlWord { read_from: ACCUMULATOR, write_to: (*op1) as u8, alu_left: (*op1) as u8, alu_shift: SHIFT_RIGHT, alu_logic: LOGIC_ZERO, ..ControlWord::empty() }), Keyword::Jmp(_) => ctrl_vec!( ControlWord { read_from: PROGRAM_COUNTER, write_to: MEMORY_ADDRESS, ..ControlWord::empty() }, ControlWord { read_from: MEMORY, write_to: PROGRAM_COUNTER, ..ControlWord::empty() } ), Keyword::Jc(addr) => ctrl_vec!(), Keyword::Jz(addr) => ctrl_vec!(), Keyword::Hlt => vec![ControlWord { halt: true, ..ControlWord::empty() }], Keyword::Nop => ctrl_vec!(), } } } #[derive(Copy, Clone, FieldSize)] enum GPR { A = 0, B, C, D, } #[derive(Copy, Clone, FieldSize)] enum MovFrom { A = 0, B = 1, C = 2, D = 3, BS = 5, Acc = 6, } #[derive(Copy, Clone, FieldSize)] enum MovTo { A = 0, B = 1, C = 2, D = 3, BS = 5, Out = 8, } fn generate() {} #[cfg(test)] mod tests { use super::*; fn fetch_cycle() -> Vec<ControlWord> { vec![ ControlWord { read_from: PROGRAM_COUNTER, write_to: MEMORY_ADDRESS, ..ControlWord::empty() }, ControlWord { read_from: MEMORY, write_to: INSTRUCTION, bank_select_enable: false, program_counter_enable: true, ..ControlWord::empty() }, ControlWord { step_reset: true, ..ControlWord::empty() }, ] } #[test] fn ctrl_vec_inserts_fetch_cycle() { let x = ctrl_vec!(); assert_eq!(fetch_cycle(), x); } #[test] fn ctrl_vec_inserts_given_values_before_step_reset() { let last_elem = 
ControlWord::empty(); let x = ctrl_vec!(last_elem); assert_eq!(4, x.len()); assert_eq!(ControlWord::empty(), x[2]); } #[test] fn test() { Keyword::test(); } }
true
b49deb5a09618c9e5e80ecb943a8c9117d9ca5e4
Rust
noobLue/tmc-langs-rust
/tmc-langs-util/src/error.rs
UTF-8
1,943
3.21875
3
[ "Apache-2.0", "MIT", "LicenseRef-scancode-unknown-license-reference" ]
permissive
//! Contains the FileError error type for file operations. use std::path::PathBuf; use thiserror::Error; /// A wrapper for std::io::Error that provides more context for the failed operations. #[derive(Error, Debug)] pub enum FileError { // file_util errors #[error("Failed to open file at {0}")] FileOpen(PathBuf, #[source] std::io::Error), #[error("Failed to read file at {0}")] FileRead(PathBuf, #[source] std::io::Error), #[error("Failed to write file at {0}")] FileWrite(PathBuf, #[source] std::io::Error), #[error("Failed to create file at {0}")] FileCreate(PathBuf, #[source] std::io::Error), #[error("Failed to remove file at {0}")] FileRemove(PathBuf, #[source] std::io::Error), #[error("Failed to copy file from {from} to {to}")] FileCopy { from: PathBuf, to: PathBuf, source: std::io::Error, }, #[error("Failed to create temporary file")] TempFile(#[source] std::io::Error), #[error("Failed to read directory at {0}")] DirRead(PathBuf, #[source] std::io::Error), #[error("Failed to create directory at {0}")] DirCreate(PathBuf, #[source] std::io::Error), #[error("Failed to remove directory at {0}")] DirRemove(PathBuf, #[source] std::io::Error), #[error("Failed to rename file {from} to {to}")] Rename { from: PathBuf, to: PathBuf, source: std::io::Error, }, #[error("Path {0} has no file name")] NoFileName(PathBuf), #[error("Expected {0} to be a directory, but it was a file")] UnexpectedFile(PathBuf), #[error("Failed to write data")] WriteError(#[source] std::io::Error), // lock errors #[error("Failed to lock file at path {0}")] FdLock(PathBuf, #[source] std::io::Error), #[error("Failed to lock {0}: not a file or directory")] InvalidLockPath(PathBuf), #[error("Directory walk error")] Walkdir(#[from] walkdir::Error), }
true
a77ef46298f6628dddb987a7f99f1b8882119fd5
Rust
suspend0/aws-sdk-rust
/sdk/codestar/src/client.rs
UTF-8
87,689
2.578125
3
[ "Apache-2.0" ]
permissive
// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT. #[derive(Debug)] pub(crate) struct Handle< C = aws_smithy_client::erase::DynConnector, M = aws_hyper::AwsMiddleware, R = aws_smithy_client::retry::Standard, > { client: aws_smithy_client::Client<C, M, R>, conf: crate::Config, } /// An ergonomic service client for `CodeStar_20170419`. /// /// This client allows ergonomic access to a `CodeStar_20170419`-shaped service. /// Each method corresponds to an endpoint defined in the service's Smithy model, /// and the request and response shapes are auto-generated from that same model. /// /// # Using a Client /// /// Once you have a client set up, you can access the service's endpoints /// by calling the appropriate method on [`Client`]. Each such method /// returns a request builder for that endpoint, with methods for setting /// the various fields of the request. Once your request is complete, use /// the `send` method to send the request. `send` returns a future, which /// you then have to `.await` to get the service's response. /// /// [builder pattern]: https://rust-lang.github.io/api-guidelines/type-safety.html#c-builder /// [SigV4-signed requests]: https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html #[derive(std::fmt::Debug)] pub struct Client< C = aws_smithy_client::erase::DynConnector, M = aws_hyper::AwsMiddleware, R = aws_smithy_client::retry::Standard, > { handle: std::sync::Arc<Handle<C, M, R>>, } impl<C, M, R> std::clone::Clone for Client<C, M, R> { fn clone(&self) -> Self { Self { handle: self.handle.clone(), } } } #[doc(inline)] pub use aws_smithy_client::Builder; impl<C, M, R> From<aws_smithy_client::Client<C, M, R>> for Client<C, M, R> { fn from(client: aws_smithy_client::Client<C, M, R>) -> Self { Self::with_config(client, crate::Config::builder().build()) } } impl<C, M, R> Client<C, M, R> { /// Creates a client with the given service configuration. 
    pub fn with_config(client: aws_smithy_client::Client<C, M, R>, conf: crate::Config) -> Self {
        Self {
            handle: std::sync::Arc::new(Handle { client, conf }),
        }
    }

    /// Returns the client's configuration.
    pub fn conf(&self) -> &crate::Config {
        &self.handle.conf
    }
}

// NOTE(review): generated pattern — one zero-argument constructor per Smithy
// operation. Each returns a fluent request builder that shares this client's
// handle; nothing is sent until the builder's `send().await` is called.
impl<C, M, R> Client<C, M, R>
where
    C: aws_smithy_client::bounds::SmithyConnector,
    M: aws_smithy_client::bounds::SmithyMiddleware<C>,
    R: aws_smithy_client::retry::NewRequestPolicy,
{
    /// Constructs a fluent builder for the `AssociateTeamMember` operation.
    ///
    /// See [`AssociateTeamMember`](crate::client::fluent_builders::AssociateTeamMember) for more information about the
    /// operation and its arguments.
    pub fn associate_team_member(&self) -> fluent_builders::AssociateTeamMember<C, M, R> {
        fluent_builders::AssociateTeamMember::new(self.handle.clone())
    }
    /// Constructs a fluent builder for the `CreateProject` operation.
    ///
    /// See [`CreateProject`](crate::client::fluent_builders::CreateProject) for more information about the
    /// operation and its arguments.
    pub fn create_project(&self) -> fluent_builders::CreateProject<C, M, R> {
        fluent_builders::CreateProject::new(self.handle.clone())
    }
    /// Constructs a fluent builder for the `CreateUserProfile` operation.
    ///
    /// See [`CreateUserProfile`](crate::client::fluent_builders::CreateUserProfile) for more information about the
    /// operation and its arguments.
    pub fn create_user_profile(&self) -> fluent_builders::CreateUserProfile<C, M, R> {
        fluent_builders::CreateUserProfile::new(self.handle.clone())
    }
    /// Constructs a fluent builder for the `DeleteProject` operation.
    ///
    /// See [`DeleteProject`](crate::client::fluent_builders::DeleteProject) for more information about the
    /// operation and its arguments.
    pub fn delete_project(&self) -> fluent_builders::DeleteProject<C, M, R> {
        fluent_builders::DeleteProject::new(self.handle.clone())
    }
    /// Constructs a fluent builder for the `DeleteUserProfile` operation.
    ///
    /// See [`DeleteUserProfile`](crate::client::fluent_builders::DeleteUserProfile) for more information about the
    /// operation and its arguments.
    pub fn delete_user_profile(&self) -> fluent_builders::DeleteUserProfile<C, M, R> {
        fluent_builders::DeleteUserProfile::new(self.handle.clone())
    }
    /// Constructs a fluent builder for the `DescribeProject` operation.
    ///
    /// See [`DescribeProject`](crate::client::fluent_builders::DescribeProject) for more information about the
    /// operation and its arguments.
    pub fn describe_project(&self) -> fluent_builders::DescribeProject<C, M, R> {
        fluent_builders::DescribeProject::new(self.handle.clone())
    }
    /// Constructs a fluent builder for the `DescribeUserProfile` operation.
    ///
    /// See [`DescribeUserProfile`](crate::client::fluent_builders::DescribeUserProfile) for more information about the
    /// operation and its arguments.
    pub fn describe_user_profile(&self) -> fluent_builders::DescribeUserProfile<C, M, R> {
        fluent_builders::DescribeUserProfile::new(self.handle.clone())
    }
    /// Constructs a fluent builder for the `DisassociateTeamMember` operation.
    ///
    /// See [`DisassociateTeamMember`](crate::client::fluent_builders::DisassociateTeamMember) for more information about the
    /// operation and its arguments.
    pub fn disassociate_team_member(&self) -> fluent_builders::DisassociateTeamMember<C, M, R> {
        fluent_builders::DisassociateTeamMember::new(self.handle.clone())
    }
    /// Constructs a fluent builder for the `ListProjects` operation.
    ///
    /// See [`ListProjects`](crate::client::fluent_builders::ListProjects) for more information about the
    /// operation and its arguments.
    pub fn list_projects(&self) -> fluent_builders::ListProjects<C, M, R> {
        fluent_builders::ListProjects::new(self.handle.clone())
    }
    /// Constructs a fluent builder for the `ListResources` operation.
    ///
    /// See [`ListResources`](crate::client::fluent_builders::ListResources) for more information about the
    /// operation and its arguments.
    pub fn list_resources(&self) -> fluent_builders::ListResources<C, M, R> {
        fluent_builders::ListResources::new(self.handle.clone())
    }
    /// Constructs a fluent builder for the `ListTagsForProject` operation.
    ///
    /// See [`ListTagsForProject`](crate::client::fluent_builders::ListTagsForProject) for more information about the
    /// operation and its arguments.
    pub fn list_tags_for_project(&self) -> fluent_builders::ListTagsForProject<C, M, R> {
        fluent_builders::ListTagsForProject::new(self.handle.clone())
    }
    /// Constructs a fluent builder for the `ListTeamMembers` operation.
    ///
    /// See [`ListTeamMembers`](crate::client::fluent_builders::ListTeamMembers) for more information about the
    /// operation and its arguments.
    pub fn list_team_members(&self) -> fluent_builders::ListTeamMembers<C, M, R> {
        fluent_builders::ListTeamMembers::new(self.handle.clone())
    }
    /// Constructs a fluent builder for the `ListUserProfiles` operation.
    ///
    /// See [`ListUserProfiles`](crate::client::fluent_builders::ListUserProfiles) for more information about the
    /// operation and its arguments.
    pub fn list_user_profiles(&self) -> fluent_builders::ListUserProfiles<C, M, R> {
        fluent_builders::ListUserProfiles::new(self.handle.clone())
    }
    /// Constructs a fluent builder for the `TagProject` operation.
    ///
    /// See [`TagProject`](crate::client::fluent_builders::TagProject) for more information about the
    /// operation and its arguments.
    pub fn tag_project(&self) -> fluent_builders::TagProject<C, M, R> {
        fluent_builders::TagProject::new(self.handle.clone())
    }
    /// Constructs a fluent builder for the `UntagProject` operation.
    ///
    /// See [`UntagProject`](crate::client::fluent_builders::UntagProject) for more information about the
    /// operation and its arguments.
    pub fn untag_project(&self) -> fluent_builders::UntagProject<C, M, R> {
        fluent_builders::UntagProject::new(self.handle.clone())
    }
    /// Constructs a fluent builder for the `UpdateProject` operation.
    ///
    /// See [`UpdateProject`](crate::client::fluent_builders::UpdateProject) for more information about the
    /// operation and its arguments.
    pub fn update_project(&self) -> fluent_builders::UpdateProject<C, M, R> {
        fluent_builders::UpdateProject::new(self.handle.clone())
    }
    /// Constructs a fluent builder for the `UpdateTeamMember` operation.
    ///
    /// See [`UpdateTeamMember`](crate::client::fluent_builders::UpdateTeamMember) for more information about the
    /// operation and its arguments.
    pub fn update_team_member(&self) -> fluent_builders::UpdateTeamMember<C, M, R> {
        fluent_builders::UpdateTeamMember::new(self.handle.clone())
    }
    /// Constructs a fluent builder for the `UpdateUserProfile` operation.
    ///
    /// See [`UpdateUserProfile`](crate::client::fluent_builders::UpdateUserProfile) for more information about the
    /// operation and its arguments.
    pub fn update_user_profile(&self) -> fluent_builders::UpdateUserProfile<C, M, R> {
        fluent_builders::UpdateUserProfile::new(self.handle.clone())
    }
}

pub mod fluent_builders {
    //!
    //! Utilities to ergonomically construct a request to the service.
    //!
    //! Fluent builders are created through the [`Client`](crate::client::Client) by calling
    //! one of its operation methods. After parameters are set using the builder methods,
    //! the `send` method can be called to initiate the request.
    //!
    /// Fluent builder constructing a request to `AssociateTeamMember`.
    ///
    /// <p>Adds an IAM user to the team for an AWS CodeStar project.</p>
    // NOTE(review): generated pattern repeated for every operation below — the
    // builder holds the shared `Handle` plus an input builder; each setter
    // delegates to `inner` and returns `self` for chaining; `send()` consumes
    // the builder, builds the input, makes the operation, and dispatches it.
    #[derive(std::fmt::Debug)]
    pub struct AssociateTeamMember<
        C = aws_smithy_client::erase::DynConnector,
        M = aws_hyper::AwsMiddleware,
        R = aws_smithy_client::retry::Standard,
    > {
        handle: std::sync::Arc<super::Handle<C, M, R>>,
        inner: crate::input::associate_team_member_input::Builder,
    }
    impl<C, M, R> AssociateTeamMember<C, M, R>
    where
        C: aws_smithy_client::bounds::SmithyConnector,
        M: aws_smithy_client::bounds::SmithyMiddleware<C>,
        R: aws_smithy_client::retry::NewRequestPolicy,
    {
        /// Creates a new `AssociateTeamMember`.
        pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
            Self {
                handle,
                inner: Default::default(),
            }
        }

        /// Sends the request and returns the response.
        ///
        /// If an error occurs, an `SdkError` will be returned with additional details that
        /// can be matched against.
        ///
        /// By default, any retryable failures will be retried twice. Retry behavior
        /// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
        /// set when configuring the client.
        pub async fn send(
            self,
        ) -> std::result::Result<
            crate::output::AssociateTeamMemberOutput,
            aws_smithy_http::result::SdkError<crate::error::AssociateTeamMemberError>,
        >
        where
            R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
                crate::input::AssociateTeamMemberInputOperationOutputAlias,
                crate::output::AssociateTeamMemberOutput,
                crate::error::AssociateTeamMemberError,
                crate::input::AssociateTeamMemberInputOperationRetryAlias,
            >,
        {
            // Two construction phases, both surfaced as `ConstructionFailure`:
            // building the typed input, then turning it into a signed operation.
            let input = self.inner.build().map_err(|err| {
                aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
            })?;
            let op = input
                .make_operation(&self.handle.conf)
                .await
                .map_err(|err| {
                    aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
                })?;
            self.handle.client.call(op).await
        }
        /// <p>The ID of the project to which you will add the IAM user.</p>
        pub fn project_id(mut self, inp: impl Into<std::string::String>) -> Self {
            self.inner = self.inner.project_id(inp);
            self
        }
        /// <p>The ID of the project to which you will add the IAM user.</p>
        pub fn set_project_id(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.inner = self.inner.set_project_id(input);
            self
        }
        /// <p>A user- or system-generated token that identifies the entity that requested the team
        /// member association to the project. This token can be used to repeat the request.</p>
        pub fn client_request_token(mut self, inp: impl Into<std::string::String>) -> Self {
            self.inner = self.inner.client_request_token(inp);
            self
        }
        /// <p>A user- or system-generated token that identifies the entity that requested the team
        /// member association to the project. This token can be used to repeat the request.</p>
        pub fn set_client_request_token(
            mut self,
            input: std::option::Option<std::string::String>,
        ) -> Self {
            self.inner = self.inner.set_client_request_token(input);
            self
        }
        /// <p>The Amazon Resource Name (ARN) for the IAM user you want to add to the AWS CodeStar
        /// project.</p>
        pub fn user_arn(mut self, inp: impl Into<std::string::String>) -> Self {
            self.inner = self.inner.user_arn(inp);
            self
        }
        /// <p>The Amazon Resource Name (ARN) for the IAM user you want to add to the AWS CodeStar
        /// project.</p>
        pub fn set_user_arn(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.inner = self.inner.set_user_arn(input);
            self
        }
        /// <p>The AWS CodeStar project role that will apply to this user. This role determines what actions
        /// a user can take in an AWS CodeStar project.</p>
        pub fn project_role(mut self, inp: impl Into<std::string::String>) -> Self {
            self.inner = self.inner.project_role(inp);
            self
        }
        /// <p>The AWS CodeStar project role that will apply to this user. This role determines what actions
        /// a user can take in an AWS CodeStar project.</p>
        pub fn set_project_role(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.inner = self.inner.set_project_role(input);
            self
        }
        /// <p>Whether the team member is allowed to use an SSH public/private key pair to remotely
        /// access project resources, for example Amazon EC2 instances.</p>
        pub fn remote_access_allowed(mut self, inp: bool) -> Self {
            self.inner = self.inner.remote_access_allowed(inp);
            self
        }
        /// <p>Whether the team member is allowed to use an SSH public/private key pair to remotely
        /// access project resources, for example Amazon EC2 instances.</p>
        pub fn set_remote_access_allowed(mut self, input: std::option::Option<bool>) -> Self {
            self.inner = self.inner.set_remote_access_allowed(input);
            self
        }
    }
    /// Fluent builder constructing a request to `CreateProject`.
    ///
    /// <p>Creates a project, including project resources. This action creates a project based on
    /// a submitted project request. A set of source code files and a toolchain template file
    /// can be included with the project request. If these are not provided, an empty project is created.</p>
    #[derive(std::fmt::Debug)]
    pub struct CreateProject<
        C = aws_smithy_client::erase::DynConnector,
        M = aws_hyper::AwsMiddleware,
        R = aws_smithy_client::retry::Standard,
    > {
        handle: std::sync::Arc<super::Handle<C, M, R>>,
        inner: crate::input::create_project_input::Builder,
    }
    impl<C, M, R> CreateProject<C, M, R>
    where
        C: aws_smithy_client::bounds::SmithyConnector,
        M: aws_smithy_client::bounds::SmithyMiddleware<C>,
        R: aws_smithy_client::retry::NewRequestPolicy,
    {
        /// Creates a new `CreateProject`.
        pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
            Self {
                handle,
                inner: Default::default(),
            }
        }

        /// Sends the request and returns the response.
        ///
        /// If an error occurs, an `SdkError` will be returned with additional details that
        /// can be matched against.
        ///
        /// By default, any retryable failures will be retried twice. Retry behavior
        /// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
        /// set when configuring the client.
pub async fn send( self, ) -> std::result::Result< crate::output::CreateProjectOutput, aws_smithy_http::result::SdkError<crate::error::CreateProjectError>, > where R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy< crate::input::CreateProjectInputOperationOutputAlias, crate::output::CreateProjectOutput, crate::error::CreateProjectError, crate::input::CreateProjectInputOperationRetryAlias, >, { let input = self.inner.build().map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; let op = input .make_operation(&self.handle.conf) .await .map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; self.handle.client.call(op).await } /// <p>The display name for the project to be created in AWS CodeStar.</p> pub fn name(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.name(inp); self } /// <p>The display name for the project to be created in AWS CodeStar.</p> pub fn set_name(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_name(input); self } /// <p>The ID of the project to be created in AWS CodeStar.</p> pub fn id(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.id(inp); self } /// <p>The ID of the project to be created in AWS CodeStar.</p> pub fn set_id(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_id(input); self } /// <p>The description of the project, if any.</p> pub fn description(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.description(inp); self } /// <p>The description of the project, if any.</p> pub fn set_description(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_description(input); self } /// <p>A user- or system-generated token that identifies the entity that requested project /// creation. 
This token can be used to repeat the request.</p> pub fn client_request_token(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.client_request_token(inp); self } /// <p>A user- or system-generated token that identifies the entity that requested project /// creation. This token can be used to repeat the request.</p> pub fn set_client_request_token( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.inner = self.inner.set_client_request_token(input); self } /// Appends an item to `sourceCode`. /// /// To override the contents of this collection use [`set_source_code`](Self::set_source_code). /// /// <p>A list of the Code objects submitted with the project request. If this /// parameter is specified, the request must also include the toolchain parameter.</p> pub fn source_code(mut self, inp: impl Into<crate::model::Code>) -> Self { self.inner = self.inner.source_code(inp); self } /// <p>A list of the Code objects submitted with the project request. If this /// parameter is specified, the request must also include the toolchain parameter.</p> pub fn set_source_code( mut self, input: std::option::Option<std::vec::Vec<crate::model::Code>>, ) -> Self { self.inner = self.inner.set_source_code(input); self } /// <p>The name of the toolchain template file submitted with the project request. If /// this parameter is specified, the request must also include the sourceCode parameter.</p> pub fn toolchain(mut self, inp: crate::model::Toolchain) -> Self { self.inner = self.inner.toolchain(inp); self } /// <p>The name of the toolchain template file submitted with the project request. If /// this parameter is specified, the request must also include the sourceCode parameter.</p> pub fn set_toolchain( mut self, input: std::option::Option<crate::model::Toolchain>, ) -> Self { self.inner = self.inner.set_toolchain(input); self } /// Adds a key-value pair to `tags`. 
/// /// To override the contents of this collection use [`set_tags`](Self::set_tags). /// /// <p>The tags created for the project.</p> pub fn tags( mut self, k: impl Into<std::string::String>, v: impl Into<std::string::String>, ) -> Self { self.inner = self.inner.tags(k, v); self } /// <p>The tags created for the project.</p> pub fn set_tags( mut self, input: std::option::Option< std::collections::HashMap<std::string::String, std::string::String>, >, ) -> Self { self.inner = self.inner.set_tags(input); self } } /// Fluent builder constructing a request to `CreateUserProfile`. /// /// <p>Creates a profile for a user that includes user preferences, such as the display name /// and email address assocciated with the user, in AWS CodeStar. The user profile is not /// project-specific. Information in the user profile is displayed wherever the user's information /// appears to other users in AWS CodeStar.</p> #[derive(std::fmt::Debug)] pub struct CreateUserProfile< C = aws_smithy_client::erase::DynConnector, M = aws_hyper::AwsMiddleware, R = aws_smithy_client::retry::Standard, > { handle: std::sync::Arc<super::Handle<C, M, R>>, inner: crate::input::create_user_profile_input::Builder, } impl<C, M, R> CreateUserProfile<C, M, R> where C: aws_smithy_client::bounds::SmithyConnector, M: aws_smithy_client::bounds::SmithyMiddleware<C>, R: aws_smithy_client::retry::NewRequestPolicy, { /// Creates a new `CreateUserProfile`. pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self { Self { handle, inner: Default::default(), } } /// Sends the request and returns the response. /// /// If an error occurs, an `SdkError` will be returned with additional details that /// can be matched against. /// /// By default, any retryable failures will be retried twice. Retry behavior /// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be /// set when configuring the client. 
pub async fn send( self, ) -> std::result::Result< crate::output::CreateUserProfileOutput, aws_smithy_http::result::SdkError<crate::error::CreateUserProfileError>, > where R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy< crate::input::CreateUserProfileInputOperationOutputAlias, crate::output::CreateUserProfileOutput, crate::error::CreateUserProfileError, crate::input::CreateUserProfileInputOperationRetryAlias, >, { let input = self.inner.build().map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; let op = input .make_operation(&self.handle.conf) .await .map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; self.handle.client.call(op).await } /// <p>The Amazon Resource Name (ARN) of the user in IAM.</p> pub fn user_arn(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.user_arn(inp); self } /// <p>The Amazon Resource Name (ARN) of the user in IAM.</p> pub fn set_user_arn(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_user_arn(input); self } /// <p>The name that will be displayed as the friendly name for the user in AWS CodeStar. </p> pub fn display_name(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.display_name(inp); self } /// <p>The name that will be displayed as the friendly name for the user in AWS CodeStar. 
</p> pub fn set_display_name(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_display_name(input); self } /// <p>The email address that will be displayed as part of the user's profile in /// AWS CodeStar.</p> pub fn email_address(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.email_address(inp); self } /// <p>The email address that will be displayed as part of the user's profile in /// AWS CodeStar.</p> pub fn set_email_address( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.inner = self.inner.set_email_address(input); self } /// <p>The SSH public key associated with the user in AWS CodeStar. If a project owner allows the /// user remote access to project resources, this public key will be used along with the user's /// private key for SSH access.</p> pub fn ssh_public_key(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.ssh_public_key(inp); self } /// <p>The SSH public key associated with the user in AWS CodeStar. If a project owner allows the /// user remote access to project resources, this public key will be used along with the user's /// private key for SSH access.</p> pub fn set_ssh_public_key( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.inner = self.inner.set_ssh_public_key(input); self } } /// Fluent builder constructing a request to `DeleteProject`. /// /// <p>Deletes a project, including project resources. 
Does not delete users associated with /// the project, but does delete the IAM roles that allowed access to the project.</p> #[derive(std::fmt::Debug)] pub struct DeleteProject< C = aws_smithy_client::erase::DynConnector, M = aws_hyper::AwsMiddleware, R = aws_smithy_client::retry::Standard, > { handle: std::sync::Arc<super::Handle<C, M, R>>, inner: crate::input::delete_project_input::Builder, } impl<C, M, R> DeleteProject<C, M, R> where C: aws_smithy_client::bounds::SmithyConnector, M: aws_smithy_client::bounds::SmithyMiddleware<C>, R: aws_smithy_client::retry::NewRequestPolicy, { /// Creates a new `DeleteProject`. pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self { Self { handle, inner: Default::default(), } } /// Sends the request and returns the response. /// /// If an error occurs, an `SdkError` will be returned with additional details that /// can be matched against. /// /// By default, any retryable failures will be retried twice. Retry behavior /// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be /// set when configuring the client. 
pub async fn send( self, ) -> std::result::Result< crate::output::DeleteProjectOutput, aws_smithy_http::result::SdkError<crate::error::DeleteProjectError>, > where R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy< crate::input::DeleteProjectInputOperationOutputAlias, crate::output::DeleteProjectOutput, crate::error::DeleteProjectError, crate::input::DeleteProjectInputOperationRetryAlias, >, { let input = self.inner.build().map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; let op = input .make_operation(&self.handle.conf) .await .map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; self.handle.client.call(op).await } /// <p>The ID of the project to be deleted in AWS CodeStar.</p> pub fn id(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.id(inp); self } /// <p>The ID of the project to be deleted in AWS CodeStar.</p> pub fn set_id(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_id(input); self } /// <p>A user- or system-generated token that identifies the entity that requested project /// deletion. This token can be used to repeat the request. </p> pub fn client_request_token(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.client_request_token(inp); self } /// <p>A user- or system-generated token that identifies the entity that requested project /// deletion. This token can be used to repeat the request. </p> pub fn set_client_request_token( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.inner = self.inner.set_client_request_token(input); self } /// <p>Whether to send a delete request for the primary stack in AWS CloudFormation originally /// used to generate the project and its resources. This option will delete all AWS resources for /// the project (except for any buckets in Amazon S3) as well as deleting the project itself. 
/// Recommended for most use cases.</p> pub fn delete_stack(mut self, inp: bool) -> Self { self.inner = self.inner.delete_stack(inp); self } /// <p>Whether to send a delete request for the primary stack in AWS CloudFormation originally /// used to generate the project and its resources. This option will delete all AWS resources for /// the project (except for any buckets in Amazon S3) as well as deleting the project itself. /// Recommended for most use cases.</p> pub fn set_delete_stack(mut self, input: std::option::Option<bool>) -> Self { self.inner = self.inner.set_delete_stack(input); self } } /// Fluent builder constructing a request to `DeleteUserProfile`. /// /// <p>Deletes a user profile in AWS CodeStar, including all personal preference data associated with /// that profile, such as display name and email address. It does not delete the history of that /// user, for example the history of commits made by that user.</p> #[derive(std::fmt::Debug)] pub struct DeleteUserProfile< C = aws_smithy_client::erase::DynConnector, M = aws_hyper::AwsMiddleware, R = aws_smithy_client::retry::Standard, > { handle: std::sync::Arc<super::Handle<C, M, R>>, inner: crate::input::delete_user_profile_input::Builder, } impl<C, M, R> DeleteUserProfile<C, M, R> where C: aws_smithy_client::bounds::SmithyConnector, M: aws_smithy_client::bounds::SmithyMiddleware<C>, R: aws_smithy_client::retry::NewRequestPolicy, { /// Creates a new `DeleteUserProfile`. pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self { Self { handle, inner: Default::default(), } } /// Sends the request and returns the response. /// /// If an error occurs, an `SdkError` will be returned with additional details that /// can be matched against. /// /// By default, any retryable failures will be retried twice. Retry behavior /// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be /// set when configuring the client. 
pub async fn send( self, ) -> std::result::Result< crate::output::DeleteUserProfileOutput, aws_smithy_http::result::SdkError<crate::error::DeleteUserProfileError>, > where R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy< crate::input::DeleteUserProfileInputOperationOutputAlias, crate::output::DeleteUserProfileOutput, crate::error::DeleteUserProfileError, crate::input::DeleteUserProfileInputOperationRetryAlias, >, { let input = self.inner.build().map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; let op = input .make_operation(&self.handle.conf) .await .map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; self.handle.client.call(op).await } /// <p>The Amazon Resource Name (ARN) of the user to delete from AWS CodeStar.</p> pub fn user_arn(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.user_arn(inp); self } /// <p>The Amazon Resource Name (ARN) of the user to delete from AWS CodeStar.</p> pub fn set_user_arn(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_user_arn(input); self } } /// Fluent builder constructing a request to `DescribeProject`. /// /// <p>Describes a project and its resources.</p> #[derive(std::fmt::Debug)] pub struct DescribeProject< C = aws_smithy_client::erase::DynConnector, M = aws_hyper::AwsMiddleware, R = aws_smithy_client::retry::Standard, > { handle: std::sync::Arc<super::Handle<C, M, R>>, inner: crate::input::describe_project_input::Builder, } impl<C, M, R> DescribeProject<C, M, R> where C: aws_smithy_client::bounds::SmithyConnector, M: aws_smithy_client::bounds::SmithyMiddleware<C>, R: aws_smithy_client::retry::NewRequestPolicy, { /// Creates a new `DescribeProject`. pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self { Self { handle, inner: Default::default(), } } /// Sends the request and returns the response. 
/// /// If an error occurs, an `SdkError` will be returned with additional details that /// can be matched against. /// /// By default, any retryable failures will be retried twice. Retry behavior /// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be /// set when configuring the client. pub async fn send( self, ) -> std::result::Result< crate::output::DescribeProjectOutput, aws_smithy_http::result::SdkError<crate::error::DescribeProjectError>, > where R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy< crate::input::DescribeProjectInputOperationOutputAlias, crate::output::DescribeProjectOutput, crate::error::DescribeProjectError, crate::input::DescribeProjectInputOperationRetryAlias, >, { let input = self.inner.build().map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; let op = input .make_operation(&self.handle.conf) .await .map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; self.handle.client.call(op).await } /// <p>The ID of the project.</p> pub fn id(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.id(inp); self } /// <p>The ID of the project.</p> pub fn set_id(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_id(input); self } } /// Fluent builder constructing a request to `DescribeUserProfile`. 
/// /// <p>Describes a user in AWS CodeStar and the user attributes across all projects.</p> #[derive(std::fmt::Debug)] pub struct DescribeUserProfile< C = aws_smithy_client::erase::DynConnector, M = aws_hyper::AwsMiddleware, R = aws_smithy_client::retry::Standard, > { handle: std::sync::Arc<super::Handle<C, M, R>>, inner: crate::input::describe_user_profile_input::Builder, } impl<C, M, R> DescribeUserProfile<C, M, R> where C: aws_smithy_client::bounds::SmithyConnector, M: aws_smithy_client::bounds::SmithyMiddleware<C>, R: aws_smithy_client::retry::NewRequestPolicy, { /// Creates a new `DescribeUserProfile`. pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self { Self { handle, inner: Default::default(), } } /// Sends the request and returns the response. /// /// If an error occurs, an `SdkError` will be returned with additional details that /// can be matched against. /// /// By default, any retryable failures will be retried twice. Retry behavior /// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be /// set when configuring the client. 
pub async fn send( self, ) -> std::result::Result< crate::output::DescribeUserProfileOutput, aws_smithy_http::result::SdkError<crate::error::DescribeUserProfileError>, > where R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy< crate::input::DescribeUserProfileInputOperationOutputAlias, crate::output::DescribeUserProfileOutput, crate::error::DescribeUserProfileError, crate::input::DescribeUserProfileInputOperationRetryAlias, >, { let input = self.inner.build().map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; let op = input .make_operation(&self.handle.conf) .await .map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; self.handle.client.call(op).await } /// <p>The Amazon Resource Name (ARN) of the user.</p> pub fn user_arn(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.user_arn(inp); self } /// <p>The Amazon Resource Name (ARN) of the user.</p> pub fn set_user_arn(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_user_arn(input); self } } /// Fluent builder constructing a request to `DisassociateTeamMember`. /// /// <p>Removes a user from a project. Removing a user from a project also removes the IAM /// policies from that user that allowed access to the project and its resources. Disassociating a /// team member does not remove that user's profile from AWS CodeStar. 
It does not remove the user from /// IAM.</p> #[derive(std::fmt::Debug)] pub struct DisassociateTeamMember< C = aws_smithy_client::erase::DynConnector, M = aws_hyper::AwsMiddleware, R = aws_smithy_client::retry::Standard, > { handle: std::sync::Arc<super::Handle<C, M, R>>, inner: crate::input::disassociate_team_member_input::Builder, } impl<C, M, R> DisassociateTeamMember<C, M, R> where C: aws_smithy_client::bounds::SmithyConnector, M: aws_smithy_client::bounds::SmithyMiddleware<C>, R: aws_smithy_client::retry::NewRequestPolicy, { /// Creates a new `DisassociateTeamMember`. pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self { Self { handle, inner: Default::default(), } } /// Sends the request and returns the response. /// /// If an error occurs, an `SdkError` will be returned with additional details that /// can be matched against. /// /// By default, any retryable failures will be retried twice. Retry behavior /// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be /// set when configuring the client. 
pub async fn send( self, ) -> std::result::Result< crate::output::DisassociateTeamMemberOutput, aws_smithy_http::result::SdkError<crate::error::DisassociateTeamMemberError>, > where R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy< crate::input::DisassociateTeamMemberInputOperationOutputAlias, crate::output::DisassociateTeamMemberOutput, crate::error::DisassociateTeamMemberError, crate::input::DisassociateTeamMemberInputOperationRetryAlias, >, { let input = self.inner.build().map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; let op = input .make_operation(&self.handle.conf) .await .map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; self.handle.client.call(op).await } /// <p>The ID of the AWS CodeStar project from which you want to remove a team member.</p> pub fn project_id(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.project_id(inp); self } /// <p>The ID of the AWS CodeStar project from which you want to remove a team member.</p> pub fn set_project_id(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_project_id(input); self } /// <p>The Amazon Resource Name (ARN) of the IAM user or group whom you want to remove from /// the project.</p> pub fn user_arn(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.user_arn(inp); self } /// <p>The Amazon Resource Name (ARN) of the IAM user or group whom you want to remove from /// the project.</p> pub fn set_user_arn(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_user_arn(input); self } } /// Fluent builder constructing a request to `ListProjects`. 
///
/// <p>Lists all projects in AWS CodeStar associated with your AWS account.</p>
#[derive(std::fmt::Debug)]
pub struct ListProjects<
    C = aws_smithy_client::erase::DynConnector,
    M = aws_hyper::AwsMiddleware,
    R = aws_smithy_client::retry::Standard,
> {
    handle: std::sync::Arc<super::Handle<C, M, R>>,
    inner: crate::input::list_projects_input::Builder,
}
impl<C, M, R> ListProjects<C, M, R>
where
    C: aws_smithy_client::bounds::SmithyConnector,
    M: aws_smithy_client::bounds::SmithyMiddleware<C>,
    R: aws_smithy_client::retry::NewRequestPolicy,
{
    /// Creates a new `ListProjects`.
    pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
        Self { handle, inner: Default::default() }
    }
    /// Sends the request and returns the response.
    ///
    /// If an error occurs, an `SdkError` will be returned with additional details that
    /// can be matched against.
    ///
    /// By default, any retryable failures will be retried twice. Retry behavior
    /// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
    /// set when configuring the client.
    pub async fn send(
        self,
    ) -> std::result::Result<
        crate::output::ListProjectsOutput,
        aws_smithy_http::result::SdkError<crate::error::ListProjectsError>,
    >
    where
        R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
            crate::input::ListProjectsInputOperationOutputAlias,
            crate::output::ListProjectsOutput,
            crate::error::ListProjectsError,
            crate::input::ListProjectsInputOperationRetryAlias,
        >,
    {
        // Build the input and turn it into an operation; both steps can only
        // fail locally, hence `ConstructionFailure`.
        let input = self
            .inner
            .build()
            .map_err(|e| aws_smithy_http::result::SdkError::ConstructionFailure(e.into()))?;
        let operation = input
            .make_operation(&self.handle.conf)
            .await
            .map_err(|e| aws_smithy_http::result::SdkError::ConstructionFailure(e.into()))?;
        self.handle.client.call(operation).await
    }
    /// <p>The continuation token to be used to return the next set of results, if the results
    /// cannot be returned in one response.</p>
    pub fn next_token(mut self, value: impl Into<std::string::String>) -> Self {
        self.inner = self.inner.next_token(value);
        self
    }
    /// <p>The continuation token to be used to return the next set of results, if the results
    /// cannot be returned in one response.</p>
    pub fn set_next_token(mut self, input: std::option::Option<std::string::String>) -> Self {
        self.inner = self.inner.set_next_token(input);
        self
    }
    /// <p>The maximum amount of data that can be contained in a single set of results.</p>
    pub fn max_results(mut self, value: i32) -> Self {
        self.inner = self.inner.max_results(value);
        self
    }
    /// <p>The maximum amount of data that can be contained in a single set of results.</p>
    pub fn set_max_results(mut self, input: std::option::Option<i32>) -> Self {
        self.inner = self.inner.set_max_results(input);
        self
    }
}
/// Fluent builder constructing a request to `ListResources`.
///
/// <p>Lists resources associated with a project in AWS CodeStar.</p>
#[derive(std::fmt::Debug)]
pub struct ListResources<
    C = aws_smithy_client::erase::DynConnector,
    M = aws_hyper::AwsMiddleware,
    R = aws_smithy_client::retry::Standard,
> {
    handle: std::sync::Arc<super::Handle<C, M, R>>,
    inner: crate::input::list_resources_input::Builder,
}
impl<C, M, R> ListResources<C, M, R>
where
    C: aws_smithy_client::bounds::SmithyConnector,
    M: aws_smithy_client::bounds::SmithyMiddleware<C>,
    R: aws_smithy_client::retry::NewRequestPolicy,
{
    /// Creates a new `ListResources`.
    pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
        Self { handle, inner: Default::default() }
    }
    /// Sends the request and returns the response.
    ///
    /// If an error occurs, an `SdkError` will be returned with additional details that
    /// can be matched against.
    ///
    /// By default, any retryable failures will be retried twice. Retry behavior
    /// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
    /// set when configuring the client.
    pub async fn send(
        self,
    ) -> std::result::Result<
        crate::output::ListResourcesOutput,
        aws_smithy_http::result::SdkError<crate::error::ListResourcesError>,
    >
    where
        R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
            crate::input::ListResourcesInputOperationOutputAlias,
            crate::output::ListResourcesOutput,
            crate::error::ListResourcesError,
            crate::input::ListResourcesInputOperationRetryAlias,
        >,
    {
        // Local request assembly; failures surface as `ConstructionFailure`.
        let input = self
            .inner
            .build()
            .map_err(|e| aws_smithy_http::result::SdkError::ConstructionFailure(e.into()))?;
        let operation = input
            .make_operation(&self.handle.conf)
            .await
            .map_err(|e| aws_smithy_http::result::SdkError::ConstructionFailure(e.into()))?;
        self.handle.client.call(operation).await
    }
    /// <p>The ID of the project.</p>
    pub fn project_id(mut self, value: impl Into<std::string::String>) -> Self {
        self.inner = self.inner.project_id(value);
        self
    }
    /// <p>The ID of the project.</p>
    pub fn set_project_id(mut self, input: std::option::Option<std::string::String>) -> Self {
        self.inner = self.inner.set_project_id(input);
        self
    }
    /// <p>The continuation token for the next set of results, if the results cannot be returned
    /// in one response.</p>
    pub fn next_token(mut self, value: impl Into<std::string::String>) -> Self {
        self.inner = self.inner.next_token(value);
        self
    }
    /// <p>The continuation token for the next set of results, if the results cannot be returned
    /// in one response.</p>
    pub fn set_next_token(mut self, input: std::option::Option<std::string::String>) -> Self {
        self.inner = self.inner.set_next_token(input);
        self
    }
    /// <p>The maximum amount of data that can be contained in a single set of results.</p>
    pub fn max_results(mut self, value: i32) -> Self {
        self.inner = self.inner.max_results(value);
        self
    }
    /// <p>The maximum amount of data that can be contained in a single set of results.</p>
    pub fn set_max_results(mut self, input: std::option::Option<i32>) -> Self {
        self.inner = self.inner.set_max_results(input);
        self
    }
}
/// Fluent builder constructing a request to
`ListTagsForProject`.
///
/// <p>Gets the tags for a project.</p>
#[derive(std::fmt::Debug)]
pub struct ListTagsForProject<
    C = aws_smithy_client::erase::DynConnector,
    M = aws_hyper::AwsMiddleware,
    R = aws_smithy_client::retry::Standard,
> {
    handle: std::sync::Arc<super::Handle<C, M, R>>,
    inner: crate::input::list_tags_for_project_input::Builder,
}
impl<C, M, R> ListTagsForProject<C, M, R>
where
    C: aws_smithy_client::bounds::SmithyConnector,
    M: aws_smithy_client::bounds::SmithyMiddleware<C>,
    R: aws_smithy_client::retry::NewRequestPolicy,
{
    /// Creates a new `ListTagsForProject`.
    pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
        Self { handle, inner: Default::default() }
    }
    /// Sends the request and returns the response.
    ///
    /// If an error occurs, an `SdkError` will be returned with additional details that
    /// can be matched against.
    ///
    /// By default, any retryable failures will be retried twice. Retry behavior
    /// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
    /// set when configuring the client.
    pub async fn send(
        self,
    ) -> std::result::Result<
        crate::output::ListTagsForProjectOutput,
        aws_smithy_http::result::SdkError<crate::error::ListTagsForProjectError>,
    >
    where
        R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
            crate::input::ListTagsForProjectInputOperationOutputAlias,
            crate::output::ListTagsForProjectOutput,
            crate::error::ListTagsForProjectError,
            crate::input::ListTagsForProjectInputOperationRetryAlias,
        >,
    {
        // Local request assembly; failures surface as `ConstructionFailure`.
        let input = self
            .inner
            .build()
            .map_err(|e| aws_smithy_http::result::SdkError::ConstructionFailure(e.into()))?;
        let operation = input
            .make_operation(&self.handle.conf)
            .await
            .map_err(|e| aws_smithy_http::result::SdkError::ConstructionFailure(e.into()))?;
        self.handle.client.call(operation).await
    }
    /// <p>The ID of the project to get tags for.</p>
    pub fn id(mut self, value: impl Into<std::string::String>) -> Self {
        self.inner = self.inner.id(value);
        self
    }
    /// <p>The ID of the project to get tags for.</p>
    pub fn set_id(mut self, input: std::option::Option<std::string::String>) -> Self {
        self.inner = self.inner.set_id(input);
        self
    }
    /// <p>Reserved for future use.</p>
    pub fn next_token(mut self, value: impl Into<std::string::String>) -> Self {
        self.inner = self.inner.next_token(value);
        self
    }
    /// <p>Reserved for future use.</p>
    pub fn set_next_token(mut self, input: std::option::Option<std::string::String>) -> Self {
        self.inner = self.inner.set_next_token(input);
        self
    }
    /// <p>Reserved for future use.</p>
    pub fn max_results(mut self, value: i32) -> Self {
        self.inner = self.inner.max_results(value);
        self
    }
    /// <p>Reserved for future use.</p>
    pub fn set_max_results(mut self, input: std::option::Option<i32>) -> Self {
        self.inner = self.inner.set_max_results(input);
        self
    }
}
/// Fluent builder constructing a request to `ListTeamMembers`.
/// /// <p>Lists all team members associated with a project.</p> #[derive(std::fmt::Debug)] pub struct ListTeamMembers< C = aws_smithy_client::erase::DynConnector, M = aws_hyper::AwsMiddleware, R = aws_smithy_client::retry::Standard, > { handle: std::sync::Arc<super::Handle<C, M, R>>, inner: crate::input::list_team_members_input::Builder, } impl<C, M, R> ListTeamMembers<C, M, R> where C: aws_smithy_client::bounds::SmithyConnector, M: aws_smithy_client::bounds::SmithyMiddleware<C>, R: aws_smithy_client::retry::NewRequestPolicy, { /// Creates a new `ListTeamMembers`. pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self { Self { handle, inner: Default::default(), } } /// Sends the request and returns the response. /// /// If an error occurs, an `SdkError` will be returned with additional details that /// can be matched against. /// /// By default, any retryable failures will be retried twice. Retry behavior /// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be /// set when configuring the client. 
pub async fn send( self, ) -> std::result::Result< crate::output::ListTeamMembersOutput, aws_smithy_http::result::SdkError<crate::error::ListTeamMembersError>, > where R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy< crate::input::ListTeamMembersInputOperationOutputAlias, crate::output::ListTeamMembersOutput, crate::error::ListTeamMembersError, crate::input::ListTeamMembersInputOperationRetryAlias, >, { let input = self.inner.build().map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; let op = input .make_operation(&self.handle.conf) .await .map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; self.handle.client.call(op).await } /// <p>The ID of the project for which you want to list team members.</p> pub fn project_id(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.project_id(inp); self } /// <p>The ID of the project for which you want to list team members.</p> pub fn set_project_id(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_project_id(input); self } /// <p>The continuation token for the next set of results, if the results cannot be returned /// in one response.</p> pub fn next_token(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.next_token(inp); self } /// <p>The continuation token for the next set of results, if the results cannot be returned /// in one response.</p> pub fn set_next_token(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_next_token(input); self } /// <p>The maximum number of team members you want returned in a response.</p> pub fn max_results(mut self, inp: i32) -> Self { self.inner = self.inner.max_results(inp); self } /// <p>The maximum number of team members you want returned in a response.</p> pub fn set_max_results(mut self, input: std::option::Option<i32>) -> Self { self.inner = 
self.inner.set_max_results(input);
        self
    }
}
/// Fluent builder constructing a request to `ListUserProfiles`.
///
/// <p>Lists all the user profiles configured for your AWS account in AWS CodeStar.</p>
#[derive(std::fmt::Debug)]
pub struct ListUserProfiles<
    C = aws_smithy_client::erase::DynConnector,
    M = aws_hyper::AwsMiddleware,
    R = aws_smithy_client::retry::Standard,
> {
    handle: std::sync::Arc<super::Handle<C, M, R>>,
    inner: crate::input::list_user_profiles_input::Builder,
}
impl<C, M, R> ListUserProfiles<C, M, R>
where
    C: aws_smithy_client::bounds::SmithyConnector,
    M: aws_smithy_client::bounds::SmithyMiddleware<C>,
    R: aws_smithy_client::retry::NewRequestPolicy,
{
    /// Creates a new `ListUserProfiles`.
    pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
        Self { handle, inner: Default::default() }
    }
    /// Sends the request and returns the response.
    ///
    /// If an error occurs, an `SdkError` will be returned with additional details that
    /// can be matched against.
    ///
    /// By default, any retryable failures will be retried twice. Retry behavior
    /// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
    /// set when configuring the client.
    pub async fn send(
        self,
    ) -> std::result::Result<
        crate::output::ListUserProfilesOutput,
        aws_smithy_http::result::SdkError<crate::error::ListUserProfilesError>,
    >
    where
        R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
            crate::input::ListUserProfilesInputOperationOutputAlias,
            crate::output::ListUserProfilesOutput,
            crate::error::ListUserProfilesError,
            crate::input::ListUserProfilesInputOperationRetryAlias,
        >,
    {
        // Local request assembly; failures surface as `ConstructionFailure`.
        let input = self
            .inner
            .build()
            .map_err(|e| aws_smithy_http::result::SdkError::ConstructionFailure(e.into()))?;
        let operation = input
            .make_operation(&self.handle.conf)
            .await
            .map_err(|e| aws_smithy_http::result::SdkError::ConstructionFailure(e.into()))?;
        self.handle.client.call(operation).await
    }
    /// <p>The continuation token for the next set of results, if the results cannot be returned
    /// in one response.</p>
    pub fn next_token(mut self, value: impl Into<std::string::String>) -> Self {
        self.inner = self.inner.next_token(value);
        self
    }
    /// <p>The continuation token for the next set of results, if the results cannot be returned
    /// in one response.</p>
    pub fn set_next_token(mut self, input: std::option::Option<std::string::String>) -> Self {
        self.inner = self.inner.set_next_token(input);
        self
    }
    /// <p>The maximum number of results to return in a response.</p>
    pub fn max_results(mut self, value: i32) -> Self {
        self.inner = self.inner.max_results(value);
        self
    }
    /// <p>The maximum number of results to return in a response.</p>
    pub fn set_max_results(mut self, input: std::option::Option<i32>) -> Self {
        self.inner = self.inner.set_max_results(input);
        self
    }
}
/// Fluent builder constructing a request to `TagProject`.
///
/// <p>Adds tags to a project.</p>
#[derive(std::fmt::Debug)]
pub struct TagProject<
    C = aws_smithy_client::erase::DynConnector,
    M = aws_hyper::AwsMiddleware,
    R = aws_smithy_client::retry::Standard,
> {
    handle: std::sync::Arc<super::Handle<C, M, R>>,
    inner: crate::input::tag_project_input::Builder,
}
impl<C, M, R> TagProject<C, M, R>
where
    C: aws_smithy_client::bounds::SmithyConnector,
    M: aws_smithy_client::bounds::SmithyMiddleware<C>,
    R: aws_smithy_client::retry::NewRequestPolicy,
{
    /// Creates a new `TagProject`.
    pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
        Self { handle, inner: Default::default() }
    }
    /// Sends the request and returns the response.
    ///
    /// If an error occurs, an `SdkError` will be returned with additional details that
    /// can be matched against.
    ///
    /// By default, any retryable failures will be retried twice. Retry behavior
    /// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
    /// set when configuring the client.
    pub async fn send(
        self,
    ) -> std::result::Result<
        crate::output::TagProjectOutput,
        aws_smithy_http::result::SdkError<crate::error::TagProjectError>,
    >
    where
        R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
            crate::input::TagProjectInputOperationOutputAlias,
            crate::output::TagProjectOutput,
            crate::error::TagProjectError,
            crate::input::TagProjectInputOperationRetryAlias,
        >,
    {
        // Local request assembly; failures surface as `ConstructionFailure`.
        let input = self
            .inner
            .build()
            .map_err(|e| aws_smithy_http::result::SdkError::ConstructionFailure(e.into()))?;
        let operation = input
            .make_operation(&self.handle.conf)
            .await
            .map_err(|e| aws_smithy_http::result::SdkError::ConstructionFailure(e.into()))?;
        self.handle.client.call(operation).await
    }
    /// <p>The ID of the project you want to add a tag to.</p>
    pub fn id(mut self, value: impl Into<std::string::String>) -> Self {
        self.inner = self.inner.id(value);
        self
    }
    /// <p>The ID of the project you want to add a tag to.</p>
    pub fn set_id(mut self, input: std::option::Option<std::string::String>) -> Self {
        self.inner = self.inner.set_id(input);
        self
    }
    /// Adds a key-value pair to `tags`.
    ///
    /// To override the contents of this collection use [`set_tags`](Self::set_tags).
    ///
    /// <p>The tags you want to add to the project.</p>
    pub fn tags(
        mut self,
        key: impl Into<std::string::String>,
        val: impl Into<std::string::String>,
    ) -> Self {
        self.inner = self.inner.tags(key, val);
        self
    }
    /// <p>The tags you want to add to the project.</p>
    pub fn set_tags(
        mut self,
        input: std::option::Option<
            std::collections::HashMap<std::string::String, std::string::String>,
        >,
    ) -> Self {
        self.inner = self.inner.set_tags(input);
        self
    }
}
/// Fluent builder constructing a request to `UntagProject`.
///
/// <p>Removes tags from a project.</p>
#[derive(std::fmt::Debug)]
pub struct UntagProject<
    C = aws_smithy_client::erase::DynConnector,
    M = aws_hyper::AwsMiddleware,
    R = aws_smithy_client::retry::Standard,
> {
    handle: std::sync::Arc<super::Handle<C, M, R>>,
    inner: crate::input::untag_project_input::Builder,
}
impl<C, M, R> UntagProject<C, M, R>
where
    C: aws_smithy_client::bounds::SmithyConnector,
    M: aws_smithy_client::bounds::SmithyMiddleware<C>,
    R: aws_smithy_client::retry::NewRequestPolicy,
{
    /// Creates a new `UntagProject`.
    pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self {
        Self { handle, inner: Default::default() }
    }
    /// Sends the request and returns the response.
    ///
    /// If an error occurs, an `SdkError` will be returned with additional details that
    /// can be matched against.
    ///
    /// By default, any retryable failures will be retried twice. Retry behavior
    /// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be
    /// set when configuring the client.
    pub async fn send(
        self,
    ) -> std::result::Result<
        crate::output::UntagProjectOutput,
        aws_smithy_http::result::SdkError<crate::error::UntagProjectError>,
    >
    where
        R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
            crate::input::UntagProjectInputOperationOutputAlias,
            crate::output::UntagProjectOutput,
            crate::error::UntagProjectError,
            crate::input::UntagProjectInputOperationRetryAlias,
        >,
    {
        // Local request assembly; failures surface as `ConstructionFailure`.
        let input = self
            .inner
            .build()
            .map_err(|e| aws_smithy_http::result::SdkError::ConstructionFailure(e.into()))?;
        let operation = input
            .make_operation(&self.handle.conf)
            .await
            .map_err(|e| aws_smithy_http::result::SdkError::ConstructionFailure(e.into()))?;
        self.handle.client.call(operation).await
    }
    /// <p>The ID of the project to remove tags from.</p>
    pub fn id(mut self, value: impl Into<std::string::String>) -> Self {
        self.inner = self.inner.id(value);
        self
    }
    /// <p>The ID of the project to remove tags from.</p>
    pub fn set_id(mut self, input: std::option::Option<std::string::String>) -> Self {
        self.inner = self.inner.set_id(input);
        self
    }
    /// Appends an item to `tags`.
    ///
    /// To override the contents of this collection use [`set_tags`](Self::set_tags).
    ///
    /// <p>The tags to remove from the project.</p>
    pub fn tags(mut self, value: impl Into<std::string::String>) -> Self {
        self.inner = self.inner.tags(value);
        self
    }
    /// <p>The tags to remove from the project.</p>
    pub fn set_tags(
        mut self,
        input: std::option::Option<std::vec::Vec<std::string::String>>,
    ) -> Self {
        self.inner = self.inner.set_tags(input);
        self
    }
}
/// Fluent builder constructing a request to `UpdateProject`.
/// /// <p>Updates a project in AWS CodeStar.</p> #[derive(std::fmt::Debug)] pub struct UpdateProject< C = aws_smithy_client::erase::DynConnector, M = aws_hyper::AwsMiddleware, R = aws_smithy_client::retry::Standard, > { handle: std::sync::Arc<super::Handle<C, M, R>>, inner: crate::input::update_project_input::Builder, } impl<C, M, R> UpdateProject<C, M, R> where C: aws_smithy_client::bounds::SmithyConnector, M: aws_smithy_client::bounds::SmithyMiddleware<C>, R: aws_smithy_client::retry::NewRequestPolicy, { /// Creates a new `UpdateProject`. pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self { Self { handle, inner: Default::default(), } } /// Sends the request and returns the response. /// /// If an error occurs, an `SdkError` will be returned with additional details that /// can be matched against. /// /// By default, any retryable failures will be retried twice. Retry behavior /// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be /// set when configuring the client. 
pub async fn send( self, ) -> std::result::Result< crate::output::UpdateProjectOutput, aws_smithy_http::result::SdkError<crate::error::UpdateProjectError>, > where R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy< crate::input::UpdateProjectInputOperationOutputAlias, crate::output::UpdateProjectOutput, crate::error::UpdateProjectError, crate::input::UpdateProjectInputOperationRetryAlias, >, { let input = self.inner.build().map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; let op = input .make_operation(&self.handle.conf) .await .map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; self.handle.client.call(op).await } /// <p>The ID of the project you want to update.</p> pub fn id(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.id(inp); self } /// <p>The ID of the project you want to update.</p> pub fn set_id(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_id(input); self } /// <p>The name of the project you want to update.</p> pub fn name(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.name(inp); self } /// <p>The name of the project you want to update.</p> pub fn set_name(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_name(input); self } /// <p>The description of the project, if any.</p> pub fn description(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.description(inp); self } /// <p>The description of the project, if any.</p> pub fn set_description(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_description(input); self } } /// Fluent builder constructing a request to `UpdateTeamMember`. /// /// <p>Updates a team member's attributes in an AWS CodeStar project. 
For example, you can change a /// team member's role in the project, or change whether they have remote access to project /// resources.</p> #[derive(std::fmt::Debug)] pub struct UpdateTeamMember< C = aws_smithy_client::erase::DynConnector, M = aws_hyper::AwsMiddleware, R = aws_smithy_client::retry::Standard, > { handle: std::sync::Arc<super::Handle<C, M, R>>, inner: crate::input::update_team_member_input::Builder, } impl<C, M, R> UpdateTeamMember<C, M, R> where C: aws_smithy_client::bounds::SmithyConnector, M: aws_smithy_client::bounds::SmithyMiddleware<C>, R: aws_smithy_client::retry::NewRequestPolicy, { /// Creates a new `UpdateTeamMember`. pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self { Self { handle, inner: Default::default(), } } /// Sends the request and returns the response. /// /// If an error occurs, an `SdkError` will be returned with additional details that /// can be matched against. /// /// By default, any retryable failures will be retried twice. Retry behavior /// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be /// set when configuring the client. 
pub async fn send( self, ) -> std::result::Result< crate::output::UpdateTeamMemberOutput, aws_smithy_http::result::SdkError<crate::error::UpdateTeamMemberError>, > where R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy< crate::input::UpdateTeamMemberInputOperationOutputAlias, crate::output::UpdateTeamMemberOutput, crate::error::UpdateTeamMemberError, crate::input::UpdateTeamMemberInputOperationRetryAlias, >, { let input = self.inner.build().map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; let op = input .make_operation(&self.handle.conf) .await .map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; self.handle.client.call(op).await } /// <p>The ID of the project.</p> pub fn project_id(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.project_id(inp); self } /// <p>The ID of the project.</p> pub fn set_project_id(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_project_id(input); self } /// <p>The Amazon Resource Name (ARN) of the user for whom you want to change team membership /// attributes.</p> pub fn user_arn(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.user_arn(inp); self } /// <p>The Amazon Resource Name (ARN) of the user for whom you want to change team membership /// attributes.</p> pub fn set_user_arn(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_user_arn(input); self } /// <p>The role assigned to the user in the project. Project roles have different levels of /// access. 
For more information, see <a href="http://docs.aws.amazon.com/codestar/latest/userguide/working-with-teams.html">Working with /// Teams</a> in the <i>AWS CodeStar User Guide</i>.</p> pub fn project_role(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.project_role(inp); self } /// <p>The role assigned to the user in the project. Project roles have different levels of /// access. For more information, see <a href="http://docs.aws.amazon.com/codestar/latest/userguide/working-with-teams.html">Working with /// Teams</a> in the <i>AWS CodeStar User Guide</i>.</p> pub fn set_project_role(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_project_role(input); self } /// <p>Whether a team member is allowed to remotely access project resources using the SSH /// public key associated with the user's profile. Even if this is set to True, the user must /// associate a public key with their profile before the user can access resources.</p> pub fn remote_access_allowed(mut self, inp: bool) -> Self { self.inner = self.inner.remote_access_allowed(inp); self } /// <p>Whether a team member is allowed to remotely access project resources using the SSH /// public key associated with the user's profile. Even if this is set to True, the user must /// associate a public key with their profile before the user can access resources.</p> pub fn set_remote_access_allowed(mut self, input: std::option::Option<bool>) -> Self { self.inner = self.inner.set_remote_access_allowed(input); self } } /// Fluent builder constructing a request to `UpdateUserProfile`. /// /// <p>Updates a user's profile in AWS CodeStar. The user profile is not project-specific. /// Information in the user profile is displayed wherever the user's information appears to other /// users in AWS CodeStar. 
</p> #[derive(std::fmt::Debug)] pub struct UpdateUserProfile< C = aws_smithy_client::erase::DynConnector, M = aws_hyper::AwsMiddleware, R = aws_smithy_client::retry::Standard, > { handle: std::sync::Arc<super::Handle<C, M, R>>, inner: crate::input::update_user_profile_input::Builder, } impl<C, M, R> UpdateUserProfile<C, M, R> where C: aws_smithy_client::bounds::SmithyConnector, M: aws_smithy_client::bounds::SmithyMiddleware<C>, R: aws_smithy_client::retry::NewRequestPolicy, { /// Creates a new `UpdateUserProfile`. pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self { Self { handle, inner: Default::default(), } } /// Sends the request and returns the response. /// /// If an error occurs, an `SdkError` will be returned with additional details that /// can be matched against. /// /// By default, any retryable failures will be retried twice. Retry behavior /// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be /// set when configuring the client. 
pub async fn send( self, ) -> std::result::Result< crate::output::UpdateUserProfileOutput, aws_smithy_http::result::SdkError<crate::error::UpdateUserProfileError>, > where R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy< crate::input::UpdateUserProfileInputOperationOutputAlias, crate::output::UpdateUserProfileOutput, crate::error::UpdateUserProfileError, crate::input::UpdateUserProfileInputOperationRetryAlias, >, { let input = self.inner.build().map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; let op = input .make_operation(&self.handle.conf) .await .map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; self.handle.client.call(op).await } /// <p>The name that will be displayed as the friendly name for the user in AWS /// CodeStar.</p> pub fn user_arn(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.user_arn(inp); self } /// <p>The name that will be displayed as the friendly name for the user in AWS /// CodeStar.</p> pub fn set_user_arn(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_user_arn(input); self } /// <p>The name that is displayed as the friendly name for the user in AWS CodeStar.</p> pub fn display_name(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.display_name(inp); self } /// <p>The name that is displayed as the friendly name for the user in AWS CodeStar.</p> pub fn set_display_name(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_display_name(input); self } /// <p>The email address that is displayed as part of the user's profile in AWS /// CodeStar.</p> pub fn email_address(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.email_address(inp); self } /// <p>The email address that is displayed as part of the user's profile in AWS /// CodeStar.</p> pub fn set_email_address( mut self, 
input: std::option::Option<std::string::String>, ) -> Self { self.inner = self.inner.set_email_address(input); self } /// <p>The SSH public key associated with the user in AWS CodeStar. If a project owner allows the /// user remote access to project resources, this public key will be used along with the user's /// private key for SSH access.</p> pub fn ssh_public_key(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.ssh_public_key(inp); self } /// <p>The SSH public key associated with the user in AWS CodeStar. If a project owner allows the /// user remote access to project resources, this public key will be used along with the user's /// private key for SSH access.</p> pub fn set_ssh_public_key( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.inner = self.inner.set_ssh_public_key(input); self } } } impl<C> Client<C, aws_hyper::AwsMiddleware, aws_smithy_client::retry::Standard> { /// Creates a client with the given service config and connector override. pub fn from_conf_conn(conf: crate::Config, conn: C) -> Self { let retry_config = conf.retry_config.as_ref().cloned().unwrap_or_default(); let client = aws_hyper::Client::new(conn).with_retry_config(retry_config.into()); Self { handle: std::sync::Arc::new(Handle { client, conf }), } } } impl Client< aws_smithy_client::erase::DynConnector, aws_hyper::AwsMiddleware, aws_smithy_client::retry::Standard, > { /// Creates a new client from a shared config. #[cfg(any(feature = "rustls", feature = "native-tls"))] pub fn new(config: &aws_types::config::Config) -> Self { Self::from_conf(config.into()) } /// Creates a new client from the service [`Config`](crate::Config). 
#[cfg(any(feature = "rustls", feature = "native-tls"))] pub fn from_conf(conf: crate::Config) -> Self { let retry_config = conf.retry_config.as_ref().cloned().unwrap_or_default(); let client = aws_hyper::Client::https().with_retry_config(retry_config.into()); Self { handle: std::sync::Arc::new(Handle { client, conf }), } } }
true
6754dd0fe5a78b9ede214d10997d5ba894f6653c
Rust
5c077m4n/noder
/src/lib/utils/os.rs
UTF-8
964
2.859375
3
[]
no_license
/// Maps the compile-time OS name (`std::env::consts::OS`) to the
/// identifier used in official Node.js release artifact names
/// (`linux`, `darwin`, `win`), or `None` for an unsupported OS.
pub fn get_os_name() -> Option<&'static str> {
    match std::env::consts::OS {
        "linux" => Some("linux"),
        "macos" => Some("darwin"),
        "windows" => Some("win"),
        _ => None,
    }
}

/// Maps the compile-time CPU architecture (`std::env::consts::ARCH`)
/// to the identifier used in Node.js release artifact names
/// (`x86`, `x64`), or `None` for an unsupported architecture.
pub fn get_os_arch() -> Option<&'static str> {
    match std::env::consts::ARCH {
        "x86" => Some("x86"),
        "x86_64" => Some("x64"),
        _ => None,
    }
}

/// Builds the Node.js release archive file name for `version` on the
/// current platform, e.g. `node-v16.13.0-linux-x64.tar.gz`.
///
/// # Panics
///
/// Panics when the current OS or CPU architecture is unsupported,
/// i.e. when [`get_os_name`] or [`get_os_arch`] returns `None`.
pub fn get_os_node_file_name(version: &str) -> Option<String> {
    // `expect` replaces `unwrap_or_else(|| panic!(..))`: identical panic
    // behavior and message, idiomatic form.
    let os_name = get_os_name().expect("Sorry, your OS is not supported");
    let os_arch = get_os_arch().expect("Sorry, your OS arch is not supported");
    // `os_name` can only be one of the three values produced by
    // `get_os_name`, so the extension match is total without the
    // original's unreachable `None` arm and subsequent `.unwrap()`.
    let ext = match os_name {
        "win" => ".zip",
        // "linux" | "darwin"
        _ => ".tar.gz",
    };

    Some(format!(
        "node-{version}-{os_name}-{os_arch}{ext}",
        version = version,
        os_name = os_name,
        os_arch = os_arch,
        ext = ext,
    ))
}
true
f4cf9eb2f99164d4a99ff51bdfc71c9b9eb25cf4
Rust
TyOverby/ares
/src/parse/mod.rs
UTF-8
4,815
2.78125
3
[]
no_license
// Based on Norvig's lisp interpreter
//
// Reader for the Ares lisp dialect: turns a source string into a
// sequence of `Value`s. Literal forms (`[...]` vectors, `{...}` maps)
// are folded to constants when every element is an immediate value,
// and otherwise desugared into calls to `list` / `hash-map`.
// NOTE: written in pre-`?` Rust; `try!(..)` is the old spelling of `..?`.
use std::rc::Rc;

use Value;
use intern::SymbolIntern;

mod errors;
mod util;
pub mod tokens;

use parse::tokens::{TokenType, Token, Open, TokenIter};
pub use parse::errors::ParseError;
use parse::errors::ParseError::*;

/// Parses a single expression whose first token `tok` has already been
/// consumed, pulling any further tokens it needs from `tok_stream`.
/// Symbols are interned through `interner`.
fn one_expr<'a, 'b>(tok: Token, tok_stream: &'a mut TokenIter<'b>, interner: &mut SymbolIntern) -> Result<Value, ParseError> {
    use self::tokens::TokenType;
    match tok.tt {
        // Numeric literal: try integer first, fall back to float; a string
        // that parses as neither becomes a ConversionError.
        TokenType::Number(s) => Ok(try!(s.parse()
                                         .map(Value::Int)
                                         .or_else(|_| s.parse().map(Value::Float))
                                         .map_err(|e| ConversionError(s, Box::new(e))))),
        // `true`/`false` parse as booleans; any other symbol is interned.
        TokenType::Symbol(s) => Ok(s.parse()
                                    .map(Value::Bool)
                                    .unwrap_or(Value::Symbol(interner.intern(s)))),
        TokenType::String(s) => Ok(Value::String(Rc::new(s))),
        // Reader macro such as quote: expands to (form-name <next-expr>),
        // or just (form-name) when the stream ends.
        TokenType::FormLike(fl) => Ok({
            let quoted = try!(parse_one_expr(tok_stream, interner));
            let interned = Value::Symbol(interner.intern(fl.form_name()));
            Value::list(match quoted {
                None => vec![interned],
                Some(v) => vec![interned, v],
            })
        }),
        // A closing delimiter with no matching opener.
        TokenType::Close(close) => Err(ExtraRightDelimiter(close, tok.start)),
        TokenType::Open(open) => {
            // Read everything up to (and including) the matching closer.
            let mut values = try!(parse_delimited(tok_stream, open, interner));
            match open {
                // `(...)` is an ordinary list form.
                Open::LParen => Ok(Value::list(values)),
                // `[...]`: constant-fold to (quote (..)) when every element
                // is immediate, otherwise emit a runtime `list` call.
                Open::LBracket => if values.iter().all(|a| util::immediate_value(a, interner)) {
                    let values = values.into_iter().map(util::unquote).collect();
                    Ok(Value::list(vec![Value::Symbol(interner.intern("quote")),
                                        Value::list(values)]))
                } else {
                    values.insert(0, Value::Symbol(interner.intern("list")));
                    Ok(Value::list(values))
                },
                // `{...}`: map literal; elements alternate key, value.
                Open::LBrace => {
                    // An odd element count means a key without a value.
                    if values.len() % 2 == 1 {
                        return Err(InvalidMapLiteral(tok.start));
                    }
                    if values.iter().all(|a| util::immediate_value(a, interner)) {
                        // Split even positions (keys) from odd positions (values).
                        let (keys, values): (Vec<_>, _) = values.into_iter()
                                                                .enumerate()
                                                                .partition(|&(i, _)| i % 2 == 0);
                        if keys.iter().all(|&(_, ref k)| util::can_be_hash_key(k, interner)) {
                            // All keys hashable: build the map constant now.
                            let m = keys.into_iter()
                                        .map(|(_, k)| util::unquote(k))
                                        .zip(values.into_iter().map(|(_, v)| util::unquote(v)))
                                        .collect();
                            Ok(Value::Map(Rc::new(m)))
                        } else {
                            Err(InvalidMapLiteral(tok.start))
                        }
                    } else {
                        // Non-immediate elements: defer to a runtime
                        // `hash-map` call.
                        values.insert(0, Value::Symbol(interner.intern("hash-map")));
                        Ok(Value::list(values))
                    }
                }
            }
        }
    }
}

/// Parses the next expression from the stream, or returns `Ok(None)`
/// when the stream is exhausted.
fn parse_one_expr<'a, 'b>(tok_stream: &'a mut TokenIter<'b>, interner: &mut SymbolIntern) -> Result<Option<Value>, ParseError> {
    if let Some(tok) = tok_stream.next() {
        one_expr(try!(tok), tok_stream, interner).map(Some)
    } else {
        Ok(None)
    }
}

/// Collects expressions until the closer matching `opener` is found.
/// Errors on a mismatched closer or on end-of-input before the closer.
fn parse_delimited<'a, 'b>(tok_stream: &'a mut TokenIter<'b>, opener: Open, interner: &mut SymbolIntern) -> Result<Vec<Value>, ParseError> {
    let mut v = vec![];
    loop {
        if let Some(tok_or_err) = tok_stream.next() {
            let tok = try!(tok_or_err);
            match tok.tt {
                TokenType::Close(close) => if close == opener.closed_by() {
                    return Ok(v);
                } else {
                    // Wrong kind of closer, e.g. `(..]`.
                    return Err(ExtraRightDelimiter(opener.closed_by(), tok.start));
                },
                _ => v.push(try!(one_expr(tok, tok_stream, interner))),
            }
        } else {
            return Err(MissingRightDelimiter(opener.closed_by()));
        }
    }
}

/// Entry point: parses all expressions in `input` into a vector of
/// `Value`s, stopping at the first parse error.
pub fn parse(input: &str, interner: &mut SymbolIntern) -> Result<Vec<Value>, ParseError> {
    let mut v = vec![];
    let mut tok_iter = TokenIter::new(input);
    while let Some(value) = try!(parse_one_expr(&mut tok_iter, interner)) {
        v.push(value)
    }
    Ok(v)
}
true
0e4ba4163fd6771ab79f7ca2ccc3af124a43c294
Rust
y-usuzumi/survive-the-course
/survive-the-course-rs/src/problems/leetcode/_31_Next_Permutation.rs
UTF-8
1,788
3.609375
4
[ "BSD-3-Clause" ]
permissive
// https://leetcode.com/problems/next-permutation/

pub struct Solution;

impl Solution {
    /// Rearranges `nums` in place into the lexicographically next greater
    /// permutation; when `nums` is already the greatest permutation, it
    /// wraps around to the smallest (ascending) one. O(1) extra memory.
    pub fn next_permutation(nums: &mut Vec<i32>) {
        let len = nums.len();
        if len == 0 {
            return;
        }
        // Default: no pivot exists (array is entirely non-increasing),
        // so the whole array gets reversed below.
        let mut suffix_start = 0;
        // Pivot = rightmost index whose element is smaller than its right
        // neighbor; everything after it is non-increasing.
        if let Some(pivot) = (0..len - 1).rev().find(|&i| nums[i] < nums[i + 1]) {
            // Rightmost suffix element strictly greater than the pivot;
            // guaranteed to exist since nums[pivot] < nums[pivot + 1].
            let successor = (pivot + 1..len)
                .rev()
                .find(|&j| nums[j] > nums[pivot])
                .expect("suffix must contain an element greater than the pivot");
            nums.swap(pivot, successor);
            suffix_start = pivot + 1;
        }
        // The suffix is non-increasing; reversing makes it minimal.
        nums[suffix_start..].reverse();
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_1() {
        let mut arr = vec![1, 2, 3];
        Solution::next_permutation(&mut arr);
        assert_eq!(arr, vec![1, 3, 2]);
    }

    #[test]
    fn test_2() {
        let mut arr = vec![3, 2, 1];
        Solution::next_permutation(&mut arr);
        assert_eq!(arr, vec![1, 2, 3]);
    }

    #[test]
    fn test_3() {
        let mut arr = vec![1, 1, 5];
        Solution::next_permutation(&mut arr);
        assert_eq!(arr, vec![1, 5, 1]);
    }

    #[test]
    fn test_4() {
        let mut arr = vec![1, 5, 1];
        Solution::next_permutation(&mut arr);
        assert_eq!(arr, vec![5, 1, 1]);
    }

    #[test]
    fn test_5() {
        let mut arr = vec![1, 2, 6, 5, 4];
        Solution::next_permutation(&mut arr);
        assert_eq!(arr, vec![1, 4, 2, 5, 6]);
    }
}
true
c0e46f647953bb7d2d8c1185556c3f1cb04bfe85
Rust
mcoffin/zinc
/src/drivers/dht22.rs
UTF-8
3,206
2.75
3
[ "Apache-2.0", "LicenseRef-scancode-unknown-license-reference" ]
permissive
// Zinc, the bare metal stack for rust. // Copyright 2014 Vladimir "farcaller" Pouzanov <[email protected]> // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. //! Driver for DHT22. use core::option::Option::{self, Some, None}; use hal::pin::Gpio; use hal::pin::GpioLevel::Low; use hal::pin::GpioLevel::High; use hal::pin::GpioDirection::In; use hal::pin::GpioDirection::Out; use hal::pin::GpioLevel; use hal::timer::Timer; /// Basic DHT22 driver ported over from Arduino example. pub struct DHT22<'a, T:'a, P:'a> { gpio: &'a P, timer: &'a T, } /// Measurement data from the DHT22. #[allow(missing_docs)] #[derive(Clone, Copy)] pub struct Measurements { pub humidity: f32, pub temperature: f32, } impl<'a, T: Timer, P: Gpio> DHT22<'a, T, P> { /// Creates a new DHT22 driver based on I/O GPIO and a timer with 10us resolution. pub fn new(timer: &'a T, gpio: &'a P) -> DHT22<'a, T, P> { DHT22 { gpio: gpio, timer: timer, } } /// Returns previous sensor measurements or None if synchronization failed. 
pub fn read(&self) -> Option<Measurements> { let buffer: &mut [u8; 5] = &mut [0; 5]; let mut idx: usize = 0; let mut mask: u8 = 128; self.gpio.set_direction(Out); self.gpio.set_low(); self.timer.wait_ms(20); self.gpio.set_high(); self.timer.wait_us(40); self.gpio.set_direction(In); if !self.wait_sync() { return None } for _ in 0..40 { if !self.wait_while(Low, 80) { return None } let t = self.timer.get_counter(); if !self.wait_while(High, 80) { return None } if self.timer.get_counter() - t > 40 { buffer[idx] |= mask; } mask >>= 1; if mask == 0 { mask = 128; idx += 1; } } let humidity: f32 = (((buffer[0] as u16) << 8) | buffer[1] as u16) as f32 * 0.1; let temperature: f32 = if buffer[2] & 0x80 != 0 { -0.1 * (((buffer[2] as u16 & 0x7F) << 8) | buffer[3] as u16) as f32 } else { 0.1 * (((buffer[2] as u16) << 8) | buffer[3] as u16) as f32 }; let checksum: u8 = buffer[0] + buffer[1] + buffer[2] + buffer[3]; if checksum != buffer[4] { None } else { Some(Measurements { humidity: humidity, temperature: temperature, }) } } fn wait_sync(&self) -> bool { if !self.wait_while(Low, 80) { false } else if !self.wait_while(High, 100) { false } else { true } } fn wait_while(&self, level: GpioLevel, timeout: usize) -> bool { for _ in 0..(timeout / 10) { self.timer.wait_us(10); if self.gpio.level() != level { return true; } } false } }
true
0828eba984702af600f0ec2ca72cecc65b69edbc
Rust
gvissers/babs
/src/ubig/sub.rs
UTF-8
34,841
3
3
[ "Apache-2.0" ]
permissive
// Copyright, 2021, Gé Vissers // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use crate::digit::Digit; use crate::result::{Error, Result}; use crate::ubig::support::drop_leading_zeros; /// Decrement the numer or number part represented by the digits in `nr` by one, and returns /// the number of digits in the result. If the original number is zero, an `Underflow` error is /// returned. pub fn dec_assign<T>(nr: &mut [T]) -> Result<usize> where T: Digit { if nr.is_empty() { Err(Error::Underflow) } else { let n = nr.len(); for digit in nr[..n-1].iter_mut() { if !digit.dec() { return Ok(n); } } if nr[n-1].dec() { Err(Error::Underflow) } else if nr[n-1].is_zero() { Ok(n-1) } else { Ok(n) } } } /// Subtract the single digit `digit` from the number or number part represented by the digits in /// `nr`, and return the number of digits in the result. If `digit` is larger than `nr`, an /// Underflow error is returned. pub fn sub_assign_digit<T>(nr: &mut [T], digit: T) -> Result<usize> where T: Digit { if digit.is_zero() { Ok(nr.len()) } else if nr.is_empty() { Err(Error::Underflow) } else if nr[0].sub_carry_assign(digit, false) { let nd = dec_assign(&mut nr[1..])?; Ok(1 + nd) } else { Ok(nr.len()) } } /// Subtract the big number represented by the digits in `n1` from the number or number part represented /// by `n0`, and returns the number of digits in the result. If `nr1` is greater than `nr0`, an /// `Underflow` error is returned. 
pub fn sub_assign_big<T>(nr0: &mut [T], nr1: &[T]) -> Result<usize> where T: Digit { let n0 = nr0.len(); let n1 = nr1.len(); if n0 < n1 { Err(Error::Underflow) } else { let carry = nr0.iter_mut().zip(nr1).fold(false, |carry, (d0, &d1)| d0.sub_carry_assign(d1, carry)); if carry { let nd = dec_assign(&mut nr0[n1..])?; Ok(n1 + nd) } else { Ok(drop_leading_zeros(nr0, n0)) } } } /// Subtract the big numbers represented by the digits in `nr0` and `nr1`, and store the result in /// `diff`. Returns the number of digits in the differene, a `NoSpace` error if the difference /// cannot be stored in `diff`, or an `Underflow` error if `nr0 < nr1`. /// NOTE: a `NoSpace` error is also returned if any leading zeros as a result of the subtraction /// cannot be stored. Therefore, `diff` should be able to contain at least as many digits as /// `nr0` is long. pub fn sub_big_into<T>(nr0: &[T], nr1: &[T], diff: &mut [T]) -> Result<usize> where T: Digit { let n0 = nr0.len(); let n1 = nr1.len(); if diff.len() < n0 { Err(Error::NoSpace) } else if n0 < n1 { Err(Error::Underflow) } else { let mut carry = false; for ((&d0, &d1), dr) in nr0.iter().zip(nr1).zip(diff.iter_mut()) { *dr = d0; carry = dr.sub_carry_assign(d1, carry); } diff[n1..n0].copy_from_slice(&nr0[n1..]); if carry { let nd = dec_assign(&mut diff[n1..n0])?; Ok(n1 + nd) } else { let n = drop_leading_zeros(diff, n0); Ok(n) } } } /// Subtract `nr1` from `nr0`, leaving the absolute value of the difference in `nr0`. Returns the /// sign of the difference, and the number of digits it contains. The initial length of the number /// stored in `nr0` is `len0` digits, but the array should be large enough to compute the difference, /// i.e. `nr0.len() ≥ max(len, nr1.len())`. 
pub fn sub_assign_big_abs_sign<T>(nr0: &mut [T], len0: usize, nr1: &[T]) -> (bool, usize) where T: Digit { if crate::ubig::cmp::lt(&nr0[..len0], nr1) { let nd = crate::ubig::rsub::rsub_assign_big(&mut nr0[..nr1.len()], nr1).unwrap(); (true, nd) } else { let nd = crate::ubig::sub::sub_assign_big(&mut nr0[..len0], nr1).unwrap(); (false, nd) } } /// Subtract `nr1` from `nr0`, and store the absolute value of the difference in `abs_diff`. /// Return the sign of the difference, and the number of digits it contains. `abs_diff` should /// be able to hold at least as many digits as the longest of `nr0` and `nr1`. pub fn sub_big_into_abs_sign<T>(nr0: &[T], nr1: &[T], abs_diff: &mut[T]) -> (bool, usize) where T: Digit { debug_assert!(abs_diff.len() >= nr0.len().max(nr1.len())); if crate::ubig::cmp::lt(nr0, nr1) { (true, sub_big_into(nr1, nr0, abs_diff).unwrap()) } else { (false, sub_big_into(nr0, nr1, abs_diff).unwrap()) } } #[cfg(test)] mod tests { use crate::digit::{DecimalDigit, BinaryDigit}; use super::*; #[test] fn test_dec_assign_binary() { let mut nr: [BinaryDigit<u8>; 0] = []; let res = dec_assign(&mut nr); assert_eq!(nr, []); assert_eq!(res, Err(Error::Underflow)); let mut nr = [BinaryDigit(1u8)]; let res = dec_assign(&mut nr); assert_eq!(nr, [BinaryDigit(0)]); assert_eq!(res, Ok(0)); let mut nr = [BinaryDigit(0u8)]; let res = dec_assign(&mut nr); assert_eq!(nr, [BinaryDigit(0xff)]); assert_eq!(res, Err(Error::Underflow)); let mut nr = [BinaryDigit(0u8), BinaryDigit(1)]; let res = dec_assign(&mut nr); assert_eq!(nr, [BinaryDigit(0xff), BinaryDigit(0)]); assert_eq!(res, Ok(1)); let mut nr = [BinaryDigit(0u8), BinaryDigit(1), BinaryDigit(3)]; let res = dec_assign(&mut nr); assert_eq!(nr, [BinaryDigit(0xff), BinaryDigit(0), BinaryDigit(3)]); assert_eq!(res, Ok(3)); let mut nr = [BinaryDigit(0u8), BinaryDigit(0), BinaryDigit(3)]; let res = dec_assign(&mut nr); assert_eq!(nr, [BinaryDigit(0xff), BinaryDigit(0xff), BinaryDigit(2)]); assert_eq!(res, Ok(3)); let mut nr = 
[BinaryDigit(0u8), BinaryDigit(0), BinaryDigit(0)]; let res = dec_assign(&mut nr); assert_eq!(nr, [BinaryDigit(0xff), BinaryDigit(0xff), BinaryDigit(0xff)]); assert_eq!(res, Err(Error::Underflow)); let mut nr = [BinaryDigit(0xffu32), BinaryDigit(0xff)]; let res = dec_assign(&mut nr); assert_eq!(nr, [BinaryDigit(0xfe), BinaryDigit(0xff)]); assert_eq!(res, Ok(2)); let mut nr = [BinaryDigit(0u32), BinaryDigit(0xff)]; let res = dec_assign(&mut nr); assert_eq!(nr, [BinaryDigit(0xffffffff), BinaryDigit(0xfe)]); assert_eq!(res, Ok(2)); let mut nr = [BinaryDigit(0xffu32), BinaryDigit(0xffffffff)]; let res = dec_assign(&mut nr); assert_eq!(nr, [BinaryDigit(0xfe), BinaryDigit(0xffffffff)]); assert_eq!(res, Ok(2)); let mut nr = [BinaryDigit(0u32), BinaryDigit(0)]; let res = dec_assign(&mut nr); assert_eq!(nr, [BinaryDigit(0xffffffff), BinaryDigit(0xffffffff)]); assert_eq!(res, Err(Error::Underflow)); } #[test] fn test_dec_assign_decimal() { let mut nr: [DecimalDigit<u8>; 0] = []; let res = dec_assign(&mut nr); assert_eq!(nr, []); assert_eq!(res, Err(Error::Underflow)); let mut nr = [DecimalDigit(1u8)]; let res = dec_assign(&mut nr); assert_eq!(nr, [DecimalDigit(0)]); assert_eq!(res, Ok(0)); let mut nr = [DecimalDigit(0u8)]; let res = dec_assign(&mut nr); assert_eq!(nr, [DecimalDigit(99)]); assert_eq!(res, Err(Error::Underflow)); let mut nr = [DecimalDigit(0u8), DecimalDigit(1)]; let res = dec_assign(&mut nr); assert_eq!(nr, [DecimalDigit(99), DecimalDigit(0)]); assert_eq!(res, Ok(1)); let mut nr = [DecimalDigit(0u8), DecimalDigit(1), DecimalDigit(3)]; let res = dec_assign(&mut nr); assert_eq!(nr, [DecimalDigit(99), DecimalDigit(0), DecimalDigit(3)]); assert_eq!(res, Ok(3)); let mut nr = [DecimalDigit(0u8), DecimalDigit(0), DecimalDigit(3)]; let res = dec_assign(&mut nr); assert_eq!(nr, [DecimalDigit(99), DecimalDigit(99), DecimalDigit(2)]); assert_eq!(res, Ok(3)); let mut nr = [DecimalDigit(0u8), DecimalDigit(0), DecimalDigit(0)]; let res = dec_assign(&mut nr); assert_eq!(nr, 
[DecimalDigit(99), DecimalDigit(99), DecimalDigit(99)]); assert_eq!(res, Err(Error::Underflow)); let mut nr = [DecimalDigit(99u32), DecimalDigit(99)]; let res = dec_assign(&mut nr); assert_eq!(nr, [DecimalDigit(98), DecimalDigit(99)]); assert_eq!(res, Ok(2)); let mut nr = [DecimalDigit(0u32), DecimalDigit(99)]; let res = dec_assign(&mut nr); assert_eq!(nr, [DecimalDigit(999_999_999), DecimalDigit(98)]); assert_eq!(res, Ok(2)); let mut nr = [DecimalDigit(99u32), DecimalDigit(999_999_999)]; let res = dec_assign(&mut nr); assert_eq!(nr, [DecimalDigit(98), DecimalDigit(999_999_999)]); assert_eq!(res, Ok(2)); let mut nr = [DecimalDigit(0u32), DecimalDigit(0)]; let res = dec_assign(&mut nr); assert_eq!(nr, [DecimalDigit(999_999_999), DecimalDigit(999_999_999)]); assert_eq!(res, Err(Error::Underflow)); } #[test] fn test_sub_assign_digit_binary() { let mut nr: [BinaryDigit<u8>; 0] = []; let res = sub_assign_digit(&mut nr, BinaryDigit(0)); assert_eq!(nr, []); assert_eq!(res, Ok(0)); let mut nr: [BinaryDigit<u8>; 0] = []; let res = sub_assign_digit(&mut nr, BinaryDigit(47)); assert_eq!(nr, []); assert_eq!(res, Err(Error::Underflow)); let mut nr = [BinaryDigit(1u8)]; let res = sub_assign_digit(&mut nr, BinaryDigit(0)); assert_eq!(nr, [BinaryDigit(1)]); assert_eq!(res, Ok(1)); let mut nr = [BinaryDigit(48u8)]; let res = sub_assign_digit(&mut nr, BinaryDigit(47)); assert_eq!(nr, [BinaryDigit(1)]); assert_eq!(res, Ok(1)); let mut nr = [BinaryDigit(1u8)]; let res = sub_assign_digit(&mut nr, BinaryDigit(0x85)); assert_eq!(nr, [BinaryDigit(0x7c)]); assert_eq!(res, Err(Error::Underflow)); let mut nr = [BinaryDigit(0x80u8), BinaryDigit(1)]; let res = sub_assign_digit(&mut nr, BinaryDigit(0x85)); assert_eq!(nr, [BinaryDigit(0xfb), BinaryDigit(0)]); assert_eq!(res, Ok(1)); let mut nr = [BinaryDigit(0x80u8), BinaryDigit(0), BinaryDigit(0xfe)]; let res = sub_assign_digit(&mut nr, BinaryDigit(0x85)); assert_eq!(nr, [BinaryDigit(0xfb), BinaryDigit(0xff), BinaryDigit(0xfd)]); 
assert_eq!(res, Ok(3)); let mut nr = [BinaryDigit(0x80u8), BinaryDigit(0), BinaryDigit(0)]; let res = sub_assign_digit(&mut nr, BinaryDigit(0x85)); assert_eq!(nr, [BinaryDigit(0xfb), BinaryDigit(0xff), BinaryDigit(0xff)]); assert_eq!(res, Err(Error::Underflow)); let mut nr = [BinaryDigit(0x105u16), BinaryDigit(0xff), BinaryDigit(0xff)]; let res = sub_assign_digit(&mut nr, BinaryDigit(0x85)); assert_eq!(nr, [BinaryDigit(0x80), BinaryDigit(0xff), BinaryDigit(0xff)]); assert_eq!(res, Ok(3)); let mut nr = [BinaryDigit(5u16), BinaryDigit(0), BinaryDigit(0)]; let res = sub_assign_digit(&mut nr, BinaryDigit(0x85)); assert_eq!(nr, [BinaryDigit(0xff80), BinaryDigit(0xffff), BinaryDigit(0xffff)]); assert_eq!(res, Err(Error::Underflow)); let mut nr = [BinaryDigit(0x105u32), BinaryDigit(0xff), BinaryDigit(0xff)]; let res = sub_assign_digit(&mut nr, BinaryDigit(0x85)); assert_eq!(nr, [BinaryDigit(0x80), BinaryDigit(0xff), BinaryDigit(0xff)]); assert_eq!(res, Ok(3)); let mut nr = [BinaryDigit(5u32), BinaryDigit(0xffff), BinaryDigit(0xffff)]; let res = sub_assign_digit(&mut nr, BinaryDigit(0x85)); assert_eq!(nr, [BinaryDigit(0xffffff80), BinaryDigit(0xfffe), BinaryDigit(0xffff)]); assert_eq!(res, Ok(3)); let mut nr = [BinaryDigit(5), BinaryDigit(0), BinaryDigit(0)]; let res = sub_assign_digit(&mut nr, BinaryDigit(0x85)); assert_eq!(nr, [BinaryDigit(0xffffff80u32), BinaryDigit(0xffffffff), BinaryDigit(0xffffffff)]); assert_eq!(res, Err(Error::Underflow)); } #[test] fn test_sub_assign_digit_decimal() { let mut nr: [DecimalDigit<u8>; 0] = []; let res = sub_assign_digit(&mut nr, DecimalDigit(0)); assert_eq!(nr, []); assert_eq!(res, Ok(0)); let mut nr: [DecimalDigit<u8>; 0] = []; let res = sub_assign_digit(&mut nr, DecimalDigit(47)); assert_eq!(nr, []); assert_eq!(res, Err(Error::Underflow)); let mut nr = [DecimalDigit(1u8)]; let res = sub_assign_digit(&mut nr, DecimalDigit(0)); assert_eq!(nr, [DecimalDigit(1)]); assert_eq!(res, Ok(1)); let mut nr = [DecimalDigit(48u8)]; let res = 
sub_assign_digit(&mut nr, DecimalDigit(47)); assert_eq!(nr, [DecimalDigit(1)]); assert_eq!(res, Ok(1)); let mut nr = [DecimalDigit(5u8)]; let res = sub_assign_digit(&mut nr, DecimalDigit(55)); assert_eq!(nr, [DecimalDigit(50)]); assert_eq!(res, Err(Error::Underflow)); let mut nr = [DecimalDigit(5u8), DecimalDigit(1)]; let res = sub_assign_digit(&mut nr, DecimalDigit(55)); assert_eq!(nr, [DecimalDigit(50), DecimalDigit(0)]); assert_eq!(res, Ok(1)); let mut nr = [DecimalDigit(5u8), DecimalDigit(0), DecimalDigit(99)]; let res = sub_assign_digit(&mut nr, DecimalDigit(55)); assert_eq!(nr, [DecimalDigit(50), DecimalDigit(99), DecimalDigit(98)]); assert_eq!(res, Ok(3)); let mut nr = [DecimalDigit(5u8), DecimalDigit(0), DecimalDigit(0)]; let res = sub_assign_digit(&mut nr, DecimalDigit(55)); assert_eq!(nr, [DecimalDigit(50), DecimalDigit(99), DecimalDigit(99)]); assert_eq!(res, Err(Error::Underflow)); let mut nr = [DecimalDigit(105u16), DecimalDigit(99), DecimalDigit(99)]; let res = sub_assign_digit(&mut nr, DecimalDigit(55)); assert_eq!(nr, [DecimalDigit(50), DecimalDigit(99), DecimalDigit(99)]); assert_eq!(res, Ok(3)); let mut nr = [DecimalDigit(5u16), DecimalDigit(0), DecimalDigit(0)]; let res = sub_assign_digit(&mut nr, DecimalDigit(55)); assert_eq!(nr, [DecimalDigit(9_950), DecimalDigit(9_999), DecimalDigit(9_999)]); assert_eq!(res, Err(Error::Underflow)); let mut nr = [DecimalDigit(105u32), DecimalDigit(99), DecimalDigit(99)]; let res = sub_assign_digit(&mut nr, DecimalDigit(55)); assert_eq!(nr, [DecimalDigit(50), DecimalDigit(99), DecimalDigit(99)]); assert_eq!(res, Ok(3)); let mut nr = [DecimalDigit(5u32), DecimalDigit(10_000), DecimalDigit(9_999)]; let res = sub_assign_digit(&mut nr, DecimalDigit(55)); assert_eq!(nr, [DecimalDigit(999_999_950), DecimalDigit(9_999), DecimalDigit(9_999)]); assert_eq!(res, Ok(3)); let mut nr = [DecimalDigit(5), DecimalDigit(0), DecimalDigit(0)]; let res = sub_assign_digit(&mut nr, DecimalDigit(55)); assert_eq!(nr, 
[DecimalDigit(999_999_950u32), DecimalDigit(999_999_999), DecimalDigit(999_999_999)]); assert_eq!(res, Err(Error::Underflow)); } #[test] fn test_sub_assign_big_binary() { let mut nr0 = [BinaryDigit(1u8)]; let nr1 = []; let res = sub_assign_big(&mut nr0, &nr1); assert_eq!(nr0, [BinaryDigit(1)]); assert_eq!(res, Ok(1)); let mut nr0 = [BinaryDigit(0xffu8)]; let nr1 = [BinaryDigit(0xfeu8)]; let res = sub_assign_big(&mut nr0, &nr1); assert_eq!(nr0, [BinaryDigit(1)]); assert_eq!(res, Ok(1)); let mut nr0 = [BinaryDigit(0u8)]; let nr1 = [BinaryDigit(0xffu8)]; let res = sub_assign_big(&mut nr0, &nr1); assert_eq!(nr0, [BinaryDigit(1)]); assert_eq!(res, Err(Error::Underflow)); let mut nr0 = [BinaryDigit(0x7fu8)]; let nr1 = [BinaryDigit(0xffu8)]; let res = sub_assign_big(&mut nr0, &nr1); assert_eq!(nr0, [BinaryDigit(0x80)]); assert_eq!(res, Err(Error::Underflow)); let mut nr0 = [BinaryDigit(0x7fu8), BinaryDigit(2)]; let nr1 = [BinaryDigit(0xffu8)]; let res = sub_assign_big(&mut nr0, &nr1); assert_eq!(nr0, [BinaryDigit(0x80), BinaryDigit(1)]); assert_eq!(res, Ok(2)); let mut nr0 = [BinaryDigit(0x7fu8), BinaryDigit(0), BinaryDigit(0xff), BinaryDigit(0xff)]; let nr1 = [BinaryDigit(0xffu8)]; let res = sub_assign_big(&mut nr0, &nr1); assert_eq!(nr0, [BinaryDigit(0x80), BinaryDigit(0xff), BinaryDigit(0xfe), BinaryDigit(0xff)]); assert_eq!(res, Ok(4)); let mut nr0 = [BinaryDigit(0x7fu8), BinaryDigit(0), BinaryDigit(0), BinaryDigit(0)]; let nr1 = [BinaryDigit(0xffu8)]; let res = sub_assign_big(&mut nr0, &nr1); assert_eq!(nr0, [BinaryDigit(0x80), BinaryDigit(0xff), BinaryDigit(0xff), BinaryDigit(0xff)]); assert_eq!(res, Err(Error::Underflow)); } #[test] fn test_sub_assign_big_decimal() { let mut nr0 = [DecimalDigit(1u8)]; let nr1 = []; let res = sub_assign_big(&mut nr0, &nr1); assert_eq!(nr0, [DecimalDigit(1)]); assert_eq!(res, Ok(1)); let mut nr0 = [DecimalDigit(99u8)]; let nr1 = [DecimalDigit(98u8)]; let res = sub_assign_big(&mut nr0, &nr1); assert_eq!(nr0, [DecimalDigit(1)]); 
assert_eq!(res, Ok(1)); let mut nr0 = [DecimalDigit(0u8)]; let nr1 = [DecimalDigit(99u8)]; let res = sub_assign_big(&mut nr0, &nr1); assert_eq!(nr0, [DecimalDigit(1)]); assert_eq!(res, Err(Error::Underflow)); let mut nr0 = [DecimalDigit(49u8)]; let nr1 = [DecimalDigit(99u8)]; let res = sub_assign_big(&mut nr0, &nr1); assert_eq!(nr0, [DecimalDigit(50)]); assert_eq!(res, Err(Error::Underflow)); let mut nr0 = [DecimalDigit(49u8), DecimalDigit(2)]; let nr1 = [DecimalDigit(99u8)]; let res = sub_assign_big(&mut nr0, &nr1); assert_eq!(nr0, [DecimalDigit(50), DecimalDigit(1)]); assert_eq!(res, Ok(2)); let mut nr0 = [DecimalDigit(49u8), DecimalDigit(0), DecimalDigit(99), DecimalDigit(99)]; let nr1 = [DecimalDigit(99u8)]; let res = sub_assign_big(&mut nr0, &nr1); assert_eq!(nr0, [DecimalDigit(50), DecimalDigit(99), DecimalDigit(98), DecimalDigit(99)]); assert_eq!(res, Ok(4)); let mut nr0 = [DecimalDigit(49u8), DecimalDigit(0), DecimalDigit(0), DecimalDigit(0)]; let nr1 = [DecimalDigit(99u8)]; let res = sub_assign_big(&mut nr0, &nr1); assert_eq!(nr0, [DecimalDigit(50), DecimalDigit(99), DecimalDigit(99), DecimalDigit(99)]); assert_eq!(res, Err(Error::Underflow)); } #[test] fn test_sub_big_into_binary() { let nr0: [BinaryDigit<u8>; 0] = []; let nr1: [BinaryDigit<u8>; 0] = []; let mut diff = []; let n = sub_big_into(&nr0, &nr1, &mut diff); assert_eq!(n, Ok(0)); assert_eq!(diff, []); let nr0 = [BinaryDigit(1u8), BinaryDigit(2)]; let nr1: [BinaryDigit<u8>; 0] = []; let mut diff = [BinaryDigit(0); 2]; let n = sub_big_into(&nr0, &nr1, &mut diff); assert_eq!(n, Ok(2)); assert_eq!(diff, [BinaryDigit(1), BinaryDigit(2)]); let nr0 = [BinaryDigit(1u8), BinaryDigit(2)]; let nr1 = [BinaryDigit(1u8)]; let mut diff = [BinaryDigit(0); 2]; let n = sub_big_into(&nr0, &nr1, &mut diff); assert_eq!(n, Ok(2)); assert_eq!(diff, [BinaryDigit(0), BinaryDigit(2)]); let nr0 = [BinaryDigit(1u8), BinaryDigit(2u8)]; let nr1 = [BinaryDigit(2u8)]; let mut diff = [BinaryDigit(0); 2]; let n = 
sub_big_into(&nr0, &nr1, &mut diff); assert_eq!(n, Ok(2)); assert_eq!(diff, [BinaryDigit(0xff), BinaryDigit(1)]); let nr0 = [BinaryDigit(1u8), BinaryDigit(1u8)]; let nr1 = [BinaryDigit(2u8)]; let mut diff = [BinaryDigit(0); 2]; let n = sub_big_into(&nr0, &nr1, &mut diff); assert_eq!(n, Ok(1)); assert_eq!(diff, [BinaryDigit(0xff), BinaryDigit(0)]); let nr0 = [BinaryDigit(0x2518af54u32), BinaryDigit(0xf6271615), BinaryDigit(0xa5617882)]; let nr1 = [BinaryDigit(0x38278919u32), BinaryDigit(0xffffffff), BinaryDigit(0x76552298)]; let mut diff = [BinaryDigit(0); 3]; let n = sub_big_into(&nr0, &nr1, &mut diff); assert_eq!(n, Ok(3)); assert_eq!(diff, [BinaryDigit(0xecf1263b), BinaryDigit(0xf6271615), BinaryDigit(0x2f0c55e9)]); } #[test] fn test_sub_big_into_decimal() { let nr0: [DecimalDigit<u8>; 0] = []; let nr1: [DecimalDigit<u8>; 0] = []; let mut diff = []; let n = sub_big_into(&nr0, &nr1, &mut diff); assert_eq!(n, Ok(0)); assert_eq!(diff, []); let nr0 = [DecimalDigit(1u8), DecimalDigit(2)]; let nr1: [DecimalDigit<u8>; 0] = []; let mut diff = [DecimalDigit(0); 2]; let n = sub_big_into(&nr0, &nr1, &mut diff); assert_eq!(n, Ok(2)); assert_eq!(diff, [DecimalDigit(1), DecimalDigit(2)]); let nr0 = [DecimalDigit(1u8), DecimalDigit(2)]; let nr1 = [DecimalDigit(1u8)]; let mut diff = [DecimalDigit(0); 2]; let n = sub_big_into(&nr0, &nr1, &mut diff); assert_eq!(n, Ok(2)); assert_eq!(diff, [DecimalDigit(0), DecimalDigit(2)]); let nr0 = [DecimalDigit(1u8), DecimalDigit(2u8)]; let nr1 = [DecimalDigit(2u8)]; let mut diff = [DecimalDigit(0); 2]; let n = sub_big_into(&nr0, &nr1, &mut diff); assert_eq!(n, Ok(2)); assert_eq!(diff, [DecimalDigit(99), DecimalDigit(1)]); let nr0 = [DecimalDigit(1u8), DecimalDigit(1u8)]; let nr1 = [DecimalDigit(2u8)]; let mut diff = [DecimalDigit(0); 2]; let n = sub_big_into(&nr0, &nr1, &mut diff); assert_eq!(n, Ok(1)); assert_eq!(diff, [DecimalDigit(99), DecimalDigit(0)]); let nr0 = [DecimalDigit(837_984_655u32), DecimalDigit(982_376_123), 
DecimalDigit(761_233_341)]; let nr1 = [DecimalDigit(899_987_987u32), DecimalDigit(213_872_166), DecimalDigit(688_231_987)]; let mut diff = [DecimalDigit(0); 3]; let n = sub_big_into(&nr0, &nr1, &mut diff); assert_eq!(n, Ok(3)); assert_eq!(diff, [DecimalDigit(937_996_668), DecimalDigit(768_503_956), DecimalDigit(73_001_354)]); } #[test] fn test_sub_big_into_nospace() { let nr0 = [DecimalDigit(837_984_655u32), DecimalDigit(982_376_123), DecimalDigit(761_233_341)]; let nr1 = [DecimalDigit(899_987_987u32), DecimalDigit(213_872_166), DecimalDigit(688_231_987)]; let mut diff = [DecimalDigit(0); 2]; let n = sub_big_into(&nr0, &nr1, &mut diff); assert_eq!(n, Err(Error::NoSpace)); let nr0 = [BinaryDigit(0x4518af54u32), BinaryDigit(0xf6271615), BinaryDigit(0xa5617882)]; let nr1 = [BinaryDigit(0x38278919u32), BinaryDigit(0xf6271615), BinaryDigit(0xa5617882)]; let mut diff = [BinaryDigit(0); 2]; let n = sub_big_into(&nr0, &nr1, &mut diff); assert_eq!(n, Err(Error::NoSpace)); } #[test] fn test_sub_big_into_underflow() { let nr0 = [DecimalDigit(837_984_655u32), DecimalDigit(982_376_123)]; let nr1 = [DecimalDigit(899_987_987u32), DecimalDigit(213_872_166), DecimalDigit(688_231_987)]; let mut diff = [DecimalDigit(0); 3]; let n = sub_big_into(&nr0, &nr1, &mut diff); assert_eq!(n, Err(Error::Underflow)); let nr0 = [BinaryDigit(0x3518af54u32), BinaryDigit(0xf6271615), BinaryDigit(0xa5617882)]; let nr1 = [BinaryDigit(0x38278919u32), BinaryDigit(0xf6271615), BinaryDigit(0xa5617882)]; let mut diff = [BinaryDigit(0); 3]; let n = sub_big_into(&nr0, &nr1, &mut diff); assert_eq!(n, Err(Error::Underflow)); } #[test] fn test_sub_assign_big_abs_sign_binary() { let mut nr0: [BinaryDigit<u8>; 0] = []; let nr1: [BinaryDigit<u8>; 0] = []; let (sign, len) = sub_assign_big_abs_sign(&mut nr0, 0, &nr1); assert_eq!(sign, false); assert_eq!(len, 0); assert_eq!(nr0, []); let mut nr0 = [BinaryDigit(1u8)]; let nr1: [BinaryDigit<u8>; 0] = []; let (sign, len) = sub_assign_big_abs_sign(&mut nr0, 1, &nr1); 
assert_eq!(sign, false); assert_eq!(len, 1); assert_eq!(nr0, [BinaryDigit(1)]); let mut nr0 = [BinaryDigit(0u8)]; let nr1 = [BinaryDigit(1u8)]; let (sign, len) = sub_assign_big_abs_sign(&mut nr0, 0, &nr1); assert_eq!(sign, true); assert_eq!(len, 1); assert_eq!(nr0, [BinaryDigit(1)]); let mut nr0 = [BinaryDigit(0x40u16), BinaryDigit(0x43)]; let nr1 = [BinaryDigit(0x41u16), BinaryDigit(0x43)]; let (sign, len) = sub_assign_big_abs_sign(&mut nr0, 2, &nr1); assert_eq!(sign, true); assert_eq!(len, 1); assert_eq!(nr0, [BinaryDigit(1), BinaryDigit(0)]); let mut nr0 = [ BinaryDigit(0x672288af5189ff45u64), BinaryDigit(0xff453615af3f724d), BinaryDigit(0x282786fdf35eca) ]; let nr1 = [BinaryDigit(0x71898279dfacdf33u64), BinaryDigit(0x6fd527ade516ee12)]; let (sign, len) = sub_assign_big_abs_sign(&mut nr0, 3, &nr1); assert_eq!(sign, false); assert_eq!(len, 3); assert_eq!(nr0, [ BinaryDigit(0xf599063571dd2012), BinaryDigit(0x8f700e67ca28843a), BinaryDigit(0x282786fdf35eca) ]); let mut nr0 = [BinaryDigit(0x71898279dfacdf33u64), BinaryDigit(0x6fd527ade516ee12), BinaryDigit(0)]; let nr1 = [BinaryDigit(0x672288af5189ff45u64), BinaryDigit(0xff453615af3f724d), BinaryDigit(0x282786fdf35eca)]; let (sign, len) = sub_assign_big_abs_sign(&mut nr0, 2, &nr1); assert_eq!(sign, true); assert_eq!(len, 3); assert_eq!(nr0, [ BinaryDigit(0xf599063571dd2012), BinaryDigit(0x8f700e67ca28843a), BinaryDigit(0x282786fdf35eca) ]); } #[test] fn test_sub_assign_big_abs_sign_decimal() { let mut nr0: [DecimalDigit<u8>; 0] = []; let nr1: [DecimalDigit<u8>; 0] = []; let (sign, len) = sub_assign_big_abs_sign(&mut nr0, 0, &nr1); assert_eq!(sign, false); assert_eq!(len, 0); assert_eq!(nr0, []); let mut nr0 = [DecimalDigit(1u8)]; let nr1: [DecimalDigit<u8>; 0] = []; let (sign, len) = sub_assign_big_abs_sign(&mut nr0, 1, &nr1); assert_eq!(sign, false); assert_eq!(len, 1); assert_eq!(nr0, [DecimalDigit(1)]); let mut nr0 = [DecimalDigit(0u8)]; let nr1 = [DecimalDigit(1u8)]; let (sign, len) = 
sub_assign_big_abs_sign(&mut nr0, 0, &nr1); assert_eq!(sign, true); assert_eq!(len, 1); assert_eq!(nr0, [DecimalDigit(1)]); let mut nr0 = [DecimalDigit(4_000u16), DecimalDigit(4_321)]; let nr1 = [DecimalDigit(4_001u16), DecimalDigit(4_321)]; let (sign, len) = sub_assign_big_abs_sign(&mut nr0, 2, &nr1); assert_eq!(sign, true); assert_eq!(len, 1); assert_eq!(nr0, [DecimalDigit(1), DecimalDigit(0)]); let mut nr0 = [ DecimalDigit(5_748_918_999_164_244_199u64), DecimalDigit(9_332_982_876_466_454_782), DecimalDigit(123) ]; let nr1 = [DecimalDigit(5_983_299_918_982_872_456u64), DecimalDigit(9_564_555_736_893_987_342)]; let (sign, len) = sub_assign_big_abs_sign(&mut nr0, 3, &nr1); assert_eq!(sign, false); assert_eq!(len, 3); assert_eq!(nr0, [ DecimalDigit(9_765_619_080_181_371_743), DecimalDigit(9_768_427_139_572_467_439), DecimalDigit(122) ]); let mut nr0 = [ DecimalDigit(5_983_299_918_982_872_456u64), DecimalDigit(9_564_555_736_893_987_342), DecimalDigit(0) ]; let nr1 = [ DecimalDigit(5_748_918_999_164_244_199u64), DecimalDigit(9_332_982_876_466_454_782), DecimalDigit(123) ]; let (sign, len) = sub_assign_big_abs_sign(&mut nr0, 3, &nr1); assert_eq!(sign, true); assert_eq!(len, 3); assert_eq!(nr0, [ DecimalDigit(9_765_619_080_181_371_743), DecimalDigit(9_768_427_139_572_467_439), DecimalDigit(122) ]); } #[test] fn test_sub_big_into_abs_sign_binary() { let nr0: [BinaryDigit<u8>; 0] = []; let nr1: [BinaryDigit<u8>; 0] = []; let mut abs_diff = [BinaryDigit(0); 1]; let (sign, len) = sub_big_into_abs_sign(&nr0, &nr1, &mut abs_diff); assert_eq!(sign, false); assert_eq!(len, 0); assert_eq!(abs_diff, [BinaryDigit(0)]); let nr0 = [BinaryDigit(1u8)]; let nr1: [BinaryDigit<u8>; 0] = []; let mut abs_diff = [BinaryDigit(0); 1]; let (sign, len) = sub_big_into_abs_sign(&nr0, &nr1, &mut abs_diff); assert_eq!(sign, false); assert_eq!(len, 1); assert_eq!(abs_diff, [BinaryDigit(1)]); let nr0: [BinaryDigit<u8>; 0] = []; let nr1 = [BinaryDigit(1u8)]; let mut abs_diff = [BinaryDigit(0); 1]; let 
(sign, len) = sub_big_into_abs_sign(&nr0, &nr1, &mut abs_diff); assert_eq!(sign, true); assert_eq!(len, 1); assert_eq!(abs_diff, [BinaryDigit(1)]); let nr0 = [BinaryDigit(0x40u16), BinaryDigit(0x43)]; let nr1 = [BinaryDigit(0x41u16), BinaryDigit(0x43)]; let mut abs_diff = [BinaryDigit(0); 2]; let (sign, len) = sub_big_into_abs_sign(&nr0, &nr1, &mut abs_diff); assert_eq!(sign, true); assert_eq!(len, 1); assert_eq!(abs_diff, [BinaryDigit(1), BinaryDigit(0)]); let nr0 = [BinaryDigit(0x672288af5189ff45u64), BinaryDigit(0xff453615af3f724d), BinaryDigit(0x282786fdf35eca)]; let nr1 = [BinaryDigit(0x71898279dfacdf33u64), BinaryDigit(0x6fd527ade516ee12)]; let mut abs_diff = [BinaryDigit(0); 3]; let (sign, len) = sub_big_into_abs_sign(&nr0, &nr1, &mut abs_diff); assert_eq!(sign, false); assert_eq!(len, 3); assert_eq!(abs_diff, [BinaryDigit(0xf599063571dd2012), BinaryDigit(0x8f700e67ca28843a), BinaryDigit(0x282786fdf35eca)]); let nr0 = [BinaryDigit(0x71898279dfacdf33u64), BinaryDigit(0x6fd527ade516ee12)]; let nr1 = [BinaryDigit(0x672288af5189ff45u64), BinaryDigit(0xff453615af3f724d), BinaryDigit(0x282786fdf35eca)]; let mut abs_diff = [BinaryDigit(0); 3]; let (sign, len) = sub_big_into_abs_sign(&nr0, &nr1, &mut abs_diff); assert_eq!(sign, true); assert_eq!(len, 3); assert_eq!(abs_diff, [BinaryDigit(0xf599063571dd2012), BinaryDigit(0x8f700e67ca28843a), BinaryDigit(0x282786fdf35eca)]); } #[test] fn test_sub_big_into_abs_sign_decimal() { let nr0: [DecimalDigit<u8>; 0] = []; let nr1: [DecimalDigit<u8>; 0] = []; let mut abs_diff = [DecimalDigit(0); 1]; let (sign, len) = sub_big_into_abs_sign(&nr0, &nr1, &mut abs_diff); assert_eq!(sign, false); assert_eq!(len, 0); assert_eq!(abs_diff, [DecimalDigit(0)]); let nr0 = [DecimalDigit(1u8)]; let nr1: [DecimalDigit<u8>; 0] = []; let mut abs_diff = [DecimalDigit(0); 1]; let (sign, len) = sub_big_into_abs_sign(&nr0, &nr1, &mut abs_diff); assert_eq!(sign, false); assert_eq!(len, 1); assert_eq!(abs_diff, [DecimalDigit(1)]); let nr0: 
[DecimalDigit<u8>; 0] = []; let nr1 = [DecimalDigit(1u8)]; let mut abs_diff = [DecimalDigit(0); 1]; let (sign, len) = sub_big_into_abs_sign(&nr0, &nr1, &mut abs_diff); assert_eq!(sign, true); assert_eq!(len, 1); assert_eq!(abs_diff, [DecimalDigit(1)]); let nr0 = [DecimalDigit(4_000u16), DecimalDigit(4_321)]; let nr1 = [DecimalDigit(4_001u16), DecimalDigit(4_321)]; let mut abs_diff = [DecimalDigit(0); 2]; let (sign, len) = sub_big_into_abs_sign(&nr0, &nr1, &mut abs_diff); assert_eq!(sign, true); assert_eq!(len, 1); assert_eq!(abs_diff, [DecimalDigit(1), DecimalDigit(0)]); let nr0 = [ DecimalDigit(5_748_918_999_164_244_199u64), DecimalDigit(9_332_982_876_466_454_782), DecimalDigit(123) ]; let nr1 = [DecimalDigit(5_983_299_918_982_872_456u64), DecimalDigit(9_564_555_736_893_987_342)]; let mut abs_diff = [DecimalDigit(0); 3]; let (sign, len) = sub_big_into_abs_sign(&nr0, &nr1, &mut abs_diff); assert_eq!(sign, false); assert_eq!(len, 3); assert_eq!(abs_diff, [ DecimalDigit(9_765_619_080_181_371_743), DecimalDigit(9_768_427_139_572_467_439), DecimalDigit(122) ]); let nr0 = [DecimalDigit(5_983_299_918_982_872_456u64), DecimalDigit(9_564_555_736_893_987_342)]; let nr1 = [ DecimalDigit(5_748_918_999_164_244_199u64), DecimalDigit(9_332_982_876_466_454_782), DecimalDigit(123) ]; let mut abs_diff = [DecimalDigit(0); 3]; let (sign, len) = sub_big_into_abs_sign(&nr0, &nr1, &mut abs_diff); assert_eq!(sign, true); assert_eq!(len, 3); assert_eq!(abs_diff, [ DecimalDigit(9_765_619_080_181_371_743), DecimalDigit(9_768_427_139_572_467_439), DecimalDigit(122) ]); } }
true
d963bbf4b9ab41f4503180b1a80dbd7d156b635d
Rust
nervosnetwork/ckb
/network/src/peer_store/types.rs
UTF-8
4,115
2.6875
3
[ "MIT" ]
permissive
//! Type used on peer store use crate::{ peer_store::{Score, SessionType, ADDR_MAX_FAILURES, ADDR_MAX_RETRIES, ADDR_TIMEOUT_MS}, Flags, }; use ipnetwork::IpNetwork; use p2p::multiaddr::{Multiaddr, Protocol}; use serde::{Deserialize, Serialize}; use std::net::IpAddr; /// Peer info #[derive(Debug, Clone)] pub struct PeerInfo { /// Address pub connected_addr: Multiaddr, /// Session type pub session_type: SessionType, /// Connected time pub last_connected_at_ms: u64, } impl PeerInfo { /// Init pub fn new( connected_addr: Multiaddr, session_type: SessionType, last_connected_at_ms: u64, ) -> Self { PeerInfo { connected_addr, session_type, last_connected_at_ms, } } } /// Address info #[derive(Debug, Clone, Hash, Eq, PartialEq, Serialize, Deserialize)] pub struct AddrInfo { /// Multiaddr pub addr: Multiaddr, /// Score about this addr pub score: Score, /// Last connected time pub last_connected_at_ms: u64, /// Last try time pub last_tried_at_ms: u64, /// Attempts count pub attempts_count: u32, /// Random id pub random_id_pos: usize, /// Flags #[serde(default = "default_flags")] pub flags: u64, } fn default_flags() -> u64 { Flags::COMPATIBILITY.bits() } impl AddrInfo { /// Init pub fn new(addr: Multiaddr, last_connected_at_ms: u64, score: Score, flags: u64) -> Self { AddrInfo { addr, score, last_connected_at_ms, last_tried_at_ms: 0, attempts_count: 0, random_id_pos: 0, flags, } } /// Connection information pub fn connected<F: FnOnce(u64) -> bool>(&self, f: F) -> bool { f(self.last_connected_at_ms) } /// Whether already try dail within a minute pub fn tried_in_last_minute(&self, now_ms: u64) -> bool { self.last_tried_at_ms >= now_ms.saturating_sub(60_000) } /// Whether connectable peer pub fn is_connectable(&self, now_ms: u64) -> bool { // do not remove addr tried in last minute if self.tried_in_last_minute(now_ms) { return true; } // we give up if never connect to this addr if self.last_connected_at_ms == 0 && self.attempts_count >= ADDR_MAX_RETRIES { return false; } // 
consider addr is not connectable if failed too many times if now_ms.saturating_sub(self.last_connected_at_ms) > ADDR_TIMEOUT_MS && (self.attempts_count >= ADDR_MAX_FAILURES) { return false; } true } /// Try dail count pub fn mark_tried(&mut self, tried_at_ms: u64) { self.last_tried_at_ms = tried_at_ms; self.attempts_count = self.attempts_count.saturating_add(1); } /// Mark last connected time pub fn mark_connected(&mut self, connected_at_ms: u64) { self.last_connected_at_ms = connected_at_ms; // reset attempts self.attempts_count = 0; } /// Change address flags pub fn flags(&mut self, flags: Flags) { self.flags = flags.bits(); } } /// Banned addr info #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Hash)] pub struct BannedAddr { /// Ip address pub address: IpNetwork, /// Ban until time pub ban_until: u64, /// Ban reason pub ban_reason: String, /// Ban time pub created_at: u64, } /// Convert multiaddr to IpNetwork pub fn multiaddr_to_ip_network(multiaddr: &Multiaddr) -> Option<IpNetwork> { for addr_component in multiaddr { match addr_component { Protocol::Ip4(ipv4) => return Some(IpNetwork::V4(ipv4.into())), Protocol::Ip6(ipv6) => return Some(IpNetwork::V6(ipv6.into())), _ => (), } } None } /// Convert IpAddr to IpNetwork pub fn ip_to_network(ip: IpAddr) -> IpNetwork { match ip { IpAddr::V4(ipv4) => IpNetwork::V4(ipv4.into()), IpAddr::V6(ipv6) => IpNetwork::V6(ipv6.into()), } }
true
15796b8ce857a86ec089f98524bab9be326b6a9b
Rust
jordanbray/chess_uci
/src/gui/gui_command.rs
UTF-8
11,452
2.609375
3
[ "MIT" ]
permissive
use chess::{Board, ChessMove}; use error::Error; use nom::combinator::rest; use std::fmt; use std::str::FromStr; #[cfg(test)] use chess::{File, Piece, Rank, Square}; use gui::go::{parse_go, Go}; use parsers::*; use nom::IResult; use nom::combinator::{map, complete, value}; use nom::bytes::streaming::tag; use nom::bytes::complete::take_until; use nom::branch::alt; use nom::sequence::tuple; #[derive(Debug, PartialEq, Clone)] pub enum GuiCommand { Uci, Debug(bool), IsReady, SetOption(String, Option<String>), Register(String), UciNewGame, Position(Board, Vec<ChessMove>), Go(Go), Stop, PonderHit, Quit, } fn parse_uci(input: &str) -> IResult<&str, GuiCommand> { value(GuiCommand::Uci, tag("uci"))(input) } fn parse_debug(input: &str) -> IResult<&str, GuiCommand> { map( tuple(( tag("debug"), space, alt(( value(true, tag("on")), value(false, tag("off")) )), )), |(_, _, debug)| GuiCommand::Debug(debug) )(input) } fn parse_isready(input: &str) -> IResult<&str, GuiCommand> { value(GuiCommand::IsReady, tag("isready"))(input) } fn parse_setoption_value(input: &str) -> IResult<&str, GuiCommand> { map( tuple(( tag("setoption"), space, tag("name"), space, take_until("value"), tag("value"), rest )), |(_, _, _, _, name, _, value)| GuiCommand::SetOption(name.trim().to_string(), Some(value.trim().to_string())) )(input) } fn parse_setoption_novalue(input: &str) -> IResult<&str, GuiCommand> { map( tuple(( tag("setoption"), space, tag("name"), space, rest )), |(_, _, _, _, name)| GuiCommand::SetOption(name.trim().to_string(), None) )(input) } fn parse_register(input: &str) -> IResult<&str, GuiCommand> { map( tuple(( tag("register"), space, rest, )), |(_, _, token)| GuiCommand::Register(token.to_string()) )(input) } fn parse_ucinewgame(input: &str) -> IResult<&str, GuiCommand> { value(GuiCommand::UciNewGame, tag("ucinewgame"))(input) } fn parse_stop(input: &str) -> IResult<&str, GuiCommand> { value(GuiCommand::Stop, tag("stop"))(input) } fn parse_ponderhit(input: &str) -> IResult<&str, 
GuiCommand> { value(GuiCommand::PonderHit, tag("ponderhit"))(input) } fn parse_quit(input: &str) -> IResult<&str, GuiCommand> { value(GuiCommand::Quit, tag("quit"))(input) } fn parse_gui_go(input: &str) -> IResult<&str, GuiCommand> { map(parse_go, |go| GuiCommand::Go(go) )(input) } fn parse_position_fen(input: &str) -> IResult<&str, Board> { map( tuple(( tag("fen"), space, parse_fen )), |(_, _, board)| board )(input) } fn parse_position_startpos(input: &str) -> IResult<&str, Board> { value(Board::default(), tag("startpos"))(input) } fn parse_position_moves(input: &str) -> IResult<&str, Vec<ChessMove>> { map( tuple(( space, tag("moves"), space, parse_movelist, )), |(_, _, _, moves)| moves )(input) } fn parse_position_moves_empty(input: &str) -> IResult<&str, Vec<ChessMove>> { value( Vec::new(), tuple(( non_newline_space, tag("\n"), )) )(input) } fn parse_position(input: &str) -> IResult<&str, GuiCommand> { map( tuple(( tag("position"), space, alt(( complete(parse_position_fen), complete(parse_position_startpos), )), alt(( complete(parse_position_moves), complete(parse_position_moves_empty), )) )), |(_, _, board, moves)| GuiCommand::Position(board, moves) )(input) } fn parse_all(input: &str) -> IResult<&str, GuiCommand> { alt(( complete(parse_ucinewgame), complete(parse_uci), complete(parse_debug), complete(parse_quit), complete(parse_isready), complete(parse_setoption_value), complete(parse_setoption_novalue), complete(parse_register), complete(parse_stop), complete(parse_ponderhit), complete(parse_gui_go), complete(parse_position) ))(input) } impl FromStr for GuiCommand { type Err = Error; fn from_str(s: &str) -> Result<Self, Self::Err> { Ok(parse_all(s)?.1) } } impl fmt::Display for GuiCommand { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { GuiCommand::Uci => writeln!(f, "uci"), GuiCommand::Debug(val) => writeln!(f, "debug {}", if *val { "on" } else { "off" }), GuiCommand::IsReady => writeln!(f, "isready"), GuiCommand::SetOption(name, value) 
=> match value { None => writeln!(f, "setoption name {}", name), Some(v) => writeln!(f, "setoption name {} value {}", name, v), }, GuiCommand::Register(code) => writeln!(f, "register {}", code), GuiCommand::UciNewGame => writeln!(f, "ucinewgame"), GuiCommand::Position(pos, moves) => { if pos == &Board::default() { write!(f, "position startpos")?; } else { write!(f, "position fen {}", pos)?; } if moves.len() != 0 { writeln!( f, "{}", moves .iter() .map(|x| x.to_string()) .collect::<Vec<String>>() .join(" ") ) } else { writeln!(f, "") } } GuiCommand::Go(go) => { write!(f, "go")?; match go.get_ponder() { Some(ref p) => write!(f, "ponder {}", p)?, None => {} }; if go.get_wtime().is_some() { write!(f, " wtime {}", go.get_wtime().unwrap())?; } if go.get_btime().is_some() { write!(f, " btime {}", go.get_btime().unwrap())?; } if go.get_winc().is_some() { write!(f, " winc {}", go.get_winc().unwrap())?; } if go.get_binc().is_some() { write!(f, " binc {}", go.get_binc().unwrap())?; } if go.get_movestogo().is_some() { write!(f, " movestogo {}", go.get_movestogo().unwrap())?; } if go.get_depth().is_some() { write!(f, " depth {}", go.get_depth().unwrap())?; } if go.get_nodes().is_some() { write!(f, " nodes {}", go.get_nodes().unwrap())?; } if go.get_mate().is_some() { write!(f, " mate {}", go.get_mate().unwrap())?; } if go.get_movetime().is_some() { write!(f, " movetime {}", go.get_movetime().unwrap())?; } if go.get_infinite() { write!(f, " infinite")?; } if go.get_search_moves().len() != 0 { write!( f, " searchmoves {}", go.get_search_moves() .iter() .map(|x| x.to_string()) .collect::<Vec<String>>() .join(" ") )?; } writeln!(f, "") } GuiCommand::Stop => writeln!(f, "stop"), GuiCommand::PonderHit => writeln!(f, "ponderhit"), GuiCommand::Quit => writeln!(f, "quit"), } } } #[cfg(test)] fn test_parse(s: &str, c: GuiCommand) { let parsed = GuiCommand::from_str(s); assert_eq!(parsed, Ok(c)); } #[test] fn test_parse_gui() { test_parse("uci", GuiCommand::Uci); } #[test] fn 
test_parse_debug_on() { test_parse("debug on", GuiCommand::Debug(true)); } #[test] fn test_parse_debug_off() { test_parse("debug off", GuiCommand::Debug(false)); } #[test] fn test_parse_setoption_noval() { test_parse( "setoption name test", GuiCommand::SetOption("test".to_string(), None), ); } #[test] fn test_parse_setoption_withval() { test_parse( "setoption name test value value", GuiCommand::SetOption("test".to_string(), Some("value".to_string())), ); } #[test] fn test_isready() { test_parse("isready", GuiCommand::IsReady); } #[test] fn test_registration() { test_parse("register code", GuiCommand::Register("code".to_string())); } #[test] fn test_ucinewgame() { test_parse("ucinewgame", GuiCommand::UciNewGame); } #[test] fn test_stop() { test_parse("stop", GuiCommand::Stop); } #[test] fn test_ponderhit() { test_parse("ponderhit", GuiCommand::PonderHit); } #[test] fn test_quit() { test_parse("quit", GuiCommand::Quit); } #[test] fn test_parse_go_times() { test_parse( "go btime 100 wtime 100\n", GuiCommand::Go(Go::wtime(100).combine(&Go::btime(100))), ); } #[test] fn test_parse_startpos() { test_parse( "position startpos\n", GuiCommand::Position(Board::default(), vec![]), ); } #[test] fn test_parse_startpos_moves() { let e2e4 = ChessMove::new( Square::make_square(Rank::Second, File::E), Square::make_square(Rank::Fourth, File::E), None, ); let e7e5 = ChessMove::new( Square::make_square(Rank::Seventh, File::E), Square::make_square(Rank::Fifth, File::E), None, ); test_parse( "position startpos moves e2e4 e7e5\n", GuiCommand::Position(Board::default(), vec![e2e4, e7e5]), ); } #[test] fn test_position_fen() { test_parse( "position fen rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 0 1\n", GuiCommand::Position(Board::default(), vec![]), ); } #[test] fn test_parse_position_fen_moves() { let e2e4 = ChessMove::new( Square::make_square(Rank::Second, File::E), Square::make_square(Rank::Fourth, File::E), None, ); let e7e5 = ChessMove::new( 
Square::make_square(Rank::Seventh, File::E), Square::make_square(Rank::Fifth, File::E), None, ); test_parse( "position fen rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 0 1 moves e2e4 e7e5\n", GuiCommand::Position(Board::default(), vec![e2e4, e7e5]), ); } #[test] fn test_parse_queening_move() { let queening = ChessMove::new( Square::make_square(Rank::Seventh, File::E), Square::make_square(Rank::Eighth, File::E), Some(Piece::Queen), ); test_parse( "position fen rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 0 1 moves e7e8q\n", GuiCommand::Position(Board::default(), vec![queening]), ); }
true
31035b0b9c3b3e30e6df6531de2cc07b8264552c
Rust
tomasbasham/echo-echo-echo
/src/main.rs
UTF-8
3,235
3.203125
3
[]
no_license
#![deny(warnings)] // A function which runs a future to completion using the Hyper runtime. use hyper::rt::run; // Miscellaneous types from Hyper for working with HTTP. use hyper::{Body, Method, Request, Response, Server, StatusCode}; // This function turns a closure which returns a future into an // implementation of the the Hyper `Service` trait, which is an asynchronous // function from a generic `Request` to a `Response`. use hyper::service::service_fn; // Extension traits providing additional methods on futures. `FutureExt` // adds methods that work for all futures, whereas `TryFutureExt` adds // methods to futures that return `Result` types. use futures::future::{FutureExt, TryFutureExt}; // Extension trait for futures 0.1 futures, adding the `.compat()` method // which allows us to use `.await` on 0.1 futures. use futures::compat::Future01CompatExt; use std::net::{IpAddr, Ipv4Addr}; use std::net::SocketAddr; fn main() { let localhost_v4 = IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0)); // Set the address to run our socket on. let addr = SocketAddr::from((localhost_v4, 3000)); // Call our `run_server` function, which returns a future. As with every // `async fn`, for `run_server` to do anything, the returned future needs to // be run. Additionally, we need to convert the returned future from a // futures 0.3 future into a futures 0.1 future. let futures_03_future = run_server(addr); let futures_01_future = futures_03_future.unit_error().boxed().compat(); // Finally, we can run the future to completion using the `run` function // provided by Hyper. run(futures_01_future); } pub async fn run_server(addr: SocketAddr) { println!("Listening on http://{}", addr); // Create a server bound on the provided address let serve_future = Server::bind(&addr) // Serve requests using our `async serve_fn` function. `serve` takes a // closure which returns a type implementing the `Service` trait. 
// `service_fn` returns a value implementing the `Service` trait, and // accepts a closure which goes from request to a future of the response. // To use our `serve_fn` function with Hyper, we have to box it and put it // in a compatability wrapper to go from a futures 0.3 future (the kind // returned by `async fn`) to a futures 0.1 future (the kind used by Hyper). .serve(|| service_fn(|req| serve_fn(req).boxed().compat())); // Wait for the server to complete serving or exit with an error. If an error // occurred, print it to stderr. if let Err(e) = serve_future.compat().await { eprintln!("server error: {}", e); } } async fn serve_fn(req: Request<Body>) -> Result<Response<Body>, hyper::Error> { let mut response = Response::new(Body::empty()); match (req.method(), req.uri().path()) { (&Method::GET, "/") => { *response.body_mut() = Body::from("Try POSTing data to /echo"); }, (&Method::POST, "/echo") => { *response.body_mut() = req.into_body(); }, _ => { *response.status_mut() = StatusCode::NOT_FOUND; }, }; // Always return successfully with a response containing a body with a // friendly greeting ;) // Ok(Response::new(Body::from("hello, world!"))) Ok(response) }
true
0f5f2fa9585c2aca1cf7d3704385db4a50e739b5
Rust
Keats/kickstart
/src/terminal.rs
UTF-8
3,196
3.328125
3
[ "MIT" ]
permissive
use std::fmt; use std::io::prelude::*; /// Show an error message pub fn error(message: &str) { if let Some(mut t) = term::stderr() { match t.fg(term::color::BRIGHT_RED) { Ok(_) => { write!(t, "{}", message).unwrap(); t.reset().unwrap(); } Err(_) => writeln!(t, "{}", message).unwrap(), }; } else { eprint!("{}", message); } } /// Show a success message pub fn success(message: &str) { if let Some(mut t) = term::stdout() { match t.fg(term::color::BRIGHT_GREEN) { Ok(_) => { write!(t, "{}", message).unwrap(); t.reset().unwrap(); } Err(_) => writeln!(t, "{}", message).unwrap(), }; } else { eprint!("{}", message); } } /// Show a message in bold pub fn bold(message: &str) { if let Some(mut t) = term::stdout() { match t.attr(term::Attr::Bold) { Ok(_) => { write!(t, "{}", message).unwrap(); t.reset().unwrap(); } Err(_) => write!(t, "{}", message).unwrap(), }; } else { eprint!("{}", message); } } /// Show a basic question with all necessary formatting applied pub fn basic_question<T: fmt::Display>(prompt: &str, default: &T, validation: &Option<String>) { if let Some(mut t) = term::stdout() { // check for colour/boldness at the beginning so we can unwrap later if !t.supports_color() || !t.supports_attr(term::Attr::Bold) { if let Some(ref pattern) = validation { write!(t, "{} [default: {}, validation: {}]: ", prompt, default, pattern).unwrap(); } else { write!(t, "{} [default: {}]: ", prompt, default).unwrap(); } return; } t.attr(term::Attr::Bold).unwrap(); write!(t, "{} ", prompt).unwrap(); t.reset().unwrap(); t.fg(term::color::YELLOW).unwrap(); if let Some(ref pattern) = validation { write!(t, "[default: {}, validation: {}]: ", default, pattern).unwrap(); } else { write!(t, "[default: {}]: ", default).unwrap(); } t.reset().unwrap(); } else { eprint!("{} [default: {}]: ", prompt, default); } } /// Show a yes/no question with all necessary formatting applied pub fn bool_question(prompt: &str, default: bool) { let default_str = if default { "[Y/n]" } else { "[y/N]" }; if let 
Some(mut t) = term::stdout() { // check for colour/boldness at the beginning so we can unwrap later if !t.supports_color() || !t.supports_attr(term::Attr::Bold) { write!(t, "{} {}: ", prompt, default_str).unwrap(); return; } t.attr(term::Attr::Bold).unwrap(); write!(t, "{} ", prompt).unwrap(); t.reset().unwrap(); t.fg(term::color::YELLOW).unwrap(); if default { write!(t, "[Y/n]: ").unwrap() } else { write!(t, "[y/N]: ").unwrap() } t.reset().unwrap(); } else { eprint!("{} {}: ", prompt, default_str); } }
true
7dc6854abca53dd9d842f9dc34052a2bd6d43b5d
Rust
rillrate-fossil/rillrate
/pkg-dashboard/rate-ui/src/common/middler.rs
UTF-8
883
2.765625
3
[ "Apache-2.0" ]
permissive
use yew::{html, Children, Component, ComponentLink, Html, Properties, ShouldRender}; pub struct Middler { props: Props, } #[derive(Properties, Clone)] pub struct Props { pub children: Children, } impl Component for Middler { type Message = (); type Properties = Props; fn create(props: Self::Properties, _link: ComponentLink<Self>) -> Self { Self { props } } fn update(&mut self, _msg: Self::Message) -> ShouldRender { false } fn change(&mut self, props: Self::Properties) -> ShouldRender { self.props = props; true } fn view(&self) -> Html { html! { <div class="d-flex flex-grow-1 align-items-center justify-content-center"> <div class="text-center"> { self.props.children.clone() } </div> </div> } } }
true
f73be106b07c533f125174ff6567d51f9ac50428
Rust
balintbalazs/hdf5-rust
/src/hl/space.rs
UTF-8
7,214
2.640625
3
[ "MIT", "Apache-2.0", "LicenseRef-scancode-unknown-license-reference" ]
permissive
use std::convert::AsRef; use std::fmt::{self, Debug}; use std::ops::Deref; use std::ptr; use ndarray::SliceOrIndex; use hdf5_sys::h5s::{ H5Scopy, H5Screate_simple, H5Sget_simple_extent_dims, H5Sget_simple_extent_ndims, H5Sselect_hyperslab, H5S_SELECT_SET, }; use crate::internal_prelude::*; /// Represents the HDF5 dataspace object. #[repr(transparent)] #[derive(Clone)] pub struct Dataspace(Handle); impl ObjectClass for Dataspace { const NAME: &'static str = "dataspace"; const VALID_TYPES: &'static [H5I_type_t] = &[H5I_DATASPACE]; fn from_handle(handle: Handle) -> Self { Self(handle) } fn handle(&self) -> &Handle { &self.0 } fn short_repr(&self) -> Option<String> { if self.ndim() == 1 { Some(format!("({},)", self.dims()[0])) } else { let dims = self.dims().iter().map(ToString::to_string).collect::<Vec<_>>().join(", "); Some(format!("({})", dims)) } } } impl Debug for Dataspace { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.debug_fmt(f) } } impl Deref for Dataspace { type Target = Object; fn deref(&self) -> &Object { unsafe { self.transmute() } } } impl Dataspace { /// Copies the dataspace. pub fn copy(&self) -> Self { Self::from_id(h5lock!(H5Scopy(self.id()))).unwrap_or_else(|_| Self::invalid()) } /// Select a slice (known as a 'hyperslab' in HDF5 terminology) of the Dataspace. /// Returns the shape of array that is capable of holding the resulting slice. /// Useful when you want to read a subset of a dataset. 
pub fn select_slice<S>(&self, slice: S) -> Result<Vec<Ix>> where S: AsRef<[SliceOrIndex]>, { let shape = self.dims(); let ss: &[SliceOrIndex] = slice.as_ref(); let mut start_vec = Vec::with_capacity(ss.len()); let mut stride_vec = Vec::with_capacity(ss.len()); let mut count_vec = Vec::with_capacity(ss.len()); let mut shape_vec = Vec::with_capacity(ss.len()); for i in 0..ss.len() { let (start, stride, count) = Self::get_start_stride_count(&ss[i], shape[i])?; start_vec.push(start); stride_vec.push(stride); count_vec.push(count); shape_vec.push(count as Ix); } h5try!(H5Sselect_hyperslab( self.id(), H5S_SELECT_SET, start_vec.as_ptr(), stride_vec.as_ptr(), count_vec.as_ptr(), ptr::null() )); Ok(shape_vec) } fn get_start_stride_count(v: &SliceOrIndex, len: Ix) -> Result<(u64, u64, u64)> { match v { SliceOrIndex::Slice { start, end, step } => { let end = end.unwrap_or(len as isize); ensure!(end <= len as _, "slice extends beyond dataspace bounds"); ensure!(*step >= 1, "step must be >= 1 (got {})", step); if end < *start { return Ok((0, 1, 0)); } let count = if (end - start) <= 0 { 0 } else { 1 + (end - start - 1) / step }; Ok((*start as u64, *step as u64, count as u64)) } SliceOrIndex::Index(v) => Ok((*v as u64, 1, 1)), } } pub fn try_new<D: Dimension>(d: D, resizable: bool) -> Result<Self> { let rank = d.ndim(); let mut dims: Vec<hsize_t> = vec![]; let mut max_dims: Vec<hsize_t> = vec![]; for dim in &d.dims() { dims.push(*dim as _); max_dims.push(if resizable { H5S_UNLIMITED } else { *dim as _ }); } Self::from_id(h5try!(H5Screate_simple(rank as _, dims.as_ptr(), max_dims.as_ptr()))) } pub fn maxdims(&self) -> Vec<Ix> { let ndim = self.ndim(); if ndim > 0 { let mut maxdims: Vec<hsize_t> = Vec::with_capacity(ndim); unsafe { maxdims.set_len(ndim); } if h5call!(H5Sget_simple_extent_dims(self.id(), ptr::null_mut(), maxdims.as_mut_ptr())) .is_ok() { return maxdims.iter().cloned().map(|x| x as _).collect(); } } vec![] } pub fn resizable(&self) -> bool { 
self.maxdims().iter().any(|&x| x == H5S_UNLIMITED as _) } } impl Dimension for Dataspace { fn ndim(&self) -> usize { h5call!(H5Sget_simple_extent_ndims(self.id())).unwrap_or(0) as _ } fn dims(&self) -> Vec<Ix> { let ndim = self.ndim(); if ndim > 0 { let mut dims: Vec<hsize_t> = Vec::with_capacity(ndim); unsafe { dims.set_len(ndim); } if h5call!(H5Sget_simple_extent_dims(self.id(), dims.as_mut_ptr(), ptr::null_mut())) .is_ok() { return dims.iter().cloned().map(|x| x as _).collect(); } } vec![] } } #[cfg(test)] pub mod tests { use crate::internal_prelude::*; #[test] pub fn test_dimension() { fn f<D: Dimension>(d: D) -> (usize, Vec<Ix>, Ix) { (d.ndim(), d.dims(), d.size()) } assert_eq!(f(()), (0, vec![], 1)); assert_eq!(f(&()), (0, vec![], 1)); assert_eq!(f(2), (1, vec![2], 2)); assert_eq!(f(&3), (1, vec![3], 3)); assert_eq!(f((4,)), (1, vec![4], 4)); assert_eq!(f(&(5,)), (1, vec![5], 5)); assert_eq!(f((1, 2)), (2, vec![1, 2], 2)); assert_eq!(f(&(3, 4)), (2, vec![3, 4], 12)); assert_eq!(f(vec![2, 3]), (2, vec![2, 3], 6)); assert_eq!(f(&vec![4, 5]), (2, vec![4, 5], 20)); } #[test] pub fn test_debug() { assert_eq!(format!("{:?}", Dataspace::try_new((), true).unwrap()), "<HDF5 dataspace: ()>"); assert_eq!(format!("{:?}", Dataspace::try_new(3, true).unwrap()), "<HDF5 dataspace: (3,)>"); assert_eq!( format!("{:?}", Dataspace::try_new((1, 2), true).unwrap()), "<HDF5 dataspace: (1, 2)>" ); } #[test] pub fn test_dataspace() { let _e = silence_errors(); assert_err!( Dataspace::try_new(H5S_UNLIMITED as Ix, true), "current dimension must have a specific size" ); let d = Dataspace::try_new((5, 6), true).unwrap(); assert_eq!((d.ndim(), d.dims(), d.size()), (2, vec![5, 6], 30)); assert_eq!(Dataspace::try_new((), true).unwrap().dims(), vec![]); assert_err!(Dataspace::from_id(H5I_INVALID_HID), "Invalid dataspace id"); let dc = d.copy(); assert!(dc.is_valid()); assert_ne!(dc.id(), d.id()); assert_eq!((d.ndim(), d.dims(), d.size()), (dc.ndim(), dc.dims(), dc.size())); 
assert_eq!(Dataspace::try_new((5, 6), false).unwrap().maxdims(), vec![5, 6]); assert_eq!(Dataspace::try_new((5, 6), false).unwrap().resizable(), false); assert_eq!( Dataspace::try_new((5, 6), true).unwrap().maxdims(), vec![H5S_UNLIMITED as _, H5S_UNLIMITED as _] ); assert_eq!(Dataspace::try_new((5, 6), true).unwrap().resizable(), true); } }
true
0b54122112fed9a198e35f69aae029c39389d201
Rust
ericrobolson/Archived_Tremor
/v0/src/gfx/voxels/mod.rs
UTF-8
14,751
2.609375
3
[ "MIT" ]
permissive
//! Voxel render pass: converts ECS voxel chunks into GPU vertex buffers and
//! draws them, double-buffering each entity's mesh so one copy can be updated
//! while the other is drawn.

use rayon::prelude::*;
use wgpu::util::DeviceExt;

use super::{model_transform::ModelTransform, poly_renderer::BindGroups, vertex::Vertex};
use crate::lib_core::{
    ecs::{Entity, Mask, MaskType, World},
    spatial,
    time::GameFrame,
    voxels::{Chunk, Voxel},
};

pub mod palette;
pub mod texture_voxels;

// Index into the color palette; u32 so it maps directly onto a shader `uint`.
type PaletteIndexType = u32;

/// A single vertex of a voxel chunk mesh: position plus a palette index that
/// the shader resolves to a color.
// #[repr(C)] guarantees field order/layout so the struct can be memcpy'd into
// a GPU vertex buffer via bytemuck.
#[repr(C)]
#[derive(Copy, Clone, Debug)]
pub struct VoxelChunkVertex {
    position: [f32; 3],
    palette_index: PaletteIndexType,
}

// SAFETY: VoxelChunkVertex is #[repr(C)], Copy, and composed only of plain
// numeric fields with no padding requirements beyond f32/u32 alignment, so it
// is valid for any bit pattern (Pod) and for the all-zero pattern (Zeroable).
unsafe impl bytemuck::Pod for VoxelChunkVertex {}
unsafe impl bytemuck::Zeroable for VoxelChunkVertex {}

impl VoxelChunkVertex {
    /// Zips a flat `[x, y, z, x, y, z, ...]` position list with one palette
    /// index per vertex into typed vertices.
    ///
    /// Assumes `chunk_verts.len()` is a multiple of 3 and
    /// `palette_indices.len() == chunk_verts.len() / 3` — TODO confirm;
    /// out-of-range inputs would panic on indexing.
    pub fn from_verts(chunk_verts: Vec<f32>, palette_indices: Vec<u8>) -> Vec<Self> {
        let mut verts = vec![];
        for i in 0..chunk_verts.len() / 3 {
            let j = i * 3;
            let (k, l, m) = (j, j + 1, j + 2);
            let pos: [f32; 3] = [chunk_verts[k], chunk_verts[l], chunk_verts[m]];
            let palette_index = palette_indices[i];
            verts.push(Self {
                position: pos,
                palette_index: palette_index as PaletteIndexType,
            });
        }
        verts
    }
}

impl Vertex for VoxelChunkVertex {
    /// Describes the vertex layout to wgpu: location 0 = position (Float3),
    /// location 1 = palette index (Uint), tightly packed per vertex.
    fn desc<'a>() -> wgpu::VertexBufferDescriptor<'a> {
        wgpu::VertexBufferDescriptor {
            stride: std::mem::size_of::<VoxelChunkVertex>() as wgpu::BufferAddress,
            step_mode: wgpu::InputStepMode::Vertex,
            attributes: &[
                wgpu::VertexAttributeDescriptor {
                    offset: 0,
                    shader_location: 0,
                    format: wgpu::VertexFormat::Float3,
                },
                wgpu::VertexAttributeDescriptor {
                    // Palette index sits immediately after the 3-float position.
                    offset: std::mem::size_of::<[f32; 3]>() as wgpu::BufferAddress,
                    shader_location: 1,
                    format: wgpu::VertexFormat::Uint,
                },
            ],
        }
    }
}

/// Which of the two per-entity meshes is being drawn vs. updated this frame.
#[derive(Copy, Clone, PartialEq, Debug)]
enum DoubleBuffer {
    Draw0Update1,
    Draw1Update0,
}

impl DoubleBuffer {
    /// Returns the opposite state; called once per frame to flip roles.
    pub fn swap(&self) -> DoubleBuffer {
        match self {
            DoubleBuffer::Draw0Update1 => DoubleBuffer::Draw1Update0,
            DoubleBuffer::Draw1Update0 => DoubleBuffer::Draw0Update1,
        }
    }
}

/// The render pass: one `(Mesh, Mesh)` pair per ECS entity slot, plus the
/// current double-buffer phase.
pub struct VoxelPass {
    meshes: Vec<(Mesh, Mesh)>,
    double_buffer: DoubleBuffer,
}

// An entity participates in this pass only if it has both a transform and a
// voxel chunk component.
const VOXEL_PASS_MASK: MaskType = Mask::TRANSFORM | Mask::VOXEL_CHUNK;
fn active_entity(entity: Entity, world: &World) -> bool {
    return world.masks[entity] & VOXEL_PASS_MASK == VOXEL_PASS_MASK;
}

impl VoxelPass {
    /// Builds meshes for every entity slot up front.
    pub fn new(
        world: &World,
        bind_groups: &BindGroups,
        device: &wgpu::Device,
        queue: &wgpu::Queue,
    ) -> Self {
        // Iterate over all entities, regardless of whether it's active or not.
        // Creates everything at once for minimal memory allocation.
        let entities = (0..world.max_entities()).collect::<Vec<usize>>();
        let meshes: Vec<(Mesh, Mesh)> = entities
            .par_iter()
            .map(|entity| {
                let entity = *entity;
                // Convert entity into mesh. Right now just initializing
                // everything at once instead of doing it piecemeal.
                // Create 2 meshes per entity. This is used for 'double
                // buffering'. E.g. update mesh 1 and draw mesh 0, then next
                // frame update mesh 0 and draw 1, repeat ad nauseam.
                let chunk = &world.voxel_chunks[entity];
                let transform = &world.transforms[entity];
                let is_active = active_entity(entity, world);
                let mesh0 = Mesh::new(
                    entity, is_active, chunk, transform, bind_groups, device, queue,
                );
                let mesh1 = Mesh::new(
                    entity, is_active, chunk, transform, bind_groups, device, queue,
                );
                (mesh0, mesh1)
            })
            .collect();
        Self {
            meshes,
            double_buffer: DoubleBuffer::Draw0Update1,
        }
    }

    /// Flips the double buffer, then refreshes the *non-drawn* mesh of every
    /// entity in parallel from current world state.
    pub fn update(&mut self, world: &World, device: &wgpu::Device, queue: &wgpu::Queue) {
        // Change which buffer we're updating and drawing
        self.double_buffer = self.double_buffer.swap();
        // Copy into a local so the closure below doesn't borrow `self`.
        let double_buffer = self.double_buffer;

        // Update all meshes
        self.meshes.par_iter_mut().for_each(|(m0, m1)| {
            let entity = m0.entity;
            let chunk = &world.voxel_chunks[entity];
            let transform = &world.transforms[entity];
            let is_active = active_entity(entity, world);

            if double_buffer == DoubleBuffer::Draw1Update0 {
                // Update 0
                m0.update(is_active, chunk, transform, device, queue);
            } else {
                // Update 1
                m1.update(is_active, chunk, transform, device, queue);
            }
        });
    }

    /// Records draw calls for whichever mesh of each pair is in the "draw"
    /// role this frame.
    pub fn draw<'a>(&'a self, render_pass: &mut wgpu::RenderPass<'a>) {
        // Draw each chunk.
        // TODO: frustrum culling
        for (m0, m1) in &self.meshes {
            if self.double_buffer == DoubleBuffer::Draw0Update1 {
                // Draw 0
                m0.draw(render_pass);
            } else {
                // Draw 1
                m1.draw(render_pass);
            }
        }
    }
}

/// Result of allocating + filling a chunk's vertex buffer.
struct MeshBufferVerts {
    // Chunk capacity at allocation time; a capacity change forces reallocation.
    max_capacity: (usize, usize, usize),
    // Number of vertices actually written (may be far below buffer capacity).
    vert_len: usize,
    buffer: wgpu::Buffer,
}

/// Allocates a vertex buffer sized for the worst case (every voxel visible)
/// and uploads the chunk's current mesh into it.
fn create_mesh_buffer_verts(
    chunk: &Chunk,
    device: &wgpu::Device,
    queue: &wgpu::Queue,
) -> MeshBufferVerts {
    // Calculate total length of buffer e.g. a full chunk of different voxels.
    // This way a new buffer only has to be created when the voxel capacity is
    // changed.
    let verts = Mesh::verts(chunk);
    let vert_len = verts.len();

    let single_cube_verts = Mesh::cube_verts().len();
    let single_cube_color_verts =
        (single_cube_verts / 3) * std::mem::size_of::<PaletteIndexType>(); // One PaletteIndexType per 3 verts
    let max_voxels = {
        let (x, y, z) = chunk.capacity();
        x * y * z
    };
    // NOTE(review): this sizes the palette-index portion in f32 units too
    // (the whole sum is multiplied by size_of::<f32>()), which over- rather
    // than under-allocates — confirm intent.
    let max_buf_size =
        (single_cube_verts + single_cube_color_verts) * max_voxels * std::mem::size_of::<f32>();

    let buffer = device.create_buffer(&wgpu::BufferDescriptor {
        label: None,
        mapped_at_creation: false,
        size: max_buf_size as u64,
        usage: wgpu::BufferUsage::VERTEX | wgpu::BufferUsage::COPY_DST,
    });

    if vert_len > 0 {
        queue.write_buffer(&buffer, 0, bytemuck::cast_slice(&verts));
    }

    MeshBufferVerts {
        buffer,
        vert_len,
        max_capacity: chunk.capacity(),
    }
}

/// How chunk voxels are turned into triangles. Only `Dumb` behavior exists
/// today: `RunLength` is a copy of `Dumb` (see TODO in `Mesh::verts`) and
/// `Greedy` is unimplemented.
enum MeshingStrategy {
    Dumb,
    RunLength,
    Greedy,
}

/// GPU-side state for one entity's voxel chunk: vertex buffer, model
/// transform uniform, and bookkeeping for incremental updates.
struct Mesh {
    entity: usize,
    // Frame stamp of the last chunk state we meshed; compared against
    // chunk.last_update() to skip redundant remeshing.
    last_updated: GameFrame,
    vert_len: usize,
    mesh_buffer: wgpu::Buffer,
    max_voxel_capacity: (usize, usize, usize),
    transform_buffer: wgpu::Buffer,
    transform_bind_group: wgpu::BindGroup,
    last_transform: spatial::Transform,
    active: bool,
}

impl Mesh {
    /// Creates the vertex buffer, transform uniform buffer, and bind group
    /// for one entity.
    fn new(
        entity: usize,
        active: bool,
        chunk: &Chunk,
        transform: &spatial::Transform,
        bind_groups: &BindGroups,
        device: &wgpu::Device,
        queue: &wgpu::Queue,
    ) -> Self {
        let (mesh_buffer, vert_len, max_voxel_capacity) = {
            let data = create_mesh_buffer_verts(chunk, device, queue);
            (data.buffer, data.vert_len, data.max_capacity)
        };

        let (transform_bind_group, transform_buffer) = {
            // Shadowing: `transform` becomes the GPU-side uniform struct.
            let transform = ModelTransform::new(*transform);
            let buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
                label: Some("Transform Buffer"),
                contents: bytemuck::cast_slice(&[transform]),
                usage: wgpu::BufferUsage::UNIFORM | wgpu::BufferUsage::COPY_DST,
            });
            let bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {
                layout: &bind_groups.model_transform_layout,
                entries: &[wgpu::BindGroupEntry {
                    binding: 0,
                    resource: wgpu::BindingResource::Buffer(buffer.slice(..)),
                }],
                label: Some("transform_bind_group"),
            });
            (bind_group, buffer)
        };

        Self {
            active,
            entity,
            vert_len,
            mesh_buffer,
            max_voxel_capacity,
            transform_bind_group,
            transform_buffer,
            last_transform: *transform,
            last_updated: 0,
        }
    }

    /// Re-uploads mesh and/or transform data when the chunk or transform
    /// changed since the last update; cheap no-op otherwise.
    fn update(
        &mut self,
        active: bool,
        chunk: &Chunk,
        transform: &spatial::Transform,
        device: &wgpu::Device,
        queue: &wgpu::Queue,
    ) {
        self.active = active;
        if !self.active {
            return;
        }

        // If chunk capacity was changed, redo the buffer
        if chunk.capacity() != self.max_voxel_capacity {
            let (mesh_buffer, vert_len, max_voxel_capacity) = {
                let data = create_mesh_buffer_verts(chunk, device, queue);
                (data.buffer, data.vert_len, data.max_capacity)
            };
            self.vert_len = vert_len;
            self.mesh_buffer = mesh_buffer;
            self.max_voxel_capacity = max_voxel_capacity;
            self.last_updated = chunk.last_update();
        } else
        // Remesh if more recent
        if self.last_updated < chunk.last_update() {
            self.last_updated = chunk.last_update();
            let verts = Self::verts(chunk);
            self.vert_len = verts.len();
            if verts.len() > 0 {
                queue.write_buffer(&self.mesh_buffer, 0, bytemuck::cast_slice(&verts));
            }
        }

        // Update transform if different
        // NOTE(review): last_transform is never refreshed here, so a moved
        // entity re-uploads every frame afterwards — confirm intended.
        if *transform != self.last_transform {
            let transform = ModelTransform::new(*transform);
            queue.write_buffer(
                &self.transform_buffer,
                0,
                bytemuck::cast_slice(&[transform]),
            );
        }
    }

    /// Binds this mesh's transform and vertex buffer and issues the draw.
    fn draw<'a>(&'a self, render_pass: &mut wgpu::RenderPass<'a>) {
        if self.vert_len > 0 && self.active {
            // BindGroups::MODEL_TRANSFORM is presumably the bind-group slot
            // index declared by the poly renderer — confirm there.
            render_pass.set_bind_group(
                BindGroups::MODEL_TRANSFORM,
                &self.transform_bind_group,
                &[],
            );
            render_pass.set_vertex_buffer(0, self.mesh_buffer.slice(..));
            render_pass.draw(0..self.vert_len as u32, 0..1);
        }
    }

    /// Unit-cube triangle list (36 vertices, 12 triangles) centered at the
    /// origin, scaled to half extent 0.5 so adjacent voxels touch exactly.
    fn cube_verts() -> Vec<f32> {
        vec![
            -1.0, -1.0, -1.0, // triangle 1 : begin
            -1.0, -1.0, 1.0,
            -1.0, 1.0, 1.0, // triangle 1 : end
            1.0, 1.0, -1.0, // triangle 2 : begin
            -1.0, -1.0, -1.0,
            -1.0, 1.0, -1.0, // triangle 2 : end
            1.0, -1.0, 1.0,
            -1.0, -1.0, -1.0,
            1.0, -1.0, -1.0,
            1.0, 1.0, -1.0,
            1.0, -1.0, -1.0,
            -1.0, -1.0, -1.0,
            -1.0, -1.0, -1.0,
            -1.0, 1.0, 1.0,
            -1.0, 1.0, -1.0,
            1.0, -1.0, 1.0,
            -1.0, -1.0, 1.0,
            -1.0, -1.0, -1.0,
            -1.0, 1.0, 1.0,
            -1.0, -1.0, 1.0,
            1.0, -1.0, 1.0,
            1.0, 1.0, 1.0,
            1.0, -1.0, -1.0,
            1.0, 1.0, -1.0,
            1.0, -1.0, -1.0,
            1.0, 1.0, 1.0,
            1.0, -1.0, 1.0,
            1.0, 1.0, 1.0,
            1.0, 1.0, -1.0,
            -1.0, 1.0, -1.0,
            1.0, 1.0, 1.0,
            -1.0, 1.0, -1.0,
            -1.0, 1.0, 1.0,
            1.0, 1.0, 1.0,
            -1.0, 1.0, 1.0,
            1.0, -1.0, 1.0,
        ]
        .iter()
        .map(|v| v / 2.0) // Need to divide it in half otherwise it's too large.
        .collect()
    }

    /// Meshes the chunk: one translated cube per non-empty, non-occluded
    /// voxel, returning typed vertices with palette indices attached.
    fn verts(chunk: &Chunk) -> Vec<VoxelChunkVertex> {
        let mut verts = vec![];
        let mut palette_colors = vec![];
        let (x_size, y_size, z_size) = chunk.capacity();

        let meshing_strategy = MeshingStrategy::RunLength;

        // NOTE(review): the Dumb and RunLength arms are currently identical —
        // run-length encoding has not been implemented yet (see TODOs below).
        match meshing_strategy {
            MeshingStrategy::Dumb => {
                for z in 0..z_size {
                    let zf32 = z as f32;
                    for y in 0..y_size {
                        let yf32 = y as f32;
                        // TODO: run length encoding here.
                        for x in 0..x_size {
                            let xf32 = x as f32;
                            let voxel = chunk.voxel(x, y, z);
                            if voxel == Voxel::Empty || chunk.occluded(x, y, z) {
                                continue;
                            }
                            let mut cube = Self::cube_verts();
                            let mut i = 0;
                            while i < cube.len() {
                                // adjust positions
                                cube[i] += xf32;
                                cube[i + 1] += yf32;
                                cube[i + 2] += zf32;

                                // Add palette color for this vert
                                palette_colors.push(voxel.palette_index());

                                i += 3;
                            }
                            verts.append(&mut cube);
                        }
                    }
                }
            }
            MeshingStrategy::RunLength => {
                for z in 0..z_size {
                    let zf32 = z as f32;
                    for y in 0..y_size {
                        let yf32 = y as f32;
                        // TODO: run length encoding here.
                        for x in 0..x_size {
                            let xf32 = x as f32;
                            let voxel = chunk.voxel(x, y, z);
                            if voxel == Voxel::Empty || chunk.occluded(x, y, z) {
                                continue;
                            }
                            let mut cube = Self::cube_verts();
                            let mut i = 0;
                            while i < cube.len() {
                                // adjust positions
                                cube[i] += xf32;
                                cube[i + 1] += yf32;
                                cube[i + 2] += zf32;

                                // Add palette color for this vert
                                palette_colors.push(voxel.palette_index());

                                i += 3;
                            }
                            verts.append(&mut cube);
                        }
                    }
                }
            }
            MeshingStrategy::Greedy => {
                unimplemented!();
            }
        }

        VoxelChunkVertex::from_verts(verts, palette_colors)
    }
}
true
48f900a92c13551742c88db8db5d5f3913779a2b
Rust
EugeneGonzalez/aoc_2020
/src/day2.rs
UTF-8
1,186
3.125
3
[]
no_license
use aoc_runner_derive::{aoc, aoc_generator};
use parse_display::{Display, FromStr};
use std::error::Error;

/// One line of puzzle input: a `min`-`max` policy for `letter` plus the
/// password it applies to, e.g. `1-3 a: abcde`.
#[derive(Display, FromStr, PartialEq, Debug)]
#[display("{min}-{max} {letter}: {password}")]
struct PasswordRule {
    min: usize,
    max: usize,
    letter: char,
    password: String,
}

/// Parses every input line into a `PasswordRule`, failing on the first
/// malformed line.
#[aoc_generator(day2)]
fn parse_input_day2(input: &str) -> Result<Vec<PasswordRule>, impl Error> {
    input.lines().map(|l| l.parse()).collect()
}

/// Part 1: a password is valid when `letter` occurs between `min` and `max`
/// times, inclusive.
///
/// Takes `&[PasswordRule]` rather than `&Vec<PasswordRule>` (clippy
/// `ptr_arg`); callers passing `&Vec` still work via deref coercion.
#[aoc(day2, part1)]
fn part1(rules: &[PasswordRule]) -> usize {
    rules
        .iter()
        .filter(|rule| (rule.min..=rule.max).contains(&rule.password.matches(rule.letter).count()))
        .count()
}

/// Part 2: exactly one of the 1-based positions `min` and `max` must hold
/// `letter` (XOR of the two position checks).
#[aoc(day2, part2)]
fn part2(rules: &[PasswordRule]) -> usize {
    rules
        .iter()
        .filter(|rule| {
            // 1-based position check; an out-of-range position simply doesn't
            // match (chars().nth returns None). Positions are >= 1 in AoC
            // input, so `pos - 1` cannot underflow.
            let holds_letter =
                |pos: usize| rule.password.chars().nth(pos - 1) == Some(rule.letter);
            holds_letter(rule.min) ^ holds_letter(rule.max)
        })
        .count()
}
true
dc064094bfb5382b3257d80679ae9d9329a65d03
Rust
suren-m/rsw
/async-app/src/main.rs
UTF-8
741
2.8125
3
[]
no_license
use async_std::fs;
use std::io::Error;
use tide::prelude::*;
use tide::Request;

/// Path of the configuration file read at startup.
const CONFIG_FILE: &str = "config.txt";

/// Reads the configuration file at `path` into a string.
async fn get_config(path: &str) -> Result<String, Error> {
    fs::read_to_string(path).await
}

/// Request body for the shoe-order endpoint.
#[derive(Debug, Deserialize)]
struct Animal {
    name: String,
    legs: u8,
}

#[async_std::main]
async fn main() -> tide::Result<()> {
    // Read the config declared above. The previous hard-coded
    // "~/data/british-english" path could never succeed: `~` is a shell
    // expansion, not something the filesystem API interprets, and it also
    // ignored the CONFIG_FILE constant entirely.
    let _config = get_config(CONFIG_FILE).await?;

    let mut app = tide::new();
    app.at("/orders/shoes").post(order_shoes);
    app.listen("127.0.0.1:8080").await?;
    Ok(())
}

/// POST /orders/shoes: deserializes an `Animal` from the JSON body and echoes
/// an order confirmation.
async fn order_shoes(mut req: Request<()>) -> tide::Result {
    let Animal { name, legs } = req.body_json().await?;
    Ok(format!("Hello, {} Order for {} shoes", name, legs).into())
}
true
20b1d4e52f614cbedeaf472333c5f5227204b749
Rust
zaeleus/noodles
/noodles-fasta/src/writer.rs
UTF-8
3,140
3.53125
4
[ "MIT" ]
permissive
//! FASTA writer. mod builder; pub use self::builder::Builder; use std::io::{self, Write}; use super::{record::Sequence, Record}; /// A FASTA writer. pub struct Writer<W> { inner: W, line_base_count: usize, } impl<W> Writer<W> where W: Write, { /// Creates a FASTA writer. /// /// # Examples /// /// ``` /// use noodles_fasta as fasta; /// let writer = fasta::Writer::new(Vec::new()); /// ``` pub fn new(inner: W) -> Self { Builder::default().build_with_writer(inner) } /// Returns a reference to the underlying writer. /// /// # Examples /// /// ``` /// use noodles_fasta as fasta; /// let writer = fasta::Writer::new(Vec::new()); /// assert!(writer.get_ref().is_empty()); /// ``` pub fn get_ref(&self) -> &W { &self.inner } /// Writes a FASTA record. /// /// By default, sequence lines are hard wrapped at 80 bases. This can be changed by using /// [`Builder::set_line_base_count`] when creating the writer. /// /// # Examples /// /// ``` /// # use std::io; /// use noodles_fasta::{self as fasta, record::{Definition, Sequence}}; /// /// let mut writer = fasta::Writer::new(Vec::new()); /// /// let definition = Definition::new("sq0", None); /// let sequence = Sequence::from(b"ACGT".to_vec()); /// let record = fasta::Record::new(definition, sequence); /// /// writer.write_record(&record)?; /// /// assert_eq!(writer.get_ref(), b">sq0\nACGT\n"); /// # Ok::<(), io::Error>(()) /// ``` pub fn write_record(&mut self, record: &Record) -> io::Result<()> { writeln!(self.inner, "{}", record.definition())?; write_record_sequence(&mut self.inner, record.sequence(), self.line_base_count)?; Ok(()) } } fn write_record_sequence<W>( writer: &mut W, sequence: &Sequence, line_bases: usize, ) -> io::Result<()> where W: Write, { for bases in sequence.as_ref().chunks(line_bases) { writer.write_all(bases)?; writeln!(writer)?; } Ok(()) } #[cfg(test)] mod tests { use super::*; #[test] fn test_new() { let writer = Writer::new(Vec::new()); assert_eq!(writer.line_base_count, 80); } #[test] fn 
test_write_record_sequence() -> io::Result<()> { let mut writer = Vec::new(); let sequence = Sequence::from(b"AC".to_vec()); write_record_sequence(&mut writer, &sequence, 4)?; assert_eq!(writer, b"AC\n"); writer.clear(); let sequence = Sequence::from(b"ACGT".to_vec()); write_record_sequence(&mut writer, &sequence, 4)?; assert_eq!(writer, b"ACGT\n"); writer.clear(); let sequence = Sequence::from(b"ACGTACGT".to_vec()); write_record_sequence(&mut writer, &sequence, 4)?; assert_eq!(writer, b"ACGT\nACGT\n"); writer.clear(); let sequence = Sequence::from(b"ACGTACGTAC".to_vec()); write_record_sequence(&mut writer, &sequence, 4)?; assert_eq!(writer, b"ACGT\nACGT\nAC\n"); Ok(()) } }
true
d54ef4e50caf53fa890e9caeaea60e4a69fbb65b
Rust
AravindGopala/PracticePrograms
/Rust/helloworld/src/main.rs
UTF-8
114
2.921875
3
[]
no_license
/// Entry point: demonstrates an explicitly type-annotated binding and prints
/// a greeting that includes it.
fn main() {
    // A local with an explicit type annotation (inference would also pick i32).
    let count: i32 = 10;
    println!("Hello, world!, {}", count);
}
true
070cc45f9098d892097a1ca13b89d2b5ebd451c4
Rust
danambrogio/roll20
/src/main.rs
UTF-8
1,710
3.109375
3
[ "MIT" ]
permissive
#[macro_use]
extern crate clap;
extern crate rand;

use clap::App;
use rand::Rng;

/// CLI dice roller: rolls `num` `die`-sided dice, optionally summing them or
/// rolling with advantage/disadvantage (roll twice, keep high/low).
fn main() {
    let yaml = load_yaml!("../cli.yml");
    let matches = App::from_yaml(yaml).get_matches();

    let die_opt = matches.value_of("die").unwrap_or("20").parse::<i32>().unwrap();
    let num_opt = matches.value_of("num").unwrap_or("1").parse::<i32>().unwrap();
    let sum_opt = matches.is_present("sum");
    let advantage_opt = matches.is_present("advantage");
    let disadvantage_opt = matches.is_present("disadvantage");

    if advantage_opt {
        let (result, dropped) = roll_with_advantage(die_opt);
        // Range label fixed from "[0 - N]": rolls are always at least 1.
        println!("You rolled {}! (dropped {}) [1 - {}]", result, dropped, die_opt);
    } else if disadvantage_opt {
        let (result, dropped) = roll_with_disadvantage(die_opt);
        println!("You rolled {}! (dropped {}) [1 - {}]", result, dropped, die_opt);
    } else {
        let mut rolls: Vec<i32> = Vec::new();
        for _ in 0..num_opt {
            let result = roll(die_opt);
            rolls.push(result);
            println!("You rolled {}! [1 - {}]", result, die_opt);
        }
        if sum_opt {
            println!("---");
            println!("Total: {}", rolls.iter().sum::<i32>());
        }
    }
}

/// Rolls a single `die`-sided die, returning a value in `1..=die`.
fn roll(die: i32) -> i32 {
    let mut rng = rand::thread_rng();
    // BUG FIX: rand's two-argument `gen_range(low, high)` is exclusive of
    // `high`, so the old `gen_range(1, die)` could never roll the top face
    // (a d20 only ever produced 1..=19). `die + 1` makes the range inclusive.
    rng.gen_range(1, die + 1)
}

/// Rolls twice and returns `(kept, dropped)`, keeping the higher roll.
fn roll_with_advantage(die: i32) -> (i32, i32) {
    let (first, second) = (roll(die), roll(die));
    if first >= second {
        (first, second)
    } else {
        (second, first)
    }
}

/// Rolls twice and returns `(kept, dropped)`, keeping the lower roll.
fn roll_with_disadvantage(die: i32) -> (i32, i32) {
    let (first, second) = (roll(die), roll(die));
    if first <= second {
        (first, second)
    } else {
        (second, first)
    }
}
true
2b74b5d9cb64e2dcab2d513e41a66206b696b23b
Rust
gen0083/atcoder_python
/rust/abc230/src/bin/c.rs
UTF-8
978
2.59375
3
[]
no_license
use std::cmp::{max, min};

use proconio::input;

// Prints rows p..=q of an n x n grid, restricted to columns r..=s, where two
// diagonal rays through the cell (a, b) are painted '#':
//   - ray 1: cells (a + k, b + k)  (main diagonal)
//   - ray 2: cells (a + k, b - k)  (anti-diagonal)
// for every offset k that keeps the cell inside the grid.
fn main() {
    input! {
        n: i64,
        a: i64,
        b: i64,
        p: i64,
        q: i64,
        r: i64,
        s: i64,
    }
    // One output row: columns r..=s, all unpainted ('.') by default.
    let template = ".".repeat((s - r + 1) as usize);
    // Valid offset range for the main diagonal: both a + k and b + k must
    // lie in 1..=n, so k >= max(1 - a, 1 - b) and k <= min(n - a, n - b).
    let p1f = max(1 - a, 1 - b);
    let p1t = min(n - a, n - b);
    // Valid offset range for the anti-diagonal: a + k in 1..=n and b - k in
    // 1..=n, so k >= max(1 - a, b - n) and k <= min(n - a, b - 1).
    let p2f = max(1 - a, b - n);
    let p2t = min(n - a, b - 1);
    let p1range = p1f..=p1t;
    let p2range = p2f..=p2t;
    for c in p..=q {
        // k is this row's offset from the pivot row a.
        let k = c - a;
        let mut t = template.clone();
        if p1range.contains(&k) {
            // Main-diagonal column b + k, shifted into the r..=s window.
            let b1 = b + k - r;
            if b1 >= 0 && b1 <= s - r {
                let b1 = b1 as usize;
                // Columns are '.'/'#' only (ASCII), so byte-range replace is
                // safe here.
                t.replace_range(b1..b1 + 1, "#");
            }
        }
        if p2range.contains(&k) {
            // Anti-diagonal column b - k, shifted into the r..=s window.
            let b2 = b - k - r;
            if b2 >= 0 && b2 <= s - r {
                let b2 = b2 as usize;
                t.replace_range(b2..b2 + 1, "#");
            }
        }
        println!("{}", t);
    }
}
true
de82c635e5658e7ea45aa8a981440ec7e07da655
Rust
topecongiro/allocators-rs
/malloc-bind/src/lib.rs
UTF-8
23,257
2.75
3
[ "Apache-2.0" ]
permissive
// Copyright 2017 the authors. See the 'Copyright and license' section of the // README.md file at the top-level directory of this repository. // // Licensed under the Apache License, Version 2.0 (the LICENSE file). This file // may not be copied, modified, or distributed except according to those terms. //! Bindings for the C `malloc` API to Rust allocators. //! //! This crate provides a mechanism to construct a C allocator - an implementation of `malloc`, //! `free`, and related functions - that is backed by a Rust allocator (an implementation of the //! `Alloc` trait). //! //! In order to create bindings, two things must be provided: an implementation of the `Alloc` //! trait, and an implementation of the `LayoutFinder` trait (defined in this crate). Since the C //! API does not provide size or alignment on `free`, but the Rust `Alloc` API requires both size //! and alignment on `dealloc`, a mapping must be maintained between allocated objects and those //! objects' size and alignment. The `LayoutFinder` provides this functionality. #![no_std] #![feature(allocator_api)] #![feature(alloc)] #![feature(core_intrinsics)] #![feature(const_fn)] extern crate alloc; extern crate libc; extern crate errno; extern crate sysconf; // lazy_static's macros are only used in the macros we define, so if no macros are called (which is // the case when compiling this crate on its own), then lazy_static's macros (and thus the // #[macro_use] attribute) will appear unused. Due to an issue with clippy // (https://rust-lang-nursery.github.io/rust-clippy/master/index.html#useless_attribute), this // allow(unused_imports) directive will be seen as useless, so we suppress the useless_attribute // warning as well. 
#[cfg_attr(feature = "cargo-clippy", allow(useless_attribute))] #[allow(unused_imports)] #[macro_use] extern crate lazy_static; use alloc::allocator::{Alloc, AllocErr, Layout}; use libc::{c_void, size_t}; use core::{mem, ptr}; use core::cmp::max; const WORD_SIZE: usize = mem::size_of::<*mut c_void>(); /// A mechanism for mapping allocated objects to their `Layout`s. /// /// A `LayoutFinder` is an object that can store and look up the `Layout` associated with an /// allocated object. In the functions generated by this crate, newly-allocated objects will be /// inserted into a global `LayoutFinder` object, and this `LayoutFinder` will be used to look up /// the `Layout`s associated with objects passed to `free` and other functions. pub trait LayoutFinder { /// Get the `Layout` associated with an allocated object. /// /// `get_layout` is passed a pointer to an allocated object, and it returns a `Layout` /// describing that object. `ptr` is guaranteed to be an object previously allocated using one /// of the various C allocation functions. fn get_layout(&self, ptr: *mut u8) -> Layout; /// Insert a new object to `Layout` mapping. /// /// `insert_layout` is passed a pointer to a newly-allocated object and a `Layout` describing /// that object, and it stores this mapping. `insert_layout` is called immediately after /// allocation in all of the C allocation functions. /// /// The default implementation of `insert_layout` is a no-op, as some allocators may already /// keep track of the information necessary to implement `get_layout` internally. fn insert_layout(&self, _ptr: *mut u8, _layout: Layout) {} /// Delete an existing object to `Layout` mapping. /// /// `delete_layout` is passed a pointer to an object whose mapping has previously been /// inserted, and it deletes this mapping. `delete_layout` is called immediately after /// deallocation in all of the C deallocation functions. 
/// /// The default implementation of `delete_layout` is a no-op, as some allocators may already /// keep track of the information necessary to implement `get_layout` internally. fn delete_layout(&self, _ptr: *mut u8) {} } /// A wrapper for a Rust allocator providing C bindings. /// /// `Malloc` wraps existing `Alloc` and `LayoutFinder` instances and provides methods for each of /// the various C allocation functions. Most users should simply call the `define_malloc` or /// `define_malloc_lazy_static` macros, which take care of constructing a `Malloc` instance and /// defining the various `extern "C"` functions of the C allocation API. Users who wish to expose /// only a subset of this API will need to instantiate a `Malloc` and define the `extern "C"` /// functions manually. pub struct Malloc<A, L: LayoutFinder> where for<'a> &'a A: Alloc { alloc: A, layout_finder: L, } impl<A, L: LayoutFinder> Malloc<A, L> where for<'a> &'a A: Alloc { /// Construct a new `Malloc`. /// /// `new` constructs a new `Malloc` using the provided allocator and `LayoutFinder`. Since C /// allocation functions can be called from many threads simultaneously, the allocator must be /// thread-safe. Thus, `A` (the type of the `alloc` parameter) isn't required to implement /// `Alloc`. Instead, `&A` must implement `Alloc` so that `Alloc`'s methods can be called /// concurrently. pub const fn new(alloc: A, layout_finder: L) -> Malloc<A, L> { Malloc { alloc, layout_finder, } } /// The C `malloc` function. pub unsafe fn malloc(&self, size: size_t) -> *mut c_void { if size == 0 { return ptr::null_mut(); } // According to the posix_memalign manpage, "The glibc malloc(3) always returns 8-byte // aligned memory addresses..." Thus, we round up the size of allocations to 8 bytes in // order guarantee that 8 is a valid alignment (since Layout requires that the size is a // multiple of the alignment). 
let size = max(size, 8); let layout = Layout::from_size_align(size as usize, 8).unwrap(); match (&self.alloc).alloc(layout.clone()) { Ok(ptr) => { self.layout_finder.insert_layout(ptr, layout); ptr as *mut c_void } Err(AllocErr::Exhausted { .. }) => ptr::null_mut(), Err(AllocErr::Unsupported { .. }) => core::intrinsics::abort(), } } /// The C `free` function. pub unsafe fn free(&self, ptr: *mut c_void) { if ptr.is_null() { return; } let layout = self.layout_finder.get_layout(ptr as *mut u8); self.layout_finder.delete_layout(ptr as *mut u8); (&self.alloc).dealloc(ptr as *mut u8, layout); } /// The obsolete C `cfree` function. pub unsafe fn cfree(&self, ptr: *mut c_void) { // See https://linux.die.net/man/3/cfree self.free(ptr) } /// The C `calloc` function. pub unsafe fn calloc(&self, nmemb: size_t, size: size_t) -> *mut c_void { if nmemb == 0 || size == 0 { return ptr::null_mut(); } // According to the posix_memalign manpage, "The glibc malloc(3) always returns 8-byte // aligned memory addresses..." Thus, we round up the size of allocations to 8 bytes in // order guarantee that 8 is a valid alignment (since Layout requires that the size is a // multiple of the alignment). let size = max(size, 8); let layout = Layout::from_size_align(nmemb * size as usize, 8).unwrap(); match (&self.alloc).alloc_zeroed(layout.clone()) { Ok(ptr) => { self.layout_finder.insert_layout(ptr, layout); ptr as *mut c_void } Err(AllocErr::Exhausted { .. }) => ptr::null_mut(), Err(AllocErr::Unsupported { .. }) => core::intrinsics::abort(), } } /// The obsolete C `valloc` function. pub unsafe fn valloc(&self, size: size_t) -> *mut c_void { if size == 0 { return ptr::null_mut(); } let layout = Layout::from_size_align(size as usize, sysconf::page::pagesize()).unwrap(); match (&self.alloc).alloc_zeroed(layout.clone()) { Ok(ptr) => { self.layout_finder.insert_layout(ptr, layout); ptr as *mut c_void } Err(AllocErr::Exhausted { .. }) => ptr::null_mut(), Err(AllocErr::Unsupported { .. 
}) => core::intrinsics::abort(), } } /// The obsolete C `pvalloc` function (only implemented on Linux). #[cfg(target_os = "linux")] pub unsafe fn pvalloc(&self, size: size_t) -> *mut c_void { // See http://man7.org/linux/man-pages/man3/posix_memalign.3.html if size == 0 { return ptr::null_mut(); } // TODO: round size up to the next multiple of the page size. let layout = Layout::from_size_align(size as usize, sysconf::page::pagesize()).unwrap(); match (&self.alloc).alloc_zeroed(layout.clone()) { Ok(ptr) => { self.layout_finder.insert_layout(ptr, layout); ptr as *mut c_void } Err(AllocErr::Exhausted { .. }) => ptr::null_mut(), Err(AllocErr::Unsupported { .. }) => core::intrinsics::abort(), } } /// The C `realloc` function. pub unsafe fn realloc(&self, ptr: *mut c_void, size: size_t) -> *mut c_void { // See http://man7.org/linux/man-pages/man3/malloc.3.html, // http://www.manpagez.com/man/3/malloc/osx-10.6.php if ptr.is_null() { return self.malloc(size); } if size == 0 { // According to the Linux manpage: "if size is equal to zero, and ptr is not NULL, then // the call is equivalent to free(ptr)." However, according to Darwin: "If size is zero // and ptr is not NULL, a new, minimum sized object is allocated and the original // object is freed." Since it is valid for malloc(0) to simply return NULL, we opt to // implement the Linux behavior in both cases. The only way for this to cause problems // is for Darwin programs to rely on the fact that the returned pointer represents the // "minimum sized object" instead of only assuming that, since the size passed was 0, // the object has 0 size. Since "minimum sized object" does not seem to be a // well-defined term, reliance on such behavior is erroneous. // TODO: What should we return? self.free(ptr); return ptr::null_mut(); } // TODO: Round size up to 8 and use 8-byte alignment like in malloc/calloc? let layout = self.layout_finder.get_layout(ptr as *mut u8); // TODO: What's the right choice of alignment here? 
let new_layout = Layout::from_size_align(size as usize, 1).unwrap(); match (&self.alloc).realloc(ptr as *mut u8, layout, new_layout.clone()) { Ok(ptr) => { self.layout_finder.delete_layout(ptr); self.layout_finder.insert_layout(ptr, new_layout); ptr as *mut c_void } Err(AllocErr::Exhausted { .. }) => ptr::null_mut(), Err(AllocErr::Unsupported { .. }) => core::intrinsics::abort(), } } /// The C `reallocf` function (only implemented on Mac). #[cfg(target_os = "macos")] pub unsafe fn reallocf(&self, ptr: *mut c_void, size: size_t) -> *mut c_void { // See http://www.manpagez.com/man/3/malloc/osx-10.6.php if ptr.is_null() { return self.malloc(size); } if size == 0 { // According to the malloc manpage: "If size is zero and ptr is not NULL, a new, // minimum sized object is allocated and the original object is freed." See the // equivalent comment in realloc for why we do this. // TODO: What should we return? self.free(ptr); return ptr::null_mut(); } // TODO: Round size up to 8 and use 8-byte alignment like in malloc/calloc? let layout = self.layout_finder.get_layout(ptr as *mut u8); // TODO: What's the right choice of alignment here? let new_layout = Layout::from_size_align(size as usize, 1).unwrap(); match (&self.alloc).realloc(ptr as *mut u8, layout, new_layout.clone()) { Ok(ptr) => { self.layout_finder.delete_layout(ptr); self.layout_finder.insert_layout(ptr, new_layout); ptr as *mut c_void } Err(AllocErr::Exhausted { .. }) => { self.free(ptr); ptr::null_mut() } Err(AllocErr::Unsupported { .. }) => core::intrinsics::abort(), } } /// The C `reallocarray` function (only implemented on Linux). #[cfg(target_os = "linux")] pub unsafe fn reallocarray(&self, ptr: *mut c_void, nmemb: size_t, size: size_t) -> *mut c_void { // See http://man7.org/linux/man-pages/man3/malloc.3.html // According to the malloc manpage, "unlike that realloc() call, reallocarray() fails // safely in the case where the multiplication would overflow. 
If such an overflow occurs, // reallocarray() returns NULL, sets errno to ENOMEM, and leaves the original block of // memory unchanged." match nmemb.checked_mul(size) { Some(product) => self.realloc(ptr, product), None => { errno::set_errno(errno::Errno(libc::ENOMEM)); ptr::null_mut() } } } /// The C `posix_memalign` function. pub unsafe fn posix_memalign(&self, memptr: *mut *mut c_void, alignment: size_t, size: size_t) -> i32 { // See http://man7.org/linux/man-pages/man3/posix_memalign.3.html // The manpage also specifies that the alignment must be a multiple of the word size, but // all powers of two greater than or equal to the word size are multiples of the word size, // so we omit that check. if alignment <= WORD_SIZE || !alignment.is_power_of_two() { return libc::EINVAL; } if size == 0 { *memptr = ptr::null_mut(); return 0; } // TODO: posix_memalign does not require that size is a multiple of alignment. Thus, we // need to manually round up since valid Layouts must have that property. This is safe // because this API never takes the memory region size on deallocation, so it's fine that // the caller might think they have a smaller memory region than they actually do. let layout = Layout::from_size_align(size as usize, alignment).unwrap(); match (&self.alloc).alloc(layout.clone()) { Ok(ptr) => { self.layout_finder.insert_layout(ptr, layout); *memptr = ptr as *mut c_void; 0 } Err(AllocErr::Exhausted { .. }) => libc::ENOMEM, Err(AllocErr::Unsupported { .. }) => core::intrinsics::abort(), } } /// The obsolete C `memalign` function (only implemented on Linux). #[cfg(target_os = "linux")] pub unsafe fn memalign(&self, alignment: size_t, size: size_t) -> *mut c_void { // See http://man7.org/linux/man-pages/man3/posix_memalign.3.html if !alignment.is_power_of_two() { return ptr::null_mut(); } if size == 0 { return ptr::null_mut(); } // TODO: memalign does not require that size is a multiple of alignment. 
Thus, we need to // manually round up since valid Layouts must have that property. This is safe because this // API never takes the memory region size on deallocation, so it's fine that the caller // might think they have a smaller memory region than they actually do. let layout = Layout::from_size_align(size as usize, alignment).unwrap(); match (&self.alloc).alloc(layout.clone()) { Ok(ptr) => { self.layout_finder.insert_layout(ptr, layout); ptr as *mut c_void } Err(AllocErr::Exhausted { .. }) => ptr::null_mut(), Err(AllocErr::Unsupported { .. }) => core::intrinsics::abort(), } } /// The C `aligned_alloc` function (only implemented on Linux). #[cfg(target_os = "linux")] pub unsafe fn aligned_alloc(&self, alignment: size_t, size: size_t) -> *mut c_void { // See http://man7.org/linux/man-pages/man3/posix_memalign.3.html // From the aligned_alloc manpage: "The function aligned_alloc() is the same as memalign(), // except for the added restriction that size should be a multiple of alignment." if size % alignment != 0 { return ptr::null_mut(); } self.memalign(alignment, size) } } /// Define `extern "C"` functions for the C allocation API. /// /// `define_malloc` is a convenience macro that constructs a global instance of `Malloc` and /// defines each of the functions of the C allocation API by calling methods on that instance. One /// function is defined for each of the methods on `Malloc`. Users who only want to define a subset /// of the C allocation API should instead define these functions manually. /// /// `define_malloc` takes an allocator type, an expression to construct a new instance of that /// type, a `LayoutFinder` type, and an expression to construct a new instance of that type. Both /// expressions must be constant expressions, as they will be used in the initialization of a /// static variable. #[macro_export] macro_rules! 
define_malloc { ($alloc_ty:ty, $alloc_new:expr, $layout_finder_ty:ty, $layout_finder_new:expr) => ( static HEAP: $crate::Malloc<$alloc_ty, $layout_finder_ty> = $crate::Malloc::new($alloc_new, $layout_finder_new); #[no_mangle] pub extern "C" fn malloc(size: size_t) -> *mut c_void { unsafe { HEAP.malloc(size) } } #[no_mangle] pub extern "C" fn free(ptr: *mut c_void) { unsafe { HEAP.free(ptr) } } #[no_mangle] pub extern "C" fn cfree(ptr: *mut c_void) { unsafe { HEAP.cfree(ptr) } } #[no_mangle] pub extern "C" fn calloc(nmemb: size_t, size: size_t) -> *mut c_void { unsafe { HEAP.calloc(nmemb, size) } } #[no_mangle] pub extern "C" fn valloc(size: size_t) -> *mut c_void { unsafe { HEAP.valloc(size) } } #[cfg(target_os = "linux")] #[no_mangle] pub extern "C" fn pvalloc(size: size_t) -> *mut c_void { unsafe { HEAP.pvalloc(size) } } #[no_mangle] pub extern "C" fn realloc(ptr: *mut c_void, size: size_t) -> *mut c_void { unsafe { HEAP.realloc(ptr, size) } } #[cfg(target_os = "macos")] #[no_mangle] pub extern "C" fn reallocf(ptr: *mut c_void, size: size_t) -> *mut c_void { unsafe { HEAP.reallocf(ptr, size) } } #[cfg(target_os = "linux")] pub extern "C" fn reallocarray(ptr: *mut c_void, nmemb: size_t, size: size_t) -> *mut c_void { unsafe { HEAP.reallocarray(ptr, nmemb, size) } } #[no_mangle] pub extern "C" fn posix_memalign(memptr: *mut *mut c_void, alignment: size_t, size: size_t) -> i32 { unsafe { HEAP.posix_memalign(memptr, alignment, size) } } #[cfg(target_os = "linux")] #[no_mangle] pub extern "C" fn memalign(alignment: size_t, size: size_t) -> *mut c_void { unsafe { HEAP.memalign(alignment, size) } } #[cfg(target_os = "linux")] #[no_mangle] pub extern "C" fn aligned_alloc(alignment: size_t, size: size_t) -> *mut c_void { unsafe { HEAP.aligned_alloc(alignment, size) } } ) } // This line re-exports the macros from lazy_static so that they'll be available to the code // calling define_malloc_lazy_static. 
This allows define_malloc_lazy_static to be used without the // caller needing to know about lazy_static and import its macros themselves. // // Credit to https://users.rust-lang.org/t/how-to-use-macro-inside-another-macro/12061/2 pub use lazy_static::*; /// Define `extern "C"` functions for the C allocation API with non-constant initializers. /// /// `define_malloc_lazy_static` is like `define_malloc`, except there is no requirement that the /// initialization expressions must be constant. Instead, `lazy_static` is used to construct the /// global `Malloc` instance. #[macro_export] macro_rules! define_malloc_lazy_static { ($alloc_ty:ty, $alloc_new:expr, $layout_finder_ty:ty, $layout_finder_new:expr) => ( lazy_static!{ static ref HEAP: $crate::Malloc<$alloc_ty, $layout_finder_ty> = $crate::Malloc::new($alloc_new, $layout_finder_new); } #[no_mangle] pub extern "C" fn malloc(size: size_t) -> *mut c_void { unsafe { HEAP.malloc(size) } } #[no_mangle] pub extern "C" fn free(ptr: *mut c_void) { unsafe { HEAP.free(ptr) } } #[no_mangle] pub extern "C" fn cfree(ptr: *mut c_void) { unsafe { HEAP.cfree(ptr) } } #[no_mangle] pub extern "C" fn calloc(nmemb: size_t, size: size_t) -> *mut c_void { unsafe { HEAP.calloc(nmemb, size) } } #[no_mangle] pub extern "C" fn valloc(size: size_t) -> *mut c_void { unsafe { HEAP.valloc(size) } } #[cfg(target_os = "linux")] #[no_mangle] pub extern "C" fn pvalloc(size: size_t) -> *mut c_void { unsafe { HEAP.pvalloc(size) } } #[no_mangle] pub extern "C" fn realloc(ptr: *mut c_void, size: size_t) -> *mut c_void { unsafe { HEAP.realloc(ptr, size) } } #[cfg(target_os = "macos")] #[no_mangle] pub extern "C" fn reallocf(ptr: *mut c_void, size: size_t) -> *mut c_void { unsafe { HEAP.reallocf(ptr, size) } } #[cfg(target_os = "linux")] pub extern "C" fn reallocarray(ptr: *mut c_void, nmemb: size_t, size: size_t) -> *mut c_void { unsafe { HEAP.reallocarray(ptr, nmemb, size) } } #[no_mangle] pub extern "C" fn posix_memalign(memptr: *mut *mut c_void, 
alignment: size_t, size: size_t) -> i32 { unsafe { HEAP.posix_memalign(memptr, alignment, size) } } #[cfg(target_os = "linux")] #[no_mangle] pub extern "C" fn memalign(alignment: size_t, size: size_t) -> *mut c_void { unsafe { HEAP.memalign(alignment, size) } } #[cfg(target_os = "linux")] #[no_mangle] pub extern "C" fn aligned_alloc(alignment: size_t, size: size_t) -> *mut c_void { unsafe { HEAP.aligned_alloc(alignment, size) } } ) }
true
a8532c940538311c03135ba5b784b561c23f2b4f
Rust
AntonGepting/tmux-interface-rs
/src/commands/windows_and_panes/move_window_tests.rs
UTF-8
2,306
2.9375
3
[ "LicenseRef-scancode-unknown-license-reference", "MIT" ]
permissive
#[test] fn move_window() { use crate::{MoveWindow, TargetWindow}; use std::borrow::Cow; // Like join-pane, but `src-pane` and `dst-pane` may belong to the same window // // # Manual // // tmux ^3.2: // ```text // move-window [-abrdk] [-s src-window] [-t dst-window] // (alias: movew) // ``` // // tmux ^2.1: // ```text // move-window [-ardk] [-s src-window] [-t dst-window] // (alias: movew) // ``` // // tmux ^1.7: // ```text // move-window [-rdk] [-s src-window] [-t dst-window] // (alias: movew) // ``` // // tmux ^1.3: // ```text // move-window [-dk] [-s src-window] [-t dst-window] // (alias: movew) // ``` // // tmux ^0.8: // ```text // move-window [-d] [-s src-window] [-t dst-window] // (alias: movew) // ``` let src_pane = TargetWindow::Raw("1").to_string(); let dst_pane = TargetWindow::Raw("2").to_string(); let move_window = MoveWindow::new(); #[cfg(feature = "tmux_2_1")] let move_window = move_window.after(); #[cfg(feature = "tmux_3_2")] let move_window = move_window.before(); #[cfg(feature = "tmux_1_7")] let move_window = move_window.renumber(); #[cfg(feature = "tmux_0_8")] let move_window = move_window.detached(); #[cfg(feature = "tmux_1_3")] let move_window = move_window.kill(); #[cfg(feature = "tmux_0_8")] let move_window = move_window.src_window(&src_pane); #[cfg(feature = "tmux_0_8")] let move_window = move_window.dst_window(&dst_pane); #[cfg(not(feature = "cmd_alias"))] let cmd = "move-window"; #[cfg(feature = "cmd_alias")] let cmd = "movew"; let mut s = Vec::new(); s.push(cmd); #[cfg(feature = "tmux_2_1")] s.push("-a"); #[cfg(feature = "tmux_3_2")] s.push("-b"); #[cfg(feature = "tmux_1_7")] s.push("-r"); #[cfg(feature = "tmux_0_8")] s.push("-d"); #[cfg(feature = "tmux_1_3")] s.push("-k"); #[cfg(feature = "tmux_0_8")] s.extend_from_slice(&["-s", "1"]); #[cfg(feature = "tmux_0_8")] s.extend_from_slice(&["-t", "2"]); let s: Vec<Cow<str>> = s.into_iter().map(|a| a.into()).collect(); let move_window = move_window.build().to_vec(); assert_eq!(move_window, s); }
true
93c77e6fe0f388ce901f8ed0ea68891c8cc1c148
Rust
JeeZeh/advent-of-code
/2018/day10/src/main.rs
UTF-8
3,527
3.078125
3
[]
no_license
use std::{ collections::HashMap, fs, io::{stdin, stdout, Read, Write}, }; #[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)] struct Point { x: i32, y: i32, } #[derive(Debug)] struct Star { position: Point, velocity: Point, } impl Star { fn step(&mut self) { self.position.x += self.velocity.x; self.position.y += self.velocity.y; } } fn main() { let mut stars: Vec<Star> = fs::read_to_string("./src/input") .unwrap() .lines() .map(parse_stars) .collect(); run_stars(&mut stars) } fn run_stars(stars: &mut Vec<Star>) { let mut best: (i32, HashMap<Point, bool>, i32) = (i32::MAX, HashMap::new(), 0); let mut time = 1; loop { for star in stars.iter_mut() { star.step(); } let (size, sky) = get_letter_confidence_v2(&stars); if size < best.0 { best = (size, sky, time); } if size > (best.0 * 2) { break; } time += 1; } print_sky(&best.1); println!("Elves waited {}s", &best.2) } fn print_sky(sky: &HashMap<Point, bool>) { let x_max = sky.keys().map(|p| p.x).max().unwrap() + 5; let x_min = sky.keys().map(|p| p.x).min().unwrap() - 5; let y_max = sky.keys().map(|p| p.y).max().unwrap() + 5; let y_min = sky.keys().map(|p| p.y).min().unwrap() - 5; for y in y_min..y_max { let mut line = String::new(); for x in x_min..x_max { if sky.contains_key(&Point { x, y }) { line.push('#'); } else { line.push('.'); } } println!("{}", line); } } fn get_letter_confidence_v2(stars: &[Star]) -> (i32, HashMap<Point, bool>) { let y_max = stars.iter().map(|p| p.position.y).max().unwrap(); let y_min = stars.iter().map(|p| p.position.y).min().unwrap(); let mut sky: HashMap<Point, bool> = HashMap::new(); for star in stars { sky.insert(star.position, true); } ((y_max - y_min), sky) } #[deprecated] fn get_letter_confidence(stars: &[Star]) -> (i32, HashMap<Point, bool>) { let mut sky: HashMap<Point, bool> = HashMap::new(); for star in stars { sky.insert(star.position, true); } let mut borders = 0; for star in &sky { for x in -1..=1 { for y in -1..=1 { let test = Point { x: star.0.x + x, y: star.0.y + y, }; if test 
!= *star.0 && sky.contains_key(&test) { borders += 1; } } } } return (borders, sky); } fn parse_stars(line: &str) -> Star { let u_pos = &line[10..]; let mut pos_parts = u_pos[0..u_pos.find(">").unwrap()].split(","); let u_vel = line.split("velocity=<").nth(1).unwrap(); let mut vel_parts = u_vel[0..u_vel.find(">").unwrap()].split(","); return Star { position: Point { x: pos_parts.next().unwrap().replace(" ", "").parse().unwrap(), y: pos_parts.next().unwrap().replace(" ", "").parse().unwrap(), }, velocity: Point { x: vel_parts.next().unwrap().replace(" ", "").parse().unwrap(), y: vel_parts.next().unwrap().replace(" ", "").parse().unwrap(), }, }; } fn pause() { let mut stdout = stdout(); stdout.write(b"Press Enter to continue...").unwrap(); stdout.flush().unwrap(); stdin().read(&mut [0]).unwrap(); }
true
94509e2f7174c9af312b8d8169bbfb1714de9b5b
Rust
jonalmeida/random-code
/rust/learning/modules-again/src/reader.rs
UTF-8
1,238
3.65625
4
[]
no_license
use std::io::File; use std::io::BufferedReader; pub type ReaderResult<T, E> = Result<T, E>; pub struct Reader { path: Path, } pub trait ReaderFile { fn create(&self); fn open(&self) -> File; //fn insert(&self, String); fn spill(&self); } impl Reader { pub fn new(path: Path) -> Reader { Reader { path: path, } } } impl ReaderFile for Reader { fn create(&self) { File::create(&self.path); } fn open(&self) -> File { let file = File::open(&self.path); match file { Ok(file) => { file }, Err(..) => { panic!("File couldn't be opened!"); }, } } //fn insert(&self, item: String) { // self.file.write(item); //} fn spill(&self) { let mut file = BufferedReader::new(self.open()); for line_iter in file.lines() { println!("{}", line_iter.unwrap()); } } } #[test] fn test_open_file() { let reader = Reader::new(Path::new("test.txt")); } // We should output the entire contents of the database file we open // into standard output. #[test] fn test_read_file() { let reader = Reader::new(Path::new("test.txt")); reader.spill(); }
true
2fc3b75012f2a45e02143d93954ec129c79bb022
Rust
nbanal/toy-payment-engine
/src/main.rs
UTF-8
1,757
2.78125
3
[ "Apache-2.0" ]
permissive
use std::{collections::HashMap, io}; use std::env; use std::fs::File; use std::io::BufReader; mod bank; fn main() -> io::Result<()> { let mut bank = bank::Bank { accounts: HashMap::new(), ledger: HashMap::new(), }; let csv_filename = env::args().nth(1); let file = File::open(csv_filename.unwrap())?; let buffer_size = 1000; let reader = BufReader::with_capacity(buffer_size,file); let mut rdr = csv::ReaderBuilder::new() .trim(csv::Trim::All) .flexible(true) .from_reader(reader); for record in rdr.deserialize() { match record as Result<bank::Transaction, csv::Error> { Ok(transaction) => { if (transaction.kind == bank::TransactionType::deposit || transaction.kind == bank::TransactionType::withdrawal) && transaction.amount.is_none() { println!("Amount is missing"); continue; } match bank.process_transaction(&transaction) { Ok(_) => { if transaction.kind == bank::TransactionType::deposit || transaction.kind == bank::TransactionType::withdrawal { bank.add_transaction_to_ledger(transaction); } } Err(e) => println!("{}", e), } } Err(e) => { println!("{}", e); continue; } } } bank.print_accounts(); Ok(()) }
true
d3eb71fa3e86faf1ac3b910db6761e8ea8353cd9
Rust
irjones/aoc2020
/dec_5/common/src/lib.rs
UTF-8
2,506
3.609375
4
[ "MIT" ]
permissive
pub mod day_five { #[derive(Debug)] pub struct Seat { row: i32, column: i32 } fn adjust_boundary(a: i32, b: i32) -> i32 { (a + b) / 2 } impl Seat { pub fn id(&self) -> i32 { self.row * 8 + self.column } pub fn from(pass: &'_ str) -> Seat { let mut upper_row = 127; let mut lower_row = 0; let mut upper_column = 7; let mut lower_column = 0; for c in pass.chars() { match c { 'F' => upper_row = adjust_boundary(lower_row, upper_row), 'B' => lower_row = adjust_boundary(lower_row, upper_row), 'L' => upper_column = adjust_boundary(lower_column, upper_column), 'R' => lower_column = adjust_boundary(lower_column, upper_column), _ => print!("{} not recognized!", c) } } dbg!(lower_row, lower_column); Seat { row: upper_row, column: upper_column } } } } #[cfg(test)] mod tests { use crate::day_five::Seat; use std::fs; use std::collections::HashSet; #[test] fn it_creates_seat_correctly() { let input = "BFFFBBFRRR"; let seat = Seat::from(input); assert_eq!(567, seat.id()); } #[test] fn it_does_pt1_correctly() { let input = fs::read_to_string("./input").expect("Could not read file"); let result = input.split("\n") .filter(|s| !s.is_empty()) .map(|s| Seat::from(s)) .map(|seat| seat.id()) .max().expect("No value where value was expected"); assert_eq!(989, result); } #[test] fn it_does_pt2_correctly() { let input = fs::read_to_string("./input").expect("Could not read file"); let id_set = input.split('\n') .filter(|s| !s.is_empty()) .map(|s| Seat::from(s)) .map(|seat| seat.id()) .collect::<HashSet<_>>(); let min = id_set.iter().min().expect("No min where min expected"); let max = id_set.iter().max().expect("No max where max expected"); let mut answer: Option<i32> = None; for i in *min..*max { if !id_set.contains(&i) { answer = Some(i); } } assert_eq!(true, answer.is_some()); assert_eq!(548, answer.unwrap()); } }
true
f4da65aae94bd54ca397cc1e053ce1d4fac50886
Rust
jakosimov/blockkey
/src/crypto/hashing/hash.rs
UTF-8
3,167
3.078125
3
[ "MIT" ]
permissive
use data_encoding::HEXUPPER; use sha2::{Digest, Sha256}; use std::convert::TryInto; use std::fmt; use std::marker::PhantomData; #[derive(Debug)] pub struct Hash<T: ?Sized = ()>([u8; 32], PhantomData<T>); impl<T: ?Sized> Clone for Hash<T> { fn clone(&self) -> Self { Hash(self.0, PhantomData) } } impl<T: ?Sized> PartialEq for Hash<T> { fn eq(&self, h: &Self) -> bool { self.0 == h.0 } } impl<T: ?Sized> Eq for Hash<T> {} impl<T: ?Sized> Copy for Hash<T> {} impl<T: ?Sized> std::hash::Hash for Hash<T> { fn hash<H: std::hash::Hasher>(&self, state: &mut H) { self.0.hash(state); } } impl Hash { pub fn from_bytes(bytes: &[u8]) -> Hash<Vec<u8>> { let mut hasher = Sha256::new(); hasher.update(bytes); let result = hasher.finalize(); Hash(result.as_slice().try_into().unwrap(), PhantomData) } } impl<T> Hash<T> { pub fn empty() -> Hash<T> { Vec::<u8>::new().hash().cast() } pub fn get_bytes(&self) -> &[u8; 32] { &self.0 } pub fn cast<H>(&self) -> Hash<H> { Hash(self.0, PhantomData) } } impl<T> fmt::Display for Hash<T> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", HEXUPPER.encode(&self.0)) } } pub trait Hashable<Input = Self> { fn hash(&self) -> Hash<Input>; } impl<T> Hashable<T> for Hash<T> { fn hash(&self) -> Hash<T> { *self } } impl Hashable for Vec<u8> { fn hash(&self) -> Hash<Self> { Hash::from_bytes(&self) } } impl Hashable for usize { fn hash(&self) -> Hash<Self> { Hash::from_bytes(&self.to_be_bytes()).cast() } } impl Hashable for i32 { fn hash(&self) -> Hash<Self> { Hash::from_bytes(&self.to_be_bytes()).cast() } } impl Hashable for u8 { fn hash(&self) -> Hash<Self> { Hash::from_bytes(&self.to_be_bytes()).cast() } } impl Hashable for u64 { fn hash(&self) -> Hash<Self> { Hash::from_bytes(&self.to_be_bytes()).cast() } } impl Hashable for u128 { fn hash(&self) -> Hash<Self> { Hash::from_bytes(&self.to_be_bytes()).cast() } } #[allow(unused_macros)] #[macro_export] macro_rules! 
hash { (impl $x:expr, $y:expr) => { $x.extend_from_slice($y.hash().get_bytes()); }; (impl $x:expr, $y:expr, $($z:expr),+) => { $x.extend_from_slice($y.hash().get_bytes()); hash!(impl $x, $($z),+); }; [$x:expr] => ( $x.hash().cast() ); [$($y:expr),+] => ( { let mut v = vec![]; hash!(impl &mut v, $($y),*); Hash::from_bytes(v.as_slice()).cast() } ); } #[cfg(test)] mod test { use crate::crypto::hashing::*; #[test] fn equality() { let x: Hash = hash![1, 2, 3]; let y: Hash = hash![1, 2, 3]; assert_eq!(x == y, true); } #[test] fn hash_transparency() { let x: Hash = hash![1, 2, 3]; assert_eq!(x, hash![1, 2.hash(), 3]); } #[test] fn nested_hashing() { let x: Hash = hash![1, 2, 3]; let y: Hash = hash![2, 3]; assert_ne!(x, hash![1, y]); } }
true
f925ddd1500cff7c29429558dd740145c77560ee
Rust
mythmon/hackerrank
/src/algorithms/warmup/time-conversion.rs
UTF-8
3,168
3.640625
4
[]
no_license
use std::io; use std::str::FromStr; use std::fmt::{Display, Formatter, Error}; #[derive(Clone)] pub enum Time { AmPm { hour: u8, minute: u8, second: u8, am: bool }, TwentyFour { hour: u8, minute: u8, second: u8 }, } impl Time { pub fn new_ampm(hour: u8, minute: u8, second: u8, am: bool) -> Time { Time::AmPm { hour: hour, minute: minute, second: second, am: am } } pub fn new_twenty_four(hour: u8, minute: u8, second: u8) -> Time { Time::TwentyFour { hour: hour, minute: minute, second: second } } pub fn to_twenty_four(&self) -> Time { match *self { Time::AmPm { hour, minute, second, am } => { let new_hour = { if am { if hour == 12 { 0 } else { hour } } else { if hour == 12 { 12 } else { hour + 12 } } }; Time::new_twenty_four(new_hour, minute, second) }, Time::TwentyFour { hour: _, minute: _, second: _ } => self.clone(), } } pub fn to_ampm(&self) -> Time { match *self { Time::AmPm { hour: _, minute: _, second: _, am: _ } => self.clone(), Time::TwentyFour { hour, minute, second } => { let new_hour = hour - if hour > 12 { 12 } else { 0 }; let am = hour < 12; Time::new_ampm(new_hour, minute, second, am) }, } } } impl FromStr for Time { type Err = (); fn from_str(s: &str) -> Result<Self, Self::Err> { let hour: u8 = s[0..2].parse().unwrap(); let minute: u8 = s[3..5].parse().unwrap(); let second: u8 = s[6..8].parse().unwrap(); if s.len() > 8 { let am = s[8..10] == *"AM"; Ok(Time::new_ampm(hour, minute, second, am)) } else { Ok(Time::new_twenty_four(hour, minute, second)) } } } impl Display for Time { fn fmt(&self, f: &mut Formatter) -> Result<(), Error> { match *self { Time::AmPm { hour, minute, second, am } => { try!(write!(f, "{:0>2}:{:0>2}:{:0>2}{}", hour, minute, second, if am { "AM" } else { "PM" })); }, Time::TwentyFour { hour, minute, second } => { try!(write!(f, "{:0>2}:{:0>2}:{:0>2}", hour, minute, second)); }, } Ok(()) } } fn read_line() -> String { let mut input = String::new(); io::stdin().read_line(&mut input).unwrap(); input } fn read_time() -> Time { 
read_line().trim().parse().unwrap() } fn main() { let time = read_time(); println!("{}", time.to_twenty_four()); } #[cfg(test)] mod tests { use super::Time; #[test] fn test_sample() { let t: Time = "07:05:45PM".parse().unwrap(); let s = format!("{}", t.to_twenty_four()); assert_eq!(s, "19:05:45"); } #[test] fn test_case_1() { let t: Time = "12:40:22AM".parse().unwrap(); let s = format!("{}", t.to_twenty_four()); assert_eq!(s, "00:40:22"); } }
true
faa24c02d6c93d559ca5b83def3bd356dbcd82e9
Rust
kyleoneill/adventofcode
/2019/2/2.rs
UTF-8
1,930
3.234375
3
[]
no_license
use std::fs::File; use std::io::{self, prelude::*, BufReader}; fn main() -> io::Result<()> { let file = File::open("input.txt")?; let reader = BufReader::new(file); for line in reader.lines() { let result_line = line?; let values: Vec<i32> = result_line.split(',').map(|x| x.parse()).collect::<Result<_,_>>().unwrap(); let mut solution = Vec::new(); 'outer: for noun in 0..100 { 'inner: for verb in 0..100 { let mut modified_values = values.clone(); modified_values[1] = noun; modified_values[2] = verb; let result = run_opcode(modified_values); if result == 19690720 { solution.push(noun); solution.push(verb); break 'outer; } } } println!("The noun is: {}", solution[0].to_string()); println!("The verb is: {}", solution[1].to_string()); println!("The solution is: {}", (100 * solution[0] + solution[1]).to_string()); } Ok(()) } fn run_opcode(mut program: Vec<i32>) -> i32 { let mut current_opcode = 0; loop { let first: usize = program[current_opcode + 1] as usize; let second: usize = program[current_opcode + 2] as usize; let result: usize = program[current_opcode + 3] as usize; if program[current_opcode] == 1 { program[result] = program[first] + program[second]; current_opcode += 4; } else if program[current_opcode] == 2 { program[result] = program[first] * program[second]; current_opcode += 4; } else if program[current_opcode] == 99 { current_opcode += 1; break; } else { panic!("Opcode at position {} is not valid", current_opcode.to_string()); } } program[0] }
true
2dcf5b1cdc6442d9ac7b4b7e78f27ec96e265e43
Rust
AI-and-ML/alumina
/alumina_ops/src/elementwise/softsign.rs
UTF-8
2,197
2.9375
3
[ "MIT" ]
permissive
// y = x / (abs(x) + 1) // y' = 1 / (abs(x) + 1)^2 use crate::{ elementwise::elementwise_single::{UnaryElementwise, UnaryFunc}, elementwise::{abs::abs, div::Div, offset::offset, sqr::sqr}, }; use alumina_core::{ base_ops::OpSpecification, errors::{GradientError, OpBuildError}, grad::GradientContext, graph::{Node, NodeID}, }; /// Returns the softsign (y = x / (x.abs() + 1.0)) of the input element-wise. /// /// The output node has the same shape as the input. pub fn softsign<I>(input: I) -> Result<Node, OpBuildError> where I: Into<Node>, { let input = input.into(); let output = input .graph() .new_node(input.shape()) .set_name_unique(&format!("softsign({})", input)); let _op = Softsign::new_default(input, output.clone()).build()?; Ok(output) } pub type Softsign = UnaryElementwise<SoftsignFunc>; #[derive(Clone, Debug, Default)] pub struct SoftsignFunc {} impl UnaryFunc for SoftsignFunc { #[inline] fn calc(&self, input: f32) -> f32 { input / (input.abs() + 1.0) } fn type_name(&self) -> &'static str { "Softsign" } fn grad(&self, ctx: &mut GradientContext, input: &NodeID, output: &NodeID) -> Result<(), GradientError> { let abs = abs(ctx.node(input))?; let abs_p1 = offset(abs, 1.0)?; let sqr_abs_p1 = sqr(abs_p1)?; let _op = Div::new_default(ctx.grad_of(output), sqr_abs_p1, ctx.grad_of(input)).build()?; Ok(()) } } #[cfg(test)] mod tests { use super::softsign; use alumina_core::graph::Node; use alumina_test::{grad_numeric_test::GradNumericTest, relatively_close::RelClose}; use indexmap::indexset; use ndarray::arr0; #[test] fn forward_test() { let input = Node::new(&[13, 33]).set_name("input"); let output = softsign(&input).unwrap(); input.set_value(arr0(1.25)); assert!(output .calc() .unwrap() .all_relatively_close(&arr0(0.555_555_6), ::std::f32::EPSILON)); input.set_value(arr0(-0.8)); assert!(output .calc() .unwrap() .all_relatively_close(&arr0(-0.444_444_45), ::std::f32::EPSILON)); } #[test] fn grad_numeric_test() { let input = Node::new(&[13, 33]).set_name("input"); let 
output = softsign(&input).unwrap(); GradNumericTest::new(&output, &indexset![&input]).run(); } }
true
661c95375575d0ec5ed62702864692da9dbb452d
Rust
tech-paws/vm
/src/commands_reader.rs
UTF-8
7,089
3.078125
3
[]
no_license
//! Commands reader. use vm_buffers::BytesReader; use vm_buffers::IntoVMBuffers; pub struct CommandsReader<'a> { pub bytes_reader: &'a mut BytesReader, pub address: String, pub count: u64, command_breakpoint: u64, command_len: u64, read_commands: u64, } pub struct Command<'a> { pub id: u64, pub bytes_reader: &'a mut BytesReader, } impl<'a> CommandsReader<'a> { pub fn new(bytes_reader: &'a mut BytesReader) -> Self { let count = bytes_reader.read_u64(); let address = String::read_from_buffers(bytes_reader); let command_breakpoint = bytes_reader.current_offset(); Self { bytes_reader, address, count, command_breakpoint, command_len: 0, read_commands: 0, } } pub fn len(&self) -> usize { self.count as usize } pub fn next(&mut self) -> Option<Command> { assert!( self.bytes_reader.current_offset() <= self.command_breakpoint + self.command_len, "last command payload overflow" ); if self.read_commands == self.count { return None; } let skip_bytes = self.command_breakpoint + self.command_len - self.bytes_reader.current_offset(); self.bytes_reader.skip(skip_bytes); let command_id = self.bytes_reader.read_u64(); self.command_len = self.bytes_reader.read_u64(); self.command_breakpoint = self.bytes_reader.current_offset(); self.read_commands += 1; Some(Command { id: command_id, bytes_reader: self.bytes_reader, }) } } #[cfg(test)] mod tests { use vm_buffers::IntoVMBuffers; use vm_buffers::{ByteOrder, BytesReader, BytesWriter}; use vm_memory::RegionAllocator; use super::CommandsReader; fn write_demo(allocator: &RegionAllocator) { let mut bytes_writer = BytesWriter::new(ByteOrder::LittleEndian, allocator); bytes_writer.write_u64(5); // Commands count let from_address = String::from("tech.paws.tests"); from_address.write_to_buffers(&mut bytes_writer); // Command 1 bytes_writer.write_u64(1); // Command id bytes_writer.write_u64(8); // Command payload len bytes_writer.write_u32(5); // Some payload bytes_writer.write_u32(9); // Some payload // Command 2 bytes_writer.write_u64(2); // 
Command id bytes_writer.write_u64(12); // Command payload len bytes_writer.write_u32(2); // Some payload bytes_writer.write_u32(4); // Some payload bytes_writer.write_u32(91); // Some payload // Command 3 bytes_writer.write_u64(3); // Command id bytes_writer.write_u64(4); // Command payload len bytes_writer.write_u32(1); // Some payload // Command 4 bytes_writer.write_u64(4); // Command id bytes_writer.write_u64(32); // Command payload len bytes_writer.write_u32(2); // Some payload bytes_writer.write_u32(4); // Some payload bytes_writer.write_u32(5); // Some payload bytes_writer.write_u32(92); // Some payload bytes_writer.write_u32(2); // Some payload bytes_writer.write_u32(4); // Some payload bytes_writer.write_u32(5); // Some payload bytes_writer.write_u32(92); // Some payload // Command 5 bytes_writer.write_u64(5); // Command id bytes_writer.write_u64(8); // Command payload len bytes_writer.write_u32(99); // Some payload bytes_writer.write_u32(123); // Some payload } #[test] fn skip_commands() { let allocator = RegionAllocator::new(1024); write_demo(&allocator); let mut bytes_reader = BytesReader::new(ByteOrder::LittleEndian, &allocator); let mut commands_reader = CommandsReader::new(&mut bytes_reader); let command = commands_reader.next().unwrap(); assert_eq!(command.id, 1); let command = commands_reader.next().unwrap(); assert_eq!(command.id, 2); let command = commands_reader.next().unwrap(); assert_eq!(command.id, 3); let command = commands_reader.next().unwrap(); assert_eq!(command.id, 4); let command = commands_reader.next().unwrap(); assert_eq!(command.id, 5); } #[test] fn partially_read_commands_payloads() { let allocator = RegionAllocator::new(1024); write_demo(&allocator); let mut bytes_reader = BytesReader::new(ByteOrder::LittleEndian, &allocator); let mut commands_reader = CommandsReader::new(&mut bytes_reader); // Command 1 let command = commands_reader.next().unwrap(); assert_eq!(command.id, 1); let data = command.bytes_reader.read_u32(); 
assert_eq!(data, 5); // Command 2 let command = commands_reader.next().unwrap(); assert_eq!(command.id, 2); let data = command.bytes_reader.read_u32(); assert_eq!(data, 2); let data = command.bytes_reader.read_u32(); assert_eq!(data, 4); let data = command.bytes_reader.read_u32(); assert_eq!(data, 91); // Command 3 let command = commands_reader.next().unwrap(); assert_eq!(command.id, 3); let data = command.bytes_reader.read_u32(); assert_eq!(data, 1); let command = commands_reader.next().unwrap(); assert_eq!(command.id, 4); let data = command.bytes_reader.read_u32(); assert_eq!(data, 2); let data = command.bytes_reader.read_u32(); assert_eq!(data, 4); let data = command.bytes_reader.read_u32(); assert_eq!(data, 5); let command = commands_reader.next().unwrap(); assert_eq!(command.id, 5); let data = command.bytes_reader.read_u32(); assert_eq!(data, 99); let data = command.bytes_reader.read_u32(); assert_eq!(data, 123); } #[test] #[should_panic] fn payload_overflow() { let allocator = RegionAllocator::new(1024); write_demo(&allocator); let mut bytes_reader = BytesReader::new(ByteOrder::LittleEndian, &allocator); let mut commands_reader = CommandsReader::new(&mut bytes_reader); let command = commands_reader.next().unwrap(); assert_eq!(command.id, 1); let data = command.bytes_reader.read_u32(); assert_eq!(data, 5); command.bytes_reader.read_u64(); commands_reader.next(); } #[test] fn read_all_commands() { let allocator = RegionAllocator::new(1024); write_demo(&allocator); let mut bytes_reader = BytesReader::new(ByteOrder::LittleEndian, &allocator); let mut commands_reader = CommandsReader::new(&mut bytes_reader); for _ in 0..5 { let command = commands_reader.next(); assert!(command.is_some()); } let command = commands_reader.next(); assert!(command.is_none()); } }
true
4b2afd3d592fb3ed3abf4fa68eb5bd03921dc1ef
Rust
nephele-rs/nephele
/nephele/src/proto/h1/client/encode.rs
UTF-8
3,789
2.5625
3
[ "MIT", "Apache-2.0" ]
permissive
use cynthia::future::swap::{self, AsyncRead, Cursor}; use cynthia::runtime::task::{Context, Poll}; use std::io::Write; use std::pin::Pin; use crate::common::http_types::headers::{CONTENT_LENGTH, HOST, TRANSFER_ENCODING}; use crate::common::http_types::{Method, Request}; use crate::proto::h1::body_encoder::BodyEncoder; use crate::proto::h1::EncoderState; use crate::read_to_end; #[doc(hidden)] #[derive(Debug)] pub struct Encoder { request: Request, state: EncoderState, } impl Encoder { pub fn new(request: Request) -> Self { Self { request, state: EncoderState::Start, } } fn finalize_headers(&mut self) -> swap::Result<()> { if self.request.header(HOST).is_none() { let url = self.request.url(); let host = url .host_str() .ok_or_else(|| swap::Error::new(swap::ErrorKind::InvalidData, "Missing hostname"))? .to_owned(); if let Some(port) = url.port() { self.request .insert_header(HOST, format!("{}:{}", host, port)); } else { self.request.insert_header(HOST, host); }; } if self.request.method() == Method::Connect { self.request.insert_header("proxy-connection", "keep-alive"); } if let Some(len) = self.request.len() { self.request.insert_header(CONTENT_LENGTH, len.to_string()); } else { self.request.insert_header(TRANSFER_ENCODING, "chunked"); } Ok(()) } fn compute_head(&mut self) -> swap::Result<Cursor<Vec<u8>>> { let mut buf = Vec::with_capacity(128); let url = self.request.url(); let method = self.request.method(); write!(buf, "{} ", method)?; if method == Method::Connect { let host = url.host_str().ok_or_else(|| { swap::Error::new(swap::ErrorKind::InvalidData, "Missing hostname") })?; let port = url.port_or_known_default().ok_or_else(|| { swap::Error::new( swap::ErrorKind::InvalidData, "Unexpected scheme with no default port", ) })?; write!(buf, "{}:{}", host, port)?; } else { write!(buf, "{}", url.path())?; if let Some(query) = url.query() { write!(buf, "?{}", query)?; } } write!(buf, " HTTP/1.1\r\n")?; self.finalize_headers()?; let mut headers = 
self.request.iter().collect::<Vec<_>>(); headers.sort_unstable_by_key(|(h, _)| if **h == HOST { "0" } else { h.as_str() }); for (header, values) in headers { for value in values.iter() { write!(buf, "{}: {}\r\n", header, value)?; } } write!(buf, "\r\n")?; Ok(Cursor::new(buf)) } } impl AsyncRead for Encoder { fn poll_read( mut self: Pin<&mut Self>, cx: &mut Context<'_>, buf: &mut [u8], ) -> Poll<swap::Result<usize>> { loop { self.state = match self.state { EncoderState::Start => EncoderState::Head(self.compute_head()?), EncoderState::Head(ref mut cursor) => { read_to_end!(Pin::new(cursor).poll_read(cx, buf)); EncoderState::Body(BodyEncoder::new(self.request.take_body())) } EncoderState::Body(ref mut encoder) => { read_to_end!(Pin::new(encoder).poll_read(cx, buf)); EncoderState::End } EncoderState::End => return Poll::Ready(Ok(0)), } } } }
true
48990c7637f12893c900ac3914efeed75acfb4a5
Rust
maboesanman/cargo-llvm-codecov-converter
/src/main.rs
UTF-8
4,450
2.6875
3
[ "MIT" ]
permissive
use crate::string_seek::get_region_text; use crate::string_seek::shrinkwrap; use defaultmap::DefaultBTreeMap; use rayon::prelude::*; use std::error::Error; use std::io::Read; use std::path::Path; mod codecov; mod llvm; mod string_seek; #[derive(Clone)] pub struct Region { id: usize, start: (usize, usize), end: (usize, usize), count: u64, has_count: bool, is_gap: bool, } struct OpenRegion { id: usize, start: (usize, usize), count: u64, has_count: bool, is_gap: bool, } impl OpenRegion { fn close(&self, end: (usize, usize)) -> Region { Region { id: self.id, start: self.start, end, count: self.count, has_count: self.has_count, is_gap: self.is_gap, } } } fn main() -> Result<(), Box<dyn Error>> { let mut in_buf = String::new(); std::io::stdin().read_to_string(&mut in_buf)?; let in_str = in_buf.as_str(); let llvm_cov: llvm::LLVMCov = serde_json::from_str(in_str)?; let mut codecov = codecov::CodeCov::new(); let covered_files: Vec<_> = llvm_cov .data .first() .unwrap() .files .par_iter() .map(|file| { // These are the llvm regions but cut by sub-regions. They do not overlap. let mut region_list = Vec::<Region>::new(); // These are exactly the llvm regions. They overlap let mut region_stack = Vec::<OpenRegion>::new(); let mut next_region_id = 0; // create a sequence of non overlapping regions with coverage info. for segment in file.segments.iter() { if let Some(r) = handle_segment(&mut region_stack, &segment, &mut next_region_id) { region_list.push(r); } } let mut line_coverage = DefaultBTreeMap::<usize, codecov::CodeCovLineCoverage>::default(); let file_path = Path::new(file.filename); match std::fs::read_to_string(file_path) { Ok(file_content) => { region_list = get_region_text(region_list, &file_content) .into_iter() .map(|(r, s)| shrinkwrap(r, s)) .collect() } Err(_) => { // this is ok, it just means we can't shrinkwrap our file, but the coverage information should be mostly correct. 
} }; for region in region_list { if region.has_count && !region.is_gap { let range = region.start.0..region.end.0 + 1; for line_num in range { let hit = codecov::CodeCovLineHit { start_col: if line_num == region.start.0 { Some(region.start.1) } else { None }, end_col: if line_num == region.end.0 { Some(region.end.1) } else { None }, count: region.count, }; line_coverage.get_mut(line_num).hit(hit); } } } (file.filename, line_coverage.into()) }) .collect(); for (filename, line_coverage) in covered_files { codecov.coverage.insert(filename, line_coverage); } let writer = std::io::stdout(); serde_json::to_writer_pretty(writer, &codecov)?; Ok(()) } fn handle_segment( stack: &mut Vec<OpenRegion>, segment: &llvm::LLVMCovSegment, next_region_id: &mut usize, ) -> Option<Region> { let end = (segment.line, segment.col); let new_region = stack.last().map(|r| r.close(end)); if segment.is_region_entry { stack.push(OpenRegion { id: *next_region_id, start: (segment.line, segment.col), count: segment.count, has_count: segment.has_count, is_gap: segment.is_gap_region, }); *next_region_id += 1; } else { stack.pop().unwrap(); if let Some(top) = stack.last_mut() { top.start = end; } } new_region }
true
c3d5df8b11e50519edf4a945a42d548a5c9e9787
Rust
ia7ck/competitive-programming
/AtCoder/abc221/src/bin/c/main.rs
UTF-8
971
2.75
3
[]
no_license
use input_i_scanner::{scan_with, InputIScanner}; fn main() { let stdin = std::io::stdin(); let mut _i_i = InputIScanner::from(stdin.lock()); let n = scan_with!(_i_i, String); let n: Vec<char> = n.chars().collect(); let f = |x: &[u64]| -> u64 { let mut res = 0; for d in x { res *= 10; res += d; } res }; let mut ans = 0; for bits in 0..(1 << n.len()) { let mut x = Vec::new(); let mut y = Vec::new(); for i in 0..n.len() { if bits >> i & 1 == 1 { x.push(n[i] as u64 - '0' as u64); } else { y.push(n[i] as u64 - '0' as u64); } } x.sort(); x.reverse(); y.sort(); y.reverse(); let x = f(&x); let y = f(&y); if x > 0 && y > 0 { ans = ans.max(x * y); } } assert!(ans > 0); println!("{}", ans); }
true
2fc266abf4a8ba07bc5d77e0e4429377739fa567
Rust
rowanhill/aoc16
/day25/src/parser.rs
UTF-8
3,435
3.546875
4
[]
no_license
use parser::Operand::*; use parser::Instruction::*; use regex::Regex; lazy_static! { static ref CPY_RE:Regex = Regex::new(r"cpy (.+?) (.+)").unwrap(); static ref INC_RE:Regex = Regex::new(r"inc (.+?)").unwrap(); static ref DEC_RE:Regex = Regex::new(r"dec (.+?)").unwrap(); static ref JNZ_RE:Regex = Regex::new(r"jnz (.+?) (.+)").unwrap(); static ref TGL_RE:Regex = Regex::new(r"tgl (.+?)").unwrap(); static ref OUT_RE:Regex = Regex::new(r"out (.+?)").unwrap(); } #[derive(Debug, Clone, Copy)] pub enum Operand { Register(usize), Literal(i32) } impl Operand { pub fn parse(str: &str) -> Operand { match str { "a" => Register(0), "b" => Register(1), "c" => Register(2), "d" => Register(3), _ => { if let Ok(val) = str.parse::<i32>() { Literal(val) } else { unreachable!("Unexpected operand: {}", str); } } } } } #[derive(Debug, Clone, Copy)] pub enum Instruction { Copy{source:Operand, target:Operand}, Inc{reg:Operand}, Dec{reg:Operand}, JumpNotZero{check:Operand, delta:Operand}, Toggle{reg:Operand}, Out{operand:Operand}, MultiplyAddAndClear{factor_1:Operand, factor_2:Operand, target:Operand, clear:Operand}, AddAndClear{source:Operand, target:Operand, clear:Operand}, Nop } impl Instruction { pub fn parse(line: &str) -> Instruction { if let Some(caps) = CPY_RE.captures(line) { let val_or_reg = Operand::parse(caps.at(1).unwrap()); let target_reg = Operand::parse(caps.at(2).unwrap()); Copy{source: val_or_reg, target: target_reg} } else if let Some(caps) = INC_RE.captures(line) { let reg = Operand::parse(caps.at(1).unwrap()); Inc{reg: reg} } else if let Some(caps) = DEC_RE.captures(line) { let reg = Operand::parse(caps.at(1).unwrap()); Dec{reg: reg} } else if let Some(caps) = JNZ_RE.captures(line) { let val_or_reg = Operand::parse(caps.at(1).unwrap()); let delta = Operand::parse(caps.at(2).unwrap()); JumpNotZero{check: val_or_reg, delta: delta} } else if let Some(caps) = TGL_RE.captures(line) { let reg = Operand::parse(caps.at(1).unwrap()); Toggle{reg: reg} } else if let Some(caps) = 
OUT_RE.captures(line) { let operand = Operand::parse(caps.at(1).unwrap()); Out{operand: operand} } else { unreachable!("Did not recognise instruction: {}", line); } } pub fn toggle(&self) -> Instruction { match *self { Copy{source, target} => { JumpNotZero{check: source, delta: target} }, Inc{reg} => { Dec{reg: reg} }, Dec{reg} => { Inc{reg: reg} }, JumpNotZero{check, delta} => { Copy{source: check, target: delta} }, Toggle{reg} => { Inc { reg: reg } }, Out{operand} => { Inc { reg: operand } }, MultiplyAddAndClear{..} | AddAndClear{..} | Nop => { unreachable!("Trying to toggle an optimised instruction") } } } }
true
45512e6dcb583e29733377ab21936c876911a644
Rust
mwilliammyers/elasticsearch-rs
/elasticsearch/src/cat/mod.rs
UTF-8
3,583
2.6875
3
[ "Apache-2.0" ]
permissive
/* * Licensed to Elasticsearch B.V. under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch B.V. licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ //! Cat APIs //! //! The [Cat APIs](https://www.elastic.co/guide/en/elasticsearch/reference/master/cat.html) aim to //! meet the needs of humans when looking at data returned from Elasticsearch, //! formatting it as compact, column aligned text, making it easier on human eyes. //! //! # Plain text responses //! //! By default, all Cat APIs are configured to send requests with `text/plain` content-type //! and accept headers, returning plain text responses //! //! ```rust,no_run //! # use elasticsearch::{Elasticsearch, Error, SearchParts}; //! # use url::Url; //! # use elasticsearch::auth::Credentials; //! # use serde_json::{json, Value}; //! # async fn doc() -> Result<(), Box<dyn std::error::Error>> { //! # let client = Elasticsearch::default(); //! let response = client //! .cat() //! .nodes() //! .send() //! .await?; //! //! let response_body = response.text().await?; //! # Ok(()) //! # } //! ``` //! //! # JSON responses //! //! JSON responses can be returned from Cat APIs either by using `.format("json")` //! //! ```rust,no_run //! # use elasticsearch::{Elasticsearch, Error, SearchParts}; //! # use url::Url; //! # use elasticsearch::auth::Credentials; //! # use serde_json::{json, Value}; //! 
# async fn doc() -> Result<(), Box<dyn std::error::Error>> { //! # let client = Elasticsearch::default(); //! let response = client //! .cat() //! .nodes() //! .format("json") //! .send() //! .await?; //! //! let response_body = response.json::<Value>().await?; //! # Ok(()) //! # } //! ``` //! //! Or by setting an accept header using `.headers()` //! //! ```rust,no_run //! # use elasticsearch::{Elasticsearch, Error, SearchParts, http::headers::{HeaderValue, DEFAULT_ACCEPT, ACCEPT}}; //! # use url::Url; //! # use serde_json::{json, Value}; //! # async fn doc() -> Result<(), Box<dyn std::error::Error>> { //! # let client = Elasticsearch::default(); //! let response = client //! .cat() //! .nodes() //! .header(ACCEPT, HeaderValue::from_static(DEFAULT_ACCEPT)) //! .send() //! .await?; //! //! let response_body = response.json::<Value>().await?; //! # Ok(()) //! # } //! ``` //! //! # Column Headers //! //! The column headers to return can be controlled with `.h()` //! //! ```rust,no_run //! # use elasticsearch::{Elasticsearch, Error, SearchParts}; //! # use url::Url; //! # use serde_json::{json, Value}; //! # async fn doc() -> Result<(), Box<dyn std::error::Error>> { //! # let client = Elasticsearch::default(); //! let response = client //! .cat() //! .nodes() //! .h(&["ip", "port", "heapPercent", "name"]) //! .send() //! .await?; //! //! let response_body = response.json::<String>().await?; //! # Ok(()) //! # } //! ``` //! pub use super::generated::namespace_clients::cat::*;
true
3a60665b8e5d78839086e96cfcb82ca71f27573c
Rust
rust3d/glium
/tests/vertex_buffer.rs
UTF-8
14,107
2.65625
3
[ "Apache-2.0" ]
permissive
extern crate glutin; #[macro_use] extern crate glium; use glium::Surface; use std::default::Default; mod support; #[test] fn vertex_buffer_creation() { let display = support::build_display(); #[derive(Copy, Clone)] struct Vertex { field1: [f32; 3], field2: [f32; 3], } implement_vertex!(Vertex, field1, field2); glium::VertexBuffer::new(&display, vec![ Vertex { field1: [-0.5, -0.5, 0.0], field2: [0.0, 1.0, 0.0] }, Vertex { field1: [ 0.0, 0.5, 1.0], field2: [0.0, 0.0, 1.0] }, Vertex { field1: [ 0.5, -0.5, 0.0], field2: [1.0, 0.0, 0.0] }, ] ); display.assert_no_error(); } #[test] fn vertex_buffer_empty() { let display = support::build_display(); #[derive(Copy, Clone)] struct Vertex { field1: [f32; 3], field2: [f32; 3], } implement_vertex!(Vertex, field1, field2); let vb: glium::VertexBuffer<Vertex> = glium::VertexBuffer::empty(&display, 12); assert_eq!(vb.len(), 12); display.assert_no_error(); } #[test] fn vertex_buffer_mapping_read() { let display = support::build_display(); #[derive(Copy, Clone)] struct Vertex { field1: [u8; 2], field2: [u8; 2], } implement_vertex!(Vertex, field1, field2); let mut vb = glium::VertexBuffer::new(&display, vec![ Vertex { field1: [ 2, 3], field2: [ 5, 7] }, Vertex { field1: [12, 13], field2: [15, 17] }, ] ); let mapping = vb.map(); assert_eq!(mapping[0].field1, [2, 3]); assert_eq!(mapping[1].field2, [15, 17]); display.assert_no_error(); } #[test] fn vertex_buffer_mapping_write() { let display = support::build_display(); #[derive(Copy, Clone)] struct Vertex { field1: [u8; 2], field2: [u8; 2], } implement_vertex!(Vertex, field1, field2); let mut vb = glium::VertexBuffer::new(&display, vec![ Vertex { field1: [ 2, 3], field2: [ 5, 7] }, Vertex { field1: [12, 13], field2: [15, 17] }, ] ); { let mut mapping = vb.map(); mapping[0].field1 = [0, 1]; } let mapping = vb.map(); assert_eq!(mapping[0].field1, [0, 1]); assert_eq!(mapping[1].field2, [15, 17]); display.assert_no_error(); } #[test] fn vertex_buffer_read() { let display = 
support::build_display(); #[derive(Copy, Clone)] struct Vertex { field1: [u8; 2], field2: [u8; 2], } implement_vertex!(Vertex, field1, field2); let vb = glium::VertexBuffer::new(&display, vec![ Vertex { field1: [ 2, 3], field2: [ 5, 7] }, Vertex { field1: [12, 13], field2: [15, 17] }, ] ); let data = match vb.read_if_supported() { Some(d) => d, None => return }; assert_eq!(data[0].field1, [2, 3]); assert_eq!(data[1].field2, [15, 17]); display.assert_no_error(); } #[test] fn vertex_buffer_read_slice() { let display = support::build_display(); #[derive(Copy, Clone)] struct Vertex { field1: [u8; 2], field2: [u8; 2], } implement_vertex!(Vertex, field1, field2); let vb = glium::VertexBuffer::new(&display, vec![ Vertex { field1: [ 2, 3], field2: [ 5, 7] }, Vertex { field1: [12, 13], field2: [15, 17] }, ] ); let data = match vb.slice(1 .. 2).unwrap().read_if_supported() { Some(d) => d, None => return }; assert_eq!(data[0].field2, [15, 17]); display.assert_no_error(); } #[test] fn vertex_buffer_slice_out_of_bounds() { let display = support::build_display(); #[derive(Copy, Clone)] struct Vertex { field1: [u8; 2], field2: [u8; 2], } implement_vertex!(Vertex, field1, field2); let vb = glium::VertexBuffer::new(&display, vec![ Vertex { field1: [ 2, 3], field2: [ 5, 7] }, Vertex { field1: [12, 13], field2: [15, 17] }, ] ); assert!(vb.slice(0 .. 
3).is_none()); display.assert_no_error(); } #[test] fn vertex_buffer_any() { let display = support::build_display(); #[derive(Copy, Clone)] struct Vertex { field1: [f32; 3], field2: [f32; 3], } implement_vertex!(Vertex, field1, field2); glium::VertexBuffer::new(&display, vec![ Vertex { field1: [-0.5, -0.5, 0.0], field2: [0.0, 1.0, 0.0] }, Vertex { field1: [ 0.0, 0.5, 1.0], field2: [0.0, 0.0, 1.0] }, Vertex { field1: [ 0.5, -0.5, 0.0], field2: [1.0, 0.0, 0.0] }, ] ).into_vertex_buffer_any(); display.assert_no_error(); } #[test] fn vertex_buffer_write() { let display = support::build_display(); #[derive(Copy, Clone)] struct Vertex { field1: [u8; 2], field2: [u8; 2], } implement_vertex!(Vertex, field1, field2); let vb = glium::VertexBuffer::new(&display, vec![ Vertex { field1: [ 2, 3], field2: [ 5, 7] }, Vertex { field1: [ 0, 0], field2: [ 0, 0] }, ] ); vb.write(vec![ Vertex { field1: [ 2, 3], field2: [ 5, 7] }, Vertex { field1: [12, 13], field2: [15, 17] } ]); let data = match vb.read_if_supported() { Some(d) => d, None => return }; assert_eq!(data[0].field1, [2, 3]); assert_eq!(data[0].field2, [5, 7]); assert_eq!(data[1].field1, [12, 13]); assert_eq!(data[1].field2, [15, 17]); display.assert_no_error(); } #[test] fn vertex_buffer_write_slice() { let display = support::build_display(); #[derive(Copy, Clone)] struct Vertex { field1: [u8; 2], field2: [u8; 2], } implement_vertex!(Vertex, field1, field2); let vb = glium::VertexBuffer::new(&display, vec![ Vertex { field1: [ 2, 3], field2: [ 5, 7] }, Vertex { field1: [ 0, 0], field2: [ 0, 0] }, ] ); vb.slice(1 .. 
2).unwrap().write(vec![Vertex { field1: [12, 13], field2: [15, 17] }]); let data = match vb.read_if_supported() { Some(d) => d, None => return }; assert_eq!(data[0].field1, [2, 3]); assert_eq!(data[0].field2, [5, 7]); assert_eq!(data[1].field1, [12, 13]); assert_eq!(data[1].field2, [15, 17]); display.assert_no_error(); } #[test] fn multiple_buffers_source() { let display = support::build_display(); let buffer1 = { #[derive(Copy, Clone)] struct Vertex { position: [f32; 2], } implement_vertex!(Vertex, position); glium::VertexBuffer::new(&display, vec![ Vertex { position: [-1.0, 1.0] }, Vertex { position: [ 1.0, 1.0] }, Vertex { position: [-1.0, -1.0] }, Vertex { position: [ 1.0, -1.0] }, ] ) }; let buffer2 = { #[derive(Copy, Clone)] struct Vertex { color: [f32; 3], } implement_vertex!(Vertex, color); glium::VertexBuffer::new(&display, vec![ Vertex { color: [1.0, 0.0, 0.0] }, Vertex { color: [1.0, 0.0, 0.0] }, Vertex { color: [1.0, 0.0, 0.0] }, Vertex { color: [1.0, 0.0, 0.0] }, ] ) }; let index_buffer = glium::IndexBuffer::new(&display, glium::index::TriangleStrip(vec![0u16, 1, 2, 3])); let program = glium::Program::from_source(&display, " #version 110 attribute vec2 position; attribute vec3 color; varying vec3 v_color; void main() { gl_Position = vec4(position, 0.0, 1.0); v_color = color; } ", " #version 110 varying vec3 v_color; void main() { gl_FragColor = vec4(v_color, 1.0); } ", None) .unwrap(); let texture = support::build_renderable_texture(&display); texture.as_surface().clear_color(0.0, 0.0, 0.0, 0.0); texture.as_surface().draw((&buffer1, &buffer2), &index_buffer, &program, &uniform!{}, &std::default::Default::default()).unwrap(); let data: Vec<Vec<(f32, f32, f32, f32)>> = texture.read(); for row in data.iter() { for pixel in row.iter() { assert_eq!(pixel, &(1.0, 0.0, 0.0, 1.0)); } } display.assert_no_error(); } #[test] fn zero_sized_vertex_buffer() { let display = support::build_display(); #[derive(Copy, Clone)] struct Vertex { field1: [f32; 3], field2: 
[f32; 3], } implement_vertex!(Vertex, field1, field2); glium::VertexBuffer::new(&display, Vec::<Vertex>::new()); display.assert_no_error(); } #[test] fn slice_draw_indices() { #[derive(Copy, Clone)] struct Vertex { position: [f32; 2], } implement_vertex!(Vertex, position); let display = support::build_display(); let program = glium::Program::from_source(&display, " #version 110 attribute vec2 position; void main() { gl_Position = vec4(position, 0.0, 1.0); } ", " #version 110 void main() { gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0); } ", None).unwrap(); let vb = glium::VertexBuffer::new(&display, vec![ Vertex { position: [-1.0, 1.0] }, Vertex { position: [1.0, 1.0] }, Vertex { position: [-1.0, -1.0] }, Vertex { position: [1.0, -1.0] }, ]); let indices = glium::index::TrianglesList(vec![0u16, 1, 2]); let indices = glium::IndexBuffer::new(&display, indices); let texture = support::build_renderable_texture(&display); texture.as_surface().clear_color(0.0, 0.0, 0.0, 0.0); texture.as_surface().draw(vb.slice(1 .. 
4).unwrap(), &indices, &program, &glium::uniforms::EmptyUniforms, &Default::default()).unwrap(); let data: Vec<Vec<(u8, u8, u8)>> = texture.read(); assert_eq!(data.last().unwrap()[0], (0, 0, 0)); assert_eq!(data[0].last().unwrap(), &(255, 0, 0)); display.assert_no_error(); } #[test] fn slice_draw_noindices() { #[derive(Copy, Clone)] struct Vertex { position: [f32; 2], } implement_vertex!(Vertex, position); let display = support::build_display(); let program = glium::Program::from_source(&display, " #version 110 attribute vec2 position; void main() { gl_Position = vec4(position, 0.0, 1.0); } ", " #version 110 void main() { gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0); } ", None).unwrap(); let vb = glium::VertexBuffer::new(&display, vec![ Vertex { position: [-1.0, 1.0] }, Vertex { position: [1.0, 1.0] }, Vertex { position: [-1.0, -1.0] }, Vertex { position: [1.0, -1.0] }, ]); let indices = glium::index::NoIndices(glium::index::PrimitiveType::TrianglesList); let texture = support::build_renderable_texture(&display); texture.as_surface().clear_color(0.0, 0.0, 0.0, 0.0); texture.as_surface().draw(vb.slice(1 .. 
4).unwrap(), &indices, &program, &glium::uniforms::EmptyUniforms, &Default::default()).unwrap(); let data: Vec<Vec<(u8, u8, u8)>> = texture.read(); assert_eq!(data.last().unwrap()[0], (0, 0, 0)); assert_eq!(data[0].last().unwrap(), &(255, 0, 0)); display.assert_no_error(); } #[test] fn slice_draw_multiple() { #[derive(Copy, Clone)] struct Vertex { position: [f32; 2], } implement_vertex!(Vertex, position); #[derive(Copy, Clone)] struct Vertex2 { position2: [f32; 2], } implement_vertex!(Vertex2, position2); let display = support::build_display(); let program = glium::Program::from_source(&display, " #version 110 attribute vec2 position; attribute vec2 position2; void main() { if (position != position2) { gl_Position = vec4(0.0, 0.0, 0.0, 1.0); } else { gl_Position = vec4(position, 0.0, 1.0); } } ", " #version 110 void main() { gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0); } ", None).unwrap(); // the 3 last elements will be drawn let vb1 = glium::VertexBuffer::new(&display, vec![ Vertex { position: [-1.0, 1.0] }, Vertex { position: [-1.0, 1.0] }, Vertex { position: [-1.0, 1.0] }, Vertex { position: [1.0, 1.0] }, Vertex { position: [-1.0, -1.0] }, Vertex { position: [1.0, -1.0] }, ]); // the 3 last elements will be drawn let vb2 = glium::VertexBuffer::new(&display, vec![ Vertex2 { position2: [-1.0, 1.0] }, Vertex2 { position2: [-1.0, 1.0] }, Vertex2 { position2: [-1.0, 1.0] }, Vertex2 { position2: [-1.0, 1.0] }, Vertex2 { position2: [-1.0, 1.0] }, Vertex2 { position2: [1.0, 1.0] }, Vertex2 { position2: [-1.0, -1.0] }, Vertex2 { position2: [1.0, -1.0] }, ]); let indices = glium::index::TrianglesList(vec![2u16, 3, 4]); let indices = glium::IndexBuffer::new(&display, indices); let texture = support::build_renderable_texture(&display); texture.as_surface().clear_color(0.0, 0.0, 0.0, 0.0); texture.as_surface().draw((vb1.slice(1 .. 4).unwrap(), vb2.slice(3 .. 
6).unwrap()), &indices, &program, &glium::uniforms::EmptyUniforms, &Default::default()).unwrap(); let data: Vec<Vec<(u8, u8, u8)>> = texture.read(); assert_eq!(data.last().unwrap()[0], (0, 0, 0)); assert_eq!(data[0].last().unwrap(), &(255, 0, 0)); display.assert_no_error(); }
true
0325d3b68ef87b608221e3bac46088b11572b383
Rust
icyJoseph/codejam-js
/src/rounding/src/rounding.rs
UTF-8
2,278
3.28125
3
[]
no_license
use std::io; type Res<T> = Result<T, Box<dyn std::error::Error>>; fn nxt() -> String { let mut input = String::new(); match io::stdin().read_line(&mut input) { Ok(_) => input, _ => panic!("Error reading line"), } } fn ptc<T: std::str::FromStr>() -> T { match nxt().trim().parse::<T>() { Ok(n) => n, _ => panic!("Error parsing"), } } fn does_round_up(a: usize, b: usize) -> bool { 2 * (100 * a % b) >= b } fn distance(a: usize, b: usize) -> usize { let mut delta = 0; if 100 * a % b == 0 { return delta; } while !does_round_up(a + delta, b) { delta += 1; } delta } fn find_minr(n: usize) -> usize { let mut min = 1; if 100 * min % n == 0 { return min; } while !does_round_up(min, n) { min += 1; } min } fn round_up(a: usize, b: usize) -> usize { if does_round_up(a, b) { return 100 * a / b + 1; } return 100 * a / b; } fn main() -> Res<()> { let n = ptc::<i32>(); for case in 1..=n { let spec: Vec<usize> = nxt() .trim() .split_whitespace() .map(|x| x.parse::<usize>().unwrap()) .collect(); let people = spec[0]; let mut resp: Vec<(usize, usize)> = nxt() .trim() .split_whitespace() .map(|x| x.parse::<usize>().unwrap()) .map(|x| (x, distance(x, people))) .collect(); let mut diff = people - resp.iter().fold(0, |p, c| p + c.0); resp.sort_by(|a, b| a.1.cmp(&b.1)); let mut total = 0; for (qty, dist) in resp { if diff >= dist { let delta = round_up(qty + dist, people); total += delta; diff -= dist; } else { let delta = round_up(qty, people); total += delta; } } let minr = find_minr(people); let pad = diff / minr; let leftover = diff % minr; let delta = round_up(minr, people); let leftover_delta = round_up(leftover, people); total += pad * delta; total += leftover_delta; println!("Case #{}: {}", case, total); } Ok(()) }
true
c5ac680b4e51eef23d8da90e214a66e2c01f393b
Rust
jumpersdevice/solana
/sdk/program/src/account.rs
UTF-8
3,466
2.796875
3
[ "Apache-2.0" ]
permissive
use crate::{clock::Epoch, pubkey::Pubkey}; use std::{cell::RefCell, cmp, fmt, rc::Rc}; /// An Account with data that is stored on chain #[repr(C)] #[frozen_abi(digest = "Upy4zg4EXZTnY371b4JPrGTh2kLcYpRno2K2pvjbN4e")] #[derive(Serialize, Deserialize, PartialEq, Eq, Clone, Default, AbiExample)] #[serde(rename_all = "camelCase")] pub struct Account { /// lamports in the account pub lamports: u64, /// data held in this account #[serde(with = "serde_bytes")] pub data: Vec<u8>, /// the program that owns this account. If executable, the program that loads this account. pub owner: Pubkey, /// this account's data contains a loaded program (and is now read-only) pub executable: bool, /// the epoch at which this account will next owe rent pub rent_epoch: Epoch, } impl fmt::Debug for Account { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let data_len = cmp::min(64, self.data.len()); let data_str = if data_len > 0 { format!(" data: {}", hex::encode(self.data[..data_len].to_vec())) } else { "".to_string() }; write!( f, "Account {{ lamports: {} data.len: {} owner: {} executable: {} rent_epoch: {}{} }}", self.lamports, self.data.len(), self.owner, self.executable, self.rent_epoch, data_str, ) } } impl Account { pub fn new(lamports: u64, space: usize, owner: &Pubkey) -> Self { Self { lamports, data: vec![0u8; space], owner: *owner, ..Self::default() } } pub fn new_ref(lamports: u64, space: usize, owner: &Pubkey) -> Rc<RefCell<Self>> { Rc::new(RefCell::new(Self::new(lamports, space, owner))) } pub fn new_data<T: serde::Serialize>( lamports: u64, state: &T, owner: &Pubkey, ) -> Result<Self, bincode::Error> { let data = bincode::serialize(state)?; Ok(Self { lamports, data, owner: *owner, ..Self::default() }) } pub fn new_ref_data<T: serde::Serialize>( lamports: u64, state: &T, owner: &Pubkey, ) -> Result<RefCell<Self>, bincode::Error> { Ok(RefCell::new(Self::new_data(lamports, state, owner)?)) } pub fn new_data_with_space<T: serde::Serialize>( lamports: u64, state: &T, 
space: usize, owner: &Pubkey, ) -> Result<Self, bincode::Error> { let mut account = Self::new(lamports, space, owner); account.serialize_data(state)?; Ok(account) } pub fn new_ref_data_with_space<T: serde::Serialize>( lamports: u64, state: &T, space: usize, owner: &Pubkey, ) -> Result<RefCell<Self>, bincode::Error> { Ok(RefCell::new(Self::new_data_with_space( lamports, state, space, owner, )?)) } pub fn deserialize_data<T: serde::de::DeserializeOwned>(&self) -> Result<T, bincode::Error> { bincode::deserialize(&self.data) } pub fn serialize_data<T: serde::Serialize>(&mut self, state: &T) -> Result<(), bincode::Error> { if bincode::serialized_size(state)? > self.data.len() as u64 { return Err(Box::new(bincode::ErrorKind::SizeLimit)); } bincode::serialize_into(&mut self.data[..], state) } }
true
9a530d8e16811ea480ba8418a8fb2b1641614a4a
Rust
iCodeIN/ybc
/src/layout/tile.rs
UTF-8
3,008
3.296875
3
[ "MIT", "Apache-2.0" ]
permissive
#![allow(clippy::redundant_closure_call)] use derive_more::Display; use yew::prelude::*; use yewtil::NeqAssign; #[derive(Clone, Debug, Properties, PartialEq)] pub struct TileProps { #[prop_or_default] pub children: Children, #[prop_or_default] pub classes: Option<Classes>, /// The HTML tag to use for this component. #[prop_or_else(|| "div".into())] pub tag: String, /// The context modifier to use for this tile element, else none. /// /// https://bulma.io/documentation/layout/tiles/#modifiers #[prop_or_default] pub ctx: Option<TileCtx>, /// Stack tiles vertically. /// /// https://bulma.io/documentation/layout/tiles/#modifiers #[prop_or_default] pub vertical: bool, /// The size to assign to this tile element. /// /// https://bulma.io/documentation/layout/tiles/#modifiers #[prop_or_default] pub size: Option<TileSize>, } /// A single tile element to build 2-dimensional whatever-you-like grids. /// /// [https://bulma.io/documentation/layout/tiles/](https://bulma.io/documentation/layout/tiles/) pub struct Tile { props: TileProps, } impl Component for Tile { type Message = (); type Properties = TileProps; fn create(props: Self::Properties, _: ComponentLink<Self>) -> Self { Self { props } } fn update(&mut self, _: Self::Message) -> ShouldRender { false } fn change(&mut self, props: Self::Properties) -> ShouldRender { self.props.neq_assign(props) } fn view(&self) -> Html { let mut classes = Classes::from("tile"); classes.push(&self.props.classes); if let Some(ctx) = &self.props.ctx { classes.push(&ctx.to_string()); } if self.props.vertical { classes.push("is-vertical"); } if let Some(size) = &self.props.size { classes.push(&size.to_string()); } html! { <@{self.props.tag.clone()} class=classes> {self.props.children.clone()} </@> } } } /// Tile context modifiers. 
/// /// https://bulma.io/documentation/layout/tiles/#modifiers #[derive(Clone, Debug, Display, PartialEq)] #[display(fmt = "is-{}")] pub enum TileCtx { #[display(fmt = "ancestor")] Ancestor, #[display(fmt = "parent")] Parent, #[display(fmt = "child")] Child, } /// Tile size modifiers. /// /// https://bulma.io/documentation/layout/tiles/#modifiers #[derive(Clone, Debug, Display, PartialEq)] #[display(fmt = "is-{}")] pub enum TileSize { #[display(fmt = "1")] One, #[display(fmt = "2")] Two, #[display(fmt = "3")] Three, #[display(fmt = "4")] Four, #[display(fmt = "5")] Five, #[display(fmt = "6")] Six, #[display(fmt = "7")] Seven, #[display(fmt = "8")] Eight, #[display(fmt = "9")] Nine, #[display(fmt = "10")] Ten, #[display(fmt = "11")] Eleven, #[display(fmt = "12")] Twelve, }
true
5681e425b4b6b3d76fa578caea4fa5dbeffce86e
Rust
dinfuehr/dora
/dora-asm/src/lib.rs
UTF-8
2,180
3.09375
3
[ "MIT" ]
permissive
use byteorder::{LittleEndian, WriteBytesExt}; pub mod arm64; pub mod x64; use std::convert::TryInto; #[derive(Copy, Clone, PartialEq, Eq, Debug)] pub struct Label(usize); struct AssemblerBuffer { code: Vec<u8>, position: usize, labels: Vec<Option<u32>>, } impl AssemblerBuffer { fn new() -> AssemblerBuffer { AssemblerBuffer { code: Vec::new(), position: 0, labels: Vec::new(), } } fn create_label(&mut self) -> Label { self.labels.push(None); Label(self.labels.len() - 1) } fn create_and_bind_label(&mut self) -> Label { self.labels.push(Some(self.position().try_into().unwrap())); Label(self.labels.len() - 1) } fn bind_label(&mut self, lbl: Label) { let Label(idx) = lbl; assert!(self.labels[idx].is_none()); self.labels[idx] = Some(self.position().try_into().unwrap()); } fn offset(&self, lbl: Label) -> Option<u32> { let Label(idx) = lbl; self.labels[idx] } fn position(&self) -> usize { self.position } fn set_position(&mut self, pos: usize) { self.position = pos; } fn set_position_end(&mut self) { self.position = self.code.len(); } fn emit_u8(&mut self, value: u8) { if self.position == self.code.len() { self.code.push(value); } else { self.code[self.position] = value; } self.position += 1; } fn emit_u32(&mut self, value: u32) { if self.position == self.code.len() { self.code.write_u32::<LittleEndian>(value).unwrap() } else { (&mut self.code[self.position..]) .write_u32::<LittleEndian>(value) .unwrap(); } self.position += 4; } fn emit_u64(&mut self, value: u64) { if self.position == self.code.len() { self.code.write_u64::<LittleEndian>(value).unwrap() } else { (&mut self.code[self.position..]) .write_u64::<LittleEndian>(value) .unwrap(); } self.position += 8; } }
true
ac144e150c6321448784ebf75196624af81cdb78
Rust
thomastaylor312/krustlet
/crates/kubelet/src/store/oci/file.rs
UTF-8
15,771
2.96875
3
[ "Apache-2.0" ]
permissive
use crate::store::Storer; use oci_distribution::client::ImageData; use std::path::{Path, PathBuf}; use std::sync::Arc; use async_trait::async_trait; use log::debug; use oci_distribution::Reference; use tokio::sync::Mutex; use tokio::sync::RwLock; use super::client::Client; use crate::store::LocalStore; /// A module store that keeps modules cached on the file system /// /// This type is generic over the type of client used /// to fetch modules from a remote store. This client is expected /// to be a [`Client`] pub type FileStore<C> = LocalStore<FileStorer, C>; impl<C: Client + Send> FileStore<C> { /// Create a new `FileStore` pub fn new<T: AsRef<Path>>(client: C, root_dir: T) -> Self { Self { storer: Arc::new(RwLock::new(FileStorer { root_dir: root_dir.as_ref().into(), })), client: Arc::new(Mutex::new(client)), } } } pub struct FileStorer { root_dir: PathBuf, } impl FileStorer { /// Create a new `FileStorer` pub fn new<T: AsRef<Path>>(root_dir: T) -> Self { Self { root_dir: root_dir.as_ref().into(), } } fn pull_path(&self, r: &Reference) -> PathBuf { let mut path = self.root_dir.join(r.registry()); path.push(r.repository()); path.push(r.tag().unwrap_or("latest")); path } fn pull_file_path(&self, r: &Reference) -> PathBuf { self.pull_path(r).join("module.wasm") } fn digest_file_path(&self, r: &Reference) -> PathBuf { self.pull_path(r).join("digest.txt") } } #[async_trait] impl Storer for FileStorer { async fn get_local(&self, image_ref: &Reference) -> anyhow::Result<Vec<u8>> { let path = self.pull_file_path(image_ref); if !path.exists() { return Err(anyhow::anyhow!( "Image ref {} not available locally", image_ref )); } debug!("Fetching image ref '{:?}' from disk", image_ref); Ok(tokio::fs::read(path).await?) 
} async fn store(&mut self, image_ref: &Reference, image_data: ImageData) -> anyhow::Result<()> { tokio::fs::create_dir_all(self.pull_path(image_ref)).await?; let digest_path = self.digest_file_path(image_ref); // We delete the digest file before writing the image file, rather // than simply overwriting the digest file after writing the image file. // This addresses failure modes where, for example, the image file // gets updated but the digest file write fails and the store ends // up associating the wrong digest with the file on disk. if digest_path.exists() { tokio::fs::remove_file(&digest_path).await?; } let module_path = self.pull_file_path(image_ref); tokio::fs::write(&module_path, image_data.content).await?; if let Some(d) = image_data.digest { tokio::fs::write(&digest_path, d).await?; } Ok(()) } async fn is_present(&self, image_ref: &Reference) -> bool { let path = self.pull_file_path(image_ref); path.exists() } async fn is_present_with_digest(&self, image_ref: &Reference, digest: String) -> bool { let path = self.digest_file_path(image_ref); path.exists() && file_content_is(path, digest).await } } impl<C: Client + Send> Clone for FileStore<C> { fn clone(&self) -> Self { Self { storer: self.storer.clone(), client: self.client.clone(), } } } async fn file_content_is(path: PathBuf, text: String) -> bool { match tokio::fs::read(path).await { Err(_) => false, Ok(content) => { let file_text = String::from_utf8_lossy(&content); file_text == text } } } #[cfg(test)] mod test { use super::*; use crate::container::PullPolicy; use crate::store::Store; use oci_distribution::client::ImageData; use oci_distribution::secrets::RegistryAuth; use std::collections::HashMap; use std::convert::TryFrom; use std::sync::RwLock; #[tokio::test] async fn can_parse_pull_policies() { assert_eq!(None, PullPolicy::parse(None).unwrap()); assert_eq!( PullPolicy::Always, PullPolicy::parse(Some("Always")).unwrap().unwrap() ); assert_eq!( PullPolicy::IfNotPresent, 
PullPolicy::parse(Some("IfNotPresent")).unwrap().unwrap() ); assert_eq!( PullPolicy::Never, PullPolicy::parse(Some("Never")).unwrap().unwrap() ); assert!( PullPolicy::parse(Some("IfMoonMadeOfGreenCheese")).is_err(), "Expected parse failure but didn't get one" ); } #[derive(Clone)] struct FakeImageClient { images: Arc<RwLock<HashMap<String, ImageData>>>, } impl FakeImageClient { fn new(entries: Vec<(&'static str, Vec<u8>, &'static str)>) -> Self { let client = FakeImageClient { images: Default::default(), }; for (name, content, digest) in entries { let mut images = client .images .write() .expect("should be able to write to images"); images.insert( name.to_owned(), ImageData { content, digest: Some(digest.to_owned()), }, ); } client } fn update(&mut self, key: &str, content: Vec<u8>, digest: &str) { let mut images = self .images .write() .expect("should be able to write to images"); images.insert( key.to_owned(), ImageData { content, digest: Some(digest.to_owned()), }, ); } } #[async_trait] impl Client for FakeImageClient { async fn pull( &mut self, image_ref: &Reference, _auth: &RegistryAuth, ) -> anyhow::Result<ImageData> { let images = self .images .read() .expect("should be able to read from images"); match images.get(&image_ref.whole()) { Some(v) => Ok(v.clone()), None => Err(anyhow::anyhow!("error pulling module")), } } } struct TemporaryDirectory { path: PathBuf, } impl Drop for TemporaryDirectory { fn drop(&mut self) { std::fs::remove_dir_all(&self.path).expect("Failed to remove temp directory"); } } fn create_temp_dir() -> TemporaryDirectory { let os_temp_dir = std::env::temp_dir(); let subdirectory = PathBuf::from(format!("krustlet-fms-tests-{}", uuid::Uuid::new_v4())); let path = os_temp_dir.join(subdirectory); std::fs::create_dir(&path).expect("Failed to create temp directory"); TemporaryDirectory { path } } #[tokio::test] async fn file_module_store_can_pull_if_policy_if_not_present() -> anyhow::Result<()> { let fake_client = 
FakeImageClient::new(vec![("foo/bar:1.0", vec![1, 2, 3], "sha256:123")]); let fake_ref = Reference::try_from("foo/bar:1.0")?; let scratch_dir = create_temp_dir(); let store = FileStore::new(fake_client, &scratch_dir.path); let module_bytes = store .get( &fake_ref, PullPolicy::IfNotPresent, &RegistryAuth::Anonymous, ) .await?; assert_eq!(3, module_bytes.len()); assert_eq!(2, module_bytes[1]); Ok(()) } #[tokio::test] async fn file_module_store_can_pull_if_policy_always() -> anyhow::Result<()> { let fake_client = FakeImageClient::new(vec![("foo/bar:1.0", vec![1, 2, 3], "sha256:123")]); let fake_ref = Reference::try_from("foo/bar:1.0")?; let scratch_dir = create_temp_dir(); let store = FileStore::new(fake_client, &scratch_dir.path); let module_bytes = store .get(&fake_ref, PullPolicy::Always, &RegistryAuth::Anonymous) .await?; assert_eq!(3, module_bytes.len()); assert_eq!(2, module_bytes[1]); Ok(()) } #[tokio::test] async fn file_module_store_does_not_pull_if_policy_never() -> anyhow::Result<()> { let fake_client = FakeImageClient::new(vec![("foo/bar:1.0", vec![1, 2, 3], "sha256:123")]); let fake_ref = Reference::try_from("foo/bar:1.0")?; let scratch_dir = create_temp_dir(); let store = FileStore::new(fake_client, &scratch_dir.path); let module_bytes = store .get(&fake_ref, PullPolicy::Never, &RegistryAuth::Anonymous) .await; assert!( module_bytes.is_err(), "expected get with pull policy Never to fail but it worked" ); Ok(()) } #[tokio::test] async fn file_module_store_can_reuse_cached_if_policy_never() -> anyhow::Result<()> { let fake_client = FakeImageClient::new(vec![("foo/bar:1.0", vec![1, 2, 3], "sha256:123")]); let fake_ref = Reference::try_from("foo/bar:1.0")?; let scratch_dir = create_temp_dir(); let store = FileStore::new(fake_client, &scratch_dir.path); let prime_cache = store .get(&fake_ref, PullPolicy::Always, &RegistryAuth::Anonymous) .await; assert!(prime_cache.is_ok()); let module_bytes = store .get(&fake_ref, PullPolicy::Never, &RegistryAuth::Anonymous) 
.await?; assert_eq!(3, module_bytes.len()); assert_eq!(2, module_bytes[1]); Ok(()) } #[tokio::test] async fn file_module_store_ignores_updates_if_policy_if_not_present() -> anyhow::Result<()> { let mut fake_client = FakeImageClient::new(vec![("foo/bar:1.0", vec![1, 2, 3], "sha256:123")]); let fake_ref = Reference::try_from("foo/bar:1.0")?; let scratch_dir = create_temp_dir(); let store = FileStore::new(fake_client.clone(), &scratch_dir.path); let module_bytes_orig = store .get( &fake_ref, PullPolicy::IfNotPresent, &RegistryAuth::Anonymous, ) .await?; assert_eq!(3, module_bytes_orig.len()); assert_eq!(2, module_bytes_orig[1]); fake_client.update("foo/bar:1.0", vec![4, 5, 6, 7], "sha256:4567"); let module_bytes_after = store .get( &fake_ref, PullPolicy::IfNotPresent, &RegistryAuth::Anonymous, ) .await?; assert_eq!(3, module_bytes_after.len()); assert_eq!(2, module_bytes_after[1]); Ok(()) } #[tokio::test] async fn file_module_store_gets_updates_if_policy_always() -> anyhow::Result<()> { let mut fake_client = FakeImageClient::new(vec![("foo/bar:1.0", vec![1, 2, 3], "sha256:123")]); let fake_ref = Reference::try_from("foo/bar:1.0")?; let scratch_dir = create_temp_dir(); let store = FileStore::new(fake_client.clone(), &scratch_dir.path); let module_bytes_orig = store .get( &fake_ref, PullPolicy::IfNotPresent, &RegistryAuth::Anonymous, ) .await?; assert_eq!(3, module_bytes_orig.len()); assert_eq!(2, module_bytes_orig[1]); fake_client.update("foo/bar:1.0", vec![4, 5, 6, 7], "sha256:4567"); let module_bytes_after = store .get(&fake_ref, PullPolicy::Always, &RegistryAuth::Anonymous) .await?; assert_eq!(4, module_bytes_after.len()); assert_eq!(5, module_bytes_after[1]); Ok(()) } #[tokio::test] async fn file_module_store_copes_with_no_tag() -> anyhow::Result<()> { let fake_client = FakeImageClient::new(vec![("foo/bar", vec![2, 3], "sha256:23")]); let fake_ref = Reference::try_from("foo/bar")?; let scratch_dir = create_temp_dir(); let store = FileStore::new(fake_client, 
&scratch_dir.path); let module_bytes = store .get(&fake_ref, PullPolicy::Always, &RegistryAuth::Anonymous) .await?; assert_eq!(2, module_bytes.len()); assert_eq!(3, module_bytes[1]); Ok(()) } #[tokio::test] async fn file_module_store_can_pull_if_tag_given_but_policy_omitted() -> anyhow::Result<()> { let mut fake_client = FakeImageClient::new(vec![("foo/bar:2.0", vec![6, 7, 8], "sha256:678")]); let fake_ref = Reference::try_from("foo/bar:2.0")?; let scratch_dir = create_temp_dir(); let store = FileStore::new(fake_client.clone(), &scratch_dir.path); let policy = PullPolicy::parse_effective(None, Some(fake_ref.clone()))?; let module_bytes_orig = store .get(&fake_ref, policy, &RegistryAuth::Anonymous) .await?; assert_eq!(3, module_bytes_orig.len()); assert_eq!(7, module_bytes_orig[1]); fake_client.update("foo/bar:2.0", vec![8, 9], "sha256:89"); // But with no policy it should *not* re-fetch a tag that's in cache let module_bytes_after = store .get(&fake_ref, policy, &RegistryAuth::Anonymous) .await?; assert_eq!(3, module_bytes_after.len()); assert_eq!(7, module_bytes_after[1]); Ok(()) } #[tokio::test] async fn file_module_store_always_pulls_if_tag_latest_and_policy_omitted() -> anyhow::Result<()> { let mut fake_client = FakeImageClient::new(vec![("foo/bar:latest", vec![3, 4], "sha256:34")]); let fake_ref = Reference::try_from("foo/bar:latest")?; let scratch_dir = create_temp_dir(); let store = FileStore::new(fake_client.clone(), &scratch_dir.path); let policy = PullPolicy::parse_effective(None, Some(fake_ref.clone()))?; let module_bytes_orig = store .get(&fake_ref, policy, &RegistryAuth::Anonymous) .await?; assert_eq!(2, module_bytes_orig.len()); assert_eq!(4, module_bytes_orig[1]); fake_client.update("foo/bar:latest", vec![5, 6, 7], "sha256:567"); let module_bytes_after = store .get(&fake_ref, policy, &RegistryAuth::Anonymous) .await?; assert_eq!(3, module_bytes_after.len()); assert_eq!(6, module_bytes_after[1]); Ok(()) } #[tokio::test] async fn 
file_module_store_always_pulls_if_tag_and_policy_omitted() -> anyhow::Result<()> { let mut fake_client = FakeImageClient::new(vec![("foo/bar", vec![3, 4], "sha256:34")]); let fake_ref = Reference::try_from("foo/bar")?; let scratch_dir = create_temp_dir(); let store = FileStore::new(fake_client.clone(), &scratch_dir.path); let policy = PullPolicy::parse_effective(None, Some(fake_ref.clone()))?; let module_bytes_orig = store .get(&fake_ref, policy, &RegistryAuth::Anonymous) .await?; assert_eq!(2, module_bytes_orig.len()); assert_eq!(4, module_bytes_orig[1]); fake_client.update("foo/bar", vec![5, 6, 7], "sha256:567"); let module_bytes_after = store .get(&fake_ref, policy, &RegistryAuth::Anonymous) .await?; assert_eq!(3, module_bytes_after.len()); assert_eq!(6, module_bytes_after[1]); Ok(()) } }
true
d050f0539c4d0070938daeca27ea80a900dc7be2
Rust
japaric/ultrascale-plus
/firmware/zup-rtfm/macros/src/check.rs
UTF-8
7,951
2.625
3
[ "LicenseRef-scancode-unknown-license-reference", "MIT", "Apache-2.0" ]
permissive
use std::collections::{ hash_map::{Entry, HashMap}, HashSet, }; use proc_macro2::Span; use syn::parse; use crate::{syntax::App, NSGIS}; pub fn app(app: &App) -> parse::Result<()> { // in single-core context no static should use the `#[global]` attribute if app.cores == 1 { let main = &app.mains[0]; for (name, static_) in main .init .iter() .flat_map(|init| &init.statics) .chain(main.idle.iter().flat_map(|idle| &idle.statics)) .chain(app.tasks.values().flat_map(|task| &task.statics)) { if static_.global { return Err(parse::Error::new( name.span(), "statics can NOT be marked as `#[global]` in single-core applications", )); } } } for (name, static_) in app.tasks.values().flat_map(|task| &task.statics) { if static_.global { return Err(parse::Error::new( name.span(), "statics within a `#[task]` can NOT be marked as `#[global]`", )); } } // Check that all referenced resources have been declared and that `static mut` resources are // *not* shared between cores let mut mut_resources = HashMap::new(); for (core, name) in app.mains .iter() .zip(0..) 
.flat_map(move |(main, core)| { main.init .iter() .flat_map(move |init| init.args.resources.iter().map(move |res| (core, res))) .chain(main.idle.iter().flat_map(move |idle| { idle.args.resources.iter().map(move |res| (core, res)) })) }) .chain(app.interrupts.values().flat_map(|interrupt| { let core = interrupt.args.core; interrupt.args.resources.iter().map(move |res| (core, res)) })) .chain(app.tasks.values().flat_map(|task| { let core = task.args.core; task.args.resources.iter().map(move |res| (core, res)) })) { let span = name.span(); if let Some(res) = app.resources.get(name) { if res.mutability.is_some() { match mut_resources.entry(name) { Entry::Occupied(entry) => { if *entry.get() != core { return Err(parse::Error::new( span, "`static mut` resources can NOT be accessed from different cores", )); } } Entry::Vacant(entry) => { entry.insert(core); } } } } else { return Err(parse::Error::new( span, "this resource has NOT been declared", )); } } for init in app.mains.iter().filter_map(|main| main.init.as_ref()) { // Check that late resources have not been assigned to `init` for res in &init.args.resources { if app.resources.get(res).unwrap().expr.is_none() { return Err(parse::Error::new( res.span(), "late resources can NOT be assigned to `init`", )); } } } // Check that all late resources are covered by `init::LateResources` let mut late_resources = app .resources .iter() .filter_map(|(name, res)| if res.expr.is_none() { Some(name) } else { None }) .collect::<HashSet<_>>(); if !late_resources.is_empty() { if app.cores == 1 { // the only core will initialize all late resources } else { // this core will initialize the "rest" of late resources let mut rest = None; let mut initialized = HashMap::new(); for (core, init) in app.mains.iter().enumerate().filter_map(|(i, main)| { if let Some(init) = main.init.as_ref() { if init.returns_late_resources { Some((i, init)) } else { None } } else { None } }) { if !init.args.late.is_empty() { for res in &init.args.late { if 
!late_resources.contains(&res) { return Err(parse::Error::new( res.span(), "this is not a late resource", )); } if let Some(other) = initialized.get(res) { return Err(parse::Error::new( res.span(), &format!("this resource will be initialized by core {}", other), )); } else { late_resources.remove(res); initialized.insert(res, core); } } } else if let Some(rest) = rest { return Err(parse::Error::new( Span::call_site(), &format!( "unclear how initialization of late resources is split between \ cores {} and {}", rest, core, ), )); } else { rest = Some(core); } } if let Some(res) = late_resources.iter().next() { if rest.is_none() { return Err(parse::Error::new( res.span(), "this resource is not being initialized", )); } } } } // Check that all referenced tasks have been declared for task in app .mains .iter() .flat_map(|main| { main.init .iter() .flat_map(|init| &init.args.spawn) .chain(main.idle.iter().flat_map(|idle| &idle.args.spawn)) }) .chain( app.interrupts .values() .flat_map(|interrupt| &interrupt.args.spawn), ) .chain(app.tasks.values().flat_map(|task| &task.args.spawn)) { if !app.tasks.contains_key(task) { return Err(parse::Error::new( task.span(), "this task has NOT been declared", )); } } // Check that there are enough dispatchers to handle all priority levels for core in 0..app.cores { let ndispatchers = app .tasks .values() .filter_map(|task| { if task.args.core == core { Some(task.args.priority) } else { None } }) .collect::<HashSet<_>>() .len(); let used_sgis = app .interrupts .keys() .filter(|name| { let name = name.to_string(); name.starts_with("SG") && name[2..].parse::<u8>().map(|n| n < NSGIS).unwrap_or(false) }) .count(); if ndispatchers + usize::from(used_sgis) > usize::from(NSGIS) { return Err(parse::Error::new( Span::call_site(), "Not enough free Software-Generated Interrupts (SGI) to \ dispatch all task priorities", )); } } Ok(()) }
true
1d9431c745cf3bb6fc272e42ef0b6570f01cce28
Rust
EFanZh/Introduction-to-Algorithms
/src/chapter_8_sorting_in_linear_time/section_8_2_counting_sort/exercises/exercise_8_2_3.rs
UTF-8
1,035
3.234375
3
[]
no_license
pub fn modified_counting_sort(a: &[usize], b: &mut [usize], k: usize) { let mut c = vec![0; k]; for &x in a { c[x] += 1; } // C[i] now contains the number of elements equal to i. for i in 1..k { c[i] += c[i - 1]; } // C[i] now contains the number of elements less than or equal to i. for &x in a { b[c[x] - 1] = x; c[x] -= 1; } } #[cfg(test)] mod tests { use crate::test_utilities; use rand::Rng; use std::iter; #[test] fn test_modified_counting_sort() { let mut a = Vec::new(); let mut b = Vec::new(); let mut rng = rand::thread_rng(); for n in 0_usize..10 { for _ in 0..(1 << n) { test_utilities::assign_vec_from_iter(&mut a, iter::repeat_with(|| rng.gen_range(0..n)).take(n)); b.resize(n, 0); super::modified_counting_sort(&a, &mut b, n); a.sort_unstable(); assert_eq!(a, b); } } } }
true
8663f292101a18a266d5bb9a178982ffc40bb3a5
Rust
impuls71/grammers
/lib/grammers-client/src/types/entity_set.rs
UTF-8
4,962
3.484375
3
[ "Apache-2.0", "MIT" ]
permissive
use crate::types::Entity; use grammers_tl_types as tl; use std::collections::HashMap; use std::ops::Index; /// Hashable `Peer`. #[derive(Hash, PartialEq, Eq)] enum Peer { User(i32), Chat(i32), Channel(i32), } pub enum MaybeBorrowedVec<'a, T> { Borrowed(&'a [T]), Owned(Vec<T>), } /// Helper structure to efficiently retrieve entities via their peer. /// /// A lot of responses include the entities related to them in the form of a list of users /// and chats, making it annoying to extract a specific entity. This structure lets you /// save those separate vectors in a single place and query them by using a `Peer`. pub struct EntitySet<'a> { users: MaybeBorrowedVec<'a, tl::enums::User>, chats: MaybeBorrowedVec<'a, tl::enums::Chat>, // Because we can't store references to other fields, we instead store the index map: HashMap<Peer, usize>, } fn build_map(users: &[tl::enums::User], chats: &[tl::enums::Chat]) -> HashMap<Peer, usize> { let mut map = HashMap::new(); for (i, user) in users.into_iter().enumerate() { match user { tl::enums::User::User(user) => { map.insert(Peer::User(user.id), i); } tl::enums::User::Empty(_) => {} } } for (i, chat) in chats.into_iter().enumerate() { let i = users.len() + i; match chat { tl::enums::Chat::Chat(chat) => { map.insert(Peer::Chat(chat.id), i); } tl::enums::Chat::Forbidden(chat) => { map.insert(Peer::Chat(chat.id), i); } tl::enums::Chat::Channel(channel) => { map.insert(Peer::Channel(channel.id), i); } tl::enums::Chat::ChannelForbidden(channel) => { map.insert(Peer::Channel(channel.id), i); } tl::enums::Chat::Empty(_) => {} } } map } impl<T> MaybeBorrowedVec<'_, T> { fn len(&self) -> usize { match self { MaybeBorrowedVec::Borrowed(slice) => slice.len(), MaybeBorrowedVec::Owned(vec) => vec.len(), } } } impl<T> Index<usize> for MaybeBorrowedVec<'_, T> { type Output = T; fn index(&self, index: usize) -> &Self::Output { match self { MaybeBorrowedVec::Borrowed(slice) => &slice[index], MaybeBorrowedVec::Owned(vec) => &vec[index], } } } 
impl<'a> EntitySet<'a> { /// Create a borrowed entity set. /// /// Useful when you can't or don't want to take ownership of the lists. pub fn new_borrowed(users: &'a [tl::enums::User], chats: &'a [tl::enums::Chat]) -> Self { let map = build_map(users, chats); Self { users: MaybeBorrowedVec::Borrowed(users), chats: MaybeBorrowedVec::Borrowed(chats), map, } } /// Create a new owned entity set. /// /// Useful when you need to pass or hold on to the instance. pub fn new_owned(users: Vec<tl::enums::User>, chats: Vec<tl::enums::Chat>) -> Self { let map = build_map(&users, &chats); Self { users: MaybeBorrowedVec::Owned(users), chats: MaybeBorrowedVec::Owned(chats), map, } } /// Create a new empty entity set. /// /// Useful when there is no information known about any entities. pub fn empty() -> Self { Self { users: MaybeBorrowedVec::Owned(Vec::new()), chats: MaybeBorrowedVec::Owned(Vec::new()), map: HashMap::new(), } } /// Retrieve the full `Entity` object given its `Peer`. pub fn get(&self, peer: &tl::enums::Peer) -> Option<Entity> { let key = match peer { tl::enums::Peer::User(tl::types::PeerUser { user_id }) => (Peer::User(*user_id)), tl::enums::Peer::Chat(tl::types::PeerChat { chat_id }) => (Peer::Chat(*chat_id)), tl::enums::Peer::Channel(tl::types::PeerChannel { channel_id }) => { Peer::Channel(*channel_id) } }; self.map .get(&key) .map(|&index| { if index < self.users.len() { match self.users[index] { tl::enums::User::User(ref user) => Some(Entity::User(user)), tl::enums::User::Empty(_) => None, } } else { match self.chats[index - self.users.len()] { tl::enums::Chat::Chat(ref chat) => Some(Entity::Chat(chat)), tl::enums::Chat::Forbidden(_) => None, tl::enums::Chat::Channel(ref channel) => Some(Entity::Channel(channel)), tl::enums::Chat::ChannelForbidden(_) => None, tl::enums::Chat::Empty(_) => None, } } }) .flatten() } }
true
02979bb7eea23f9c927dafbda5bae43455debdca
Rust
saranshr/HSMW_RandD_Project
/1_Rust/Rust_Basics/1_CommonProgrammingConcepts/2_data_types/src/main.rs
UTF-8
4,521
4.125
4
[]
no_license
fn main() {
    // === SCALAR TYPES ===
    //
    // Rust's scalar types: integers (i8..i128 / u8..u128 plus the
    // architecture-sized isize/usize), floating-point numbers (f32, f64 —
    // f64 is the default, both IEEE-754), booleans and characters.
    //
    // Integer ranges: -(2^(n-1)) <= signed <= 2^(n-1) - 1 and
    // 0 <= unsigned <= 2^n - 1. Overflow (e.g. storing 256 in a u8) wraps
    // via two's complement in release builds but is considered an error
    // (debug builds panic).

    // Both operands are integer literals, so this is integer division.
    let quotient = 7 / 2; // 3
    println!("quotient = {}", quotient);

    // With float literals the same expression is floating-point division.
    // Shadowing the previous `quotient` binding is idiomatic here.
    let quotient = 7.0 / 2.0; // 3.5
    println!("quotient = {}", quotient);

    let remainder = 43 % 3; // 1
    println!("remainder = {}", remainder);

    // BOOLEAN TYPE: one byte in size, values `true` or `false`.
    let t = true;
    let f: bool = false; // explicit type annotation

    // CHARACTER TYPE: single quotes, 4 bytes, a Unicode scalar value
    // (U+0000..=U+D7FF and U+E000..=U+10FFFF) — far more than ASCII.
    // String literals use double quotes.
    let c = 'z';
    let my_name = "Bob";
    println!("c = {}", c);
    println!("my_name = {}", my_name);

    // === COMPOUND TYPES ===

    // TUPLE: groups values of possibly different types; fixed length once
    // declared. Element types need not be written out explicitly.
    let tup: (i32, f64, u8) = (500, 6.4, 1);
    let tup1 = (500, 6.4, 1);
    let (x, y, z) = tup1; // destructuring a tuple
    println!("The value of y is: {}", y);
    println!("The value of y is: {}", tup.1); // both access forms are valid

    // ARRAY: all elements share one type, length is fixed (like C), and the
    // whole array is a single stack-allocated chunk. For a growable
    // collection use Vec instead (covered later).
    let d = [1, 2, 3, 4, 5];
    let b: [i32; 5] = [1, 2, 3, 4, 5]; // annotation optional when initialized
    let c: [i32; 10]; // declared but uninitialized — cannot be read until assigned
    let a = [3; 5]; // repeat syntax: creates [3, 3, 3, 3, 3]

    // Accessing elements.
    let start = d[0];
    // FIX: index `d` with its own length. The original used `a.len() - 1`,
    // which only produced the right answer because `a` and `d` happen to
    // both contain 5 elements.
    let end = d[d.len() - 1];
    println!("The value of the first element of d is: {}", start);
    println!("The value of the last element of d is: {}", end);

    // SAFETY: a constant out-of-bounds index such as `a[10]` is rejected at
    // compile time by modern rustc (deny-by-default `unconditional_panic`
    // lint). A runtime out-of-bounds index makes the program panic instead
    // of reading invalid memory — unlike C, where no such check is done.
    //
    // (`t`, `f`, `x`, `z`, `b` and the uninitialized `c` exist purely for
    // demonstration; the compiler warns that they are unused.)
}
true
4b9874d8e386e051d0068b6e2abfe83ab02865c4
Rust
songlinshu/elvis
/core/src/state.rs
UTF-8
853
3.40625
3
[ "MIT" ]
permissive
//! State machine use crate::Node; use std::collections::HashMap; /// State store map pub type StateKV = HashMap<Vec<u8>, Vec<u8>>; /// state for tree pub struct State { /// Elvis Node child: Node, /// State Machine state: StateKV, } impl State { /// New State pub fn new(node: impl Into<Node>) -> State { State { child: node.into(), state: HashMap::new(), } } } impl State { /// Get state pub fn get(&self, k: &[u8]) -> Vec<u8> { self.state.get(k).unwrap_or(&vec![]).to_vec() } /// Set state pub fn set(&mut self, k: &[u8], v: &[u8]) { self.state.insert(k.to_vec(), v.to_vec()); } } impl Into<Node> for State { fn into(self) -> Node { let mut n: Node = self.child.into(); n.state = Some(self.state); n } }
true
e57275374f46a870e525a3b02d5db37b1ebb42fc
Rust
cang-mang/TrueMan
/RUST/util/hash/src/bkdr/mod.rs
UTF-8
1,075
2.59375
3
[]
no_license
/*
 * encoding=utf-8
 * BKDR hash interface.
 * History:
 *   2020-11-10: completed.
 */
/*||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||*/
// BKDR hash over a byte string.
//
// `seed` is typically one of 31, 131, 1313, 13131, 131313, ...
// `magic` is the initial hash value; 0 is usually fine.
// For strings made mostly of ASCII letters and digits a seed of 31 is
// recommended; 131 is the general-purpose choice.
pub fn x_0(key: &[u8], seed: u32, magic: u32) -> u32 {
    // Fold every byte into the accumulator with wrapping arithmetic, exactly
    // as the classic `h = h * seed + byte` loop does.
    key.iter()
        .fold(magic, |hash, &byte| hash.wrapping_mul(seed).wrapping_add(byte as u32))
}
/*----------------------------------------------------------------------------*/
// "Times 33" variant, suited to short strings of ASCII letters.
// `magic` is the initial hash value; 0 is usually fine.
#[inline(always)]
pub fn time33(key: &[u8], magic: u32) -> u32 {
    x_0(key, 33_u32, magic)
}
/*||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||*/
#[cfg(test)]
mod tests;
/*||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||*/
true
9988de8b2d4d91292c3aa8c1ec6b2c2920d75de4
Rust
wg/rusoto
/rusoto/credential/src/variable.rs
UTF-8
9,857
3.625
4
[ "MIT", "LicenseRef-scancode-unknown-license-reference" ]
permissive
use std::convert::From; use std::env::{var, VarError}; use std::fmt; use std::sync::Arc; /// Variable is an abstraction over parameters to credential providers, allowing to abstract on /// how (source) and when (time) parameter values are resolved. A lot of credentials providers /// use external information sources such as environment variables or files to obtain parameter /// values needed to produce AWS credentials. /// /// # Information Sources /// /// - In memory values (static) /// - Environment variables (dynamic) /// - Files (dynamic) /// - ... /// /// # Resolving Behaviour /// /// - Static variables always resolve to the same value. /// - Dynamic variables can resolve to different values over time. /// /// Most prominent examples for dynamic variables are parameters which read their value from /// environment variables or files. pub enum Variable<T, E = super::CredentialsError> { /// Static variable always resolving to the same given value. Static(T), /// Dynamic variable can resolve to different values over time. Dynamic(Arc<dyn Fn() -> Result<T, E> + Send + Sync>), /// Fallback try variables in given order returning the value of the first variable that /// does resolve. 
Fallback(Box<Variable<T, E>>, Box<Variable<T, E>>), } impl<T, E> From<T> for Variable<T, E> where T: Clone, { fn from(value: T) -> Self { Variable::with_value(value) } } impl<E> From<&str> for Variable<String, E> { fn from(value: &str) -> Self { Variable::with_value(value.to_owned()) } } /* impl<T, E, V> From<&V> for Variable<T, E> where T: Clone + std::borrow::Borrow<V>, V: ToOwned<Owned = T> + ?Sized, { fn from(value: &V) -> Self { Variable::with_value(value.to_owned()) } }*/ impl<T: fmt::Debug, E> fmt::Debug for Variable<T, E> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Self::Static(t) => write!(f, "Variable::Static({:?})", t), Self::Dynamic(_) => write!(f, "Variable::Dynamic(...)"), Self::Fallback(a, b) => write!(f, "Variable::Fallback({:?}, {:?})", a, b), } } } /// Custom Clone implementation as type parameter E doesn't have to be cloneable. impl<T: Clone, E> Clone for Variable<T, E> { fn clone(&self) -> Self { match self { Self::Static(t) => Self::Static(t.clone()), Self::Dynamic(f) => Self::Dynamic(f.clone()), Self::Fallback(a, b) => Self::Fallback(a.clone(), b.clone()), } } } impl<T: Clone, E> Variable<T, E> { /// Variable which statically resolves to a provided (in-memory) value. pub fn with_value<V: Into<T>>(value: V) -> Self { Self::Static(value.into()) } /// Resolve the variable's value. pub fn resolve(&self) -> Result<T, E> { match self { Self::Static(t) => Ok(t.clone()), Self::Dynamic(f) => f(), Self::Fallback(a, b) => a.resolve().or_else(|_| b.resolve()), } } /// Combine this Variable with a fallback Variable. Resolving the variable's value will be /// done lazily, stopping on the first Variable that successfuly resolves. 
/// /// # Example Usage /// /// ```rust /// # use rusoto_credential::Variable; /// let primary: Variable<String> = Variable::from_env_var("AWS_SECRET_ACCESS_KEY"); /// let fallback = Variable::from_env_var("AWS_SECRET_KEY"); /// let aws_secret_access_key = primary.or(fallback); /// ``` pub fn or(self, other: Variable<T, E>) -> Self { Self::Fallback(Box::new(self), Box::new(other)) } } impl<T: 'static, E: 'static> Variable<T, E> { /// Variable which dynamically resolves to the value returned from the provided closure. Use /// this constructor function to create dynamically resolving Variables with custom logic. pub fn dynamic(f: impl Fn() -> Result<T, E> + Send + Sync + 'static) -> Self { Self::Dynamic(Arc::new(f)) } } impl<T, E> Variable<T, E> where T: From<String> + 'static, E: From<VarError> + 'static, { /// Variable which dynamically resolves to the value of a given environment variable. pub fn from_env_var<K: AsRef<std::ffi::OsStr>>(key: K) -> Self { let tmp = key.as_ref().to_os_string(); Self::dynamic(move || match var(&tmp) { Ok(ref v) if !v.trim().is_empty() => Ok(T::from(v.trim().to_string())), Ok(_) => Err(E::from(VarError::NotPresent)), Err(e) => Err(E::from(e)), }) } } impl<T, E> Variable<Option<T>, E> where T: From<String> + 'static, E: From<VarError> + 'static, { /// Variable which dynamically resolves to the value of a given environment variable. pub fn from_env_var_optional<K: AsRef<std::ffi::OsStr>>(key: K) -> Self { let tmp = key.as_ref().to_os_string(); Self::dynamic(move || match var(&tmp) { Ok(ref v) if !v.trim().is_empty() => Ok(Some(T::from(v.trim().to_string()))), Ok(_) | Err(VarError::NotPresent) => Ok(None), Err(e) => Err(E::from(e)), }) } } impl<T, E> Variable<T, E> where T: From<String> + 'static, E: From<std::io::Error> + From<std::string::FromUtf8Error> + 'static, { /// Variable which dynamically resolves to the value of an UTF-8 encoded text file /// (removing all leading and trailing whitespaces. 
pub fn from_text_file<K: AsRef<std::path::Path>>(file: K) -> Self { use std::fs::read; let tmp = file.as_ref().to_path_buf(); Self::dynamic(move || { Ok(T::from( String::from_utf8(read(&tmp)?)?.as_str().trim().to_string(), )) }) } } impl<T, E> Variable<T, E> where T: From<Vec<u8>> + 'static, E: From<std::io::Error> + 'static, { /// Variable which dynamically resolves to the value of a binary file. pub fn from_binary_file<K: AsRef<std::path::Path>>(file: K) -> Self { use std::fs::read; let tmp = file.as_ref().to_path_buf(); Self::dynamic(move || Ok(T::from(read(&tmp)?))) } } #[cfg(test)] mod test { use super::super::CredentialsError; use super::*; use crate::test_utils::lock_env; use std::io::Write; use tempfile::NamedTempFile; #[test] fn api_ergonomics() { let _tmp: Variable<String> = "".to_string().into(); let _tmp: Variable<String> = "".into(); let _tmp: Variable<u32> = 1.into(); } #[test] fn is_send() { fn _send<T: Send>(_t: T) {} let var = Variable::<i32, ()>::with_value(1); _send(var); } #[test] fn is_sync() { fn _sync<T: Sync>(_t: T) {} let var = Variable::<i32, ()>::with_value(1); _sync(var); } #[test] fn from_value() { let var = Variable::<i32, ()>::with_value(1); assert_eq!(var.resolve(), Ok(1)); assert_eq!(var.resolve(), Ok(1)); } #[test] fn dynamic() { fn xx() -> Result<i32, ()> { Ok(1) } let var = Variable::<i32, ()>::dynamic(|| Ok(1)); assert_eq!(var.resolve(), Ok(1)); assert_eq!(var.resolve(), Ok(1)); let var = Variable::<i32, ()>::dynamic(xx); assert_eq!(var.resolve(), Ok(1)); assert_eq!(var.resolve(), Ok(1)); } #[test] fn from_env_var() { let _guard = lock_env(); const VALUE: &str = "E6591691_C658_4C63_A7CF_C822D8FFC15B"; std::env::set_var(VALUE, VALUE); let var = Variable::<String, CredentialsError>::from_env_var(VALUE); assert_eq!(var.resolve(), Ok(VALUE.to_string())); assert_eq!(var.resolve(), Ok(VALUE.to_string())); std::env::remove_var(VALUE); assert_eq!(var.resolve().is_ok(), false); } #[test] fn from_empty_env_var() { let _guard = lock_env(); 
const VALUE: &str = "90E839DA_2254_4416_8295_AB82BD44D822"; std::env::set_var(VALUE, ""); let var = Variable::<String>::from_env_var(VALUE); assert_eq!(var.resolve().is_ok(), false); std::env::remove_var(VALUE); } #[test] fn from_text_file() -> Result<(), CredentialsError> { const VALUE: &str = "value"; let mut file = NamedTempFile::new()?; writeln!(file, "{}", VALUE)?; let var = Variable::<String>::from_text_file(file.path()); assert_eq!(var.resolve(), Ok(VALUE.to_string())); assert_eq!(var.resolve(), Ok(VALUE.to_string())); Ok(()) } #[test] fn from_binary_file() -> Result<(), CredentialsError> { const VALUE: &[u8] = b"value"; let mut file = NamedTempFile::new()?; file.write(VALUE)?; let var = Variable::<Vec<u8>>::from_binary_file(file.path()); assert_eq!(var.resolve().as_ref().map(|v| v.as_slice()), Ok(VALUE)); assert_eq!(var.resolve().as_ref().map(|v| v.as_slice()), Ok(VALUE)); Ok(()) } #[test] fn or() { let a = Variable::<i32, ()>::with_value(1); let b = Variable::<i32, ()>::with_value(2); assert_eq!(a.or(b).resolve(), Ok(1)); let a = Variable::<i32, VarError>::dynamic(|| Err(VarError::NotPresent)); let b = Variable::<i32, VarError>::with_value(2); assert_eq!(a.or(b).resolve(), Ok(2)); let a = Variable::<i32, VarError>::dynamic(|| Err(VarError::NotPresent)); let b = Variable::<i32, VarError>::dynamic(|| Err(VarError::NotPresent)); let c = Variable::<i32, VarError>::with_value(3); assert_eq!(a.or(b).or(c).resolve(), Ok(3)); let a = Variable::<i32, VarError>::dynamic(|| Err(VarError::NotPresent)); let b = Variable::<i32, VarError>::dynamic(|| Err(VarError::NotPresent)); assert_eq!(a.or(b).resolve(), Err(VarError::NotPresent)); } }
true
0405f76dbf5fc67b1a8cfa5a57178df7592e80f0
Rust
TehPers/BevyGame
/engine/crates/game_tiles/src/world/region.rs
UTF-8
3,991
3.125
3
[ "MIT" ]
permissive
use std::{convert::TryInto, num::TryFromIntError};

use crate::{Tile, TileRegionCoordinate, TileRegionPosition, TileRegionRect, TileWorldPosition};
use game_lib::{
    bevy::{math::Vec2, prelude::*},
    derive_more::{Display, Error},
};
use game_morton::Morton;

// TODO: implement Serialize/Deserialize, doesn't support const generics yet so
// array is a pain
// TODO: implement Reflect once support for arrays is added
/// A fixed 16x16 patch of the tile world. Tiles are kept in a flat array
/// indexed by the Morton (Z-order) encoding of the in-region position
/// (see `encode_pos`/`decode_pos`).
#[derive(Clone, Debug)]
pub struct Region {
    // Flat Morton-ordered storage; `None` means "no tile at this position".
    tiles: [Option<Tile>; Self::TILES],
}

impl Default for Region {
    /// An entirely empty region (every slot `None`).
    fn default() -> Self {
        Region {
            // Array-repeat works here because `Option<Tile>` can be copied
            // into every slot.
            tiles: [None; Self::TILES],
        }
    }
}

impl Region {
    /// Region width in tiles.
    pub const WIDTH: TileRegionCoordinate = 16;
    /// Region height in tiles.
    pub const HEIGHT: TileRegionCoordinate = 16;
    /// Total number of tile slots in a region.
    pub const TILES: usize = Self::WIDTH as usize * Self::HEIGHT as usize;
    /// The rectangle covering the whole region, anchored at the origin.
    pub const BOUNDS: TileRegionRect = TileRegionRect::new(
        TileRegionPosition::ZERO,
        TileRegionPosition::new(Self::WIDTH, Self::HEIGHT),
    );

    /// Borrow the tile slot at `position`, or `Err(OutOfBounds)` when the
    /// position lies outside the region.
    pub fn get(&self, position: TileRegionPosition) -> Result<&Option<Tile>, RegionGetError> {
        // `encode_pos` already rejects out-of-range positions, so the
        // `ok_or` below is a defensive fallback.
        self.tiles
            .get(Self::encode_pos(position)?)
            .ok_or(RegionGetError::OutOfBounds(position))
    }

    /// Mutable counterpart of [`Region::get`].
    pub fn get_mut(
        &mut self,
        position: TileRegionPosition,
    ) -> Result<&mut Option<Tile>, RegionGetError> {
        self.tiles
            .get_mut(Self::encode_pos(position)?)
            .ok_or(RegionGetError::OutOfBounds(position))
    }

    /// Translate an in-region position into a flat array index via Morton
    /// encoding; rejects positions outside WIDTH x HEIGHT.
    fn encode_pos(position: TileRegionPosition) -> Result<usize, RegionGetError> {
        if position.x >= Self::WIDTH || position.y >= Self::HEIGHT {
            Err(RegionGetError::OutOfBounds(position))
        } else {
            Ok(Morton::encode_2d(position.x, position.y).into())
        }
    }

    /// Inverse of `encode_pos`: recover the 2D position from a flat index.
    /// Fails only when `index` does not fit the Morton input type.
    fn decode_pos(index: usize) -> Result<TileRegionPosition, TryFromIntError> {
        let index = index.try_into()?;
        let (x, y) = Morton::decode_2d(index);
        Ok(TileRegionPosition::new(x, y))
    }

    /// Iterate over every tile slot with its position, in Morton order.
    pub fn iter(&self) -> impl Iterator<Item = (TileRegionPosition, &Option<Tile>)> {
        self.tiles
            .iter()
            .enumerate()
            // unwrap: indices range over 0..TILES, which `decode_pos` is
            // expected to always convert — TODO confirm the Morton input
            // type can hold TILES - 1.
            .map(|(index, tile)| (Region::decode_pos(index).unwrap(), tile))
    }

    /// Mutable counterpart of [`Region::iter`].
    pub fn iter_mut(&mut self) -> impl Iterator<Item = (TileRegionPosition, &mut Option<Tile>)> {
        self.tiles
            .iter_mut()
            .enumerate()
            .map(|(index, tile)| (Region::decode_pos(index).unwrap(), tile))
    }

    /// Iterate over the positions of `rect`, yielding each position together
    /// with the (possibly out-of-bounds) lookup result.
    pub fn iter_rect(
        &self,
        rect: TileRegionRect,
    ) -> impl Iterator<Item = (TileRegionPosition, Result<&Option<Tile>, RegionGetError>)> {
        rect.iter_positions()
            .map(move |position| (position, self.get(position)))
    }

    /// Iterate over the tile slots intersecting the floating-point box
    /// `bottom_left..top_right`. Negative coordinates are clamped to zero;
    /// the box is expanded outward (floor/ceil) to whole tiles.
    pub fn iter_intersecting(
        &self,
        bottom_left: Vec2,
        top_right: Vec2,
    ) -> impl Iterator<Item = (TileRegionPosition, Result<&Option<Tile>, RegionGetError>)> {
        let bottom_left: TileRegionPosition = bottom_left.max(Vec2::ZERO).floor().into();
        let top_right: TileRegionPosition = top_right.max(Vec2::ZERO).ceil().into();
        // Rect takes (origin, size), matching `Self::BOUNDS` above.
        self.iter_rect(TileRegionRect::new(bottom_left, top_right - bottom_left))
    }
}

/// Errors produced by region-local tile lookups.
#[derive(Clone, Debug, Display, Error)]
pub enum RegionGetError {
    #[display(fmt = "coordinates are out of bounds: {}", _0)]
    OutOfBounds(#[error(ignore)] TileRegionPosition),
    // NOTE(review): this variant is never constructed in this file —
    // presumably reserved for callers converting indices; confirm it is
    // still needed.
    #[display(fmt = "failed to encode tile coordinates into an index: {}", position)]
    IntConversion {
        position: TileRegionPosition,
        #[error(source)]
        source: TryFromIntError,
    },
}

/// World-coordinate counterpart of [`RegionGetError`]; not constructed in
/// this file — presumably used by world-level accessors. TODO confirm.
#[derive(Clone, Debug, Display, Error)]
pub enum GetTileError {
    #[display(fmt = "coordinates are out of bounds: {}", _0)]
    OutOfBounds(#[error(ignore)] TileWorldPosition),
    #[display(fmt = "failed to encode tile coordinates into an index: {}", position)]
    IntConversion {
        position: TileWorldPosition,
        #[error(source)]
        source: TryFromIntError,
    },
}
true