Dataset columns (one row per source file):

  blob_id             string   length 40 (fixed)
  language            string   1 value
  repo_name           string   length 5 to 140
  path                string   length 5 to 183
  src_encoding        string   6 values
  length_bytes        int64    12 to 5.32M
  score               float64  2.52 to 4.94
  int_score           int64    3 to 5
  detected_licenses   list     length 0 to 47
  license_type        string   2 values
  text                string   length 12 to 5.32M
  download_success    bool     1 class
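For reference, a minimal sketch of one row of this schema as a Rust struct. This is not part of the dataset itself; the field names simply mirror the columns above, and serde/serde_json (with the derive feature) are assumed for a JSON-lines style export.

use serde::Deserialize;

// Sketch only: assumes each row is serialized as one JSON object per line.
#[derive(Debug, Deserialize)]
struct Row {
    blob_id: String,              // 40-character blob hash
    language: String,             // a single value ("Rust") in this split
    repo_name: String,
    path: String,
    src_encoding: String,         // e.g. UTF-8
    length_bytes: u64,
    score: f64,
    int_score: i64,
    detected_licenses: Vec<String>,
    license_type: String,         // e.g. "permissive" or "no_license"
    text: String,                 // the full source file
    download_success: bool,
}

fn parse_row(line: &str) -> serde_json::Result<Row> {
    serde_json::from_str(line)
}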
blob_id: af40ce6b70f03b4b3079e57c11dac1bf197068d1
language: Rust
repo_name: lizhuohua/impulse-engine-rust-wasm
path: /src/rand.rs
src_encoding: UTF-8
length_bytes: 344
score: 2.75
int_score: 3
detected_licenses: []
license_type: no_license
text:
use rand::rngs::OsRng;
use rand::RngCore;

pub struct Rng {
    rng: OsRng,
}

impl Rng {
    pub fn new() -> Self {
        Self {
            rng: OsRng::new().unwrap(),
        }
    }

    pub fn gen_range(&mut self, min: i32, max: i32) -> i32 {
        let r = self.rng.next_u32();
        (r % (max - min + 1) as u32) as i32 + min
    }
}
download_success: true
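A note on the `gen_range` in the sample above: reducing `next_u32` with a modulo is slightly biased unless the range size divides 2^32, and it misbehaves when `max < min`. A minimal alternative sketch, assuming rand 0.8+ (where `OsRng` is a unit struct and `Rng::gen_range` accepts a range) rather than the older rand API this file targets:

use rand::rngs::OsRng;
use rand::Rng;

// Uniform sample in [min, max] without modulo bias; asserts the range is non-empty.
fn gen_range_inclusive(min: i32, max: i32) -> i32 {
    assert!(min <= max, "empty range");
    OsRng.gen_range(min..=max)
}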
blob_id: 4db2204267af3a6c4370065ae9cc850edc4f8b1e
language: Rust
repo_name: supernothing/clamav-rest
path: /src/main.rs
src_encoding: UTF-8
length_bytes: 2465
score: 2.515625
int_score: 3
detected_licenses: ["MIT"]
license_type: permissive
text:
use actix_web::{web, App, HttpServer, Responder, middleware, Error, FromRequest};
use clamav;
use clamav::{db, engine, scan_settings};
use std::sync::Arc;
use awmp;
use std::process::Command;
use serde_derive::Serialize;

#[derive(Clone)]
pub struct Scanner {
    scanner: Arc<engine::Engine>,
    settings: Arc<scan_settings::ScanSettings>,
}

#[derive(Serialize)]
struct ScanResult {
    malicious: bool,
    result: String,
}

async fn index() -> impl Responder {
    "Post file to scan"
}

async fn scan(
    scanner: web::Data<Scanner>,
    parts: awmp::Parts,
) -> Result<web::Json<ScanResult>, Error> {
    // Take the uploaded multipart field named "file"; panics if it is missing.
    let file = parts
        .files
        .into_inner()
        .into_iter()
        .filter(|(k, _)| k.as_str() == "file")
        .map(|(_, v)| v.unwrap())
        .next()
        .unwrap();
    // Persist the upload to /tmp so libclamav can scan it by path.
    let path = file.persist("/tmp").unwrap();

    let result = scanner
        .scanner
        .scan_file(path.to_str().unwrap(), &scanner.settings)
        .expect("this scan better work");

    let body = match result {
        engine::ScanResult::Virus(name) => ScanResult {
            malicious: true,
            result: name,
        },
        engine::ScanResult::Clean => ScanResult {
            malicious: false,
            result: "clean".to_string(),
        },
        engine::ScanResult::Whitelisted => ScanResult {
            malicious: false,
            result: "whitelisted".to_string(),
        },
    };
    Ok(web::Json(body))
}

#[actix_rt::main]
async fn main() -> std::io::Result<()> {
    std::env::set_var("RUST_LOG", "actix_web_v2=info");

    // update clamav signatures
    println!("Updating signatures...");
    let _output = Command::new("freshclam")
        .args(&["-F"])
        .output()
        .expect("failed to update sigs");
    println!("Done updating signatures.");

    // initialize clamav
    clamav::initialize().expect("initialize failed");
    let scanner = engine::Engine::new();
    scanner
        .load_databases(&db::default_directory())
        .expect("load failed");
    scanner.compile().expect("compile failed");
    let settings: scan_settings::ScanSettings = Default::default();
    let s = Scanner {
        scanner: Arc::new(scanner),
        settings: Arc::new(settings),
    };

    // start HTTP server
    HttpServer::new(move || {
        App::new()
            .data(s.clone())
            .data(awmp::Parts::configure(|cfg| cfg.with_file_limit(1024 * 1024 * 50)))
            .wrap(middleware::Logger::default())
            .service(
                web::resource("/file/scan")
                    .route(web::get().to(index))
                    .route(web::post().to(scan)),
            )
    })
    .bind("0.0.0.0:8000")?
    .run()
    .await
}
download_success: true
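A hypothetical client for the `/file/scan` route above, for illustration only: it assumes the reqwest crate (with its multipart feature) and tokio, neither of which is part of this repository, and the file path is a placeholder.

use reqwest::multipart::{Form, Part};

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Read a local file and send it as the multipart field named "file",
    // which is the field the scan handler looks for.
    let bytes = std::fs::read("sample.bin")?; // placeholder path
    let form = Form::new().part("file", Part::bytes(bytes).file_name("sample.bin"));
    let reply = reqwest::Client::new()
        .post("http://127.0.0.1:8000/file/scan")
        .multipart(form)
        .send()
        .await?
        .text()
        .await?;
    println!("{}", reply); // e.g. {"malicious":false,"result":"clean"}
    Ok(())
}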
blob_id: 7769aef6a912e07db3270eed38f048c8747b344d
language: Rust
repo_name: tomaka/send_wrapper
path: /src/lib.rs
src_encoding: UTF-8
length_bytes: 8133
score: 3.125
int_score: 3
detected_licenses: ["MIT", "Apache-2.0"]
license_type: permissive
text:
// Copyright 2017 Thomas Keh.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! This [Rust] library implements a wrapper type called `SendWrapper` which allows you to move around non-[`Send`] types
//! between threads, as long as you access the contained value only from within the original thread. You also have to
//! make sure that the wrapper is dropped from within the original thread. If any of these constraints is violated,
//! a panic occurs.
//!
//! The idea for this library was born in the context of a [`GTK+`]/[`gtk-rs`]-based application. [`GTK+`] applications
//! are strictly single-threaded. It is not allowed to call any [`GTK+`] method from a thread different to the main
//! thread. Consequently, all [`gtk-rs`] structs are non-[`Send`].
//!
//! Sometimes you still want to do some work in the background. It is possible to enqueue [`GTK+`] calls from there to be
//! executed in the main thread [using `Glib`]. This way you know that the [`gtk-rs`] structs involved are only
//! accessed in the main thread and will also be dropped there. This library makes it possible that [`gtk-rs`] structs
//! can leave the main thread at all, like required in the given use case.
//!
//! # Examples
//!
//! ```rust
//! use send_wrapper::SendWrapper;
//! use std::rc::Rc;
//! use std::thread;
//! use std::sync::mpsc::channel;
//!
//! // This import is important. It allows you to unwrap the value using deref(),
//! // deref_mut() or Deref coercion.
//! use std::ops::{Deref, DerefMut};
//!
//! // Rc is a non-Send type.
//! let value = Rc::new(42);
//!
//! // We now wrap the value with `SendWrapper` (value is moved inside).
//! let wrapped_value = SendWrapper::new(value);
//!
//! // A channel allows us to move the wrapped value between threads.
//! let (sender, receiver) = channel();
//!
//! let t = thread::spawn(move || {
//!
//!     // This would panic (because of dereferencing in the wrong thread):
//!     // let value = wrapped_value.deref();
//!
//!     // Move SendWrapper back to the main thread, so it can be dropped from there.
//!     // If you leave this out, the thread will panic because of dropping from the wrong thread.
//!     sender.send(wrapped_value).unwrap();
//!
//! });
//!
//! let wrapped_value = receiver.recv().unwrap();
//!
//! // Now you can use the value again.
//! let value = wrapped_value.deref();
//!
//! // alternatives for dereferencing:
//! // let value = *wrapped_value;
//! // let value: &NonSendType = &wrapped_value;
//!
//! // alternatives for mutable dereferencing (value and wrapped_value must be mutable too, then):
//! // let mut value = wrapped_value.deref_mut();
//! // let mut value = &mut *wrapped_value;
//! // let mut value: &mut NonSendType = &mut wrapped_value;
//! ```
//!
//! # License
//!
//! `send_wrapper` is distributed under the terms of both the MIT license and the Apache License (Version 2.0).
//!
//! See LICENSE-APACHE.txt and LICENSE-MIT.txt for details.
//!
//! [Rust]: https://www.rust-lang.org
//! [`Send`]: https://doc.rust-lang.org/std/marker/trait.Send.html
//! [`gtk-rs`]: http://gtk-rs.org/
//! [`GTK+`]: https://www.gtk.org/
//! [using `Glib`]: http://gtk-rs.org/docs/glib/source/fn.idle_add.html

use std::ops::{Drop, Deref, DerefMut};
use std::marker::Send;
use std::thread;
use std::thread::ThreadId;

const DEREF_ERROR: &'static str = "Dereferenced SendWrapper<T> variable from a thread different to the one it has been created with.";
const DROP_ERROR: &'static str = "Dropped SendWrapper<T> variable from a thread different to the one it has been created with.";

/// A wrapper which allows you to move around non-[`Send`]-types between threads, as long as you access the contained
/// value only from within the original thread and make sure that it is dropped from within the original thread.
pub struct SendWrapper<T> {
    data: *mut T,
    thread_id: ThreadId,
}

impl<T> SendWrapper<T> {
    /// Create a SendWrapper<T> wrapper around a value of type T.
    /// The wrapper takes ownership of the value.
    pub fn new(data: T) -> SendWrapper<T> {
        SendWrapper {
            data: Box::into_raw(Box::new(data)),
            thread_id: thread::current().id(),
        }
    }

    /// Returns whether the value can be safely accessed from within the current thread.
    pub fn valid(&self) -> bool {
        self.thread_id == thread::current().id()
    }

    /// Takes the value out of the SendWrapper.
    ///
    /// # Panics
    /// Panics if it is called from a different thread than the one the SendWrapper<T> instance has
    /// been created with.
    pub fn take(self) -> T {
        if !self.valid() {
            panic!(DEREF_ERROR);
        }
        let result = unsafe { Box::from_raw(self.data) };
        // Prevent drop() from being called, as it would drop self.data twice
        std::mem::forget(self);
        *result
    }
}

unsafe impl<T> Send for SendWrapper<T> {}

impl<T> Deref for SendWrapper<T> {
    type Target = T;

    /// Returns a reference to the contained value.
    ///
    /// # Panics
    /// Dereferencing panics if it is done from a different thread than the one the SendWrapper<T> instance has been
    /// created with.
    fn deref(&self) -> &T {
        if !self.valid() {
            panic!(DEREF_ERROR);
        }
        unsafe {
            // Access the value. We just checked that it is valid.
            &*self.data
        }
    }
}

impl<T> DerefMut for SendWrapper<T> {
    /// Returns a mutable reference to the contained value.
    ///
    /// # Panics
    /// Dereferencing panics if it is done from a different thread than the one the SendWrapper<T> instance has been
    /// created with.
    fn deref_mut(&mut self) -> &mut T {
        if !self.valid() {
            panic!(DEREF_ERROR);
        }
        unsafe {
            // Access the value. We just checked that it is valid.
            &mut *self.data
        }
    }
}

impl<T> Drop for SendWrapper<T> {
    /// Drops the contained value.
    ///
    /// # Panics
    /// Dropping panics if it is done from a different thread than the one the SendWrapper<T> instance has been
    /// created with. As an exception, there is no extra panic if the thread is already panicking/unwinding. This is
    /// because otherwise there would be double panics (usually resulting in an abort) when dereferencing from a wrong
    /// thread.
    fn drop(&mut self) {
        if self.valid() {
            unsafe {
                // Create a boxed value from the raw pointer. We just checked that the pointer is valid.
                // Box handles the dropping for us when _dropper goes out of scope.
                let _dropper = Box::from_raw(self.data);
            }
        } else {
            if !std::thread::panicking() {
                // Panic because of dropping from the wrong thread.
                // Only do this while not unwinding (unwinding could be caused by a deref from the wrong thread).
                panic!(DROP_ERROR);
            }
        }
    }
}

#[cfg(test)]
mod tests {
    use SendWrapper;
    use std::thread;
    use std::sync::mpsc::channel;
    use std::ops::Deref;
    use std::rc::Rc;

    #[test]
    fn test_deref() {
        let (sender, receiver) = channel();
        let w = SendWrapper::new(Rc::new(42));
        {
            let _x = w.deref();
        }
        let t = thread::spawn(move || {
            // Move SendWrapper back to the main thread, so it can be dropped from there.
            sender.send(w).unwrap();
        });
        let w2 = receiver.recv().unwrap();
        {
            let _x = w2.deref();
        }
        assert!(t.join().is_ok());
    }

    #[test]
    fn test_deref_panic() {
        let w = SendWrapper::new(Rc::new(42));
        let t = thread::spawn(move || {
            let _x = w.deref();
        });
        let join_result = t.join();
        assert!(join_result.is_err());
    }

    #[test]
    fn test_drop_panic() {
        let w = SendWrapper::new(Rc::new(42));
        let t = thread::spawn(move || {
            let _x = w;
        });
        let join_result = t.join();
        assert!(join_result.is_err());
    }

    #[test]
    fn test_valid() {
        let w = SendWrapper::new(Rc::new(42));
        assert!(w.valid());
        thread::spawn(move || {
            assert!(!w.valid());
        });
    }

    #[test]
    fn test_take() {
        let w = SendWrapper::new(Rc::new(42));
        let inner: Rc<usize> = w.take();
        assert_eq!(42, *inner);
    }

    #[test]
    fn test_take_panic() {
        let w = SendWrapper::new(Rc::new(42));
        let t = thread::spawn(move || {
            let _ = w.take();
        });
        assert!(t.join().is_err());
    }
}
download_success: true
blob_id: 27942eb8b2570141e5c893c0f58ff5dbf063da3b
language: Rust
repo_name: mnts26/aws-sdk-rust
path: /sdk/cognitosync/src/client.rs
src_encoding: UTF-8
length_bytes: 61143
score: 2.53125
int_score: 3
detected_licenses: ["Apache-2.0"]
license_type: permissive
text:
// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT. #[derive(Debug)] pub(crate) struct Handle< C = smithy_client::erase::DynConnector, M = aws_hyper::AwsMiddleware, R = smithy_client::retry::Standard, > { client: smithy_client::Client<C, M, R>, conf: crate::Config, } /// An ergonomic service client for `AWSCognitoSyncService`. /// /// This client allows ergonomic access to a `AWSCognitoSyncService`-shaped service. /// Each method corresponds to an endpoint defined in the service's Smithy model, /// and the request and response shapes are auto-generated from that same model. /// /// # Using a Client /// /// Once you have a client set up, you can access the service's endpoints /// by calling the appropriate method on [`Client`]. Each such method /// returns a request builder for that endpoint, with methods for setting /// the various fields of the request. Once your request is complete, use /// the `send` method to send the request. `send` returns a future, which /// you then have to `.await` to get the service's response. /// /// [builder pattern]: https://rust-lang.github.io/api-guidelines/type-safety.html#c-builder /// [SigV4-signed requests]: https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html #[derive(std::fmt::Debug)] pub struct Client< C = smithy_client::erase::DynConnector, M = aws_hyper::AwsMiddleware, R = smithy_client::retry::Standard, > { handle: std::sync::Arc<Handle<C, M, R>>, } impl<C, M, R> std::clone::Clone for Client<C, M, R> { fn clone(&self) -> Self { Self { handle: self.handle.clone(), } } } #[doc(inline)] pub use smithy_client::Builder; impl<C, M, R> From<smithy_client::Client<C, M, R>> for Client<C, M, R> { fn from(client: smithy_client::Client<C, M, R>) -> Self { Self::with_config(client, crate::Config::builder().build()) } } impl<C, M, R> Client<C, M, R> { pub fn with_config(client: smithy_client::Client<C, M, R>, conf: crate::Config) -> Self { Self { handle: std::sync::Arc::new(Handle { client, conf }), } } pub fn conf(&self) -> &crate::Config { &self.handle.conf } } impl<C, M, R> Client<C, M, R> where C: smithy_client::bounds::SmithyConnector, M: smithy_client::bounds::SmithyMiddleware<C>, R: smithy_client::retry::NewRequestPolicy, { pub fn bulk_publish(&self) -> fluent_builders::BulkPublish<C, M, R> { fluent_builders::BulkPublish::new(self.handle.clone()) } pub fn delete_dataset(&self) -> fluent_builders::DeleteDataset<C, M, R> { fluent_builders::DeleteDataset::new(self.handle.clone()) } pub fn describe_dataset(&self) -> fluent_builders::DescribeDataset<C, M, R> { fluent_builders::DescribeDataset::new(self.handle.clone()) } pub fn describe_identity_pool_usage( &self, ) -> fluent_builders::DescribeIdentityPoolUsage<C, M, R> { fluent_builders::DescribeIdentityPoolUsage::new(self.handle.clone()) } pub fn describe_identity_usage(&self) -> fluent_builders::DescribeIdentityUsage<C, M, R> { fluent_builders::DescribeIdentityUsage::new(self.handle.clone()) } pub fn get_bulk_publish_details(&self) -> fluent_builders::GetBulkPublishDetails<C, M, R> { fluent_builders::GetBulkPublishDetails::new(self.handle.clone()) } pub fn get_cognito_events(&self) -> fluent_builders::GetCognitoEvents<C, M, R> { fluent_builders::GetCognitoEvents::new(self.handle.clone()) } pub fn get_identity_pool_configuration( &self, ) -> fluent_builders::GetIdentityPoolConfiguration<C, M, R> { fluent_builders::GetIdentityPoolConfiguration::new(self.handle.clone()) } pub fn list_datasets(&self) -> fluent_builders::ListDatasets<C, M, R> { 
fluent_builders::ListDatasets::new(self.handle.clone()) } pub fn list_identity_pool_usage(&self) -> fluent_builders::ListIdentityPoolUsage<C, M, R> { fluent_builders::ListIdentityPoolUsage::new(self.handle.clone()) } pub fn list_records(&self) -> fluent_builders::ListRecords<C, M, R> { fluent_builders::ListRecords::new(self.handle.clone()) } pub fn register_device(&self) -> fluent_builders::RegisterDevice<C, M, R> { fluent_builders::RegisterDevice::new(self.handle.clone()) } pub fn set_cognito_events(&self) -> fluent_builders::SetCognitoEvents<C, M, R> { fluent_builders::SetCognitoEvents::new(self.handle.clone()) } pub fn set_identity_pool_configuration( &self, ) -> fluent_builders::SetIdentityPoolConfiguration<C, M, R> { fluent_builders::SetIdentityPoolConfiguration::new(self.handle.clone()) } pub fn subscribe_to_dataset(&self) -> fluent_builders::SubscribeToDataset<C, M, R> { fluent_builders::SubscribeToDataset::new(self.handle.clone()) } pub fn unsubscribe_from_dataset(&self) -> fluent_builders::UnsubscribeFromDataset<C, M, R> { fluent_builders::UnsubscribeFromDataset::new(self.handle.clone()) } pub fn update_records(&self) -> fluent_builders::UpdateRecords<C, M, R> { fluent_builders::UpdateRecords::new(self.handle.clone()) } } pub mod fluent_builders { #[derive(std::fmt::Debug)] pub struct BulkPublish< C = smithy_client::erase::DynConnector, M = aws_hyper::AwsMiddleware, R = smithy_client::retry::Standard, > { handle: std::sync::Arc<super::Handle<C, M, R>>, inner: crate::input::bulk_publish_input::Builder, } impl<C, M, R> BulkPublish<C, M, R> where C: smithy_client::bounds::SmithyConnector, M: smithy_client::bounds::SmithyMiddleware<C>, R: smithy_client::retry::NewRequestPolicy, { pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self { Self { handle, inner: Default::default(), } } pub async fn send( self, ) -> std::result::Result< crate::output::BulkPublishOutput, smithy_http::result::SdkError<crate::error::BulkPublishError>, > where R::Policy: smithy_client::bounds::SmithyRetryPolicy< crate::input::BulkPublishInputOperationOutputAlias, crate::output::BulkPublishOutput, crate::error::BulkPublishError, crate::input::BulkPublishInputOperationRetryAlias, >, { let input = self .inner .build() .map_err(|err| smithy_http::result::SdkError::ConstructionFailure(err.into()))?; let op = input .make_operation(&self.handle.conf) .map_err(|err| smithy_http::result::SdkError::ConstructionFailure(err.into()))?; self.handle.client.call(op).await } /// A name-spaced GUID (for example, /// us-east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon Cognito. GUID generation is /// unique within a region. 
pub fn identity_pool_id(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.identity_pool_id(inp); self } pub fn set_identity_pool_id( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.inner = self.inner.set_identity_pool_id(input); self } } #[derive(std::fmt::Debug)] pub struct DeleteDataset< C = smithy_client::erase::DynConnector, M = aws_hyper::AwsMiddleware, R = smithy_client::retry::Standard, > { handle: std::sync::Arc<super::Handle<C, M, R>>, inner: crate::input::delete_dataset_input::Builder, } impl<C, M, R> DeleteDataset<C, M, R> where C: smithy_client::bounds::SmithyConnector, M: smithy_client::bounds::SmithyMiddleware<C>, R: smithy_client::retry::NewRequestPolicy, { pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self { Self { handle, inner: Default::default(), } } pub async fn send( self, ) -> std::result::Result< crate::output::DeleteDatasetOutput, smithy_http::result::SdkError<crate::error::DeleteDatasetError>, > where R::Policy: smithy_client::bounds::SmithyRetryPolicy< crate::input::DeleteDatasetInputOperationOutputAlias, crate::output::DeleteDatasetOutput, crate::error::DeleteDatasetError, crate::input::DeleteDatasetInputOperationRetryAlias, >, { let input = self .inner .build() .map_err(|err| smithy_http::result::SdkError::ConstructionFailure(err.into()))?; let op = input .make_operation(&self.handle.conf) .map_err(|err| smithy_http::result::SdkError::ConstructionFailure(err.into()))?; self.handle.client.call(op).await } /// A name-spaced GUID (for example, /// us-east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon Cognito. GUID generation is /// unique within a region. pub fn identity_pool_id(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.identity_pool_id(inp); self } pub fn set_identity_pool_id( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.inner = self.inner.set_identity_pool_id(input); self } /// A name-spaced GUID (for example, /// us-east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon Cognito. GUID generation is /// unique within a region. pub fn identity_id(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.identity_id(inp); self } pub fn set_identity_id(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_identity_id(input); self } /// A string of up to 128 characters. /// Allowed characters are a-z, A-Z, 0-9, '_' (underscore), '-' (dash), and '.' /// (dot). 
pub fn dataset_name(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.dataset_name(inp); self } pub fn set_dataset_name(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_dataset_name(input); self } } #[derive(std::fmt::Debug)] pub struct DescribeDataset< C = smithy_client::erase::DynConnector, M = aws_hyper::AwsMiddleware, R = smithy_client::retry::Standard, > { handle: std::sync::Arc<super::Handle<C, M, R>>, inner: crate::input::describe_dataset_input::Builder, } impl<C, M, R> DescribeDataset<C, M, R> where C: smithy_client::bounds::SmithyConnector, M: smithy_client::bounds::SmithyMiddleware<C>, R: smithy_client::retry::NewRequestPolicy, { pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self { Self { handle, inner: Default::default(), } } pub async fn send( self, ) -> std::result::Result< crate::output::DescribeDatasetOutput, smithy_http::result::SdkError<crate::error::DescribeDatasetError>, > where R::Policy: smithy_client::bounds::SmithyRetryPolicy< crate::input::DescribeDatasetInputOperationOutputAlias, crate::output::DescribeDatasetOutput, crate::error::DescribeDatasetError, crate::input::DescribeDatasetInputOperationRetryAlias, >, { let input = self .inner .build() .map_err(|err| smithy_http::result::SdkError::ConstructionFailure(err.into()))?; let op = input .make_operation(&self.handle.conf) .map_err(|err| smithy_http::result::SdkError::ConstructionFailure(err.into()))?; self.handle.client.call(op).await } /// A name-spaced GUID (for example, /// us-east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon Cognito. GUID generation is /// unique within a region. pub fn identity_pool_id(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.identity_pool_id(inp); self } pub fn set_identity_pool_id( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.inner = self.inner.set_identity_pool_id(input); self } /// A name-spaced GUID (for example, /// us-east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon Cognito. GUID generation is /// unique within a region. pub fn identity_id(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.identity_id(inp); self } pub fn set_identity_id(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_identity_id(input); self } /// A string of up to 128 characters. /// Allowed characters are a-z, A-Z, 0-9, '_' (underscore), '-' (dash), and '.' /// (dot). 
pub fn dataset_name(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.dataset_name(inp); self } pub fn set_dataset_name(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_dataset_name(input); self } } #[derive(std::fmt::Debug)] pub struct DescribeIdentityPoolUsage< C = smithy_client::erase::DynConnector, M = aws_hyper::AwsMiddleware, R = smithy_client::retry::Standard, > { handle: std::sync::Arc<super::Handle<C, M, R>>, inner: crate::input::describe_identity_pool_usage_input::Builder, } impl<C, M, R> DescribeIdentityPoolUsage<C, M, R> where C: smithy_client::bounds::SmithyConnector, M: smithy_client::bounds::SmithyMiddleware<C>, R: smithy_client::retry::NewRequestPolicy, { pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self { Self { handle, inner: Default::default(), } } pub async fn send( self, ) -> std::result::Result< crate::output::DescribeIdentityPoolUsageOutput, smithy_http::result::SdkError<crate::error::DescribeIdentityPoolUsageError>, > where R::Policy: smithy_client::bounds::SmithyRetryPolicy< crate::input::DescribeIdentityPoolUsageInputOperationOutputAlias, crate::output::DescribeIdentityPoolUsageOutput, crate::error::DescribeIdentityPoolUsageError, crate::input::DescribeIdentityPoolUsageInputOperationRetryAlias, >, { let input = self .inner .build() .map_err(|err| smithy_http::result::SdkError::ConstructionFailure(err.into()))?; let op = input .make_operation(&self.handle.conf) .map_err(|err| smithy_http::result::SdkError::ConstructionFailure(err.into()))?; self.handle.client.call(op).await } /// A name-spaced GUID (for /// example, us-east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon Cognito. GUID /// generation is unique within a region. pub fn identity_pool_id(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.identity_pool_id(inp); self } pub fn set_identity_pool_id( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.inner = self.inner.set_identity_pool_id(input); self } } #[derive(std::fmt::Debug)] pub struct DescribeIdentityUsage< C = smithy_client::erase::DynConnector, M = aws_hyper::AwsMiddleware, R = smithy_client::retry::Standard, > { handle: std::sync::Arc<super::Handle<C, M, R>>, inner: crate::input::describe_identity_usage_input::Builder, } impl<C, M, R> DescribeIdentityUsage<C, M, R> where C: smithy_client::bounds::SmithyConnector, M: smithy_client::bounds::SmithyMiddleware<C>, R: smithy_client::retry::NewRequestPolicy, { pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self { Self { handle, inner: Default::default(), } } pub async fn send( self, ) -> std::result::Result< crate::output::DescribeIdentityUsageOutput, smithy_http::result::SdkError<crate::error::DescribeIdentityUsageError>, > where R::Policy: smithy_client::bounds::SmithyRetryPolicy< crate::input::DescribeIdentityUsageInputOperationOutputAlias, crate::output::DescribeIdentityUsageOutput, crate::error::DescribeIdentityUsageError, crate::input::DescribeIdentityUsageInputOperationRetryAlias, >, { let input = self .inner .build() .map_err(|err| smithy_http::result::SdkError::ConstructionFailure(err.into()))?; let op = input .make_operation(&self.handle.conf) .map_err(|err| smithy_http::result::SdkError::ConstructionFailure(err.into()))?; self.handle.client.call(op).await } /// A name-spaced GUID (for /// example, us-east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon Cognito. 
GUID /// generation is unique within a region. pub fn identity_pool_id(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.identity_pool_id(inp); self } pub fn set_identity_pool_id( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.inner = self.inner.set_identity_pool_id(input); self } /// A name-spaced GUID (for example, /// us-east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon Cognito. GUID generation is /// unique within a region. pub fn identity_id(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.identity_id(inp); self } pub fn set_identity_id(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_identity_id(input); self } } #[derive(std::fmt::Debug)] pub struct GetBulkPublishDetails< C = smithy_client::erase::DynConnector, M = aws_hyper::AwsMiddleware, R = smithy_client::retry::Standard, > { handle: std::sync::Arc<super::Handle<C, M, R>>, inner: crate::input::get_bulk_publish_details_input::Builder, } impl<C, M, R> GetBulkPublishDetails<C, M, R> where C: smithy_client::bounds::SmithyConnector, M: smithy_client::bounds::SmithyMiddleware<C>, R: smithy_client::retry::NewRequestPolicy, { pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self { Self { handle, inner: Default::default(), } } pub async fn send( self, ) -> std::result::Result< crate::output::GetBulkPublishDetailsOutput, smithy_http::result::SdkError<crate::error::GetBulkPublishDetailsError>, > where R::Policy: smithy_client::bounds::SmithyRetryPolicy< crate::input::GetBulkPublishDetailsInputOperationOutputAlias, crate::output::GetBulkPublishDetailsOutput, crate::error::GetBulkPublishDetailsError, crate::input::GetBulkPublishDetailsInputOperationRetryAlias, >, { let input = self .inner .build() .map_err(|err| smithy_http::result::SdkError::ConstructionFailure(err.into()))?; let op = input .make_operation(&self.handle.conf) .map_err(|err| smithy_http::result::SdkError::ConstructionFailure(err.into()))?; self.handle.client.call(op).await } /// A name-spaced GUID (for example, /// us-east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon Cognito. GUID generation is /// unique within a region. 
pub fn identity_pool_id(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.identity_pool_id(inp); self } pub fn set_identity_pool_id( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.inner = self.inner.set_identity_pool_id(input); self } } #[derive(std::fmt::Debug)] pub struct GetCognitoEvents< C = smithy_client::erase::DynConnector, M = aws_hyper::AwsMiddleware, R = smithy_client::retry::Standard, > { handle: std::sync::Arc<super::Handle<C, M, R>>, inner: crate::input::get_cognito_events_input::Builder, } impl<C, M, R> GetCognitoEvents<C, M, R> where C: smithy_client::bounds::SmithyConnector, M: smithy_client::bounds::SmithyMiddleware<C>, R: smithy_client::retry::NewRequestPolicy, { pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self { Self { handle, inner: Default::default(), } } pub async fn send( self, ) -> std::result::Result< crate::output::GetCognitoEventsOutput, smithy_http::result::SdkError<crate::error::GetCognitoEventsError>, > where R::Policy: smithy_client::bounds::SmithyRetryPolicy< crate::input::GetCognitoEventsInputOperationOutputAlias, crate::output::GetCognitoEventsOutput, crate::error::GetCognitoEventsError, crate::input::GetCognitoEventsInputOperationRetryAlias, >, { let input = self .inner .build() .map_err(|err| smithy_http::result::SdkError::ConstructionFailure(err.into()))?; let op = input .make_operation(&self.handle.conf) .map_err(|err| smithy_http::result::SdkError::ConstructionFailure(err.into()))?; self.handle.client.call(op).await } /// <p>The Cognito Identity Pool ID for the request</p> pub fn identity_pool_id(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.identity_pool_id(inp); self } pub fn set_identity_pool_id( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.inner = self.inner.set_identity_pool_id(input); self } } #[derive(std::fmt::Debug)] pub struct GetIdentityPoolConfiguration< C = smithy_client::erase::DynConnector, M = aws_hyper::AwsMiddleware, R = smithy_client::retry::Standard, > { handle: std::sync::Arc<super::Handle<C, M, R>>, inner: crate::input::get_identity_pool_configuration_input::Builder, } impl<C, M, R> GetIdentityPoolConfiguration<C, M, R> where C: smithy_client::bounds::SmithyConnector, M: smithy_client::bounds::SmithyMiddleware<C>, R: smithy_client::retry::NewRequestPolicy, { pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self { Self { handle, inner: Default::default(), } } pub async fn send( self, ) -> std::result::Result< crate::output::GetIdentityPoolConfigurationOutput, smithy_http::result::SdkError<crate::error::GetIdentityPoolConfigurationError>, > where R::Policy: smithy_client::bounds::SmithyRetryPolicy< crate::input::GetIdentityPoolConfigurationInputOperationOutputAlias, crate::output::GetIdentityPoolConfigurationOutput, crate::error::GetIdentityPoolConfigurationError, crate::input::GetIdentityPoolConfigurationInputOperationRetryAlias, >, { let input = self .inner .build() .map_err(|err| smithy_http::result::SdkError::ConstructionFailure(err.into()))?; let op = input .make_operation(&self.handle.conf) .map_err(|err| smithy_http::result::SdkError::ConstructionFailure(err.into()))?; self.handle.client.call(op).await } /// <p>A name-spaced GUID (for example, us-east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by /// Amazon Cognito. 
This is the ID of the pool for which to return a configuration.</p> pub fn identity_pool_id(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.identity_pool_id(inp); self } pub fn set_identity_pool_id( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.inner = self.inner.set_identity_pool_id(input); self } } #[derive(std::fmt::Debug)] pub struct ListDatasets< C = smithy_client::erase::DynConnector, M = aws_hyper::AwsMiddleware, R = smithy_client::retry::Standard, > { handle: std::sync::Arc<super::Handle<C, M, R>>, inner: crate::input::list_datasets_input::Builder, } impl<C, M, R> ListDatasets<C, M, R> where C: smithy_client::bounds::SmithyConnector, M: smithy_client::bounds::SmithyMiddleware<C>, R: smithy_client::retry::NewRequestPolicy, { pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self { Self { handle, inner: Default::default(), } } pub async fn send( self, ) -> std::result::Result< crate::output::ListDatasetsOutput, smithy_http::result::SdkError<crate::error::ListDatasetsError>, > where R::Policy: smithy_client::bounds::SmithyRetryPolicy< crate::input::ListDatasetsInputOperationOutputAlias, crate::output::ListDatasetsOutput, crate::error::ListDatasetsError, crate::input::ListDatasetsInputOperationRetryAlias, >, { let input = self .inner .build() .map_err(|err| smithy_http::result::SdkError::ConstructionFailure(err.into()))?; let op = input .make_operation(&self.handle.conf) .map_err(|err| smithy_http::result::SdkError::ConstructionFailure(err.into()))?; self.handle.client.call(op).await } /// A name-spaced GUID (for example, /// us-east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon Cognito. GUID generation is /// unique within a region. pub fn identity_pool_id(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.identity_pool_id(inp); self } pub fn set_identity_pool_id( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.inner = self.inner.set_identity_pool_id(input); self } /// A name-spaced GUID (for example, /// us-east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon Cognito. GUID generation is /// unique within a region. pub fn identity_id(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.identity_id(inp); self } pub fn set_identity_id(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_identity_id(input); self } /// A pagination token for obtaining the next /// page of results. pub fn next_token(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.next_token(inp); self } pub fn set_next_token(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_next_token(input); self } /// The maximum number of results to be /// returned. 
pub fn max_results(mut self, inp: i32) -> Self { self.inner = self.inner.max_results(inp); self } pub fn set_max_results(mut self, input: std::option::Option<i32>) -> Self { self.inner = self.inner.set_max_results(input); self } } #[derive(std::fmt::Debug)] pub struct ListIdentityPoolUsage< C = smithy_client::erase::DynConnector, M = aws_hyper::AwsMiddleware, R = smithy_client::retry::Standard, > { handle: std::sync::Arc<super::Handle<C, M, R>>, inner: crate::input::list_identity_pool_usage_input::Builder, } impl<C, M, R> ListIdentityPoolUsage<C, M, R> where C: smithy_client::bounds::SmithyConnector, M: smithy_client::bounds::SmithyMiddleware<C>, R: smithy_client::retry::NewRequestPolicy, { pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self { Self { handle, inner: Default::default(), } } pub async fn send( self, ) -> std::result::Result< crate::output::ListIdentityPoolUsageOutput, smithy_http::result::SdkError<crate::error::ListIdentityPoolUsageError>, > where R::Policy: smithy_client::bounds::SmithyRetryPolicy< crate::input::ListIdentityPoolUsageInputOperationOutputAlias, crate::output::ListIdentityPoolUsageOutput, crate::error::ListIdentityPoolUsageError, crate::input::ListIdentityPoolUsageInputOperationRetryAlias, >, { let input = self .inner .build() .map_err(|err| smithy_http::result::SdkError::ConstructionFailure(err.into()))?; let op = input .make_operation(&self.handle.conf) .map_err(|err| smithy_http::result::SdkError::ConstructionFailure(err.into()))?; self.handle.client.call(op).await } /// A pagination token for obtaining /// the next page of results. pub fn next_token(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.next_token(inp); self } pub fn set_next_token(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_next_token(input); self } /// The maximum number of results to /// be returned. pub fn max_results(mut self, inp: i32) -> Self { self.inner = self.inner.max_results(inp); self } pub fn set_max_results(mut self, input: std::option::Option<i32>) -> Self { self.inner = self.inner.set_max_results(input); self } } #[derive(std::fmt::Debug)] pub struct ListRecords< C = smithy_client::erase::DynConnector, M = aws_hyper::AwsMiddleware, R = smithy_client::retry::Standard, > { handle: std::sync::Arc<super::Handle<C, M, R>>, inner: crate::input::list_records_input::Builder, } impl<C, M, R> ListRecords<C, M, R> where C: smithy_client::bounds::SmithyConnector, M: smithy_client::bounds::SmithyMiddleware<C>, R: smithy_client::retry::NewRequestPolicy, { pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self { Self { handle, inner: Default::default(), } } pub async fn send( self, ) -> std::result::Result< crate::output::ListRecordsOutput, smithy_http::result::SdkError<crate::error::ListRecordsError>, > where R::Policy: smithy_client::bounds::SmithyRetryPolicy< crate::input::ListRecordsInputOperationOutputAlias, crate::output::ListRecordsOutput, crate::error::ListRecordsError, crate::input::ListRecordsInputOperationRetryAlias, >, { let input = self .inner .build() .map_err(|err| smithy_http::result::SdkError::ConstructionFailure(err.into()))?; let op = input .make_operation(&self.handle.conf) .map_err(|err| smithy_http::result::SdkError::ConstructionFailure(err.into()))?; self.handle.client.call(op).await } /// A name-spaced GUID (for example, /// us-east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon Cognito. GUID generation is /// unique within a region. 
pub fn identity_pool_id(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.identity_pool_id(inp); self } pub fn set_identity_pool_id( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.inner = self.inner.set_identity_pool_id(input); self } /// A name-spaced GUID (for example, /// us-east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon Cognito. GUID generation is /// unique within a region. pub fn identity_id(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.identity_id(inp); self } pub fn set_identity_id(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_identity_id(input); self } /// A string of up to 128 characters. Allowed /// characters are a-z, A-Z, 0-9, '_' (underscore), '-' (dash), and '.' (dot). pub fn dataset_name(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.dataset_name(inp); self } pub fn set_dataset_name(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_dataset_name(input); self } /// The last server sync count for this /// record. pub fn last_sync_count(mut self, inp: i64) -> Self { self.inner = self.inner.last_sync_count(inp); self } pub fn set_last_sync_count(mut self, input: std::option::Option<i64>) -> Self { self.inner = self.inner.set_last_sync_count(input); self } /// A pagination token for obtaining the next /// page of results. pub fn next_token(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.next_token(inp); self } pub fn set_next_token(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_next_token(input); self } /// The maximum number of results to be /// returned. pub fn max_results(mut self, inp: i32) -> Self { self.inner = self.inner.max_results(inp); self } pub fn set_max_results(mut self, input: std::option::Option<i32>) -> Self { self.inner = self.inner.set_max_results(input); self } /// A token containing a session ID, /// identity ID, and expiration. 
pub fn sync_session_token(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.sync_session_token(inp); self } pub fn set_sync_session_token( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.inner = self.inner.set_sync_session_token(input); self } } #[derive(std::fmt::Debug)] pub struct RegisterDevice< C = smithy_client::erase::DynConnector, M = aws_hyper::AwsMiddleware, R = smithy_client::retry::Standard, > { handle: std::sync::Arc<super::Handle<C, M, R>>, inner: crate::input::register_device_input::Builder, } impl<C, M, R> RegisterDevice<C, M, R> where C: smithy_client::bounds::SmithyConnector, M: smithy_client::bounds::SmithyMiddleware<C>, R: smithy_client::retry::NewRequestPolicy, { pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self { Self { handle, inner: Default::default(), } } pub async fn send( self, ) -> std::result::Result< crate::output::RegisterDeviceOutput, smithy_http::result::SdkError<crate::error::RegisterDeviceError>, > where R::Policy: smithy_client::bounds::SmithyRetryPolicy< crate::input::RegisterDeviceInputOperationOutputAlias, crate::output::RegisterDeviceOutput, crate::error::RegisterDeviceError, crate::input::RegisterDeviceInputOperationRetryAlias, >, { let input = self .inner .build() .map_err(|err| smithy_http::result::SdkError::ConstructionFailure(err.into()))?; let op = input .make_operation(&self.handle.conf) .map_err(|err| smithy_http::result::SdkError::ConstructionFailure(err.into()))?; self.handle.client.call(op).await } /// <p>A name-spaced GUID (for example, us-east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by /// Amazon Cognito. Here, the ID of the pool that the identity belongs to.</p> pub fn identity_pool_id(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.identity_pool_id(inp); self } pub fn set_identity_pool_id( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.inner = self.inner.set_identity_pool_id(input); self } /// <p>The unique ID for this identity.</p> pub fn identity_id(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.identity_id(inp); self } pub fn set_identity_id(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_identity_id(input); self } /// <p>The SNS platform type (e.g. 
GCM, SDM, APNS, APNS_SANDBOX).</p> pub fn platform(mut self, inp: crate::model::Platform) -> Self { self.inner = self.inner.platform(inp); self } pub fn set_platform(mut self, input: std::option::Option<crate::model::Platform>) -> Self { self.inner = self.inner.set_platform(input); self } /// <p>The push token.</p> pub fn token(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.token(inp); self } pub fn set_token(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_token(input); self } } #[derive(std::fmt::Debug)] pub struct SetCognitoEvents< C = smithy_client::erase::DynConnector, M = aws_hyper::AwsMiddleware, R = smithy_client::retry::Standard, > { handle: std::sync::Arc<super::Handle<C, M, R>>, inner: crate::input::set_cognito_events_input::Builder, } impl<C, M, R> SetCognitoEvents<C, M, R> where C: smithy_client::bounds::SmithyConnector, M: smithy_client::bounds::SmithyMiddleware<C>, R: smithy_client::retry::NewRequestPolicy, { pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self { Self { handle, inner: Default::default(), } } pub async fn send( self, ) -> std::result::Result< crate::output::SetCognitoEventsOutput, smithy_http::result::SdkError<crate::error::SetCognitoEventsError>, > where R::Policy: smithy_client::bounds::SmithyRetryPolicy< crate::input::SetCognitoEventsInputOperationOutputAlias, crate::output::SetCognitoEventsOutput, crate::error::SetCognitoEventsError, crate::input::SetCognitoEventsInputOperationRetryAlias, >, { let input = self .inner .build() .map_err(|err| smithy_http::result::SdkError::ConstructionFailure(err.into()))?; let op = input .make_operation(&self.handle.conf) .map_err(|err| smithy_http::result::SdkError::ConstructionFailure(err.into()))?; self.handle.client.call(op).await } /// <p>The Cognito Identity Pool to use when configuring Cognito Events</p> pub fn identity_pool_id(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.identity_pool_id(inp); self } pub fn set_identity_pool_id( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.inner = self.inner.set_identity_pool_id(input); self } /// Adds a key-value pair to `Events`. /// /// To override the contents of this collection use [`set_events`](Self::set_events). 
/// <p>The events to configure</p> pub fn events( mut self, k: impl Into<std::string::String>, v: impl Into<std::string::String>, ) -> Self { self.inner = self.inner.events(k, v); self } pub fn set_events( mut self, input: std::option::Option< std::collections::HashMap<std::string::String, std::string::String>, >, ) -> Self { self.inner = self.inner.set_events(input); self } } #[derive(std::fmt::Debug)] pub struct SetIdentityPoolConfiguration< C = smithy_client::erase::DynConnector, M = aws_hyper::AwsMiddleware, R = smithy_client::retry::Standard, > { handle: std::sync::Arc<super::Handle<C, M, R>>, inner: crate::input::set_identity_pool_configuration_input::Builder, } impl<C, M, R> SetIdentityPoolConfiguration<C, M, R> where C: smithy_client::bounds::SmithyConnector, M: smithy_client::bounds::SmithyMiddleware<C>, R: smithy_client::retry::NewRequestPolicy, { pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self { Self { handle, inner: Default::default(), } } pub async fn send( self, ) -> std::result::Result< crate::output::SetIdentityPoolConfigurationOutput, smithy_http::result::SdkError<crate::error::SetIdentityPoolConfigurationError>, > where R::Policy: smithy_client::bounds::SmithyRetryPolicy< crate::input::SetIdentityPoolConfigurationInputOperationOutputAlias, crate::output::SetIdentityPoolConfigurationOutput, crate::error::SetIdentityPoolConfigurationError, crate::input::SetIdentityPoolConfigurationInputOperationRetryAlias, >, { let input = self .inner .build() .map_err(|err| smithy_http::result::SdkError::ConstructionFailure(err.into()))?; let op = input .make_operation(&self.handle.conf) .map_err(|err| smithy_http::result::SdkError::ConstructionFailure(err.into()))?; self.handle.client.call(op).await } /// <p>A name-spaced GUID (for example, us-east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by /// Amazon Cognito. This is the ID of the pool to modify.</p> pub fn identity_pool_id(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.identity_pool_id(inp); self } pub fn set_identity_pool_id( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.inner = self.inner.set_identity_pool_id(input); self } /// <p>Options to apply to this identity pool for push synchronization.</p> pub fn push_sync(mut self, inp: crate::model::PushSync) -> Self { self.inner = self.inner.push_sync(inp); self } pub fn set_push_sync(mut self, input: std::option::Option<crate::model::PushSync>) -> Self { self.inner = self.inner.set_push_sync(input); self } /// Options to apply to this identity pool for Amazon Cognito streams. 
pub fn cognito_streams(mut self, inp: crate::model::CognitoStreams) -> Self { self.inner = self.inner.cognito_streams(inp); self } pub fn set_cognito_streams( mut self, input: std::option::Option<crate::model::CognitoStreams>, ) -> Self { self.inner = self.inner.set_cognito_streams(input); self } } #[derive(std::fmt::Debug)] pub struct SubscribeToDataset< C = smithy_client::erase::DynConnector, M = aws_hyper::AwsMiddleware, R = smithy_client::retry::Standard, > { handle: std::sync::Arc<super::Handle<C, M, R>>, inner: crate::input::subscribe_to_dataset_input::Builder, } impl<C, M, R> SubscribeToDataset<C, M, R> where C: smithy_client::bounds::SmithyConnector, M: smithy_client::bounds::SmithyMiddleware<C>, R: smithy_client::retry::NewRequestPolicy, { pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self { Self { handle, inner: Default::default(), } } pub async fn send( self, ) -> std::result::Result< crate::output::SubscribeToDatasetOutput, smithy_http::result::SdkError<crate::error::SubscribeToDatasetError>, > where R::Policy: smithy_client::bounds::SmithyRetryPolicy< crate::input::SubscribeToDatasetInputOperationOutputAlias, crate::output::SubscribeToDatasetOutput, crate::error::SubscribeToDatasetError, crate::input::SubscribeToDatasetInputOperationRetryAlias, >, { let input = self .inner .build() .map_err(|err| smithy_http::result::SdkError::ConstructionFailure(err.into()))?; let op = input .make_operation(&self.handle.conf) .map_err(|err| smithy_http::result::SdkError::ConstructionFailure(err.into()))?; self.handle.client.call(op).await } /// <p>A name-spaced GUID (for example, us-east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by /// Amazon Cognito. The ID of the pool to which the identity belongs.</p> pub fn identity_pool_id(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.identity_pool_id(inp); self } pub fn set_identity_pool_id( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.inner = self.inner.set_identity_pool_id(input); self } /// <p>Unique ID for this identity.</p> pub fn identity_id(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.identity_id(inp); self } pub fn set_identity_id(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_identity_id(input); self } /// <p>The name of the dataset to subcribe to.</p> pub fn dataset_name(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.dataset_name(inp); self } pub fn set_dataset_name(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_dataset_name(input); self } /// <p>The unique ID generated for this device by Cognito.</p> pub fn device_id(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.device_id(inp); self } pub fn set_device_id(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_device_id(input); self } } #[derive(std::fmt::Debug)] pub struct UnsubscribeFromDataset< C = smithy_client::erase::DynConnector, M = aws_hyper::AwsMiddleware, R = smithy_client::retry::Standard, > { handle: std::sync::Arc<super::Handle<C, M, R>>, inner: crate::input::unsubscribe_from_dataset_input::Builder, } impl<C, M, R> UnsubscribeFromDataset<C, M, R> where C: smithy_client::bounds::SmithyConnector, M: smithy_client::bounds::SmithyMiddleware<C>, R: smithy_client::retry::NewRequestPolicy, { pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, 
R>>) -> Self { Self { handle, inner: Default::default(), } } pub async fn send( self, ) -> std::result::Result< crate::output::UnsubscribeFromDatasetOutput, smithy_http::result::SdkError<crate::error::UnsubscribeFromDatasetError>, > where R::Policy: smithy_client::bounds::SmithyRetryPolicy< crate::input::UnsubscribeFromDatasetInputOperationOutputAlias, crate::output::UnsubscribeFromDatasetOutput, crate::error::UnsubscribeFromDatasetError, crate::input::UnsubscribeFromDatasetInputOperationRetryAlias, >, { let input = self .inner .build() .map_err(|err| smithy_http::result::SdkError::ConstructionFailure(err.into()))?; let op = input .make_operation(&self.handle.conf) .map_err(|err| smithy_http::result::SdkError::ConstructionFailure(err.into()))?; self.handle.client.call(op).await } /// <p>A name-spaced GUID (for example, us-east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by /// Amazon Cognito. The ID of the pool to which this identity belongs.</p> pub fn identity_pool_id(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.identity_pool_id(inp); self } pub fn set_identity_pool_id( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.inner = self.inner.set_identity_pool_id(input); self } /// <p>Unique ID for this identity.</p> pub fn identity_id(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.identity_id(inp); self } pub fn set_identity_id(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_identity_id(input); self } /// <p>The name of the dataset from which to unsubcribe.</p> pub fn dataset_name(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.dataset_name(inp); self } pub fn set_dataset_name(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_dataset_name(input); self } /// <p>The unique ID generated for this device by Cognito.</p> pub fn device_id(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.device_id(inp); self } pub fn set_device_id(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_device_id(input); self } } #[derive(std::fmt::Debug)] pub struct UpdateRecords< C = smithy_client::erase::DynConnector, M = aws_hyper::AwsMiddleware, R = smithy_client::retry::Standard, > { handle: std::sync::Arc<super::Handle<C, M, R>>, inner: crate::input::update_records_input::Builder, } impl<C, M, R> UpdateRecords<C, M, R> where C: smithy_client::bounds::SmithyConnector, M: smithy_client::bounds::SmithyMiddleware<C>, R: smithy_client::retry::NewRequestPolicy, { pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self { Self { handle, inner: Default::default(), } } pub async fn send( self, ) -> std::result::Result< crate::output::UpdateRecordsOutput, smithy_http::result::SdkError<crate::error::UpdateRecordsError>, > where R::Policy: smithy_client::bounds::SmithyRetryPolicy< crate::input::UpdateRecordsInputOperationOutputAlias, crate::output::UpdateRecordsOutput, crate::error::UpdateRecordsError, crate::input::UpdateRecordsInputOperationRetryAlias, >, { let input = self .inner .build() .map_err(|err| smithy_http::result::SdkError::ConstructionFailure(err.into()))?; let op = input .make_operation(&self.handle.conf) .map_err(|err| smithy_http::result::SdkError::ConstructionFailure(err.into()))?; self.handle.client.call(op).await } /// A name-spaced GUID (for example, /// 
us-east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon Cognito. GUID generation is /// unique within a region. pub fn identity_pool_id(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.identity_pool_id(inp); self } pub fn set_identity_pool_id( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.inner = self.inner.set_identity_pool_id(input); self } /// A name-spaced GUID (for example, /// us-east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon Cognito. GUID generation is /// unique within a region. pub fn identity_id(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.identity_id(inp); self } pub fn set_identity_id(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_identity_id(input); self } /// A string of up to 128 characters. /// Allowed characters are a-z, A-Z, 0-9, '_' (underscore), '-' (dash), and '.' /// (dot). pub fn dataset_name(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.dataset_name(inp); self } pub fn set_dataset_name(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_dataset_name(input); self } /// <p>The unique ID generated for this device by Cognito.</p> pub fn device_id(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.device_id(inp); self } pub fn set_device_id(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_device_id(input); self } /// Appends an item to `RecordPatches`. /// /// To override the contents of this collection use [`set_record_patches`](Self::set_record_patches). /// A list of patch /// operations. pub fn record_patches(mut self, inp: impl Into<crate::model::RecordPatch>) -> Self { self.inner = self.inner.record_patches(inp); self } pub fn set_record_patches( mut self, input: std::option::Option<std::vec::Vec<crate::model::RecordPatch>>, ) -> Self { self.inner = self.inner.set_record_patches(input); self } /// The SyncSessionToken returned by a /// previous call to ListRecords for this dataset and identity. pub fn sync_session_token(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.sync_session_token(inp); self } pub fn set_sync_session_token( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.inner = self.inner.set_sync_session_token(input); self } /// Intended to supply a device ID that /// will populate the lastModifiedBy field referenced in other methods. The /// ClientContext field is not yet implemented. 
pub fn client_context(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.client_context(inp); self } pub fn set_client_context( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.inner = self.inner.set_client_context(input); self } } } impl<C> Client<C, aws_hyper::AwsMiddleware, smithy_client::retry::Standard> { pub fn from_conf_conn(conf: crate::Config, conn: C) -> Self { let client = aws_hyper::Client::new(conn); Self { handle: std::sync::Arc::new(Handle { client, conf }), } } } impl Client< smithy_client::erase::DynConnector, aws_hyper::AwsMiddleware, smithy_client::retry::Standard, > { #[cfg(any(feature = "rustls", feature = "native-tls"))] pub fn new(config: &aws_types::config::Config) -> Self { Self::from_conf(config.into()) } #[cfg(any(feature = "rustls", feature = "native-tls"))] pub fn from_conf(conf: crate::Config) -> Self { let client = aws_hyper::Client::https(); Self { handle: std::sync::Arc::new(Handle { client, conf }), } } }
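// Illustrative usage sketch (assumptions: the usual `update_records()` entry point that
// smithy-rs generates on `Client`, plus placeholder identifiers; not part of the generated file):
//
//     let client = Client::new(&config);
//     let output = client
//         .update_records()
//         .identity_pool_id("us-east-1:EXAMPLE-POOL-ID")
//         .identity_id("us-east-1:EXAMPLE-IDENTITY-ID")
//         .dataset_name("example-dataset")
//         .sync_session_token("token-from-list-records")
//         .send()
//         .await?;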
true
bb0c78059bde1e4ab17152aae3c6c5d2ebe6f598
Rust
jgouly/keyboard-app
/src/state.rs
UTF-8
1,552
3.15625
3
[]
no_license
use matrix::Matrix; #[derive(Copy, Clone)] #[cfg_attr(test, derive(Debug))] #[derive(PartialEq)] pub enum KeyState { None, Pressed, Held, Released, } impl Default for KeyState { fn default() -> KeyState { KeyState::None } } pub fn process_key_state<RM, SM>(result: &RM, previous_result: &RM) -> SM where RM: Matrix<T = u32>, SM: Matrix<T = KeyState>, { let mut res = SM::new(); for c in 0..result.get_num_columns() { for r in 0..result.get_num_rows() { let state = if result.get(r, c) == 1 { if previous_result.get(r, c) == 1 { KeyState::Held } else { KeyState::Pressed } } else { if previous_result.get(r, c) == 1 { KeyState::Released } else { KeyState::None } }; res.put(r, c, state); } } res } #[cfg(test)] fn private_basic() { gen_matrix!(Matrix2x3u32, 2, 2, u32); gen_matrix!(Matrix2x3KS, 2, 2, KeyState); let r0 = Matrix2x3u32::new(); let r1 = Matrix2x3u32::new(); let result: Matrix2x3KS = process_key_state(&r0, &r1); assert_eq!(result.data, [KeyState::None; 4]); let r0 = Matrix2x3u32::new_with_data([1, 0, 1, 0]); let r1 = Matrix2x3u32::new_with_data([0, 1, 1, 0]); let result: Matrix2x3KS = process_key_state(&r0, &r1); assert_eq!( result.data, [ KeyState::Pressed, KeyState::Released, KeyState::Held, KeyState::None ] ); } #[cfg(test)] mod tests { use super::private_basic; #[test] fn basic() { private_basic(); } }
true
1ef4c9c54fe3b411b4a1da3e5d571306783eabd0
Rust
edutilos6666/RustProject
/Runner.rs
UTF-8
3,150
2.78125
3
[]
no_license
//use std::{i8,i16,i32,i64,u8,u16,u32,u64, isize, usize, f32, f64}; use std::io::stdin; mod OperatorsExample; mod DataTypesExample; mod DecisionMakingExample; mod LoopExample; mod StringExample; mod IOExample; mod ContainerDataTypesExample; mod FunctionExample; mod StructExample; mod TraitExample; mod EnumExample; mod SimpleMath; pub fn main() { run_OperatorsExample(); run_DataTypesExample(); run_DecisionMakingExample(); run_LoopExample(); run_StringExample(); run_IOExample(); run_ContainerDataTypesExample(); run_FunctionExample(); run_StructExample(); run_TraitExample(); run_EnumExample(); run_ModuleExample(); } fn run_OperatorsExample() { println!("<<OperatorsExample>>"); let mut ex = OperatorsExample::M {}; ex.test_ArithmeticOps(); ex.test_RelationalOps(); ex.test_LogicalOps(); ex.test_BitwiseOps(); ex.test_MathFunctions(); println!(""); } fn run_DataTypesExample() { println!("<<DataTypesExample>>"); let ex = DataTypesExample::M{}; ex.example1(); println!(); } fn run_DecisionMakingExample() { println!("<<DecisionMakingExample>>"); let ex = DecisionMakingExample::M{}; ex.example1(); ex.example2(); println!(); } fn run_LoopExample() { println!("<<LoopExample>>"); let ex = LoopExample::M{}; ex.example1(); ex.example2(); ex.example3(); println!(); } fn run_StringExample() { println!("<<StringExample>>"); let ex = StringExample::M{}; ex.example1(); println!(); } fn run_IOExample() { println!("<<IOExample>>"); let ex = IOExample::M {}; ex.example1(); ex.example2(); ex.example3(); println!(); } fn run_ContainerDataTypesExample() { println!("<<ContainerDataTypesExample>>"); let ex = ContainerDataTypesExample::M {}; ex.arrayExample(); ex.vectorExample(); ex.tupleExample(); println!(); } fn run_FunctionExample() { println!("<<FunctionExample>>"); let ex = FunctionExample::M {}; ex.example1(); ex.example2(); ex.example3(); println!(); } fn run_StructExample() { println!("<<StructExample>>"); let ex = StructExample::M {}; ex.example1(); ex.example2(); println!(); } fn run_TraitExample() { println!("<<TraitExample>>"); let ex = TraitExample::M {}; ex.example1(); println!(); } fn run_EnumExample() { println!("<<EnumExample>>"); let ex = EnumExample::M {}; ex.example1(); println!(); } /* I could not use `mod SimpleMath ` in files , which did not contain "main function" */ fn run_ModuleExample() { println!("<<ModuleExample>>"); println!("<<example1()>>"); let x:f64 = 10f64 ; let y: f64 = 3.0; let add_res = SimpleMath::add(x, y); let sub_res = SimpleMath::sub(x,y); let mult_res = SimpleMath::mult(x, y); let div_res = SimpleMath::div(x,y); println!("<<SimpleMath>>"); println!("x = {} and y = {}", x, y); println!("x + y = {}", add_res); println!("x - y = {}", sub_res); println!("x * y = {}", mult_res); println!("x / y = {}", div_res); println!(); }
true
eb08863daff76ff530a7fd846e4b94cc41906cae
Go
frankegoesdown/LeetCode-in-Go
/Algorithms/0523.continuous-subarray-sum/continuous-subarray-sum_test.go
UTF-8
803
2.65625
3
[ "MIT" ]
permissive
package problem0523 import ( "fmt" "testing" "github.com/stretchr/testify/assert" ) // tcs is testcase slice var tcs = []struct { nums []int k int ans bool }{ { []int{1, 2}, 4, false, }, { []int{0, 0}, 0, true, }, { []int{23, 2, 4, 6, 7}, 6, true, }, { []int{1, 2, 3, 4}, 5, true, }, { []int{23, 2, 6, 4, 7}, 6, true, }, { []int{23, 2, 6, 4, 7}, 42, true, }, // 可以有多个 testcase } func Test_checkSubarraySum(t *testing.T) { ast := assert.New(t) for _, tc := range tcs { fmt.Printf("~~%v~~\n", tc) ast.Equal(tc.ans, checkSubarraySum(tc.nums, tc.k), "输入:%v", tc) } } func Benchmark_checkSubarraySum(b *testing.B) { for i := 0; i < b.N; i++ { for _, tc := range tcs { checkSubarraySum(tc.nums, tc.k) } } }
true
bcc35c7b2ebcae4c66e18454420869640956e64d
Rust
jodal/garmon
/src/main.rs
UTF-8
603
2.546875
3
[]
no_license
use std::{thread, time::Duration}; use garmon::components::{hcsr04::HcSr04, led::Led}; use gpio_cdev::Chip; fn main() -> Result<(), Box<dyn std::error::Error>> { println!("{}", garmon::PROGRAM_NAME); let mut chip = Chip::new("/dev/gpiochip0")?; let led = Led::new(chip.get_line(16)?)?; let hc_sr04 = HcSr04::new(chip.get_line(5)?, chip.get_line(6)?)?; loop { let distance = hc_sr04.measure_distance_in_cm()?; if distance < 20.0 { led.on()?; } else { led.off()?; } thread::sleep(Duration::from_millis(100)); } }
true
0294077176e79c830db2b35de0aa751117224c21
Rust
beningodfrey4/rusting
/references/src/main.rs
UTF-8
1,062
3.609375
4
[ "MIT" ]
permissive
fn main() { let l = calculate_length(&String::from("test")); println!("{}", l); let mut x = String::from("test"); change(&mut x); // no ownership transferred to change, so no move and drop, but a pointer to this reference is created locally. let _y = &x; let _z = &x; //allowed as _y and _z are immutable //let _a = &mut x; //cannot mix mutable and immutable references let mut s = String::from("this"); let _t = &mut s; //let _u = &mut s; //can't have multiple mutable borrows in the same scope let &mut b = returning_references(); let a: &str = using_slices(&b); } fn calculate_length(s: &String) -> usize { return s.len(); } fn change(x: &mut String) { x.push_str(" this"); } fn returning_references() -> &mut String { return &mut String::from("this is why I need rust"); //this is an error as ownership is not transferred } //to caller as it is a reference but the string is dropped as the scope of the string is over fn using_slices(b: &str) -> &str { return &b[..]; }
true
99862d6a8397306e20cef775dd0b5f30a71115a4
Rust
jamesmarva/Head-First-Rust
/ch21/s2/d1/src/main.rs
UTF-8
217
3.078125
3
[]
no_license
fn main() { } fn compare_option<T>(first: Option<T>, second: Option<T>) -> bool { match (first, second) { (Some(..), Some(..)) => true, (None, None) => true, _ => false, } }
true
38e48b86f73a302a718853368ae53ad05090b7f7
Rust
Brent-A/AdventOfCode2019
/day/02/intcode/src/main.rs
UTF-8
5,587
3.0625
3
[ "MIT" ]
permissive
#[cfg(test)] mod test { use super::*; #[test] fn example1() { let mut program = [1,0,0,0,99]; execute(&mut program); assert_eq!([2,0,0,0,99], program); } #[test] fn example2() { let mut program = [2,3,0,3,99]; execute(&mut program); assert_eq!([2,3,0,6,99], program); } #[test] fn example3() { let mut program = [2,4,4,5,99,0]; execute(&mut program); assert_eq!([2,4,4,5,99,9801], program); } #[test] fn example4() { let mut program = [1,1,1,4,99,5,6,0,99]; execute(&mut program); assert_eq!([30,1,1,4,2,5,6,0,99], program); } } use std::fs::File; use std::io::prelude::*; use std::io::BufReader; use std::convert::TryInto; #[macro_use] extern crate num_derive; use num_traits::FromPrimitive; type Integer = u32; type Memory = [Integer]; #[derive(Copy, Clone, Debug, PartialEq)] struct Address(usize); #[derive(Copy, Clone, Debug, PartialEq)] struct Value(Integer); #[derive(Debug, PartialEq)] enum Instruction { Add { arg1: Address, arg2: Address, out: Address}, Mult { arg1: Address, arg2: Address, out: Address}, Terminate, } #[derive(FromPrimitive)] enum InstructionCode { Add = 1, Mult = 2, Terminate = 99 } #[derive(Debug)] struct InvalidInstructionInfo { pub invalid_instruction: Value, pub instruction_location: Address, } #[derive(Debug)] struct InvalidAddressInfo { pub invalid_address: Address, pub address_location: Address, } #[derive(Debug)] enum Error { InvalidInstruction { instruction_value: Value, instruction_location: Address }, InvalidAddress { invalid_address: Address, address_location: Address }, AddressOutOfRange(Address), } struct Machine<'a> { memory: &'a mut Memory, ip: Address, } impl Machine<'_> { fn pop_address(&mut self) -> Result<Address, Error> { let a = self.read_address(self.ip)?; self.ip.0 += 1; Ok(a) } fn pop_value(&mut self) -> Result<Value, Error> { let v = Value(self.memory[self.ip.0]); self.ip.0 += 1; Ok(v) } fn pop_instruction_code(&mut self) -> Result<InstructionCode, Error> { let numeric_value = self.memory[self.ip.0]; match FromPrimitive::from_u64(numeric_value.try_into().unwrap()) { Option::Some(x) => { self.ip.0 += 1; return Ok(x); }, Option::None => return Err(Error::InvalidInstruction { instruction_value: Value(numeric_value), instruction_location: self.ip, }) } } fn pop_instruction(&mut self) -> Result<Instruction, Error> { match self.pop_instruction_code()? 
{ InstructionCode::Add => Ok(Instruction::Add{ arg1: self.pop_address()?, arg2: self.pop_address()?, out: self.pop_address()?}), InstructionCode::Mult => Ok(Instruction::Mult{ arg1: self.pop_address()?, arg2: self.pop_address()?, out: self.pop_address()?}), InstructionCode::Terminate => Ok(Instruction::Terminate), } } fn read_value(&self, address: Address) -> Result<Value, Error> { Ok(Value(self.memory[address.0])) } fn read_address(&self, address: Address) -> Result<Address, Error> { let a = Address(self.memory[address.0].try_into().unwrap()); if a.0 < 0 || a.0 >= self.memory.len() { return Err(Error::InvalidAddress { invalid_address: a, address_location: address, }) } Ok(a) } fn set_value(&mut self, address: Address, value: Value) -> Result<(), Error> { self.memory[address.0] = value.0; Ok(()) } fn execute(&mut self, instruction: Instruction) -> Result<(), Error> { match instruction { Instruction::Add { arg1: i1, arg2: i2, out: o } => { self.set_value(o, Value(self.read_value(i1)?.0 + self.read_value(i2)?.0))?; }, Instruction::Mult { arg1: i1, arg2: i2, out: o } => { self.set_value(o, Value(self.read_value(i1)?.0 * self.read_value(i2)?.0))?; }, Instruction::Terminate => { panic!("Terminate instruction can't be executed"); } } Ok(()) } } fn execute(program: &mut [u32]) { let mut m = Machine { memory: program, ip: Address(0) }; loop { let i = m.pop_instruction().unwrap(); println!("{:?}", i); if (i == Instruction::Terminate) { break; } m.execute(i); } } fn main() { let file = std::fs::read_to_string("input.txt").unwrap(); //let mut file = File::open("input.txt").unwrap().read_to_string(); //let mut buf_reader = BufReader::new(file); //let program = buf_reader.read_to_string().unwrap() // .split(","); let search = 19690720; let original : Vec<u32> = file.split(",").map(|x| x.parse::<u32>().unwrap()).collect(); for noun in 0..original.len() { for verb in 0..original.len() { let mut program = original.clone(); program[1] = noun.try_into().unwrap(); program[2] = verb.try_into().unwrap(); execute(&mut program); let output = program[0]; if output == search { println!("noun: {} verb: {} output: {}", noun, verb, output); println!("result: {}", 100 * noun + verb); } } } }
true
097a15a66bdf7d7e3f95fd90d0a88b367c638a56
Rust
andrewarrow/tinted_paradise
/src/server.rs
UTF-8
1,050
2.796875
3
[]
no_license
use std::net::{TcpStream}; use std::io::Write; use std::io::Read; use std::str; use std::collections::HashMap; use auth; #[derive(Debug)] pub struct Paradise { cstream: TcpStream, map: HashMap<String, fn()> } impl Paradise { pub fn new(stream: TcpStream, map: &HashMap<String, fn()>) -> Paradise { Paradise {cstream: stream, map: map.clone()} } pub fn start(&mut self) { self.write_message(220, "Welcome to Paradise"); loop { let mut buffer = [0; 100]; let chars = self.cstream.read(&mut buffer).unwrap()-2; println!("{}", chars); let line = str::from_utf8(&buffer[0..chars]).unwrap(); let v: Vec<&str> = line.split_terminator(' ').collect(); println!("{:?}", v); let command = v[0]; let param = v[1]; //self.write_message(331, "User name ok, password required"); self.map[command](); } } pub fn write_message(&mut self, code: i32, message: &str) { let foo = format!("{} {}\r\n", code, message); let _ = self.cstream.write(foo.as_bytes()); } }
true
c4bbe7afe953723cb5b676c7c691d331a3916677
Rust
TianyiShi2001/nom-pdb
/src/title_section/expdta.rs
UTF-8
1,703
2.796875
3
[ "MIT" ]
permissive
// Copyright (c) 2020 Tianyi Shi // // This software is released under the MIT License. // https://opensource.org/licenses/MIT //! Parses EXPDTA records which is a continuation type of record which may span multi-lines. //! Record contains list of `;` seperated experimental techniques. If seuccesfull returns //! [Record](../ast/types/enum.Record.html) variant containing //! [ExperimentalTechniques](../ast/types/struct.Experimental.html) //! //! # Record structure //! //! | COLUMNS | DATA TYPE | FIELD | DEFINITION | //! |---------|---------------|--------------|-------------------------------------------| //! | 1 - 6 | Record name | EXPDTA | | //! | 9 - 10 | Continuation | continuation | Allows concatenation of multiple records. | //! | 11 - 79 | SList | technique | The experimental technique(s) with | //! | | | optional comment desc | use crate::common::parser::{parse_multiline_list, FieldParser}; use crate::types::*; pub struct ExperimentalTechniquesParser; impl FieldParser for ExperimentalTechniquesParser { type Output = Vec<ExperimentalTechnique>; fn parse(inp: &[u8]) -> nom::IResult<&[u8], Vec<ExperimentalTechnique>> { let (inp, techniques_as_str) = parse_multiline_list(inp)?; let techniques: Vec<ExperimentalTechnique> = techniques_as_str .into_iter() .map(|s| { s.parse::<ExperimentalTechnique>() .expect("Failed to parse experimental techniques") }) .collect(); Ok((inp, techniques)) } }
true
3cfcc5ab3279c718ecbe39298a10f1419c3bdd41
Rust
hexagram30/eco
/examples/tile.rs
UTF-8
1,041
2.84375
3
[ "Apache-2.0" ]
permissive
use hxgm30eco::tile::{NormalDistTile, TileOptions}; pub fn main() { let opts = TileOptions { parent_x: 108, parent_y: 54, width: 10, height: 10, max_value: 10.0, min_value: 0.0, mean: 5.0, std_dev: 2.0, }; let t = NormalDistTile::new(opts); println!(" {:?}", t.subtiles); println!("{}", t.get(0, 0)); println!("{}", t.get(1, 0)); println!("{}", t.get(2, 0)); println!("{}", t.get(3, 0)); println!("{}", t.get(0, 5)); println!("{}", t.get(1, 5)); println!("{}", t.get(2, 5)); println!("{}", t.get(3, 5)); println!("{}", t.get((t.width - 4) as u8, (t.height - 1) as u8)); println!("{}", t.get((t.width - 3) as u8, (t.height - 1) as u8)); println!("{}", t.get((t.width - 2) as u8, (t.height - 1) as u8)); println!("{}", t.get((t.width - 1) as u8, (t.height - 1) as u8)); } // 0 1 2 3 4 5 6 7 8 9 // 10 11 12 13 14 15 16 17 18 19 // 20 21 22 23 24 25 26 27 28 29 // ... // 90 91 92 93 94 95 96 97 98 99
true
f8eeb63335a1de30ce890fc839f61a6ba0af5605
Rust
EFanZh/LeetCode
/src/problem_0557_reverse_words_in_a_string_iii/iterative.rs
UTF-8
722
2.828125
3
[]
no_license
pub struct Solution; // ------------------------------------------------------ snip ------------------------------------------------------ // impl Solution { pub fn reverse_words(s: String) -> String { let mut s = s.into_bytes(); s.split_mut(|&c| c == b' ').for_each(<[_]>::reverse); String::from_utf8(s).unwrap() } } // ------------------------------------------------------ snip ------------------------------------------------------ // impl super::Solution for Solution { fn reverse_words(s: String) -> String { Self::reverse_words(s) } } #[cfg(test)] mod tests { #[test] fn test_solution() { super::super::tests::run::<super::Solution>(); } }
true
51fb42a207695c391ed0070e498aae06d0f5fe58
Rust
menski/foobar
/src/lib.rs
UTF-8
1,686
2.609375
3
[ "MIT" ]
permissive
#[macro_use] extern crate derive_builder; #[macro_use] extern crate error_chain; #[macro_use] extern crate serde_derive; extern crate serde; extern crate serde_json; extern crate chrono; pub mod errors; pub mod protocol; pub mod widgets; use errors::*; use std::fmt; use std::thread::sleep; use std::time::Duration; pub trait ToBlock { fn to_block(&self) -> Result<protocol::Block>; } pub struct Status { refresh: Duration, blocks: Vec<Box<ToBlock>>, } impl Default for Status { fn default() -> Status { Status { refresh: Duration::from_secs(1), blocks: Vec::new(), } } } impl Status { pub fn new(refresh: Duration) -> Status { Status { refresh, blocks: Vec::new(), } } pub fn add(&mut self, block: Box<ToBlock>) -> &mut Self { let mut new = self; new.blocks.push(block); new } pub fn header(&self) -> Result<protocol::Header> { Ok(protocol::HeaderBuilder::default().version(1).build()?) } pub fn run(&self) -> Result<()> { println!("{}", self.header()?); println!("["); loop { println!("{},", self); sleep(self.refresh); } } } impl fmt::Display for Status { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "[")?; for (idx, block) in self.blocks.iter().enumerate() { let block = block.to_block().map_err(|_| fmt::Error)?; if idx == 0 { write!(f, "{}", block)?; } else { write!(f, ",{}", block)?; } } write!(f, "]") } }
true
a38d676a7b2ad5721cb7749929be637296dbd5b9
Rust
tokio-rs/axum
/examples/tracing-aka-logging/src/main.rs
UTF-8
3,475
2.796875
3
[]
no_license
//! Run with //! //! ```not_rust //! cargo run -p example-tracing-aka-logging //! ``` use axum::{ body::Bytes, extract::MatchedPath, http::{HeaderMap, Request}, response::{Html, Response}, routing::get, Router, }; use std::time::Duration; use tokio::net::TcpListener; use tower_http::{classify::ServerErrorsFailureClass, trace::TraceLayer}; use tracing::{info_span, Span}; use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt}; #[tokio::main] async fn main() { tracing_subscriber::registry() .with( tracing_subscriber::EnvFilter::try_from_default_env().unwrap_or_else(|_| { // axum logs rejections from built-in extractors with the `axum::rejection` // target, at `TRACE` level. `axum::rejection=trace` enables showing those events "example_tracing_aka_logging=debug,tower_http=debug,axum::rejection=trace".into() }), ) .with(tracing_subscriber::fmt::layer()) .init(); // build our application with a route let app = Router::new() .route("/", get(handler)) // `TraceLayer` is provided by tower-http so you have to add that as a dependency. // It provides good defaults but is also very customizable. // // See https://docs.rs/tower-http/0.1.1/tower_http/trace/index.html for more details. // // If you want to customize the behavior using closures here is how. .layer( TraceLayer::new_for_http() .make_span_with(|request: &Request<_>| { // Log the matched route's path (with placeholders not filled in). // Use request.uri() or OriginalUri if you want the real path. let matched_path = request .extensions() .get::<MatchedPath>() .map(MatchedPath::as_str); info_span!( "http_request", method = ?request.method(), matched_path, some_other_field = tracing::field::Empty, ) }) .on_request(|_request: &Request<_>, _span: &Span| { // You can use `_span.record("some_other_field", value)` in one of these // closures to attach a value to the initially empty field in the info_span // created above. }) .on_response(|_response: &Response, _latency: Duration, _span: &Span| { // ... }) .on_body_chunk(|_chunk: &Bytes, _latency: Duration, _span: &Span| { // ... }) .on_eos( |_trailers: Option<&HeaderMap>, _stream_duration: Duration, _span: &Span| { // ... }, ) .on_failure( |_error: ServerErrorsFailureClass, _latency: Duration, _span: &Span| { // ... }, ), ); // run it let listener = TcpListener::bind("127.0.0.1:3000").await.unwrap(); tracing::debug!("listening on {}", listener.local_addr().unwrap()); axum::serve(listener, app).await.unwrap(); } async fn handler() -> Html<&'static str> { Html("<h1>Hello, World!</h1>") }
true
82f800c83c4288cde773aff2d495972a6906697c
Rust
sammyne/mastering-rustls
/mutual-auth/client/src/main.rs
UTF-8
2,330
2.765625
3
[]
no_license
use std::fs; use std::io::{self, Read, Write}; use std::net::TcpStream; use std::sync::Arc; use rustls::{self, Session}; fn read_certs(path: &str) -> Result<Vec<rustls::Certificate>, String> { let data = match fs::File::open(path) { Ok(v) => v, Err(err) => return Err(err.to_string()), }; let mut reader = io::BufReader::new(data); match rustls::internal::pemfile::certs(&mut reader) { Err(_) => Err("failed to read out cert".to_string()), Ok(certs) => match certs.len() { 0 => return Err("no cert".to_string()), _ => Ok(certs), }, } } // @dev the header in PEM block of key must be "BEGIN RSA PRIVATE KEY" fn read_private_key(path: &str) -> Result<rustls::PrivateKey, String> { let key_pem = match fs::File::open(path) { Ok(v) => v, Err(err) => return Err(err.to_string()), }; let mut reader = io::BufReader::new(key_pem); let keys = match rustls::internal::pemfile::rsa_private_keys(&mut reader) { Ok(keys) => keys, Err(_) => return Err("failed to read key".to_string()), }; Ok(keys[0].clone()) } fn main() { const CA_CERT_PATH: &str = "../../pki/ca.cert"; const CLIENT_CERT_PATH: &str = "../../pki/client.cert"; const KEY_PATH: &str = "../../pki/client.key"; let ca_certs = read_certs(CA_CERT_PATH).unwrap(); let client_certs = read_certs(CLIENT_CERT_PATH).unwrap(); let key = read_private_key(KEY_PATH).unwrap(); let config = { let mut c = rustls::ClientConfig::new(); c.root_store.add(&ca_certs[0]).unwrap(); c.set_single_client_cert(client_certs, key); Arc::new(c) }; let domain_name = webpki::DNSNameRef::try_from_ascii_str("localhost").unwrap(); let mut session = rustls::ClientSession::new(&config, domain_name); let mut socket = TcpStream::connect("localhost:4433").unwrap(); let mut client = rustls::Stream::new(&mut session, &mut socket); client.write(b"hello world").unwrap(); client.flush().unwrap(); let ciphersuite = client.sess.get_negotiated_ciphersuite().unwrap(); println!("Current ciphersuite: {:?}", ciphersuite.suite); let mut plaintext = Vec::new(); client.read_to_end(&mut plaintext).unwrap(); io::stdout().write_all(&plaintext).unwrap(); }
true
cd25ca9508da25f4184e9cac284d7eb0dd35dea7
Rust
jb-abbadie/matasano_rust
/src/challenge.rs
UTF-8
4,478
2.9375
3
[]
no_license
extern crate base64; extern crate hex; use crypto; use std::f64; use std::fs::File; use std::io::prelude::*; pub fn challenge_1() { let input = "49276d206b696c6c696e6720796f757220627261696e206c696b65206120706f69736f6e6f7573206d757368726f6f6d"; let hex_string = hex::decode(input).expect("ok"); let b64_string = base64::encode(&hex_string); println!("{} {:?}", input, b64_string); } pub fn challenge_2() { let input1 = hex::decode("1c0111001f010100061a024b53535009181c").unwrap(); let input2 = hex::decode("686974207468652062756c6c277320657965").unwrap(); let output = crypto::xor_repeating(&input1, &input2); println!("{}", hex::encode(output)); } pub fn challenge_3() { let input = hex::decode("1b37373331363f78151b7f2b783431333d78397828372d363c78373e783a393b3736").unwrap(); let (key, conf) = crypto::find_single_xor(&input); println!("{} {} {}", key, String::from_utf8(crypto::xor_repeating(&input, &vec![key])).unwrap(), conf); } pub fn challenge_4() { let mut file = File::open("data/4.txt").unwrap(); let mut all_file = String::new(); file.read_to_string(&mut all_file).unwrap(); let mut best_guess = (vec![], 0, f64::INFINITY); for line in all_file.lines() { let data = hex::decode(line).expect("Test"); let (key, conf) = crypto::find_single_xor(&data); if conf < best_guess.2 { best_guess = (data, key, conf); } } let output = crypto::xor_repeating(&best_guess.0, &vec![best_guess.1]); println!("{} {}", String::from_utf8(output).unwrap(), best_guess.2); } pub fn challenge_5() { let input = "Burning 'em, if you ain't quick and nimble\nI go crazy when I hear a cymbal"; let out = crypto::xor_repeating(&input.as_bytes().to_vec(), &"ICE".as_bytes().to_vec()); let out_str = hex::encode(out); println!("{}", out_str); } pub fn challenge_6() { let mut file = File::open("data/6good.txt").unwrap(); let mut all_file = String::new(); file.read_to_string(&mut all_file).unwrap(); let data = base64::decode(&all_file).unwrap(); let key = crypto::break_xor(&data); let cleartext = crypto::xor_repeating(&data, &key); let cleartext_string = String::from_utf8(cleartext).unwrap(); println!("{}", cleartext_string); println!("{}", hex::encode(&cleartext_string[0..32])); } pub fn challenge_7() { let mut file = File::open("data/7good.txt").unwrap(); let mut all_file = String::new(); file.read_to_string(&mut all_file).unwrap(); let data = base64::decode(&all_file).unwrap(); let key = "YELLOW SUBMARINE".as_bytes().to_vec(); let out = crypto::aes_decrypt_ecb(&data, &key); let cleartext = String::from_utf8(out).unwrap(); println!("{}", cleartext); } pub fn challenge_8() { let mut file = File::open("data/8.txt").unwrap(); let mut all_file = String::new(); file.read_to_string(&mut all_file).unwrap(); let mut best_guess = ("", 0); for line in all_file.lines() { let data = hex::decode(line).unwrap(); let nb_rep = crypto::number_repetition(&data, 16); if nb_rep > best_guess.1 { best_guess = (line, nb_rep); } } println!("{} {}", best_guess.0, best_guess.1); } pub fn challenge_9() { let data = "YELLOW SUBMARINE".as_bytes().to_vec(); let out = crypto::pkcs7_padding(&data, 20); println!("{}", hex::encode(&out)); assert_eq!(out.len(), 20); assert_eq!(hex::encode(&out), "59454c4c4f57205355424d4152494e4504040404"); } pub fn challenge_10() { let mut file = File::open("data/10good.txt").unwrap(); let mut all_file = String::new(); file.read_to_string(&mut all_file).unwrap(); let data = base64::decode(&all_file).unwrap(); let key = "YELLOW SUBMARINE".as_bytes().to_vec(); let iv = vec![0x00;16]; let out = crypto::aes_decrypt_cbc(&data, &key, &iv); let 
cleartext = String::from_utf8(out.clone()).unwrap(); println!("{}", cleartext); } pub fn challenge_11() { let mut file = File::open("data/11.txt").unwrap(); let mut all_file = String::new(); file.read_to_string(&mut all_file).unwrap(); let data = base64::decode(&all_file).unwrap(); let it_data = crypto::encryption_oracle(data.clone()); if crypto::is_ecb(it_data) { println!("ECB"); } else { println!("CBC"); } } pub fn challenge_12() { println!("TODO"); } pub fn challenge_13() { println!("TODO"); } pub fn challenge_14() { println!("TODO"); } pub fn challenge_15() { println!("TODO"); }
true
bcfae59e8812e10ba69f58091a59ab704f353f33
Rust
kanerogers/basis-universal-rs
/basis-universal/src/encoding/compressor_params.rs
UTF-8
10,380
2.859375
3
[ "MIT", "BSD-3-Clause", "LicenseRef-scancode-unknown-license-reference", "CC0-1.0", "Apache-2.0", "BSD-2-Clause" ]
permissive
use super::*; use crate::{BasisTextureFormat, UserData}; use basis_universal_sys as sys; pub use basis_universal_sys::ColorU8; /// The color space the image to be compressed is encoded in. Using the correct color space will #[derive(Debug, Copy, Clone)] pub enum ColorSpace { /// Used for normal maps or other "data" images Linear, /// Used for color maps and other "visual" images Srgb, } /// Parameters that are used to configure a [Compressor] pub struct CompressorParams(pub *mut sys::CompressorParams); impl Default for CompressorParams { fn default() -> Self { Self::new() } } impl CompressorParams { /// Create a compressor with default options pub fn new() -> Self { unsafe { let mut params = CompressorParams(sys::compressor_params_new()); params.set_default_options(); params } } /// Resets the compressor params to default state pub fn reset(&mut self) { unsafe { sys::compressor_params_clear(self.0); self.set_default_options(); self.clear_source_image_list(); } } // The default options that are applied when creating a new compressor or calling reset() on it fn set_default_options(&mut self) { // Set a default quality level. Leaving this unset results in undefined behavior, so we set // it to a working value by default self.set_etc1s_quality_level(crate::ETC1S_QUALITY_DEFAULT); self.set_uastc_quality_level(crate::UASTC_QUALITY_DEFAULT); // The library by default prints to stdout, but since this is a library we should disable // that by default self.set_print_status_to_stdout(false); } // // These function are used to load image data into the compressor // /// Get a reference to the source index. The internal list of source images is resized as needed /// such that the image will exist pub fn source_image_mut( &mut self, image_index: u32, ) -> CompressorImageRef { unsafe { CompressorImageRef(sys::compressor_params_get_or_create_source_image( self.0, image_index, )) } } /// Resizes the source image list. If the provided length is shorter than the list, the data /// beyond the provided length is truncated. pub fn resize_source_image_list( &mut self, size: u32, ) { unsafe { sys::compressor_params_resize_source_image_list(self.0, size as _); } } /// Resets the image list to be zero-length pub fn clear_source_image_list(&mut self) { unsafe { sys::compressor_params_clear_source_image_list(self.0); } } // // These set parameters for compression // /// Enable stdout logging pub fn set_print_status_to_stdout( &mut self, print_status_to_stdout: bool, ) { unsafe { sys::compressor_params_set_status_output(self.0, print_status_to_stdout) } } /// Set ETC1S quality level. The value MUST be >= [ETC1S_QUALITY_MIN](crate::ETC1S_QUALITY_MIN) /// and <= [ETC1S_QUALITY_MAX](crate::ETC1S_QUALITY_MAX). pub fn set_etc1s_quality_level( &mut self, quality_level: u32, ) { assert!(quality_level >= crate::ETC1S_QUALITY_MIN); assert!(quality_level <= crate::ETC1S_QUALITY_MAX); unsafe { sys::compressor_params_set_quality_level(self.0, quality_level as i32); } } /// Sets UASTC quality level. The value MUST be >= [UASTC_QUALITY_MIN](crate::UASTC_QUALITY_MIN) /// and <= [UASTC_QUALITY_MAX](crate::UASTC_QUALITY_MAX). pub fn set_uastc_quality_level( &mut self, quality_level: u32, ) { assert!(quality_level >= crate::UASTC_QUALITY_MIN); assert!(quality_level <= crate::UASTC_QUALITY_MAX); unsafe { let mut flags = sys::compressor_params_get_pack_uastc_flags(self.0); flags |= quality_level; sys::compressor_params_set_pack_uastc_flags(self.0, flags); } } /// Use the global codebook to compress the image. 
slightly smaller files, but lower quality, /// slower encoding pub fn set_use_global_codebook( &mut self, use_global_codebook: bool, ) { unsafe { sys::compressor_params_set_global_sel_pal(self.0, use_global_codebook); } } /// Automatically use virtual selector palettes on small images for slightly smaller files /// (defaults to off for faster encoding time) pub fn set_auto_use_global_codebook( &mut self, auto_use_global_codebook: bool, ) { unsafe { sys::compressor_params_set_auto_global_sel_pal(self.0, auto_use_global_codebook); } } /// Set the basis format we will compress to. See basis documentation for details. This /// corresponds to the -uastc flag in the basisu command line tool and the m_uastc boolean param /// on `basis_compressor_params` in the original library /// /// UASTC encoding result in significantly higher texture quality, but larger files. pub fn set_basis_format( &mut self, basis_format: BasisTextureFormat, ) { let is_uastc = match basis_format { BasisTextureFormat::ETC1S => false, BasisTextureFormat::UASTC4x4 => true, }; unsafe { sys::compressor_params_set_uastc(self.0, is_uastc); } } /// Sets the color space the images to be compressed is encoded in /// /// Setting a linear color space will: /// * Use linear colorspace metrics (instead of the default sRGB) /// * By default use linear (not sRGB) mipmap filtering pub fn set_color_space( &mut self, color_space: ColorSpace, ) { let perceptual = match color_space { ColorSpace::Linear => false, ColorSpace::Srgb => true, }; unsafe { sys::compressor_params_set_perceptual(self.0, perceptual); } } /// Override the mipmap generation color space behavior. This function is not necessary to call /// if you call [set_color_space] with the correct value. /// /// * If the color space is sRGB, convert image to linear before filtering, then back to sRGB /// * If the color space is linear, we keep the image in linear during mipmap filtering /// (i.e. do not convert to/from sRGB for filtering purposes) pub fn set_mip_color_space( &mut self, color_space: ColorSpace, ) { let mip_srgb = match color_space { ColorSpace::Linear => false, ColorSpace::Srgb => true, }; unsafe { sys::compressor_params_set_mip_srgb(self.0, mip_srgb); } } /// Disable backend's selector rate distortion optimizations (slightly faster, less noisy /// output, but lower quality per output bit) pub fn set_no_selector_rdo( &mut self, no_selector_rdo: bool, ) { unsafe { sys::compressor_params_set_no_selector_rdo(self.0, no_selector_rdo); } } /// Disable backend's endpoint rate distortion optimizations (slightly faster, less noisy /// output, but lower quality per output bit) pub fn set_no_endpoint_rdo( &mut self, no_endpoint_rdo: bool, ) { unsafe { sys::compressor_params_set_no_endpoint_rdo(self.0, no_endpoint_rdo); } } /// Enable/disable UASTC RDO post-processing and set UASTC RDO quality scalar to X. Lower /// values=higher quality/larger LZ compressed files, higher values=lower quality/smaller LZ /// compressed files. Good range to try is [.2-4] pub fn set_rdo_uastc( &mut self, rdo_uastc_quality_scalar: Option<f32>, ) { unsafe { match rdo_uastc_quality_scalar { Some(quality_scalar) => { sys::compressor_params_set_rdo_uastc(self.0, true); sys::compressor_params_set_rdo_uastc_quality_scalar(self.0, quality_scalar); } None => { sys::compressor_params_set_rdo_uastc(self.0, false); } } } } /// Generate mipmaps for each source image /// /// By default, sRGB textures will be converted from sRGB to linear before mipmap filtering. 
/// This can be changed by calling [set_color_space] or [set_mip_color_space] pub fn set_generate_mipmaps( &mut self, generate_mipmaps: bool, ) { unsafe { sys::compressor_params_set_generate_mipmaps(self.0, generate_mipmaps); } } /// Sets the smallest dimension mipmap that will be generated pub fn set_mipmap_smallest_dimension( &mut self, smallest_dimension: u32, ) { unsafe { sys::compressor_params_set_mip_smallest_dimension(self.0, smallest_dimension as _); } } /// Set arbitrary userdata to be included with the basis-universal binary data pub fn set_userdata( &mut self, userdata: UserData, ) { unsafe { sys::compressor_params_set_userdata(self.0, userdata.userdata0, userdata.userdata1); } } /// The `basisu` command line compressor offers a -normal_map parameter that sets several /// values automatically. This convenience function mimics that parameter. /// /// * linear colorspace metrics /// * linear mipmap filtering /// * no selector RDO /// * no sRGB pub fn tune_for_normal_maps(&mut self) { //TODO unsafe { sys::compressor_params_set_perceptual(self.0, false); sys::compressor_params_set_mip_srgb(self.0, false); sys::compressor_params_set_no_selector_rdo(self.0, true); sys::compressor_params_set_no_endpoint_rdo(self.0, true); } } // set_multithreaded not implemented here as this is controlled by thread count passed to // `Compressor::new()` } impl Drop for CompressorParams { fn drop(&mut self) { unsafe { sys::compressor_params_delete(self.0); } } }
true
22413518120d27cd7074724c35b3695b319149f2
Rust
Nukesor/collatz_conjecture
/src/algorithms/fixed_vector.rs
UTF-8
3,824
3.046875
3
[]
no_license
use std::time::Instant; use color_eyre::eyre::Result; use crossbeam::channel::Receiver; use num_format::{Locale, ToFormattedString}; use crate::{BATCH_SIZE, DEFAULT_MAX_PROVEN_NUMBER, REPORTING_SIZE, THREAD_COUNT}; /// We have to implement our own non-moving vector, since the backlog is by far the slowest part of /// the main thread. Without some kind of special datastructure, we're quickly accumulating a lot /// of messages in our mpsc channel. /// /// This is value is simply a vector of zeros with the last bit flipped. static EMPTY_SLOT: u128 = 0; #[allow(dead_code)] pub fn fixed_vector(receiver: Receiver<u128>) -> Result<()> { // This is used to store all numbers that haven't been solved yet. // -> For instance, if the task for 10 completes, but 7, 8 and 9 haven't yet, these will be // added to the backlog. // In theory, there should never be more than `threadpool_count` elements in the backlog. let mut backlog: Vec<u128> = vec![EMPTY_SLOT; THREAD_COUNT]; let mut highest_number = DEFAULT_MAX_PROVEN_NUMBER - BATCH_SIZE; // The highest number that's connected in the sequence of natural numbers from `(0..number)`. let mut highest_sequential_number = DEFAULT_MAX_PROVEN_NUMBER; let mut counter = 0; let start = Instant::now(); loop { let next_number = receiver.recv()?; if next_number > highest_number { // Add all missing numbers that haven't been returned yet. let mut backlog_slot_iter = 0..THREAD_COUNT; // Add all missing numbers that haven't been returned yet. let mut missing = highest_number + BATCH_SIZE; while missing < next_number { // Scan the vector for free slots (slots with 0) // By using a stateful-vector, we only do a single scan for multiple missing // elements. while let Some(slot) = backlog_slot_iter.next() { let value = backlog[slot]; if value == 0 { backlog[slot] = missing; break; } } missing += BATCH_SIZE; } // Set the new number as the highest number. highest_number = next_number; } else { // The number must be in the backlog. let mut found_in_backlog = false; for i in 0..backlog.len() { if backlog[i] == next_number { backlog[i] = 0; found_in_backlog = true; break; } } if !found_in_backlog { panic!( "Couldn't find number {} in backlog {:?}", next_number, backlog ); } } // We only print stuff every X iterations, as printing is super slow. // We also only update the highest_sequential_number during this interval. if counter % (REPORTING_SIZE / BATCH_SIZE) == 0 { // Find the smallest number in our backlog. // That number minus 1 is our last succesfully calculated value. backlog.sort(); for i in backlog.iter() { if i == &0 { continue; } highest_sequential_number = i - 1; } println!( "Batch : {}, Time: {}ms, Max number: {}, Channel size: {}, Backlog size: {}", counter, start.elapsed().as_millis().to_formatted_string(&Locale::en), highest_sequential_number.to_formatted_string(&Locale::en), receiver.len(), backlog.len() ); } counter += 1; } }
true
5fcaf6165236ff797e051fa5f9fa3224e834db19
Rust
divinerapier/crack-the-coding-interview
/linked-list/src/main.rs
UTF-8
517
2.96875
3
[]
no_license
#![feature(box_into_raw_non_null)] pub mod linkedlist; fn main() { println!("Hello, world!"); } impl<T> linkedlist::LinkedList<T> where T: std::cmp::Eq + std::hash::Hash, { // fn remove_duplicates(&self) { // let set: std::collections::HashSet<T> = std::collections::HashSet::new(); // let mut temp = self.head; // loop { // match temp { // None => break, // Some(node) => set.get(node.data), // } // } // } }
true
4a005aae80c84082ef95cc7be098e8f18a4dcd16
Rust
magnusmanske/gulp
/src/list.rs
UTF-8
14,202
2.75
3
[]
no_license
use std::collections::HashMap; use std::collections::HashSet; use std::sync::Arc; use serde::Serialize; use mysql_async::{prelude::*, Conn}; use serde_json::json; use crate::app_state::AppState; use crate::data_source::{DataSource, CellSet}; use crate::header::*; use crate::cell::*; use crate::row::*; use crate::GulpError; const ROW_INSERT_BATCH_SIZE: usize = 1000; #[derive(Clone, Debug, Serialize)] pub struct List { pub id: DbId, pub name: String, pub revision_id: DbId, // ALWAYS THE CURRENT (LATEST) ONE pub header: Header, #[serde(skip_serializing)] pub app: Arc<AppState>, } impl List { pub async fn create_new(app: &Arc<AppState>, name: &str, header_schema_id: DbId) -> Option<Self> { let mut conn = app.get_gulp_conn().await.ok()?; let header_schema = HeaderSchema::from_id(&mut conn, header_schema_id).await?; let sql = "INSERT INTO `list` (`name`) VALUES (:name)" ; conn.exec_drop(sql, params!{name}).await.ok()?; let list_id = conn.last_insert_id()?; drop(conn); let mut header = Header { id: 0, list_id, revision_id: 0, schema: header_schema }; let _ = header.create_in_db(app).await.ok()?; Self::from_id(app, list_id).await } pub async fn set_header_schema(&mut self, header_schema: HeaderSchema) -> Result<(),GulpError> { let mut header = Header { id: 0, list_id:self.id, revision_id: self.revision_id, schema: header_schema }; let _ = header.create_in_db(&self.app).await?; self.header = header; Ok(()) } pub async fn add_access(&self, app: &Arc<AppState>, user_id: DbId, access: &str) -> Result<(),GulpError> { let list_id = self.id; let sql = "INSERT IGNORE INTO `access` (list_id,user_id,`right`) VALUES (:list_id,:user_id,:access)"; app.get_gulp_conn().await?.exec_drop(sql, params!{list_id,user_id,access}).await?; Ok(()) } pub async fn from_id(app: &Arc<AppState>, list_id: DbId) -> Option<Self> { let sql = r#"SELECT id,name,revision_id FROM `list` WHERE id=:list_id"#; let row = app.get_gulp_conn().await.ok()? .exec_iter(sql,params! {list_id}).await.ok()? .map_and_drop(|row| row).await.ok()? .get(0)?.to_owned(); Self::from_row(app, &row, list_id).await } pub async fn from_row(app: &Arc<AppState>, row: &mysql_async::Row, list_id: DbId) -> Option<Self> { let mut conn = app.get_gulp_conn().await.ok()?; let header = Header::from_list_id(&mut conn, list_id).await?; Some(Self { app: app.clone(), id: row.get(0)?, name: row.get(1)?, revision_id: row.get(2)?, header: header, }) } pub fn get_file_basename(&self, revision_id: Option<DbId>) -> String { let revision_id = revision_id.unwrap_or(self.revision_id); if self.name.is_empty() { format!("GULP_list_{}.revision_{}",self.id,revision_id) } else { format!("{}.revision_{}",self.name.replace(" ","_"),revision_id) } } pub async fn get_rows_for_revision(&self, revision_id: DbId) -> Result<Vec<Row>, GulpError> { self.get_rows_for_revision_paginated(revision_id, 0, None).await } pub async fn get_rows_for_revision_paginated(&self, revision_id: DbId, start: DbId, length: Option<DbId>) -> Result<Vec<Row>, GulpError> { let length = length.unwrap_or(DbId::MAX); let list_id = self.id ; let sql = r#"SELECT row.id,list_id,row_num,revision_id,json,json_md5,user_id,modified FROM `row` WHERE revision_id=(SELECT max(revision_id) FROM `row` i WHERE i.row_num = row.row_num AND i.list_id=:list_id AND revision_id<=:revision_id) AND list_id=:list_id AND revision_id<=:revision_id ORDER BY row_num LIMIT :length OFFSET :start"#; let row_opts = self.app.get_gulp_conn().await? .exec_iter(sql,params! {list_id,revision_id,start,length}).await? 
.map_and_drop(|row| Row::from_row(&row,&self.header)).await?; let rows: Vec<Row> = row_opts.iter().cloned().filter_map(|row|row).collect(); Ok(rows) } pub async fn get_users_in_revision(&self, revision_id: DbId) -> Result<HashMap<DbId,String>, GulpError> { let sql = r#"SELECT DISTINCT user_id,user.name FROM `row`,`user` WHERE revision_id=(SELECT max(revision_id) FROM `row` i WHERE i.row_num = row.row_num AND i.list_id=:list_id AND revision_id<=:revision_id) AND list_id=:list_id AND revision_id<=:revision_id AND user_id=user.id"#; let list_id = self.id ; let ret = self.app.get_gulp_conn().await? .exec_iter(sql,params! {list_id,revision_id}).await? .map_and_drop(|row| mysql_async::from_row::<(DbId,String)>(row)).await? .into_iter().collect(); Ok(ret) } pub async fn get_users_by_id(&self, user_ids: &Vec<DbId>) -> Result<HashMap<DbId,String>, GulpError> { if user_ids.is_empty() { return Ok(HashMap::new()); } let user_ids: Vec<String> = user_ids.iter().map(|id|format!("{id}")).collect(); let user_ids = user_ids.join(","); let sql = format!("SELECT DISTINCT user.id,user.name FROM `user` WHERE id IN ({user_ids})"); let ret = self.app.get_gulp_conn().await? .exec_iter(sql,()).await? .map_and_drop(|row| mysql_async::from_row::<(DbId,String)>(row)).await? .into_iter().collect(); Ok(ret) } pub async fn get_rows_in_revision(&self, revision_id: DbId) -> Result<usize, GulpError> { let sql = r#"SELECT count(*) FROM `row` WHERE revision_id=(SELECT max(revision_id) FROM `row` i WHERE i.row_num = row.row_num AND i.list_id=:list_id AND revision_id<=:revision_id) AND list_id=:list_id AND revision_id<=:revision_id"#; let list_id = self.id ; let row_number = self.app.get_gulp_conn().await? .exec_iter(sql,params! {list_id,revision_id}).await? .map_and_drop(|row| mysql_async::from_row::<usize>(row)).await?.get(0).cloned().unwrap_or(0); Ok(row_number) } pub fn get_client() -> Result<reqwest::Client, GulpError> { let client = reqwest::Client::builder() .user_agent("gulp/0.1") .build()?; Ok(client) } pub fn get_text_from_url(url: &str) -> Result<String,GulpError> { let agent = ureq::AgentBuilder::new().build(); Ok(agent.get(&url).call()?.into_string()?) } async fn load_json_md5s(&self, conn: &mut Conn) -> Result<HashSet<String>,GulpError> { let list_id = self.id; let sql = r#"SELECT json_md5 FROM `row` WHERE revision_id=(SELECT max(revision_id) FROM `row` i WHERE i.row_num = row.row_num AND i.list_id=:list_id) AND list_id=:list_id"#; let ret = conn .exec_iter(sql,params! {list_id}).await? 
.map_and_drop(|row| mysql_async::from_row::<String>(row)).await?; let ret: HashSet<String> = ret.into_iter().collect(); Ok(ret) } async fn flush_row_insert(&self, conn: &mut Conn, rows: &mut Vec<Row>) -> Result<(), GulpError> { if rows.is_empty() { return Ok(()); } let params: Vec<mysql_async::Params> = rows.iter().map(|row| { let list_id = row.list_id; let row_num = row.row_num; let revision_id = row.revision_id; let json = &row.json; let json_md5 = &row.json_md5; let user_id = row.user_id; params!{list_id,row_num,revision_id,json,json_md5,user_id} }).collect(); let sql = r#"INSERT INTO `row` (list_id,row_num,revision_id,json,json_md5,user_id,modified) VALUES (:list_id,:row_num,:revision_id,:json,:json_md5,:user_id,now())"#; let tx_opts = mysql_async::TxOpts::default() .with_consistent_snapshot(true) .with_isolation_level(mysql_async::IsolationLevel::RepeatableRead) .to_owned(); let mut transaction = conn.start_transaction(tx_opts).await?; transaction.exec_batch(sql, params.iter()).await?; transaction.commit().await?; rows.clear(); Ok(()) } pub async fn get_sources(&self) -> Result<Vec<DataSource>, GulpError> { let list_id = self.id; let sql = r#"SELECT id,list_id,source_type,source_format,location,user_id FROM data_source WHERE list_id=:list_id"#; let sources = self.app.get_gulp_conn().await? .exec_iter(sql,params! {list_id}).await? .map_and_drop(|row| DataSource::from_row(&row)).await? .iter().cloned().filter_map(|s|s).collect(); Ok(sources) } /// Checks if a revision_id increase is necessary. /// Returns the new current revision_id (which might be unchanged). pub async fn snapshot(&mut self) -> Result<DbId, GulpError> { let mut conn = self.app.get_gulp_conn().await?; // Check if there is a need to create a new snapshot, that is, increase the revision ID let sql = "SELECT count(id) FROM `row` WHERE list_id=:list_id AND revision_id=:revision_id" ; let list_id = self.id; let revision_id = self.revision_id; let results = conn .exec_iter(sql,params! {list_id,revision_id}).await? .map_and_drop(|row| mysql_async::from_row::<DbId>(row)).await?; let numer_of_rows_with_current_revision = results.get(0); let numer_of_rows_with_current_revision = match numer_of_rows_with_current_revision { Some(x) => *x, None => return Err("snapshot: database query error".into()), }; if numer_of_rows_with_current_revision==0 { // No need to make a new snapshot return Ok(self.revision_id); } // Create new revision ID self.revision_id += 1 ; let sql = "UPDATE `list` SET revision_id=:revision_id WHERE id=:list_id" ; let list_id = self.id; let revision_id = self.revision_id; conn.exec_drop(sql, params!{list_id,revision_id}).await?; Ok(self.revision_id) } async fn check_json_exists(&self, _conn: &mut Conn, _json_text: &str, _json_md5: &str) -> Result<bool, GulpError> { // Already checked via md5, might have to implement if collisions occur Ok(true) } async fn get_or_ignore_new_row(&self, conn: &mut Conn, md5s: &HashSet<String>, cells: Vec<Option<Cell>>, row_num: DbId, user_id: DbId) -> Result<Option<Row>, GulpError> { let cells2j: Vec<_> = cells .iter() .zip(self.header.schema.columns.iter()) .collect(); let cells2j: Vec<serde_json::Value> = cells2j.iter().cloned() .map(|(cell,column)| cell.to_owned().map(|c|c.as_json(column))) .map(|cell| cell.unwrap_or_else(||json!(null))) .collect(); let cells_json = json!{cells2j}; let cells_json_text = cells_json.to_string(); let json_md5 = Row::md5(&cells_json_text); let json_exists = if md5s.contains(&json_md5) { self.check_json_exists(conn, &cells_json_text,&json_md5).await? 
} else { false }; if !json_exists { let new_row = Row{ id:0, list_id: self.id, row_num, revision_id: self.revision_id, json: cells_json_text.to_owned(), json_md5, user_id, modified: String::new(), cells, }; return Ok(Some(new_row)); } Ok(None) } pub async fn update_from_source(&self, source: &DataSource, user_id: DbId) -> Result<(),GulpError> { let cell_set = source.get_cells(None).await?; self.import_cells(&cell_set, user_id).await?; Ok(()) } async fn import_cells(&self, cell_set: &CellSet, user_id: DbId) -> Result<(), GulpError> { // TODO delete rows? let mut conn = self.app.get_gulp_conn().await?; let mut md5s = self.load_json_md5s(&mut conn).await?; let mut next_row_num = self.get_max_row_num(&mut conn).await? + 1; let mut rows = vec![]; for row in &cell_set.rows { if let Some(row) = self.get_or_ignore_new_row(&mut conn, &md5s, row.cells.to_owned(), next_row_num, user_id).await? { if row.cells.is_empty() { continue; } next_row_num += 1; md5s.insert(row.json_md5.to_owned()); rows.push(row); if rows.len()>=ROW_INSERT_BATCH_SIZE { self.flush_row_insert(&mut conn, &mut rows).await?; } } } self.flush_row_insert(&mut conn, &mut rows).await?; Ok(()) } async fn get_max_row_num(&self, conn: &mut Conn) -> Result<DbId, GulpError> { let list_id = self.id; let sql = r#"SELECT IFNULL(max(row_num),0) FROM `row` WHERE revision_id=(SELECT max(revision_id) FROM `row` i WHERE i.row_num = row.row_num AND i.list_id=:list_id) AND list_id=:list_id"#; let result: Vec<DbId> = conn .exec_iter(sql,params! {list_id}).await? .map_and_drop(|row| mysql_async::from_row::<DbId>(row)).await?; match result.get(0) { Some(result) => Ok(*result), None => Ok(0) } } } #[cfg(test)] mod tests { use super::*; use crate::app_state::*; #[tokio::test] async fn test_from_id() { let app = AppState::from_config_file("config.json").expect("app creation failed"); let app = Arc::new(app); let list = List::from_id(&app, 4).await.expect("from_id fail"); assert_eq!(list.id,4); assert_eq!(list.name,"File candidates Hessen"); println!("{:?}",list.header.schema.columns[0]); //assert_eq!(list.header.schema.columns[0],"File candidates Hessen"); } }
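// Illustrative flow sketch (assumptions: `app`, `header_schema_id`, and `user_id` come from
// the surrounding application; error handling elided):
//
//     let mut list = List::create_new(&app, "my list", header_schema_id).await.unwrap();
//     for source in list.get_sources().await? {
//         list.update_from_source(&source, user_id).await?;
//     }
//     let revision_id = list.snapshot().await?;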
true
d4a4cd9217acadb627e713d8d4cb4e955e68e89e
Rust
Thaelz/ranaz
/ranaz/src/main.rs
UTF-8
3,171
2.8125
3
[]
no_license
#[macro_use] extern crate clap; pub mod utils; pub mod markov; pub mod fourier; use clap::App; use std::fs::File; use std::io::prelude::*; /* Statistical basic analisys * -> bits (1 and 0 count) * -> dibits (00, 01, 10, 11 count) * -> bytes (hex : 00 to ff count) * -> word, 4-bytes word * We try to do it smart, register the position of the byte in the qword * We use a [u128; 1024], so that the byte x according to its position y * such that y in [0-3] increments [y*256 + x] */ fn bit_dibit_byte_stat(s : &Vec<u8>) -> ( [u128; 2], [u128; 4], [u128; 256], [u128; 1024] ) { let mut bits: [u128; 2] = [0; 2]; let mut dibits: [u128; 4] = [0; 4]; let mut bytes: [u128; 256] = [0; 256]; let mut words: [u128; 1024] = [0; 1024]; let tot_bits = (s.len() * 8) as u128; let mut i: u128 = 0; for byte in s { let mut b = *byte; for _ in 0..4 { let d = (b & 3) as usize; dibits[d] += 1; b >>= 2; } bytes[b as usize] += 1; words[ (( (i & 3) * 256) + b as u128) as usize ] += 1; i += 1; } bits[1] = dibits[1] + dibits[2] + (dibits[3] << 1); bits[0] = tot_bits - bits[1]; return (bits, dibits, bytes, words); } fn analyze(filename : &str) -> std::io::Result<()> { // TODO: pass the File object as parameter? let mut file = File::open(filename)?; let mut contents = Vec::new(); file.read_to_end(&mut contents)?; let (bits, dibits, bytes, words) = bit_dibit_byte_stat(&mut contents); /* Bit test */ println!("Bit test : Z({}), O({})", bits[0], bits[1]); let mut bit_diff = utils::u_substract(bits[0], bits[1]); bit_diff *= bit_diff; bit_diff /= bits[0] + bits[1]; println!("Bit test === {}", bit_diff); /* Dibit test */ println!("Dibit test: 00:{} - 01:{} - 10:{} - 11:{}", dibits[0], dibits[1], dibits[2], dibits[3]); let mut dibit_diff2= utils::iter_sums_u_subs(&dibits); dibit_diff2 *= dibit_diff2; let mut sum: u128 = dibits.iter().sum(); dibit_diff2 /= sum; println!("Dibit2 test === {}", dibit_diff2); /* Byte test */ Ok(()) } // TODO: use a logger system and print more if verbose_level is high? fn main() -> std::io::Result<()> { let yaml = load_yaml!("cli.yml"); let matches = App::from_yaml(yaml).get_matches(); let mut input_file = File::open(matches.value_of("INPUT").unwrap())?; let _verbose_level = match matches.occurrences_of("v") { lvl if lvl < 3 => lvl, _ => panic!("Cannot take more than 2 verbose arguments.") }; if let Some(matches) = matches.subcommand_matches("markov") { if matches.is_present("png") { let mut bytes = Vec::<u8>::new(); input_file.read_to_end(&mut bytes)?; let matrix = markov::get_markov_array(&bytes); markov::to_img(&matrix, matches.value_of("OUTPUT").unwrap()); } else { println!("Usage : cargo run /tmp/rand markov -p /tmp/rand.png") } } else { analyze(matches.value_of("INPUT").unwrap())?; } Ok(()) }
true
28b48243d40bc828abf288a10af29f27e883b508
Rust
krobelus/rate
/rate-common/src/memory.rs
UTF-8
1,835
3.5
4
[ "MIT" ]
permissive
//! General purpose data structures //! //! These are simply `std::vec::Vec` wrappers tuned for a specific purpose, //! so they are harder to misuse, or more efficient. //! //! For example: //! //! - The first template argument in `Array<I, T>` and `StackMapping<I, T>` //! requires to specify a type that will be used for indexing. This prevents //! us from accidentally using an index of the wrong type. //! //! - If we know a good upper bound for the size of a vector we prefer to //! use `Array<I, T>`, `BoundedVector<T>` or `StackMapping<Key, T>` as //! they never allocate after being constructed. //! //! - Bounds checking can be disabled for all these vectors. mod array; mod boundedvector; #[macro_use] mod vector; mod stackmapping; use std::convert::TryFrom; pub use crate::memory::{ array::Array, boundedvector::BoundedVector, stackmapping::StackMapping, vector::{assert_in_bounds, Vector}, }; /// Trait for types that can be used as an array index. pub trait Offset { fn as_offset(&self) -> usize; } impl Offset for usize { fn as_offset(&self) -> usize { *self } } impl Offset for u64 { fn as_offset(&self) -> usize { requires!(usize::try_from(*self).is_ok()); *self as usize } } impl Offset for i32 { fn as_offset(&self) -> usize { requires!(usize::try_from(*self).is_ok()); *self as usize } } /// A trait for objects that can report their memory usage on the heap pub trait HeapSpace { /// The number of bytes allocated on the heap that this owns. fn heap_space(&self) -> usize; } impl<T: Copy> HeapSpace for T { fn heap_space(&self) -> usize { 0 } } /// Convert bytes to megabytes for readability. pub fn format_memory_usage(bytes: usize) -> String { format!("{:12}", bytes >> 20) // MB }
true
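A small illustration of how the Offset trait above is meant to be used; ClauseIdx is a hypothetical index newtype, not a type from this crate. A dedicated index type implements Offset so that a container such as Array<ClauseIdx, T> can only be indexed with that type.

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct ClauseIdx(u32);

impl Offset for ClauseIdx {
    fn as_offset(&self) -> usize {
        self.0 as usize
    }
}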
d4a8fce1d4ca92dfd88a4b2a3a55c852e7fc945d
Rust
IThawk/rust-project
/rust-master/src/test/run-pass/structs-enums/enum-discrim-manual-sizing.rs
UTF-8
1,977
2.671875
3
[ "MIT", "LicenseRef-scancode-other-permissive", "Apache-2.0", "BSD-3-Clause", "BSD-2-Clause", "NCSA" ]
permissive
// run-pass #![allow(dead_code)] use std::mem::{size_of, align_of}; #[repr(i8)] enum Ei8 { Ai8 = 0, Bi8 = 1 } #[repr(u8)] enum Eu8 { Au8 = 0, Bu8 = 1 } #[repr(i16)] enum Ei16 { Ai16 = 0, Bi16 = 1 } #[repr(u16)] enum Eu16 { Au16 = 0, Bu16 = 1 } #[repr(i32)] enum Ei32 { Ai32 = 0, Bi32 = 1 } #[repr(u32)] enum Eu32 { Au32 = 0, Bu32 = 1 } #[repr(i64)] enum Ei64 { Ai64 = 0, Bi64 = 1 } #[repr(u64)] enum Eu64 { Au64 = 0, Bu64 = 1 } #[repr(isize)] enum Eint { Aint = 0, Bint = 1 } #[repr(usize)] enum Euint { Auint = 0, Buint = 1 } #[repr(u8)] enum Eu8NonCLike<T> { _None, _Some(T), } #[repr(i64)] enum Ei64NonCLike<T> { _None, _Some(T), } #[repr(u64)] enum Eu64NonCLike<T> { _None, _Some(T), } pub fn main() { assert_eq!(size_of::<Ei8>(), 1); assert_eq!(size_of::<Eu8>(), 1); assert_eq!(size_of::<Ei16>(), 2); assert_eq!(size_of::<Eu16>(), 2); assert_eq!(size_of::<Ei32>(), 4); assert_eq!(size_of::<Eu32>(), 4); assert_eq!(size_of::<Ei64>(), 8); assert_eq!(size_of::<Eu64>(), 8); assert_eq!(size_of::<Eint>(), size_of::<isize>()); assert_eq!(size_of::<Euint>(), size_of::<usize>()); assert_eq!(size_of::<Eu8NonCLike<()>>(), 1); assert_eq!(size_of::<Ei64NonCLike<()>>(), 8); assert_eq!(size_of::<Eu64NonCLike<()>>(), 8); let u8_expected_size = round_up(9, align_of::<Eu64NonCLike<u8>>()); assert_eq!(size_of::<Eu64NonCLike<u8>>(), u8_expected_size); let array_expected_size = round_up(28, align_of::<Eu64NonCLike<[u32; 5]>>()); assert_eq!(size_of::<Eu64NonCLike<[u32; 5]>>(), array_expected_size); assert_eq!(size_of::<Eu64NonCLike<[u32; 6]>>(), 32); assert_eq!(align_of::<Eu32>(), align_of::<u32>()); assert_eq!(align_of::<Eu64NonCLike<u8>>(), align_of::<u64>()); } // Rounds x up to the next multiple of a fn round_up(x: usize, a: usize) -> usize { ((x + (a - 1)) / a) * a }
true
0c1167be410a4284fb7aa0200ee22cbcd3c66d2b
Rust
prz23/zinc
/zinc-types/src/request/initialize.rs
UTF-8
1,310
2.71875
3
[ "Apache-2.0" ]
permissive
//! //! The contract resource `initialize` POST request. //! use std::iter::IntoIterator; use serde::Deserialize; use serde::Serialize; use zksync_types::Address; use crate::transaction::Transaction; /// /// The contract resource `initialize` POST request query. /// #[derive(Debug, Deserialize)] pub struct Query { /// The contract ETH address. pub address: Address, } impl Query { /// /// A shortcut constructor. /// pub fn new(address: Address) -> Self { Self { address } } } impl IntoIterator for Query { type Item = (&'static str, String); type IntoIter = std::vec::IntoIter<Self::Item>; fn into_iter(self) -> Self::IntoIter { vec![( "address", serde_json::to_string(&self.address) .expect(zinc_const::panic::DATA_CONVERSION) .replace("\"", ""), )] .into_iter() } } /// /// The contract resource `initialize` POST request body. /// #[derive(Debug, Serialize, Deserialize)] pub struct Body { /// The signed initial transfer transaction which must be sent directly to zkSync. pub transaction: Transaction, } impl Body { /// /// A shortcut constructor. /// pub fn new(transaction: Transaction) -> Self { Self { transaction } } }
true
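A brief hypothetical sketch of how the IntoIterator impl on Query above can be used to build URL query parameters; Address::zero() is only a placeholder value.

fn example() {
    let query = Query::new(Address::zero());
    let params: Vec<(&'static str, String)> = query.into_iter().collect();
    assert_eq!(params.len(), 1);
    assert_eq!(params[0].0, "address");
    // params[0].1 holds the hex-encoded address with the JSON quotes stripped.
}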
164f5874cc68a909bc3036ed7636c1200114a5e7
Rust
joseluis/spaceindex
/spaceindex/src/geometry/region.rs
UTF-8
6,395
3.1875
3
[ "MIT", "Apache-2.0" ]
permissive
use geo::bounding_rect::BoundingRect; use std::borrow::Cow; use crate::geometry::point::IntoPoint; use crate::geometry::{ check_dimensions_match, min_distance_point_region, min_distance_region, LineSegment, Point, Shape, Shapelike, ShapelikeError, }; #[derive(Debug, Clone, PartialEq)] pub struct Region { pub coordinates: Vec<(f64, f64)>, } impl Region { /// Creates a new [`Region`]. pub fn new(coordinates: Vec<(f64, f64)>) -> Self { Self { coordinates } } /// Creates an infinite [`Region'] pub fn infinite(dimension: usize) -> Self { let coordinates = vec![(std::f64::MIN, std::f64::MAX); dimension]; Self::new(coordinates) } /// Returns an iterator over coordinates in this region. pub fn coordinates_iter(&self) -> impl Iterator<Item = (f64, f64)> + '_ { self.coordinates.iter().cloned() } /// Constructs a region from a pair of points. #[inline(always)] pub fn from_points(a: &Point, b: &Point) -> Self { Self::new(a.coordinate_iter().zip(b.coordinate_iter()).collect()) } /// Determines whether this region contains another region `other`. pub fn contains_region(&self, other: &Region) -> Result<bool, ShapelikeError> { check_dimensions_match(self, other)?; Ok(!self .coordinates_iter() .zip(other.coordinates_iter()) .any(|((s_low, s_high), (o_low, o_high))| s_low > o_low || s_high < o_high)) } /// Combines this region with another region `other`. #[inline(always)] pub fn combine_region(&self, other: &Region) -> Result<Region, ShapelikeError> { check_dimensions_match(self, other)?; Ok(Region::new( self.coordinates_iter() .zip(other.coordinates_iter()) .map(|((s_low, s_high), (o_low, o_high))| { (f64::min(s_low, o_low), f64::max(s_high, o_high)) }) .collect(), )) } /// Combines this region with another region `other` in place. #[inline(always)] pub fn combine_region_in_place(&mut self, other: &Region) { check_dimensions_match(self, other).unwrap(); for ((s_low, s_high), (o_low, o_high)) in self.coordinates.iter_mut().zip(other.coordinates_iter()) { *s_low = f64::min(*s_low, o_low); *s_high = f64::max(*s_high, o_high); } } } impl Shapelike for Region { fn get_center(&self) -> Point { // take the average of high + low coordinates Point::new( self.coordinates_iter() .map(|(x, y)| (x + y) / 2.0) .collect(), ) } fn get_dimension(&self) -> usize { self.coordinates.len() } fn get_min_bounding_region(&self) -> Region { self.clone() } #[inline(always)] fn get_area(&self) -> f64 { let mut area = 1.0; for (low, high) in self.coordinates_iter() { area *= high - low; } area } fn get_min_distance(&self, other: &Shape) -> Result<f64, ShapelikeError> { check_dimensions_match(self, other)?; match other { Shape::Point(point) => Ok(min_distance_point_region(point, self)), Shape::LineSegment(_) => Err(ShapelikeError::UnsupportedOperation), Shape::Region(region) => Ok(min_distance_region(region, self)), } } fn contains_point(&self, point: &Point) -> Result<bool, ShapelikeError> { check_dimensions_match(self, point)?; Ok(!point .coordinate_iter() .zip(self.coordinates_iter()) .any(|(pc, (low, high))| low > pc || high < pc)) } fn intersects_line_segment(&self, line: &LineSegment) -> Result<bool, ShapelikeError> { if self.get_dimension() != 2 { return Err(ShapelikeError::UnexpectedDimension(self.get_dimension(), 2)); } check_dimensions_match(self, line)?; let (low0, high0) = self.coordinates[0]; let (low1, high1) = self.coordinates[1]; let ll = Point::new(vec![low0, high0]); let ur = Point::new(vec![low1, high1]); let ul = Point::new(vec![low0, high1]); let lr = Point::new(vec![high0, low1]); let (start, end) = 
line.get_points(); Ok(self.contains_point(start)? || self.contains_point(end)? || line.intersects_line_segment(&LineSegment::new(ll.clone(), ul.clone()))? || line.intersects_line_segment(&LineSegment::new(ul, ur.clone()))? || line.intersects_line_segment(&LineSegment::new(ur, lr.clone()))? || line.intersects_line_segment(&LineSegment::new(lr, ll))?) } fn intersects_region(&self, region: &Region) -> Result<bool, ShapelikeError> { check_dimensions_match(self, region)?; Ok(!self .coordinates_iter() .zip(region.coordinates_iter()) .any(|((s_low, s_high), (o_low, o_high))| s_low > o_high || s_high < o_low)) } } /// We can't implement Into<Cow<'a, Region>> for types such as (f64, f64) or ((f64, f64), (f64, f64)), /// so we have the [`IntoRegion<'a>]` trait which is essentially identical. This makes many of our /// internal API's much nicer to work with. pub trait IntoRegion<'a> { fn into_region(self) -> Cow<'a, Region>; } impl<'a> IntoRegion<'a> for Region { fn into_region(self) -> Cow<'a, Region> { Cow::Owned(self) } } impl<'a> IntoRegion<'a> for Cow<'a, Region> { fn into_region(self) -> Cow<'a, Region> { self } } impl<'a> IntoRegion<'a> for (f64, f64) { fn into_region(self) -> Cow<'a, Region> { Cow::Owned(Region::new(vec![(self.0, self.1)])) } } impl<'a> IntoRegion<'a> for ((f64, f64), (f64, f64)) { fn into_region(self) -> Cow<'a, Region> { Cow::Owned(Region::from_points( &(self.0).into_pt(), &(self.1).into_pt(), )) } } impl<'a> IntoRegion<'a> for &geo_types::LineString<f64> { fn into_region(self) -> Cow<'a, Region> { let bounding_rect = self.bounding_rect().expect("failed to get bounding rect"); ( (bounding_rect.min().x, bounding_rect.min().y), (bounding_rect.max().x, bounding_rect.max().y), ) .into_region() } }
true
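A short hypothetical example of the IntoRegion convenience trait defined above (not taken from the crate's tests); it assumes it sits next to the definitions so that IntoRegion and Shapelike are in scope.

fn example() {
    let corners: ((f64, f64), (f64, f64)) = ((0.0, 0.0), (2.0, 3.0));
    let region = corners.into_region();
    assert_eq!(region.get_dimension(), 2);
    assert_eq!(region.get_area(), 6.0); // a 2 x 3 axis-aligned box
}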
8fc7a7d05fcd44963b13a029eaa26cf422c40cb6
Rust
peter-signal/dtls
/src/curve/named_curve.rs
UTF-8
2,352
3.078125
3
[ "MIT" ]
permissive
use rand_core::OsRng; // requires 'getrandom' feature use util::Error; use crate::errors::*; // https://www.iana.org/assignments/tls-parameters/tls-parameters.xml#tls-parameters-8 #[derive(Copy, Clone, PartialEq, Debug)] pub enum NamedCurve { P256 = 0x0017, P384 = 0x0018, X25519 = 0x001d, Unsupported, } impl From<u16> for NamedCurve { fn from(val: u16) -> Self { match val { 0x0017 => NamedCurve::P256, 0x0018 => NamedCurve::P384, 0x001d => NamedCurve::X25519, _ => NamedCurve::Unsupported, } } } pub(crate) enum NamedCurvePrivateKey { EphemeralSecretP256(p256::ecdh::EphemeralSecret), StaticSecretX25519(x25519_dalek::StaticSecret), } pub struct NamedCurveKeypair { pub(crate) curve: NamedCurve, pub(crate) public_key: Vec<u8>, pub(crate) private_key: NamedCurvePrivateKey, } fn elliptic_curve_keypair(curve: NamedCurve) -> Result<NamedCurveKeypair, Error> { let (public_key, private_key) = match curve { NamedCurve::P256 => { let secret_key = p256::ecdh::EphemeralSecret::random(&mut OsRng); let public_key = p256::EncodedPoint::from(secret_key.public_key()); ( public_key.as_bytes().to_vec(), NamedCurvePrivateKey::EphemeralSecretP256(secret_key), ) } NamedCurve::X25519 => { let secret_key = x25519_dalek::StaticSecret::new(rand_core::OsRng); let public_key = x25519_dalek::PublicKey::from(&secret_key); ( public_key.as_bytes().to_vec(), NamedCurvePrivateKey::StaticSecretX25519(secret_key), ) } //TODO: add NamedCurve::p384 _ => return Err(ERR_INVALID_NAMED_CURVE.clone()), }; Ok(NamedCurveKeypair { curve, public_key, private_key, }) } impl NamedCurve { pub fn generate_keypair(&self) -> Result<NamedCurveKeypair, Error> { match *self { //TODO: add P384 NamedCurve::X25519 => elliptic_curve_keypair(NamedCurve::X25519), NamedCurve::P256 => elliptic_curve_keypair(NamedCurve::P256), //NamedCurve::P384 => elliptic_curve_keypair(NamedCurve::P384), _ => Err(ERR_INVALID_NAMED_CURVE.clone()), } } }
true
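A hypothetical usage sketch for the API above; it has to live inside the crate because the keypair fields are pub(crate), and it only checks the curve tag and the 32-byte X25519 public key (the ECDH agreement step is out of scope here).

fn example() -> Result<(), Error> {
    let keypair = NamedCurve::X25519.generate_keypair()?;
    assert_eq!(keypair.curve, NamedCurve::X25519);
    assert_eq!(keypair.public_key.len(), 32); // X25519 public keys are 32 bytes
    Ok(())
}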
478fde5b317d5eb88bb7d55f57eba587c2a8e985
Rust
markjansnl/aoc2020
/aoc13/src/bin/b.rs
UTF-8
1,608
2.859375
3
[]
no_license
use num::integer::*; use aoc13::{input, *}; fn main() { println!("{}", earliest_timestamp(input::USER)); } fn earliest_timestamp(input: &str) -> usize { let schedules = parse_input(input).1; let busses: Vec<(usize, usize)> = schedules .iter() .enumerate() .filter_map(|(index, schedule)| match schedule { Schedule::Bus(bus_id) => Some((index, *bus_id)), _ => None, }) .collect(); busses[1..] .iter() .fold((0, busses[0].1), |(offset1, step1), (offset2, step2)| { let mut times1 = 1usize; let mut times2 = 0usize; loop { let diff = (offset1 + offset2 + times1 * step1) as isize - (times2 * step2) as isize; match diff.signum() { 0 => return (offset1 + times1 * step1, step1.lcm(step2)), -1 => times1 += (diff.abs() as usize).div_ceil(&step1), 1 => times2 += (diff.abs() as usize).div_ceil(&step2), _ => unreachable!(), } } }) .0 } #[test] fn test_examples() { assert_eq!(earliest_timestamp(input::EXAMPLE), 1068781); assert_eq!(earliest_timestamp("0\n17,x,13"), 102); assert_eq!(earliest_timestamp("0\n17,x,13,19"), 3417); assert_eq!(earliest_timestamp("0\n67,7,59,61"), 754018); assert_eq!(earliest_timestamp("0\n67,x,7,59,61"), 779210); assert_eq!(earliest_timestamp("0\n67,7,x,59,61"), 1261476); assert_eq!(earliest_timestamp("0\n1789,37,47,1889"), 1202161486); }
true
a0c4f1677b06e646e38a8c4ea265ee36ffbaf265
Rust
ArnaudValensi/rust-opengl-game-engine
/src/voxel/voxel_mesh_builder.rs
UTF-8
1,339
2.859375
3
[]
no_license
use mesh_data::MeshData; use super::chunk::Chunk; use super::direction::Direction; use super::position::Position; use super::voxel_geometry::{add_quad_triangles, create_vertex_position_face}; pub fn build_mesh(chunk: &Chunk) -> MeshData { // TODO: Instanciate mesh_data with_capacity. let mut mesh_data = MeshData::new(); for x in 0..chunk.size_x { for y in 0..chunk.size_y { for z in 0..chunk.size_z { if chunk.is_solid(i64::from(x), i64::from(y), i64::from(z)) { voxel_data(chunk, i64::from(x), i64::from(y), i64::from(z), &mut mesh_data); } } } } mesh_data } fn voxel_data<'a>(chunk: &Chunk, x: i64, y: i64, z: i64, mesh_data: &'a mut MeshData) { let position: Position = Position::new(x, y, z); for direction in Direction::iterator() { let touching_pos: Position = position.add_direction(*direction); // Build the face if there is no touching cube or if is the side of the chunk. if chunk.is_position_out_of_bound(&touching_pos) || chunk.is_position_air(&touching_pos) { let color_index = chunk.get_voxel(x, y, z).unwrap(); create_vertex_position_face(mesh_data, &position, color_index, *direction); add_quad_triangles(mesh_data); } } }
true
f9d7b5bf1f8607518f2c6edb12fcad4f95cabbd3
Rust
Techcable/mcp-database
/src/utils.rs
UTF-8
1,503
2.59375
3
[]
no_license
use std::{fmt, slice, mem};
use std::marker::PhantomData;
use serde::de::{self, Deserializer, Visitor};

pub unsafe trait TransmuteFixedBytes {}

#[inline]
pub fn deserialize_borrowed_list<'de, T, D>(deserializer: D) -> Result<&'de [T], D::Error>
    where T: TransmuteFixedBytes + 'de, D: Deserializer<'de> {
    struct FixedBytesList<T>(PhantomData<T>);

    impl<'de, T: TransmuteFixedBytes + 'de> Visitor<'de> for FixedBytesList<T> {
        type Value = &'de [T];

        fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
            formatter.write_str("a byte string whose length is a multiple of the element size")
        }

        fn visit_borrowed_bytes<E>(self, v: &'de [u8]) -> Result<Self::Value, E>
            where
                E: de::Error,
        {
            if v.len() % mem::size_of::<T>() == 0 {
                // Safety: `TransmuteFixedBytes` promises that `T` may be reinterpreted
                // from raw bytes; the input buffer must also be suitably aligned for `T`.
                unsafe {
                    Ok(slice::from_raw_parts(
                        v.as_ptr() as *const T,
                        v.len() / mem::size_of::<T>()
                    ))
                }
            } else {
                Err(E::invalid_length(v.len(), &"a multiple of mem::size_of::<T>()"))
            }
        }
    }

    deserializer.deserialize_bytes(FixedBytesList::<T>(PhantomData))
}

#[inline]
pub fn binary_search_left_by_key<T, B, F>(target: &[T], b: &B, func: F) -> Option<(usize, &T)>
    where B: Ord, F: FnMut(&T) -> B {
    match target.binary_search_by_key(b, func) {
        Ok(index) => Some((index, unsafe { target.get_unchecked(index) })),
        Err(index) => target.get(index).map(|value| (index, value)),
    }
}
true
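A hypothetical wiring sketch for the helper above; VersionId and Index are illustrative names only, the items above are assumed to be in scope, and the serde format must expose borrowed bytes from a buffer aligned for u32.

#[derive(Clone, Copy)]
#[repr(transparent)]
pub struct VersionId(pub u32);

// Safety (assumed): VersionId is plain old data with no padding and no invalid bit patterns.
unsafe impl TransmuteFixedBytes for VersionId {}

#[derive(serde::Deserialize)]
pub struct Index<'a> {
    #[serde(borrow, deserialize_with = "deserialize_borrowed_list")]
    pub versions: &'a [VersionId],
}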
7d7162f616799057a97bcaadf14dd4f33cde7344
Rust
spacebox-org/spacebox
/src/fs/scan.rs
UTF-8
1,832
3.46875
3
[ "MIT" ]
permissive
use std::path::{Path}; use std::fs; pub enum FileChange<P: AsRef<Path>> { Creation(P), Deletion(P), Modification(P), } /// Scans a given directory recursively for changes in files by comparing them against /// a database of known hashes of the files. /// /// If a file is not found in the database of known files, then it is considered new, /// and is tagged as `FileChange::Creation(path)`. /// /// If a file is in the database, but has a different stored hash then it is considered /// modified and is tagged as `FileChange::Modification(path)`. /// /// If a path is in the database, but not found at the expected path, it is considered /// to have been deleted and tagged as `FileChange::Deletion(path)`. pub fn scan_directory<P: AsRef<Path>>(path: P, cache_file: P) -> Vec<FileChange<P>> { unimplemented!() } #[cfg(test)] mod tests { use super::*; use std::fs; #[test] /// Tests the case when there is no metadata file present, in which case, everything /// should be considered as a new file. fn no_metadata() { } #[test] /// Tests the case when there are no changes to the directory, in which case, the /// return `Vec<FileChange<P>>` should be empty. fn no_changes() { } #[test] /// Tests when there are a few, unmodified files in a single layer directory and /// several more are added. Should return a `Vec<FileChange<P>>` with only /// `FileChange::Creation` fn file_additions() { } #[test] fn file_modifications() { } #[test] fn file_deletions() { } #[test] /// Tests a mix of addition, deletion, and modification in a single layer directory fn basic_scan() { } #[test] fn file_moves() { } #[test] fn file_copies() { } }
true
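scan_directory above is left unimplemented; the following standalone sketch shows one way the documented behaviour could be realized. It is not the crate's implementation: the hash function (DefaultHasher), the tab-separated cache format, and the switch to PathBuf in the return type (newly discovered files need owned paths) are all assumptions.

use std::collections::hash_map::DefaultHasher;
use std::collections::HashMap;
use std::fs;
use std::hash::Hasher;
use std::io;
use std::path::{Path, PathBuf};

// Hash every regular file under `dir`, recursively.
fn hash_tree(dir: &Path, out: &mut HashMap<PathBuf, u64>) -> io::Result<()> {
    for entry in fs::read_dir(dir)? {
        let path = entry?.path();
        if path.is_dir() {
            hash_tree(&path, out)?;
        } else {
            let mut hasher = DefaultHasher::new();
            hasher.write(&fs::read(&path)?);
            out.insert(path, hasher.finish());
        }
    }
    Ok(())
}

// Compare the current state of `root` against a cache of "hash<TAB>path" lines.
pub fn scan_directory_sketch(
    root: &Path,
    cache_file: &Path,
) -> io::Result<Vec<FileChange<PathBuf>>> {
    let mut known: HashMap<PathBuf, u64> = HashMap::new();
    if let Ok(cache) = fs::read_to_string(cache_file) {
        for line in cache.lines() {
            if let Some((hash, path)) = line.split_once('\t') {
                if let Ok(hash) = hash.parse() {
                    known.insert(PathBuf::from(path), hash);
                }
            }
        }
    }

    let mut current = HashMap::new();
    hash_tree(root, &mut current)?;

    let mut changes = Vec::new();
    for (path, hash) in &current {
        match known.get(path) {
            None => changes.push(FileChange::Creation(path.clone())),
            Some(old) if old != hash => changes.push(FileChange::Modification(path.clone())),
            Some(_) => {}
        }
    }
    for path in known.keys() {
        if !current.contains_key(path) {
            changes.push(FileChange::Deletion(path.clone()));
        }
    }
    Ok(changes)
}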
8c4c575b012c96bb2f323e1d1df361159a50da44
Rust
amazingefren/rust-notes
/concepts/vectors/src/main.rs
UTF-8
3,614
3.640625
4
[ "MIT" ]
permissive
#![allow(dead_code)] use std::collections::HashMap; fn main() { let mut scores = HashMap::new(); scores.insert(String::from("Blue"), 10); scores.insert(String::from("Yellow"), 50); scores.insert(String::from("Blue"), 30); scores.entry(String::from("Red")).or_insert(0); // let teams = vec![String::from("Blue"), String::from("Yellow")]; // let initial_scores = vec![10, 50]; // let vector_scores = HashMap<_, _> = teams.into.iter().zip(initial_scores.into_iter()).collect(); // let team_name = String::from("Blue"); // let score = scores.get(&team_name); for (key, value) in &scores { println!("{}: {}", key, value); } //Updating Value using original value let text = "Hello World Wonderful World"; let mut map = HashMap::new(); for word in text.split_whitespace() { let count = map.entry(word).or_insert(0); *count += 1; } println!("{:?}", map); } // Storing Strigs UTF-8 fn _string_main() { // let mut s1 = String::new(); let data = "initial contents"; let mut s1 = data.to_string(); let s2 = "initial contents".to_string(); println!("{}", s1); println!("{}", s2); s1 = "Hello World".to_string(); println!("{}", s1); s1.push('!'); println!("{}", s1); s1.push_str(" Hi From Efren"); println!("{}", s1); let _hello = String::from("السلام عليكم"); let _hello = String::from("Dobrý den"); let str1 = String::from("Hello"); let mut str2 = String::from("שָׁלוֹם"); let str3 = str1 + &str2; println!("{}", str3); // str1.push('h'); str2.push('h'); let _hello = String::from("नमस्ते"); let _hello = String::from("こんにちは"); println!("{}", _hello); let _hello1 = String::from("안녕하세요"); let _hello2 = String::from("你好"); let _hello3 = String::from("Olá"); let hello_formatted = format!("{}, {}, {}", _hello1, _hello2, _hello3); println!("Ownership not taken ? {}", _hello1); println!("{}", hello_formatted); // let hello_russia = String::from("Здравствуйте"); let hello_russia = "Здравствуйте"; // let first_russian = &hello_russia[0]; will not work because utf-8 encoding means first character ranges past 1 byte let first_russian = &hello_russia[0..8]; println!("First Russian: {}", first_russian); let hello_spanish = String::from("Hola"); for c in hello_spanish.chars() { println!("{}", c); } for c in hello_spanish.bytes() { println!("{}", c); } } // Storing values with Vectors fn _values_main() { let _v: Vec<i32> = Vec::new(); let mut v1 = vec![1, 2, 3]; v1.push(4); v1.push(5); let three: &i32 = &v1[2]; println!("{}", three); match v1.get(2) { Some(third) => println!("The third element is {}", third), None => println!("There is no element there"), } let mut v2 = vec![1, 2, 3, 4, 5]; let first = &v2[0]; println!("The first element is {}", first); v2.push(6); for number in &v2 { println!("{}", number); } v2.push(7); println!("--------------------"); for number in &mut v2 { *number += 50; } for number in &v2 { println!("{}", number); } #[derive(Debug)] enum SpreadsheetCell { Int(i32), Float(f64), Text(String), } let row = vec![ SpreadsheetCell::Int(3), SpreadsheetCell::Float(0.001), SpreadsheetCell::Text(String::from("Hello World")), ]; println!("{:?}", &row[2]) }
true
02828d90fbb621fbe7b40a7c97bcbc4d71c4c599
Rust
knknkn1162/sutramaking-rustbyexample
/src/primitives/arr_slice.rs
UTF-8
748
3.453125
3
[]
no_license
use std::mem;

fn analyze_slice(slice: &[i32]) {
    println!("first element of the slice : {}", slice[0]);
    println!("the slice has {} elements", slice.len());
}

pub fn test() {
    let xs: [i32; 5] = [1,2,3,4,5];
    let ys: [i32; 500] = [0;500];
    println!("first element of the array: {}", xs[0]);
    println!("second element of the array: {}", xs[1]);
    println!("array size: {}", xs.len());

    println!("array occupies {} bytes", mem::size_of_val(&xs));

    println!("borrow a section of the array as a slice");
    analyze_slice(&ys[1..4]);

    // panics: out-of-bounds access
    // println!("{}", xs[5]);

    // `[i32]` does not have a constant size known at compile-time
    //println!("{:?}", ys[1..4]);
    println!("{:?}", &ys[1..4]);
}
true
f19ba0ecb5514d1a357a6af54badb0c812a98b18
Rust
zoispag/amka-rs
/src/lib.rs
UTF-8
2,394
3.375
3
[ "MIT" ]
permissive
/*! A validator for greek social security number (AMKA) More information is available on [AMKA.gr](https://www.amka.gr/tieinai.html). */ use chrono::format::ParseError; use chrono::NaiveDate; use luhn; fn is_string_numeric(str: String) -> bool { for c in str.chars() { if !c.is_numeric() { return false; } } true } /// Validates the given string for AMKA pub fn validate(amka: &str) -> (bool, &str) { if !is_string_numeric(amka.to_string()) { return (false, "Provided AMKA is not a numeric value"); } if amka.chars().count() != 11 { return (false, "Provided number is not 11 digits long"); } let check_date = || -> Result<(), ParseError> { let date_part = &amka[..6]; NaiveDate::parse_from_str(date_part, "%d%m%y")?; Ok(()) }; if let Err(_err) = check_date() { return ( false, "First 6 digits of the provided AMKA do not represent a valid date", ); } if !luhn::valid(amka) { return (false, "Provided AMKA does not have a valid checkdigit"); } (true, "") } #[cfg(test)] mod tests { use crate::validate; #[test] fn it_validates_valid_amka() { let (is_valid, _err) = validate("09095986684"); assert_eq!(true, is_valid); } #[test] fn it_fails_when_not_a_number() { let (is_valid, _err) = validate("asvs"); assert_eq!(false, is_valid); } #[test] fn it_fails_when_short_number() { let (is_valid, _err) = validate("09095986"); assert_eq!(false, is_valid); } #[test] fn it_fails_when_long_number() { let (is_valid, _err) = validate("090959866845"); assert_eq!(false, is_valid); } #[test] fn it_fails_when_not_a_valid_date() { let (is_valid, _err) = validate("39095986681"); assert_eq!(false, is_valid); } #[test] fn it_fails_with_bad_checkdigit() { let (is_valid, _err) = validate("09095986680"); assert_eq!(false, is_valid); } #[test] fn readme() { // An invalid AMKA let (is_valid, err) = validate("09095986680"); assert!(!is_valid); println!("{}", err); // An valid AMKA let (is_valid, err) = validate("09095986684"); assert!(is_valid); assert_eq!("", err) } }
true
766f6ea9473e64115e9c8fa80d6d41594b2d281c
Rust
Ninjani/advent-of-code-2018
/src/day9.rs
UTF-8
6,035
2.734375
3
[]
no_license
#![allow(dead_code)] use hashbrown::HashMap; use itertools::Itertools; #[aoc_generator(day9)] pub fn generate_day9(input: &str) -> Box<(usize, usize)> { let parts: Vec<_> = input.split_whitespace().collect(); Box::new((parts[0].parse().unwrap(), parts[6].parse().unwrap())) } #[allow(dead_code)] fn get_circular_index(length: usize, offset: isize) -> usize { let new_index = offset; if new_index >= 0 && new_index < length as isize { new_index as usize } else if new_index < 0 { length - (new_index.abs() as usize) } else { (new_index as usize) - length } } //#[aoc(day9, part1)] pub fn test_day9(_: &(usize, usize)) -> usize { println!("{} 32", solve_day9_part1(&(9, 25))); println!("{} 8317", solve_day9_part1(&(10, 1618))); println!("{} 146373", solve_day9_part1(&(13, 7999))); println!("{} 2764", solve_day9_part1(&(17, 1104))); println!("{} 54718", solve_day9_part1(&(21, 6111))); println!("{} 37305", solve_day9_part1(&(30, 5807))); 0 } pub struct GameCircle { circle: Vec<usize>, num_marbles: usize, num_players: usize, num_marbles_used: usize, current_marble_index: usize, current_player: usize, scores: HashMap<usize, usize>, } impl GameCircle { fn new(num_players: usize, num_marbles: usize) -> Self { GameCircle { circle: vec![0], num_marbles, num_players, num_marbles_used: 1, current_marble_index: 0, current_player: 1, scores: HashMap::new(), } } fn move_player(&mut self, offset: usize) { self.current_player += offset; if self.current_player > self.num_players { self.current_player -= self.num_players; } } fn add_score(&mut self, score: usize) { *self.scores.entry(self.current_player).or_insert(0) += score; } fn get_circular_index(&self, offset: isize) -> usize { let new_index = offset; if new_index >= 0 && new_index < self.circle.len() as isize { new_index as usize } else if new_index < 0 { self.circle.len() - (new_index.abs() as usize) } else { (new_index as usize) - self.circle.len() } } fn interleave(&mut self, interleave_index: usize, num_to_add: usize) { self.circle = [ &self.circle[..interleave_index], &(interleave_index..self.circle.len()) .map(|i| self.circle[i]) .interleave((0..num_to_add).map(|i| i + self.num_marbles_used)) .collect::<Vec<_>>(), ] .concat(); self.num_marbles_used += num_to_add; self.current_marble_index = interleave_index + num_to_add * 2 - 1; self.move_player(num_to_add + 1); } fn next_circle(&mut self) { let interleave_index = self.get_circular_index(self.current_marble_index as isize + 1); let num_to_add = (self.circle.len() - interleave_index).min(self.num_marbles - self.num_marbles_used); if let Some((index, marble)) = (0..num_to_add) .map(|i| (i, self.num_marbles_used + i)) .find(|(_, m)| *m % 23 == 0) { self.interleave(interleave_index, index); // player scores let remove_marble_index = self.get_circular_index(self.current_marble_index as isize - 7); let remove_marble = self.circle[remove_marble_index]; self.circle = [ &self.circle[..remove_marble_index], &self.circle[remove_marble_index + 1..], ] .concat(); self.add_score(marble + remove_marble); self.num_marbles_used += 1; self.current_marble_index = self.get_circular_index(remove_marble_index as isize); self.move_player(1); // continue } else { self.interleave(interleave_index, num_to_add); } } } fn get_max_score_2(num_players: usize, num_marbles: usize) -> usize { let mut game = GameCircle::new(num_players, num_marbles); loop { // println!("{:?}", game.circle); if game.num_marbles_used >= num_marbles { break; } game.next_circle() } *game.scores.values().max().unwrap() } #[allow(dead_code)] fn 
get_max_score(num_players: usize, num_marbles: usize) -> usize { let mut circle = vec![0]; let mut current_marble_index = 0; let mut current_player = 0; let mut scores = HashMap::new(); for i in 1..=num_marbles { current_player += 1; if current_player >= num_players { current_player = 0; } if i % 23 == 0 { let remove_marble_index = get_circular_index(circle.len(), current_marble_index as isize - 7); let remove_marble = circle[remove_marble_index]; circle = [ &circle[..remove_marble_index], &circle[remove_marble_index + 1..], ] .concat(); *scores.entry(current_player).or_insert(0) += i + remove_marble; current_marble_index = get_circular_index(circle.len(), remove_marble_index as isize); } else { let insert_index = get_circular_index(circle.len(), current_marble_index as isize + 1); circle = [ &circle[..=insert_index], &[i], &circle[(insert_index + 1)..], ] .concat(); current_marble_index = get_circular_index(circle.len(), insert_index as isize + 1); } } *scores.values().max().unwrap() } #[aoc(day9, part1)] pub fn solve_day9_part1(input: &(usize, usize)) -> usize { let (num_players, num_marbles) = (input.0, input.1); get_max_score(num_players, num_marbles) } #[aoc(day9, part2)] pub fn solve_day9_part2(input: &(usize, usize)) -> usize { get_max_score(input.0, input.1 * 100) }
true
1b12cfa0940e80207e6e1a1ad98a78e9587aa501
Rust
uutils/coreutils
/src/uu/test/src/parser.rs
UTF-8
15,254
3.03125
3
[ "MIT", "GPL-1.0-or-later", "GPL-3.0-or-later" ]
permissive
// This file is part of the uutils coreutils package. // // For the full copyright and license information, please view the LICENSE // file that was distributed with this source code. // spell-checker:ignore (grammar) BOOLOP STRLEN FILETEST FILEOP INTOP STRINGOP ; (vars) LParen StrlenOp use std::ffi::{OsStr, OsString}; use std::iter::Peekable; use super::error::{ParseError, ParseResult}; use uucore::display::Quotable; /// Represents one of the binary comparison operators for strings, integers, or files #[derive(Debug, PartialEq, Eq)] pub enum Operator { String(OsString), Int(OsString), File(OsString), } /// Represents one of the unary test operators for strings or files #[derive(Debug, PartialEq, Eq)] pub enum UnaryOperator { StrlenOp(OsString), FiletestOp(OsString), } /// Represents a parsed token from a test expression #[derive(Debug, PartialEq, Eq)] pub enum Symbol { LParen, Bang, BoolOp(OsString), Literal(OsString), Op(Operator), UnaryOp(UnaryOperator), None, } impl Symbol { /// Create a new Symbol from an OsString. /// /// Returns Symbol::None in place of None fn new(token: Option<OsString>) -> Self { match token { Some(s) => match s.to_str() { Some(t) => match t { "(" => Self::LParen, "!" => Self::Bang, "-a" | "-o" => Self::BoolOp(s), "=" | "==" | "!=" => Self::Op(Operator::String(s)), "-eq" | "-ge" | "-gt" | "-le" | "-lt" | "-ne" => Self::Op(Operator::Int(s)), "-ef" | "-nt" | "-ot" => Self::Op(Operator::File(s)), "-n" | "-z" => Self::UnaryOp(UnaryOperator::StrlenOp(s)), "-b" | "-c" | "-d" | "-e" | "-f" | "-g" | "-G" | "-h" | "-k" | "-L" | "-N" | "-O" | "-p" | "-r" | "-s" | "-S" | "-t" | "-u" | "-w" | "-x" => { Self::UnaryOp(UnaryOperator::FiletestOp(s)) } _ => Self::Literal(s), }, None => Self::Literal(s), }, None => Self::None, } } /// Convert this Symbol into a Symbol::Literal, useful for cases where /// test treats an operator as a string operand (test has no reserved /// words). /// /// # Panics /// /// Panics if `self` is Symbol::None fn into_literal(self) -> Self { Self::Literal(match self { Self::LParen => OsString::from("("), Self::Bang => OsString::from("!"), Self::BoolOp(s) | Self::Literal(s) | Self::Op(Operator::String(s)) | Self::Op(Operator::Int(s)) | Self::Op(Operator::File(s)) | Self::UnaryOp(UnaryOperator::StrlenOp(s)) | Self::UnaryOp(UnaryOperator::FiletestOp(s)) => s, Self::None => panic!(), }) } } /// Implement Display trait for Symbol to make it easier to print useful errors. /// We will try to match the format in which the symbol appears in the input. impl std::fmt::Display for Symbol { /// Format a Symbol for printing fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let s = match &self { Self::LParen => OsStr::new("("), Self::Bang => OsStr::new("!"), Self::BoolOp(s) | Self::Literal(s) | Self::Op(Operator::String(s)) | Self::Op(Operator::Int(s)) | Self::Op(Operator::File(s)) | Self::UnaryOp(UnaryOperator::StrlenOp(s)) | Self::UnaryOp(UnaryOperator::FiletestOp(s)) => OsStr::new(s), Self::None => OsStr::new("None"), }; write!(f, "{}", s.quote()) } } /// Recursive descent parser for test, which converts a list of OsStrings /// (typically command line arguments) into a stack of Symbols in postfix /// order. /// /// Grammar: /// /// EXPR → TERM | EXPR BOOLOP EXPR /// TERM → ( EXPR ) /// TERM → ! 
EXPR /// TERM → UOP str /// UOP → STRLEN | FILETEST /// TERM → str OP str /// TERM → str | 𝜖 /// OP → STRINGOP | INTOP | FILEOP /// STRINGOP → = | == | != /// INTOP → -eq | -ge | -gt | -le | -lt | -ne /// FILEOP → -ef | -nt | -ot /// STRLEN → -n | -z /// FILETEST → -b | -c | -d | -e | -f | -g | -G | -h | -k | -L | -N | -O | -p | /// -r | -s | -S | -t | -u | -w | -x /// BOOLOP → -a | -o /// #[derive(Debug)] struct Parser { tokens: Peekable<std::vec::IntoIter<OsString>>, pub stack: Vec<Symbol>, } impl Parser { /// Construct a new Parser from a `Vec<OsString>` of tokens. fn new(tokens: Vec<OsString>) -> Self { Self { tokens: tokens.into_iter().peekable(), stack: vec![], } } /// Fetch the next token from the input stream as a Symbol. fn next_token(&mut self) -> Symbol { Symbol::new(self.tokens.next()) } /// Consume the next token & verify that it matches the provided value. fn expect(&mut self, value: &str) -> ParseResult<()> { match self.next_token() { Symbol::Literal(s) if s == value => Ok(()), _ => Err(ParseError::Expected(value.quote().to_string())), } } /// Peek at the next token from the input stream, returning it as a Symbol. /// The stream is unchanged and will return the same Symbol on subsequent /// calls to `next()` or `peek()`. fn peek(&mut self) -> Symbol { Symbol::new(self.tokens.peek().map(|s| s.to_os_string())) } /// Test if the next token in the stream is a BOOLOP (-a or -o), without /// removing the token from the stream. fn peek_is_boolop(&mut self) -> bool { matches!(self.peek(), Symbol::BoolOp(_)) } /// Parse an expression. /// /// EXPR → TERM | EXPR BOOLOP EXPR fn expr(&mut self) -> ParseResult<()> { if !self.peek_is_boolop() { self.term()?; } self.maybe_boolop()?; Ok(()) } /// Parse a term token and possible subsequent symbols: "(", "!", UOP, /// literal, or None. fn term(&mut self) -> ParseResult<()> { let symbol = self.next_token(); match symbol { Symbol::LParen => self.lparen()?, Symbol::Bang => self.bang()?, Symbol::UnaryOp(_) => self.uop(symbol), Symbol::None => self.stack.push(symbol), literal => self.literal(literal)?, } Ok(()) } /// Parse a (possibly) parenthesized expression. /// /// test has no reserved keywords, so "(" will be interpreted as a literal /// in certain cases: /// /// * when found at the end of the token stream /// * when followed by a binary operator that is not _itself_ interpreted /// as a literal /// fn lparen(&mut self) -> ParseResult<()> { // Look ahead up to 3 tokens to determine if the lparen is being used // as a grouping operator or should be treated as a literal string let peek3: Vec<Symbol> = self .tokens .clone() .take(3) .map(|token| Symbol::new(Some(token))) .collect(); match peek3.as_slice() { // case 1: lparen is a literal when followed by nothing [] => { self.literal(Symbol::LParen.into_literal())?; Ok(()) } // case 2: error if end of stream is `( <any_token>` [symbol] => Err(ParseError::MissingArgument(format!("{symbol}"))), // case 3: `( uop <any_token> )` → parenthesized unary operation; // this case ensures we don’t get confused by `( -f ) )` // or `( -f ( )`, for example [Symbol::UnaryOp(_), _, Symbol::Literal(s)] if s == ")" => { let symbol = self.next_token(); self.uop(symbol); self.expect(")")?; Ok(()) } // case 4: binary comparison of literal lparen, e.g. `( != )` [Symbol::Op(_), Symbol::Literal(s)] | [Symbol::Op(_), Symbol::Literal(s), _] if s == ")" => { self.literal(Symbol::LParen.into_literal())?; Ok(()) } // case 5: after handling the prior cases, any single token inside // parentheses is a literal, e.g. 
`( -f )` [_, Symbol::Literal(s)] | [_, Symbol::Literal(s), _] if s == ")" => { let symbol = self.next_token(); self.literal(symbol)?; self.expect(")")?; Ok(()) } // case 6: two binary ops in a row, treat the first op as a literal [Symbol::Op(_), Symbol::Op(_), _] => { let symbol = self.next_token(); self.literal(symbol)?; self.expect(")")?; Ok(()) } // case 7: if earlier cases didn’t match, `( op <any_token>…` // indicates binary comparison of literal lparen with // anything _except_ ")" (case 4) [Symbol::Op(_), _] | [Symbol::Op(_), _, _] => { self.literal(Symbol::LParen.into_literal())?; Ok(()) } // Otherwise, lparen indicates the start of a parenthesized // expression _ => { self.expr()?; self.expect(")")?; Ok(()) } } } /// Parse a (possibly) negated expression. /// /// Example cases: /// /// * `! =`: negate the result of the implicit string length test of `=` /// * `! = foo`: compare the literal strings `!` and `foo` /// * `! = = str`: negate comparison of literal `=` and `str` /// * `!`: bang followed by nothing is literal /// * `! EXPR`: negate the result of the expression /// /// Combined Boolean & negation: /// /// * `! ( EXPR ) [BOOLOP EXPR]`: negate the parenthesized expression only /// * `! UOP str BOOLOP EXPR`: negate the unary subexpression /// * `! str BOOLOP str`: negate the entire Boolean expression /// * `! str BOOLOP EXPR BOOLOP EXPR`: negate the value of the first `str` term /// fn bang(&mut self) -> ParseResult<()> { match self.peek() { Symbol::Op(_) | Symbol::BoolOp(_) => { // we need to peek ahead one more token to disambiguate the first // three cases listed above let peek2 = Symbol::new(self.tokens.clone().nth(1)); match peek2 { // case 1: `! <OP as literal>` // case 3: `! = OP str` Symbol::Op(_) | Symbol::None => { // op is literal let op = self.next_token().into_literal(); self.literal(op)?; self.stack.push(Symbol::Bang); } // case 2: `<! as literal> OP str [BOOLOP EXPR]`. _ => { // bang is literal; parsing continues with op self.literal(Symbol::Bang.into_literal())?; self.maybe_boolop()?; } } } // bang followed by nothing is literal Symbol::None => self.stack.push(Symbol::Bang.into_literal()), _ => { // peek ahead up to 4 tokens to determine if we need to negate // the entire expression or just the first term let peek4: Vec<Symbol> = self .tokens .clone() .take(4) .map(|token| Symbol::new(Some(token))) .collect(); match peek4.as_slice() { // we peeked ahead 4 but there were only 3 tokens left [Symbol::Literal(_), Symbol::BoolOp(_), Symbol::Literal(_)] => { self.expr()?; self.stack.push(Symbol::Bang); } _ => { self.term()?; self.stack.push(Symbol::Bang); } } } } Ok(()) } /// Peek at the next token and parse it as a BOOLOP or string literal, /// as appropriate. fn maybe_boolop(&mut self) -> ParseResult<()> { if self.peek_is_boolop() { let symbol = self.next_token(); // BoolOp by itself interpreted as Literal if let Symbol::None = self.peek() { self.literal(symbol.into_literal())?; } else { self.boolop(symbol)?; self.maybe_boolop()?; } } Ok(()) } /// Parse a Boolean expression. /// /// Logical and (-a) has higher precedence than or (-o), so in an /// expression like `foo -o '' -a ''`, the and subexpression is evaluated /// first. fn boolop(&mut self, op: Symbol) -> ParseResult<()> { if op == Symbol::BoolOp(OsString::from("-a")) { self.term()?; } else { self.expr()?; } self.stack.push(op); Ok(()) } /// Parse a (possible) unary argument test (string length or file /// attribute check). /// /// If a UOP is followed by nothing it is interpreted as a literal string. 
fn uop(&mut self, op: Symbol) { match self.next_token() { Symbol::None => self.stack.push(op.into_literal()), symbol => { self.stack.push(symbol.into_literal()); self.stack.push(op); } } } /// Parse a string literal, optionally followed by a comparison operator /// and a second string literal. fn literal(&mut self, token: Symbol) -> ParseResult<()> { self.stack.push(token.into_literal()); // EXPR → str OP str if let Symbol::Op(_) = self.peek() { let op = self.next_token(); match self.next_token() { Symbol::None => { return Err(ParseError::MissingArgument(format!("{op}"))); } token => self.stack.push(token.into_literal()), } self.stack.push(op); } Ok(()) } /// Parser entry point: parse the token stream `self.tokens`, storing the /// resulting `Symbol` stack in `self.stack`. fn parse(&mut self) -> ParseResult<()> { self.expr()?; match self.tokens.next() { Some(token) => Err(ParseError::ExtraArgument(token.quote().to_string())), None => Ok(()), } } } /// Parse the token stream `args`, returning a `Symbol` stack representing the /// operations to perform in postfix order. pub fn parse(args: Vec<OsString>) -> ParseResult<Vec<Symbol>> { let mut p = Parser::new(args); p.parse()?; Ok(p.stack) }
true
e159984aa9b829c1a3da446f969cedb8e0fc27d0
Rust
mescam/aoc2020
/day6/day6.rs
UTF-8
1,333
3.265625
3
[]
no_license
use std::fs; use std::collections::HashSet; use std::iter::FromIterator; fn parse(content: &String) -> Vec<HashSet<char>> { let mut vec: Vec<HashSet<char>> = Vec::new(); let groups_it = content .split("\n\n"); for group in groups_it { let mut ans: HashSet<char> = HashSet::new(); let merged = group.replace("\n", ""); for c in merged.chars() { ans.insert(c); } vec.push(ans); } vec } fn parse2(content: &String) -> Vec<HashSet<char>> { let mut vec: Vec<HashSet<char>> = Vec::new(); let groups_it = content .split("\n\n"); for group in groups_it { let mut ans: HashSet<char> = HashSet::from_iter("abcdefghijklmnopqrstuvwxyz".chars()); for person in group.lines() { ans = ans.intersection(&HashSet::from_iter(person.chars())).copied().collect(); } vec.push(ans); } vec } fn main() { let contents = fs::read_to_string("input.txt") .expect("error loading file"); let answers = parse(&contents); let answers2 = parse2(&contents); // part 1 let result1 = answers.iter().fold(0, |acc, x| acc + x.len()); println!("result1 = {}", result1); // part 2 let result2 = answers2.iter().fold(0, |acc, x| acc + x.len()); println!("result2 = {}", result2); }
true
84d7e055e5025c2e9c93d9c0870fe35e7af67da6
Rust
aDotInTheVoid/noria
/server/mir/src/rewrite.rs
UTF-8
6,327
2.796875
3
[ "MIT", "Apache-2.0" ]
permissive
use crate::column::Column; use crate::query::MirQuery; use crate::MirNodeRef; use std::collections::HashMap; fn has_column(n: &MirNodeRef, column: &Column) -> bool { if n.borrow().columns().contains(column) { return true; } else { for a in n.borrow().ancestors() { if has_column(a, column) { return true; } } } false } pub(super) fn make_universe_naming_consistent( q: &mut MirQuery, table_mapping: &HashMap<(String, Option<String>), String>, base_name: String, ) { let mut queue = Vec::new(); let new_q = q.clone(); queue.push(q.leaf.clone()); let leaf_node: MirNodeRef = new_q.leaf; let mut nodes_to_check: Vec<MirNodeRef> = Vec::new(); nodes_to_check.push(leaf_node.clone()); // get the node that is the base table of the universe let mut base_node: MirNodeRef = leaf_node.clone(); while !nodes_to_check.is_empty() { let node_to_check = nodes_to_check.pop().unwrap(); if node_to_check.borrow().name == base_name { base_node = node_to_check; break; } for parent in node_to_check.borrow().ancestors() { nodes_to_check.push(parent.clone()); } } let mut nodes_to_rewrite: Vec<MirNodeRef> = Vec::new(); nodes_to_rewrite.push(base_node.clone()); while !nodes_to_rewrite.is_empty() { let node_to_rewrite = nodes_to_rewrite.pop().unwrap(); for col in &mut node_to_rewrite.borrow_mut().columns { let mut _res = { match col.table { Some(ref table) => { let key = (col.name.to_owned(), Some(table.to_owned())); table_mapping.get(&key).cloned() } None => None, } }; } for child in node_to_rewrite.borrow().children() { nodes_to_rewrite.push(child.clone()); } } } pub(super) fn pull_required_base_columns( q: &mut MirQuery, table_mapping: Option<&HashMap<(String, Option<String>), String>>, sec: bool, ) { let mut queue = Vec::new(); queue.push(q.leaf.clone()); if sec { match table_mapping { Some(_) => (), None => panic!("no table mapping computed, but in secure universe."), } } while !queue.is_empty() { let mn = queue.pop().unwrap(); // a node needs all of the columns it projects into its output // however, it may also need *additional* columns to perform its functionality; consider, // e.g., a filter that filters on a column that it doesn't project let needed_columns: Vec<Column> = mn .borrow() .referenced_columns() .into_iter() .filter(|c| { !mn.borrow() .ancestors() .iter() .any(|a| a.borrow().columns().iter().any(|ac| ac == c)) }) .collect(); let mut found: Vec<&Column> = Vec::new(); match table_mapping { Some(ref map) => { for ancestor in mn.borrow().ancestors() { if ancestor.borrow().ancestors().is_empty() { // base, do nothing continue; } for c in &needed_columns { match c.table { Some(ref table) => { let key = (c.name.to_owned(), Some(table.to_owned())); if !map.contains_key(&key) && !found.contains(&c) && has_column(ancestor, c) { ancestor.borrow_mut().add_column(c.clone()); found.push(c); } } None => { if !map.contains_key(&(c.name.to_owned(), None)) && !found.contains(&c) && has_column(ancestor, c) { ancestor.borrow_mut().add_column(c.clone()); found.push(c); } } } } queue.push(ancestor.clone()); } } None => { for ancestor in mn.borrow().ancestors() { if ancestor.borrow().ancestors().is_empty() { // base, do nothing continue; } for c in &needed_columns { if !found.contains(&c) && has_column(ancestor, c) { ancestor.borrow_mut().add_column(c.clone()); found.push(c); } } queue.push(ancestor.clone()); } } } } } // currently unused #[allow(dead_code)] pub(super) fn push_all_base_columns(q: &mut MirQuery) { let mut queue = Vec::new(); queue.extend(q.roots.clone()); while !queue.is_empty() { let mn = queue.pop().unwrap(); 
let columns = mn.borrow().columns().to_vec(); for child in mn.borrow().children() { // N.B. this terminates before reaching the actual leaf, since the last node of the // query (before the MIR `Leaf` node) already carries the query name. (`Leaf` nodes are // virtual nodes that will be removed and converted into materializations.) if child.borrow().versioned_name() == q.leaf.borrow().versioned_name() { continue; } for c in &columns { // push through if the child doesn't already have this column if !child.borrow().columns().contains(c) { child.borrow_mut().add_column(c.clone()); } } queue.push(child.clone()); } } }
true
9cee99edb93e6811a153d2e2e2e3bed5a3ba9f0e
Rust
rome/tools
/crates/rome_js_syntax/src/jsx_ext.rs
UTF-8
17,119
3.15625
3
[ "MIT" ]
permissive
use std::collections::HashSet; use crate::{ inner_string_text, static_value::StaticValue, AnyJsxAttribute, AnyJsxAttributeName, AnyJsxAttributeValue, AnyJsxChild, AnyJsxElementName, JsSyntaxToken, JsxAttribute, JsxAttributeList, JsxElement, JsxName, JsxOpeningElement, JsxSelfClosingElement, JsxString, }; use rome_rowan::{declare_node_union, AstNode, AstNodeList, SyntaxResult, TokenText}; impl JsxString { /// Get the inner text of a string not including the quotes /// /// ## Examples /// /// ``` /// use rome_js_factory::make; /// use rome_rowan::TriviaPieceKind; /// ///let string = make::jsx_string(make::jsx_string_literal("button") /// .with_leading_trivia(vec![(TriviaPieceKind::Whitespace, " ")])); /// assert_eq!(string.inner_string_text().unwrap().text(), "button"); /// ``` pub fn inner_string_text(&self) -> SyntaxResult<TokenText> { Ok(inner_string_text(&self.value_token()?)) } } impl JsxOpeningElement { /// Find and return the `JsxAttribute` that matches the given name /// /// ## Examples /// /// ``` /// /// use rome_js_factory::make; /// use rome_js_factory::make::{ident, jsx_attribute, jsx_name, jsx_opening_element, token, jsx_attribute_list}; /// use rome_js_syntax::{AnyJsxAttribute, AnyJsxAttributeName, AnyJsxElementName, T}; /// /// let div = AnyJsxAttribute::JsxAttribute(jsx_attribute( /// AnyJsxAttributeName::JsxName( /// jsx_name(ident("div")) /// ) /// ).build()); /// /// let img = AnyJsxAttribute::JsxAttribute(jsx_attribute( /// AnyJsxAttributeName::JsxName( /// jsx_name(ident("img")) /// ) /// ).build()); /// /// let attributes = jsx_attribute_list(vec![ /// div, /// img /// ]); /// /// let opening_element = jsx_opening_element( /// token(T![<]), /// AnyJsxElementName::JsxName( /// jsx_name(ident("Test")) /// ), /// attributes, /// token(T![>]), /// ).build(); /// /// assert_eq!(opening_element.find_attribute_by_name("div").unwrap().is_some(), true); /// assert_eq!(opening_element.find_attribute_by_name("img").unwrap().is_some(), true); /// assert_eq!(opening_element.find_attribute_by_name("p").unwrap().is_some(), false); /// ``` /// pub fn find_attribute_by_name( &self, name_to_lookup: &str, ) -> SyntaxResult<Option<JsxAttribute>> { self.attributes().find_by_name(name_to_lookup) } /// It checks if current attribute has a trailing spread props /// /// ## Examples /// /// ``` /// use rome_js_factory::make; /// use rome_js_factory::make::{ident, jsx_attribute, jsx_name, jsx_opening_element, token, jsx_attribute_list, jsx_self_closing_element, jsx_spread_attribute, jsx_ident, js_identifier_expression, js_reference_identifier}; /// use rome_js_syntax::{AnyJsExpression, AnyJsxAttribute, AnyJsxAttributeName, AnyJsxElementName, T}; /// /// let div = AnyJsxAttribute::JsxAttribute(jsx_attribute( /// AnyJsxAttributeName::JsxName( /// jsx_name(ident("div")) /// ) /// ).build()); /// /// let spread = AnyJsxAttribute::JsxSpreadAttribute(jsx_spread_attribute( /// token(T!['{']), /// token(T![...]), /// AnyJsExpression::JsIdentifierExpression(js_identifier_expression( /// js_reference_identifier(ident("spread")) /// )), /// token(T!['}']), /// )); /// /// /// /// let attributes = jsx_attribute_list(vec![ /// div, /// spread /// ]); /// /// let opening_element = jsx_opening_element( /// token(T![<]), /// AnyJsxElementName::JsxName( /// jsx_name(ident("Test")) /// ), /// attributes, /// token(T![>]), /// ).build(); /// /// let div = opening_element.find_attribute_by_name("div").unwrap().unwrap(); /// assert!(opening_element.has_trailing_spread_prop(div.clone())); /// ``` pub fn 
has_trailing_spread_prop(&self, current_attribute: impl Into<AnyJsxAttribute>) -> bool { self.attributes() .has_trailing_spread_prop(current_attribute) } /// Check if jsx element has a child that is accessible pub fn has_accessible_child(&self) -> bool { self.parent::<JsxElement>().map_or(false, |parent| { parent .children() .into_iter() .any(|child| child.is_accessible_node().unwrap_or(true)) }) } } impl JsxSelfClosingElement { /// Find and return the `JsxAttribute` that matches the given name /// /// ## Examples /// /// ``` /// /// use rome_js_factory::make; /// use rome_js_factory::make::{ident, jsx_attribute, jsx_name, jsx_opening_element, token, jsx_attribute_list, jsx_self_closing_element}; /// use rome_js_syntax::{AnyJsxAttribute, AnyJsxAttributeName, AnyJsxElementName, T}; /// /// let div = AnyJsxAttribute::JsxAttribute(jsx_attribute( /// AnyJsxAttributeName::JsxName( /// jsx_name(ident("div")) /// ) /// ).build()); /// /// let img = AnyJsxAttribute::JsxAttribute(jsx_attribute( /// AnyJsxAttributeName::JsxName( /// jsx_name(ident("img")) /// ) /// ).build()); /// /// let attributes = jsx_attribute_list(vec![ /// div, /// img /// ]); /// /// let opening_element = jsx_self_closing_element( /// token(T![<]), /// AnyJsxElementName::JsxName( /// jsx_name(ident("Test")) /// ), /// attributes, /// token(T![/]), /// token(T![>]), /// ).build(); /// /// assert_eq!(opening_element.find_attribute_by_name("div").unwrap().is_some(), true); /// assert_eq!(opening_element.find_attribute_by_name("img").unwrap().is_some(), true); /// assert_eq!(opening_element.find_attribute_by_name("p").unwrap().is_some(), false); /// ``` /// pub fn find_attribute_by_name( &self, name_to_lookup: &str, ) -> SyntaxResult<Option<JsxAttribute>> { self.attributes().find_by_name(name_to_lookup) } /// It checks if current attribute has a trailing spread props /// /// ## Examples /// /// ``` /// use rome_js_factory::make; /// use rome_js_factory::make::{ident, jsx_attribute, jsx_name, jsx_opening_element, token, jsx_attribute_list, jsx_self_closing_element, jsx_spread_attribute, jsx_ident, js_identifier_expression, js_reference_identifier}; /// use rome_js_syntax::{AnyJsExpression, AnyJsxAttribute, AnyJsxAttributeName, AnyJsxElementName, T}; /// /// let div = AnyJsxAttribute::JsxAttribute(jsx_attribute( /// AnyJsxAttributeName::JsxName( /// jsx_name(ident("div")) /// ) /// ).build()); /// /// let spread = AnyJsxAttribute::JsxSpreadAttribute(jsx_spread_attribute( /// token(T!['{']), /// token(T![...]), /// AnyJsExpression::JsIdentifierExpression(js_identifier_expression( /// js_reference_identifier(ident("spread")) /// )), /// token(T!['}']), /// )); /// /// /// /// let attributes = jsx_attribute_list(vec![ /// div, /// spread /// ]); /// /// let opening_element = jsx_self_closing_element( /// token(T![<]), /// AnyJsxElementName::JsxName( /// jsx_name(ident("Test")) /// ), /// attributes, /// token(T![/]), /// token(T![>]), /// ).build(); /// /// let div = opening_element.find_attribute_by_name("div").unwrap().unwrap(); /// assert!(opening_element.has_trailing_spread_prop(div.clone())); /// ``` pub fn has_trailing_spread_prop(&self, current_attribute: impl Into<AnyJsxAttribute>) -> bool { self.attributes() .has_trailing_spread_prop(current_attribute) } } impl JsxAttributeList { /// Finds and returns attributes `JsxAttribute` that matches the given names like [Self::find_by_name]. /// Only attributes with name as [JsxName] can be returned. /// /// Each name of "names_to_lookup" should be unique. 
/// /// Supports maximum of 16 names to avoid stack overflow. Each attribute will consume: /// /// - 8 bytes for the `Option<JsxAttribute>` result; /// - plus 16 bytes for the [&str] argument. pub fn find_by_names<const N: usize>( &self, names_to_lookup: [&str; N], ) -> [Option<JsxAttribute>; N] { // assert there are no duplicates debug_assert!(HashSet::<_>::from_iter(names_to_lookup).len() == N); debug_assert!(N <= 16); const INIT: Option<JsxAttribute> = None; let mut results = [INIT; N]; let mut missing = N; 'attributes: for att in self { if let Some(attribute) = att.as_jsx_attribute() { if let Some(name) = attribute .name() .ok() .and_then(|x| x.as_jsx_name()?.value_token().ok()) { let name = name.text_trimmed(); for i in 0..N { if results[i].is_none() && names_to_lookup[i] == name { results[i] = Some(attribute.clone()); if missing == 1 { break 'attributes; } else { missing -= 1; break; } } } } } } results } pub fn find_by_name(&self, name_to_lookup: &str) -> SyntaxResult<Option<JsxAttribute>> { let attribute = self.iter().find_map(|attribute| { let attribute = JsxAttribute::cast_ref(attribute.syntax())?; let name = attribute.name().ok()?; let name = JsxName::cast_ref(name.syntax())?; if name.value_token().ok()?.text_trimmed() == name_to_lookup { Some(attribute) } else { None } }); Ok(attribute) } pub fn has_trailing_spread_prop(&self, current_attribute: impl Into<AnyJsxAttribute>) -> bool { let mut current_attribute_found = false; let current_attribute = current_attribute.into(); for attribute in self { if attribute == current_attribute { current_attribute_found = true; continue; } if current_attribute_found && attribute.as_jsx_spread_attribute().is_some() { return true; } } false } } impl AnyJsxElementName { pub fn name_value_token(&self) -> Option<JsSyntaxToken> { match self { AnyJsxElementName::JsxMemberName(member) => member.member().ok()?.value_token().ok(), AnyJsxElementName::JsxName(name) => name.value_token().ok(), AnyJsxElementName::JsxNamespaceName(name) => name.name().ok()?.value_token().ok(), AnyJsxElementName::JsxReferenceIdentifier(name) => name.value_token().ok(), } } } declare_node_union! 
{ pub AnyJsxElement = JsxOpeningElement | JsxSelfClosingElement } impl AnyJsxElement { pub fn attributes(&self) -> JsxAttributeList { match self { AnyJsxElement::JsxOpeningElement(element) => element.attributes(), AnyJsxElement::JsxSelfClosingElement(element) => element.attributes(), } } pub fn name(&self) -> SyntaxResult<AnyJsxElementName> { match self { AnyJsxElement::JsxOpeningElement(element) => element.name(), AnyJsxElement::JsxSelfClosingElement(element) => element.name(), } } pub fn name_value_token(&self) -> Option<JsSyntaxToken> { self.name().ok()?.name_value_token() } /// Return true if the current element is actually a component /// /// - `<Span />` is a component and it would return `true` /// - `<span ></span>` is **not** component and it returns `false` pub fn is_custom_component(&self) -> bool { self.name().map_or(false, |it| it.as_jsx_name().is_none()) } /// Return true if the current element is an HTML element /// /// - `<Span />` is a component and it would return `false` /// - `<span ></span>` is **not** component and it returns `true` pub fn is_element(&self) -> bool { self.name().map_or(false, |it| it.as_jsx_name().is_some()) } pub fn has_spread_prop(&self) -> bool { self.attributes() .into_iter() .any(|attribute| matches!(attribute, AnyJsxAttribute::JsxSpreadAttribute(_))) } pub fn has_trailing_spread_prop(&self, current_attribute: impl Into<AnyJsxAttribute>) -> bool { match self { AnyJsxElement::JsxSelfClosingElement(element) => { element.has_trailing_spread_prop(current_attribute) } AnyJsxElement::JsxOpeningElement(element) => { element.has_trailing_spread_prop(current_attribute) } } } pub fn find_attribute_by_name(&self, name_to_lookup: &str) -> Option<JsxAttribute> { match self { AnyJsxElement::JsxSelfClosingElement(element) => { element.find_attribute_by_name(name_to_lookup).ok()? } AnyJsxElement::JsxOpeningElement(element) => { element.find_attribute_by_name(name_to_lookup).ok()? } } } pub fn has_truthy_attribute(&self, name_to_lookup: &str) -> bool { self.find_attribute_by_name(name_to_lookup) .map_or(false, |attribute| { attribute .as_static_value() .map_or(true, |value| !(value.is_falsy() || value.text() == "false")) && !self.has_trailing_spread_prop(attribute) }) } } impl JsxAttribute { pub fn is_value_null_or_undefined(&self) -> bool { self.as_static_value() .map_or(false, |it| it.is_null_or_undefined()) } pub fn as_static_value(&self) -> Option<StaticValue> { self.initializer()?.value().ok()?.as_static_value() } pub fn name_value_token(&self) -> Option<JsSyntaxToken> { match self.name().ok()? 
{ AnyJsxAttributeName::JsxName(name) => name.value_token().ok(), AnyJsxAttributeName::JsxNamespaceName(name) => name.name().ok()?.value_token().ok(), } } } impl AnyJsxAttributeValue { pub fn is_value_null_or_undefined(&self) -> bool { self.as_static_value() .map_or(false, |it| it.is_null_or_undefined()) } pub fn as_static_value(&self) -> Option<StaticValue> { match self { AnyJsxAttributeValue::AnyJsxTag(_) => None, AnyJsxAttributeValue::JsxExpressionAttributeValue(expression) => { expression.expression().ok()?.as_static_value() } AnyJsxAttributeValue::JsxString(string) => { Some(StaticValue::String(string.value_token().ok()?)) } } } } impl AnyJsxChild { /// Check if jsx child node is accessible for screen readers pub fn is_accessible_node(&self) -> Option<bool> { Some(match self { AnyJsxChild::JsxText(text) => { let value_token = text.value_token().ok()?; value_token.text_trimmed().trim() != "" } AnyJsxChild::JsxExpressionChild(expression) => { let expression = expression.expression()?; expression .as_static_value() .map_or(true, |value| !value.is_falsy()) } AnyJsxChild::JsxElement(element) => { let opening_element = element.opening_element().ok()?; let jsx_element = AnyJsxElement::cast(opening_element.syntax().clone())?; // We don't check if a component (e.g. <Text aria-hidden />) is using the `aria-hidden` property, // since we don't have enough information about how the property is used. jsx_element.is_custom_component() || !jsx_element.has_truthy_attribute("aria-hidden") } AnyJsxChild::JsxSelfClosingElement(element) => { let jsx_element = AnyJsxElement::cast(element.syntax().clone())?; jsx_element.is_custom_component() || !jsx_element.has_truthy_attribute("aria-hidden") } AnyJsxChild::JsxFragment(fragment) => fragment .children() .into_iter() .any(|child| child.is_accessible_node().unwrap_or(true)), _ => true, }) } }
true
1d6fc2619357434b74d5e09988415850102df076
Rust
mikedilger/sarek
/src/lib.rs
UTF-8
5,025
2.546875
3
[ "MIT", "LicenseRef-scancode-warranty-disclaimer", "Apache-2.0", "LicenseRef-scancode-unknown-license-reference" ]
permissive
extern crate vks; extern crate winit; extern crate image as imageformat; extern crate libc; #[cfg(windows)] extern crate user32; #[cfg(windows)] extern crate winapi; #[macro_use] extern crate bitflags; // Include our macros early include!(concat!(env!("CARGO_MANIFEST_DIR"), "/src/macros.rs")); pub mod error; pub use error::Error; pub mod version; pub use version::Version; pub mod instance; pub use instance::{InstanceLoader, Instance}; pub mod format; pub use format::*; pub mod image; pub use image::*; use std::ptr; use std::mem; use std::str; use std::ffi::{CString, CStr}; use vks::*; /// See vulkan specification, section 30.2 Extensions pub struct ExtensionProperties { pub extension_name: String, pub spec_version: u32 } impl ExtensionProperties { fn from_vk(vk: VkExtensionProperties) -> Result<ExtensionProperties, Error> { Ok(ExtensionProperties { extension_name: unsafe { str::from_utf8( CStr::from_ptr( vk.extensionName.as_ptr()) .to_bytes())? .to_owned() }, spec_version: vk.specVersion, }) } } /// See vulkan specification, section 30.2 Extensions. /// Despite what the name implies, this returns a Vec not an Iterator. pub fn enumerate_instance_extension_properties(layer_name: Option<&str>) -> Result<Vec<ExtensionProperties>, Error> { let layer_name_cstring: Option<CString> = match layer_name { Some(s) => Some(CString::new(s)?), None => None }; let p_layer_name = match layer_name_cstring { Some(ref s) => s.as_ptr(), None => ptr::null(), }; // Call once to get the property count let mut property_count: u32 = 0; vk_try!(unsafe { vkEnumerateInstanceExtensionProperties( p_layer_name, &mut property_count, ptr::null_mut() )}); // Prepare room for the output let capacity: usize = property_count as usize; let mut properties: Vec<VkExtensionProperties> = Vec::with_capacity(capacity); // Call again to get the data vk_try!(unsafe { vkEnumerateInstanceExtensionProperties( p_layer_name, &mut property_count, properties.as_mut_ptr() )}); // Trust the data now in the properties vector let properties = unsafe { let p = properties.as_mut_ptr(); mem::forget(properties); Vec::from_raw_parts(p, property_count as usize, capacity) }; // Translate for output let mut output: Vec<ExtensionProperties> = Vec::with_capacity(property_count as usize); for property in properties { output.push(ExtensionProperties::from_vk(property)?); } Ok(output) } /// See vulkan specification, section 30.1 Layers pub struct LayerProperties { pub layer_name: String, pub spec_version: u32, pub implementation_version: u32, pub description: String, } impl LayerProperties { fn from_vk(vk: VkLayerProperties) -> Result<LayerProperties, Error> { Ok(LayerProperties { layer_name: unsafe { str::from_utf8( CStr::from_ptr( vk.layerName.as_ptr()) .to_bytes())? .to_owned() }, spec_version: vk.specVersion, implementation_version: vk.implementationVersion, description: unsafe { str::from_utf8( CStr::from_ptr( vk.description.as_ptr()) .to_bytes())? .to_owned() }, }) } } /// See vulkan specification, section 30.1 Layers. /// Despite what the name implies, this returns a Vec not an Iterator. 
pub fn enumerate_instance_layer_properties() -> Result<Vec<LayerProperties>, Error> { // Call once to get the property count let mut property_count: u32 = 0; vk_try!(unsafe { vkEnumerateInstanceLayerProperties( &mut property_count, ptr::null_mut() )}); // Prepare room for the output let capacity: usize = property_count as usize; let mut properties: Vec<VkLayerProperties> = Vec::with_capacity(capacity); // Call again to get the data vk_try!(unsafe { vkEnumerateInstanceLayerProperties( &mut property_count, properties.as_mut_ptr() )}); // Trust the data now in the properties vector let properties = unsafe { let p = properties.as_mut_ptr(); mem::forget(properties); Vec::from_raw_parts(p, property_count as usize, capacity) }; // Translate for output let mut output: Vec<LayerProperties> = Vec::with_capacity(property_count as usize); for property in properties { output.push(LayerProperties::from_vk(property)?); } Ok(output) } pub type Bool32 = VkBool32; pub type SampleCountFlags = VkSampleCountFlags; // u32 pub type Extent3D = VkExtent3D; pub type Extent2D = VkExtent2D;
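// A minimal usage sketch for the two enumeration helpers defined above. It assumes a
// working Vulkan loader is present at run time; the field names come straight from the
// `ExtensionProperties` and `LayerProperties` structs in this file.
#[cfg(test)]
mod enumeration_sketch {
    use super::*;

    #[test]
    #[ignore] // only meaningful on a machine with a Vulkan implementation installed
    fn list_instance_extensions_and_layers() {
        // `None` asks for the extensions exposed by the implementation itself rather
        // than by a specific layer.
        for ext in enumerate_instance_extension_properties(None).unwrap() {
            println!("extension {} (spec v{})", ext.extension_name, ext.spec_version);
        }

        for layer in enumerate_instance_layer_properties().unwrap() {
            println!(
                "layer {} (impl v{}): {}",
                layer.layer_name, layer.implementation_version, layer.description
            );
        }
    }
}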
true
9bfa561947be56bc82628a122d2c81b90afe6f11
Rust
foresterre/sic
/crates/sic_image_engine/src/operations/diff.rs
UTF-8
4,419
3.015625
3
[ "LicenseRef-scancode-unknown-license-reference", "MIT", "OFL-1.1", "Apache-2.0", "BSD-2-Clause", "CC0-1.0", "Unlicense", "Unicode-DFS-2016", "BSD-3-Clause" ]
permissive
use crate::errors::SicImageEngineError;
use crate::operations::ImageOperation;
use crate::wrapper::image_path::ImageFromPath;
use rayon::iter::{IndexedParallelIterator, IntoParallelRefMutIterator, ParallelIterator};
use sic_core::image::{DynamicImage, GenericImageView, ImageBuffer, Rgba, RgbaImage};
use sic_core::{image, SicImage};
use std::cmp;
use std::convert::TryFrom;

pub struct Diff<'image> {
    path: &'image ImageFromPath,
}

impl<'image> Diff<'image> {
    pub fn new(path: &'image ImageFromPath) -> Self {
        Self { path }
    }
}

impl<'image> ImageOperation for Diff<'image> {
    fn apply_operation(&self, image: &mut SicImage) -> Result<(), SicImageEngineError> {
        match image {
            SicImage::Static(image) => diff_impl(image, self.path),
            SicImage::Animated(image) => diff_animated_image(image.frames_mut(), self.path),
        }
    }
}

fn diff_animated_image(
    frames: &mut [image::Frame],
    path: &ImageFromPath,
) -> Result<(), SicImageEngineError> {
    // Open matching image
    let other = path.open_image()?;

    match other {
        SicImage::Static(image) => diff_animated_with_static(frames, &image),
        SicImage::Animated(other) => diff_animated_with_animated(frames, other.frames()),
    }

    Ok(())
}

// Diff each frame against the frame at the same index in the other animation.
fn diff_animated_with_animated(frames: &mut [image::Frame], other: &[image::Frame]) {
    frames.par_iter_mut().zip(other).for_each(|(lhs, rhs)| {
        *lhs.buffer_mut() = produce_image_diff(
            &DynamicImage::ImageRgba8(lhs.buffer().clone()),
            &DynamicImage::ImageRgba8(rhs.buffer().clone()),
        );
    });
}

// Diff every frame of the animation against the same static image.
fn diff_animated_with_static(frames: &mut [image::Frame], other: &DynamicImage) {
    frames.par_iter_mut().for_each(|frame| {
        *frame.buffer_mut() =
            produce_image_diff(&DynamicImage::ImageRgba8(frame.buffer().clone()), other);
    });
}

fn diff_impl(image: &mut DynamicImage, path: &ImageFromPath) -> Result<(), SicImageEngineError> {
    let cmp = path.open_image()?;

    // NB: Diffing a static image currently requires the right hand side image to be a static image
    //     We could do the same as we do on loading an image: simply pick the first frame
    //     Right now we error instead.
    let cmp = DynamicImage::try_from(cmp)?;

    *image = DynamicImage::ImageRgba8(produce_image_diff(image, &cmp));

    Ok(())
}

// same -> white pixel
pub(crate) const DIFF_PX_SAME: Rgba<u8> = Rgba([255, 255, 255, 255]);
// different -> coloured pixel
pub(crate) const DIFF_PX_DIFF: Rgba<u8> = Rgba([255, 0, 0, 255]);
// non overlapping -> transparent pixel
pub(crate) const DIFF_PX_NO_OVERLAP: Rgba<u8> = Rgba([0, 0, 0, 0]);

/// Takes the diff of two images.
///
/// If a pixel at `(x, y)` in the image `this` (`P`) compared to the pixel at `(x, y)` in the image `that` (`Q`):
/// * is the same: the output image will colour that pixel white.
/// * differs: the output image will colour that pixel red.
///
/// The output image (`R`) will have width `w=max(width(this), width(that))` and height
/// `h=max(height(this), height(that))`.
///
/// In case that two images when overlapped differ inversely in both width and height, so
/// `(P_width > Q_width ∧ P_height < Q_height) ⊕ (P_width < Q_width ∧ P_height > Q_height)` then
/// there will be pixels in `R`, for which for some pixels `p_{i, j} ∈ R | p_{i, j} ∉ P ∨ p_{i, j} ∉ Q`.
/// That is, the part of output image which isn't part of either of the two original input images.
/// These pixels will be 'coloured' black but with an alpha value of 0, so they will be transparent
/// as to show they were not part of the input images.
fn produce_image_diff(this: &DynamicImage, other: &DynamicImage) -> RgbaImage { let (lw, lh) = this.dimensions(); let (rw, rh) = other.dimensions(); let w = cmp::max(lw, rw); let h = cmp::max(lh, rh); let mut buffer = ImageBuffer::new(w, h); for (x, y, pixel) in buffer.enumerate_pixels_mut() { if this.in_bounds(x, y) && other.in_bounds(x, y) { if this.get_pixel(x, y) == other.get_pixel(x, y) { *pixel = DIFF_PX_SAME; } else { *pixel = DIFF_PX_DIFF; } } else if this.in_bounds(x, y) || other.in_bounds(x, y) { *pixel = DIFF_PX_DIFF; } else { *pixel = DIFF_PX_NO_OVERLAP; } } buffer }
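// A small check of the pixel rules documented above, built from the `image` types already
// imported in this module (1x1 buffers are enough to show the "same" and "different" cases).
#[cfg(test)]
mod diff_sketch {
    use super::*;

    #[test]
    fn identical_pixels_are_white_and_different_pixels_are_red() {
        let base = DynamicImage::ImageRgba8(RgbaImage::from_pixel(1, 1, Rgba([10, 20, 30, 255])));
        let same = DynamicImage::ImageRgba8(RgbaImage::from_pixel(1, 1, Rgba([10, 20, 30, 255])));
        let other = DynamicImage::ImageRgba8(RgbaImage::from_pixel(1, 1, Rgba([99, 20, 30, 255])));

        assert_eq!(*produce_image_diff(&base, &same).get_pixel(0, 0), DIFF_PX_SAME);
        assert_eq!(*produce_image_diff(&base, &other).get_pixel(0, 0), DIFF_PX_DIFF);
    }
}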
true
2dd58b7c562ae2949b02990ec35058365e82b12d
Rust
nicholasnjihian/Code-Wars-Squares-in-a-true-rectangle-challenge-
/src/main.rs
UTF-8
713
3.21875
3
[]
no_license
/// Repeatedly peels off the largest square whose area fits in the remaining area,
/// returning the side lengths in order.
fn get_squares(rect_len: i32, rect_width: i32) -> Option<Vec<i32>> {
    let mut area: i32 = rect_len * rect_width;
    let mut v: Vec<i32> = Vec::new();
    loop {
        let (r, d): (i32, i32) = iterate_through_vals(area);
        v.push(r);
        if d == 0_i32 {
            break;
        }
        area = d;
    }
    Some(v)
}

/// Returns the integer square root of `prod` and the area left over once a square with
/// that side length is removed. `f64` keeps the square root exact over the full `i32` range.
fn iterate_through_vals(prod: i32) -> (i32, i32) {
    let root: f64 = ((prod as f64).sqrt()).trunc();
    let diff: f64 = (prod as f64) - (root * root);
    (root as i32, diff as i32)
}

fn main() {
    let vals = match get_squares(5, 3) {
        Some(v) => v,
        None => panic!("No value was found as no value was given!"),
    };
    println!("{:?}", vals);
}
true
c7faf74ec87b2389b868b0486745295122a3e8d7
Rust
club-code/CodingChallenges
/advent-of-code/2020/rust/day8/src/bin/part2.rs
UTF-8
1,536
3.34375
3
[]
no_license
use anyhow::Result; use day8::{parse_instructions, Evaluator, Instruction, Operation}; /// Solves part 2 by traversing all the instructions, inverting `nop`s and `jmp`s, /// and checking if that fixes the code by trying executing it until a loop. fn main() -> Result<()> { let inss = parse_instructions()?; for (pc, ins) in inss.iter().enumerate() { match ins.op { Operation::Nothing => { // Don't invert zero `nop`s as `jmp +0` results in a loop. if ins.arg != 0 && print_fixed_acc(&inss, Operation::Jump, pc) { break; } } Operation::Jump => { // Finish as soon as one inversion fixes the code. if print_fixed_acc(&inss, Operation::Nothing, pc) { break; } } Operation::Accumulate => {} } } Ok(()) } /// Clones `inss`, assigns `op` to the instruction at index `pc` and runs the /// evaluation. If it results in executing the program until the very last /// instruction, then prints the accumulator and returns `true`, else `false`. fn print_fixed_acc(inss: &[Instruction], op: Operation, pc: usize) -> bool { let mut fixed_inss = inss.to_vec(); fixed_inss[pc].op = op; match Evaluator::new(&mut fixed_inss).eval_until_loop() { (final_pc, final_acc, _) if final_pc == fixed_inss.len() => { println!("{}", final_acc); true } _ => false, } }
true
8c7cc37702271e041265f9aca89b9bfa3a9968ca
Rust
hopkings2008/nalgebra
/src/third_party/glam/glam_similarity.rs
UTF-8
1,436
2.84375
3
[ "Apache-2.0" ]
permissive
use crate::{Similarity2, Similarity3}; use glam::{DMat3, DMat4, Mat3, Mat4}; impl From<Similarity2<f32>> for Mat3 { fn from(iso: Similarity2<f32>) -> Mat3 { iso.to_homogeneous().into() } } impl From<Similarity3<f32>> for Mat4 { fn from(iso: Similarity3<f32>) -> Mat4 { iso.to_homogeneous().into() } } impl From<Similarity2<f64>> for DMat3 { fn from(iso: Similarity2<f64>) -> DMat3 { iso.to_homogeneous().into() } } impl From<Similarity3<f64>> for DMat4 { fn from(iso: Similarity3<f64>) -> DMat4 { iso.to_homogeneous().into() } } #[cfg(feature = "convert-glam-unchecked")] mod unchecked { use crate::{Matrix3, Matrix4, Similarity2, Similarity3}; use glam::{DMat3, DMat4, Mat3, Mat4}; impl From<Mat3> for Similarity2<f32> { fn from(mat3: Mat3) -> Similarity2<f32> { crate::convert_unchecked(Matrix3::from(mat3)) } } impl From<Mat4> for Similarity3<f32> { fn from(mat4: Mat4) -> Similarity3<f32> { crate::convert_unchecked(Matrix4::from(mat4)) } } impl From<DMat3> for Similarity2<f64> { fn from(mat3: DMat3) -> Similarity2<f64> { crate::convert_unchecked(Matrix3::from(mat3)) } } impl From<DMat4> for Similarity3<f64> { fn from(mat4: DMat4) -> Similarity3<f64> { crate::convert_unchecked(Matrix4::from(mat4)) } } }
true
0085d594898aa9b7efb6a3ca2566fbdf3ff9bfc1
Rust
sammyne/encoding-rs
/crates/binary/src/lib.rs
UTF-8
908
2.953125
3
[]
no_license
//! Implementation of simple translation between numbers and byte //! sequences and encoding and decoding of varints. //! //! Numbers are translated by reading and writing fixed-size values. //! A fixed-size value is either a fixed-size arithmetic //! type (bool, i8, u8, i16, f32, ...) //! or an array or struct containing only fixed-size values. //! //! The varint functions encode and decode single integer values using //! a variable-length encoding; smaller values require fewer bytes. //! For a specification, see //! <https://developers.google.com/protocol-buffers/docs/encoding>. //! //! This module favors simplicity over efficiency. Clients that require //! high-performance serialization, especially for large data structures, //! should look at more advanced solutions such as protocol buffers. mod binary; mod errors; mod varint; pub use self::binary::*; pub use errors::*; pub use varint::*;
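// The crate's actual varint API is re-exported from the `varint` module above and is not
// reproduced here; the sketch below is only a standalone illustration of the base-128
// encoding referenced in the module docs (each byte carries seven payload bits, and the
// high bit signals that more bytes follow), so smaller values require fewer bytes.
#[cfg(test)]
mod varint_sketch {
    fn put_uvarint(buf: &mut Vec<u8>, mut x: u64) {
        while x >= 0x80 {
            buf.push((x as u8) | 0x80);
            x >>= 7;
        }
        buf.push(x as u8);
    }

    #[test]
    fn smaller_values_use_fewer_bytes() {
        let mut buf = Vec::new();
        put_uvarint(&mut buf, 1); // fits in a single byte
        put_uvarint(&mut buf, 300); // needs two bytes: 0xAC 0x02
        assert_eq!(buf, vec![0x01, 0xAC, 0x02]);
    }
}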
true
45e863628729fc5f691453a9c7a8d1a1e3c91c07
Rust
Krout0n/vecmat-rs
/src/complex/complex_.rs
UTF-8
11,769
3.21875
3
[ "MIT", "Apache-2.0", "LicenseRef-scancode-unknown-license-reference" ]
permissive
use crate::{ matrix::Matrix2x2, traits::{Conj, Dot, NormL1, NormL2, Normalize}, vector::Vector2, }; use core::ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Neg, Rem, Sub, SubAssign}; use num_complex::{Complex as NumComplex, ParseComplexError}; use num_traits::{Float, Num, One, Zero, Inv}; /// Complex number. #[repr(transparent)] #[derive(Clone, Copy, Default, PartialEq)] pub struct Complex<T> { vec: Vector2<T>, } impl<T> Complex<T> { pub fn new(w: T, x: T) -> Self { Self { vec: [w, x].into() } } pub fn from_vector(vec: Vector2<T>) -> Self { Self { vec } } pub fn from_array(arr: [T; 2]) -> Self { Self { vec: arr.into() } } pub fn from_tuple(tup: (T, T)) -> Self { Self { vec: tup.into() } } pub fn from_num(nc: NumComplex<T>) -> Self { Self::new(nc.re, nc.im) } pub fn into_vector(self) -> Vector2<T> { self.vec } pub fn into_array(self) -> [T; 2] { self.vec.into() } pub fn into_tuple(self) -> (T, T) { self.vec.into() } pub fn into_num(self) -> NumComplex<T> { let (re, im) = self.into(); NumComplex { re, im } } } impl<T> From<Vector2<T>> for Complex<T> { fn from(vec: Vector2<T>) -> Self { Self::from_vector(vec) } } impl<T> From<Complex<T>> for Vector2<T> { fn from(comp: Complex<T>) -> Self { comp.into_vector() } } impl<T> From<[T; 2]> for Complex<T> { fn from(arr: [T; 2]) -> Self { Self::from_array(arr) } } impl<T> From<Complex<T>> for [T; 2] { fn from(comp: Complex<T>) -> Self { comp.into_array() } } impl<T> From<(T, T)> for Complex<T> { fn from(tup: (T, T)) -> Self { Self::from_tuple(tup) } } impl<T> From<Complex<T>> for (T, T) { fn from(comp: Complex<T>) -> Self { comp.into_tuple() } } impl<T> From<NumComplex<T>> for Complex<T> { fn from(nc: NumComplex<T>) -> Self { Self::from_num(nc) } } impl<T> From<Complex<T>> for NumComplex<T> { fn from(comp: Complex<T>) -> Self { comp.into_num() } } impl<T: Copy> Complex<T> { pub fn re(&self) -> T { self.vec.x() } pub fn im(&self) -> T { self.vec.y() } } impl<T> Complex<T> { pub fn re_ref(&self) -> &T { self.vec.x_ref() } pub fn im_ref(&self) -> &T { self.vec.y_ref() } pub fn re_mut(&mut self) -> &mut T { self.vec.x_mut() } pub fn im_mut(&mut self) -> &mut T { self.vec.y_mut() } } impl<T> Complex<T> where T: Neg<Output = T> + Copy, { pub fn into_matrix(self) -> Matrix2x2<T> { let (re, im) = self.into(); Matrix2x2::from([[re, -im], [im, re]]) } } impl<T> Neg for Complex<T> where T: Neg<Output = T>, { type Output = Self; fn neg(self) -> Self { (-self.vec).into() } } impl<T> Complex<T> where T: Neg<Output = T>, { pub fn conj(self) -> Self { let (w, x) = self.into(); Self::new(w, -x) } } impl<T> Conj for Complex<T> where T: Neg<Output = T>, { fn conj(self) -> Self { Complex::conj(self) } } impl<T> Add for Complex<T> where T: Add<Output = T>, { type Output = Self; fn add(self, other: Self) -> Self { (self.vec + other.vec).into() } } impl<T> Add<T> for Complex<T> where T: Add<Output = T>, { type Output = Self; fn add(self, other: T) -> Self { let (w, x) = self.into(); Self::new(w + other, x) } } impl<T> Sub for Complex<T> where T: Sub<Output = T>, { type Output = Self; fn sub(self, other: Self) -> Self { (self.vec - other.vec).into() } } impl<T> Sub<T> for Complex<T> where T: Sub<Output = T>, { type Output = Self; fn sub(self, other: T) -> Self { let (w, x) = self.into(); Self::new(w - other, x) } } macro_rules! reverse_add_sub { ($T:ident) => { /// Workaround for reverse addition. impl Add<Complex<$T>> for $T { type Output = Complex<$T>; fn add(self, other: Complex<$T>) -> Self::Output { other + self } } /// Workaround for reverse subtraction. 
impl Sub<Complex<$T>> for $T { type Output = Complex<$T>; fn sub(self, other: Complex<$T>) -> Self::Output { -other + self } } }; } reverse_add_sub!(f32); reverse_add_sub!(f64); impl<T> AddAssign for Complex<T> where T: AddAssign, { fn add_assign(&mut self, other: Self) { self.vec += other.vec; } } impl<T> AddAssign<T> for Complex<T> where T: AddAssign, { fn add_assign(&mut self, other: T) { *self.re_mut() += other; } } impl<T> SubAssign for Complex<T> where T: SubAssign, { fn sub_assign(&mut self, other: Self) { self.vec -= other.vec; } } impl<T> SubAssign<T> for Complex<T> where T: SubAssign, { fn sub_assign(&mut self, other: T) { *self.re_mut() -= other; } } impl<T> Zero for Complex<T> where T: Zero, { fn zero() -> Self { Self::new(T::zero(), T::zero()) } fn is_zero(&self) -> bool { self.vec.is_zero() } } impl<T> Mul for Complex<T> where T: Add<Output = T> + Sub<Output = T> + Mul<Output = T> + Copy, { type Output = Self; fn mul(self, other: Self) -> Self { Self::new( self.re() * other.re() - self.im() * other.im(), self.re() * other.im() + self.im() * other.re(), ) } } impl<T> Mul<T> for Complex<T> where T: Mul<Output = T> + Copy, { type Output = Self; fn mul(self, other: T) -> Self { (self.vec * other).into() } } impl<T> MulAssign for Complex<T> where Self: Mul<Output = Self> + Copy, { fn mul_assign(&mut self, other: Self) { *self = *self * other; } } impl<T> MulAssign<T> for Complex<T> where Self: Mul<T, Output = Self> + Copy, { fn mul_assign(&mut self, other: T) { *self = *self * other; } } impl<T> One for Complex<T> where T: Zero + One + Sub<Output = T> + Copy, { fn one() -> Self { Self::new(T::one(), T::zero()) } } impl<T> Complex<T> where T: Zero + One, { pub fn i() -> Self { Self::new(T::zero(), T::one()) } } impl<T> Complex<T> where T: Add<Output = T> + Mul<Output = T> + Copy, { pub fn norm_sqr(self) -> T { self.vec.square_length() } } impl<T: Float> Complex<T> { pub fn norm(self) -> T { self.vec.length() } pub fn arg(self) -> T { self.im().atan2(self.re()) } pub fn to_polar(self) -> (T, T) { (self.norm(), self.arg()) } pub fn from_polar(r: T, theta: T) -> Self { Self::new(r * theta.cos(), r * theta.sin()) } } impl<T> NormL1 for Complex<T> where Vector2<T>: NormL1<Output = T>, { type Output = T; fn norm_l1(self) -> T { self.vec.norm_l1() } } impl<T: Float> NormL2 for Complex<T> { type Output = T; fn norm_l2(self) -> T { self.norm() } fn norm_l2_sqr(self) -> T { self.norm_sqr() } } impl<T> Div<T> for Complex<T> where T: Div<Output = T> + Copy, { type Output = Self; fn div(self, other: T) -> Self { (self.vec / other).into() } } impl<T> Complex<T> where T: Float, { pub fn normalize(self) -> Self { self / self.norm() } } impl<T> Normalize for Complex<T> where T: Float { fn normalize(self) -> Self { Complex::normalize(self) } } impl<T> Complex<T> where T: Neg<Output = T> + Num + Copy, { pub fn inv(self) -> Self { self.conj() / self.norm_sqr() } } impl<T> Inv for Complex<T> where T: Float { type Output = Self; fn inv(self) -> Self { Complex::inv(self) } } #[allow(clippy::suspicious_arithmetic_impl)] impl<T> Div for Complex<T> where T: Neg<Output = T> + Num + Copy, { type Output = Self; fn div(self, other: Self) -> Self { self * other.inv() } } impl<T> DivAssign for Complex<T> where Self: Div<Output = Self> + Copy, { fn div_assign(&mut self, other: Self) { *self = *self / other; } } impl<T> DivAssign<T> for Complex<T> where Self: Div<T, Output = Self> + Copy, { fn div_assign(&mut self, other: T) { *self = *self / other; } } impl<T: Neg<Output = T> + Num + Copy> Rem for Complex<T> { 
type Output = Self; fn rem(self, other: Self) -> Self { (self.into_num() % other.into_num()).into() } } impl<T: Neg<Output = T> + Num + Copy> Num for Complex<T> { type FromStrRadixErr = ParseComplexError<T::FromStrRadixErr>; fn from_str_radix(s: &str, radix: u32) -> Result<Self, Self::FromStrRadixErr> { NumComplex::from_str_radix(s, radix).map(Self::from_num) } } macro_rules! reverse_mul_div { ($T:ident) => { /// Workaround for reverse multiplication. impl Mul<Complex<$T>> for $T { type Output = Complex<$T>; fn mul(self, other: Complex<$T>) -> Self::Output { other * self } } /// Workaround for reverse division. #[allow(clippy::suspicious_arithmetic_impl)] impl Div<Complex<$T>> for $T { type Output = Complex<$T>; fn div(self, other: Complex<$T>) -> Self::Output { self * other.inv() } } }; } reverse_mul_div!(f32); reverse_mul_div!(f64); impl<T> Dot for Complex<T> where T: Add<Output = T> + Mul<Output = T>, { type Output = T; fn dot(self, other: Self) -> T { self.vec.dot(other.vec) } } impl<T: Num + Copy> Complex<T> { pub fn powu(&self, exp: u32) -> Self { self.into_num().powu(exp).into() } } impl<T: Neg<Output = T> + Num + Copy> Complex<T> { pub fn powi(&self, exp: i32) -> Self { self.into_num().powi(exp).into() } } impl<T: Float> Complex<T> { pub fn exp(self) -> Self { self.into_num().exp().into() } pub fn ln(self) -> Self { self.into_num().ln().into() } pub fn sqrt(self) -> Self { self.into_num().sqrt().into() } pub fn cbrt(self) -> Self { self.into_num().cbrt().into() } pub fn powf(self, exp: T) -> Self { self.into_num().powf(exp).into() } pub fn log(self, base: T) -> Self { self.into_num().log(base).into() } pub fn powc(self, exp: Self) -> Self { self.into_num().powc(exp.into_num()).into() } pub fn expf(self, base: T) -> Self { self.into_num().expf(base).into() } pub fn sin(self) -> Self { self.into_num().sin().into() } pub fn cos(self) -> Self { self.into_num().cos().into() } pub fn tan(self) -> Self { self.into_num().tan().into() } pub fn asin(self) -> Self { self.into_num().asin().into() } pub fn acos(self) -> Self { self.into_num().acos().into() } pub fn atan(self) -> Self { self.into_num().atan().into() } pub fn sinh(self) -> Self { self.into_num().sinh().into() } pub fn cosh(self) -> Self { self.into_num().cosh().into() } pub fn tanh(self) -> Self { self.into_num().tanh().into() } pub fn asinh(self) -> Self { self.into_num().asinh().into() } pub fn acosh(self) -> Self { self.into_num().acosh().into() } pub fn atanh(self) -> Self { self.into_num().atanh().into() } pub fn finv(self) -> Self { self.into_num().finv().into() } pub fn fdiv(self, other: Self) -> Self { self.into_num().fdiv(other.into_num()).into() } }
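// A short usage sketch relying only on items defined in this file: the `new` and `i`
// constructors, the derived `PartialEq`, the `Mul` implementation, and the `conj`,
// `norm` and `to_polar` methods.
#[cfg(test)]
mod usage_sketch {
    use super::*;

    #[test]
    fn multiply_and_inspect() {
        let z = Complex::new(3.0_f64, 4.0);
        let i = Complex::<f64>::i();

        // (3 + 4i) * i = -4 + 3i
        assert!(z * i == Complex::new(-4.0, 3.0));

        // Conjugation flips the imaginary part, and |3 + 4i| = 5.
        assert!(z.conj() == Complex::new(3.0, -4.0));
        assert!((z.norm() - 5.0).abs() < 1e-12);

        // Polar form: radius 5, angle atan(4 / 3).
        let (r, theta) = z.to_polar();
        assert!((r - 5.0).abs() < 1e-12);
        assert!((theta - (4.0f64 / 3.0).atan()).abs() < 1e-12);
    }
}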
true
78b04221969f268da8ec3751a2f25819f8880949
Rust
jihokoo/rustful
/src/cache.rs
UTF-8
7,422
3.359375
3
[ "MIT" ]
permissive
//!Traits and implementations for cached resources. #![stable] use std::io::{File, IoResult}; use std::io::fs::PathExtensions; use std::sync::{RwLock, RwLockReadGuard}; use time; use time::Timespec; ///A trait for cache storage. #[unstable] pub trait Cache { ///Free all the unused cached resources. fn free_unused(&self); } impl Cache for () { fn free_unused(&self) {} } ///This trait provides functions for handling cached resources. #[unstable] pub trait CachedValue<'a, Value> { ///Borrow the cached value, without loading or reloading it. fn borrow_current(&'a self) -> Value; ///Load the cached value. fn load(&self); ///Free the cached value. fn free(&self); ///Check if the cached value has expired. fn expired(&self) -> bool; ///Check if the cached value is unused and should be removed. fn unused(&self) -> bool; ///Reload the cached value if it has expired and borrow it. fn borrow(&'a self) -> Value { if self.expired() { self.load(); } self.borrow_current() } ///Free the cached value if it's unused. fn clean(&self) { if self.unused() { self.free(); } } } ///Cached raw file content. /// ///The whole file will be loaded when accessed. /// ///```rust ///# #![allow(unstable)] ///use rustful::cache::{CachedValue, CachedFile}; /// ///let file = CachedFile::new(Path::new("/some/file/path.txt"), None); /// ///match *file.borrow() { /// Some(ref content) => println!("loaded file with {} bytes of data", content.len()), /// None => println!("the file was not loaded") ///} ///``` #[unstable] pub struct CachedFile { path: Path, file: RwLock<Option<Vec<u8>>>, modified: RwLock<u64>, last_accessed: RwLock<Timespec>, unused_after: Option<i64> } #[unstable] impl CachedFile { ///Creates a new `CachedFile` which will be freed `unused_after` seconds after the latest access. pub fn new(path: Path, unused_after: Option<u32>) -> CachedFile { CachedFile { path: path, file: RwLock::new(None), modified: RwLock::new(0), last_accessed: RwLock::new(Timespec::new(0, 0)), unused_after: unused_after.map(|i| i as i64), } } } impl<'a> CachedValue<'a, RwLockReadGuard<'a, Option<Vec<u8>>>> for CachedFile { fn borrow_current(&'a self) -> RwLockReadGuard<'a, Option<Vec<u8>>> { if self.unused_after.is_some() { *self.last_accessed.write().unwrap() = time::get_time(); } self.file.read().unwrap() } fn load(&self) { *self.modified.write().unwrap() = self.path.stat().map(|s| s.modified).unwrap_or(0); *self.file.write().unwrap() = File::open(&self.path).read_to_end().ok(); if self.unused_after.is_some() { *self.last_accessed.write().unwrap() = time::get_time(); } } fn free(&self) { *self.file.write().unwrap() = None; } fn expired(&self) -> bool { if self.file.read().unwrap().is_some() { self.path.stat().map(|s| s.modified > *self.modified.read().unwrap()).unwrap_or(false) } else { true } } fn unused(&self) -> bool { if self.file.read().unwrap().is_some() { self.unused_after.map(|t| { let last_accessed = self.last_accessed.read().unwrap(); let unused_time = Timespec::new(last_accessed.sec + t, last_accessed.nsec); time::get_time() > unused_time }).unwrap_or(false) } else { false } } } ///A processed cached file. /// ///The file will be processed by a provided function ///each time it is loaded and the result will be stored. 
/// ///```rust ///# #![allow(unstable)] ///use std::io::{File, IoResult}; ///use rustful::cache::{CachedValue, CachedProcessedFile}; /// ///fn get_size(file: IoResult<File>) -> IoResult<Option<u64>> { /// file.and_then(|mut file| file.stat()).map(|stat| Some(stat.size)) ///} /// ///let file = CachedProcessedFile::new(Path::new("/some/file/path.txt"), None, get_size); /// ///match *file.borrow() { /// Some(ref size) => println!("file contains {} bytes of data", size), /// None => println!("the file was not loaded") ///} ///``` #[unstable] pub struct CachedProcessedFile<T> { path: Path, file: RwLock<Option<T>>, modified: RwLock<u64>, last_accessed: RwLock<Timespec>, unused_after: Option<i64>, processor: fn(IoResult<File>) -> IoResult<Option<T>> } #[unstable] impl<T: Send+Sync> CachedProcessedFile<T> { ///Creates a new `CachedProcessedFile` which will be freed `unused_after` seconds after the latest access. ///The file will be processed by the provided `processor` function each time it's loaded. pub fn new(path: Path, unused_after: Option<u32>, processor: fn(IoResult<File>) -> IoResult<Option<T>>) -> CachedProcessedFile<T> { CachedProcessedFile { path: path, file: RwLock::new(None), modified: RwLock::new(0), last_accessed: RwLock::new(Timespec::new(0, 0)), unused_after: unused_after.map(|i| i as i64), processor: processor } } } impl<'a, T: Send+Sync> CachedValue<'a, RwLockReadGuard<'a, Option<T>>> for CachedProcessedFile<T> { fn borrow_current(&'a self) -> RwLockReadGuard<'a, Option<T>> { if self.unused_after.is_some() { *self.last_accessed.write().unwrap() = time::get_time(); } self.file.read().unwrap() } fn load(&self) { *self.modified.write().unwrap() = self.path.stat().map(|s| s.modified).unwrap_or(0); *self.file.write().unwrap() = (self.processor)(File::open(&self.path)).ok().and_then(|result| result); if self.unused_after.is_some() { *self.last_accessed.write().unwrap() = time::get_time(); } } fn free(&self) { *self.file.write().unwrap() = None; } fn expired(&self) -> bool { if self.file.read().unwrap().is_some() { self.path.stat().map(|s| s.modified > *self.modified.read().unwrap()).unwrap_or(true) } else { true } } fn unused(&self) -> bool { if self.file.read().unwrap().is_some() { self.unused_after.map(|t| { let last_accessed = self.last_accessed.read().unwrap(); let unused_time = Timespec::new(last_accessed.sec + t, last_accessed.nsec); time::get_time() > unused_time }).unwrap_or(false) } else { false } } } #[test] fn file() { let file = CachedFile::new(Path::new("LICENSE"), None); assert_eq!(file.expired(), true); assert!(file.borrow().as_ref().map(|v| v.len()).unwrap_or(0) > 0); assert_eq!(file.expired(), false); file.free(); assert_eq!(file.expired(), true); } #[test] fn modified_file() { fn just_read(mut file: IoResult<File>) -> IoResult<Option<Vec<u8>>> { file.read_to_end().map(|v| Some(v)) } let file = CachedProcessedFile::new(Path::new("LICENSE"), None, just_read); assert_eq!(file.expired(), true); assert!(file.borrow().as_ref().map(|v| v.len()).unwrap_or(0) > 0); assert_eq!(file.expired(), false); file.free(); assert_eq!(file.expired(), true); }
true
6f81edae64e4c56feb9f251d917c897811e6e6bc
Rust
joseluis/spaceindex
/spaceindex/src/geometry/tests.rs
UTF-8
3,350
3.140625
3
[ "MIT", "Apache-2.0" ]
permissive
use rand::Rng; use crate::geometry::point::IntoPoint; use crate::geometry::region::IntoRegion; use crate::geometry::{LineSegment, Point, Region, Shape, Shapelike, ShapelikeError}; use crate::rtree::RTree; #[test] fn test_line_intersections() { let p1 = Point::new(vec![1.0, 0.0]); let p2 = Point::new(vec![3.0, 2.0]); let p3 = Point::new(vec![2.0, 0.0]); let p3a = Point::new(vec![2.0, 3.0]); let p4 = Point::new(vec![2.0, 4.0]); let p5 = Point::new(vec![1.0, 1.0]); let p6 = Point::new(vec![2.5, 3.0]); let p7 = Point::new(vec![1.0, 2.0]); let p8 = Point::new(vec![0.0, -1.0]); let p9 = Point::new(vec![4.0, 3.0]); let ls1 = LineSegment::new(p1, p2); let ls2 = LineSegment::new(p3, p4.clone()); let ls3 = LineSegment::new(p3a, p4); assert_eq!(ls1.intersects_line_segment(&ls2), Ok(true)); assert_eq!(ls1.intersects_line_segment(&ls3), Ok(false)); let r1 = Region::from_points(&p5, &p6); let r2 = Region::from_points(&p7, &p6); let r3 = Region::from_points(&p8, &p9); assert_eq!(r1.intersects_line_segment(&ls1), Ok(true)); assert_eq!(ls1.intersects_region(&r1), Ok(true)); assert_eq!(r2.intersects_line_segment(&ls1), Ok(false)); assert_eq!(ls1.intersects_region(&r2), Ok(false)); assert_eq!(r3.intersects_line_segment(&ls1), Ok(true)); assert_eq!(ls1.intersects_region(&r3), Ok(true)); } #[test] fn test_into_point_impl() { let _pt: Point = 0.1_f32.into_pt(); let _pt: Point = 0.1_f64.into_pt(); let _pt: Point = (0.5, 0.3).into_pt(); let _pt: Point = (1.0, -1.0).into_pt(); let _pt: Point = (1.0, 2.0, 3.0).into_pt(); } #[test] fn test_point_shapelike_impl() { let p = (1.0, 2.0, 3.0).into_pt(); // check our basic functions work assert_eq!(p.get_dimension(), 3); assert_eq!(p.get_area(), 0.0); assert_eq!(p.get_center(), p); let q = Shape::Point((2.0, 3.0, 4.0).into_pt()); // the (minimum) distance between p and q is the square root of 3 assert_eq!(p.get_min_distance(&q), Ok(3.0_f64.sqrt())); } #[test] fn test_region_area() { let ll = (0.0, 0.0).into_pt(); let ur = (2.0, 2.0).into_pt(); let r = Region::from_points(&ll, &ur); assert_eq!(r.get_area(), 4.0); } #[test] fn test_combine_regions() { // Make the region going from (0.0, 0.0) -> (2.0, 2.0) let b = ((0.0, 0.0), (2.0, 2.0)).into_region(); // Make the region going from (0.5, 0.5) -> (1.5, 3) let c = ((0.5, 0.5), (1.5, 3.0)).into_region(); let combined_region = b .combine_region(&c) .expect("Failed to combine regions `b` and `c`"); // The combined region should go from (0.0) -> (2, 3) assert_eq!( combined_region, ((0.0, 0.0), (2.0, 3.0)).into_region().into_owned() ); } #[test] fn test_rtree_insert() -> Result<(), ShapelikeError> { let mut tree = RTree::new(2); // insert 50 random positions let mut rng = rand::thread_rng(); for _ in 0..50 { let xmin = rng.gen_range(0.0..=100.0); let ymin = rng.gen_range(0.0..=100.0); let height = rng.gen_range(5.0..=10.0); let width = rng.gen_range(5.0..=10.0); let r = ((xmin, ymin), (xmin + width, ymin + height)).into_region(); tree.insert(r, 11)?; } tree.validate_consistency(); dbg!(&tree); Ok(()) }
true
acda49b49cbbead8cc5837ad684cd16c67a81d8a
Rust
jinjagit/Rust
/threadpool/src/lib.rs
UTF-8
1,906
3.5
4
[]
no_license
// from tutorial: https://www.youtube.com/watch?v=2mwwYbBRJSo use std::sync::mpsc::{channel, Sender}; use std::sync::Mutex; use std::sync::Arc; pub struct ThreadPool { _handles: Vec<std::thread::JoinHandle<()>>, tx: Sender<Box<dyn FnMut() + Send>>, } impl ThreadPool { pub fn new(num_threads: u8) -> Self { let (tx, rx) = channel::<Box<dyn FnMut() + Send>>(); let rx = Arc::new(Mutex::new(rx)); let mut _handles = vec![]; for _ in 0..num_threads { let clone = rx.clone(); let handle = std::thread::spawn(move || loop { let mut work = match clone.lock().unwrap().recv() { Ok(work) => work, Err(_) => break, }; work(); }); _handles.push(handle); } Self { _handles, tx } } // We need to use a generic type that implements a closure trait, like 'Fn()', // and we need it to be mutable, hence `FnMut()`. pub fn execute<T: FnMut() + Send + 'static>(&self, work: T) { self.tx.send(Box::new(work)).unwrap(); } } #[cfg(test)] mod tests { use super::*; #[test] fn it_works() { use std::sync::atomic::{AtomicU32, Ordering}; let nref = Arc::new(AtomicU32::new(0)); let pool = ThreadPool::new(10); let clone = nref.clone(); let foo = move || { clone.fetch_add(1, Ordering::SeqCst); }; pool.execute(foo.clone()); pool.execute(foo); std::thread::sleep(std::time::Duration::from_secs(1)); assert_eq!(nref.load(Ordering::SeqCst), 2); } } // Notes: // JoinHandle: An owned permission to join on a thread (block on its termination). // A JoinHandle detaches the associated thread when it is dropped, which means // that there is no longer any handle to thread and no way to join on it.
true
20fc7eccf165af55bd691ae773b94a37cc24c950
Rust
toshuno/solved_problems
/aribon/gcj/3_7_1.rs
UTF-8
2,386
2.75
3
[]
no_license
#[allow(unused_imports)]
use std::cmp::{max, min, Ordering};
#[allow(unused_imports)]
use std::collections::{BTreeMap, BTreeSet, BinaryHeap, HashMap, HashSet, VecDeque};
#[allow(unused_imports)]
use std::io::prelude::BufRead;
#[allow(unused_imports)]
use std::io::{stdin, stdout, BufReader, BufWriter, Write};
#[allow(unused_imports)]
use std::mem::swap;
#[allow(unused_imports)]
use std::str::FromStr;

fn main() {
    let n = read::<u64>();
    // coefs[i] holds (a, b) with (3 + sqrt(5))^(2^i) = a + b * sqrt(5), both taken mod 1000.
    let mut coefs = vec![(3_i64, 1_i64); 64];
    for i in 1..64 {
        let new_a = ((coefs[i - 1].0).pow(2) + (coefs[i - 1].1).pow(2) * 5) % 1000;
        let new_b = (2 * coefs[i - 1].0 * coefs[i - 1].1) % 1000;
        coefs[i] = (new_a, new_b);
    }
    let mut ans = (1, 0);
    let mut n = n;
    for i in 0..64 {
        let bin = n % 2;
        if bin == 1 {
            ans = (
                (ans.0 * coefs[i].0 + ans.1 * coefs[i].1 * 5) % 1000,
                (ans.0 * coefs[i].1 + ans.1 * coefs[i].0) % 1000,
            );
        }
        n >>= 1;
    }
    // floor((3 + sqrt(5))^n) = 2 * a - 1, where a is the rational part of (3 + sqrt(5))^n.
    // Keep the value non-negative modulo 1000 and print the last three digits zero-padded.
    let int = (2 * ans.0 + 999) % 1000;
    println!("{:03}", int);
}

#[allow(dead_code)]
fn read<T>() -> T
where
    T: std::str::FromStr,
    T::Err: std::fmt::Debug,
{
    let mut buf = String::new();
    stdin().read_line(&mut buf).unwrap();
    return buf.trim().parse().unwrap();
}

#[allow(dead_code)]
fn read_vector<T>() -> Vec<T>
where
    T: std::str::FromStr,
    T::Err: std::fmt::Debug,
{
    let mut buf = String::with_capacity(100);
    stdin().read_line(&mut buf).unwrap();
    return buf.split_whitespace().map(|s| s.parse().unwrap()).collect();
}

#[allow(dead_code)]
fn read_matrix<T>() -> Vec<Vec<T>>
where
    T: std::str::FromStr,
    T::Err: std::fmt::Debug,
{
    use std::io::prelude::*;
    let stdin = stdin();
    let mut reader = BufReader::with_capacity(100 * 1024, stdin);
    let mut line = String::with_capacity(100);
    let mut matrix: Vec<Vec<T>> = Vec::new();
    while reader.read_line(&mut line).unwrap() > 0 {
        matrix.push(
            line.trim()
                .split_whitespace()
                .map(|s| s.parse().unwrap())
                .collect(),
        );
        line.clear();
    }
    return matrix;
}

#[allow(dead_code)]
fn read_chars() -> Vec<char> {
    let stdin = stdin();
    let mut buf = String::new();
    let _bytes = stdin.read_line(&mut buf).unwrap();
    return buf.trim().chars().collect();
}
true
3008c6537eca80826ec6dd06f8100a61c09b299c
Rust
devsnek/slither
/src/intrinsics/array_iterator_prototype.rs
UTF-8
1,528
2.59375
3
[ "MIT" ]
permissive
use crate::agent::Agent; use crate::value::{Args, ObjectKey, Value, ValueType}; fn next(args: Args) -> Result<Value, Value> { let o = args.this(); if o.type_of() != ValueType::Object { return Err(Value::new_error(args.agent(), "invalid receiver")); } let a = o.get_slot("iterated object"); if a == Value::Null { return Value::new_iter_result(args.agent(), Value::Null, true); } let index = if let Value::Number(n) = o.get_slot("array iterator next index") { n } else { unreachable!(); }; let len = if let Value::Number(n) = a.get( args.agent(), Value::from("length").to_object_key(args.agent())?, )? { n } else { return Err(Value::new_error(args.agent(), "invalid array length")); }; if index >= len { o.set_slot("iterated object", Value::Null); return Value::new_iter_result(args.agent(), Value::Null, true); } o.set_slot("array iterator next index", Value::from(index + 1.0)); let value = a.get( args.agent(), Value::from(index).to_object_key(args.agent())?, )?; Value::new_iter_result(args.agent(), value, false) } pub(crate) fn create_array_iterator_prototype(agent: &Agent) -> Value { let proto = Value::new_object(agent.intrinsics.iterator_prototype.clone()); proto .set( agent, ObjectKey::from("next"), Value::new_builtin_function(agent, next, false), ) .unwrap(); proto }
true
f8ef1a6f22d61578eacf0159ed9a7eb92147a529
Rust
dotanavi/dotamoji
/src/search_cache/no_cache.rs
UTF-8
771
2.578125
3
[]
no_license
use super::{SearchCache, SearchCache2}; #[derive(Serialize, Deserialize)] pub struct NoCache; impl SearchCache for NoCache { #[inline] fn new(_size: usize) -> Self { NoCache } #[inline] fn extend(&mut self, _size: usize) {} #[inline] fn mark(&mut self, _index: usize) {} #[inline] fn is_filled(&self, index: usize, check: &[u32]) -> bool { index < check.len() && check[index] != 0 } #[inline] fn find_empty(&self, search_start: usize, check: &[u32]) -> usize { let mut ix = search_start + 1; while ix < check.len() && check[ix] != 0 { ix += 1; } return ix; } } impl SearchCache2 for NoCache { #[inline] fn unmark(&mut self, _index: usize) {} }
true
c2003f2c966540fff8f3962dac79dad6e34452e7
Rust
wschella/rstat
/src/univariate/beta.rs
UTF-8
3,836
2.890625
3
[ "MIT" ]
permissive
use crate::{
    consts::{ONE_THIRD, TWO_THIRDS},
    prelude::*,
};
use rand;
use spaces::real::Interval;
use std::fmt;

shape_params! {
    Params<f64> {
        alpha,
        beta
    }
}

new_dist!(Beta<Params>);

macro_rules! get_params {
    ($self:ident) => {
        ($self.0.alpha.0, $self.0.beta.0)
    }
}

impl Beta {
    pub fn new(alpha: f64, beta: f64) -> Result<Beta, failure::Error> {
        Params::new(alpha, beta).map(|p| Beta(p))
    }

    pub fn new_unchecked(alpha: f64, beta: f64) -> Beta {
        Beta(Params::new_unchecked(alpha, beta))
    }
}

impl Default for Beta {
    fn default() -> Beta {
        Beta(Params::new_unchecked(1.0, 1.0))
    }
}

impl Distribution for Beta {
    type Support = Interval;
    type Params = Params;

    fn support(&self) -> Interval {
        Interval::bounded(0.0, 1.0)
    }

    fn params(&self) -> Params {
        self.0
    }

    fn cdf(&self, x: &f64) -> Probability {
        use special_fun::FloatSpecial;

        let (alpha, beta) = get_params!(self);

        Probability::new_unchecked(x.betainc(alpha, beta))
    }

    fn sample<R: rand::Rng + ?Sized>(&self, rng: &mut R) -> f64 {
        use rand_distr::Distribution as _;

        let (alpha, beta) = get_params!(self);

        rand_distr::Beta::new(alpha, beta).unwrap().sample(rng)
    }
}

impl ContinuousDistribution for Beta {
    fn pdf(&self, x: &f64) -> f64 {
        use special_fun::FloatSpecial;

        let (a, b) = get_params!(self);

        let numerator = x.powf(a - 1.0) * (1.0 - x).powf(b - 1.0);
        let denominator = a.beta(b);

        numerator / denominator
    }
}

impl UnivariateMoments for Beta {
    fn mean(&self) -> f64 {
        let (a, b) = get_params!(self);

        1.0 / (1.0 + b / a)
    }

    fn variance(&self) -> f64 {
        let (a, b) = get_params!(self);
        let apb = a + b;

        a * b / (apb * apb * (apb + 1.0))
    }

    fn skewness(&self) -> f64 {
        let (a, b) = get_params!(self);
        let apb = a + b;

        2.0 * (b - a) * (apb + 1.0).sqrt() / (apb + 2.0) / (a * b).sqrt()
    }

    fn excess_kurtosis(&self) -> f64 {
        let (a, b) = get_params!(self);

        let apb = a + b;
        let asb = a - b;
        let amb = a * b;
        let apbp2 = apb + 2.0;

        // 6 [ (a - b)^2 (a + b + 1) - a b (a + b + 2) ] / [ a b (a + b + 2) (a + b + 3) ]
        6.0 * (asb * asb * (apb + 1.0) - amb * apbp2) / (amb * apbp2 * (apb + 3.0))
    }
}

impl Quantiles for Beta {
    fn quantile(&self, _: Probability) -> f64 {
        unimplemented!()
    }

    fn median(&self) -> f64 {
        let (a, b) = get_params!(self);

        if (a - b).abs() < 1e-7 {
            0.5
        } else if a > 1.0 && b > 1.0 {
            (a - ONE_THIRD) / (a + b - TWO_THIRDS)
        } else if (a - 1.0).abs() < 1e-7 {
            1.0 - 2.0f64.powf(-1.0 / b)
        } else if (b - 1.0).abs() < 1e-7 {
            2.0f64.powf(-1.0 / a)
        } else if (a - 3.0).abs() < 1e-7 && (b - 2.0).abs() < 1e-7 {
            0.6142724318676105
        } else if (a - 2.0).abs() < 1e-7 && (b - 3.0).abs() < 1e-7 {
            0.38572756813238945
        } else {
            undefined!()
        }
    }
}

impl Modes for Beta {
    fn modes(&self) -> Vec<f64> {
        let (a, b) = get_params!(self);

        if a > 1.0 && b > 1.0 {
            vec![(a - 1.0) / (a + b - 2.0)]
        } else if a < 1.0 && b < 1.0 {
            vec![0.0, 1.0]
        } else {
            vec![]
        }
    }
}

impl Entropy for Beta {
    fn entropy(&self) -> f64 {
        use special_fun::FloatSpecial;

        let (a, b) = get_params!(self);
        let apb = a + b;

        a.logbeta(b) - (a - 1.0) * a.digamma() - (b - 1.0) * b.digamma()
            + (apb - 2.0) * apb.digamma()
    }
}

impl fmt::Display for Beta {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let (a, b) = get_params!(self);

        write!(f, "Beta({}, {})", a, b)
    }
}
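// Usage sketch for the distribution defined above; it assumes the prelude traits this
// module already imports (`UnivariateMoments`, `ContinuousDistribution`) are in scope via
// `use super::*`, and only checks two well-known Beta(2, 5) values.
#[cfg(test)]
mod beta_sketch {
    use super::*;

    #[test]
    fn mean_and_density_of_beta_2_5() {
        let d = Beta::new(2.0, 5.0).unwrap();

        // E[X] = a / (a + b) = 2 / 7
        assert!((d.mean() - 2.0 / 7.0).abs() < 1e-12);

        // pdf(x) = x^(a-1) (1-x)^(b-1) / B(a, b); B(2, 5) = 1/30,
        // so pdf(0.5) = 30 * 0.5 * 0.5^4 = 0.9375.
        assert!((d.pdf(&0.5) - 0.9375).abs() < 1e-9);
    }
}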
true
9d084e8e9ce0652e39c6143b894376b911f10aa7
Rust
kazzix14/keybox
/src/lib.rs
UTF-8
2,570
3.546875
4
[]
no_license
use bs58; use itertools::Itertools; use sha3::{digest::*, Shake256}; // Bitcoin style // 123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz pub struct KeyGenerator { password_digest: String, hasher: Shake256, } impl KeyGenerator { pub fn new(password: String) -> Self { let mut hasher = Shake256::default(); hasher.update(password); let password_digest = hasher.finalize_boxed_reset(64); Self { password_digest: bs58::encode(password_digest.into_vec()).into_string(), hasher, } } pub fn gen( &mut self, nickname: String, key_length: usize, additional_characters: Vec<char>, ) -> String { let password_digest = &self.password_digest; let source = key_length.to_string() + password_digest + &nickname; self.hasher.update(source); let result = self.hasher.finalize_boxed_reset(64); let mut encoded = bs58::encode(result.into_vec()).into_string(); encoded.truncate(key_length); let char_counts = encoded.chars().counts(); for (from, to) in char_counts .into_iter() .sorted() .zip(additional_characters.into_iter()) { encoded = encoded.replace(&String::from(from.0), &String::from(to)); } encoded } } #[cfg(test)] mod test { use crate::*; #[test] fn test_hasher() { let mut key_gen = KeyGenerator::new(String::from("My very very long rememberable password!!!!!")); let key_length = 16; let key = key_gen.gen( String::from("nickname of a service or something that I can remember"), key_length, vec!['!', '#'], ); // source : `My very very long rememberable password!!!!!` // shake256 (512 bits) : `521322bc4f5f53a5b37265d7c0df3c043ddc47e885f260c2903645043e9b8d6d8efaed4bd13b2cb86569e99cb1068c0daea40ea0a77bed6d1caa984a1455a22f` // base58 : `2eB7XEwM9pFnRfSfy9NJXfzNn1HVmpUYVz61z8Cwd4paKw5cQVt35cihEMQ5heW5i1bn4cDy9hh6Yy1QZUwHCJog` // source is key_length + password_digest + nickname // source : `162eB7XEwM9pFnRfSfy9NJXfzNn1HVmpUYVz61z8Cwd4paKw5cQVt35cihEMQ5heW5i1bn4cDy9hh6Yy1QZUwHCJognickname of a service or something that I can remember` // shake256 (512 bits) : `b00050118d5b49f85808d09ce9c5953fa3905b4964539bfe5c2e4de045f37b5723a39fb9dfd7c0355abe22a321867e13dbc370c4f9ca1b0f38b7fc15812f5f4e` // base58 : `4X6Lbepf8fUz9Sa63aNHpGar35ptodWaAEPPxA7mFtHXzMxPcPNE6PdZ3rfmRuhb5w3h9aZbfag4dPcjiFHmRGay` // key is first {key_length} characters of base58 // key : `4X6Lbepf8fUz9Sa6` // if additional characters are specified, characters in key is replaced from small(in ascii code) character. // numbers -> capital letters -> small letters // key : `!X#Lbepf8fUz9Sa#` assert_eq!(key, "!X#Lbepf8fUz9Sa#"); } }
true
7de5443bfa98c6e406879135ee940b9cb1583e89
Rust
aashah/advent_of_code
/2016/day6/src/main.rs
UTF-8
428
2.9375
3
[]
no_license
use std::fs::File; use std::io::prelude::*; extern crate day6; fn main() { let mut file = File::open("src/input.txt").expect("Could not open src/input.txt"); let mut input = String::new(); file.read_to_string(&mut input) .expect("Could not read file"); let input = input.trim(); println!("The input is {}", input); let answer = day6::puzzle(&input); println!("The answer is {}", answer); }
true
eb8be1b3522021a9dfa44bbb1a72ef32352ffaed
Rust
klaxit/heroku_rs
/examples/src/custom_examples.rs
UTF-8
1,882
2.90625
3
[ "MIT", "LicenseRef-scancode-unknown-license-reference", "Apache-2.0" ]
permissive
extern crate heroku_rs;
use super::print_response;
use heroku_rs::endpoints::custom;
use heroku_rs::framework::apiclient::HerokuApiClient;
use heroku_rs::framework::endpoint::Method;
use serde::Serialize;

pub fn run<ApiClientType: HerokuApiClient>(api_client: &ApiClientType) {
    let app_name = String::from("heroku-rs-tests");
    get_app_custom(api_client, app_name);

    // create_app_custom(api_client, app_name);
    // delete_app_custom(api_client, app_name); //Careful here :)
}

// Delete an app
fn delete_app_custom<T: HerokuApiClient>(api_client: &T, app_id: String) {
    let query = format!("{}{}", "apps/", app_id);
    let method = Method::Delete;

    let response = api_client.request(&custom::CustomEndpointSimple {
        query: query,
        method: method,
    });

    print_response(response);
}

// Create an app
fn create_app_custom<T: HerokuApiClient>(api_client: &T, app_id: String) {
    let query = String::from("apps");
    let param = AppCreateParam {
        name: Some(app_id),
        region: None,
        stack: None,
    };
    let method = Method::Post;

    let response = api_client.request(&custom::CustomEndpoint {
        query: query,
        method: method,
        params: param,
    });

    print_response(response);
}

#[derive(Serialize, Clone, Debug)]
pub struct AppCreateParam {
    /// name of app. pattern: ^[a-z][a-z0-9-]{1,28}[a-z0-9]$
    pub name: Option<String>,
    /// unique identifier or name of region
    pub region: Option<String>,
    /// unique name or identifier of stack
    pub stack: Option<String>,
}

// Get info about an app
fn get_app_custom<T: HerokuApiClient>(api_client: &T, app_id: String) {
    let query = format!("{}{}", "apps/", app_id);
    let method = Method::Get;

    let response = api_client.request(&custom::CustomEndpointSimple::new(query, method));

    print_response(response);
}
true
c3ce8a7aaf410fe651dbe0c1c6289f79081e9fe5
Rust
pmuens/crawler
/src/logging.rs
UTF-8
605
2.765625
3
[]
no_license
use chrono::DateTime; use chrono::Utc; use std::time::SystemTime; pub fn formatted_now() -> String { let system_time = SystemTime::now(); let date_time: DateTime<Utc> = system_time.into(); date_time.format("%d/%m/%Y %T").to_string() } #[allow(unused_macros)] macro_rules! log { ($msg:expr) => { println!("INFO - {} - \"{}\"", $crate::logging::formatted_now(), $msg); }; } #[allow(unused_macros)] macro_rules! loge { ($msg:expr) => { eprintln!( "FATAL - {} - \"{}\"", $crate::logging::formatted_now(), $msg ); }; }
true
dd13394f16f31d52818162364d3c8a1c2ed87af9
Rust
xixixao/orbtk
/crates/widgets/src/behaviors/selection_behavior.rs
UTF-8
2,222
2.796875
3
[ "MIT" ]
permissive
use crate::{api::prelude::*, proc_macros::*}; /// The `SelectionBehaviorState` handles the `SelectionBehavior` widget. #[derive(Default, AsAny)] pub struct SelectionBehaviorState { toggle_selection: bool, selected: bool, } impl SelectionBehaviorState { fn toggle_selection(&mut self) { self.toggle_selection = true; } } impl State for SelectionBehaviorState { fn init(&mut self, _: &mut Registry, ctx: &mut Context) { self.selected = *SelectionBehavior::selected_ref(&ctx.widget()); let target = *ctx.widget().get::<u32>("target"); toggle_flag("selected", &mut ctx.get_widget(Entity(target))); ctx.get_widget(Entity(target)).update(false); } fn update(&mut self, _: &mut Registry, ctx: &mut Context) { let selected = *SelectionBehavior::selected_ref(&ctx.widget()); let target: Entity = (*SelectionBehavior::target_ref(&ctx.widget())).into(); if self.selected == selected && !self.toggle_selection { return; } if *SelectionBehavior::enabled_ref(&ctx.widget()) && self.toggle_selection { ctx.get_widget(target).set("selected", !selected); } self.toggle_selection = false; self.selected = *SelectionBehavior::selected_ref(&ctx.widget()); toggle_flag("selected", &mut ctx.get_widget(target)); ctx.get_widget(target).update(false); } } widget!( /// The `SelectionBehavior` widget is used to handle internal the pressed behavior of a widget. /// /// **style:** `check-box` SelectionBehavior<SelectionBehaviorState>: MouseHandler { /// Sets or shares the target of the behavior. target: u32, /// Sets or shares the selected property. selected: bool, /// Sets the parent id. parent: u32 } ); impl Template for SelectionBehavior { fn template(self, id: Entity, _: &mut BuildContext) -> Self { self.name("SelectionBehavior") .selected(true) .on_click(move |states, _| { states .get_mut::<SelectionBehaviorState>(id) .toggle_selection(); false }) } }
true
8eee8760d627c9ed0ccd949720df18ad5cacf1ed
Rust
totechite/chibidb
/src/storage/catalog.rs
UTF-8
1,873
2.734375
3
[ "MIT" ]
permissive
use std::env; use crate::storage::util::Scheme; use std::fs::{File, ReadDir, read_dir, create_dir}; use std::io::{BufWriter, IntoInnerError, Error, Write, Read}; use std::env::VarError; use std::collections::{HashSet, HashMap}; use serde_json::Deserializer; use std::path::{Path, PathBuf}; use crate::storage::magic_number::PAGE_SIZE; #[derive(Default, Debug)] pub struct Catalog { pub schemes: HashMap<u64, Scheme> } impl Catalog { pub fn add(&mut self, s: Scheme) { self.schemes.insert(s.table_id, s.clone()); self.save(s).unwrap(); } } impl Catalog { fn new() -> Self { Default::default() } pub fn save(&self, s: Scheme) -> Result<(), Error> { let mut f = { let SCHEME_PATH = env::var("CHIBIDB_SCHEME_PATH").unwrap(); let mut path = PathBuf::from(SCHEME_PATH); path.push(format!("{:?}.scheme ", s.table_id)); let mut f = File::create(path)?; BufWriter::new(f) }; let json: String = serde_json::to_string(&s)?; f.write_all(json.as_bytes())?; Ok(()) } pub fn load() -> Result<Self, Error> { let mut schemes = HashMap::new(); let mut scheme_dir = { let SCHEME_PATH = env::var("CHIBIDB_SCHEME_PATH").unwrap(); let path = Path::new(&SCHEME_PATH); if !path.exists() { create_dir(&path).unwrap(); } read_dir( path) }; if let Ok(scheme_dir) = scheme_dir { for f in scheme_dir { let p = f?.path(); let mut buf = Vec::new(); File::open(p)?.read_to_end(&mut buf)?; let scheme: Scheme = serde_json::from_slice::<Scheme>(&buf)?; schemes.insert(scheme.table_id, scheme); } } Ok(Catalog { schemes }) } }
true
ddec50e1728e61e8d913a1b87492d40914a130dd
Rust
HdrHistogram/HdrHistogram_rust
/tests/data_access.rs
UTF-8
18,642
3.015625
3
[ "MIT", "Apache-2.0" ]
permissive
//! Tests from HistogramDataAccessTest.java use hdrhistogram::Histogram; macro_rules! assert_near { ($a:expr, $b:expr, $tolerance:expr) => {{ let a = $a as f64; let b = $b as f64; let tol = $tolerance as f64; assert!( (a - b).abs() <= b * tol, "assertion failed: `(left ~= right) (left: `{}`, right: `{}`, tolerance: `{:.5}%`)", a, b, 100.0 * tol ); }}; } #[allow(dead_code)] struct Loaded { hist: Histogram<u64>, scaled_hist: Histogram<u64>, raw: Histogram<u64>, scaled_raw: Histogram<u64>, post: Histogram<u64>, scaled_post: Histogram<u64>, } const TRACKABLE_MAX: u64 = 3600 * 1000 * 1000; // Store up to 2 * 10^3 in single-unit precision. Can be 5 at most. const SIGFIG: u8 = 3; const EINTERVAL: u64 = 10000; /* 10 msec expected EINTERVAL */ const SCALEF: u64 = 512; fn load_histograms() -> Loaded { let mut hist = Histogram::new_with_max(TRACKABLE_MAX, SIGFIG).unwrap(); let mut scaled_hist = Histogram::new_with_bounds(1000, TRACKABLE_MAX * SCALEF, SIGFIG).unwrap(); let mut raw = Histogram::new_with_max(TRACKABLE_MAX, SIGFIG).unwrap(); let mut scaled_raw = Histogram::new_with_bounds(1000, TRACKABLE_MAX * SCALEF, SIGFIG).unwrap(); // Log hypothetical scenario: 100 seconds of "perfect" 1msec results, sampled // 100 times per second (10,000 results), followed by a 100 second pause with a single (100 // second) recorded result. Recording is done indicating an expected EINTERVAL between samples // of 10 msec: for _ in 0..10_000 { let v = 1_000; // 1ms hist.record_correct(v, EINTERVAL).unwrap(); scaled_hist .record_correct(v * SCALEF, EINTERVAL * SCALEF) .unwrap(); raw += v; scaled_raw += v * SCALEF; } let v = 100_000_000; hist.record_correct(v, EINTERVAL).unwrap(); scaled_hist .record_correct(v * SCALEF, EINTERVAL * SCALEF) .unwrap(); raw += v; scaled_raw += v * SCALEF; let post = raw.clone_correct(EINTERVAL); let scaled_post = scaled_raw.clone_correct(EINTERVAL * SCALEF); Loaded { hist, scaled_hist, raw, scaled_raw, post, scaled_post, } } #[test] fn scaling_equivalence() { let Loaded { hist, scaled_hist, post, scaled_post, .. } = load_histograms(); assert_near!(hist.mean() * SCALEF as f64, scaled_hist.mean(), 0.000001); assert_eq!(hist.len(), scaled_hist.len()); let expected_99th = hist.value_at_quantile(0.99) * 512; let scaled_99th = scaled_hist.value_at_quantile(0.99); assert_eq!( hist.lowest_equivalent(expected_99th), scaled_hist.lowest_equivalent(scaled_99th) ); // averages should be equivalent assert_near!(hist.mean() * SCALEF as f64, scaled_hist.mean(), 0.000001); // total count should be the same assert_eq!(hist.len(), scaled_hist.len()); // 99%'iles should be equivalent assert_eq!( scaled_hist.highest_equivalent(hist.value_at_quantile(0.99) * 512), scaled_hist.highest_equivalent(scaled_hist.value_at_quantile(0.99)) ); // Max should be equivalent assert_eq!( scaled_hist.highest_equivalent(hist.max() * 512), scaled_hist.max() ); // Same for post-corrected: // averages should be equivalent assert_near!(post.mean() * SCALEF as f64, scaled_post.mean(), 0.000001); // total count should be the same assert_eq!(post.len(), scaled_post.len()); // 99%'iles should be equivalent assert_eq!( post.lowest_equivalent(post.value_at_quantile(0.99)) * SCALEF, scaled_post.lowest_equivalent(scaled_post.value_at_quantile(0.99)) ); // Max should be equivalent assert_eq!( scaled_post.highest_equivalent(post.max() * 512), scaled_post.max() ); } #[test] fn total_count() { let Loaded { hist, raw, .. 
} = load_histograms(); assert_eq!(raw.len(), 10001); assert_eq!(hist.len(), 20000); } #[test] fn get_max_value() { let Loaded { hist, .. } = load_histograms(); assert!(hist.equivalent(hist.max(), 100000000)); } #[test] fn get_min_value() { let Loaded { hist, .. } = load_histograms(); assert!(hist.equivalent(hist.min(), 1000)); } #[test] fn get_mean() { let Loaded { hist, raw, .. } = load_histograms(); // direct avg. of raw results let expected_raw_mean = ((10000.0 * 1000.0) + (1.0 * 100000000.0)) / 10001.0; // avg. 1 msec for half the time, and 50 sec for other half let expected_mean = (1000.0 + 50000000.0) / 2.0; // We expect to see the mean to be accurate to ~3 decimal points (~0.1%): assert_near!(raw.mean(), expected_raw_mean, 0.001); assert_near!(hist.mean(), expected_mean, 0.001); } #[test] fn get_stdev() { let Loaded { hist, raw, .. } = load_histograms(); // direct avg. of raw results let expected_raw_mean: f64 = ((10000.0 * 1000.0) + (1.0 * 100000000.0)) / 10001.0; let expected_raw_std_dev = (((10000.0 * (1000_f64 - expected_raw_mean).powi(2)) + (100000000_f64 - expected_raw_mean).powi(2)) / 10001.0) .sqrt(); // avg. 1 msec for half the time, and 50 sec for other half let expected_mean = (1000.0 + 50000000.0) / 2_f64; let mut expected_square_deviation_sum = 10000.0 * (1000_f64 - expected_mean).powi(2); let mut value = 10000_f64; while value <= 100000000.0 { expected_square_deviation_sum += (value - expected_mean).powi(2); value += 10000.0; } let expected_std_dev = (expected_square_deviation_sum / 20000.0).sqrt(); // We expect to see the standard deviations to be accurate to ~3 decimal points (~0.1%): assert_near!(raw.stdev(), expected_raw_std_dev, 0.001); assert_near!(hist.stdev(), expected_std_dev, 0.001); } #[test] fn quantiles() { let Loaded { hist, raw, .. } = load_histograms(); assert_near!(raw.value_at_quantile(0.3), 1000.0, 0.001); assert_near!(raw.value_at_quantile(0.99), 1000.0, 0.001); assert_near!(raw.value_at_quantile(0.9999), 1000.0, 0.001); assert_near!(raw.value_at_quantile(0.99999), 100000000.0, 0.001); assert_near!(raw.value_at_quantile(1.0), 100000000.0, 0.001); assert_near!(hist.value_at_quantile(0.3), 1000.0, 0.001); assert_near!(hist.value_at_quantile(0.5), 1000.0, 0.001); assert_near!(hist.value_at_quantile(0.75), 50000000.0, 0.001); assert_near!(hist.value_at_quantile(0.9), 80000000.0, 0.001); assert_near!(hist.value_at_quantile(0.99), 98000000.0, 0.001); assert_near!(hist.value_at_quantile(0.99999), 100000000.0, 0.001); assert_near!(hist.value_at_quantile(1.0), 100000000.0, 0.001); } #[test] fn large_quantile() { let largest_value = 1000000000000_u64; let mut h = Histogram::<u64>::new_with_max(largest_value, 5).unwrap(); h += largest_value; assert!(h.value_at_quantile(1.0) > 0); } #[test] fn quantile_atorbelow() { let Loaded { hist, raw, .. 
} = load_histograms(); assert_near!(0.9999, raw.quantile_below(5000), 0.0001); assert_near!(0.5, hist.quantile_below(5000), 0.0001); assert_near!(1.0, hist.quantile_below(100000000_u64), 0.0001); } #[test] fn quantile_below_saturates() { let mut h = Histogram::<u64>::new_with_bounds(1, u64::max_value(), 3).unwrap(); for i in 0..1024 { h.record_n(i, u64::max_value() - 1).unwrap(); } // really it should be 0.5 but it saturates at u64::max_value() assert_eq!(1.0, h.quantile_below(512)); } #[test] fn quantile_below_value_beyond_max() { let mut h = Histogram::<u64>::new_with_bounds(1, 100_000, 3).unwrap(); for i in 0..1024 { h.record(i).unwrap(); } // also a bunch at maximum value, should be included in the resulting quantile for _ in 0..1024 { h.record(100_000).unwrap(); } assert_eq!(1.0, h.quantile_below(u64::max_value())); } #[test] fn count_between() { let Loaded { hist, raw, .. } = load_histograms(); assert_eq!(raw.count_between(1000, 1000), 10000); assert_eq!(raw.count_between(5000, 150000000), 1); assert_eq!(hist.count_between(5000, 150000000), 10000); } #[test] fn count_between_high_beyond_max() { let mut h = Histogram::<u64>::new_with_bounds(1, 100_000, 3).unwrap(); // largest expressible value will land in last index h.record((1 << 17) - 1).unwrap(); assert_eq!(1, h.count_between(50, 300_000)); } #[test] fn count_between_low_and_high_beyond_max() { let mut h = Histogram::<u64>::new_with_bounds(1, 100_000, 3).unwrap(); // largest expressible value will land in last index h.record((1 << 17) - 1).unwrap(); assert_eq!(1, h.count_between(200_000, 300_000)); } #[test] fn count_between_saturates() { let mut h = Histogram::<u64>::new_with_bounds(1, u64::max_value(), 3).unwrap(); for i in 0..1024 { h.record_n(i, u64::max_value() - 1).unwrap(); } assert_eq!(u64::max_value(), h.count_between(100, 200)); } #[test] fn count_at() { let Loaded { hist, raw, .. } = load_histograms(); assert_eq!(raw.count_between(10000, 10010), 0); assert_eq!(hist.count_between(10000, 10010), 1); assert_eq!(raw.count_at(1000), 10000); assert_eq!(hist.count_at(1000), 10000); } #[test] fn count_at_beyond_max_value() { let mut h = Histogram::<u64>::new_with_bounds(1, 100_000, 3).unwrap(); // largest expressible value will land in last index h.record((1 << 17) - 1).unwrap(); assert_eq!(1, h.count_at(u64::max_value())); } #[test] fn quantile_iter() { let Loaded { hist, .. } = load_histograms(); for v in hist.iter_quantiles(5 /* ticks per half */) { assert_eq!( v.value_iterated_to(), hist.highest_equivalent(hist.value_at_quantile(v.quantile())) ); } } #[test] fn linear_iter_raw() { let Loaded { raw, .. } = load_histograms(); // Note that using linear buckets should work "as expected" as long as the number of linear // buckets is lower than the resolution level determined by // largest_value_with_single_unit_resolution (2000 in this case). Above that count, some of the // linear buckets can end up rounded up in size (to the nearest local resolution unit level), // which can result in a smaller number of buckets that expected covering the range. // Iterate raw data using linear buckets of 100 msec each. 
let mut num = 0; for (i, v) in raw.iter_linear(100_000).enumerate() { match i { // Raw Linear 100 msec bucket # 0 added a count of 10000 0 => assert_eq!(v.count_since_last_iteration(), 10_000), // Raw Linear 100 msec bucket # 999 added a count of 1 999 => assert_eq!(v.count_since_last_iteration(), 1), // Remaining raw Linear 100 msec buckets add a count of 0 _ => assert_eq!(v.count_since_last_iteration(), 0), } num += 1; } assert_eq!(num, 1_000); } #[test] fn linear_iter_corrected() { let Loaded { hist, .. } = load_histograms(); let mut num = 0; let mut total_added_counts = 0; // Iterate data using linear buckets of 10 msec each. for (i, v) in hist.iter_linear(10_000).enumerate() { if i == 0 { assert_eq!(v.count_since_last_iteration(), 10_000); } // Because value resolution is low enough (3 digits) that multiple linear buckets will end // up residing in a single value-equivalent range, some linear buckets will have counts of // 2 or more, and some will have 0 (when the first bucket in the equivalent range was the // one that got the total count bump). However, we can still verify the sum of counts added // in all the buckets... total_added_counts += v.count_since_last_iteration(); num += 1; } // There should be 10000 linear buckets of size 10000 usec between 0 and 100 sec. assert_eq!(num, 10_000); assert_eq!(total_added_counts, 20_000); num = 0; total_added_counts = 0; // Iterate data using linear buckets of 1 msec each. for (i, v) in hist.iter_linear(1_000).enumerate() { if i == 1 { assert_eq!(v.count_since_last_iteration(), 10_000); } // Because value resolution is low enough (3 digits) that multiple linear buckets will end // up residing in a single value-equivalent range, some linear buckets will have counts of // 2 or more, and some will have 0 (when the first bucket in the equivalent range was the // one that got the total count bump). However, we can still verify the sum of counts added // in all the buckets... total_added_counts += v.count_since_last_iteration(); num += 1 } // You may ask "why 100007 and not 100000?" for the value below? The answer is that at this // fine a linear stepping resolution, the final populated sub-bucket (at 100 seconds with 3 // decimal point resolution) is larger than our liner stepping, and holds more than one linear // 1 msec step in it. // // Since we only know we're done with linear iteration when the next iteration step will step // out of the last populated bucket, there is not way to tell if the iteration should stop at // 100000 or 100007 steps. The proper thing to do is to run to the end of the sub-bucket // quanta... assert_eq!(num, 100_007); assert_eq!(total_added_counts, 20_000); } #[test] fn iter_log() { let Loaded { hist, raw, .. } = load_histograms(); // Iterate raw data using logarithmic buckets starting at 10 msec. 
let mut num = 0; for (i, v) in raw.iter_log(10000, 2.0).enumerate() { match i { // Raw logarithmic 10 msec bucket # 0 added a count of 10000 0 => assert_eq!(v.count_since_last_iteration(), 10000), // Raw logarithmic 10 msec bucket # 14 added a count of 1 14 => assert_eq!(v.count_since_last_iteration(), 1), // Remaining raw logarithmic 100 msec buckets add a count of 0 _ => assert_eq!(v.count_since_last_iteration(), 0), } num += 1; } assert_eq!(num - 1, 14); num = 0; let mut total_added_counts = 0; for (i, v) in hist.iter_log(10000, 2.0).enumerate() { if i == 0 { assert_eq!(v.count_since_last_iteration(), 10000); } total_added_counts += v.count_since_last_iteration(); num += 1; } // There should be 14 Logarithmic buckets of size 10000 usec between 0 and 100 sec. assert_eq!(num - 1, 14); assert_eq!(total_added_counts, 20000); } #[test] fn iter_recorded() { let Loaded { hist, raw, .. } = load_histograms(); // Iterate raw data by stepping through every value that has a count recorded: let mut num = 0; for (i, v) in raw.iter_recorded().enumerate() { match i { // Raw recorded value bucket # 0 added a count of 10000 0 => assert_eq!(v.count_since_last_iteration(), 10000), // Remaining recorded value buckets add a count of 1 _ => assert_eq!(v.count_since_last_iteration(), 1), } num += 1; } assert_eq!(num, 2); num = 0; let mut total_added_counts = 0; for (i, v) in hist.iter_recorded().enumerate() { if i == 0 { assert_eq!(v.count_since_last_iteration(), 10000); } // The count in a recorded iterator value should never be zero assert_ne!(v.count_at_value(), 0); // The count in a recorded iterator value should exactly match the amount added since the // last iteration assert_eq!(v.count_at_value(), v.count_since_last_iteration()); total_added_counts += v.count_since_last_iteration(); num += 1; } assert_eq!(total_added_counts, 20000); } #[test] fn iter_all() { let Loaded { hist, raw, .. } = load_histograms(); // Iterate raw data by stepping through every value that has a count recorded: let mut num = 0; for (i, v) in raw.iter_all().enumerate() { if i == 1000 { assert_eq!(v.count_since_last_iteration(), 10000); } else if hist.equivalent(v.value_iterated_to(), 100000000) { assert_eq!(v.count_since_last_iteration(), 1); } else { assert_eq!(v.count_since_last_iteration(), 0); } // TODO: also test total count and total value once the iterator exposes this num += 1; } assert_eq!(num, hist.distinct_values()); num = 0; let mut total_added_counts = 0; // HistogramIterationValue v1 = null; for (i, v) in hist.iter_all().enumerate() { // v1 = v; if i == 1000 { assert_eq!(v.count_since_last_iteration(), 10000); } // The count in iter_all buckets should exactly match the amount added since the last // iteration assert_eq!(v.count_at_value(), v.count_since_last_iteration()); total_added_counts += v.count_since_last_iteration(); num += 1; } assert_eq!(num, hist.distinct_values()); assert_eq!(total_added_counts, 20000); } #[test] fn linear_iter_steps() { let mut histogram = Histogram::<u64>::new(2).unwrap(); histogram += 193; histogram += 0; histogram += 1; histogram += 64; histogram += 128; assert_eq!(histogram.iter_linear(64).count(), 4); } #[test] fn value_duplication() { let Loaded { hist, .. 
} = load_histograms(); let histogram1 = hist.clone(); let mut num = 0; let mut ranges = Vec::with_capacity(histogram1.distinct_values()); let mut counts = Vec::with_capacity(histogram1.distinct_values()); for v in histogram1.iter_all() { if v.count_since_last_iteration() > 0 { ranges.push(v.value_iterated_to()); counts.push(v.count_since_last_iteration()); } num += 1; } assert_eq!(num, histogram1.distinct_values()); let mut histogram2 = Histogram::new_with_max(TRACKABLE_MAX, SIGFIG).unwrap(); for i in 0..ranges.len() { histogram2.record_n(ranges[i], counts[i]).unwrap(); } assert_eq!( histogram1, histogram2, "histograms should be equal after re-recording" ); } #[test] fn total_count_exceeds_bucket_type() { let mut h: Histogram<u8> = Histogram::new(3).unwrap(); for _ in 0..200 { h.record(100).unwrap(); } for _ in 0..200 { h.record(100_000).unwrap(); } assert_eq!(400, h.len()); }
true
d5a55ee02cdc992bb260226ac341039303346837
Rust
bhechinger/tunnel_manager
/src/storage/permissions.rs
UTF-8
4,866
2.796875
3
[]
no_license
use diesel::prelude::*;
use diesel::r2d2::{ConnectionManager, Pool};
use tonic::Status;
use tracing::instrument;

use crate::api::permission_request::IdOrName;
use crate::api::PermissionData;
use crate::schema::permissions;
use crate::schema::permissions::dsl::*;
use crate::storage::helpers::sql_err_to_grpc_error;

#[derive(Queryable, Default, Debug)]
pub struct Permission {
    pub id: i32,
    pub name: String,
    pub description: String,
}

#[derive(Insertable)]
#[diesel(table_name = permissions)]
pub struct NewPermission<'a> {
    pub name: &'a str,
    pub description: &'a str,
}

#[derive(AsChangeset, Default)]
#[diesel(table_name = permissions)]
pub struct UpdatePermission {
    pub name: Option<String>,
    pub description: Option<String>,
}

impl From<Permission> for PermissionData {
    fn from(p: Permission) -> PermissionData {
        PermissionData {
            id: Some(p.id),
            name: p.name,
            description: p.description,
        }
    }
}

impl From<&Permission> for PermissionData {
    fn from(p: &Permission) -> PermissionData {
        PermissionData {
            id: Some(p.id),
            name: p.name.clone(),
            description: p.description.clone(),
        }
    }
}

impl Permission {
    #[instrument]
    pub async fn all(
        pool: &Pool<ConnectionManager<PgConnection>>,
    ) -> Result<Vec<PermissionData>, Status> {
        let conn = &mut pool.get().unwrap();

        match permissions.load::<Permission>(conn) {
            Ok(results) => Ok(results.iter().map(|t| t.into()).collect()),
            Err(err) => Err(sql_err_to_grpc_error(err)),
        }
    }

    #[instrument]
    pub async fn get(
        pool: &Pool<ConnectionManager<PgConnection>>,
        id_or_name: &IdOrName,
    ) -> Result<PermissionData, Status> {
        let conn = &mut pool.get().unwrap();

        match id_or_name {
            IdOrName::Id(user_id) => match permissions.find(user_id).first::<Permission>(conn) {
                Ok(results) => Ok(results.into()),
                Err(err) => Err(sql_err_to_grpc_error(err)),
            },
            IdOrName::Name(permission_name) => {
                match permissions
                    .filter(name.eq(permission_name))
                    .first::<Permission>(conn)
                {
                    Ok(results) => Ok(results.into()),
                    Err(err) => Err(sql_err_to_grpc_error(err)),
                }
            }
        }
    }

    #[instrument]
    pub async fn add(
        pool: &Pool<ConnectionManager<PgConnection>>,
        permission_data: PermissionData,
    ) -> Result<PermissionData, Status> {
        let new_user = NewPermission {
            name: permission_data.name.as_str(),
            description: permission_data.description.as_str(),
        };

        let conn = &mut pool.get().unwrap();

        match diesel::insert_into(permissions)
            .values(&new_user)
            .get_result::<Permission>(conn)
        {
            Ok(results) => Ok(results.into()),
            Err(err) => Err(sql_err_to_grpc_error(err)),
        }
    }

    #[instrument]
    pub async fn update(
        pool: &Pool<ConnectionManager<PgConnection>>,
        permission_data: PermissionData,
    ) -> Result<PermissionData, Status> {
        let conn = &mut pool.get().unwrap();
        let mut update = UpdatePermission::default();

        if permission_data.id.is_none() {
            return Err(Status::invalid_argument("Permission id is required"));
        }

        if !permission_data.name.is_empty() {
            update.name = Some(permission_data.name);
        }

        if !permission_data.description.is_empty() {
            update.description = Some(permission_data.description)
        }

        match diesel::update(permissions.find(permission_data.id.unwrap()))
            .set(update)
            .get_result::<Permission>(conn)
        {
            Ok(results) => Ok(results.into()),
            Err(err) => Err(sql_err_to_grpc_error(err)),
        }
    }

    #[instrument]
    pub async fn delete(
        pool: &Pool<ConnectionManager<PgConnection>>,
        id_or_name: IdOrName,
    ) -> Result<usize, Status> {
        let conn = &mut pool.get().unwrap();

        match id_or_name {
            IdOrName::Id(permission_id) => {
                match diesel::delete(permissions.find(permission_id)).execute(conn) {
                    Ok(results) => Ok(results),
                    Err(err) => Err(sql_err_to_grpc_error(err)),
                }
            }
            IdOrName::Name(permission_name) => {
                match diesel::delete(permissions.filter(name.eq(permission_name))).execute(conn) {
                    Ok(results) => Ok(results),
                    Err(err) => Err(sql_err_to_grpc_error(err)),
                }
            }
        }
    }
}
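// A small, database-free sketch of the `From<&Permission>` conversion above;
// the field values are arbitrary sample data, not fixtures from the repository.
#[cfg(test)]
mod conversion_tests {
    use super::*;

    #[test]
    fn permission_converts_to_api_data() {
        let p = Permission {
            id: 7,
            name: "admin".to_string(),
            description: "full access".to_string(),
        };

        let data: PermissionData = (&p).into();
        assert_eq!(data.id, Some(7));
        assert_eq!(data.name, "admin");
        assert_eq!(data.description, "full access");
    }
}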
true
a02368712c389c07c718d952bb7866092ed29966
Rust
halzy/twitch_api2
/src/helix/subscriptions/get_broadcaster_subscriptions.rs
UTF-8
5,839
3.28125
3
[ "Apache-2.0", "MIT", "LicenseRef-scancode-unknown-license-reference" ]
permissive
//! Get all of a broadcaster’s subscriptions. //! [`get-broadcaster-subscriptions`](https://dev.twitch.tv/docs/api/reference#get-broadcaster-subscriptions) //! //! # Accessing the endpoint //! //! ## Request: [GetBroadcasterSubscriptionsRequest] //! //! To use this endpoint, construct a [`GetBroadcasterSubscriptionsRequest`] with the [`GetBroadcasterSubscriptionsRequest::builder()`] method. //! //! ```rust, no_run //! use twitch_api2::helix::subscriptions::get_broadcaster_subscriptions; //! let request = get_broadcaster_subscriptions::GetBroadcasterSubscriptionsRequest::builder() //! .broadcaster_id("1234") //! .build(); //! ``` //! //! ## Response: [BroadcasterSubscription] //! //! Send the request to receive the response with [`HelixClient::req_get()`](helix::HelixClient::req_get). //! //! ```rust, no_run //! use twitch_api2::helix::{self, subscriptions::get_broadcaster_subscriptions}; //! # use twitch_api2::client; //! # #[tokio::main] //! # async fn main() -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> { //! # let client: helix::HelixClient<'static, client::DummyHttpClient> = helix::HelixClient::default(); //! # let token = twitch_oauth2::AccessToken::new("validtoken".to_string()); //! # let token = twitch_oauth2::UserToken::from_existing(twitch_oauth2::dummy_http_client, token, None, None).await?; //! let request = get_broadcaster_subscriptions::GetBroadcasterSubscriptionsRequest::builder() //! .broadcaster_id("1234") //! .build(); //! let response: Vec<get_broadcaster_subscriptions::BroadcasterSubscription> = client.req_get(request, &token).await?.data; //! # Ok(()) //! # } //! ``` //! //! You can also get the [`http::Request`] with [`request.create_request(&token, &client_id)`](helix::RequestGet::create_request) //! and parse the [`http::Response`] with [`GetBroadcasterSubscriptionsRequest::parse_response(None, &request.get_uri(), response)`](GetBroadcasterSubscriptionsRequest::parse_response) use super::*; use helix::RequestGet; /// Query Parameters for [Get Broadcaster Subscriptions](super::get_broadcaster_subscriptions) /// /// [`get-broadcaster-subscriptions`](https://dev.twitch.tv/docs/api/reference#get-broadcaster-subscriptions) #[derive(PartialEq, typed_builder::TypedBuilder, Deserialize, Serialize, Clone, Debug)] #[non_exhaustive] pub struct GetBroadcasterSubscriptionsRequest { /// User ID of the broadcaster. Must match the User ID in the Bearer token. #[builder(setter(into))] pub broadcaster_id: types::UserId, /// Unique identifier of account to get subscription status of. Accepts up to 100 values. #[builder(default)] pub user_id: Vec<types::UserId>, /// Cursor for forward pagination: tells the server where to start fetching the next set of results, in a multi-page response. The cursor value specified here is from the pagination response field of a prior query. #[builder(default)] pub after: Option<helix::Cursor>, /// Number of values to be returned per page. Limit: 100. Default: 20. #[builder(setter(into), default)] pub first: Option<String>, } /// Return Values for [Get Broadcaster Subscriptions](super::get_broadcaster_subscriptions) /// /// [`get-broadcaster-subscriptions`](https://dev.twitch.tv/docs/api/reference#get-broadcaster-subscriptions) #[derive(PartialEq, Deserialize, Serialize, Debug, Clone)] #[cfg_attr(feature = "deny_unknown_fields", serde(deny_unknown_fields))] #[non_exhaustive] pub struct BroadcasterSubscription { /// User ID of the broadcaster. pub broadcaster_id: types::UserId, /// Login of the broadcaster. 
pub broadcaster_login: types::UserName, /// Display name of the broadcaster. pub broadcaster_name: types::DisplayName, /// Determines if the subscription is a gift subscription. pub is_gift: bool, /// Type of subscription (Tier 1, Tier 2, Tier 3). 1000 = Tier 1, 2000 = Tier 2, 3000 = Tier 3 subscriptions. pub tier: types::SubscriptionTier, /// Name of the subscription. pub plan_name: String, /// ID of the subscribed user. pub user_id: types::UserId, /// Login of the subscribed user. pub user_login: types::UserName, /// Display name of the subscribed user. pub user_name: types::DisplayName, } impl Request for GetBroadcasterSubscriptionsRequest { type Response = Vec<BroadcasterSubscription>; const PATH: &'static str = "subscriptions"; #[cfg(feature = "twitch_oauth2")] const SCOPE: &'static [twitch_oauth2::Scope] = &[twitch_oauth2::Scope::ChannelReadSubscriptions]; } impl RequestGet for GetBroadcasterSubscriptionsRequest {} impl helix::Paginated for GetBroadcasterSubscriptionsRequest { fn set_pagination(&mut self, cursor: Option<helix::Cursor>) { self.after = cursor } } #[test] fn test_request() { use helix::*; let req = GetBroadcasterSubscriptionsRequest::builder() .broadcaster_id("123".to_string()) .build(); // From twitch docs. Malformed example on https://dev.twitch.tv/docs/api/reference#get-broadcaster-subscriptions let data = br#" { "data": [ { "broadcaster_id": "123", "broadcaster_login": "test_user", "broadcaster_name": "test_user", "is_gift": true, "tier": "1000", "plan_name": "The Ninjas", "user_id": "123", "user_login": "snoirf", "user_name": "snoirf" } ], "pagination": { "cursor": "xxxx" } } "# .to_vec(); let http_response = http::Response::builder().body(data).unwrap(); let uri = req.get_uri().unwrap(); assert_eq!( uri.to_string(), "https://api.twitch.tv/helix/subscriptions?broadcaster_id=123" ); dbg!( GetBroadcasterSubscriptionsRequest::parse_response(Some(req), &uri, http_response).unwrap() ); }
true
6618dd7f226de699f8ab8c619a359e826b3631a2
Rust
mgbatchelor/rust-playground
/chapter2/operators.rs
UTF-8
526
3.171875
3
[]
no_license
fn main() {
    println!("1 + 2 = {}", 1u32 + 2);
    println!("1 - 2 = {}", 1i32 - 2);
    println!("true && true = {}", true && true);
    println!("true || false = {}", true || false);
    println!("!true = {}", !true);
    println!("0011 AND 0101 = {:04b}", 0b0011 & 0b0101);
    println!("0011 OR 0101 = {:04b}", 0b0011 | 0b0101);
    println!("0011 XOR 0101 = {:04b}", 0b0011 ^ 0b0101);
    println!("1 << 5 = {}", 1 << 5);
    println!("0x80 >> 2 = 0x{:X}", 0x80 >> 2);
    println!("one million {}", 1_000_000);
}
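// The same operator results, asserted instead of printed; the values mirror
// the `println!` lines above and are easy to verify by hand.
#[cfg(test)]
mod tests {
    #[test]
    fn operator_results() {
        assert_eq!(1u32 + 2, 3);
        assert_eq!(1i32 - 2, -1);
        assert_eq!(0b0011 & 0b0101, 0b0001);
        assert_eq!(0b0011 | 0b0101, 0b0111);
        assert_eq!(0b0011 ^ 0b0101, 0b0110);
        assert_eq!(1 << 5, 32);
        assert_eq!(0x80 >> 2, 0x20);
    }
}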
true
f496557b891bcc32549f324886ce2833db802706
Rust
AcrylicShrimp/mazemaze
/src/network/handler.rs
UTF-8
5,983
2.90625
3
[]
no_license
extern crate byteorder; use byteorder::ReadBytesExt; pub enum Context { WorldReceive { width: u32, height: u32, data: Option<Vec<u8>>, player: Option<u32>, }, PlayerReceive, PlayerIdReceive, MoveReceive, } pub struct Handler { status: Option<u16>, context: Option<Context>, } impl Handler { pub fn new() -> Handler { Handler { status: None, context: None, } } pub fn handle_socket( &mut self, socket: &mut super::socket::Socket, world: &mut super::super::world::world::World, ) { let status_code: u16; { if self.status.is_none() { let received = socket.retrieve(); if received.is_none() { return; } self.status = Some( std::io::Cursor::new(received.unwrap()) .read_u16::<byteorder::LittleEndian>() .unwrap(), ); } status_code = self.status.unwrap(); } self.handle_packet(status_code, socket, world); } fn handle_packet( &mut self, status: u16, socket: &mut super::socket::Socket, world: &mut super::super::world::world::World, ) { match status { 1 => match self.context.as_mut() { Some(context) => match context { Context::WorldReceive { width, height, data, player, } => match socket.retrieve() { Some(received) => { if data.is_none() { *data = Some(received); socket.receive(4); } else if player.is_none() { let player_count = std::io::Cursor::new(&received) .read_u32::<byteorder::LittleEndian>() .unwrap(); *player = Some(player_count); socket.receive(player_count as usize * 19); } else { let mut players = vec![]; for index in 0..player.unwrap() as usize { let offset = index * 19; let id = std::io::Cursor::new(&received[offset..offset + 8]) .read_u64::<byteorder::LittleEndian>() .unwrap(); let color = ( received[offset + 8], received[offset + 9], received[offset + 10], ); let x = std::io::Cursor::new(&received[offset + 11..offset + 15]) .read_i32::<byteorder::LittleEndian>() .unwrap(); let y = std::io::Cursor::new(&received[offset + 15..offset + 19]) .read_i32::<byteorder::LittleEndian>() .unwrap(); world.add_player(id, color, x, y); players.push((id, color, x, y)); } world .player_controller_mut() .set_player_id(players.first().unwrap().0); println!("players: {:?}", players); world.init_map(super::super::world::map::Map::from_data( *width, *height, data.take().unwrap(), )); socket.receive(2); self.status = None; self.context = None; } } None => {} }, _ => unreachable!(), }, None => match socket.retrieve() { Some(received) => { let width = std::io::Cursor::new(&received[0..4]) .read_u32::<byteorder::LittleEndian>() .unwrap(); let height = std::io::Cursor::new(&received[4..8]) .read_u32::<byteorder::LittleEndian>() .unwrap(); self.context = Some(Context::WorldReceive { width, height, data: None, player: None, }); socket.receive((width * height) as usize); } None => { socket.receive(8); } }, }, 2 => match self.context.as_mut() { Some(context) => match context { Context::PlayerReceive => match socket.retrieve() { Some(received) => { let id = std::io::Cursor::new(&received[0..8]) .read_u64::<byteorder::LittleEndian>() .unwrap(); let color = (received[8], received[9], received[10]); let x = std::io::Cursor::new(&received[11..15]) .read_i32::<byteorder::LittleEndian>() .unwrap(); let y = std::io::Cursor::new(&received[15..19]) .read_i32::<byteorder::LittleEndian>() .unwrap(); world.add_player(id, color, x, y); println!("new player income: {:?}", (id, color, x, y)); socket.receive(2); self.status = None; self.context = None; } None => {} }, _ => unreachable!(), }, None => { socket.receive(19); self.context = Some(Context::PlayerReceive); } }, 3 => match self.context.as_mut() { Some(context) => match context { 
Context::PlayerIdReceive => match socket.retrieve() { Some(received) => { let id = std::io::Cursor::new(&received) .read_u64::<byteorder::LittleEndian>() .unwrap(); world.remove_player(id); println!("player exit: {:?}", id); socket.receive(2); self.status = None; self.context = None; } None => {} }, _ => unreachable!(), }, None => { socket.receive(8); self.context = Some(Context::PlayerIdReceive); } }, 4 => match self.context.as_mut() { Some(context) => match context { Context::MoveReceive => match socket.retrieve() { Some(received) => { let id = std::io::Cursor::new(&received) .read_u64::<byteorder::LittleEndian>() .unwrap(); for player in world.players_mut().iter_mut() { if player.id() != id { continue; } match received[8] { 0 => { player.y -= 1; } 1 => { player.y += 1; } 2 => { player.x -= 1; } 3 => { player.x += 1; } _ => {} } } socket.receive(2); self.status = None; self.context = None; } None => {} }, _ => unreachable!(), }, None => { socket.receive(9); self.context = Some(Context::MoveReceive); } }, _ => { socket.receive(2); self.status = None; self.context = None; } } } }
true
58f194356a75e7b899c981cca6f145c933583db8
Rust
ra192/card-api-rust
/src/main.rs
UTF-8
2,313
2.515625
3
[]
no_license
mod db;
mod token;
mod merchant;
mod transaction;
mod account;
mod card;
mod customer;

use warp::Filter;

use crate::db::{create_pool, DBPool};
use std::convert::Infallible;
use serde::{Serialize};

extern crate pretty_env_logger;
#[macro_use]
extern crate log;

use std::env;

fn with_db(db_pool: DBPool) -> impl Filter<Extract = (DBPool,), Error = Infallible> + Clone {
    warp::any().map(move || db_pool.clone())
}

pub enum Errors {
    MerchantError(String),
    AccountError(String),
    CustomerError(String),
    CardError(String),
    TransactionError(String),
}

#[derive(Serialize)]
pub struct ErrorResponse {
    pub error: String,
}

#[tokio::main]
async fn main() {
    env::set_var("RUST_LOG", "info");
    pretty_env_logger::init();
    let log = warp::log("myLog");

    let pool = create_pool().unwrap();

    let token_route = warp::path!("api"/"token").and(warp::post())
        .and(with_db(pool.clone())).and(warp::body::json())
        .and_then(token::create_token_handler);

    let fund_route = warp::path!("api"/"account"/"fund").and(warp::post())
        .and(with_db(pool.clone())).and(warp::header("Authorization"))
        .and(warp::body::json()).and_then(transaction::fund_account_handler);

    let create_customer = warp::path!("api"/"customer").and(warp::post())
        .and(with_db(pool.clone())).and(warp::header("Authorization"))
        .and(warp::body::json()).and_then(customer::create_handler);

    let create_card = warp::path!("api"/"card").and(warp::post())
        .and(with_db(pool.clone())).and(warp::header("Authorization"))
        .and(warp::body::json()).and_then(card::create_virtual_handler);

    let deposit_card = warp::path!("api"/"card"/"deposit").and(warp::post())
        .and(with_db(pool.clone())).and(warp::header("Authorization"))
        .and(warp::body::json()).and_then(card::deposit_virtual_handler);

    let withdraw_card = warp::path!("api"/"card"/"withdraw").and(warp::post())
        .and(with_db(pool.clone())).and(warp::header("Authorization"))
        .and(warp::body::json()).and_then(card::withdraw_virtual_handler);

    let routes = token_route.or(fund_route).or(create_customer)
        .or(create_card).or(deposit_card).or(withdraw_card).with(log);

    warp::serve(routes)
        .run(([127, 0, 0, 1], 8080))
        .await;
}
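// A self-contained sketch of the same state-injection pattern used by `with_db`
// above, exercised with a plain `String` so no database pool is required. The
// `with_state` helper, the `/ping` route and the "pong" value are illustrative
// only and are not part of the original service.
#[cfg(test)]
mod filter_sketch {
    use super::*;

    fn with_state(state: String) -> impl Filter<Extract = (String,), Error = Infallible> + Clone {
        warp::any().map(move || state.clone())
    }

    #[tokio::test]
    async fn injects_state_into_handlers() {
        let route = warp::path!("ping")
            .and(with_state("pong".to_string()))
            .map(|state: String| state);

        let reply = warp::test::request().path("/ping").reply(&route).await;
        assert_eq!(reply.body(), "pong");
    }
}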
true
ca6d87f1da78e877065864c1802f73bc1bf8533b
Rust
luojia65/coruscant
/coruscant-nbt/examples/nbt-tag-enum.rs
UTF-8
631
2.9375
3
[ "MIT" ]
permissive
use serde::Serialize;

#[derive(Serialize)]
#[serde(tag = "type")]
enum Message {
    Request {
        id: &'static str,
        method: &'static str,
        params: i8,
    },
    Response {
        id: &'static str,
        result: i8,
    },
}

fn main() {
    let data = Message::Request {
        id: "...",
        method: "...",
        params: 1,
    };
    let out = coruscant_nbt::to_string_transcript(&data).unwrap();
    println!("{}", out);

    let data = Message::Response {
        id: "...",
        result: 2,
    };
    let out = coruscant_nbt::to_string_transcript(&data).unwrap();
    println!("{}", out);
}
true
aee6dd57988e74bd1ae4dcaa0a38fa6839e43ad8
Rust
AndrewMendezLacambra/rust-programming-contest-solutions
/atcoder/agc009_b.rs
UTF-8
1,829
2.90625
3
[]
no_license
/// Thank you tanakh!!!
/// https://qiita.com/tanakh/items/0ba42c7ca36cd29d0ac8
macro_rules! input {
    (source = $s:expr, $($r:tt)*) => {
        let mut iter = $s.split_whitespace();
        input_inner!{iter, $($r)*}
    };
    ($($r:tt)*) => {
        let mut s = {
            use std::io::Read;
            let mut s = String::new();
            std::io::stdin().read_to_string(&mut s).unwrap();
            s
        };
        let mut iter = s.split_whitespace();
        input_inner!{iter, $($r)*}
    };
}

macro_rules! input_inner {
    ($iter:expr) => {};
    ($iter:expr, ) => {};
    ($iter:expr, $var:ident : $t:tt $($r:tt)*) => {
        let $var = read_value!($iter, $t);
        input_inner!{$iter $($r)*}
    };
}

macro_rules! read_value {
    ($iter:expr, ( $($t:tt),* )) => {
        ( $(read_value!($iter, $t)),* )
    };
    ($iter:expr, [ $t:tt ; $len:expr ]) => {
        (0..$len).map(|_| read_value!($iter, $t)).collect::<Vec<_>>()
    };
    ($iter:expr, chars) => {
        read_value!($iter, String).chars().collect::<Vec<char>>()
    };
    ($iter:expr, usize1) => {
        read_value!($iter, usize) - 1
    };
    ($iter:expr, $t:ty) => {
        $iter.next().unwrap().parse::<$t>().expect("Parse error")
    };
}

fn main() {
    input!(n: usize, a: [usize1; n - 1]);

    let mut tree = vec![vec![]; n];
    for i in 0..(n - 1) {
        tree[a[i]].push(i + 1);
    }
    println!("{}", dfs(0, &tree) - 1);
}

fn dfs(v: usize, tree: &Vec<Vec<usize>>) -> usize {
    let mut children = vec![];
    for &child in tree[v].iter() {
        let child = dfs(child, tree);
        children.push(child);
    }
    children.sort();
    children.reverse();

    let max = children.iter().enumerate().map(|(i, c)| c + i + 1).max();
    match max {
        Some(max) => max,
        _ => 1,
    }
}
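// A hand-checkable case for `dfs`: a root whose two children are both leaves
// needs two rounds, so the printed answer (`dfs(0, ..) - 1`) would be 2.
#[cfg(test)]
mod tests {
    use super::dfs;

    #[test]
    fn root_with_two_leaves_takes_two_rounds() {
        let tree: Vec<Vec<usize>> = vec![vec![1, 2], vec![], vec![]];
        assert_eq!(dfs(0, &tree) - 1, 2);
    }
}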
true
a5b923e2f218d38b410d200bb13ddf8fd681138d
Rust
henrikpersson/potatis
/nes/src/ppu/state.rs
UTF-8
1,426
3.203125
3
[ "MIT" ]
permissive
#[derive(Default, PartialEq, Eq, Copy, Clone)]
pub(crate) enum Phase {
    PreRender,
    #[default]
    Render,
    PostRender,
    EnteringVblank,
    Vblank,
}

pub(crate) enum Rendering {
    Enabled,
    Disabled,
}

#[derive(Default)]
pub(crate) struct State {
    phase: Phase,
    cycle: usize,
    scanline: usize,
    clock: usize,
    odd_frame: bool,
}

impl State {
    pub fn next(&mut self, rendering_enabled: bool) -> (Phase, usize, Rendering) {
        self.cycle = self.clock % 341;
        self.scanline = self.clock / 341;
        self.clock += 1;

        self.phase = match self.scanline {
            261 => Phase::PreRender,
            0..=239 => Phase::Render,
            240 => Phase::PostRender,
            241 => Phase::EnteringVblank,
            242..=260 => Phase::Vblank,
            _ => unreachable!(),
        };

        if self.phase == Phase::PreRender {
            if self.cycle == 339 && self.odd_frame && rendering_enabled {
                self.clock = 0;
            }
            if self.cycle == 340 {
                self.clock = 0;
            }
        }

        self.odd_frame = !self.odd_frame;

        (
            self.phase,
            self.cycle,
            if rendering_enabled {
                Rendering::Enabled
            } else {
                Rendering::Disabled
            },
        )
    }

    pub fn even_frame(&self) -> bool {
        !self.odd_frame
    }

    pub fn scanline(&self) -> usize {
        self.scanline
    }

    pub fn cycle(&self) -> usize {
        self.cycle
    }

    #[allow(dead_code)]
    pub fn clock(&self) -> usize {
        self.clock
    }
}
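// A short driving sketch for the state machine above: the first dot reported
// is (cycle 0, Render), and stepping roughly one frame's worth of dots reaches
// EnteringVblank at scanline 241. It relies only on the types in this file and
// the timing constants already used above (341 dots per scanline, scanlines
// 0..=261).
#[cfg(test)]
mod sketch {
    use super::{Phase, State};

    #[test]
    fn reaches_vblank() {
        let mut state = State::default();

        let (phase, cycle, _) = state.next(false);
        assert!(phase == Phase::Render);
        assert_eq!(cycle, 0);

        let mut entered_vblank = false;
        for _ in 0..(341 * 242) {
            let (phase, _, _) = state.next(false);
            if phase == Phase::EnteringVblank {
                entered_vblank = true;
                break;
            }
        }
        assert!(entered_vblank);
    }
}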
true
03e6e460a35fafad34d41698d74088a6dba3db03
Rust
Parabellum1905y/doublelinkedlist
/src/element.rs
UTF-8
1,051
3.609375
4
[]
no_license
#[derive(Clone)]
pub struct Element {
    value: String,
    next: Option<Box<Element>>,
    prev: Option<Box<Element>>,
}

impl Element {
    pub fn new(value: String) -> Element {
        Element {
            value: value,
            next: None,
            prev: None,
        }
    }

    pub fn from_existing(
        value: String,
        prev: Option<Box<Element>>,
        next: Option<Box<Element>>,
    ) -> Element {
        Element {
            value: value,
            next: next,
            prev: prev,
        }
    }

    pub fn get_next(&self) -> Option<Box<Element>> {
        self.next.to_owned()
    }

    pub fn set_next(&mut self, element: Element) {
        self.next = Some(Box::new(element));
    }

    pub fn get_prev(&self) -> Option<Box<Element>> {
        self.prev.to_owned()
    }

    pub fn set_prev(&mut self, element: Element) {
        self.prev = Some(Box::new(element));
    }

    pub fn get_value(&self) -> String {
        self.value.to_owned()
    }

    pub fn set_value(&mut self, value: String) {
        self.value = value;
    }
}
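// A brief usage sketch: links store an owned copy of the neighbouring element
// (via `Box` plus `Clone`), so later changes to the original are not visible
// through an existing link.
#[cfg(test)]
mod tests {
    use super::Element;

    #[test]
    fn links_two_elements() {
        let mut first = Element::new("first".to_string());
        let second = Element::new("second".to_string());

        first.set_next(second.clone());

        assert_eq!(first.get_value(), "first");
        assert_eq!(first.get_next().unwrap().get_value(), "second");
        assert!(second.get_prev().is_none());
    }
}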
true
3414f0ef222bf2524b6b6450dd10a3d50277386f
Rust
selatotal/entendendo-algoritmos
/04.quicksort/rust/quicksort/src/main.rs
UTF-8
616
3.3125
3
[ "MIT" ]
permissive
fn quicksort(arr: &mut Vec<i64>) -> Vec<i64> {
    if arr.len() < 2 {
        return arr.clone();
    }
    let pivot = arr.remove(0);
    let mut lowers: Vec<i64> = arr.clone().into_iter().filter(|&v| v <= pivot).collect();
    let mut greaters: Vec<i64> = arr.clone().into_iter().filter(|&v| v > pivot).collect();
    let mut result = quicksort(lowers.as_mut()).clone();
    result.push(pivot);
    result.append(quicksort(greaters.as_mut()).as_mut());
    result.clone()
}

fn main() {
    let mut list: Vec<i64> = vec![24, 50, 54, 6, 9, 20, 1, 3, 80, 4];
    println!("Sorted: {:?}", quicksort(list.as_mut()));
}
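// A minimal check of the routine above: the expected vector is simply the
// sample input sorted ascending.
#[cfg(test)]
mod tests {
    use super::quicksort;

    #[test]
    fn sorts_sample_input() {
        let mut list: Vec<i64> = vec![24, 50, 54, 6, 9, 20, 1, 3, 80, 4];
        assert_eq!(
            quicksort(list.as_mut()),
            vec![1, 3, 4, 6, 9, 20, 24, 50, 54, 80]
        );
    }
}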
true
7e30510fafdd8dfa974c060a02d8347363eb916a
Rust
jakeswenson/stoken
/src/tokens/aes.rs
UTF-8
574
2.75
3
[]
no_license
use crypto::buffer::{RefReadBuffer, RefWriteBuffer};
use crypto::{aes, aes::KeySize, blockmodes::NoPadding};

pub const KEY_SIZE: usize = 16;
pub const BLOCK_SIZE: usize = 16;

pub fn encrypt(key: &[u8], data: &[u8]) -> [u8; BLOCK_SIZE] {
    let mut output_array = [0u8; BLOCK_SIZE];
    let mut input = RefReadBuffer::new(&data);
    let mut output: RefWriteBuffer = RefWriteBuffer::new(&mut output_array);
    let mut encryptor = aes::ecb_encryptor(KeySize::KeySize128, key, NoPadding);
    encryptor.encrypt(&mut input, &mut output, true).unwrap();
    output_array
}
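// An illustrative call using an all-zero key and block; no specific ciphertext
// is asserted, only that ECB with a fixed key is deterministic and actually
// transforms the input block.
#[cfg(test)]
mod tests {
    use super::{encrypt, BLOCK_SIZE, KEY_SIZE};

    #[test]
    fn encrypts_one_block_deterministically() {
        let key = [0u8; KEY_SIZE];
        let data = [0u8; BLOCK_SIZE];

        let out = encrypt(&key, &data);
        assert_ne!(out, data);
        assert_eq!(out, encrypt(&key, &data));
    }
}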
true
e1c529f59c8afae83b1d12cbf1cad83ff82d30c6
Rust
rustysec/parselnk-rs
/src/lib.rs
UTF-8
7,332
3.234375
3
[]
no_license
//! Parse windows .lnk files using only safe rust. Windows lnk files //! describe links to data objects as defined by //! [this specification](https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-shllink/16cb4ca1-9339-4d0c-a68d-bf1d6cc0f943). //! //! # Examples //! //! You can process the `Lnk` data from a memory buffer that implements //! `std::io::Read`. //! //! ```no_run //! use parselnk::Lnk; //! use std::convert::TryFrom; //! //! let mut lnk_data: Vec<u8> = Vec::new(); //! // read your link into `lnk_data` here... //! let lnk = Lnk::try_from(lnk_data); //! ``` //! //! Or you can process any `Lnk` on disk. //! ```no_run //! use parselnk::Lnk; //! use std::convert::TryFrom; //! //! let path = std::path::Path::new("c:\\users\\me\\shortcut.lnk"); //! //! let lnk = Lnk::try_from(path).unwrap(); //! ``` #![warn(missing_docs)] pub mod error; pub mod extra_data; pub mod header; pub mod link_info; pub mod link_target_id_list; pub mod string_data; pub use extra_data::*; pub use header::*; pub use link_info::*; pub use link_target_id_list::*; use std::{ convert::TryFrom, path::{Path, PathBuf}, }; pub use string_data::*; /// Result type wrapping around `parselnk::error::Error` pub type Result<T> = std::result::Result<T, error::Error>; /// Represents a windows .lnk file #[derive(Clone, Debug)] pub struct Lnk { /// Path to the `.lnk` file path: Option<PathBuf>, /// The ShellLinkHeader structure contains identification information, timestamps, and flags that specify the presence of optional structures, including LinkTargetIDList (section 2.2), LinkInfo (section 2.3), and StringData (section 2.4). pub header: ShellLinkHeader, /// StringData refers to a set of structures that convey user interface and path identification information. The presence of these optional structures is controlled by LinkFlags (section 2.1.1) in the ShellLinkHeader (section 2.1). pub string_data: StringData, /// The LinkTargetIDList structure specifies the target of the link. The presence of this optional structure is specified by the HasLinkTargetIDList bit (LinkFlags section 2.1.1) in the ShellLinkHeader (section 2.1). pub link_target_id_list: LinkTargetIdList, /// The LinkInfo structure specifies information necessary to resolve a link target if it is not found in its original location. This includes information about the volume that the target was stored on, the mapped drive letter, and a Universal Naming Convention (UNC) form of the path if one existed when the link was created. For more details about UNC paths, see [MS-DFSNM] section 2.2.1.4.:w pub link_info: LinkInfo, /// ExtraData refers to a set of structures that convey additional information about a link target. These optional structures can be present in an extra data section that is appended to the basic Shell Link Binary File Format. pub extra_data: ExtraData, } impl Lnk { /// Creates a new `Lnk` from a `Read` source. 
/// /// # Example /// /// ```no_run /// use parselnk::Lnk; /// use std::fs::File; /// /// let mut file = File::open(r"c:\users\me\desktop\firefox.lnk").unwrap(); /// let lnk = Lnk::new(&mut file); /// ``` /// pub fn new<S: std::io::Read>(reader: &mut S) -> Result<Lnk> { let mut data_buf = Vec::new(); reader .read_to_end(&mut data_buf) .map_err(error::HeaderError::Read)?; let mut cursor = std::io::Cursor::new(data_buf); let header = ShellLinkHeader::try_from(&mut cursor)?; let link_target_id_list = LinkTargetIdList::new(&mut cursor, &header)?; let link_info = LinkInfo::new(&mut cursor, &header)?; let string_data = StringData::new(&mut cursor, &header)?; let extra_data = ExtraData::new(&mut cursor, &header)?; Ok(Lnk { path: None, header, string_data, link_target_id_list, link_info, extra_data, }) } /// The command line arguments supplied via the `Lnk` pub fn arguments(&self) -> Option<String> { self.string_data.command_line_arguments.clone() } /// The relative path to the resource of the `Lnk`` pub fn relative_path(&self) -> Option<PathBuf> { self.string_data.relative_path.clone() } /// The working directory of the `Lnk` pub fn working_dir(&self) -> Option<PathBuf> { self.string_data.working_dir.clone() } /// The description of the `Lnk` pub fn description(&self) -> Option<String> { self.string_data.name_string.clone() } /// The creation `FileTime` as a u64 pub fn creation_time(&self) -> u64 { self.header.creation_time } /// The access `FileTime` as a u64 pub fn access_time(&self) -> u64 { self.header.access_time } /// The write `FileTime` as a u64 pub fn write_time(&self) -> u64 { self.header.write_time } /// The creation `FileTime` as a `DateTime` #[cfg(feature = "chrono")] pub fn created_on(&self) -> Option<chrono::DateTime<chrono::Utc>> { self.header.created_on } /// The access `FileTime` as a `DateTime` #[cfg(feature = "chrono")] pub fn accessed_on(&self) -> Option<chrono::DateTime<chrono::Utc>> { self.header.accessed_on } /// The write `FileTime` as a `DateTime` #[cfg(feature = "chrono")] pub fn modified_on(&self) -> Option<chrono::DateTime<chrono::Utc>> { self.header.modified_on } } impl TryFrom<&Path> for Lnk { type Error = crate::error::Error; fn try_from(p: &Path) -> std::result::Result<Self, Self::Error> { let mut f = std::fs::File::open(p).map_err(crate::error::Error::from)?; Lnk::new(&mut f).map(|mut lnk| { lnk.path = Some(p.to_path_buf()); lnk }) } } impl TryFrom<PathBuf> for Lnk { type Error = crate::error::Error; fn try_from(p: PathBuf) -> std::result::Result<Self, Self::Error> { Self::try_from(p.as_path()) } } impl TryFrom<&[u8]> for Lnk { type Error = crate::error::Error; fn try_from(mut p: &[u8]) -> std::result::Result<Self, Self::Error> { Lnk::new(&mut p) } } impl TryFrom<Vec<u8>> for Lnk { type Error = crate::error::Error; fn try_from(p: Vec<u8>) -> std::result::Result<Self, Self::Error> { Lnk::new(&mut p.as_slice()) } } impl TryFrom<&Vec<u8>> for Lnk { type Error = crate::error::Error; fn try_from(p: &Vec<u8>) -> std::result::Result<Self, Self::Error> { Lnk::new(&mut p.as_slice()) } } #[cfg(test)] mod tests { use crate::Lnk; use std::convert::TryFrom; use std::path::Path; #[test] fn firefox() { let path = Path::new("./test_data/firefox.lnk"); assert!(Lnk::try_from(path).is_ok()); } #[test] fn commander() { let path = Path::new("./test_data/commander.lnk"); assert!(Lnk::try_from(path).is_ok()); } #[test] fn notepad() { let path = Path::new("./test_data/notepad.lnk"); assert!(Lnk::try_from(path).is_ok()); } #[test] fn xp_outlook_express() { let path = 
Path::new("./test_data/outlook_express.lnk"); assert!(Lnk::try_from(path).is_ok()); } }
true
76dbea47ba2a99027702162ef424abf213bc45f6
Rust
dev-chee/ash
/ash/src/entry.rs
UTF-8
8,001
2.6875
3
[ "MIT", "Apache-2.0" ]
permissive
use crate::instance::Instance; use crate::prelude::*; use crate::vk; use crate::RawPtr; use std::error::Error; use std::ffi::CStr; use std::fmt; use std::mem; use std::os::raw::c_char; use std::os::raw::c_void; use std::ptr; /// Holds a custom type `L` to load symbols from (usually a handle to a `dlopen`ed library), /// the [`vkGetInstanceProcAddr`][vk::StaticFn::get_instance_proc_addr()] loader function from /// this library (in [`vk::StaticFn`]), and Vulkan's "entry point" functions (resolved with `NULL` /// `instance`) as listed in [`vkGetInstanceProcAddr`'s description]. /// /// [`vkGetInstanceProcAddr`'s description]: https://www.khronos.org/registry/vulkan/specs/1.2-extensions/man/html/vkGetInstanceProcAddr.html#_description #[derive(Clone)] pub struct EntryCustom<L> { static_fn: vk::StaticFn, entry_fn_1_0: vk::EntryFnV1_0, entry_fn_1_1: vk::EntryFnV1_1, entry_fn_1_2: vk::EntryFnV1_2, lib: L, } /// Vulkan core 1.0 #[allow(non_camel_case_types)] impl<L> EntryCustom<L> { pub fn new_custom<Load>( mut lib: L, mut load: Load, ) -> std::result::Result<Self, MissingEntryPoint> where Load: FnMut(&mut L, &::std::ffi::CStr) -> *const c_void, { // Bypass the normal StaticFn::load so we can return an error let static_fn = vk::StaticFn::load_checked(|name| load(&mut lib, name))?; let load_fn = |name: &std::ffi::CStr| unsafe { mem::transmute(static_fn.get_instance_proc_addr(vk::Instance::null(), name.as_ptr())) }; let entry_fn_1_0 = vk::EntryFnV1_0::load(load_fn); let entry_fn_1_1 = vk::EntryFnV1_1::load(load_fn); let entry_fn_1_2 = vk::EntryFnV1_2::load(load_fn); Ok(EntryCustom { static_fn, entry_fn_1_0, entry_fn_1_1, entry_fn_1_2, lib, }) } pub fn fp_v1_0(&self) -> &vk::EntryFnV1_0 { &self.entry_fn_1_0 } pub fn static_fn(&self) -> &vk::StaticFn { &self.static_fn } #[doc = "<https://www.khronos.org/registry/vulkan/specs/1.2-extensions/man/html/vkEnumerateInstanceVersion.html>"] /// ```no_run /// # use ash::{Entry, vk}; /// # fn main() -> Result<(), Box<dyn std::error::Error>> { /// let entry = unsafe { Entry::new() }?; /// match entry.try_enumerate_instance_version()? { /// // Vulkan 1.1+ /// Some(version) => { /// let major = vk::version_major(version); /// let minor = vk::version_minor(version); /// let patch = vk::version_patch(version); /// }, /// // Vulkan 1.0 /// None => {}, /// } /// # Ok(()) } /// ``` pub fn try_enumerate_instance_version(&self) -> VkResult<Option<u32>> { unsafe { let mut api_version = 0; let enumerate_instance_version: Option<vk::PFN_vkEnumerateInstanceVersion> = { let name = b"vkEnumerateInstanceVersion\0".as_ptr() as *const _; mem::transmute( self.static_fn .get_instance_proc_addr(vk::Instance::null(), name), ) }; if let Some(enumerate_instance_version) = enumerate_instance_version { (enumerate_instance_version)(&mut api_version) .result_with_success(Some(api_version)) } else { Ok(None) } } } #[doc = "<https://www.khronos.org/registry/vulkan/specs/1.2-extensions/man/html/vkCreateInstance.html>"] /// /// # Safety /// In order for the created [`Instance`] to be valid for the duration of its /// usage, the [`Entry`](Self) this was called on must be dropped later than the /// resulting [`Instance`]. 
pub unsafe fn create_instance( &self, create_info: &vk::InstanceCreateInfo, allocation_callbacks: Option<&vk::AllocationCallbacks>, ) -> Result<Instance, InstanceError> { let mut instance = mem::zeroed(); self.entry_fn_1_0 .create_instance( create_info, allocation_callbacks.as_raw_ptr(), &mut instance, ) .result() .map_err(InstanceError::VkError)?; Ok(Instance::load(&self.static_fn, instance)) } #[doc = "<https://www.khronos.org/registry/vulkan/specs/1.2-extensions/man/html/vkEnumerateInstanceLayerProperties.html>"] pub fn enumerate_instance_layer_properties(&self) -> VkResult<Vec<vk::LayerProperties>> { unsafe { read_into_uninitialized_vector(|count, data| { self.entry_fn_1_0 .enumerate_instance_layer_properties(count, data) }) } } #[doc = "<https://www.khronos.org/registry/vulkan/specs/1.2-extensions/man/html/vkEnumerateInstanceExtensionProperties.html>"] pub fn enumerate_instance_extension_properties( &self, ) -> VkResult<Vec<vk::ExtensionProperties>> { unsafe { read_into_uninitialized_vector(|count, data| { self.entry_fn_1_0 .enumerate_instance_extension_properties(ptr::null(), count, data) }) } } #[doc = "<https://www.khronos.org/registry/vulkan/specs/1.2-extensions/man/html/vkGetInstanceProcAddr.html>"] pub unsafe fn get_instance_proc_addr( &self, instance: vk::Instance, p_name: *const c_char, ) -> vk::PFN_vkVoidFunction { self.static_fn.get_instance_proc_addr(instance, p_name) } } /// Vulkan core 1.1 #[allow(non_camel_case_types)] impl<L> EntryCustom<L> { pub fn fp_v1_1(&self) -> &vk::EntryFnV1_1 { &self.entry_fn_1_1 } #[deprecated = "This function is unavailable and therefore panics on Vulkan 1.0, please use `try_enumerate_instance_version` instead"] #[doc = "<https://www.khronos.org/registry/vulkan/specs/1.2-extensions/man/html/vkEnumerateInstanceVersion.html>"] /// /// Please use [`Self::try_enumerate_instance_version`] instead. pub fn enumerate_instance_version(&self) -> VkResult<u32> { unsafe { let mut api_version = 0; self.entry_fn_1_1 .enumerate_instance_version(&mut api_version) .result_with_success(api_version) } } } /// Vulkan core 1.2 #[allow(non_camel_case_types)] impl<L> EntryCustom<L> { pub fn fp_v1_2(&self) -> &vk::EntryFnV1_2 { &self.entry_fn_1_2 } } #[derive(Clone, Debug)] pub enum InstanceError { LoadError(Vec<&'static str>), VkError(vk::Result), } impl fmt::Display for InstanceError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { InstanceError::LoadError(e) => write!(f, "{}", e.join("; ")), InstanceError::VkError(e) => write!(f, "{}", e), } } } impl Error for InstanceError {} impl vk::StaticFn { pub fn load_checked<F>(mut _f: F) -> Result<Self, MissingEntryPoint> where F: FnMut(&::std::ffi::CStr) -> *const c_void, { // TODO: Make this a &'static CStr once CStr::from_bytes_with_nul_unchecked is const static ENTRY_POINT: &[u8] = b"vkGetInstanceProcAddr\0"; Ok(vk::StaticFn { get_instance_proc_addr: unsafe { let cname = CStr::from_bytes_with_nul_unchecked(ENTRY_POINT); let val = _f(cname); if val.is_null() { return Err(MissingEntryPoint); } else { ::std::mem::transmute(val) } }, }) } } #[derive(Clone, Debug)] pub struct MissingEntryPoint; impl std::fmt::Display for MissingEntryPoint { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> { write!(f, "Cannot load `vkGetInstanceProcAddr` symbol from library") } } impl std::error::Error for MissingEntryPoint {}
true
72c6cc5310a55f3b6e7c0c6cbdc009a35d02769f
Rust
neonphog/must_future
/src/lib.rs
UTF-8
5,174
3.078125
3
[ "Apache-2.0" ]
permissive
#![deny(warnings)]
#![deny(missing_docs)]
#![deny(unused_must_use)]

//! BoxFutures cannot be marked `#[must_use]` because they are just type
//! definitions. This newtype struct wraps a BoxFuture with something that
//! can be marked `#[must_use]`.
//!
//! # Will Not Compile:
//!
//! ```compile_fail
//! #![deny(unused_must_use)]
//!
//! use futures::future::FutureExt;
//!
//! #[tokio::main]
//! async fn main() {
//!     fn get_future() -> must_future::MustBoxFuture<'static, ()> {
//!         async { }.boxed().into()
//!     }
//!
//!     get_future(); // unused `must_future::MustBoxFuture` that must be used
//! }
//! ```

use futures::future::BoxFuture;

/// Wrap a future that may or may not be marked must_use with a newtype
/// that is marked must_use.
#[must_use = "futures do nothing unless you `.await` or poll them"]
pub struct MustFuture<F: std::future::Future> {
    sub_fut: F,
}

impl<F: std::future::Future> MustFuture<F> {
    pin_utils::unsafe_pinned!(sub_fut: F);
}

impl<F: std::future::Future> From<F> for MustFuture<F> {
    fn from(f: F) -> Self {
        Self { sub_fut: f }
    }
}

impl<F: std::future::Future> std::future::Future for MustFuture<F> {
    type Output = F::Output;

    fn poll(
        self: std::pin::Pin<&mut Self>,
        cx: &mut std::task::Context,
    ) -> std::task::Poll<Self::Output> {
        let p: std::pin::Pin<&mut F> = self.sub_fut();
        std::future::Future::poll(p, cx)
    }
}

impl<F: std::future::Future + std::marker::Unpin> std::marker::Unpin for MustFuture<F> {}

impl<F: std::future::Future> std::fmt::Debug for MustFuture<F> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("MustFuture").finish()
    }
}

/// BoxFutures cannot be marked must_use because they are just type definitions.
/// This newtype struct wraps a BoxFuture with something that can be marked must_use.
#[must_use = "futures do nothing unless you `.await` or poll them"]
pub struct MustBoxFuture<'lt, T> {
    sub_fut: BoxFuture<'lt, T>,
}

impl<'lt, T> MustBoxFuture<'lt, T> {
    /// Construct a new MustBoxFuture from a raw unboxed future.
    /// Would be nice to `impl From<F: Future> for MustBoxFuture`,
    /// but blanket impls in rust core prevent this.
    pub fn new<F: 'lt + std::future::Future<Output = T> + Send>(f: F) -> Self {
        Self {
            sub_fut: futures::future::FutureExt::boxed(f),
        }
    }
}

impl<T: ?Sized> IntoMustBoxFuture for T where T: std::future::Future {}

/// Helper trait for converting raw unboxed futures into MustBoxFutures.
/// Would be nice to `impl<F: Future> Into<MustBoxFuture> for F`,
/// but blanket impls in rust core prevent this.
pub trait IntoMustBoxFuture: std::future::Future {
    /// Convert this raw future into a MustBoxFuture
    fn must_box<'a>(self) -> MustBoxFuture<'a, Self::Output>
    where
        Self: 'a + Sized + Send,
    {
        MustBoxFuture::new(self)
    }
}

impl<'lt, T> From<BoxFuture<'lt, T>> for MustBoxFuture<'lt, T> {
    fn from(f: BoxFuture<'lt, T>) -> Self {
        Self { sub_fut: f }
    }
}

impl<'lt, T> std::future::Future for MustBoxFuture<'lt, T> {
    type Output = T;

    fn poll(
        mut self: std::pin::Pin<&mut Self>,
        cx: &mut std::task::Context,
    ) -> std::task::Poll<Self::Output> {
        std::future::Future::poll(self.sub_fut.as_mut(), cx)
    }
}

impl<'lt, T> std::fmt::Debug for MustBoxFuture<'lt, T> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("MustBoxFuture").finish()
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use futures::future::FutureExt;

    #[tokio::test]
    pub async fn must_box_future_is_debug() {
        fn get_future() -> MustBoxFuture<'static, &'static str> {
            async { "test1" }.boxed().into()
        }

        assert_eq!("MustBoxFuture", &format!("{:?}", get_future()));
    }

    #[tokio::test]
    pub async fn must_box_future_can_still_process() {
        fn get_future() -> MustBoxFuture<'static, &'static str> {
            async { "test1" }.boxed().into()
        }

        assert_eq!("test1", get_future().await);
    }

    #[tokio::test]
    pub async fn must_box_future_with_new() {
        fn get_future() -> MustBoxFuture<'static, &'static str> {
            MustBoxFuture::new(async { "test1" })
        }

        assert_eq!("test1", get_future().await);
    }

    #[tokio::test]
    pub async fn must_box_future_with_must_box() {
        fn get_future() -> MustBoxFuture<'static, &'static str> {
            async { "test1" }.must_box()
        }

        assert_eq!("test1", get_future().await);
    }

    #[tokio::test]
    pub async fn must_future_is_debug() {
        fn get_future() -> MustFuture<BoxFuture<'static, &'static str>> {
            async { "test2" }.boxed().into()
        }

        assert_eq!("MustFuture", &format!("{:?}", get_future()));
    }

    #[tokio::test]
    pub async fn must_future_can_still_process() {
        fn get_future() -> MustFuture<BoxFuture<'static, &'static str>> {
            async { "test2" }.boxed().into()
        }

        assert_eq!("test2", get_future().await);
    }
}
true
86f4f70714d52d02296238c8bd8cdf29046dbf12
Rust
ShisoftResearch/pmem-alloc
/src/mmap.rs
UTF-8
4,384
2.75
3
[ "MIT" ]
permissive
use libc::*;
use std::{ptr, io};
use crate::Ptr;
use std::ffi::c_void;
use std::path::Path;
use std::fs::{File, OpenOptions};
use std::ptr::{null, null_mut};
use std::os::unix::io::IntoRawFd;
use errno::errno;
use crate::utils::{BLOCK_SIZE, PAGE_SIZE, BLOCK_MASK};

const DEFAULT_PROT: c_int = PROT_READ | PROT_WRITE;

pub fn mmap_without_fd(size: usize) -> io::Result<Ptr> {
    let ptr = unsafe {
        mmap(
            ptr::null_mut(),
            size as size_t,
            DEFAULT_PROT,
            MAP_ANONYMOUS | MAP_PRIVATE,
            -1,
            0,
        )
    };
    check_mmap_ptr(ptr)
}

pub fn mmap_to_file<P: AsRef<Path>>(size: usize, path: P) -> io::Result<Ptr> {
    let ptr = unsafe {
        mmap(
            ptr::null_mut(),
            size as size_t,
            DEFAULT_PROT,
            MAP_SHARED,
            open_file(path, size)?,
            0,
        )
    };
    check_mmap_ptr(ptr)
}

pub fn mmap_to_file_trimmed<P: AsRef<Path>>(size: usize, path: P) -> io::Result<Ptr> {
    unsafe {
        // Get the trimmed anonymous space
        let trimmed_ptr = mmap_trimmed_anonymous(size)?;
        let fd = open_file(path, size)?;
        check_mmap_ptr(mmap(trimmed_ptr, size, DEFAULT_PROT, MAP_SHARED | MAP_FIXED, fd, 0))
    }
}

pub fn check_mmap_ptr(ptr: Ptr) -> io::Result<Ptr> {
    if ptr == -1 as isize as *mut c_void {
        let err = errno();
        Err(io::Error::new(io::ErrorKind::Other, format!("mmap failed: [{}] {}", err.0, err)))
    } else {
        Ok(ptr)
    }
}

// Trim the mmapped space aligned with block size and desired size
pub unsafe fn mmap_trimmed_anonymous(size: usize) -> io::Result<Ptr> {
    let aligned_size = alignment_size(size);
    let desired = size;
    let ptr = mmap_without_fd(size)?;
    let addr = ptr as usize;
    let padding_start = addr + (BLOCK_SIZE - PAGE_SIZE);
    let aligned_addr = padding_start & BLOCK_MASK;
    let lower_size = aligned_addr - addr;
    if lower_size > 0 {
        // `munmap` must run unconditionally; a `debug_assert!` body is compiled
        // out in release builds, so bind the result before asserting on it.
        let unmapped = munmap(ptr, lower_size);
        debug_assert!(unmapped >= 0);
    }
    let higher_size = aligned_size - (desired + lower_size);
    if higher_size > 0 {
        let high_pos = aligned_addr + desired;
        let unmapped = munmap(high_pos as Ptr, higher_size);
        debug_assert!(unmapped >= 0);
    }
    Ok(aligned_addr as Ptr)
}

pub fn alignment_size(desired: usize) -> usize {
    desired + (BLOCK_SIZE - PAGE_SIZE)
}

pub fn open_file<P: AsRef<Path>>(path: P, size: usize) -> io::Result<i32> {
    let file = OpenOptions::new().read(true).write(true).create(true).open(path)?;
    file.set_len(size as u64)?;
    Ok(file.into_raw_fd())
}

#[cfg(test)]
mod test {
    use crate::mmap::*;
    use std::{fs, ptr};
    use std::fs::File;
    use crate::utils::BLOCK_SIZE;

    const TEST_SIZE: usize = 40960;

    #[test]
    fn mmap() {
        let addr = mmap_without_fd(TEST_SIZE).unwrap();
        unsafe {
            ptr::write(addr as *mut usize, 42);
            assert_eq!(ptr::read(addr as *mut usize), 42);
        }
        assert!(addr as usize > 0);
    }

    #[test]
    fn mmap_file() {
        let file_name = "test.mmap_to_file.bin";
        fs::remove_file(file_name);
        let addr = mmap_to_file(TEST_SIZE, file_name).unwrap();
        unsafe {
            ptr::write(addr as *mut usize, 42);
            assert_eq!(ptr::read(addr as *mut usize), 42);
            munmap(addr, TEST_SIZE);
            assert!(fs::metadata(file_name).unwrap().len() > 0);
            // Remap
            let addr = mmap_to_file(TEST_SIZE, file_name).unwrap();
            assert_eq!(ptr::read(addr as *mut usize), 42);
        }
    }

    #[test]
    fn mmap_trim_space() {
        let file_name = "test.mmap_to_file_aligned.bin";
        fs::remove_file(file_name);
        let desired = BLOCK_SIZE * 5;
        let addr = mmap_to_file_trimmed(desired, file_name).unwrap() as usize;
        unsafe {
            for i in addr..addr + desired {
                ptr::write(i as *mut u8, i as u8);
            }
            for i in addr..addr + desired {
                assert_eq!(ptr::read(i as *mut u8), i as u8);
            }
            munmap(addr as Ptr, desired);
            let addr = mmap_to_file_trimmed(desired, file_name).unwrap() as usize;
            for i in addr..addr + desired {
                assert_eq!(ptr::read(i as *mut u8), i as u8);
            }
            munmap(addr as Ptr, desired);
        }
    }
}
true
cb5da4c569c738f530759d5fd529fd0c40b3d921
Rust
rust-lang/rust-bindgen
/bindgen-tests/tests/quickchecking/src/lib.rs
UTF-8
3,821
2.9375
3
[ "BSD-3-Clause" ]
permissive
//! A library to generate __fuzzed__ C headers for use with `quickcheck`
//!
//! ## Example
//!
//! ```rust
//! extern crate quickcheck;
//! extern crate quickchecking;
//! use quickcheck::{Arbitrary, Gen};
//! use quickchecking::fuzzers;
//!
//! fn main() {
//!     let generate_range: usize = 10; // Determines things like the length of
//!                                     // arbitrary vectors generated.
//!     let header = fuzzers::HeaderC::arbitrary(
//!         &mut Gen::new(generate_range));
//!     println!("{}", header);
//! }
//! ```
//!
#![deny(missing_docs)]
#[macro_use]
extern crate lazy_static;
extern crate quickcheck;
extern crate tempfile;

use quickcheck::{Gen, QuickCheck, TestResult};
use std::error::Error;
use std::fs::File;
use std::io::Write;
use std::path::{Path, PathBuf};
use std::process::{Command, Output};
use std::sync::Mutex;
use tempfile::Builder;

/// Contains definitions of and impls for types used to fuzz C declarations.
pub mod fuzzers;

// Global singleton, manages context across tests. For now that context is
// only the output_path for inspecting fuzzed headers (if specified).
struct Context {
    output_path: Option<String>,
}

// Initialize global context.
lazy_static! {
    static ref CONTEXT: Mutex<Context> = Mutex::new(Context { output_path: None });
}

// Passes fuzzed header to the `csmith-fuzzing/predicate.py` script, returns
// output of the associated command.
fn run_predicate_script(
    header: fuzzers::HeaderC,
) -> Result<Output, Box<dyn Error>> {
    let dir = Builder::new().prefix("bindgen_prop").tempdir()?;
    let header_path = dir.path().join("prop_test.h");

    let mut header_file = File::create(&header_path)?;
    header_file.write_all(header.to_string().as_bytes())?;
    header_file.sync_all()?;

    let header_path_string = header_path
        .into_os_string()
        .into_string()
        .map_err(|_| "error converting path into String")?;

    let mut predicate_script_path = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
    predicate_script_path.push("../../csmith-fuzzing/predicate.py");

    let predicate_script_path_string = predicate_script_path
        .into_os_string()
        .into_string()
        .map_err(|_| "error converting path into String")?;

    // Copy generated temp files to output_path directory for inspection.
    // If `None`, output path not specified, don't copy.
    if let Some(ref path) = CONTEXT.lock().unwrap().output_path {
        Command::new("cp")
            .arg("-a")
            .arg(dir.path().to_str().unwrap())
            .arg(path)
            .output()?;
    }

    Ok(Command::new(predicate_script_path_string)
        .arg(&header_path_string)
        .output()?)
}

// Generatable property. Pass generated headers off to run through the
// `csmith-fuzzing/predicate.py` script. Success is measured by the success
// status of that command.
fn bindgen_prop(header: fuzzers::HeaderC) -> TestResult {
    match run_predicate_script(header) {
        Ok(o) => TestResult::from_bool(o.status.success()),
        Err(e) => {
            println!("{:?}", e);
            TestResult::from_bool(false)
        }
    }
}

/// Instantiate a Quickcheck object and use it to run property tests using
/// fuzzed C headers generated with types defined in the `fuzzers` module.
/// Success/Failure is dictated by the result of passing the fuzzed headers
/// to the `csmith-fuzzing/predicate.py` script.
pub fn test_bindgen(
    generate_range: usize,
    tests: u64,
    output_path: Option<&Path>,
) {
    if let Some(path) = output_path {
        CONTEXT.lock().unwrap().output_path = Some(path.display().to_string());
    }

    QuickCheck::new()
        .tests(tests)
        .gen(Gen::new(generate_range))
        .quickcheck(bindgen_prop as fn(fuzzers::HeaderC) -> TestResult)
}
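
// ---------------------------------------------------------------------------
// Editor's addition, not part of the original bindgen file: a minimal sketch of
// driving `test_bindgen` from a test in this crate. The argument values
// (generation range, number of cases) are illustrative assumptions, not
// project defaults, and the test is ignored because it needs the
// csmith-fuzzing predicate script to be present.
// ---------------------------------------------------------------------------
#[cfg(test)]
mod usage_sketch {
    #[test]
    #[ignore] // requires csmith-fuzzing/predicate.py and a working clang setup
    fn run_a_few_property_tests() {
        // Small arbitrary sizes, a handful of cases, and no output directory
        // for keeping the fuzzed headers around afterwards.
        super::test_bindgen(10, 5, None);
    }
}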
true
ddeb5175e61682b1d437d80d1d988871d3fe8391
Rust
boltlabs-inc/tezedge-client
/ledger_api/src/ledger_request.rs
UTF-8
1,852
2.8125
3
[ "MIT" ]
permissive
use ledger_apdu::APDUCommand;

use crate::{Ledger, LedgerError, LedgerResponse};

pub type ResultHandler<T> = dyn Fn(&mut Ledger, Vec<u8>) -> Result<T, LedgerError>;

pub struct LedgerRequestData<T> {
    pub command: APDUCommand,
    pub handler: Box<ResultHandler<T>>,
}

impl<T> LedgerRequestData<T>
where
    T: 'static,
{
    pub fn map<U, F>(self, handler: F) -> LedgerRequestData<U>
    where
        F: 'static + Fn(&mut Ledger, T) -> Result<U, LedgerError>,
        U: 'static,
    {
        let old_handler = self.handler;

        LedgerRequestData {
            command: self.command,
            handler: Box::new(move |ledger, raw_result| {
                let res = old_handler(ledger, raw_result)?;
                handler(ledger, res)
            }),
        }
    }

    pub fn send<'a>(self, ledger: &'a mut Ledger) -> LedgerResponse<'a, T>
    where
        T: 'static,
    {
        ledger.call(self.command, self.handler).into()
    }
}

pub struct LedgerRequest<'a, T> {
    pub ledger: &'a mut Ledger,
    pub data: LedgerRequestData<T>,
}

impl<'a> LedgerRequest<'a, Vec<u8>> {
    pub fn new(
        ledger: &'a mut Ledger,
        command: APDUCommand,
    ) -> Self {
        LedgerRequest {
            ledger,
            data: LedgerRequestData {
                command,
                handler: Box::new(|_, x| Ok(x)),
            },
        }
    }
}

impl<'a, T: 'a> LedgerRequest<'a, T>
where
    T: 'static,
{
    pub fn map<U, F>(self, handler: F) -> LedgerRequest<'a, U>
    where
        F: 'static + Fn(&mut Ledger, T) -> Result<U, LedgerError>,
        U: 'static,
    {
        LedgerRequest {
            ledger: self.ledger,
            data: self.data.map(handler),
        }
    }

    pub fn send(self) -> LedgerResponse<'a, T>
    where
        T: 'static,
    {
        self.data.send(self.ledger)
    }
}
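
// ---------------------------------------------------------------------------
// Editor's addition, not part of the original file: a hedged sketch of how the
// request builder is meant to compose. The APDU command is taken as a
// parameter because its exact construction depends on the `ledger_apdu`
// version in use; treat this as illustration of `new` -> `map` -> `send`
// rather than a confirmed device interaction.
// ---------------------------------------------------------------------------
#[allow(dead_code)]
fn usage_sketch(ledger: &mut Ledger, command: APDUCommand) -> LedgerResponse<'_, usize> {
    // Start from the raw response bytes, then map them into a higher-level
    // value; `send` finally drives the exchange with the device.
    LedgerRequest::new(ledger, command)
        .map(|_ledger, raw| Ok(raw.len()))
        .send()
}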
true
16c29be0e05f30d3771303512e743336c9e38a78
Rust
CuriouslyCurious/gw2api
/src/v2/pvp/heroes.rs
UTF-8
6,575
2.734375
3
[ "Apache-2.0", "MIT" ]
permissive
use serde::Deserialize; use crate::client::Client; use crate::error::ApiError; use crate::utils::ids_to_string; const ENDPOINT_URL: &str = "/v2/pvp/heroes"; /// A hero used in the Stronghold game structured PvP game type. #[derive(Debug, Deserialize, PartialEq)] pub struct Hero { /// id of the hero. pub id: String, /// Name of the hero. pub name: String, /// Flavor text describing the lore behind the hero. pub description: String, /// Flavor type describing the hero. #[serde(rename = "type")] pub flavor_type: String, /// A struct containing the champion's stats: offense, defense and speed. pub stats: Stats, /// Url to the overlay art for the champion. #[serde(rename = "overlay")] pub overlay_url: String, /// Url to the underlay art for the champion. #[serde(rename = "underlay")] pub underlay_url: String, /// A `Vec` of the skins available to the given hero. pub skins: Vec<Skin>, } /// Struct that contains the offense, defense and speed stats for a given hero. #[derive(Debug, Deserialize, PartialEq)] pub struct Stats { pub offense: u32, pub defense: u32, pub speed: u32, } /// Cosmetic skin information of a hero. #[derive(Debug, Deserialize, PartialEq)] pub struct Skin { /// Skin id pub id: u32, /// Name of the skin pub name: String, /// Url to the icon #[serde(rename = "icon")] pub icon_url: String, /// Whether the skin is the default for that hero or not. pub default: bool, /// Item ids which unlock the skin. #[serde(default)] pub unlock_items: Vec<u32>, } impl Hero { /// Retrieve a hero by its id. pub fn get_id(client: &Client, id: String) -> Result<Hero, ApiError> { let url = format!("{}?id={}", ENDPOINT_URL, id); client.request(&url) } /// Retrieve the ids for all available heroes. pub fn get_all_ids(client: &Client) -> Result<Vec<String>, ApiError> { client.request(ENDPOINT_URL) } /// Retrieve all heroes that are available. pub fn get_all_heroes(client: &Client) -> Result<Vec<Hero>, ApiError> { let url = format!("{}?ids=all", ENDPOINT_URL); client.request(&url) } /// Retrive heroes by their ids. pub fn get_heroes_by_ids(client: &Client, ids: Vec<String>) -> Result<Vec<Hero>, ApiError> { let url = format!("{}?ids={}", ENDPOINT_URL, ids_to_string(ids)); client.request(&url) } } impl Stats {} impl Skin {} #[cfg(test)] mod tests { use crate::v2::pvp::heroes::*; use crate::client::Client; const JSON_HERO: &str = r#" { "id": "115C140F-C2F5-40EB-8EA2-C3773F2AE468", "name": "Nika", "description": "Nika was a proficient assassin schooled in her youth at Shing Jea Monastery. 
She served Cantha as a member of the Obsidian Flame.", "type": "Specialist Hero", "stats": { "offense": 3, "defense": 2, "speed": 4 }, "overlay": "https://render.guildwars2.com/file/2CACF4120E370D1997A4C3D69BF592D7CC1870C8/993693.png", "underlay": "https://render.guildwars2.com/file/103108E0D8EDD22C577FA4171618D004A82AD955/993694.png", "skins": [ { "id": 1, "name": "Nika", "icon": "https://render.guildwars2.com/file/4602BDC15B73422011AC664425D93750707F04F3/1058576.png", "default": true, "unlock_items": [ 70076 ] }, { "id": 7, "name": "Shadow Assassin Nika", "icon": "https://render.guildwars2.com/file/01643F1BD1202007BEE8E37F7DA3EA31AEE9536C/1322841.png", "default": false, "unlock_items": [ 72077 ] }, { "id": 15, "name": "Festive Nika", "icon": "https://render.guildwars2.com/file/002248777FC6341B1650040AF1ADBD79A4772CA5/1322839.png", "default": false, "unlock_items": [ 77642 ] }, { "id": 11, "name": "Sneakthief Nika", "icon": "https://render.guildwars2.com/file/DB2DCD0AEDDCD0474F4FC2426203384E06D2380D/1322842.png", "default": false, "unlock_items": [ 73002 ] }, { "id": 12, "name": "Strider's Nika", "icon": "https://render.guildwars2.com/file/CE35793C96D74CC657736D15FB02C7B64E610208/1322843.png", "default": false, "unlock_items": [ 76274 ] } ] }"#; const JSON_STATS: &str = r#" { "offense": 3, "defense": 2, "speed": 4 }"#; const JSON_SKIN: &str = r#" { "id": 1, "name": "Nika", "icon": "https://render.guildwars2.com/file/4602BDC15B73422011AC664425D93750707F04F3/1058576.png", "default": true, "unlock_items": [ 70076 ] }"#; #[test] fn create_hero() { serde_json::from_str::<Hero>(JSON_HERO).unwrap(); } #[test] fn create_stats() { serde_json::from_str::<Stats>(JSON_STATS).unwrap(); } #[test] fn create_skin() { serde_json::from_str::<Skin>(JSON_SKIN).unwrap(); } #[test] fn get_id() { let client = Client::new(); let hero = serde_json::from_str::<Hero>(JSON_HERO).unwrap(); assert_eq!(hero, Hero::get_id(&client, hero.id.to_string()).unwrap()); } #[test] fn get_all_ids() { let client = Client::new(); let ids = vec!( "115C140F-C2F5-40EB-8EA2-C3773F2AE468", "B7EA9889-5F16-4636-9705-4FCAF8B39ECD", "BEA79596-CA8B-4D46-9B9C-EA1B606BCF42", "CF977AE5-C605-4586-A802-3E25F0F35772", ); assert_eq!(ids, Hero::get_all_ids(&client).unwrap()); } #[test] fn get_all_heroes() { let client = Client::new(); let ids = vec!( "115C140F-C2F5-40EB-8EA2-C3773F2AE468", "B7EA9889-5F16-4636-9705-4FCAF8B39ECD", "BEA79596-CA8B-4D46-9B9C-EA1B606BCF42", "CF977AE5-C605-4586-A802-3E25F0F35772", ); assert!(Hero::get_all_heroes(&client).unwrap().len() == ids.len()); } #[test] fn get_heroes_by_ids() { let client = Client::new(); let ids = vec!( "115C140F-C2F5-40EB-8EA2-C3773F2AE468".to_string(), "B7EA9889-5F16-4636-9705-4FCAF8B39ECD".to_string(), ); assert!(Hero::get_heroes_by_ids(&client, ids.clone()).unwrap().len() == ids.len()); } }
true
1dd78e220ba5e3a7739fb46cf5aa1050eec8ae3e
Rust
rettgerst/exercism-rust
/matching-brackets/src/lib.rs
UTF-8
1,301
3.390625
3
[]
no_license
pub fn brackets_are_balanced(string: &str) -> bool {
    let mut brackets = vec![];
    for char in string.chars() {
        match char {
            '(' => brackets.push(char),
            '[' => brackets.push(char),
            '{' => brackets.push(char),
            '}' => {
                if brackets.len() == 0 {
                    return false;
                }
                let top = brackets.last().unwrap();
                if *top == '{' {
                    brackets.pop();
                } else {
                    return false;
                }
            }
            ']' => {
                if brackets.len() == 0 {
                    return false;
                }
                let top = brackets.last().unwrap();
                if *top == '[' {
                    brackets.pop();
                } else {
                    return false;
                }
            }
            ')' => {
                if brackets.len() == 0 {
                    return false;
                }
                let top = brackets.last().unwrap();
                if *top == '(' {
                    brackets.pop();
                } else {
                    return false;
                }
            }
            _ => (),
        }
    }
    brackets.len() == 0
}
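
// ---------------------------------------------------------------------------
// Editor's addition, not part of the original exercise solution: a few
// illustrative checks showing the expected behaviour of the function above.
// ---------------------------------------------------------------------------
#[cfg(test)]
mod usage_sketch {
    use super::brackets_are_balanced;

    #[test]
    fn balanced_and_unbalanced_samples() {
        assert!(brackets_are_balanced("([{}])"));
        assert!(brackets_are_balanced("code { if (a[0]) { return; } }"));
        assert!(!brackets_are_balanced("([)]")); // wrong closing order
        assert!(!brackets_are_balanced("(((")); // unclosed brackets are rejected
    }
}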
true
05957a375ba4e0c1ac1bd21ac8675dce036a9203
Rust
sgmarz/raytrace
/src/threadpool.rs
UTF-8
3,316
2.8125
3
[ "MIT" ]
permissive
use crate::camera::Camera; use crate::hitable::HitList; use crate::random::random_f64; use crate::vector::Vec3; use std::sync::mpsc::{channel, Receiver, Sender}; use std::sync::Arc; use std::thread::{spawn, JoinHandle}; use std::vec::Vec; pub struct ControlPacket { pub row: u32, pub col: u32, pub camera: Arc<Camera>, pub objects: Arc<HitList>, pub done: bool, pub samples: u32, pub image_width: u32, pub image_height: u32, pub max_depth: i32, } impl ControlPacket { pub const fn new(row: u32, col: u32, camera: Arc<Camera>, objects: Arc<HitList>, samples: u32, image_width: u32, image_height: u32, max_depth: i32) -> Self { Self { row, col, camera, objects, done: false, samples, image_width, image_height, max_depth, } } pub fn done() -> Self { Self { row: 0, col: 0, camera: Arc::new(Camera::default()), objects: Arc::new(HitList::default()), done: true, samples: 0, image_width: 0, image_height: 0, max_depth: 0, } } } pub struct DataPacket { pub row: u32, pub col: u32, pub color: Vec3, } impl DataPacket { pub const fn new(row: u32, col: u32, color: Vec3) -> Self { Self { row, col, color, } } } pub struct Thread { pub thread: JoinHandle<()>, pub data: Receiver<DataPacket>, pub control: Sender<ControlPacket>, pub packets_sent: usize, } pub struct ThreadPool { pub threads: Vec<Thread>, pub next: usize, } impl ThreadPool { pub fn new(num_threads: usize) -> Self { assert!(num_threads > 0); let mut threads = Vec::with_capacity(num_threads); let background = Vec3::new(0.0, 0.0, 0.0); for _ in 0..num_threads { let (data_s, data_r): (Sender<DataPacket>, Receiver<_>) = channel(); let (control_s, control_r): (Sender<ControlPacket>, Receiver<_>) = channel(); let cws = data_s.clone(); let t = Thread { thread: spawn(move || { while let Ok(packet) = control_r.recv() { if packet.done == true { break; } let iwf = packet.image_width as f64 - 1.0; let ihf = packet.image_height as f64 - 1.0; let mut color = Vec3::new(0.0, 0.0, 0.0); for _ in 0..packet.samples { let u = (random_f64() + packet.col as f64) / iwf; let v = (random_f64() + packet.row as f64) / ihf; color += &packet.camera.get_ray(u, v).color(&background, &packet.objects, 10); } let dp = DataPacket::new(packet.row, packet.col, color); cws.send(dp).unwrap(); } }), data: data_r, control: control_s, packets_sent: 0, }; threads.push(t); } Self { threads, next: 0, } } pub fn run(&mut self, control: ControlPacket) -> bool { let t = &mut self.threads[self.next]; t.packets_sent += 1; let res = t.control.send(control); self.next += 1; if self.next >= self.threads.len() { self.next = 0; } res.is_ok() } pub fn run_c(&mut self, row: u32, col: u32, camera: Arc<Camera>, objects: Arc<HitList>, samples: u32, image_width: u32, image_height: u32, max_depth: i32) -> bool { let cp = ControlPacket::new(row, col, camera, objects, samples, image_width, image_height, max_depth); self.run(cp) } } impl Drop for ThreadPool { fn drop(&mut self) { for t in self.threads.drain(..) { t.control.send(ControlPacket::done()).unwrap(); t.thread.join().unwrap(); } } }
true
1feb1e6251ef16a115424acda38c1adba0f52d2b
Rust
rust-lang/glacier
/fixed/77218.rs
UTF-8
231
2.84375
3
[ "Apache-2.0", "MIT" ]
permissive
pub struct Cache {
    data: Vec<i32>,
}

pub fn list_data(cache: &Cache, key: usize) {
    for reference in vec![1, 2, 3] {
        if /*let*/ Some(reference) = cache.data.get(key) {
            unimplemented!()
        }
    }
}
true
b3606f7a5bbe942c8f105367ca6bfbb96a6f652f
Rust
uchenma/Assignment1_DiningConciergeAgent
/shared-types/src/lib.rs
UTF-8
1,808
2.578125
3
[]
no_license
use dynomite::{Attributes, Item};
use serde::{Deserialize, Serialize};

#[derive(Attributes, Deserialize, Serialize, Debug, Clone)]
pub struct YelpCategory {
    pub alias: String,
    pub title: String,
}

#[derive(Attributes, Deserialize, Serialize, Debug, Clone)]
pub struct YelpAddress {
    #[serde(default)]
    city: Option<String>,
    #[serde(default)]
    country: Option<String>,
    pub address1: String,
    #[serde(default)]
    address2: Option<String>,
    #[serde(default)]
    address3: Option<String>,
    #[serde(default)]
    state: Option<String>,
    #[serde(default)]
    zip_code: Option<String>,
}

#[derive(Attributes, Deserialize, Serialize, Debug, Clone)]
pub struct YelpCoordinates {
    #[serde(default)]
    latitude: Option<f32>,
    #[serde(default)]
    longitude: Option<f32>,
}

#[derive(Item, Deserialize, Serialize, Debug, Clone)]
pub struct YelpBusiness {
    #[dynomite(partition_key)]
    pub id: String,
    pub alias: String,
    pub name: String,
    #[serde(default)]
    pub image_url: Option<String>,
    #[serde(default)]
    pub url: Option<String>,
    pub categories: Vec<YelpCategory>,
    #[serde(default)]
    pub rating: Option<f32>,
    #[serde(default)]
    pub review_count: Option<u32>,
    #[serde(default)]
    pub coordinates: Option<YelpCoordinates>,
    pub location: YelpAddress,
    #[serde(default)]
    #[serde(rename(serialize = "insertedAtTimestamp"))]
    pub inserted_at_timestamp: Option<String>,
}

#[derive(Deserialize, Serialize, Debug, Clone)]
pub struct YelpBusinessEs {
    pub id: String,
    pub dynamo_id: String,
    pub cuisines: Vec<String>,
}

#[derive(Serialize, Deserialize)]
pub struct RestaurantRequest {
    pub phonenumber: String,
    pub cuisine: String,
    pub num_people: String,
    pub date_and_time: String,
}
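
// ---------------------------------------------------------------------------
// Editor's addition, not part of the original crate: a sketch of how these
// serde-derived types are typically used. It assumes `serde_json` is available
// as a (dev-)dependency, which this file alone does not confirm, and the field
// values are placeholders.
// ---------------------------------------------------------------------------
#[cfg(test)]
mod usage_sketch {
    use super::RestaurantRequest;

    #[test]
    fn restaurant_request_round_trips_through_json() {
        let request = RestaurantRequest {
            phonenumber: "+15555550123".to_string(), // placeholder values
            cuisine: "japanese".to_string(),
            num_people: "2".to_string(),
            date_and_time: "tomorrow 7pm".to_string(),
        };

        let json = serde_json::to_string(&request).unwrap();
        let parsed: RestaurantRequest = serde_json::from_str(&json).unwrap();
        assert_eq!(parsed.cuisine, "japanese");
    }
}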
true
5a50f475940b9d4187f81b8f3a0dd0ae65c36d8c
Rust
peteralieber/rustrays
/src/main.rs
UTF-8
666
2.640625
3
[]
no_license
use rustrays::util::*;
use rustrays::*;
use log::*;

fn main() {
    //set_max_level(LevelFilter::Info);
    //println!("RustRays!");
    //output_color_gradient();
    //output_blue_white_gradient();
    //output_sphere_on_sphere();
    output_metal_spheres();

    let v1 = vectors::Vector3 { x: 1.0, y: 1.0, z: 1.0 };
    let v2 = vectors::Vector3 { x: 1.0, y: 2.0, z: 3.0 };
    let v3 = vectors::Vector3 { x: 2.0, y: 2.0, z: 2.0 };

    info!("Vector (1,1,1) + (1,2,3) = {}", v1 + v2);
    info!("Vector (2,2,2) * (1,2,3) = {}", v3 * v2);
    info!("Vector (2,2,2) . (1,2,3) = {}", v3.dot(v2));
    info!("Vector (2,2,2) x (1,2,3) = {}", v3.cross(v2));

    eprintln!("Done!");
}
true
194669ea58db606647b219069f6c046b71fe201f
Rust
magiclen/random-integer
/src/lib.rs
UTF-8
2,995
3.390625
3
[ "MIT" ]
permissive
/*! # Random Integer Generate a random integer between two integer numbers (including the two integer numbers). ## Example ```rust extern crate random_integer; let rnd = random_integer::random_u8(224, 255); println!("{}", rnd); ``` */ #![no_std] extern crate core; pub extern crate rand; use core::cmp::Ordering; use rand::distributions::uniform::{SampleBorrow, SampleUniform, Uniform}; use rand::distributions::Distribution; use rand::Rng; #[inline] fn random<N>(a: N, b: N) -> N where N: SampleUniform + SampleBorrow<N> + Ord + Sized, { let mut rng = rand::thread_rng(); random_with_rng(a, b, &mut rng) } #[inline] fn random_with_rng<N, T: Rng>(a: N, b: N, rng: &mut T) -> N where N: SampleUniform + SampleBorrow<N> + Ord + Sized, { match a.cmp(&b) { Ordering::Greater => { let simpler = Uniform::new_inclusive(b, a); simpler.sample(rng) } Ordering::Equal => a, Ordering::Less => { let simpler = Uniform::new_inclusive(a, b); simpler.sample(rng) } } } #[inline] pub fn random_usize(a: usize, b: usize) -> usize { random(a, b) } #[inline] pub fn random_usize_with_rng<T: Rng>(a: usize, b: usize, rng: &mut T) -> usize { random_with_rng(a, b, rng) } #[inline] pub fn random_u64(a: u64, b: u64) -> u64 { random(a, b) } #[inline] pub fn random_u64_with_rng<T: Rng>(a: u64, b: u64, rng: &mut T) -> u64 { random_with_rng(a, b, rng) } #[inline] pub fn random_u32(a: u32, b: u32) -> u32 { random(a, b) } #[inline] pub fn random_u32_with_rng<T: Rng>(a: u32, b: u32, rng: &mut T) -> u32 { random_with_rng(a, b, rng) } #[inline] pub fn random_u16(a: u16, b: u16) -> u16 { random(a, b) } #[inline] pub fn random_u16_with_rng<T: Rng>(a: u16, b: u16, rng: &mut T) -> u16 { random_with_rng(a, b, rng) } #[inline] pub fn random_u8(a: u8, b: u8) -> u8 { random(a, b) } #[inline] pub fn random_u8_with_rng<T: Rng>(a: u8, b: u8, rng: &mut T) -> u8 { random_with_rng(a, b, rng) } #[inline] pub fn random_isize(a: isize, b: isize) -> isize { random(a, b) } #[inline] pub fn random_isize_with_rng<T: Rng>(a: isize, b: isize, rng: &mut T) -> isize { random_with_rng(a, b, rng) } #[inline] pub fn random_i64(a: i64, b: i64) -> i64 { random(a, b) } #[inline] pub fn random_i64_with_rng<T: Rng>(a: i64, b: i64, rng: &mut T) -> i64 { random_with_rng(a, b, rng) } #[inline] pub fn random_i32(a: i32, b: i32) -> i32 { random(a, b) } #[inline] pub fn random_i32_with_rng<T: Rng>(a: i32, b: i32, rng: &mut T) -> i32 { random_with_rng(a, b, rng) } #[inline] pub fn random_i16(a: i16, b: i16) -> i16 { random(a, b) } #[inline] pub fn random_i16_with_rng<T: Rng>(a: i16, b: i16, rng: &mut T) -> i16 { random_with_rng(a, b, rng) } #[inline] pub fn random_i8(a: i8, b: i8) -> i8 { random(a, b) } #[inline] pub fn random_i8_with_rng<T: Rng>(a: i8, b: i8, rng: &mut T) -> i8 { random_with_rng(a, b, rng) }
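
// ---------------------------------------------------------------------------
// Editor's addition, not part of the original crate: a sketch showing the
// `_with_rng` variants, which let one RNG be reused across many calls. It only
// relies on `rand::thread_rng()`, which the crate itself already uses above;
// the concrete bounds are illustrative.
// ---------------------------------------------------------------------------
#[allow(dead_code)]
fn usage_sketch() {
    let mut rng = rand::thread_rng();

    // The bounds are inclusive and may be given in either order.
    let byte = random_u8_with_rng(224, 255, &mut rng);
    let signed = random_i32_with_rng(10, -10, &mut rng);

    let _ = (byte, signed);
}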
true
87a9a13ca12f763a7cdb34c932f953ab841047f0
Rust
alerdenisov/unrust
/src/world/fps.rs
UTF-8
2,133
2.71875
3
[ "Apache-2.0", "LicenseRef-scancode-unknown-license-reference", "MIT" ]
permissive
use uni_app::now;

use std;
use std::collections::VecDeque;

pub struct DeltaTimeStats {
    pub dt_max: f64,
    pub dt_min: f64,
    pub dt_avg: f64,
    pub dt_history: VecDeque<f64>,
}

impl DeltaTimeStats {
    fn new() -> DeltaTimeStats {
        DeltaTimeStats {
            dt_max: std::f64::MIN,
            dt_min: std::f64::MAX,
            dt_avg: 0.0,
            dt_history: VecDeque::new(),
        }
    }

    fn update(&mut self, dt: f64) {
        self.dt_max = if self.dt_max > dt { self.dt_max } else { dt };
        self.dt_min = if self.dt_min < dt { self.dt_min } else { dt };

        self.dt_history.push_back(dt);
        if self.dt_history.len() > 60 {
            self.dt_history.pop_front();
        }

        self.dt_avg =
            self.dt_history.iter().fold(0.0, |acc, dt| acc + *dt) / (self.dt_history.len() as f64);
    }
}

pub struct FPS {
    counter: u32,
    delta_time: f64,
    delta_time_stats: DeltaTimeStats,
    last_delta_time_stats: DeltaTimeStats,
    last_second: f64,
    last_frame: f64,
    pub fps: u32,
}

impl FPS {
    pub fn new() -> FPS {
        let fps = FPS {
            counter: 0,
            last_second: now(),
            last_frame: now(),
            fps: 0,
            delta_time: 0.0,
            delta_time_stats: DeltaTimeStats::new(),
            last_delta_time_stats: DeltaTimeStats::new(),
        };

        fps
    }

    pub fn delta_time(&self) -> f64 {
        self.delta_time
    }

    pub fn delta_time_stats(&self) -> &DeltaTimeStats {
        &self.last_delta_time_stats
    }

    pub fn step(&mut self) {
        self.counter += 1;
        let curr = now();

        self.delta_time = curr - self.last_frame;
        self.delta_time_stats.update(self.delta_time);

        if curr - self.last_second > 1.0 {
            self.last_second = curr;
            self.fps = self.counter;
            self.counter = 0;

            std::mem::swap(&mut self.last_delta_time_stats, &mut self.delta_time_stats);
            self.delta_time_stats = DeltaTimeStats::new();
        }

        self.last_frame = curr;
    }
}
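
// ---------------------------------------------------------------------------
// Editor's addition, not part of the original engine file: a sketch of the
// intended call pattern. The loop is illustrative; a real game loop in this
// engine calls `step()` once per rendered frame.
// ---------------------------------------------------------------------------
#[allow(dead_code)]
fn usage_sketch() {
    let mut fps = FPS::new();

    for _ in 0..3 {
        // ... update and render one frame here ...
        fps.step();
    }

    // `fps.fps` is refreshed once per second; `delta_time()` is the time the
    // last frame took, and `delta_time_stats()` summarises the previous second.
    let _last_frame_seconds = fps.delta_time();
    let _stats = fps.delta_time_stats();
}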
true
89a50e71da170c21ff3ddc6546568789dbb77591
Rust
jroimartin/expos
/expos/src/main.rs
UTF-8
1,702
2.6875
3
[]
no_license
//! expOS is a tiny Operating System focused on experimentation.

#![no_std]
#![cfg_attr(not(test), no_main)]
#![feature(panic_info_message)]

use range::RangeSet;
use uefi::acpi;

#[cfg(not(test))]
mod panic;
mod serial;

struct BootInfo {
    available_memory: RangeSet,
    acpi_madt: acpi::Madt,
}

/// UEFI entry point.
#[no_mangle]
extern "C" fn efi_main(
    image_handle: uefi::Handle,
    system_table_ptr: uefi::Ptr,
) -> ! {
    // Initialize serial.
    serial::init_serial();

    // Parse UEFI's system table.
    let system_table = unsafe { uefi::SystemTable::new(system_table_ptr).unwrap() };

    // Get LAPIC data.
    let config_tables = system_table.configuration_tables().unwrap();
    let rsdp20_ptr = config_tables.acpi_rsdp20_ptr().unwrap();
    let rsdp20 = unsafe { acpi::Rsdp20::new(rsdp20_ptr).unwrap() };
    let xsdt = rsdp20.xsdt().unwrap();
    let madt = xsdt.madt().unwrap();

    // Get available memory.
    let boot_services = system_table.boot_services().unwrap();
    let (available_memory, map_key) =
        uefi::mem::get_available_memory(&boot_services).unwrap();

    // Exit UEFI boot services.
    boot_services
        .exit_boot_services(image_handle, map_key)
        .unwrap();

    // Fill `BootInfo` structure and call kernel's entrypoint.
    let boot_info = BootInfo {
        available_memory,
        acpi_madt: madt,
    };

    os_main(boot_info)
}

/// Kernel entry point.
fn os_main(boot_info: BootInfo) -> ! {
    println!("lapic: {:#x?}", boot_info.acpi_madt.lapic());
    println!("memory map: {:#x?}", boot_info.available_memory.ranges());
    println!("memory size: {}", boot_info.available_memory.size());

    panic!("end");
}
true
c968daa7cc804355f6e0c423edc9d6f96f20109a
Rust
ocstl/project_euler
/src/bin/problem68.rs
UTF-8
2,670
3.21875
3
[]
no_license
use permutohedron::LexicalPermutation;
use std::convert::TryFrom;
use std::error::Error;
use std::fmt;

const NODES: u32 = 10;

#[derive(Debug)]
struct MagicRingError;

impl fmt::Display for MagicRingError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "Invalid magic ring.")
    }
}

impl Error for MagicRingError {
    fn source(&self) -> Option<&(dyn Error + 'static)> {
        None
    }
}

struct MagicRing {
    inner: Vec<u32>,
    outer: Vec<u32>,
}

impl MagicRing {
    fn new(inner: &[u32], outer: &[u32]) -> Self {
        MagicRing {
            inner: inner.to_vec(),
            outer: outer.to_vec(),
        }
    }

    // Generate lines from the outside in.
    fn lines<'a>(&'a self) -> impl Iterator<Item = (u32, u32, u32)> + 'a {
        let inner = self.inner.iter().zip(self.inner.iter().cycle().skip(1));
        self.outer.iter().zip(inner).map(|(&a, (&b, &c))| (a, b, c))
    }
}

// Print the `MagicRing` by concatenating its lines (from the outside in), starting with the
// smallest value in the outside "ring".
impl fmt::Display for MagicRing {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut lines: Vec<(u32, u32, u32)> = self.lines().collect();
        let min_line = lines.iter().min().unwrap();
        let idx = lines.iter().position(|line| line == min_line).unwrap();
        lines.rotate_left(idx);

        let s: String = lines
            .iter()
            .map(|(a, b, c)| format!("{}{}{}", a, b, c))
            .collect();

        write!(f, "{}", s)
    }
}

impl TryFrom<&[u32]> for MagicRing {
    type Error = MagicRingError;

    fn try_from(values: &[u32]) -> Result<Self, Self::Error> {
        let l = values.len();
        if l % 2 != 0 {
            return Err(MagicRingError);
        }

        let (outer, inner) = values.split_at(l / 2);
        let ring = MagicRing::new(inner, outer);

        let sum_line = |line: (u32, u32, u32)| line.0 + line.1 + line.2;

        {
            let mut lines = ring.lines();
            let value = sum_line(lines.next().unwrap());
            if lines.any(|line| sum_line(line) != value) {
                return Err(MagicRingError);
            }
        }

        Ok(ring)
    }
}

fn main() {
    let mut numbers: Vec<u32> = (1..=NODES).collect();

    let mut rings = Vec::new();
    while numbers.next_permutation() {
        if let Ok(ring) = MagicRing::try_from(numbers.as_slice()) {
            rings.push(format!("{}", ring));
        }
    }

    let answer = rings
        .into_iter()
        .max()
        .unwrap_or_else(|| String::from("No ring."));

    println!("The answer is {}", answer);
}
true
85f8f3389fc5f48f523c472626ee5bfef79e38c2
Rust
ExternalReality/dynamic-split
/.circleci/test-lib/tests/dummy_test.rs
UTF-8
210
2.65625
3
[ "MIT" ]
permissive
use std::{thread, time};

use rand::Rng;

#[test]
fn sample_test() {
    let mut rng = rand::thread_rng();
    let seconds = time::Duration::new(rng.gen_range(0, 10), 0);
    thread::sleep(seconds);
    assert!(true);
}
true
3d74a395b048468b5e6c6ed499d575314b415e96
Rust
park66665/auxtools
/dm/src/string.rs
UTF-8
1,645
3.28125
3
[ "MIT" ]
permissive
use super::raw_types;
use super::value::Value;
use std::ffi::CStr;
use std::fmt;

/// A wrapper around [Values](struct.Value.html) that makes working with strings a little easier.
pub struct StringRef {
    pub value: Value,
}

impl StringRef {
    pub fn new(string: &str) -> Self {
        StringRef {
            value: Value::from_string(string),
        }
    }

    pub fn from_value(value: Value) -> Option<Self> {
        if value.value.tag != raw_types::values::ValueTag::String {
            return None;
        }

        // Here we're going from value -> raw -> new value because to get that juicy static lifetime
        Some(StringRef {
            value: unsafe { Value::from_raw(value.value) },
        })
    }

    pub unsafe fn from_id(id: u32) -> Self {
        StringRef {
            value: Value::from_raw(raw_types::values::Value {
                tag: raw_types::values::ValueTag::String,
                data: raw_types::values::ValueData { id },
            }),
        }
    }

    pub fn get_id(&self) -> u32 {
        unsafe { self.value.value.data.id }
    }
}

impl Clone for StringRef {
    fn clone(&self) -> Self {
        Self::from_value(self.value.clone()).unwrap()
    }
}

impl fmt::Debug for StringRef {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let data: String = self.clone().into();
        write!(f, "{}", data)
    }
}

impl From<&str> for StringRef {
    fn from(string: &str) -> StringRef {
        StringRef::new(string)
    }
}

impl Into<String> for StringRef {
    fn into(self) -> String {
        unsafe {
            let id = self.value.value.data.string;
            let mut entry: *mut raw_types::strings::StringEntry = std::ptr::null_mut();
            assert_eq!(raw_types::funcs::get_string_table_entry(&mut entry, id), 1);
            CStr::from_ptr((*entry).data).to_string_lossy().into()
        }
    }
}
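
// ---------------------------------------------------------------------------
// Editor's addition, not part of the original auxtools module: a sketch of the
// intended round trip. `Value::from_string` and the string-table lookup only
// work while hooked into a running BYOND process, so this compiles but is
// illustration rather than something runnable in a plain unit test.
// ---------------------------------------------------------------------------
#[allow(dead_code)]
fn usage_sketch() -> String {
    // Intern a Rust string, hand out cheap clones, then read it back out.
    let greeting = StringRef::new("hello from auxtools");
    let _id = greeting.get_id();
    greeting.clone().into()
}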
true
fb9731469fe47f8220fe5d1b667a2919483c5cc6
Rust
SidneyArmitage/png
/src/process/chunks/ihdr.rs
UTF-8
2,149
3.1875
3
[]
no_license
use super::super::*;

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_success() {
        let mut options = Options::new();
        let out = process(&[0, 0, 0, 0x20, 0, 0, 0, 0x20, 0x10, 0x06, 0, 0, 0], 17, &mut options);
        assert_eq!(out, Ok(()));
        assert_eq!(options.bit_depth, 16);
        assert_eq!(options.colour_type, 6);
        assert_eq!(options.height, 0x20);
        assert_eq!(options.interlace, false);
        assert_eq!(match options.state {
            State::IHDR => true,
            _ => false,
        }, true);
        assert_eq!(options.width, 0x20);
    }

    #[test]
    fn test_validator() {
        assert_eq!(colour_and_bit_is_valid(6, 16), true);
    }
}

fn colour_and_bit_is_valid(colour_type: u8, bit_depth: u8) -> bool {
    match colour_type {
        0 => match bit_depth {
            1 | 2 | 4 | 8 | 16 => true,
            _ => false,
        },
        2 => match bit_depth {
            8 | 16 => true,
            _ => false,
        },
        3 => match bit_depth {
            1 | 2 | 4 | 8 => true,
            _ => false,
        },
        4 => match bit_depth {
            8 | 16 => true,
            _ => false,
        },
        6 => match bit_depth {
            8 | 16 => true,
            _ => false,
        },
        _ => false,
    }
}

pub fn process(buffer: &[u8], length: u32, options: &mut Options) -> Result<(), ()> {
    if !colour_and_bit_is_valid(buffer[9], buffer[8]) {
        println!("Invalid colour and bit depth");
        return Err(());
    }
    // compression method u8
    if buffer[10] != 0 {
        println!("Compression method does not equal 0");
        return Err(());
    }
    // filter method u8
    if buffer[11] != 0 {
        println!("Filter method does not equal 0");
        return Err(());
    }
    // interlace method u8
    let interlace = buffer[12] == 0;
    if !interlace && buffer[12] != 0 {
        println!("Invalid interlace method");
        return Err(());
    }
    options.bit_depth = buffer[8];
    options.colour_type = buffer[9];
    options.width = u32::from_be_bytes([buffer[0], buffer[1], buffer[2], buffer[3]]);
    println!("{:#?}", [buffer[0], buffer[1], buffer[2], buffer[3]]);
    println!("{}", options.width);
    options.height = u32::from_be_bytes([buffer[4], buffer[5], buffer[6], buffer[7]]);
    options.state = State::IHDR;
    Ok(())
}
true
e98e56b4ce8b0895a9bcb87e3babe0fa65abe8f5
Rust
steveklabnik/stdsimd
/coresimd/ppsv/codegen/cos.rs
UTF-8
1,892
2.515625
3
[ "MIT", "LicenseRef-scancode-other-permissive", "LicenseRef-scancode-unknown-license-reference", "Apache-2.0" ]
permissive
//! Exact vector cos
#![allow(dead_code)]
use coresimd::simd::*;

#[allow(improper_ctypes)]
extern "C" {
    #[link_name = "llvm.cos.f32"]
    fn cos_f32(x: f32) -> f32;
    #[link_name = "llvm.cos.f64"]
    fn cos_f64(x: f64) -> f64;

    #[link_name = "llvm.cos.v2f32"]
    fn cos_v2f32(x: f32x2) -> f32x2;
    #[link_name = "llvm.cos.v4f32"]
    fn cos_v4f32(x: f32x4) -> f32x4;
    #[link_name = "llvm.cos.v8f32"]
    fn cos_v8f32(x: f32x8) -> f32x8;
    #[link_name = "llvm.cos.v16f32"]
    fn cos_v16f32(x: f32x16) -> f32x16;

    #[link_name = "llvm.cos.v2f64"]
    fn cos_v2f64(x: f64x2) -> f64x2;
    #[link_name = "llvm.cos.v4f64"]
    fn cos_v4f64(x: f64x4) -> f64x4;
    #[link_name = "llvm.cos.v8f64"]
    fn cos_v8f64(x: f64x8) -> f64x8;
}

pub(crate) trait FloatCos {
    fn cos(self) -> Self;
}

trait RawCos {
    fn raw_cos(self) -> Self;
}

impl RawCos for f32 {
    fn raw_cos(self) -> Self {
        unsafe { cos_f32(self) }
    }
}

impl RawCos for f64 {
    fn raw_cos(self) -> Self {
        unsafe { cos_f64(self) }
    }
}

macro_rules! impl_fcos {
    ($id:ident : $fn:ident) => {
        #[cfg(not(target_arch = "s390x"))]
        impl FloatCos for $id {
            fn cos(self) -> Self {
                unsafe { $fn(self) }
            }
        }
        // FIXME: https://github.com/rust-lang-nursery/stdsimd/issues/501
        #[cfg(target_arch = "s390x")]
        impl FloatCos for $id {
            fn cos(self) -> Self {
                let mut v = $id::splat(0.);
                for i in 0..$id::lanes() {
                    v = v.replace(i, self.extract(i).raw_cos())
                }
                v
            }
        }
    };
}

impl_fcos!(f32x2: cos_v2f32);
impl_fcos!(f32x4: cos_v4f32);
impl_fcos!(f32x8: cos_v8f32);
impl_fcos!(f32x16: cos_v16f32);

impl_fcos!(f64x2: cos_v2f64);
impl_fcos!(f64x4: cos_v4f64);
impl_fcos!(f64x8: cos_v8f64);
true
ffe29abe33c5549eb66c74d2daa7d9ae44a0f3c1
Rust
sismeon/Regalia
/src/request.rs
UTF-8
162
2.59375
3
[]
no_license
pub mod request {
    enum HTTPMethod {
        GET,
        POST,
        PUT,
        DELETE
    }

    struct HTTPRequest {
        method: HTTPMethod,
    }
}
true
63960f523645ab2cf7a156f6ebc3f8e68a6f3828
Rust
McM1k/multilayer_perceptron
/src/reader.rs
UTF-8
518
2.8125
3
[]
no_license
use super::cell::Cell;
use csv::ReaderBuilder;
use std::result::Result;
use std::result::Result::*;
use std::vec::Vec;

pub fn get_raw_data(path: &str) -> Result<Vec<Cell>, &str> {
    let mut raw_data: Vec<Cell> = Vec::new();
    let mut rdr = ReaderBuilder::new()
        .has_headers(false)
        .from_path(path)
        .expect("error while reading csv");

    for result in rdr.deserialize() {
        let record: Cell = result.expect("error in line");
        raw_data.push(record);
    }

    Ok(raw_data)
}
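
// ---------------------------------------------------------------------------
// Editor's addition, not part of the original project: a sketch of calling the
// loader above. The file name is a placeholder, and `Cell` is whatever the
// sibling `cell` module defines for one CSV row.
// ---------------------------------------------------------------------------
#[allow(dead_code)]
fn usage_sketch() {
    match get_raw_data("data/dataset.csv") {
        Ok(cells) => println!("loaded {} rows", cells.len()),
        Err(message) => eprintln!("failed to load data: {}", message),
    }
}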
true
162caf9e95bd57734217dc9d7bde65a2ae790498
Rust
Lolirofle/lolpranz
/src/glfw_game.rs
UTF-8
2,333
2.640625
3
[]
no_license
use glfw::{Context,Window};
use glfw;
use std::time::Duration;

use tdgl::game::Game;
use tdgl::game::gameloop;
use tdgl::graphics::renderer::Renderer;

pub struct GlfwGame<'g, Exit, G>
    where G: Game<glfw::WindowEvent, (), Exit> + 'g
{
    glfw: glfw::Glfw,
    pub window: (glfw::Window, Receiver<(f64, glfw::WindowEvent)>),
    game: G,
}

impl<'g, Exit, G> GlfwGame<'g, Exit, G>
    where G: Game<glfw::WindowEvent, (), Exit> + 'g
{
    pub fn using_game(game: G) -> GlfwGame<'g, Exit, G> {
        let glfw = glfw::init(glfw::FAIL_ON_ERRORS).unwrap();
        let window = glfw.create_window(640, 480, "Lolpranz", glfw::WindowMode::Windowed)
            .expect("Failed to create GLFW window.");

        GlfwGame{
            glfw: glfw,
            window: window,
            game: game,
        }
    }

    pub fn use_game(&mut self, game: G) {
        self.game = game;
    }
}

impl<'g, Exit, G> Game<(), (), Exit> for GlfwGame<'g, Exit, G>
    where G: Game<glfw::WindowEvent, (), Exit> + 'g
{
    fn should_exit(&self) -> Option<Exit> {
        if let Some(exit) = self.game.should_exit() {
            self.window.0.set_should_close(true);
            return Some(exit);
        }
        return None;
    }

    fn target_time_per_frame(&self) -> Duration {
        self.game.target_time_per_frame()
    }

    fn init_render(&self, renderer: &Renderer) -> () {
        self.glfw.make_context_current(Some(&self.window.0));

        //Window
        //self.glfw.window_hint(glfw::ContextVersion(3,2));
        //self.glfw.window_hint(glfw::OpenglForwardCompat(true));
        //self.glfw.window_hint(glfw::OpenglProfile(glfw::OpenGlCoreProfile));

        //Initialize window
        self.window.0.set_all_polling(true);
        self.window.0.make_current();
        self.glfw.set_swap_interval(0);

        renderer.init_projection(0, 0, 640, 480);

        return ();
    }
}

impl<'g, Exit, G> gameloop::Update<()> for GlfwGame<'g, Exit, G>
    where G: Game<glfw::WindowEvent, (), Exit> + 'g
{
    fn update(&mut self, _: (), delta_time: Duration) {
        self.game.update((), delta_time);
    }
}

impl<'g, Exit, G> gameloop::Render<()> for GlfwGame<'g, Exit, G>
    where G: Game<glfw::WindowEvent, (), Exit> + 'g
{
    fn render(&self, renderer: &Renderer, _: &mut ()) {
        self.game.render(renderer, &mut ());
        self.window.0.swap_buffers();
    }
}

impl<'g, Exit, G> gameloop::EventHandler<()> for GlfwGame<'g, Exit, G>
    where G: Game<glfw::WindowEvent, (), Exit> + 'g
{
    fn event(&mut self, _: ()) {
        self.glfw.poll_events();
        for (_, e) in glfw::flush_messages(&self.window.1) {
            self.game.event(e);
        }
    }
}
true
093e2bdd1f7d719e34b48d143d0b1583587892a8
Rust
happyspace/rust-bucket
/misc/src/rust/panic.rs
UTF-8
954
3.796875
4
[]
no_license
pub fn divide_non_zero_result(a: u32, b: u32) -> u32 {
    if b == 0 {
        panic!("Divide-by-zero error");
    } else if a < b {
        panic!("Integer division = 0!");
    }
    a / b
}

pub fn sqrt(number: f64) -> Result<f64, String> {
    if number >= 0.0 {
        Ok(number.powf(0.5))
    } else {
        Err("negative floats don't have square roots".to_owned())
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_sqrt() -> Result<(), String> {
        let x = 4.0;
        assert_eq!(sqrt(x)?.powf(2.0), x);
        Ok(())
    }

    #[test]
    fn test_divide() {
        assert_eq!(divide_non_zero_result(10, 2), 5)
    }

    #[test]
    #[should_panic]
    fn test_any_panic() {
        divide_non_zero_result(1, 0);
    }

    #[test]
    #[should_panic(expected = "Integer division = 0!")]
    fn test_specific_panic() {
        let moo = divide_non_zero_result(1, 10);
        println!("{} ", moo);
    }
}
true
0ccf0a7e5097b6d1728fc7b739083224c7806286
Rust
sbaldwin621/adventofcode2020
/day16/train_ticket2/src/notes.rs
UTF-8
7,814
2.546875
3
[]
no_license
use std::collections::{HashMap, HashSet}; use std::error::Error; use std::fmt::Display; use std::ops::{Index, Range}; use std::slice::Iter; use std::str::FromStr; use nom::bitvec::index; use crate::parser::parse_notes; #[derive(Debug)] pub struct Notes { ruleset: Ruleset, my_ticket: Ticket, nearby_tickets: TicketSet } impl Notes { pub fn new(ruleset: Ruleset, my_ticket: Ticket, nearby_tickets: TicketSet) -> Notes { Notes { ruleset, my_ticket, nearby_tickets } } pub fn ruleset(&self) -> &Ruleset { &self.ruleset } pub fn my_ticket(&self) -> &Ticket { &self.my_ticket } pub fn get_valid_nearby_tickets(&self) -> TicketSet { let mut valid_nearby_tickets = Vec::new(); for ticket in self.nearby_tickets.iter() { if self.ruleset.validate_ticket(&ticket).len() == 0 { valid_nearby_tickets.push(ticket.clone()); } } TicketSet::new(valid_nearby_tickets) } } impl FromStr for Notes { type Err = ParseNotesError; fn from_str(s: &str) -> Result<Self, Self::Err> { match parse_notes(s) { Ok((_, notes)) => Ok(notes), Err(e) => Err(ParseNotesError::InvalidNotes(e.to_string())) } } } #[derive(Debug)] pub enum ParseNotesError { InvalidNotes(String) } impl Display for ParseNotesError { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { writeln!(f, "{}", match self { ParseNotesError::InvalidNotes(error) => format!("invalid notes: {}", error) }) } } impl Error for ParseNotesError { } #[derive(Debug)] pub struct Ruleset { rules: Vec<Rule> } impl Ruleset { pub fn new(rules: Vec<Rule>) -> Ruleset { Ruleset { rules } } pub fn validate_ticket(&self, ticket: &Ticket) -> Vec<u64> { let mut result = Vec::new(); for value in ticket.iter() { if !self.contains(value) { result.push(*value); } } result } pub fn determine_fields(&self, ticket_set: &TicketSet) -> Vec<Field> { let mut fields_by_index: HashMap<usize, HashSet<String>> = HashMap::new(); let mut fields_by_name: HashMap<String, HashSet<usize>> = HashMap::new(); let column_set = ticket_set.invert(); for (i, column) in column_set.iter().enumerate() { for rule in self.rules.iter() { if rule.contains_column(column) { let rule_name = rule.name.clone(); if let Some(names_for_index) = fields_by_index.get_mut(&i) { names_for_index.insert(rule_name); } else { let mut names_for_index = HashSet::new(); names_for_index.insert(rule_name); fields_by_index.insert(i, names_for_index); } if let Some(indexes_for_name) = fields_by_name.get_mut(&rule.name) { indexes_for_name.insert(i); } else { let mut indexes_for_name = HashSet::new(); indexes_for_name.insert(i); fields_by_name.insert(rule.name.clone(), indexes_for_name); } } } } let mut fields = Vec::new(); loop { let mut should_continue = false; for (index, names_for_index) in fields_by_index.iter_mut() { if names_for_index.len() == 1 { let name = names_for_index.iter().nth(0).unwrap().clone(); names_for_index.remove(&name); fields.push(Field { name, index: *index }); should_continue = true; } } for field in fields.iter() { fields_by_index.remove(&field.index); fields_by_name.remove(&field.name); } for (name, indexes_for_name) in fields_by_name.iter_mut() { for field in fields.iter() { indexes_for_name.remove(&field.index); } if indexes_for_name.len() == 1 { let index = *indexes_for_name.iter().nth(0).unwrap(); indexes_for_name.remove(&index); fields.push(Field { name: name.clone(), index }); should_continue = true; } } for field in fields.iter() { fields_by_index.remove(&field.index); fields_by_name.remove(&field.name); } if !should_continue { break; } } fields } fn contains(&self, number: &u64) -> bool { for rule in 
&self.rules { if rule.contains(number) { return true; } } false } } #[derive(Debug)] pub struct Rule { name: String, ranges: Vec<Range<u64>> } impl Rule { pub fn new(name: &str, ranges: Vec<Range<u64>>) -> Rule { Rule { name: name.to_string(), ranges } } pub fn contains(&self, number: &u64) -> bool { for range in &self.ranges { if range.contains(&number) { return true; } } false } pub fn contains_column(&self, column: &Column) -> bool { for value in column.iter() { if !self.contains(value) { return false; } } true } } #[derive(Debug)] pub struct TicketSet { tickets: Vec<Ticket> } impl TicketSet { pub fn new(tickets: Vec<Ticket>) -> TicketSet { TicketSet { tickets } } pub fn iter(&self) -> Iter<'_, Ticket> { self.tickets.iter() } pub fn invert(&self) -> ColumnSet { let mut columns = (0..self.tickets[0].len()).map(|_| Vec::new()).collect::<Vec<_>>(); for ticket in self.tickets.iter() { if ticket.len() != columns.len() { panic!("uneven tickets"); } for i in 0..ticket.len() { let value = ticket[i]; let column = &mut columns[i]; column.push(value); } } let mut result = Vec::new(); for column in columns { result.push(Column { values: column }); } ColumnSet { columns: result } } } #[derive(Debug, Clone)] pub struct Ticket { values: Vec<u64> } impl Ticket { pub fn new(values: Vec<u64>) -> Ticket { Ticket { values } } pub fn get(&self, index: usize) -> Option<&u64> { self.values.get(index) } pub fn len(&self) -> usize { self.values.len() } pub fn iter(&self) -> Iter<u64> { self.values.iter() } } impl Index<usize> for Ticket { type Output = u64; fn index(&self, index: usize) -> &Self::Output { &self.values[index] } } #[derive(Debug)] pub struct ColumnSet { columns: Vec<Column> } impl ColumnSet { pub fn new(columns: Vec<Column>) -> ColumnSet { ColumnSet { columns } } pub fn iter(&self) -> Iter<Column> { self.columns.iter() } } #[derive(Debug, Clone)] pub struct Column { values: Vec<u64> } impl Column { pub fn new(values: Vec<u64>) -> Column { Column { values } } pub fn iter(&self) -> Iter<u64> { self.values.iter() } } #[derive(Debug, Clone)] pub struct Field { pub name: String, pub index: usize } impl Field { pub fn new(name: String, index: usize) -> Field { Field { name, index } } }
true
457afb49a5ce37d137d9d35830f49766f8e4c96d
Rust
MDGSF/JustCoding
/rust-leetcode/leetcode_1309/src/main.rs
UTF-8
168
2.546875
3
[ "MIT" ]
permissive
use leetcode_1309::solution1::Solution;

fn main() {
    let s = "10#11#12".to_string();
    let result = Solution::freq_alphabets(s);
    println!("result = {}", result);
}
true
5b204f724d1ea79741662d328780ca36f8e7dae0
Rust
mazur/advent-of-code-2020
/src/day07/mod.rs
UTF-8
5,141
3.421875
3
[]
no_license
use std::collections::HashMap; use itertools::Itertools; pub fn run() { let system = BagSystem::build_from_rules(include_str!("input.txt")); let can_contain_shiny_gold = system.count_contains("shiny gold"); let shiny_gold_requires_contain = system.count_required_bags("shiny gold"); println!("Day07 - Part 1: {}", can_contain_shiny_gold); println!("Day07 - Part 2: {}", shiny_gold_requires_contain); } struct BagSystem<'a> { contains: HashMap<&'a str, Vec<(&'a str, u32)>>, contained_in: HashMap<&'a str, Vec<&'a str>> } impl<'a> BagSystem<'a> { fn new() -> Self { Self { contains: HashMap::new(), contained_in: HashMap::new() } } fn count_required_bags(&self, key: &str) -> u32 { match self.contains.get(key) { Some (b) => { b.iter().fold(0, |sum, bag| sum + bag.1 + (bag.1* self.count_required_bags(bag.0))) }, None => 0 } } fn count_contains(&self, key: &str) -> usize { let mut all = self.get_all_contains(key); all.sort(); all.dedup(); all.len() } fn get_all_contains(&self, key: &str) -> Vec<&str> { match self.contained_in.get(key) { Some(b) => { let mut res = b.to_vec(); for contained in b { res.append(&mut BagSystem::get_all_contains(self, contained)); } res }, None => Vec::new() } } fn build_from_rules(input: &'a str) -> Self { let mut sys = Self::new(); for line in input.lines() { sys.add_bag_rule(line); } sys } fn add_bag_rule(&mut self, rule_str: &'a str) { let (bag, rules) = rule_str.split(" bags contain ").collect_tuple().unwrap(); let contained_bags: Vec<(&str, u32)> = if rules != "no other bags." { rules.split(", ").map(|c| { let end = c.find(" bag").unwrap(); let n = c[0..1].parse().expect("Not a number"); (&c[2..end], n) }).collect() } else { Vec::new() }; self.add_bag(bag, contained_bags); } fn add_bag(&mut self, bag: &'a str, contained: Vec<(&'a str, u32)>) { for con_bag in &contained { let contained_in = self.contained_in .entry(con_bag.0) .or_insert(Vec::new()); contained_in.push(bag); } self.contains.insert(bag, contained); } } #[cfg(test)] mod tests { use super::*; static TEST_INPUT: &str = "light red bags contain 1 bright white bag, 2 muted yellow bags. dark orange bags contain 3 bright white bags, 4 muted yellow bags. bright white bags contain 1 shiny gold bag. muted yellow bags contain 2 shiny gold bags, 9 faded blue bags. shiny gold bags contain 1 dark olive bag, 2 vibrant plum bags. dark olive bags contain 3 faded blue bags, 4 dotted black bags. vibrant plum bags contain 5 faded blue bags, 6 dotted black bags. faded blue bags contain no other bags. 
dotted black bags contain no other bags."; #[test] fn test_add_bag_to_system() { let mut system = BagSystem::new(); let bags = vec![("shiny magenta",1), ("wavy teal",2)]; system.add_bag("light red", bags); assert_eq!(2, system.contained_in.len()); } #[test] fn test_add_empty_bag_rule_to_system() { let mut system = BagSystem::new(); system.add_bag_rule("striped tomato bags contain no other bags."); assert_eq!(0, system.contained_in.len()); } #[test] fn test_add_two_bag_rule_to_system() { let mut system = BagSystem::new(); system.add_bag_rule("dark orange bags contain 3 bright white bags, 4 muted yellow bags."); assert_eq!(2, system.contained_in.len()); } #[test] fn test_build_rule_to_system_from_input() { let system = BagSystem::build_from_rules(TEST_INPUT); assert_eq!(7, system.contained_in.len()); } #[test] fn test_get_all_bag_contains() { let system = BagSystem::build_from_rules(TEST_INPUT); let res = system.get_all_contains("shiny gold"); assert_eq!(6, res.len()); } #[test] fn test_count_contains() { let system = BagSystem::build_from_rules(TEST_INPUT); let res = system.count_contains("shiny gold"); assert_eq!(4, res); } #[test] fn test_count_required_bags() { let system = BagSystem::build_from_rules(TEST_INPUT); let res = system.count_required_bags("shiny gold"); assert_eq!(32, res); } }
true