blob_id
stringlengths 40
40
| language
stringclasses 1
value | repo_name
stringlengths 5
140
| path
stringlengths 5
183
| src_encoding
stringclasses 6
values | length_bytes
int64 12
5.32M
| score
float64 2.52
4.94
| int_score
int64 3
5
| detected_licenses
listlengths 0
47
| license_type
stringclasses 2
values | text
stringlengths 12
5.32M
| download_success
bool 1
class |
---|---|---|---|---|---|---|---|---|---|---|---|
5281539881caf7cf94e59df5563e7280ef0733b1
|
Rust
|
farzonl/interviewPrep
|
/rust/src/bin/main.rs
|
UTF-8
| 1,679 | 3.390625 | 3 |
[] |
no_license
|
extern crate interviewprep;
/// Binary entry point — intentionally empty. This file exists only to host
/// the `#[test]` functions below; run them with `cargo test`.
pub fn main() {
}
#[test]
fn ch10_1_merge_blarger() {
    // CtCI 10.1: merge sorted `b` into sorted `a` (which has trailing slack);
    // here every element of `b` is larger than the occupied prefix of `a`.
    let mut a: [i32; 4] = [1, 2, 0, 0];
    let b: [i32; 2] = [3, 4];
    let sln: [i32; 4] = [1, 2, 3, 4];
    interviewprep::merge(&mut a, 2, &b);
    // assert_eq! compares length and contents and prints both sides on
    // failure, unlike the previous zip/all check which reported no values.
    assert_eq!(a, sln, "Arrays are not equal");
}
#[test]
fn ch10_1_merge_swizzle() {
    // CtCI 10.1: merged output must interleave elements from `a` and `b`.
    let mut a: [i32; 4] = [1, 3, 0, 0];
    let b: [i32; 2] = [2, 4];
    let sln: [i32; 4] = [1, 2, 3, 4];
    interviewprep::merge(&mut a, 2, &b);
    // assert_eq! compares length and contents and prints both sides on
    // failure, unlike the previous zip/all check which reported no values.
    assert_eq!(a, sln, "Arrays are not equal");
}
#[test]
fn ch10_1_merge_alarger() {
    // CtCI 10.1: every element of `b` is smaller than the occupied prefix of
    // `a`, forcing the merge to shift `a`'s elements to the back.
    let mut a: [i32; 4] = [3, 4, 0, 0];
    let b: [i32; 2] = [1, 2];
    let sln: [i32; 4] = [1, 2, 3, 4];
    interviewprep::merge(&mut a, 2, &b);
    // assert_eq! compares length and contents and prints both sides on
    // failure, unlike the previous zip/all check which reported no values.
    assert_eq!(a, sln, "Arrays are not equal");
}
#[test]
fn ch10_1_b_one_elem() {
    // CtCI 10.1: single-element `b`, including a duplicate of a value in `a`.
    let mut a: [i32; 4] = [1, 3, 4, 0];
    let b: [i32; 1] = [3];
    let sln: [i32; 4] = [1, 3, 3, 4];
    interviewprep::merge(&mut a, 3, &b);
    // assert_eq! compares length and contents and prints both sides on
    // failure, unlike the previous zip/all check which reported no values.
    assert_eq!(a, sln, "Arrays are not equal");
}
#[test]
fn ch10_3_rotate_array() {
    // CtCI 10.3: search a rotated sorted array; expect the index of the
    // target, or -1 when it is absent.
    // Cases: find 5 in {15, 16, 19, 20, 25, 1, 3, 4, 5, 7, 10, 14},
    //        {10, 15, 20, 0, 5}, and {50, 5, 20, 30, 40}.
    let rotated: [i32; 12] = [15, 16, 19, 20, 25, 1, 3, 4, 5, 7, 10, 14];
    let short_rotated: [i32; 5] = [10, 15, 20, 0, 5];
    let pivot_early: [i32; 5] = [50, 5, 20, 30, 40];
    assert_eq!(interviewprep::rotated_search(&rotated, 5), 8);
    assert_eq!(interviewprep::rotated_search(&short_rotated, 5), 4);
    assert_eq!(interviewprep::rotated_search(&pivot_early, 5), 1);
    assert_eq!(interviewprep::rotated_search(&pivot_early, 6), -1);
}
| true |
07a87edffd5d4ac261e39299793bcaed57367aa5
|
Rust
|
TangentW/sine_chat
|
/src/message/handshake.rs
|
UTF-8
| 783 | 3.078125 | 3 |
[
"MIT"
] |
permissive
|
use serde::{Deserialize, Serialize};
/// Client-to-server handshake message carrying an opaque token.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Handshake {
    /// Opaque token presented during the handshake; validated elsewhere.
    pub token: String,
}
impl Handshake {
pub fn new(token: String) -> Self {
Self { token }
}
}
/// Server-to-client reply to a `Handshake`.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct HandshakeReply {
    /// Whether the handshake was accepted.
    pub success: bool,
    /// Optional human-readable detail (e.g. an error description on failure).
    pub message: Option<String>,
}
impl HandshakeReply {
pub fn new(success: bool, message: Option<String>) -> Self {
Self { success, message }
}
pub fn success(message: Option<String>) -> Self {
Self::new(true, message)
}
pub fn failed(message: Option<String>) -> Self {
Self::new(false, message)
}
pub fn error(err: impl std::error::Error) -> Self {
Self::failed(Some(err.to_string()))
}
}
| true |
7762f140935a50b1d4b1bcd27d98062c85d36c8c
|
Rust
|
udoprog/async-oauth2
|
/src/lib.rs
|
UTF-8
| 43,724 | 3.171875 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
//! [<img alt="github" src="https://img.shields.io/badge/github-udoprog/async--oauth2-8da0cb?style=for-the-badge&logo=github" height="20">](https://github.com/udoprog/async-oauth2)
//! [<img alt="crates.io" src="https://img.shields.io/crates/v/async-oauth2.svg?style=for-the-badge&color=fc8d62&logo=rust" height="20">](https://crates.io/crates/async-oauth2)
//! [<img alt="docs.rs" src="https://img.shields.io/badge/docs.rs-async--oauth2-66c2a5?style=for-the-badge&logoColor=white&logo=data:image/svg+xml;base64,PHN2ZyByb2xlPSJpbWciIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgdmlld0JveD0iMCAwIDUxMiA1MTIiPjxwYXRoIGZpbGw9IiNmNWY1ZjUiIGQ9Ik00ODguNiAyNTAuMkwzOTIgMjE0VjEwNS41YzAtMTUtOS4zLTI4LjQtMjMuNC0zMy43bC0xMDAtMzcuNWMtOC4xLTMuMS0xNy4xLTMuMS0yNS4zIDBsLTEwMCAzNy41Yy0xNC4xIDUuMy0yMy40IDE4LjctMjMuNCAzMy43VjIxNGwtOTYuNiAzNi4yQzkuMyAyNTUuNSAwIDI2OC45IDAgMjgzLjlWMzk0YzAgMTMuNiA3LjcgMjYuMSAxOS45IDMyLjJsMTAwIDUwYzEwLjEgNS4xIDIyLjEgNS4xIDMyLjIgMGwxMDMuOS01MiAxMDMuOSA1MmMxMC4xIDUuMSAyMi4xIDUuMSAzMi4yIDBsMTAwLTUwYzEyLjItNi4xIDE5LjktMTguNiAxOS45LTMyLjJWMjgzLjljMC0xNS05LjMtMjguNC0yMy40LTMzLjd6TTM1OCAyMTQuOGwtODUgMzEuOXYtNjguMmw4NS0zN3Y3My4zek0xNTQgMTA0LjFsMTAyLTM4LjIgMTAyIDM4LjJ2LjZsLTEwMiA0MS40LTEwMi00MS40di0uNnptODQgMjkxLjFsLTg1IDQyLjV2LTc5LjFsODUtMzguOHY3NS40em0wLTExMmwtMTAyIDQxLjQtMTAyLTQxLjR2LS42bDEwMi0zOC4yIDEwMiAzOC4ydi42em0yNDAgMTEybC04NSA0Mi41di03OS4xbDg1LTM4Ljh2NzUuNHptMC0xMTJsLTEwMiA0MS40LTEwMi00MS40di0uNmwxMDItMzguMiAxMDIgMzguMnYuNnoiPjwvcGF0aD48L3N2Zz4K" height="20">](https://docs.rs/async-oauth2)
//!
//! An asynchronous OAuth2 flow implementation, trying to adhere as much as
//! possible to [RFC 6749].
//!
//! <br>
//!
//! ## Examples
//!
//! To see the library in action, you can go to one of our examples:
//!
//! - [Google]
//! - [Spotify]
//! - [Twitch]
//!
//! If you've checked out the project they can be run like this:
//!
//! ```sh
//! cargo run --manifest-path=examples/Cargo.toml --bin spotify --
//! --client-id <client-id> --client-secret <client-secret>
//! cargo run --manifest-path=examples/Cargo.toml --bin google --
//! --client-id <client-id> --client-secret <client-secret>
//! cargo run --manifest-path=examples/Cargo.toml --bin twitch --
//! --client-id <client-id> --client-secret <client-secret>
//! ```
//!
//! > Note: You need to configure your client integration to permit redirects to
//! > `http://localhost:8080/api/auth/redirect` for these to work. How this is
//! > done depends on the integration used.
//!
//! <br>
//!
//! ## Authorization Code Grant
//!
//! This is the most common OAuth2 flow.
//!
//! ```no_run
//! use oauth2::*;
//! use url::Url;
//!
//! pub struct ReceivedCode {
//! pub code: AuthorizationCode,
//! pub state: State,
//! }
//!
//! # async fn listen_for_code(port: u32) -> Result<ReceivedCode, Box<dyn std::error::Error>> { todo!() }
//! # #[tokio::main]
//! # async fn main() -> Result<(), Box<dyn std::error::Error>> {
//! let reqwest_client = reqwest::Client::new();
//!
//! // Create an OAuth2 client by specifying the client ID, client secret,
//! // authorization URL and token URL.
//! let mut client = Client::new(
//! "client_id",
//! Url::parse("http://authorize")?,
//! Url::parse("http://token")?
//! );
//!
//! client.set_client_secret("client_secret");
//! // Set the URL the user will be redirected to after the authorization
//! // process.
//! client.set_redirect_url(Url::parse("http://redirect")?);
//! // Set the desired scopes.
//! client.add_scope("read");
//! client.add_scope("write");
//!
//! // Generate the full authorization URL.
//! let state = State::new_random();
//! let auth_url = client.authorize_url(&state);
//!
//! // This is the URL you should redirect the user to, in order to trigger the
//! // authorization process.
//! println!("Browse to: {}", auth_url);
//!
//! // Once the user has been redirected to the redirect URL, you'll have the
//! // access code. For security reasons, your code should verify that the
//! // `state` parameter returned by the server matches `state`.
//! let received: ReceivedCode = listen_for_code(8080).await?;
//!
//! if received.state != state {
//! panic!("CSRF token mismatch :(");
//! }
//!
//! // Now you can trade it for an access token.
//! let token = client.exchange_code(received.code)
//! .with_client(&reqwest_client)
//! .execute::<StandardToken>()
//! .await?;
//!
//! # Ok(())
//! # }
//! ```
//!
//! <br>
//!
//! ## Implicit Grant
//!
//! This flow fetches an access token directly from the authorization endpoint.
//!
//! Be sure to understand the security implications of this flow before using
//! it. In most cases the Authorization Code Grant flow above is preferred to
//! the Implicit Grant flow.
//!
//! ```no_run
//! use oauth2::*;
//! use url::Url;
//!
//! pub struct ReceivedCode {
//! pub code: AuthorizationCode,
//! pub state: State,
//! }
//!
//! # async fn get_code() -> Result<ReceivedCode, Box<dyn std::error::Error>> { todo!() }
//! # #[tokio::main]
//! # async fn main() -> Result<(), Box<dyn std::error::Error>> {
//! let mut client = Client::new(
//! "client_id",
//! Url::parse("http://authorize")?,
//! Url::parse("http://token")?
//! );
//!
//! client.set_client_secret("client_secret");
//!
//! // Generate the full authorization URL.
//! let state = State::new_random();
//! let auth_url = client.authorize_url_implicit(&state);
//!
//! // This is the URL you should redirect the user to, in order to trigger the
//! // authorization process.
//! println!("Browse to: {}", auth_url);
//!
//! // Once the user has been redirected to the redirect URL, you'll have the
//! // access code. For security reasons, your code should verify that the
//! // `state` parameter returned by the server matches `state`.
//! let received: ReceivedCode = get_code().await?;
//!
//! if received.state != state {
//! panic!("CSRF token mismatch :(");
//! }
//!
//! # Ok(()) }
//! ```
//!
//! <br>
//!
//! ## Resource Owner Password Credentials Grant
//!
//! You can ask for a *password* access token by calling the
//! `Client::exchange_password` method, while including the username and
//! password.
//!
//! ```no_run
//! use oauth2::*;
//! use url::Url;
//!
//! # #[tokio::main]
//! # async fn main() -> Result<(), Box<dyn std::error::Error>> {
//! let reqwest_client = reqwest::Client::new();
//!
//! let mut client = Client::new(
//! "client_id",
//! Url::parse("http://authorize")?,
//! Url::parse("http://token")?
//! );
//!
//! client.set_client_secret("client_secret");
//! client.add_scope("read");
//!
//! let token = client
//! .exchange_password("user", "pass")
//! .with_client(&reqwest_client)
//! .execute::<StandardToken>()
//! .await?;
//!
//! # Ok(()) }
//! ```
//!
//! <br>
//!
//! ## Client Credentials Grant
//!
//! You can ask for a *client credentials* access token by calling the
//! `Client::exchange_client_credentials` method.
//!
//! ```no_run
//! use oauth2::*;
//! use url::Url;
//!
//! # #[tokio::main]
//! # async fn main() -> Result<(), Box<dyn std::error::Error>> {
//! let reqwest_client = reqwest::Client::new();
//! let mut client = Client::new(
//! "client_id",
//! Url::parse("http://authorize")?,
//! Url::parse("http://token")?
//! );
//!
//! client.set_client_secret("client_secret");
//! client.add_scope("read");
//!
//! let token_result = client.exchange_client_credentials()
//! .with_client(&reqwest_client)
//! .execute::<StandardToken>();
//!
//! # Ok(()) }
//! ```
//!
//! <br>
//!
//! ## Relationship to oauth2-rs
//!
//! This is a fork of [oauth2-rs].
//!
//! The main differences are:
//! * Removal of unnecessary type parameters on Client ([see discussion here]).
//! * Only support one client implementation ([reqwest]).
//! * Remove most newtypes except `Scope` and the secret ones since they made the API harder to use.
//!
//! [RFC 6749]: https://tools.ietf.org/html/rfc6749
//! [Google]: https://github.com/udoprog/async-oauth2/blob/master/examples/src/bin/google.rs
//! [oauth2-rs]: https://github.com/ramosbugs/oauth2-rs
//! [reqwest]: https://docs.rs/reqwest
//! [see discussion here]: https://github.com/ramosbugs/oauth2-rs/issues/44#issuecomment-50158653
//! [Spotify]: https://github.com/udoprog/async-oauth2/blob/master/examples/src/bin/spotify.rs
//! [Twitch]: https://github.com/udoprog/async-oauth2/blob/master/examples/src/bin/twitch.rs
#![deny(missing_docs)]
use std::{borrow::Cow, error, fmt, time::Duration};
use base64::prelude::{Engine as _, BASE64_URL_SAFE_NO_PAD};
use rand::{thread_rng, Rng};
use serde::{Deserialize, Serialize};
use serde_aux::prelude::*;
use sha2::{Digest, Sha256};
use thiserror::Error;
pub use url::Url;
/// Indicates whether requests to the authorization server should use basic authentication or
/// include the parameters in the request body for requests in which either is valid.
///
/// The default AuthType is *BasicAuth*, following the recommendation of
/// [Section 2.3.1 of RFC 6749](https://tools.ietf.org/html/rfc6749#section-2.3.1).
/// [`Client::new`] starts in this mode; use [`Client::set_auth_type`] to override it.
#[derive(Clone, Copy, Debug)]
pub enum AuthType {
    /// The client_id and client_secret will be included as part of the request body.
    RequestBody,
    /// The client_id and client_secret will be included using the basic auth authentication scheme.
    BasicAuth,
}
// Implements `fmt::Debug` for a secret-bearing newtype so the wrapped value is
// never printed: the output is always `TypeName([redacted])`.
macro_rules! redacted_debug {
    ($name:ident) => {
        impl fmt::Debug for $name {
            fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                write!(f, concat!(stringify!($name), "([redacted])"))
            }
        }
    };
}
/// Borrowed newtype plumbing: for a newtype `$name` whose inner value can be
/// borrowed as `$borrowed`, derives `Deref`, `AsRef`, and a borrowing
/// `Cow<'_, $borrowed>` conversion.
macro_rules! borrowed_newtype {
    ($name:ident, $borrowed:ty) => {
        impl std::ops::Deref for $name {
            type Target = $borrowed;
            #[inline]
            fn deref(&self) -> &Self::Target {
                &self.0
            }
        }
        impl<'a> From<&'a $name> for Cow<'a, $borrowed> {
            #[inline]
            fn from(value: &'a $name) -> Cow<'a, $borrowed> {
                Cow::Borrowed(&value.0)
            }
        }
        impl AsRef<$borrowed> for $name {
            #[inline]
            fn as_ref(&self) -> &$borrowed {
                // Goes through Deref, so AsRef and Deref always agree.
                self
            }
        }
    };
}
/// Full newtype plumbing: everything from `borrowed_newtype!`, plus `From`
/// conversions between the newtype and its owned/borrowed representations
/// (e.g. `Scope` <-> `String`, `&str` -> `Scope`).
macro_rules! newtype {
    ($name:ident, $owned:ty, $borrowed:ty) => {
        borrowed_newtype!($name, $borrowed);
        impl<'a> From<&'a $borrowed> for $name {
            #[inline]
            fn from(value: &'a $borrowed) -> Self {
                Self(value.to_owned())
            }
        }
        impl From<$owned> for $name {
            #[inline]
            fn from(value: $owned) -> Self {
                Self(value)
            }
        }
        impl<'a> From<&'a $owned> for $name {
            #[inline]
            fn from(value: &'a $owned) -> Self {
                Self(value.to_owned())
            }
        }
        impl From<$name> for $owned {
            #[inline]
            fn from(value: $name) -> $owned {
                value.0
            }
        }
    };
}
/// Access token scope, as defined by the authorization server.
///
/// Conversions to and from `String`/`&str` are generated by `newtype!`.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)]
pub struct Scope(String);
newtype!(Scope, String, str);
/// Code Challenge used for [PKCE](https://tools.ietf.org/html/rfc7636) protection via the
/// `code_challenge` parameter. Produced from a verifier by
/// [`PkceCodeVerifierS256::code_challenge`].
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)]
pub struct PkceCodeChallengeS256(String);
newtype!(PkceCodeChallengeS256, String, str);
/// Code Challenge Method used for [PKCE](https://tools.ietf.org/html/rfc7636) protection
/// via the `code_challenge_method` parameter (this crate always emits `"S256"`).
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)]
pub struct PkceCodeChallengeMethod(String);
newtype!(PkceCodeChallengeMethod, String, str);
/// Client password issued to the client during the registration process described by
/// [Section 2.2](https://tools.ietf.org/html/rfc6749#section-2.2).
///
/// The `Debug` impl generated by `redacted_debug!` never prints the secret.
#[derive(Clone, Deserialize, Serialize)]
pub struct ClientSecret(String);
redacted_debug!(ClientSecret);
newtype!(ClientSecret, String, str);
/// Value used for [CSRF](https://tools.ietf.org/html/rfc6749#section-10.12) protection
/// via the `state` parameter.
///
/// Holds 16 raw random bytes; the wire format is URL-safe base64 (see the
/// `Serialize`/`Deserialize` impls and [`State::to_base64`]). `Debug` output
/// is redacted.
#[must_use]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct State([u8; 16]);
redacted_debug!(State);
borrowed_newtype!(State, [u8]);
impl State {
/// Generate a new random, base64-encoded 128-bit CSRF token.
pub fn new_random() -> Self {
let mut random_bytes = [0u8; 16];
thread_rng().fill(&mut random_bytes);
State(random_bytes)
}
/// Convert into base64.
pub fn to_base64(&self) -> String {
BASE64_URL_SAFE_NO_PAD.encode(self.0)
}
}
impl serde::Serialize for State {
    // Serializes the state as its URL-safe base64 string form (see
    // `State::to_base64`), the inverse of the `Deserialize` impl.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        self.to_base64().serialize(serializer)
    }
}
impl<'de> serde::Deserialize<'de> for State {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
let bytes = BASE64_URL_SAFE_NO_PAD
.decode(s)
.map_err(serde::de::Error::custom)?;
let mut buf = [0u8; 16];
buf.copy_from_slice(&bytes);
Ok(Self(buf))
}
}
/// Code Verifier used for [PKCE](https://tools.ietf.org/html/rfc7636) protection via the
/// `code_verifier` parameter. The value must have a minimum length of 43 characters and a
/// maximum length of 128 characters. Each character must be ASCII alphanumeric or one of
/// the characters "-" / "." / "_" / "~".
///
/// Create one with [`PkceCodeVerifierS256::new_random`]; no redacted `Debug`
/// impl is provided, so avoid logging values of this type.
#[derive(Deserialize, Serialize)]
pub struct PkceCodeVerifierS256(String);
newtype!(PkceCodeVerifierS256, String, str);
impl PkceCodeVerifierS256 {
/// Generate a new random, base64-encoded code verifier.
pub fn new_random() -> Self {
PkceCodeVerifierS256::new_random_len(32)
}
/// Generate a new random, base64-encoded code verifier.
///
/// # Arguments
///
/// * `num_bytes` - Number of random bytes to generate, prior to base64-encoding.
/// The value must be in the range 32 to 96 inclusive in order to generate a verifier
/// with a suitable length.
pub fn new_random_len(num_bytes: u32) -> Self {
// The RFC specifies that the code verifier must have "a minimum length of 43
// characters and a maximum length of 128 characters".
// This implies 32-96 octets of random data to be base64 encoded.
assert!((32..=96).contains(&num_bytes));
let random_bytes: Vec<u8> = (0..num_bytes).map(|_| thread_rng().gen::<u8>()).collect();
let code = BASE64_URL_SAFE_NO_PAD.encode(random_bytes);
assert!(code.len() >= 43 && code.len() <= 128);
PkceCodeVerifierS256(code)
}
/// Return the code challenge for the code verifier.
pub fn code_challenge(&self) -> PkceCodeChallengeS256 {
let digest = Sha256::digest(self.as_bytes());
PkceCodeChallengeS256::from(BASE64_URL_SAFE_NO_PAD.encode(digest))
}
/// Return the code challenge method for this code verifier.
pub fn code_challenge_method() -> PkceCodeChallengeMethod {
PkceCodeChallengeMethod::from("S256".to_string())
}
/// Return the extension params used for authorize_url.
pub fn authorize_url_params(&self) -> Vec<(&'static str, String)> {
vec![
(
"code_challenge_method",
PkceCodeVerifierS256::code_challenge_method().into(),
),
("code_challenge", self.code_challenge().into()),
]
}
}
/// Authorization code returned from the authorization endpoint.
///
/// `Debug` output is redacted; exchange it for a token with
/// [`Client::exchange_code`].
#[derive(Clone, Deserialize, Serialize, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct AuthorizationCode(String);
redacted_debug!(AuthorizationCode);
newtype!(AuthorizationCode, String, str);
/// Refresh token used to obtain a new access token (if supported by the authorization server).
///
/// `Debug` output is redacted; see [`Client::exchange_refresh_token`].
#[derive(Clone, Deserialize, Serialize, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct RefreshToken(String);
redacted_debug!(RefreshToken);
newtype!(RefreshToken, String, str);
/// Access token returned by the token endpoint and used to access protected resources.
///
/// `Debug` output is redacted to keep tokens out of logs.
#[derive(Clone, Deserialize, Serialize, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct AccessToken(String);
redacted_debug!(AccessToken);
newtype!(AccessToken, String, str);
/// Resource owner's password used directly as an authorization grant to obtain an access
/// token.
// NOTE(review): not referenced by the visible `Client::exchange_password`,
// which takes plain `AsRef<str>` arguments — confirm whether this type is
// still part of the public API or a leftover from the oauth2-rs fork.
pub struct ResourceOwnerPassword(String);
newtype!(ResourceOwnerPassword, String, str);
/// Stores the configuration for an OAuth2 client.
#[derive(Clone, Debug)]
pub struct Client {
    /// OAuth2 client identifier, sent with every request.
    client_id: String,
    /// Optional client secret; when set it accompanies the client id
    /// according to `auth_type`.
    client_secret: Option<ClientSecret>,
    /// Authorization endpoint used to build the `authorize_url*` URLs.
    auth_url: Url,
    /// How credentials are transmitted to the token endpoint
    /// (request body vs. HTTP Basic auth).
    auth_type: AuthType,
    /// Token endpoint used by the `exchange_*` request builders.
    token_url: Url,
    /// Scopes appended to authorization URLs and token requests.
    scopes: Vec<Scope>,
    /// Optional redirect URL, included as `redirect_uri` when present.
    redirect_url: Option<Url>,
}
impl Client {
    /// Initializes an OAuth2 client with the fields common to most OAuth2 flows.
    ///
    /// # Arguments
    ///
    /// * `client_id` - Client ID
    /// * `auth_url` - Authorization endpoint: used by the client to obtain authorization from
    /// the resource owner via user-agent redirection. This URL is used in all standard OAuth2
    /// flows except the [Resource Owner Password Credentials
    /// Grant](https://tools.ietf.org/html/rfc6749#section-4.3) and the
    /// [Client Credentials Grant](https://tools.ietf.org/html/rfc6749#section-4.4).
    /// * `token_url` - Token endpoint: used by the client to exchange an authorization grant
    /// (code) for an access token, typically with client authentication. This URL is used in
    /// all standard OAuth2 flows except the
    /// [Implicit Grant](https://tools.ietf.org/html/rfc6749#section-4.2).
    pub fn new(client_id: impl AsRef<str>, auth_url: Url, token_url: Url) -> Self {
        Client {
            client_id: client_id.as_ref().to_string(),
            client_secret: None,
            auth_url,
            // Default per Section 2.3.1 of RFC 6749; see `set_auth_type`.
            auth_type: AuthType::BasicAuth,
            token_url,
            scopes: Vec::new(),
            redirect_url: None,
        }
    }
    /// Configure the client secret to use.
    pub fn set_client_secret(&mut self, client_secret: impl Into<ClientSecret>) {
        self.client_secret = Some(client_secret.into());
    }
    /// Appends a new scope to the authorization URL.
    pub fn add_scope(&mut self, scope: impl Into<Scope>) {
        self.scopes.push(scope.into());
    }
    /// Configures the type of client authentication used for communicating with the authorization
    /// server.
    ///
    /// The default is to use HTTP Basic authentication, as recommended in
    /// [Section 2.3.1 of RFC 6749](https://tools.ietf.org/html/rfc6749#section-2.3.1).
    pub fn set_auth_type(&mut self, auth_type: AuthType) {
        self.auth_type = auth_type;
    }
    /// Sets the redirect URL used by the authorization endpoint.
    pub fn set_redirect_url(&mut self, redirect_url: Url) {
        self.redirect_url = Some(redirect_url);
    }
    /// Produces the full authorization URL used by the
    /// [Authorization Code Grant](https://tools.ietf.org/html/rfc6749#section-4.1)
    /// flow, which is the most common OAuth2 flow.
    ///
    /// # Arguments
    ///
    /// * `state` - A state value to include in the request. The authorization
    /// server includes this value when redirecting the user-agent back to the
    /// client.
    ///
    /// # Security Warning
    ///
    /// Callers should use a fresh, unpredictable `state` for each authorization
    /// request and verify that this value matches the `state` parameter passed
    /// by the authorization server to the redirect URI. Doing so mitigates
    /// [Cross-Site Request Forgery](https://tools.ietf.org/html/rfc6749#section-10.12)
    /// attacks.
    pub fn authorize_url(&self, state: &State) -> Url {
        self.authorize_url_impl("code", state)
    }
    /// Produces the full authorization URL used by the
    /// [Implicit Grant](https://tools.ietf.org/html/rfc6749#section-4.2) flow.
    ///
    /// # Arguments
    ///
    /// * `state` - A state value to include in the request. The authorization
    /// server includes this value when redirecting the user-agent back to the
    /// client.
    ///
    /// # Security Warning
    ///
    /// Callers should use a fresh, unpredictable `state` for each authorization request and verify
    /// that this value matches the `state` parameter passed by the authorization server to the
    /// redirect URI. Doing so mitigates
    /// [Cross-Site Request Forgery](https://tools.ietf.org/html/rfc6749#section-10.12)
    /// attacks.
    pub fn authorize_url_implicit(&self, state: &State) -> Url {
        self.authorize_url_impl("token", state)
    }
    // Shared builder for both authorization URLs; `response_type` is "code"
    // for the Authorization Code Grant and "token" for the Implicit Grant.
    fn authorize_url_impl(&self, response_type: &str, state: &State) -> Url {
        // Scopes are space-delimited per RFC 6749 Section 3.3.
        let scopes = self
            .scopes
            .iter()
            .map(|s| s.to_string())
            .collect::<Vec<_>>()
            .join(" ");
        let mut url = self.auth_url.clone();
        {
            let mut query = url.query_pairs_mut();
            query.append_pair("response_type", response_type);
            query.append_pair("client_id", &self.client_id);
            if let Some(ref redirect_url) = self.redirect_url {
                query.append_pair("redirect_uri", redirect_url.as_str());
            }
            if !scopes.is_empty() {
                query.append_pair("scope", &scopes);
            }
            query.append_pair("state", &state.to_base64());
        }
        url
    }
    /// Exchanges a code produced by a successful authorization process with an access token.
    ///
    /// Acquires ownership of the `code` because authorization codes may only be used to retrieve
    /// an access token from the authorization server.
    ///
    /// See <https://tools.ietf.org/html/rfc6749#section-4.1.3>
    pub fn exchange_code(&self, code: impl Into<AuthorizationCode>) -> Request<'_> {
        let code = code.into();
        self.request_token()
            .param("grant_type", "authorization_code")
            .param("code", code.to_string())
    }
    /// Requests an access token for the *password* grant type.
    ///
    /// See <https://tools.ietf.org/html/rfc6749#section-4.3.2>
    pub fn exchange_password(
        &self,
        username: impl AsRef<str>,
        password: impl AsRef<str>,
    ) -> Request<'_> {
        let username = username.as_ref();
        let password = password.as_ref();
        let mut builder = self
            .request_token()
            .param("grant_type", "password")
            .param("username", username.to_string())
            .param("password", password.to_string());
        // Generate the space-delimited scopes String before initializing params so that it has
        // a long enough lifetime.
        if !self.scopes.is_empty() {
            let scopes = self
                .scopes
                .iter()
                .map(|s| s.to_string())
                .collect::<Vec<_>>()
                .join(" ");
            builder = builder.param("scope", scopes);
        }
        builder
    }
    /// Requests an access token for the *client credentials* grant type.
    ///
    /// See <https://tools.ietf.org/html/rfc6749#section-4.4.2>
    pub fn exchange_client_credentials(&self) -> Request<'_> {
        let mut builder = self
            .request_token()
            .param("grant_type", "client_credentials");
        // Generate the space-delimited scopes String before initializing params so that it has
        // a long enough lifetime.
        if !self.scopes.is_empty() {
            let scopes = self
                .scopes
                .iter()
                .map(|s| s.to_string())
                .collect::<Vec<_>>()
                .join(" ");
            // Fix: the request parameter is named `scope` (not `scopes`) per
            // RFC 6749 Sections 3.3 and 4.4.2, matching `exchange_password`.
            builder = builder.param("scope", scopes);
        }
        builder
    }
    /// Exchanges a refresh token for an access token
    ///
    /// See <https://tools.ietf.org/html/rfc6749#section-6>
    pub fn exchange_refresh_token(&self, refresh_token: &RefreshToken) -> Request<'_> {
        self.request_token()
            .param("grant_type", "refresh_token")
            .param("refresh_token", refresh_token.to_string())
    }
    /// Construct a request builder for the token URL.
    fn request_token(&self) -> Request<'_> {
        Request {
            token_url: &self.token_url,
            auth_type: self.auth_type,
            client_id: &self.client_id,
            client_secret: self.client_secret.as_ref(),
            redirect_url: self.redirect_url.as_ref(),
            params: vec![],
        }
    }
}
/// A request wrapped in a client, ready to be executed.
pub struct ClientRequest<'a> {
    /// The assembled token request (endpoint, credentials, parameters).
    request: Request<'a>,
    /// Borrowed reqwest client used to perform the HTTP call.
    client: &'a reqwest::Client,
}
impl<'a> ClientRequest<'a> {
    /// Execute the token request.
    ///
    /// POSTs the assembled parameters to the token endpoint as a URL-encoded
    /// form body and deserializes the JSON response into `T` (typically
    /// [`StandardToken`]).
    ///
    /// # Errors
    ///
    /// * `ExecuteError::RequestError` - sending the request or reading the
    ///   body failed.
    /// * `ExecuteError::EmptyResponse` - the server returned an empty body.
    /// * `ExecuteError::ErrorResponse` - a non-success status whose body
    ///   parsed as an OAuth2 error document.
    /// * `ExecuteError::BadResponse` - a body that could not be parsed (as
    ///   `T` on success, or as an error document on failure).
    pub async fn execute<T>(self) -> Result<T, ExecuteError>
    where
        T: for<'de> Deserialize<'de>,
    {
        use reqwest::{header, Method};
        let token_url = self.request.token_url;
        let mut request = self.client.request(Method::POST, &token_url.to_string());
        // Section 5.1 of RFC 6749 (https://tools.ietf.org/html/rfc6749#section-5.1) only permits
        // JSON responses for this request. Some providers such as GitHub have off-spec behavior
        // and not only support different response formats, but have non-JSON defaults. Explicitly
        // request JSON here.
        request = request.header(
            header::ACCEPT,
            header::HeaderValue::from_static(CONTENT_TYPE_JSON),
        );
        // Build the form body; how the client credentials travel depends on
        // the configured AuthType.
        let request = {
            let mut form = url::form_urlencoded::Serializer::new(String::new());
            // FIXME: add support for auth extensions? e.g., client_secret_jwt and private_key_jwt
            match self.request.auth_type {
                AuthType::RequestBody => {
                    form.append_pair("client_id", self.request.client_id);
                    if let Some(client_secret) = self.request.client_secret {
                        form.append_pair("client_secret", client_secret);
                    }
                }
                AuthType::BasicAuth => {
                    // Section 2.3.1 of RFC 6749 requires separately url-encoding the id and secret
                    // before using them as HTTP Basic auth username and password. Note that this is
                    // not standard for ordinary Basic auth, so curl won't do it for us.
                    let username = url_encode(self.request.client_id);
                    let password = self
                        .request
                        .client_secret
                        .map(|client_secret| url_encode(client_secret));
                    request = request.basic_auth(&username, password.as_ref());
                }
            }
            // Extra parameters added via `Request::param` (grant_type, code, ...).
            for (key, value) in self.request.params {
                form.append_pair(key.as_ref(), value.as_ref());
            }
            if let Some(redirect_url) = &self.request.redirect_url {
                form.append_pair("redirect_uri", redirect_url.as_str());
            }
            request = request.header(
                header::CONTENT_TYPE,
                header::HeaderValue::from_static("application/x-www-form-urlencoded"),
            );
            request.body(form.finish().into_bytes())
        };
        let res = request
            .send()
            .await
            .map_err(|error| ExecuteError::RequestError { error })?;
        // Capture the status before consuming the response body.
        let status = res.status();
        let body = res
            .bytes()
            .await
            .map_err(|error| ExecuteError::RequestError { error })?;
        if body.is_empty() {
            return Err(ExecuteError::EmptyResponse { status });
        }
        // On a non-success status, try to surface a structured OAuth2 error;
        // fall back to BadResponse with the raw body when that fails too.
        if !status.is_success() {
            let error = match serde_json::from_slice::<ErrorResponse>(body.as_ref()) {
                Ok(error) => error,
                Err(error) => {
                    return Err(ExecuteError::BadResponse {
                        status,
                        error,
                        body,
                    });
                }
            };
            return Err(ExecuteError::ErrorResponse { status, error });
        }
        return serde_json::from_slice(body.as_ref()).map_err(|error| ExecuteError::BadResponse {
            status,
            error,
            body,
        });
        // Per Section 2.3.1 of RFC 6749: url-encode basic-auth components.
        fn url_encode(s: &str) -> String {
            url::form_urlencoded::byte_serialize(s.as_bytes()).collect::<String>()
        }
        const CONTENT_TYPE_JSON: &str = "application/json";
    }
}
/// A token request that is in progress.
pub struct Request<'a> {
    /// Token endpoint the request will be POSTed to.
    token_url: &'a Url,
    /// How the client credentials are transmitted (body vs. HTTP Basic auth).
    auth_type: AuthType,
    /// Client identifier, borrowed from the owning `Client`.
    client_id: &'a str,
    /// Optional client secret, borrowed from the owning `Client`.
    client_secret: Option<&'a ClientSecret>,
    /// Configured redirect URL.
    redirect_url: Option<&'a Url>,
    /// Extra parameters (grant_type, code, scope, ...) added via `param`.
    params: Vec<(Cow<'a, str>, Cow<'a, str>)>,
}
impl<'a> Request<'a> {
    /// Set an additional request param.
    pub fn param(mut self, key: impl Into<Cow<'a, str>>, value: impl Into<Cow<'a, str>>) -> Self {
        let entry = (key.into(), value.into());
        self.params.push(entry);
        self
    }
    /// Wrap the request in a client.
    pub fn with_client(self, client: &'a reqwest::Client) -> ClientRequest<'a> {
        ClientRequest {
            request: self,
            client,
        }
    }
}
/// Basic OAuth2 authorization token types.
///
/// Serialization emits the lowercase variant names (`serde(rename_all)`);
/// deserialization (custom impl below) is case-insensitive.
#[derive(Clone, Debug, PartialEq, Serialize)]
#[serde(rename_all = "lowercase")]
pub enum TokenType {
    /// Bearer token
    /// ([OAuth 2.0 Bearer Tokens - RFC 6750](https://tools.ietf.org/html/rfc6750)).
    Bearer,
    /// MAC ([OAuth 2.0 Message Authentication Code (MAC)
    /// Tokens](https://tools.ietf.org/html/draft-ietf-oauth-v2-http-mac-05)).
    Mac,
}
impl<'de> serde::de::Deserialize<'de> for TokenType {
    // Case-insensitive deserialization: the token_type field is case
    // insensitive, so normalize to lowercase before matching.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::de::Deserializer<'de>,
    {
        /// Error reported for token types other than `bearer` / `mac`.
        #[derive(Debug)]
        struct UnknownVariantError(String);
        impl fmt::Display for UnknownVariantError {
            fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
                write!(fmt, "unsupported variant: {}", self.0)
            }
        }
        impl error::Error for UnknownVariantError {}
        let raw = String::deserialize(deserializer)?.to_lowercase();
        match raw.as_str() {
            "bearer" => Ok(TokenType::Bearer),
            "mac" => Ok(TokenType::Mac),
            other => Err(serde::de::Error::custom(UnknownVariantError(
                other.to_string(),
            ))),
        }
    }
}
/// Common methods shared by all OAuth2 token implementations.
///
/// The methods in this trait are defined in
/// [Section 5.1 of RFC 6749](https://tools.ietf.org/html/rfc6749#section-5.1). This trait exists
/// separately from the `StandardToken` struct to support customization by clients,
/// such as supporting interoperability with non-standards-compliant OAuth2 providers.
pub trait Token
where
    Self: for<'a> serde::de::Deserialize<'a>,
{
    /// REQUIRED. The access token issued by the authorization server.
    fn access_token(&self) -> &AccessToken;
    /// REQUIRED. The type of the token issued as described in
    /// [Section 7.1](https://tools.ietf.org/html/rfc6749#section-7.1).
    /// Value is case insensitive and deserialized to the generic `TokenType` parameter.
    fn token_type(&self) -> &TokenType;
    /// RECOMMENDED. The lifetime in seconds of the access token. For example, the value 3600
    /// denotes that the access token will expire in one hour from the time the response was
    /// generated. If omitted, the authorization server SHOULD provide the expiration time via
    /// other means or document the default value.
    fn expires_in(&self) -> Option<Duration>;
    /// OPTIONAL. The refresh token, which can be used to obtain new access tokens using the same
    /// authorization grant as described in
    /// [Section 6](https://tools.ietf.org/html/rfc6749#section-6).
    fn refresh_token(&self) -> Option<&RefreshToken>;
    /// OPTIONAL, if identical to the scope requested by the client; otherwise, REQUIRED. The
    /// scope of the access token as described by
    /// [Section 3.3](https://tools.ietf.org/html/rfc6749#section-3.3). If included in the response,
    /// this space-delimited field is parsed into a `Vec` of individual scopes. If omitted from
    /// the response, this field is `None`.
    fn scopes(&self) -> Option<&Vec<Scope>>;
}
/// Standard OAuth2 token response.
///
/// This struct includes the fields defined in
/// [Section 5.1 of RFC 6749](https://tools.ietf.org/html/rfc6749#section-5.1).
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
pub struct StandardToken {
    // The issued access token (required by the RFC).
    access_token: AccessToken,
    // Token type (required); deserialized case-insensitively.
    token_type: TokenType,
    // Lifetime in seconds; tolerates servers that send it as a string.
    #[serde(
        skip_serializing_if = "Option::is_none",
        deserialize_with = "deserialize_option_number_from_string"
    )]
    expires_in: Option<u64>,
    // Optional refresh token.
    #[serde(skip_serializing_if = "Option::is_none")]
    refresh_token: Option<RefreshToken>,
    // Wire field is the space-delimited `scope` string; parsed to a Vec here.
    #[serde(rename = "scope")]
    #[serde(deserialize_with = "helpers::deserialize_space_delimited_vec")]
    #[serde(serialize_with = "helpers::serialize_space_delimited_vec")]
    #[serde(skip_serializing_if = "Option::is_none")]
    #[serde(default)]
    scopes: Option<Vec<Scope>>,
}
impl Token for StandardToken {
    /// REQUIRED. The access token issued by the authorization server.
    fn access_token(&self) -> &AccessToken {
        &self.access_token
    }
    /// REQUIRED. The type of the token issued as described in
    /// [Section 7.1](https://tools.ietf.org/html/rfc6749#section-7.1).
    /// Value is case insensitive and deserialized to the generic `TokenType` parameter.
    fn token_type(&self) -> &TokenType {
        &self.token_type
    }
    /// RECOMMENDED. The lifetime in seconds of the access token. For example, the value 3600
    /// denotes that the access token will expire in one hour from the time the response was
    /// generated. If omitted, the authorization server SHOULD provide the expiration time via
    /// other means or document the default value.
    fn expires_in(&self) -> Option<Duration> {
        self.expires_in.map(Duration::from_secs)
    }
    /// OPTIONAL. The refresh token, which can be used to obtain new access tokens using the same
    /// authorization grant as described in
    /// [Section 6](https://tools.ietf.org/html/rfc6749#section-6).
    fn refresh_token(&self) -> Option<&RefreshToken> {
        self.refresh_token.as_ref()
    }
    /// OPTIONAL, if identical to the scope requested by the client; otherwise, REQUIRED. The
    /// scope of the access token as described by
    /// [Section 3.3](https://tools.ietf.org/html/rfc6749#section-3.3). If included in the response,
    /// this space-delimited field is parsed into a `Vec` of individual scopes. If omitted from
    /// the response, this field is `None`.
    fn scopes(&self) -> Option<&Vec<Scope>> {
        self.scopes.as_ref()
    }
}
/// These error types are defined in
/// [Section 5.2 of RFC 6749](https://tools.ietf.org/html/rfc6749#section-5.2).
#[derive(Debug, Clone, Deserialize, PartialEq, Eq, Serialize)]
#[serde(rename_all = "snake_case")]
pub enum ErrorField {
    /// The request is missing a required parameter, includes an unsupported parameter value
    /// (other than grant type), repeats a parameter, includes multiple credentials, utilizes
    /// more than one mechanism for authenticating the client, or is otherwise malformed.
    InvalidRequest,
    /// Client authentication failed (e.g., unknown client, no client authentication included,
    /// or unsupported authentication method).
    InvalidClient,
    /// The provided authorization grant (e.g., authorization code, resource owner credentials)
    /// or refresh token is invalid, expired, revoked, does not match the redirection URI used
    /// in the authorization request, or was issued to another client.
    InvalidGrant,
    /// The authenticated client is not authorized to use this authorization grant type.
    UnauthorizedClient,
    /// The authorization grant type is not supported by the authorization server.
    UnsupportedGrantType,
    /// The requested scope is invalid, unknown, malformed, or exceeds the scope granted by the
    /// resource owner.
    InvalidScope,
    /// Other error type.
    ///
    /// NOTE(review): as a plain newtype variant, this is not a serde
    /// catch-all under `rename_all = "snake_case"` — an unrecognized error
    /// code string may fail to deserialize rather than land here. Confirm
    /// against the crate's deserialization tests.
    Other(String),
}
impl fmt::Display for ErrorField {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
use self::ErrorField::*;
match *self {
InvalidRequest => "invalid_request".fmt(fmt),
InvalidClient => "invalid_client".fmt(fmt),
InvalidGrant => "invalid_grant".fmt(fmt),
UnauthorizedClient => "unauthorized_client".fmt(fmt),
UnsupportedGrantType => "unsupported_grant_type".fmt(fmt),
InvalidScope => "invalid_scope".fmt(fmt),
Other(ref value) => value.fmt(fmt),
}
}
}
/// Error response returned by server after requesting an access token.
///
/// The fields in this structure are defined in
/// [Section 5.2 of RFC 6749](https://tools.ietf.org/html/rfc6749#section-5.2).
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
pub struct ErrorResponse {
    /// A single ASCII error code.
    pub error: ErrorField,
    #[serde(default)]
    #[serde(skip_serializing_if = "Option::is_none")]
    /// Human-readable ASCII text providing additional information, used to assist
    /// the client developer in understanding the error that occurred.
    pub error_description: Option<String>,
    #[serde(default)]
    #[serde(skip_serializing_if = "Option::is_none")]
    /// A URI identifying a human-readable web page with information about the error,
    /// used to provide the client developer with additional information about the error.
    /// Kept as a plain `String` (not parsed into a `Url`).
    pub error_uri: Option<String>,
}
impl fmt::Display for ErrorResponse {
    /// Renders as `<error>[: <description>][ / See <uri>]`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.error)?;
        if let Some(description) = &self.error_description {
            write!(f, ": {}", description)?;
        }
        if let Some(uri) = &self.error_uri {
            write!(f, " / See {}", uri)?;
        }
        Ok(())
    }
}
impl error::Error for ErrorResponse {}
/// Errors when creating new clients.
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum NewClientError {
    /// Error creating underlying reqwest client.
    #[error("Failed to construct client")]
    Reqwest(#[source] reqwest::Error),
}
// Allows `?` on reqwest construction errors inside client constructors.
impl From<reqwest::Error> for NewClientError {
    fn from(error: reqwest::Error) -> Self {
        Self::Reqwest(error)
    }
}
/// Error encountered while requesting access token.
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum ExecuteError {
    /// A client error that occurred.
    #[error("reqwest error")]
    RequestError {
        /// Original request error.
        #[source]
        error: reqwest::Error,
    },
    /// Failed to parse server response. Parse errors may occur while parsing either successful
    /// or error responses.
    #[error("malformed server response: {status}")]
    BadResponse {
        /// The status code associated with the response.
        status: http::status::StatusCode,
        /// The body that couldn't be deserialized.
        body: bytes::Bytes,
        /// Deserialization error.
        #[source]
        error: serde_json::error::Error,
    },
    /// Response with non-successful status code and a body that could be
    /// successfully deserialized as an [ErrorResponse].
    #[error("request resulted in error response: {status}")]
    ErrorResponse {
        /// The status code associated with the response.
        status: http::status::StatusCode,
        /// The deserialized response.
        #[source]
        error: ErrorResponse,
    },
    /// Server response was empty.
    #[error("request resulted in empty response: {status}")]
    EmptyResponse {
        /// The status code associated with the empty response.
        status: http::status::StatusCode,
    },
}
impl ExecuteError {
    /// Access the status code of the error if available.
    ///
    /// For `RequestError` this defers to reqwest (which may not have one);
    /// all response-shaped variants always carry a status.
    pub fn status(&self) -> Option<http::status::StatusCode> {
        match self {
            Self::RequestError { error, .. } => error.status(),
            Self::BadResponse { status, .. }
            | Self::ErrorResponse { status, .. }
            | Self::EmptyResponse { status, .. } => Some(*status),
        }
    }

    /// The original response body if available.
    ///
    /// Only `BadResponse` retains the raw body.
    pub fn body(&self) -> Option<&bytes::Bytes> {
        if let Self::BadResponse { body, .. } = self {
            Some(body)
        } else {
            None
        }
    }
}
/// Helper methods used by OAuth2 implementations/extensions.
pub mod helpers {
    use serde::{Deserialize, Deserializer, Serializer};
    use url::Url;

    /// Serde space-delimited string deserializer for a `Vec<String>`.
    ///
    /// This function splits a JSON string at each space character into a `Vec<String>` .
    ///
    /// # Example
    ///
    /// In example below, the JSON value `{"items": "foo bar baz"}` would deserialize to:
    ///
    /// ```
    /// # struct GroceryBasket {
    /// #     items: Vec<String>,
    /// # }
    /// # fn main() {
    /// GroceryBasket {
    ///     items: vec!["foo".to_string(), "bar".to_string(), "baz".to_string()]
    /// };
    /// # }
    /// ```
    ///
    /// Note: this example does not compile automatically due to
    /// [Rust issue #29286](https://github.com/rust-lang/rust/issues/29286).
    ///
    /// ```
    /// # /*
    /// use serde::Deserialize;
    ///
    /// #[derive(Deserialize)]
    /// struct GroceryBasket {
    ///     #[serde(deserialize_with = "helpers::deserialize_space_delimited_vec")]
    ///     items: Vec<String>,
    /// }
    /// # */
    /// ```
    pub fn deserialize_space_delimited_vec<'de, T, D>(deserializer: D) -> Result<T, D::Error>
    where
        T: Default + Deserialize<'de>,
        D: Deserializer<'de>,
    {
        use serde::de::Error;
        use serde_json::Value;
        if let Some(space_delimited) = Option::<String>::deserialize(deserializer)? {
            // NOTE(review): an empty input string yields a single empty-string
            // entry (`"".split(' ')` produces `[""]`), not an empty Vec —
            // confirm callers treat an empty `scope` value as intended.
            let entries = space_delimited
                .split(' ')
                .map(|s| Value::String(s.to_string()))
                .collect();
            // Round-trip through a serde_json array so any `Deserialize`
            // target (not just Vec<String>) can be produced.
            return T::deserialize(Value::Array(entries)).map_err(Error::custom);
        }
        // If the JSON value is null, use the default value.
        Ok(T::default())
    }

    /// Serde space-delimited string serializer for an `Option<Vec<String>>`.
    ///
    /// This function serializes a string vector into a single space-delimited string.
    /// If `string_vec_opt` is `None`, the function serializes it as `None` (e.g., `null`
    /// in the case of JSON serialization).
    pub fn serialize_space_delimited_vec<T, S>(
        vec_opt: &Option<Vec<T>>,
        serializer: S,
    ) -> Result<S::Ok, S::Error>
    where
        T: AsRef<str>,
        S: Serializer,
    {
        if let Some(ref vec) = *vec_opt {
            let space_delimited = vec.iter().map(|s| s.as_ref()).collect::<Vec<_>>().join(" ");
            serializer.serialize_str(&space_delimited)
        } else {
            serializer.serialize_none()
        }
    }

    /// Serde string deserializer for a `Url`.
    pub fn deserialize_url<'de, D>(deserializer: D) -> Result<Url, D::Error>
    where
        D: Deserializer<'de>,
    {
        use serde::de::Error;
        let url_str = String::deserialize(deserializer)?;
        Url::parse(url_str.as_ref()).map_err(Error::custom)
    }

    /// Serde string serializer for a `Url`.
    pub fn serialize_url<S>(url: &Url, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        serializer.serialize_str(url.as_str())
    }
}
| true |
a0c54f26e3647372623a80e0fc1f1288a081b999
|
Rust
|
nkotlyarov/zenith
|
/zenith_utils/src/sock_split.rs
|
UTF-8
| 6,111 | 2.90625 | 3 |
[
"Apache-2.0",
"PostgreSQL"
] |
permissive
|
use std::{
io::{self, BufReader, Write},
net::{Shutdown, TcpStream},
sync::Arc,
};
use rustls::Session;
/// Wrapper supporting reads of a shared TcpStream.
pub struct ArcTcpRead(Arc<TcpStream>);

impl io::Read for ArcTcpRead {
    fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
        // Goes through `impl Read for &TcpStream`, so the Arc-shared socket
        // can be read without exclusive ownership of the stream.
        (&*self.0).read(buf)
    }
}

impl std::ops::Deref for ArcTcpRead {
    type Target = TcpStream;

    fn deref(&self) -> &Self::Target {
        self.0.deref()
    }
}
/// Wrapper around a TCP Stream supporting buffered reads.
///
/// Only the *read* side is buffered; writes bypass the `BufReader` and go
/// straight to the underlying `TcpStream`.
pub struct BufStream(BufReader<ArcTcpRead>);

impl io::Read for BufStream {
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        self.0.read(buf)
    }
}

impl io::Write for BufStream {
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        // Unbuffered write directly to the socket.
        self.get_ref().write(buf)
    }

    fn flush(&mut self) -> io::Result<()> {
        self.get_ref().flush()
    }
}

impl BufStream {
    /// Unwrap into the internal BufReader.
    fn into_reader(self) -> BufReader<ArcTcpRead> {
        self.0
    }

    /// Returns a reference to the underlying TcpStream.
    fn get_ref(&self) -> &TcpStream {
        &*self.0.get_ref().0
    }
}
/// Owned read half of a split [`BidiStream`]: either the buffered TCP reader
/// or the rustls read half.
pub enum ReadStream {
    Tcp(BufReader<ArcTcpRead>),
    Tls(rustls_split::ReadHalf<rustls::ServerSession>),
}

impl io::Read for ReadStream {
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        match self {
            ReadStream::Tcp(buffered) => buffered.read(buf),
            ReadStream::Tls(half) => half.read(buf),
        }
    }
}

impl ReadStream {
    /// Shuts down the underlying socket in the requested direction(s).
    pub fn shutdown(&mut self, how: Shutdown) -> io::Result<()> {
        match self {
            ReadStream::Tcp(buffered) => buffered.get_ref().shutdown(how),
            ReadStream::Tls(half) => half.shutdown(how),
        }
    }
}
/// Owned write half of a split [`BidiStream`]: either the shared TCP socket
/// or the rustls write half.
pub enum WriteStream {
    Tcp(Arc<TcpStream>),
    Tls(rustls_split::WriteHalf<rustls::ServerSession>),
}

impl io::Write for WriteStream {
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        match self {
            // Writes go through `impl Write for &TcpStream` on the shared socket.
            WriteStream::Tcp(socket) => socket.as_ref().write(buf),
            WriteStream::Tls(half) => half.write(buf),
        }
    }

    fn flush(&mut self) -> io::Result<()> {
        match self {
            WriteStream::Tcp(socket) => socket.as_ref().flush(),
            WriteStream::Tls(half) => half.flush(),
        }
    }
}

impl WriteStream {
    /// Shuts down the underlying socket in the requested direction(s).
    pub fn shutdown(&mut self, how: Shutdown) -> io::Result<()> {
        match self {
            WriteStream::Tcp(socket) => socket.shutdown(how),
            WriteStream::Tls(half) => half.shutdown(how),
        }
    }
}
/// Pairing of a buffered TCP stream with its server-side TLS session state.
pub struct TlsBoxed {
    stream: BufStream,
    session: rustls::ServerSession,
}

impl TlsBoxed {
    // Borrows both halves as a `rustls::Stream`, which drives TLS record
    // processing on top of the buffered socket.
    fn rustls_stream(&mut self) -> rustls::Stream<rustls::ServerSession, BufStream> {
        rustls::Stream::new(&mut self.session, &mut self.stream)
    }
}

/// A bi-directional stream: plain TCP, or TLS layered on top of it.
pub enum BidiStream {
    Tcp(BufStream),
    /// This variant is boxed, because [`rustls::ServerSession`] is quite larger than [`BufStream`].
    Tls(Box<TlsBoxed>),
}
impl BidiStream {
    /// Wraps a plain `TcpStream` in a (read-)buffered bidirectional stream.
    pub fn from_tcp(stream: TcpStream) -> Self {
        Self::Tcp(BufStream(BufReader::new(ArcTcpRead(Arc::new(stream)))))
    }

    /// Shuts down the underlying socket.
    ///
    /// For TLS write/both shutdown this first sends `close_notify` and
    /// flushes it so the peer sees a clean TLS closure; a flush error is
    /// preserved even though the TCP shutdown is still attempted.
    pub fn shutdown(&mut self, how: Shutdown) -> io::Result<()> {
        match self {
            Self::Tcp(stream) => stream.get_ref().shutdown(how),
            Self::Tls(tls_boxed) => {
                if how == Shutdown::Read {
                    // Read-only shutdown needs no TLS close_notify.
                    tls_boxed.stream.get_ref().shutdown(how)
                } else {
                    tls_boxed.session.send_close_notify();
                    let res = tls_boxed.rustls_stream().flush();
                    tls_boxed.stream.get_ref().shutdown(how)?;
                    res
                }
            }
        }
    }

    /// Split the bi-directional stream into two owned read and write halves.
    pub fn split(self) -> (ReadStream, WriteStream) {
        match self {
            Self::Tcp(stream) => {
                let reader = stream.into_reader();
                let stream: Arc<TcpStream> = reader.get_ref().0.clone();
                (ReadStream::Tcp(reader), WriteStream::Tcp(stream))
            }
            Self::Tls(tls_boxed) => {
                let reader = tls_boxed.stream.into_reader();
                // Carry over any bytes already buffered by the BufReader so
                // they aren't lost across the split.
                let buffer_data = reader.buffer().to_owned();
                let read_buf_cfg = rustls_split::BufCfg::with_data(buffer_data, 8192);
                let write_buf_cfg = rustls_split::BufCfg::with_capacity(8192);
                // TODO would be nice to avoid the Arc here
                // NOTE(review): panics if any other Arc clone of the socket is
                // still alive; appears to assume the TLS path never clones it
                // — confirm.
                let socket = Arc::try_unwrap(reader.into_inner().0).unwrap();
                let (read_half, write_half) =
                    rustls_split::split(socket, tls_boxed.session, read_buf_cfg, write_buf_cfg);
                (ReadStream::Tls(read_half), WriteStream::Tls(write_half))
            }
        }
    }

    /// Upgrades a plain TCP stream to TLS by driving the handshake to
    /// completion. Fails with `InvalidInput` if TLS was already started.
    pub fn start_tls(self, mut session: rustls::ServerSession) -> io::Result<Self> {
        match self {
            Self::Tcp(mut stream) => {
                // complete_io runs the handshake; afterwards the session must
                // no longer be handshaking.
                session.complete_io(&mut stream)?;
                assert!(!session.is_handshaking());
                Ok(Self::Tls(Box::new(TlsBoxed { stream, session })))
            }
            Self::Tls { .. } => Err(io::Error::new(
                io::ErrorKind::InvalidInput,
                "TLS is already started on this stream",
            )),
        }
    }
}
// I/O on an unsplit stream: plain TCP goes through the buffered wrapper,
// TLS goes through a temporary rustls::Stream that encrypts/decrypts.
impl io::Read for BidiStream {
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        match self {
            Self::Tcp(stream) => stream.read(buf),
            Self::Tls(tls_boxed) => tls_boxed.rustls_stream().read(buf),
        }
    }
}

impl io::Write for BidiStream {
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        match self {
            Self::Tcp(stream) => stream.write(buf),
            Self::Tls(tls_boxed) => tls_boxed.rustls_stream().write(buf),
        }
    }

    fn flush(&mut self) -> io::Result<()> {
        match self {
            Self::Tcp(stream) => stream.flush(),
            Self::Tls(tls_boxed) => tls_boxed.rustls_stream().flush(),
        }
    }
}
| true |
86a2e3f4370a7837c6c64b16cab9710f03d59bb7
|
Rust
|
p-alik/wundergraph
|
/wundergraph/tests/mutations/delete.rs
|
UTF-8
| 3,202 | 2.546875 | 3 |
[
"Apache-2.0",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use crate::helper::*;
use wundergraph_example::MyContext;
/// Deleting an existing hero removes exactly one row.
///
/// Queries the seeded hero list, deletes id 5 ("Wilhuff Tarkin"), then
/// re-queries to confirm only that row disappeared. Inline snapshots pin the
/// exact `[data, errors]` JSON payloads.
#[test]
fn delete_existing() {
    let (schema, pool) = get_example_schema();
    let ctx = MyContext::new(pool.get().unwrap());
    // Baseline: all five seeded heros are present.
    let res = execute_query(
        &schema,
        &ctx,
        "
{
    Heros {
        id
        heroName
    }
}
",
    );
    assert!(res.is_ok());
    assert_json_snapshot!(
        res.as_json(), @r###"[
  {
    "Heros": [
      {
        "heroName": "Luke Skywalker",
        "id": 1
      },
      {
        "heroName": "Darth Vader",
        "id": 2
      },
      {
        "heroName": "Han Solo",
        "id": 3
      },
      {
        "heroName": "Leia Organa",
        "id": 4
      },
      {
        "heroName": "Wilhuff Tarkin",
        "id": 5
      }
    ]
  },
  []
]"###
    );
    // Delete hero id 5; the mutation reports one affected row.
    let res = execute_query(
        &schema,
        &ctx,
        r#"
mutation DeleteHero {
    DeleteHero(DeleteHero: {id: 5}) {
        count
    }
}
"#,
    );
    assert!(res.is_ok());
    assert_json_snapshot!(
        res.as_json(), @r###"[
  {
    "DeleteHero": {
      "count": 1
    }
  },
  []
]"###
    );
    // Re-query: only the first four heros remain.
    let res = execute_query(
        &schema,
        &ctx,
        "
{
    Heros {
        id
        heroName
    }
}
",
    );
    assert!(res.is_ok());
    assert_json_snapshot!(
        res.as_json(), @r###"[
  {
    "Heros": [
      {
        "heroName": "Luke Skywalker",
        "id": 1
      },
      {
        "heroName": "Darth Vader",
        "id": 2
      },
      {
        "heroName": "Han Solo",
        "id": 3
      },
      {
        "heroName": "Leia Organa",
        "id": 4
      }
    ]
  },
  []
]"###
    );
}
/// Deleting a non-existent hero is a no-op.
///
/// Same flow as `delete_existing`, but targets id 42 (not in the seed data):
/// the mutation reports zero affected rows and the hero list is unchanged.
#[test]
fn delete_non_existing() {
    let (schema, pool) = get_example_schema();
    let ctx = MyContext::new(pool.get().unwrap());
    // Baseline: all five seeded heros are present.
    let res = execute_query(
        &schema,
        &ctx,
        "
{
    Heros {
        id
        heroName
    }
}
",
    );
    assert!(res.is_ok());
    assert_json_snapshot!(
        res.as_json(), @r###"[
  {
    "Heros": [
      {
        "heroName": "Luke Skywalker",
        "id": 1
      },
      {
        "heroName": "Darth Vader",
        "id": 2
      },
      {
        "heroName": "Han Solo",
        "id": 3
      },
      {
        "heroName": "Leia Organa",
        "id": 4
      },
      {
        "heroName": "Wilhuff Tarkin",
        "id": 5
      }
    ]
  },
  []
]"###
    );
    // Attempt to delete an id that does not exist: zero rows affected.
    let res = execute_query(
        &schema,
        &ctx,
        r#"
mutation DeleteHero {
    DeleteHero(DeleteHero: {id: 42}) {
        count
    }
}
"#,
    );
    assert!(res.is_ok());
    assert_json_snapshot!(
        res.as_json(), @r###"[
  {
    "DeleteHero": {
      "count": 0
    }
  },
  []
]"###
    );
    // Re-query: the hero list is unchanged.
    let res = execute_query(
        &schema,
        &ctx,
        "
{
    Heros {
        id
        heroName
    }
}
",
    );
    assert!(res.is_ok());
    assert_json_snapshot!(
        res.as_json(), @r###"[
  {
    "Heros": [
      {
        "heroName": "Luke Skywalker",
        "id": 1
      },
      {
        "heroName": "Darth Vader",
        "id": 2
      },
      {
        "heroName": "Han Solo",
        "id": 3
      },
      {
        "heroName": "Leia Organa",
        "id": 4
      },
      {
        "heroName": "Wilhuff Tarkin",
        "id": 5
      }
    ]
  },
  []
]"###
    );
}
| true |
777e71beb1a05475c280d0baaacd20506f504535
|
Rust
|
kaz184/rust-comp-snippets
|
/src/sequence01.rs
|
UTF-8
| 3,316 | 3.421875 | 3 |
[] |
no_license
|
#[snippet = "BinarySearch"]
#[doc = "lower,upper are inclusive range"]
pub struct BinarySearch<F> {
    pub p: F,
    pub lower: i64,
    pub upper: i64,
}

#[snippet = "BinarySearch"]
impl<F: Fn(i64) -> bool> BinarySearch<F> {
    /// Returns the smallest index in `[lower, upper]` for which predicate
    /// `p` holds, or `upper + 1` if it holds nowhere. Requires `p` to be
    /// monotone over the range (all-false prefix, all-true suffix).
    #[doc = "O(log(upper-lower))"]
    pub fn lower_bound(&self) -> i64 {
        let lower = self.lower;
        let upper = self.upper;
        assert!(lower <= upper);
        // Virtual sentinels: p(lb) is treated as false, p(ub) as true;
        // neither sentinel index is ever evaluated.
        let mut lb = lower - 1;
        let mut ub = upper + 1;
        while ub - lb > 1 {
            // Overflow-safe midpoint: `(lb + ub) / 2` can overflow i64 when
            // both bounds are large in magnitude. Any midpoint strictly
            // between lb and ub preserves the invariants, so the final
            // answer is unchanged.
            let mid = lb + (ub - lb) / 2;
            if (self.p)(mid) {
                ub = mid;
            } else {
                lb = mid;
            }
        }
        // ub is the first index where p holds (or upper + 1).
        ub
    }
}
/// Exercises `BinarySearch::lower_bound` on a sorted vector with
/// predicates that match in the middle, everywhere, and nowhere.
#[test]
fn test_generic_binary_search() {
    // Indices:        0  1  2  3  4  5  6  7  8
    let data = vec![1, 2, 2, 2, 2, 2, 3, 4, 5];
    let last = data.len() as i64 - 1;

    // First element >= 2 is at index 1.
    let ge_two = BinarySearch {
        p: |i: i64| data[i as usize] >= 2,
        lower: 0,
        upper: last,
    };
    assert_eq!(ge_two.lower_bound(), 1);

    // First element > 2 is at index 6.
    let gt_two = BinarySearch {
        p: |i: i64| data[i as usize] > 2,
        lower: 0,
        upper: last,
    };
    assert_eq!(gt_two.lower_bound(), 6);

    // Predicate holds everywhere: result is the first index.
    let always = BinarySearch {
        p: |i: i64| data[i as usize] >= 0,
        lower: 0,
        upper: last,
    };
    assert_eq!(always.lower_bound(), 0);

    // Predicate holds nowhere: result is one past the last index.
    let never = BinarySearch {
        p: |i: i64| data[i as usize] >= 100,
        lower: 0,
        upper: last,
    };
    assert_eq!(never.lower_bound(), 9);
}
#[snippet = "FTSearch"]
/// Precomputed nearest-false / nearest-true lookups for a fixed predicate
/// over the inclusive index range `[lower, upper]`.
struct FTSearch<F> {
    // f_search[i]: offset (0-based within the range) of the closest index
    // j <= i with p(j) == false, or None if every index up to i is true.
    f_search: Vec<Option<usize>>,
    // t_search[i]: offset of the closest index j >= i with p(j) == true,
    // or the sentinel `n` (one past the range) if none exists.
    t_search: Vec<usize>,
    p: F,
    lower: usize,
}

#[snippet = "FTSearch"]
impl <F: Fn(usize) -> bool> FTSearch<F> {
    /// Builds both lookup tables with two linear scans.
    #[doc = "O(upper-lower)"]
    fn new(p: F, lower: usize, upper: usize) -> FTSearch<F> {
        // Number of indices in the inclusive range.
        let n = upper+1 - lower;
        // Forward scan: remember the most recent false position.
        let mut f_search = vec![None; n];
        let mut f_i = None;
        for i in 0..n {
            if p(i+lower) == false {
                f_i = Some(i);
            }
            f_search[i] = f_i;
        }
        // Backward scan: remember the nearest upcoming true position
        // (sentinel n when there is none to the right).
        let mut t_search = vec![n; n];
        let mut t_i = n;
        for i in (0..n).rev() {
            if p(i+lower) == true {
                t_i = i;
            }
            t_search[i] = t_i;
        }
        Self {
            p: p,
            f_search: f_search,
            t_search: t_search,
            lower: lower,
        }
    }

    #[doc = "including i and find the closest false in the left"]
    fn f_search(&self, i: usize) -> Option<usize> {
        // Translate between absolute indices and range-relative offsets.
        self.f_search[i-self.lower].map(|x| x+self.lower)
    }

    #[doc = "including i and find the closest true in the right"]
    fn t_search(&self, i: usize) -> usize {
        // Returns `upper + 1` (the sentinel) when no true exists to the right.
        self.t_search[i-self.lower] + self.lower
    }
}
/// Checks both lookup directions on a small fixed pattern.
/// Note `t_search(4) == 5`, i.e. `upper + 1` acts as the "no true to the
/// right" sentinel.
#[test]
fn test_ft_search() {
    //              idx:  0     1     2     3    4
    let xs = vec![true,false,false,true,false];
    let ft = FTSearch::new(
        |i: usize| { xs[i] },
        0,
        4,
    );
    assert_eq!(ft.f_search(0), None);    assert_eq!(ft.t_search(0), 0);
    assert_eq!(ft.f_search(1), Some(1)); assert_eq!(ft.t_search(1), 3);
    assert_eq!(ft.f_search(2), Some(2)); assert_eq!(ft.t_search(2), 3);
    assert_eq!(ft.f_search(3), Some(2)); assert_eq!(ft.t_search(3), 3);
    assert_eq!(ft.f_search(4), Some(4)); assert_eq!(ft.t_search(4), 5);
}
| true |
82ab7ad2e368d2f936bcea351e89be95b5c4d6af
|
Rust
|
AlexxNica/hornet
|
/src/client/metric/timer.rs
|
UTF-8
| 4,192 | 3.234375 | 3 |
[
"MIT",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use super::*;
use time;
use time::Tm;
/// A timer metric for tracking elapsed time
///
/// Internally uses a `Metric<i64>` with `Semantics::Instant` and `1` time dimension
pub struct Timer {
    // Accumulates total elapsed time, expressed in `time_scale` units.
    metric: Metric<i64>,
    // The unit each start/stop interval is reported in.
    time_scale: Time,
    // `Some` while the timer is running, `None` otherwise.
    start_time: Option<Tm>
}
/// Error encountered while starting or stopping a timer
#[derive(Debug)]
pub enum Error {
    /// IO error
    Io(io::Error),
    /// Timer was already started
    TimerAlreadyStarted,
    /// Timer wasn't previously started
    TimerNotStarted,
}

// Allows `?` on metric-update I/O errors inside timer methods.
impl From<io::Error> for Error {
    fn from(err: io::Error) -> Error {
        Error::Io(err)
    }
}
impl Timer {
    /// Creates a new timer metric with given time scale.
    ///
    /// The backing metric starts at `0`, uses `Semantics::Instant`, and a
    /// unit with a single dimension of `time_scale`.
    pub fn new(name: &str, time_scale: Time,
               shorthelp_text: &str, longhelp_text: &str) -> Result<Self, String> {
        let metric = Metric::new(
            name,
            0,
            Semantics::Instant,
            Unit::new().time(time_scale, 1)?,
            shorthelp_text,
            longhelp_text
        )?;
        Ok(Timer {
            metric: metric,
            time_scale: time_scale,
            start_time: None
        })
    }

    /// Starts the timer. Returns `Error::TimerAlreadyStarted` if the timer
    /// is already running.
    pub fn start(&mut self) -> Result<(), Error> {
        if self.start_time.is_some() {
            return Err(Error::TimerAlreadyStarted)
        }
        self.start_time = Some(time::now());
        Ok(())
    }

    /// Stops the timer, updates the internal metric, and returns the time
    /// elapsed since the last `start`, expressed in `time_scale` units. If
    /// the timer was stopped too early or too late such that the internal
    /// nanosecond or microsecond value over/under-flows, the elapsed time
    /// is reported as `0` and the timer keeps running.
    pub fn stop(&mut self) -> Result<i64, Error> {
        match self.start_time {
            Some(start_time) => {
                let duration = time::now() - start_time;
                let elapsed = match self.time_scale {
                    Time::NSec => duration.num_nanoseconds().unwrap_or(0),
                    Time::USec => duration.num_microseconds().unwrap_or(0),
                    // BUG FIX: this previously called num_microseconds(),
                    // so a millisecond-scaled timer accumulated microsecond
                    // counts (off by 1000x).
                    Time::MSec => duration.num_milliseconds(),
                    Time::Sec => duration.num_seconds(),
                    Time::Min => duration.num_minutes(),
                    Time::Hour => duration.num_hours()
                };
                let val = *self.metric.val();
                self.metric.set_val(val + elapsed)?;
                // we need to record the time elapsed even if stop()
                // was called before a single unit of time_scale passed,
                // so the timer is only cleared once a non-zero interval
                // has been observed.
                if elapsed != 0 {
                    self.start_time = None;
                }
                Ok(elapsed)
            },
            None => Err(Error::TimerNotStarted)
        }
    }

    /// Returns the cumulative time elapsed between every
    /// `start` and `stop` pair, in `time_scale` units.
    pub fn elapsed(&mut self) -> i64 {
        *self.metric.val()
    }
}
// MMV export simply delegates to the wrapped metric; the timer itself has
// no additional on-disk representation.
impl MMVWriter for Timer {
    private_impl!{}

    fn write(&mut self, ws: &mut MMVWriterState, c: &mut Cursor<&mut [u8]>, mmv_ver: Version) -> io::Result<()> {
        self.metric.write(ws, c, mmv_ver)
    }

    fn register(&self, ws: &mut MMVWriterState, mmv_ver: Version) {
        self.metric.register(ws, mmv_ver)
    }

    fn has_mmv2_string(&self) -> bool {
        self.metric.has_mmv2_string()
    }
}
/// Sleep-based smoke test: start/stop must accumulate into `elapsed()`, and
/// double-start / stop-without-start must error.
/// (Only checks that elapsed values sum; it does not pin their magnitude.)
#[test]
pub fn test() {
    use super::super::Client;
    use std::thread;
    use std::time::Duration;
    let mut timer = Timer::new("timer", Time::MSec, "", "").unwrap();
    assert_eq!(timer.elapsed(), 0);
    Client::new("timer_test").unwrap()
        .export(&mut [&mut timer]).unwrap();
    // Stopping before starting is an error.
    assert!(timer.stop().is_err());
    let sleep_time = 2; // seconds
    timer.start().unwrap();
    // Starting twice is an error.
    assert!(timer.start().is_err());
    thread::sleep(Duration::from_secs(sleep_time));
    let elapsed1 = timer.stop().unwrap();
    assert_eq!(timer.elapsed(), elapsed1);
    timer.start().unwrap();
    thread::sleep(Duration::from_secs(sleep_time));
    let elapsed2 = timer.stop().unwrap();
    // Elapsed intervals accumulate across start/stop pairs.
    assert_eq!(timer.elapsed(), elapsed1 + elapsed2);
}
| true |
490c13a6c1902ba571447ede640c4507e9c0cb56
|
Rust
|
rhololkeolke/nphysics_model_explorer
|
/model-explorer/src/state/run_sim.rs
|
UTF-8
| 1,449 | 2.53125 | 3 |
[] |
no_license
|
use crate::resource;
use amethyst::{
input::is_key_down, renderer::VirtualKeyCode, GameData, SimpleState, SimpleTrans, StateData,
StateEvent, Trans,
};
/// Amethyst state active while the simulation runs; listens for restart /
/// reload hotkeys.
pub struct RunSimState;

impl SimpleState for RunSimState {
    /// Handles window events during simulation.
    ///
    /// `R` restarts the simulation and `L` reloads the model. Both delete
    /// all entities, set the `ReloadModel` resource to the matching variant,
    /// and pop back to the previous state. Every other event is ignored.
    fn handle_event(
        &mut self,
        data: StateData<'_, GameData<'_, '_>>,
        event: StateEvent,
    ) -> SimpleTrans {
        if let StateEvent::Window(event) = &event {
            if is_key_down(&event, VirtualKeyCode::R) {
                // restart the simulation
                println!("Restart simulation");
                // TODO(dschwab): Should probably filter the types of
                // entities I'm deleting by which component they have.
                data.world.delete_all();
                *data.world.write_resource::<resource::ReloadModel>() =
                    resource::ReloadModel::Restart;
                return Trans::Pop;
            } else if is_key_down(&event, VirtualKeyCode::L) {
                println!("Reload model");
                // TODO(dschwab): Should probably filter the types of
                // entities I'm deleting by which component they have.
                data.world.delete_all();
                // reload the model and restart
                *data.world.write_resource::<resource::ReloadModel>() =
                    resource::ReloadModel::Reload;
                return Trans::Pop;
            }
        }
        Trans::None
    }
}
| true |
feefcd027cbf70facf8093eed3082b3afecb42ae
|
Rust
|
steamroller-airmash/airmash-server
|
/server-config/src/util.rs
|
UTF-8
| 3,569 | 3.171875 | 3 |
[
"Apache-2.0",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use std::cell::Cell;
use std::mem::ManuallyDrop;
use std::ops::{Deref, DerefMut};
/// Serde helpers representing a `Duration` as fractional seconds (`f64`),
/// for use with `#[serde(with = "duration")]`.
pub(crate) mod duration {
    use std::time::Duration;

    use serde::{Deserialize, Deserializer, Serializer};

    pub(crate) fn serialize<S: Serializer>(dur: &Duration, ser: S) -> Result<S::Ok, S::Error> {
        ser.serialize_f64(dur.as_secs_f64())
    }

    pub(crate) fn deserialize<'de, D: Deserializer<'de>>(de: D) -> Result<Duration, D::Error> {
        f64::deserialize(de).map(Duration::from_secs_f64)
    }
}
/// Like [`duration`], but for `Option<Duration>`: `None` round-trips as
/// null/absent, `Some` as fractional seconds.
pub(crate) mod option_duration {
    use std::time::Duration;

    use serde::{Deserialize, Deserializer, Serialize, Serializer};

    pub(crate) fn serialize<S: Serializer>(
        dur: &Option<Duration>,
        ser: S,
    ) -> Result<S::Ok, S::Error> {
        dur.map(|d| d.as_secs_f64()).serialize(ser)
    }

    pub(crate) fn deserialize<'de, D: Deserializer<'de>>(
        de: D,
    ) -> Result<Option<Duration>, D::Error> {
        Ok(Option::deserialize(de)?.map(Duration::from_secs_f64))
    }
}
/// Serde helpers representing a `Vector2` as a two-element `[x, y]` array.
pub(crate) mod vector {
    use serde::{Deserialize, Deserializer, Serialize, Serializer};

    use crate::Vector2;

    pub(crate) fn serialize<S: Serializer>(v: &Vector2, ser: S) -> Result<S::Ok, S::Error> {
        [v.x, v.y].serialize(ser)
    }

    pub(crate) fn deserialize<'de, D: Deserializer<'de>>(de: D) -> Result<Vector2, D::Error> {
        <[f32; 2]>::deserialize(de).map(From::from)
    }
}
/// Wrapper type around [`ManuallyDrop`] which drops the contained value when
/// it goes out of scope, unless [`MaybeDrop::cancel_drop`] was called first.
pub(crate) struct MaybeDrop<T> {
    value: ManuallyDrop<T>,
    should_drop: Cell<bool>,
}

impl<T> MaybeDrop<T> {
    /// Wraps `value`; by default it will still be dropped normally.
    pub fn new(value: T) -> Self {
        MaybeDrop {
            value: ManuallyDrop::new(value),
            should_drop: Cell::new(true),
        }
    }

    /// Prevent the contained value from being dropped when this `MaybeDrop`
    /// is dropped.
    pub fn cancel_drop(slot: &Self) {
        slot.should_drop.set(false);
    }
}

impl<T> From<ManuallyDrop<T>> for MaybeDrop<T> {
    fn from(wrapped: ManuallyDrop<T>) -> Self {
        MaybeDrop::new(ManuallyDrop::into_inner(wrapped))
    }
}

impl<T> Drop for MaybeDrop<T> {
    fn drop(&mut self) {
        if !self.should_drop.get() {
            return;
        }
        // SAFETY: this is the only place where self.value is dropped, so
        // there is no possibility of double-drops.
        unsafe { ManuallyDrop::drop(&mut self.value) }
    }
}

impl<T> Deref for MaybeDrop<T> {
    type Target = T;

    fn deref(&self) -> &T {
        &self.value
    }
}

impl<T> DerefMut for MaybeDrop<T> {
    fn deref_mut(&mut self) -> &mut T {
        &mut self.value
    }
}
/// RAII wrapper that drops whatever the stored pointer points to.
///
/// Used to guarantee cleanup of a `ManuallyDrop` slot that lives elsewhere.
pub(crate) struct DropPtr<T>(*mut ManuallyDrop<T>);

impl<T> DropPtr<T> {
    /// # Safety
    /// `ptr` must be valid to drop until the `DropPtr` instance drops or is
    /// forgotten.
    pub unsafe fn new(ptr: *mut ManuallyDrop<T>) -> Self {
        Self(ptr)
    }
}

impl<T> Drop for DropPtr<T> {
    fn drop(&mut self) {
        // SAFETY: The safety contract for DropPtr::new guarantees that this is safe.
        unsafe { ManuallyDrop::drop(&mut *self.0) }
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Drop probe: sets the referenced flag to true when dropped.
    struct DropWrite<'a>(&'a mut bool);

    impl Drop for DropWrite<'_> {
        fn drop(&mut self) {
            *self.0 = true;
        }
    }

    // Without cancel_drop, the wrapped value must be dropped.
    #[test]
    fn maybedrop_drops_by_default() {
        let mut check = false;
        {
            let _drop = MaybeDrop::new(DropWrite(&mut check));
        }
        assert!(check);
    }

    // After cancel_drop, the wrapped value must be leaked, not dropped.
    #[test]
    fn maybedrop_no_drop_when_disabled() {
        let mut check = false;
        {
            let drop = MaybeDrop::new(DropWrite(&mut check));
            MaybeDrop::cancel_drop(&drop);
        }
        assert!(!check);
    }
}
| true |
7dcedb38627774b125e8f24756d2dd5d6d7812ab
|
Rust
|
FreskyZ/fff-lang
|
/src/common/arena/tests.rs
|
UTF-8
| 6,035 | 2.9375 | 3 |
[
"Apache-2.0"
] |
permissive
|
use super::*;
/// End-to-end arena usage: emplace nodes of mixed sizes, build a slice of
/// cross-references, and read everything back through the returned indices.
///
/// NOTE(review): the emplace closures initialize only some fields via `&mut`;
/// this appears to rely on `Arena::emplace` handing out a default/zeroed
/// node — confirm against the `emplace` implementation.
#[test]
fn basic() {
    struct Node1 { span: u32, isid: u32 }
    struct Node2 { span: u32, isid: u32, span2: u128, keyword: u8, separator: u8 }
    struct Node3 { span: u32, node1: Index<Node1>, node4: Option<Index<Node4>> }
    struct Node4 { span: u32, node3s: Slice<Index<Node3>> }

    let arena = Arena::new();
    let index1 = arena.emplace(|n: &mut Node1| { n.span = 1; n.isid = 2; });
    let index2 = arena.emplace(|n: &mut Node2| { n.span = 3; n.isid = 4; n.span2 = 5; n.keyword = 6; n.separator = 7 });
    let index3 = arena.emplace(|n: &mut Node1| { n.span = 8; n.isid = 9; });
    let index8 = arena.emplace(|n: &mut Node1| { n.span = 14; n.isid = 15; });
    // Node4 owns a slice of Node3 indices, each pointing at a Node1.
    let mut node3s = Vec::new();
    node3s.push(arena.emplace(|n: &mut Node3| { n.span = 10; n.node1 = index1; n.node4 = None; }));
    node3s.push(arena.emplace(|n: &mut Node3| { n.span = 11; n.node1 = index3; n.node4 = None; }));
    node3s.push(arena.emplace(|n: &mut Node3| { n.span = 12; n.node1 = index8; n.node4 = None; }));
    let node3s = arena.build_slice(node3s);
    let index7 = arena.emplace(|n: &mut Node4| { n.span = 13; n.node3s = node3s; });
    // println!("{}", arena.status(true));

    // Read back every node and verify the stored field values survive.
    let node2 = arena.get(index2);
    assert_eq!((node2.span, node2.isid, node2.span2, node2.keyword, node2.separator), (3, 4, 5, 6, 7));
    let node7 = arena.get(index7);
    assert_eq!(node7.span, 13);
    assert_eq!(node7.node3s.len(), 3);
    let node7_node3s = arena.get_iter(node7.node3s).map(|i| arena.get(*i)).collect::<Vec<_>>();
    assert_eq!(node7_node3s.len(), 3);
    assert_eq!(node7_node3s[0].span, 10);
    assert!(node7_node3s[0].node4.is_none());
    let node1 = arena.get(node7_node3s[0].node1);
    assert_eq!((node1.span, node1.isid), (1, 2));
    assert_eq!(node7_node3s[1].span, 11);
    assert!(node7_node3s[1].node4.is_none());
    let node3 = arena.get(node7_node3s[1].node1);
    assert_eq!((node3.span, node3.isid), (8, 9));
    assert_eq!(node7_node3s[2].span, 12);
    assert!(node7_node3s[2].node4.is_none());
    let node8 = arena.get(node7_node3s[2].node1);
    assert_eq!((node8.span, node8.isid), (14, 15));
}
/// Cross-checks the arena's closed-form chunk geometry helpers
/// (`get_chunk_size`, `get_chunk_base_index`, `get_chunk_index_and_offset`)
/// against a naive prefix-sum computation of the same layout.
///
/// Layout under test: chunks 0 and 1 are 4 KiB, sizes double per chunk up
/// to 2 MiB (chunk 10), and stay at 2 MiB from then on.
#[test]
#[cfg(not(miri))] // 2000 size vector is too large for miri
fn chunk_size_operations() {
    let sizes = (0..2056).map(|i| get_chunk_size(i)).collect::<Vec<_>>();
    assert_eq!(sizes[0], 1 << 12);
    assert_eq!(sizes[1], 1 << 12);
    assert_eq!(sizes[2], 1 << 13);
    assert_eq!(sizes[9], 1 << 20);
    assert_eq!(sizes[10], 1 << 21);
    assert_eq!(sizes[50], 1 << 21);

    // compare to naive method: running prefix sum of chunk sizes gives each
    // chunk's base index.
    let mut start_indexes = Vec::with_capacity(2057);
    start_indexes.push(0);
    for &size in &sizes {
        let new_start_index = *start_indexes.last().unwrap() + size;
        start_indexes.push(new_start_index);
    }
    assert_eq!(start_indexes.len(), 2057);
    assert_eq!(start_indexes[0], 0);
    assert_eq!(start_indexes[1], 1 << 12);
    assert_eq!(start_indexes[2], 1 << 13);
    assert_eq!(start_indexes[9], 1 << 20);
    assert_eq!(start_indexes[10], 1 << 21);
    assert_eq!(start_indexes[11], 2 << 21);
    assert_eq!(start_indexes[12], 3 << 21);
    assert_eq!(start_indexes[13], 4 << 21);
    assert_eq!(start_indexes[14], 5 << 21);
    assert_eq!(start_indexes[15], 6 << 21);
    // From chunk 10 on, bases advance by exactly 2 MiB per chunk.
    for i in 10..=2056 {
        assert_eq!(start_indexes[i], (i - 9) << 21);
    }
    for i in 0..=2056 {
        assert_eq!(get_chunk_base_index(i), start_indexes[i]);
    }

    // For a global index, the naive answer is found by scanning for the
    // containing chunk; the helper must agree.
    macro_rules! case {
        ($i:expr) => (
            assert_eq!(get_chunk_index_and_offset($i), {
                let mut chunk_index = 0;
                loop {
                    if ($i as usize) < start_indexes[chunk_index + 1] {
                        break (chunk_index, ($i - start_indexes[chunk_index]));
                    }
                    chunk_index += 1;
                }
            });
        );
    }

    case!(0);
    case!(1);
    case!(4000);
    case!(8000);
    case!(10000);
    case!(100000);
    case!(1000000);
    case!(10000000);
    case!(100000000);

    macro_rules! cases {
        ($($i:expr),+) => {{
            $(case!($i);)+
        }}
    }

    // from random import randint
    // from textwrap import wrap
    // print('\n'.join(wrap(', '.join([str(randint(0, 1000000) if randint(0, 100) > 10 else randint(0, 0xFFFFFFFF)) for _ in range(0, 100)]), width=120)))
    cases!{
        454603, 436048, 795409, 2281690808, 633818, 703200, 412184, 3030640151, 351551, 229871, 721038, 3280059628, 42351,
        538350, 500639, 623567, 971871, 630982, 152641, 534634, 203521, 821225, 637018539, 99095, 2684916193, 988599, 183001,
        790087, 631094, 987952, 216783, 603426, 334300, 213410, 71344, 664852, 184458, 905925, 749255, 757427, 542735, 750410,
        983898, 2671555705, 532565, 2888062604, 694565, 765835, 663730, 125219, 313677, 407941, 477639, 400632, 144275,
        1884706155, 676943, 435813, 1107472433, 542688, 676498, 75842, 655258, 714652, 475418, 3397429939, 552209, 80713,
        366352, 538997, 583272, 791465, 917358, 507872, 790258657, 205375, 100045, 452772, 706163, 531050, 584011, 246191,
        356364, 168979, 203555, 86832, 437606, 412513, 259866, 808418, 519853, 604293, 243356, 871119, 290524, 305770, 322255,
        156722, 757885, 176444
    }
}
#[test]
fn very_large_object() {
    let arena = Arena::new();
    // [u128; 256] is 4096 bytes — the largest size emplace accepts
    // (the paired `too_large_object` test shows 257 panics).
    arena.emplace(|_: &mut [u128; 256]| {});
}
#[test]
#[should_panic(expected = "too large object")]
fn too_large_object() {
    let arena = Arena::new();
    // One u128 (16 bytes) over the limit accepted by `very_large_object`.
    arena.emplace(|_: &mut [u128; 257]| {});
}
#[test]
#[cfg(not(miri))] // 2000 size vector is too large for miri, too
fn large_array() {
    let arena = Arena::new();
    // Emplace 2400 integers — enough to span several chunks.
    let indices: Vec<_> = (0..2400)
        .map(|value| arena.emplace(|slot: &mut i32| *slot = value))
        .collect();
    let slice = arena.build_slice(indices);
    // Read everything back through the slice and verify insertion order.
    let fetched = arena
        .get_iter(slice)
        .map(|idx| arena.get(*idx))
        .collect::<Vec<_>>();
    for (expected, item) in fetched.iter().enumerate() {
        assert_eq!(expected as i32, **item);
    }
}
| true |
cae1b82286641665ad16b0f8035d1f963ff8f607
|
Rust
|
jiricodes/rust_http_server
|
/src/http/query_string.rs
|
UTF-8
| 1,150 | 3.453125 | 3 |
[
"MIT"
] |
permissive
|
use std::collections::HashMap;
/// Parsed representation of a URL query string.
///
/// All keys and values borrow from the original input string.
#[derive(Debug)]
pub struct QueryString<'a> {
    data: HashMap<&'a str, QueryValue<'a>>,
}

/// The value(s) stored for a single query-string key.
#[derive(Debug)]
pub enum QueryValue<'a> {
    Single(&'a str),
    Multiple(Vec<&'a str>),
}

impl<'a> QueryString<'a> {
    /// Looks up the value(s) associated with `key`, if any.
    pub fn get(&self, key: &str) -> Option<&QueryValue> {
        self.data.get(key)
    }
}

impl<'a> From<&'a str> for QueryString<'a> {
    /// Parses `k1=v1&k2=v2&...`.
    ///
    /// A segment without `=` maps to an empty value, and a key that appears
    /// more than once collects its values into `QueryValue::Multiple`.
    fn from(s: &'a str) -> Self {
        let mut data = HashMap::new();
        for pair in s.split('&') {
            // Split on the first '='; no '=' means the whole segment is the key.
            let (key, val) = match pair.find('=') {
                Some(eq) => (&pair[..eq], &pair[eq + 1..]),
                None => (pair, ""),
            };
            data.entry(key)
                .and_modify(|existing: &mut QueryValue| match existing {
                    QueryValue::Single(first) => {
                        // Promote a single value to a list on the second occurrence.
                        *existing = QueryValue::Multiple(vec![first, val]);
                    }
                    QueryValue::Multiple(all) => all.push(val),
                })
                .or_insert(QueryValue::Single(val));
        }
        QueryString { data }
    }
}
| true |
45b13478db85fc36bb450913f61b9be474c9f245
|
Rust
|
mikialex/rxsl
|
/src/symbol_table.rs
|
UTF-8
| 1,561 | 3.0625 | 3 |
[] |
no_license
|
use crate::ir::PrimitiveType;
use std::collections::HashMap;
/// Information recorded for a declared symbol.
#[derive(Clone, Copy)]
pub struct SymbolInfo {
    pub is_const: bool,     // whether the symbol was declared const
    pub ty: PrimitiveType,  // the symbol's primitive type
}
/// Stack of lexical scopes; the last entry is the innermost scope.
pub struct SymbolTable {
    scopes: Vec<ScopeSymbolTable>,
}
impl SymbolTable {
    /// Creates an empty table with no scopes; call `push_scope` before declaring.
    pub fn new() -> Self {
        Self { scopes: Vec::new() }
    }
    /// Enters a new innermost scope.
    pub fn push_scope(&mut self) {
        self.scopes.push(ScopeSymbolTable::new());
    }
    /// Leaves the innermost scope, dropping all symbols declared in it.
    ///
    /// # Panics
    /// Panics if no scope is currently open.
    pub fn pop_scope(&mut self) {
        self.scopes
            .pop()
            .expect("failed to pop scope, no outer scope exist");
    }
    /// Resolves `name`, searching from the innermost scope outwards.
    #[must_use]
    pub fn search(&self, name: &str) -> Result<&SymbolInfo, SymbolError> {
        self.scopes
            .iter()
            .rev()
            .find_map(|table| table.symbols.get(name))
            // ok_or_else defers the String allocation to the error path only.
            .ok_or_else(|| SymbolError::NotExist(name.to_owned()))
    }
    /// Declares `name` in the innermost scope.
    ///
    /// Returns `NameConflict` if the name was already declared in that scope.
    ///
    /// # Panics
    /// Panics if no scope is currently open.
    pub fn declare(&mut self, name: &str, info: SymbolInfo) -> Result<(), SymbolError> {
        let previous = self
            .scopes
            .last_mut()
            .expect("cannot declare a symbol with no scope open")
            .symbols
            .insert(name.to_owned(), info);
        // `map_or` answers the original "map None to Result" question:
        // Some(_) (already declared in this scope) is a conflict, None is success.
        previous.map_or(Ok(()), |_| Err(SymbolError::NameConflict(name.to_owned())))
    }
}
/// Symbols declared in a single scope, keyed by name.
pub struct ScopeSymbolTable {
    symbols: HashMap<String, SymbolInfo>,
}
/// Errors produced by symbol declaration and lookup.
#[derive(Debug)]
pub enum SymbolError {
    /// The name was declared twice in the same scope.
    NameConflict(String),
    /// The name was not found in any enclosing scope.
    NotExist(String),
}
impl ScopeSymbolTable {
    /// Creates a scope with no declared symbols.
    pub fn new() -> Self {
        let symbols = HashMap::new();
        Self { symbols }
    }
}
| true |
abaf8cd3ab7fceea4eeaf047d5c8d75a8a4799cf
|
Rust
|
sa77/rusty-converter
|
/src/main.rs
|
UTF-8
| 727 | 3.109375 | 3 |
[] |
no_license
|
#[allow(unused_imports)]
use std::error::Error;
use std::io;
use std::process;
fn main() {
    // graceful exit: report the failure and return a non-zero status
    if let Err(err) = run() {
        // Errors belong on stderr so piped stdout stays clean record output.
        eprintln!("{}", err);
        process::exit(1);
    }
}
// Box<dyn Error> means 'any kind of error' - hard to inspect
/// Reads CSV records from stdin and prints each one with Debug formatting.
fn run() -> Result<(), Box<dyn Error>> {
    let mut rdr = csv::Reader::from_reader(io::stdin());
    for result in rdr.records() {
        // `?` propagates any CSV read error, replacing the manual match the
        // original comments already suggested.
        let record = result?;
        println!("{:?}", record);
    }
    Ok(())
}
| true |
cc292a965d76f15260480d7add263863f975b8b8
|
Rust
|
sprt/linkerd2-proxy
|
/src/control/destination/client.rs
|
UTF-8
| 2,832 | 2.515625 | 3 |
[
"Apache-2.0"
] |
permissive
|
use futures::{Async, Poll, Stream};
use tower_grpc::{self as grpc, generic::client::GrpcService, BoxBody};
use api::destination::{client::Destination, GetDestination, Update};
use control::remote_stream::{self, Remote};
use std::sync::Arc;
use NameAddr;
/// A client for making service discovery requests to the destination service.
#[derive(Clone)]
pub struct Client<T> {
    client: T,                  // underlying gRPC service
    context_token: Arc<String>, // proxy id attached to every GetDestination request
}
/// A destination service query for a particular name.
///
/// A `Query` manages the underlying gRPC request and can reconnect itself as necessary.
pub struct Query<T>
where
    T: GrpcService<BoxBody>,
{
    auth: NameAddr,                          // the name this query resolves
    client: Client<T>,                       // used to re-issue the query on reconnect
    query: remote_stream::Remote<Update, T>, // current stream state
}
// ===== impl Client =====
impl<T> Client<T>
where
    T: GrpcService<BoxBody>,
{
    /// Issues a `GetDestination` request for `dst`.
    ///
    /// Returns `Remote::NeedsReconnect` when the underlying service is not
    /// ready; `kind` ("connect"/"reconnect") is only used for trace logging.
    fn query(&mut self, dst: &NameAddr, kind: &str) -> Remote<Update, T> {
        trace!("{}ing destination service query for {}", kind, dst);
        if let Ok(Async::Ready(())) = self.client.poll_ready() {
            let req = GetDestination {
                // NOTE(review): scheme appears fixed to "k8s" — confirm this is
                // intentional for all deployments.
                scheme: "k8s".into(),
                path: format!("{}", dst),
                // Clones the token String for each request.
                context_token: self.context_token.as_ref().clone(),
            };
            let mut svc = Destination::new(self.client.as_service());
            let response = svc.get(grpc::Request::new(req));
            let rx = remote_stream::Receiver::new(response);
            Remote::ConnectedOrConnecting { rx }
        } else {
            trace!("destination client not yet ready");
            Remote::NeedsReconnect
        }
    }
    /// Returns a destination service query for the given `dst`.
    pub fn connect(mut self, dst: &NameAddr) -> Query<T> {
        let query = self.query(dst, "connect");
        Query {
            auth: dst.clone(),
            client: self,
            query,
        }
    }
    /// Builds a client; `proxy_id` becomes the context token sent with requests.
    pub fn new(client: T, proxy_id: String) -> Self {
        Self {
            client,
            context_token: Arc::new(proxy_id),
        }
    }
}
impl<T> Query<T>
where
    T: GrpcService<BoxBody>,
{
    /// The name this query is resolving.
    pub fn authority(&self) -> &NameAddr {
        &self.auth
    }
    /// Indicates that this query should be reconnected.
    pub fn reconnect(&mut self) {
        self.query = Remote::NeedsReconnect;
    }
    /// Polls the destination service query for updates, reconnecting if necessary.
    pub fn poll(&mut self) -> Poll<Option<Update>, grpc::Status> {
        loop {
            self.query = match self.query {
                // Connected: delegate directly to the stream.
                Remote::ConnectedOrConnecting { ref mut rx } => return rx.poll(),
                // Disconnected: try to re-establish; if the client still is
                // not ready, yield NotReady instead of spinning.
                Remote::NeedsReconnect => match self.client.query(&self.auth, "reconnect") {
                    Remote::NeedsReconnect => return Ok(Async::NotReady),
                    query => query,
                },
            }
        }
    }
}
| true |
188f2abac075d9e3d4e104f02d886acbe8efed76
|
Rust
|
tmtz/haphazard-1
|
/src/deleter.rs
|
UTF-8
| 1,332 | 3.234375 | 3 |
[] |
no_license
|
/// Marker trait for values that a `Deleter` can reclaim.
pub trait Reclaim {}
/// Blanket impl: every type can be reclaimed.
impl<T> Reclaim for T {}
/// A strategy for disposing of a retired object.
pub trait Deleter {
    /// # Safety
    /// `ptr` must have been allocated by the corresponding allocation method.
    /// delete must be called at most once for each `ptr`.
    unsafe fn delete(&self, ptr: *mut dyn Reclaim);
}
/// Lets a plain function pointer act as a `Deleter`.
impl Deleter for unsafe fn(*mut (dyn Reclaim + 'static)) {
    unsafe fn delete(&self, ptr: *mut dyn Reclaim) {
        // SAFETY: forwards the caller's `delete` contract to the wrapped fn.
        unsafe { (*self)(ptr) }
    }
}
/// Ready-made deleter function pointers.
pub mod deleters {
    use super::Reclaim;
    /// Runs the value's destructor in place without freeing its memory.
    unsafe fn _drop_in_place(ptr: *mut dyn Reclaim) {
        // Safe by the contract on HazPtrObject::retire.
        unsafe { std::ptr::drop_in_place(ptr) };
    }
    /// Always safe to use given requirements on HazPtrObject::retire,
    /// but may lead to memory leaks if the pointer type itself needs drop.
    #[allow(non_upper_case_globals)]
    pub const drop_in_place: unsafe fn(*mut dyn Reclaim) = _drop_in_place;
    /// Reconstructs and drops the owning `Box`, freeing the allocation.
    unsafe fn _drop_box(ptr: *mut dyn Reclaim) {
        // Safety: Safe by the safety gurantees of retire and because it's only used when
        // retiring Box objects.
        let _ = unsafe { Box::from_raw(ptr) };
    }
    /// # Safety
    ///
    /// Can only be used on values that were originally derived from a Box.
    #[allow(non_upper_case_globals)]
    pub const drop_box: unsafe fn(*mut dyn Reclaim) = _drop_box;
}
| true |
65cb76f09dfabcaeb1b94b95265b12108501aed2
|
Rust
|
boxdot/advent-of-code-2019
|
/stiar/day25/src/lib.rs
|
UTF-8
| 2,259 | 2.890625 | 3 |
[] |
no_license
|
mod intcode;
pub use intcode::*;
/// Feeds `iter` to the Intcode program and returns its output decoded as ASCII text.
pub fn run_command(program: &mut Program, iter: impl Iterator<Item = i64>) -> String {
    program.run(iter).map(|c| (c as u8) as char).collect()
}
/// Encodes `input` as Intcode ASCII: one `i64` per character (truncated to a byte).
pub fn to_ascii(input: &str) -> Vec<i64> {
    input.chars().map(|c| i64::from(c as u8)).collect()
}
/// Replays a fixed walk through the ship, picking up eight items (the `take`
/// commands), and prints the game transcript for each step.
pub fn collect_inventory(program: &mut Program) {
    // Hand-derived route; order matters because each command is relative to
    // the droid's current room.
    let commands = [
        "south",
        "west",
        "north",
        "take fuel cell",
        "south",
        "east",
        "north",
        "west",
        "west",
        "east",
        "east",
        "north",
        "east",
        "take candy cane",
        "south",
        "take hypercube",
        "east",
        "west",
        "north",
        "north",
        "south",
        "west",
        "north",
        "take coin",
        "east",
        "take tambourine",
        "west",
        "west",
        "take spool of cat6",
        "north",
        "take weather machine",
        "west",
        "take mutex",
        "west",
    ];
    for command in commands.iter() {
        // Commands must be newline-terminated ASCII for the Intcode game.
        println!(
            "{}",
            run_command(program, to_ascii(&(command.to_string() + "\n")).into_iter())
        );
    }
}
/// Brute-forces the pressure plate: for every subset of the collected
/// inventory, drops that subset, steps west, and stops at the first attempt
/// that is not rejected by the droid alert.
pub fn try_pass(program: Program) {
    let inventory = [
        "spool of cat6",
        "hypercube",
        "weather machine",
        "coin",
        "candy cane",
        "tambourine",
        "fuel cell",
        "mutex",
    ];
    // Enumerate all 2^8 subsets via a bitmask over the inventory indices.
    for subset in (0..1 << inventory.len()).map(|mask| {
        inventory
            .iter()
            .enumerate()
            .filter_map(|(i, x)| if (mask >> i) % 2 == 1 { Some(x) } else { None })
            .collect::<Vec<_>>()
    }) {
        // Each attempt restarts from a fresh copy of the program state.
        let mut p = program.clone();
        println!("{}", run_command(&mut p, std::iter::empty()));
        collect_inventory(&mut p);
        for element in subset.iter() {
            println!(
                "{}",
                run_command(&mut p, to_ascii(&format!("drop {}\n", element)).into_iter())
            );
        }
        let output = run_command(&mut p, to_ascii("west\n").into_iter());
        if !output.contains("Alert! Droids on this ship") {
            println!("Subset to drop: {:?}", subset);
            println!("{}", output);
            return;
        }
    }
}
| true |
2522b3b93bdc6de77b1352d7e236b49b6343ce9e
|
Rust
|
COLDTURNIP/raphanus_leetcode
|
/rust/src/p740.rs
|
UTF-8
| 2,726 | 3.796875 | 4 |
[] |
no_license
|
/*
Problem 740. Delete and Earn
============================
https://leetcode.com/problems/delete-and-earn/
Given an array nums of integers, you can perform operations on the array.
In each operation, you pick any nums[i] and delete it to earn nums[i] points. After, you must
delete every element equal to nums[i] - 1 or nums[i] + 1.
You start with 0 points. Return the maximum number of points you can earn by applying such
operations.
Example 1:
Input: nums = [3, 4, 2]
Output: 6
Explanation:
Delete 4 to earn 4 points, consequently 3 is also deleted.
Then, delete 2 to earn 2 points. 6 total points are earned.
Example 2:
Input: nums = [2, 2, 3, 3, 3, 4]
Output: 9
Explanation:
Delete 3 to earn 3 points, deleting both 2's and the 4.
Then, delete 3 again to earn 3 points, and 3 again to earn 3 points.
9 total points are earned.
Note:
- The length of nums is at most 20000.
- Each element nums[i] is an integer in the range [1, 10000].
*/
impl Solution {
    /// Maximum points from repeatedly taking some value `n` (earning `n` for
    /// each copy) while all `n - 1`s and `n + 1`s are deleted — a
    /// "house robber" DP over the distinct sorted values.
    pub fn delete_and_earn(nums: Vec<i32>) -> i32 {
        // Collapse the input into ascending (value, occurrence-count) pairs.
        let groups = {
            let mut sorted = nums;
            sorted.sort_unstable();
            let mut pairs = Vec::<(i32, i32)>::with_capacity(sorted.len());
            let mut last_seen = -1;
            for &value in &sorted {
                if value != last_seen {
                    pairs.push((value, 1));
                    last_seen = value;
                } else if let Some(entry) = pairs.last_mut() {
                    entry.1 += 1;
                }
            }
            pairs
        };
        // best:       score including the current group
        // best_prev:  best score up to the previous group
        // best_prev2: best score up to the group before that
        let (mut best, mut best_prev, mut best_prev2) = (0, 0, 0);
        let mut prev_value = -1;
        for &(value, count) in &groups {
            best_prev2 = best_prev2.max(best_prev);
            best_prev = best;
            best = value * count
                + if value - 1 == prev_value {
                    // Taking this value forfeits the adjacent previous group.
                    best_prev2
                } else {
                    best_prev.max(best_prev2)
                };
            prev_value = value;
        }
        best.max(best_prev)
    }
}

pub struct Solution;
#[cfg(test)]
mod tests {
    // Requires a nightly toolchain: `extern crate test` and #[bench] are unstable.
    extern crate test;
    use super::Solution;
    #[test]
    fn test_case1() {
        assert_eq!(Solution::delete_and_earn(vec![3, 4, 2]), 6);
    }
    #[test]
    fn test_case2() {
        assert_eq!(Solution::delete_and_earn(vec![2, 2, 3, 4, 3, 3]), 9);
    }
    #[test]
    fn test_case3() {
        assert_eq!(
            Solution::delete_and_earn(vec![1, 1, 1, 2, 4, 5, 5, 5, 6]),
            18
        );
    }
    // Degenerate input: no numbers means no points.
    #[test]
    fn test_empty() {
        assert_eq!(Solution::delete_and_earn(Vec::new()), 0);
    }
    #[bench]
    fn bench(b: &mut test::Bencher) {
        b.iter(|| Solution::delete_and_earn(vec![2, 2, 3, 4, 3, 3]));
    }
}
| true |
c803bb62b40598cc5006184591a5990580928aae
|
Rust
|
Plutor/advent-of-code-2020
|
/8.rs
|
UTF-8
| 739 | 2.78125 | 3 |
[] |
no_license
|
fn main() {
    // Advent of Code 2020 day 8: execute the hand-held console program and
    // stop just before any instruction runs a second time (infinite loop);
    // the last printed `acc` is the answer.
    let data = std::fs::read_to_string("8.dat").expect("oops");
    let mut instructions = data.lines().collect::<Vec<&str>>();
    let mut acc: i32 = 0;
    let mut pc: i32 = 0;
    loop {
        let instruction = instructions[pc as usize];
        println!("pc={}, acc={}, running {}", pc, acc, instruction);
        // "!!!" is the sentinel written over every executed slot; reaching
        // one again means the loop was found.
        if instruction == "!!!" {
            break;
        }
        let mut iter = instruction.split_whitespace();
        let itype = iter.next().unwrap();
        let ival: i32 = iter.next().unwrap().parse().unwrap();
        // Mark this slot as executed before advancing.
        instructions[pc as usize] = "!!!";
        pc = match itype {
            "acc" => { acc += ival; pc+1 },
            "jmp" => { pc+ival },
            _ => pc+1  // "nop" and anything unknown fall through
        }
    }
}
| true |
0e3b47f81fac4e25dbbbba2a0596b6f614461244
|
Rust
|
yjhmelody/leetcode-rust
|
/src/design_circular_queue.rs
|
UTF-8
| 2,324 | 3.6875 | 4 |
[
"MIT"
] |
permissive
|
#![allow(dead_code)]
#![allow(unused_variables)]
use std::iter;
// One more space: the buffer holds k + 1 slots so that `head == tail` always
// means empty and `(tail + 1) % size == head` always means full.
#[derive(Debug, Clone)]
pub struct MyCircularQueue {
    q: Vec<i32>,  // fixed ring buffer of capacity + 1 slots
    head: usize,  // index of the front element
    tail: usize,  // index one past the last element
    size: usize,  // allocated length of `q` (capacity + 1)
}

impl MyCircularQueue {
    /** Initialize your data structure here. Set the size of the queue to be k. */
    pub fn new(k: i32) -> Self {
        let size = k as usize + 1;
        Self {
            // vec![0; size] replaces the roundabout
            // iter::repeat(0).take(size.clone()).collect() (and the needless
            // .clone() on a Copy usize).
            q: vec![0; size],
            head: 0,
            tail: 0,
            size,
        }
    }
    /** Insert an element into the circular queue. Return true if the operation is successful. */
    pub fn en_queue(&mut self, value: i32) -> bool {
        if self.is_full() {
            false
        } else {
            self.q[self.tail] = value;
            // Advance tail with wrap-around.
            self.tail = (self.tail + 1) % self.size;
            true
        }
    }
    /** Delete an element from the circular queue. Return true if the operation is successful. */
    pub fn de_queue(&mut self) -> bool {
        if self.is_empty() {
            false
        } else {
            // Advance head with wrap-around; the old slot's value is abandoned.
            self.head = (self.head + 1) % self.size;
            true
        }
    }
    /** Get the front item from the queue, or -1 if the queue is empty. */
    pub fn front(&self) -> i32 {
        if self.is_empty() {
            -1
        } else {
            self.q[self.head]
        }
    }
    /** Get the last item from the queue, or -1 if the queue is empty. */
    pub fn rear(&self) -> i32 {
        if self.is_empty() {
            -1
        } else {
            // `tail` points one past the last element; step back with wrap-around.
            self.q[(self.tail + self.size - 1) % self.size]
        }
    }
    /** Checks whether the circular queue is empty or not. */
    pub fn is_empty(&self) -> bool {
        self.head == self.tail
    }
    /** Checks whether the circular queue is full or not. */
    pub fn is_full(&self) -> bool {
        (self.tail + 1) % self.size == self.head
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test1() {
        // Smoke test: exercises every method once on a capacity-10 queue;
        // results are bound but not asserted.
        let k = 10;
        let value = 10;
        let mut obj = MyCircularQueue::new(k);
        let ret_1: bool = obj.en_queue(value);
        let ret_2: bool = obj.de_queue();
        let ret_3: i32 = obj.front();
        let ret_4: i32 = obj.rear();
        let ret_5: bool = obj.is_empty();
        let ret_6: bool = obj.is_full();
    }
}
| true |
d14f9efb7db7f635d3a46e3d3a331b53aebadcbd
|
Rust
|
khonsulabs/stylecs
|
/stylecs-macros/src/lib.rs
|
UTF-8
| 2,162 | 2.8125 | 3 |
[] |
no_license
|
use attribute_derive::Attribute;
use manyhow::manyhow;
use proc_macro2::{Ident, TokenStream};
use quote::quote;
use syn::{DeriveInput, Expr};
/// Options parsed from the `#[style(...)]` attribute on the derive target.
#[derive(Attribute, Debug)]
#[attribute(ident = style)]
struct StyleComponent {
    name: Option<Ident>,      // explicit component name; otherwise derived from the type name
    authority: Option<Ident>, // optional authority prefix passed to static_name!
    inherited: Option<bool>,  // when present, generates an inherited() override
    merge: Option<Expr>,      // when present, becomes the body of merge()
}
#[manyhow]
#[proc_macro_derive(StyleComponent, attributes(style))]
pub fn style_component_derive(input: TokenStream) -> manyhow::Result<TokenStream> {
    // Pull out the pieces of the derive input we need.
    let DeriveInput {
        attrs,
        ident,
        generics,
        ..
    } = syn::parse2(input)?;
    // Optional overrides supplied via #[style(...)].
    let StyleComponent {
        name,
        authority,
        inherited,
        merge,
    } = StyleComponent::from_attributes(&attrs)?;
    // Use the explicit name when given; otherwise derive snake_case from the
    // type name, which fails for identifiers stylecs cannot represent.
    let name = if let Some(name) = name {
        validate(&name)?
    } else {
        stylecs_shared::pascal_case_to_snake_case(ident.to_string()).map_err(|_| manyhow::error_message!(ident.span(), "An invalid character for a stylecs Identifier was found. A name must be manually provided for this type."))?
    };
    // Qualify the name with the authority when one was provided.
    let name = if let Some(authority) = authority {
        let authority = validate(&authority)?;
        quote!(::stylecs::static_name!(#authority, #name))
    } else {
        quote!(::stylecs::static_name!(#name))
    };
    // Only emit the inherited()/merge() overrides when the attribute set them.
    let inherited = inherited.map(|value| {
        quote!(
            fn inherited() -> bool {
                #value
            }
        )
    });
    let merge = merge.map(|expr| {
        quote!(
            fn merge(&mut self, other: &Self) {
                #expr;
            }
        )
    });
    Ok(quote! {
        impl<#generics> stylecs::StyleComponent for #ident<#generics> {
            fn name() -> ::stylecs::Name {
                // Store the static name once; convert it on each call.
                static NAME: ::stylecs::StaticName = #name;
                NAME.to_name()
            }
            #inherited
            #merge
        }
    })
}
/// Checks that `name` is a valid stylecs identifier, returning it as a `String`.
fn validate(name: &Ident) -> manyhow::Result<String> {
    let span = name.span();
    let text = name.to_string();
    stylecs_shared::validate_identifier(&text)
        .map_err(|_| manyhow::error_message!(span, "invalid character in identifier"))?;
    Ok(text)
}
| true |
01d1e146bef1efc5f76882c41ca0c23aafbcc975
|
Rust
|
afnanenayet/oars
|
/oars/src/perm_vec.rs
|
UTF-8
| 1,230 | 4.03125 | 4 |
[
"MIT"
] |
permissive
|
//! Provides an interface for a permutation vector class that makes it easy to
//! randomly shuffle orthogonal arrays, or shuffle any set.
use rand::seq::SliceRandom;
use std::ops::Index;
/// This is a vector containing the elements ${0, 1 \cdots n - 1}$, shuffled
/// around to easily allow for a user to create a permutation.
///
/// For example: `a[0]`, if `a` is shuffled, corresponds to another element.
/// This allows you to perform an O(1) permutation as long as `a` has been
/// shuffled.
///
/// Starts as the identity permutation until `shuffle()` is called.
#[derive(Debug)]
pub struct PermutationVector {
    /// The internal array containing the shuffled elements.
    vec: Vec<usize>,
}
impl Index<usize> for PermutationVector {
    type Output = usize;

    /// Returns the permuted element stored at `index`.
    fn index(&self, index: usize) -> &usize {
        self.vec.index(index)
    }
}
impl PermutationVector {
    /// Create a permutation vector that has not been shuffled. If you want a random permutation,
    /// you must remember to call the `shuffle()` method.
    pub fn new(n: usize) -> Self {
        Self {
            // Identity permutation: vec[i] == i.
            vec: (0..n).collect(),
        }
    }
    /// Randomly shuffle the permutation vector
    /// (in place, using the thread-local RNG).
    pub fn shuffle(&mut self) {
        let mut rng = rand::thread_rng();
        self.vec.shuffle(&mut rng);
    }
}
| true |
a945b4b261dd3673ead6c72bc929493217c80e9d
|
Rust
|
tclfs/rust
|
/src/test/ui/underscore-lifetime/dyn-trait-underscore.rs
|
UTF-8
| 1,118 | 2.8125 | 3 |
[
"MIT",
"LicenseRef-scancode-other-permissive",
"Apache-2.0",
"BSD-2-Clause",
"BSD-3-Clause",
"NCSA"
] |
permissive
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Check that the `'_` in `dyn Trait + '_` acts like ordinary elision,
// and not like an object lifetime default.
//
// cc #48468
#![feature(dyn_trait)]
#![feature(underscore_lifetimes)]
fn a<T>(items: &[T]) -> Box<dyn Iterator<Item=&T>> { // no lifetime on the trait object, so it defaults to 'static
    //                      ^^^^^^^^^^^^^^^^^^^^^ bound *here* defaults to `'static`
    Box::new(items.iter()) //~ ERROR cannot infer an appropriate lifetime
}
fn b<T>(items: &[T]) -> Box<dyn Iterator<Item=&T> + '_> { // `'_` elides like an ordinary reference lifetime
    Box::new(items.iter()) // OK, equivalent to c
}
fn c<'a, T>(items: &'a [T]) -> Box<dyn Iterator<Item=&'a T> + 'a> { // explicit spelling of what `'_` means in b
    Box::new(items.iter()) // OK, equivalent to b
}
fn main() { } // NOTE(review): comments added on existing lines only — the companion .stderr expects these line numbers
| true |
9d14d59e5951876f021c428256b3f4c104c34fa9
|
Rust
|
heliumbrain/axum
|
/src/extract/path/mod.rs
|
UTF-8
| 3,924 | 3.015625 | 3 |
[
"MIT"
] |
permissive
|
mod de;
use super::{rejection::*, FromRequest};
use crate::{extract::RequestParts, routing::UrlParams};
use async_trait::async_trait;
use serde::de::DeserializeOwned;
use std::ops::{Deref, DerefMut};
/// Extractor that will get captures from the URL and parse them using
/// [`serde`].
///
/// # Example
///
/// ```rust,no_run
/// use axum::{
/// extract::Path,
/// handler::get,
/// Router,
/// };
/// use uuid::Uuid;
///
/// async fn users_teams_show(
/// Path((user_id, team_id)): Path<(Uuid, Uuid)>,
/// ) {
/// // ...
/// }
///
/// let app = Router::new().route("/users/:user_id/team/:team_id", get(users_teams_show));
/// # async {
/// # axum::Server::bind(&"".parse().unwrap()).serve(app.into_make_service()).await.unwrap();
/// # };
/// ```
///
/// If the path contains only one parameter, then you can omit the tuple.
///
/// ```rust,no_run
/// use axum::{
/// extract::Path,
/// handler::get,
/// Router,
/// };
/// use uuid::Uuid;
///
/// async fn user_info(Path(user_id): Path<Uuid>) {
/// // ...
/// }
///
/// let app = Router::new().route("/users/:user_id", get(user_info));
/// # async {
/// # axum::Server::bind(&"".parse().unwrap()).serve(app.into_make_service()).await.unwrap();
/// # };
/// ```
///
/// Path segments also can be deserialized into any type that implements
/// [`serde::Deserialize`]. Path segment labels will be matched with struct
/// field names.
///
/// ```rust,no_run
/// use axum::{
/// extract::Path,
/// handler::get,
/// Router,
/// };
/// use serde::Deserialize;
/// use uuid::Uuid;
///
/// #[derive(Deserialize)]
/// struct Params {
/// user_id: Uuid,
/// team_id: Uuid,
/// }
///
/// async fn users_teams_show(
/// Path(Params { user_id, team_id }): Path<Params>,
/// ) {
/// // ...
/// }
///
/// let app = Router::new().route("/users/:user_id/team/:team_id", get(users_teams_show));
/// # async {
/// # axum::Server::bind(&"".parse().unwrap()).serve(app.into_make_service()).await.unwrap();
/// # };
/// ```
///
/// If you wish to capture all path parameters you can use `HashMap` or `Vec`:
///
/// ```rust,no_run
/// use axum::{
/// extract::Path,
/// handler::get,
/// Router,
/// };
/// use std::collections::HashMap;
///
/// async fn params_map(
/// Path(params): Path<HashMap<String, String>>,
/// ) {
/// // ...
/// }
///
/// async fn params_vec(
/// Path(params): Path<Vec<(String, String)>>,
/// ) {
/// // ...
/// }
///
/// let app = Router::new()
/// .route("/users/:user_id/team/:team_id", get(params_map).post(params_vec));
/// # async {
/// # axum::Server::bind(&"".parse().unwrap()).serve(app.into_make_service()).await.unwrap();
/// # };
/// ```
///
/// [`serde`]: https://crates.io/crates/serde
/// [`serde::Deserialize`]: https://docs.rs/serde/1.0.127/serde/trait.Deserialize.html
#[derive(Debug)]
pub struct Path<T>(pub T);

impl<T> Deref for Path<T> {
    type Target = T;

    /// Borrows the wrapped, deserialized path value.
    #[inline]
    fn deref(&self) -> &Self::Target {
        let Path(inner) = self;
        inner
    }
}

impl<T> DerefMut for Path<T> {
    /// Mutably borrows the wrapped, deserialized path value.
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target {
        let Path(inner) = self;
        inner
    }
}
#[async_trait]
impl<T, B> FromRequest<B> for Path<T>
where
    T: DeserializeOwned + Send,
    B: Send,
{
    type Rejection = PathParamsRejection;

    async fn from_request(req: &mut RequestParts<B>) -> Result<Self, Self::Rejection> {
        // Shared empty set used when the matched route captured no parameters.
        const EMPTY_URL_PARAMS: &UrlParams = &UrlParams(Vec::new());
        // The router stores captures as Option<UrlParams> in the request
        // extensions; a missing extension is rejected as MissingRouteParams.
        let url_params = if let Some(params) = req
            .extensions_mut()
            .and_then(|ext| ext.get::<Option<UrlParams>>())
        {
            params.as_ref().unwrap_or(EMPTY_URL_PARAMS)
        } else {
            return Err(MissingRouteParams.into());
        };
        // Drive serde over the captured (name, value) pairs; any deserialize
        // failure surfaces as InvalidPathParam.
        T::deserialize(de::PathDeserializer::new(url_params))
            .map_err(|err| PathParamsRejection::InvalidPathParam(InvalidPathParam::new(err.0)))
            .map(Path)
    }
}
| true |
41cc42e1bce4ffd04aa1689b4e6780924f738d5b
|
Rust
|
roy-ganz/toql
|
/crates/sql_expr_macro/src/sql_expr_macro.rs
|
UTF-8
| 5,667 | 2.546875 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
use syn::parse::{Parse, ParseStream};
use syn::punctuated::Punctuated;
use syn::{Expr, LitStr, Result, Token};
use proc_macro2::TokenStream;
use toql_sql_expr_parser::PestSqlExprParser;
use pest::Parser;
use toql_sql_expr_parser::Rule;
/// Parsed input of the SQL expression macro: a string literal optionally
/// followed by comma-separated argument expressions.
#[derive(Debug)]
pub struct SqlExprMacro {
    pub query: LitStr,
    pub arguments: Punctuated<Expr, Token![,]>,
}
impl Parse for SqlExprMacro {
    /// Parses `"literal"` optionally followed by `, arg1, arg2, ...`.
    fn parse(input: ParseStream) -> Result<Self> {
        Ok(SqlExprMacro {
            query: input.parse()?,
            arguments: {
                let lookahead = input.lookahead1();
                if lookahead.peek(Token![,]) {
                    // arguments ?
                    input.parse::<Token![,]>()?; // skip ,
                    input.parse_terminated(Expr::parse)?
                } else {
                    // No comma: the macro was invoked with the literal alone.
                    Punctuated::new()
                }
            },
        })
    }
}
/// Accumulates consecutive literal text until it is flushed into a token.
#[derive(Debug)]
struct FieldInfo {
    pub literal: String,
}
impl FieldInfo {
    /// Starts with an empty literal buffer.
    pub fn new() -> Self {
        FieldInfo {
            literal: String::new(),
        }
    }
}
/// Parses the SQL expression literal with the Pest grammar and lowers the
/// parse tree to code building a `toql::sql_expr::SqlExpr`.
///
/// Grammar errors become a `compile_error!` spanned to the literal.
pub fn parse(
    sql_expr_string: &LitStr,
    expr_args: &mut syn::punctuated::Iter<'_, syn::Expr>,
) -> std::result::Result<TokenStream, TokenStream> {
    // eprintln!("About to parse {}", toql_string);
    match PestSqlExprParser::parse(Rule::query, &sql_expr_string.value()) {
        Ok(pairs) => Ok(evaluate_pair(&mut pairs.flatten(), expr_args)?),
        Err(e) => {
            let msg = e.to_string();
            Err(quote_spanned!(sql_expr_string.span() => compile_error!(#msg)))
        }
    }
}
/// Flushes any buffered literal text into `tokens` as a single
/// `SqlExprToken::Literal`, leaving the buffer empty.
fn append_literal(field_info: &mut FieldInfo, tokens: &mut Vec<TokenStream>) {
    if field_info.literal.is_empty() {
        return;
    }
    let lit = &field_info.literal;
    tokens.push(quote!( toql::sql_expr::SqlExprToken::Literal(String::from(#lit))));
    field_info.literal.clear();
}
/// Appends the collected tokens to `outstream` as
/// `.extend(SqlExpr::from(vec![...]))`, emitting the leading `t` receiver on
/// first use. The caller is responsible for clearing `tokens` afterwards.
fn append_tokens(tokens: &mut Vec<TokenStream>, outstream: &mut TokenStream) {
    if !tokens.is_empty() {
        // Emit the receiver variable only once, while the stream is still empty.
        if outstream.is_empty() {
            outstream.extend(quote!(t));
        }
        outstream.extend(quote!(.extend(toql::sql_expr::SqlExpr::from(vec![ #(#tokens),* ]))));
    }
}
/// Walks the flattened Pest parse tree and builds code that assembles a
/// `toql::sql_expr::SqlExpr` at runtime.
///
/// Literal characters are batched in `FieldInfo` and flushed as single
/// `Literal` tokens; placeholders and `?` literals consume `expr_args`; alias
/// and aux-param rules map to their dedicated token variants.
fn evaluate_pair(
    pairs: &mut pest::iterators::FlatPairs<toql_sql_expr_parser::Rule>,
    expr_args: &mut syn::punctuated::Iter<'_, syn::Expr>,
) -> std::result::Result<TokenStream, TokenStream> {
    let mut with_args = false; // true once a `?` literal consumed an argument
    let mut alias = false; // true right after an alias token was emitted
    let mut field_info = FieldInfo::new();
    let mut outstream: TokenStream = TokenStream::new(); // actual output stream
    let mut tokens: Vec<TokenStream> = Vec::new(); // collect tokens for vec
    for pair in pairs {
        let span = pair.clone().as_span();
        match pair.as_rule() {
            Rule::placeholder => {
                // Flush pending tokens, then splice the next argument
                // expression in as a whole sub-expression.
                append_literal(&mut field_info, &mut tokens);
                append_tokens(&mut tokens, &mut outstream);
                tokens.clear();
                if let Some(a) = expr_args.next() {
                    if outstream.is_empty() {
                        outstream.extend(quote!(t));
                    }
                    outstream.extend(quote!( .extend(#a)));
                } else {
                    return Err(quote!(compile_error!("Missing placeholder argument")));
                }
                alias = false;
            }
            Rule::self_alias => {
                append_literal(&mut field_info, &mut tokens);
                tokens.push(quote!(toql::sql_expr::SqlExprToken::SelfAlias));
                alias = true;
            }
            Rule::other_alias => {
                append_literal(&mut field_info, &mut tokens);
                tokens.push(quote!(toql::sql_expr::SqlExprToken::OtherAlias));
                alias = true;
            }
            Rule::quoted => {
                // Quoted text is passed through into the literal buffer verbatim.
                let text = span.as_str();
                field_info.literal.push_str(text);
                alias = false;
            }
            Rule::aux_param => {
                append_literal(&mut field_info, &mut tokens);
                // Strip the surrounding <...> from the aux param name.
                let name = span.as_str().trim_start_matches('<').trim_end_matches('>');
                alias = false;
                tokens.push(quote!( toql::sql_expr::SqlExprToken::AuxParam(String::from(#name))));
            }
            Rule::literal => {
                // Add a dot if an alias immediately precedes a non whitespace literal (..column_name)
                let l = span.as_str();
                if alias && l != " " {
                    field_info.literal.push('.');
                }
                // If literal is ? insert arguments
                if l == "?" {
                    append_literal(&mut field_info, &mut tokens);
                    if let Some(a) = expr_args.next() {
                        tokens.push( quote!( toql::sql_expr::SqlExprToken::Arg(toql::sql_arg::SqlArg::from(#a))));
                        with_args = true;
                    } else if !with_args {
                        // No args supplied at all: leave the `?` unresolved.
                        tokens.push(quote!(toql::sql_expr::SqlExprToken::UnresolvedArg));
                    } else {
                        return Err(quote!(compile_error!("Missing value for argument")));
                    }
                } else {
                    field_info.literal.push_str(l)
                }
                alias = false;
            }
            _ => {}
        }
    }
    append_literal(&mut field_info, &mut tokens);
    // Fixed typo in the user-visible error: "To many" -> "Too many".
    if expr_args.next().is_some() {
        return Err(quote!(compile_error!("Too many values for arguments");));
    }
    append_tokens(&mut tokens, &mut outstream);
    Ok(quote!({let mut t = toql::sql_expr::SqlExpr::new(); #outstream; t})) // return value not reference
}
| true |
e0447eb58e41ec5cf6b772a09d6f42f22a6a41fd
|
Rust
|
l1lhu1hu1/rust-practice
|
/else/thread/src/4.rs
|
UTF-8
| 931 | 3.5625 | 4 |
[] |
no_license
|
use std::sync::{Arc, Mutex};
use std::thread;
fn main() {
    let mut handles = Vec::new();
    // Arc alone does not provide mutual exclusion, so Mutex is used as well.
    // Arc stands for Atomically Reference Counted.
    // Mutex provides a `lock` method.
    // Arc is the multi-threaded version of Rc: the allocation is freed when
    // the owner count drops to zero.
    let data = Arc::new(Mutex::new(vec![1; 10]))
;
    for x in 0..10 {
        // `clone` increments the owner count.
        let data_ref = data.clone();
        handles.push(thread::spawn(move || {
            // Lock once so the vector cannot be mutated concurrently.
            let mut data = data_ref.lock().unwrap();
            data[x] += 1;
        }));
    }
    for handle in handles {
        let _ = handle.join();
    }
    dbg!(data);
}
| true |
0160240cfad8f4d568a176248ef2b1732b9258b9
|
Rust
|
automerge/automerge
|
/rust/automerge/src/columnar/column_range/key.rs
|
UTF-8
| 7,721 | 2.765625 | 3 |
[
"MIT"
] |
permissive
|
use std::{convert::Infallible, ops::Range};
use super::{DeltaRange, RleRange};
use crate::{
columnar::{
encoding::{
raw, DecodeColumnError, DeltaDecoder, DeltaEncoder, RleDecoder, RleEncoder, Sink,
},
SpliceError,
},
convert,
types::{ElemId, OpId},
};
/// A decoded operation key: either a map property name or a list element id.
#[derive(Clone, Debug, PartialEq)]
pub(crate) enum Key {
    Prop(smol_str::SmolStr),
    Elem(ElemId),
}
/// Byte ranges of the three columns (actor, counter, string) that together
/// encode a column of keys.
#[derive(Clone, Debug, PartialEq)]
pub(crate) struct KeyRange {
    actor: RleRange<u64>,
    counter: DeltaRange,
    string: RleRange<smol_str::SmolStr>,
}
impl KeyRange {
    /// Bundles the three column ranges that jointly encode keys.
    pub(crate) fn new(
        actor: RleRange<u64>,
        counter: DeltaRange,
        string: RleRange<smol_str::SmolStr>,
    ) -> Self {
        Self {
            actor,
            counter,
            string,
        }
    }
    /// Range of the RLE-encoded actor column.
    pub(crate) fn actor_range(&self) -> &RleRange<u64> {
        &self.actor
    }
    /// Range of the delta-encoded counter column.
    pub(crate) fn counter_range(&self) -> &DeltaRange {
        &self.counter
    }
    /// Range of the RLE-encoded property-string column.
    pub(crate) fn string_range(&self) -> &RleRange<smol_str::SmolStr> {
        &self.string
    }
    /// Returns an iterator decoding keys from `data` using all three columns.
    pub(crate) fn iter<'a>(&self, data: &'a [u8]) -> KeyIter<'a> {
        KeyIter {
            actor: self.actor.decoder(data),
            counter: self.counter.decoder(data),
            string: self.string.decoder(data),
        }
    }
    /// Encodes `items` from scratch into `out`, returning the new ranges.
    pub(crate) fn encode<'b, O, I: Iterator<Item = convert::Key<'b, O>> + Clone>(
        items: I,
        out: &mut Vec<u8>,
    ) -> Self
    where
        O: convert::OpId<usize>,
    {
        // SAFETY: The incoming iterator is infallible and there are no existing items
        Self {
            actor: (0..0).into(),
            counter: (0..0).into(),
            string: (0..0).into(),
        }
        .splice::<_, Infallible, _>(&[], 0..0, items.map(Ok), out)
        .unwrap()
    }
    /// Splice new keys into this set of keys, encoding the resulting actor, counter, and str
    /// columns in `out`.
    ///
    /// Column conventions (mirrored by `KeyIter::try_next`): a prop key
    /// populates only the string column; `Elem(Op)` populates actor and
    /// counter; `Elem(Head)` encodes as counter 0 with no actor.
    pub(crate) fn splice<'b, O, E, I>(
        &mut self,
        data: &[u8],
        replace: Range<usize>,
        replace_with: I,
        out: &mut Vec<u8>,
    ) -> Result<Self, SpliceError<raw::Error, E>>
    where
        O: convert::OpId<usize>,
        E: std::error::Error,
        I: Iterator<Item = Result<convert::Key<'b, O>, E>> + Clone,
    {
        // Actor column: only concrete element ops carry an actor id.
        let actor = self.actor.splice(
            data,
            replace.clone(),
            replace_with.clone().map(|k| {
                k.map(|k| match k {
                    convert::Key::Prop(_) => None,
                    convert::Key::Elem(convert::ElemId::Head) => None,
                    convert::Key::Elem(convert::ElemId::Op(o)) => Some(o.actor() as u64),
                })
            }),
            out,
        )?;
        // Counter column: 0 stands for the list head.
        let counter = self.counter.splice(
            data,
            replace.clone(),
            replace_with.clone().map(|k| {
                k.map(|k| match k {
                    convert::Key::Prop(_) => None,
                    convert::Key::Elem(convert::ElemId::Head) => Some(0),
                    convert::Key::Elem(convert::ElemId::Op(o)) => Some(o.counter() as i64),
                })
            }),
            out,
        )?;
        // String column: only property keys carry a name.
        let string = self.string.splice(
            data,
            replace,
            replace_with.map(|k| {
                k.map(|k| match k {
                    convert::Key::Prop(s) => Some(s),
                    convert::Key::Elem(_) => None,
                })
            }),
            out,
        )?;
        Ok(Self {
            actor,
            counter,
            string,
        })
    }
}
/// Lock-step decoder over the actor, counter and string columns of a key
/// range; yields one [`Key`] per decoded row.
#[derive(Clone, Debug)]
pub(crate) struct KeyIter<'a> {
    actor: RleDecoder<'a, u64>,
    counter: DeltaDecoder<'a>,
    string: RleDecoder<'a, smol_str::SmolStr>,
}
impl<'a> KeyIter<'a> {
    /// Decode the next key, or `Ok(None)` when all three columns are
    /// exhausted. Each column yields `Some(Some(v))` (value), `Some(None)`
    /// (explicit null) or `None` (column exhausted).
    fn try_next(&mut self) -> Result<Option<Key>, DecodeColumnError> {
        let actor = self
            .actor
            .next()
            .transpose()
            .map_err(|e| DecodeColumnError::decode_raw("actor", e))?;
        let counter = self
            .counter
            .next()
            .transpose()
            .map_err(|e| DecodeColumnError::decode_raw("counter", e))?;
        let string = self
            .string
            .next()
            .transpose()
            .map_err(|e| DecodeColumnError::decode_raw("string", e))?;
        match (actor, counter, string) {
            // A row cannot be both a map key (string) and an element id
            // (actor + counter) at the same time.
            (Some(Some(_)), Some(Some(_)), Some(Some(_))) => {
                Err(DecodeColumnError::invalid_value("key", "too many values"))
            }
            // Only the string column is set: a map property key.
            (Some(None) | None, Some(None) | None, Some(Some(string))) => {
                Ok(Some(Key::Prop(string)))
            }
            // Counter 0 with no actor: the list head, encoded as OpId(0, 0).
            (Some(None) | None, Some(Some(0)), Some(None) | None) => {
                Ok(Some(Key::Elem(ElemId(OpId::new(0, 0)))))
            }
            // Actor + counter: a concrete element id; the counter must be
            // representable as the (unsigned) OpId counter.
            (Some(Some(actor)), Some(Some(ctr)), Some(None) | None) => match ctr.try_into() {
                Ok(ctr) => Ok(Some(Key::Elem(ElemId(OpId::new(ctr, actor as usize))))),
                Err(_) => Err(DecodeColumnError::invalid_value(
                    "counter",
                    "negative value for counter",
                )),
            },
            // All columns null or exhausted: end of iteration.
            (None | Some(None), None | Some(None), None | Some(None)) => Ok(None),
            // Remaining combinations are malformed columns.
            (None | Some(None), k, _) => {
                tracing::error!(key=?k, "unexpected null actor");
                Err(DecodeColumnError::unexpected_null("actor"))
            }
            (_, None | Some(None), _) => Err(DecodeColumnError::unexpected_null("counter")),
        }
    }
}
impl<'a> Iterator for KeyIter<'a> {
    type Item = Result<Key, DecodeColumnError>;
    fn next(&mut self) -> Option<Self::Item> {
        self.try_next().transpose()
    }
}
/// Incremental encoder for keys: buffers the actor, counter and string
/// columns separately in sinks of type `S`.
pub(crate) struct KeyEncoder<S> {
    actor: RleEncoder<S, u64>,
    counter: DeltaEncoder<S>,
    string: RleEncoder<S, smol_str::SmolStr>,
}
impl KeyEncoder<Vec<u8>> {
    /// Encoder that buffers each column in its own `Vec<u8>`.
    pub(crate) fn new() -> KeyEncoder<Vec<u8>> {
        KeyEncoder {
            actor: RleEncoder::new(Vec::new()),
            counter: DeltaEncoder::new(Vec::new()),
            string: RleEncoder::new(Vec::new()),
        }
    }
    /// Finalize the three columns, append them back-to-back to `out`
    /// (actor, then counter, then string) and return the ranges they
    /// occupy within `out`.
    pub(crate) fn finish(self, out: &mut Vec<u8>) -> KeyRange {
        let actor_start = out.len();
        let (actor, _) = self.actor.finish();
        out.extend(actor);
        let actor_end = out.len();
        let (counter, _) = self.counter.finish();
        out.extend(counter);
        let counter_end = out.len();
        let (string, _) = self.string.finish();
        out.extend(string);
        let string_end = out.len();
        KeyRange {
            actor: (actor_start..actor_end).into(),
            counter: (actor_end..counter_end).into(),
            string: (counter_end..string_end).into(),
        }
    }
}
impl<S: Sink> KeyEncoder<S> {
    /// Append one key, writing a value or null to each of the three columns
    /// (mirrors the decoding scheme in `KeyIter::try_next`).
    pub(crate) fn append<O>(&mut self, key: convert::Key<'_, O>)
    where
        O: convert::OpId<usize>,
    {
        match key {
            convert::Key::Prop(p) => {
                self.string.append_value(p.clone());
                self.actor.append_null();
                self.counter.append_null();
            }
            convert::Key::Elem(convert::ElemId::Head) => {
                // The head element is encoded as counter 0 with a null actor.
                self.string.append_null();
                self.actor.append_null();
                self.counter.append_value(0);
            }
            convert::Key::Elem(convert::ElemId::Op(o)) => {
                self.string.append_null();
                self.actor.append_value(o.actor() as u64);
                self.counter.append_value(o.counter() as i64);
            }
        }
    }
}
| true |
af8ede2617d9a07b742c73a435f0f9b74088ff5c
|
Rust
|
silverweed/ecsde
|
/inle/inle_physics/src/collider.rs
|
UTF-8
| 1,223 | 2.5625 | 3 |
[] |
no_license
|
use super::layers::Collision_Layer;
use super::phys_world::{Collider_Handle, Physics_Body_Handle};
use inle_ecs::ecs_world::Entity;
use inle_math::vector::Vec2f;
/// Shape used for collision tests; `non_exhaustive` so new shapes can be
/// added without breaking downstream matches.
#[derive(Copy, Clone, Debug, PartialEq)]
#[non_exhaustive]
pub enum Collision_Shape {
    /// Axis-aligned rectangle described by its full width and height.
    Rect { width: f32, height: f32 },
    /// Circle described by its radius.
    Circle { radius: f32 },
}
impl Collision_Shape {
    /// Bounding extent of the shape: (width, height) for a rectangle and
    /// the diameter along both axes for a circle.
    pub fn extent(self) -> Vec2f {
        match self {
            Collision_Shape::Rect { width, height } => v2!(width, height),
            Collision_Shape::Circle { radius } => v2!(radius, radius) * 2.,
        }
    }
}
impl Default for Collision_Shape {
fn default() -> Collision_Shape {
Collision_Shape::Circle { radius: 0. }
}
}
/// A single collision volume belonging to an entity.
#[derive(Clone, Debug, Default)]
pub struct Collider {
    pub shape: Collision_Shape,
    // Offset of the shape relative to the body position.
    pub offset: Vec2f,
    // Static colliders are presumably not moved by the physics system —
    // TODO confirm against the solver.
    pub is_static: bool,
    pub layer: Collision_Layer,
    // Owning ECS entity.
    pub entity: Entity,
    // This is written by the Physics_World when the collider is added
    pub handle: Collider_Handle,
    // These should not be written except by the physics system.
    pub position: Vec2f,
    pub velocity: Vec2f,
}
/// ECS component linking an entity to its physics body in the Physics_World.
#[derive(Copy, Clone, Debug, Default)]
pub struct C_Collider {
    pub phys_body_handle: Physics_Body_Handle,
}
| true |
eba966bd3532b9b151bf4ffc1bb1c523fb141447
|
Rust
|
jendakol/rbackup-cli
|
/src/commands.rs
|
UTF-8
| 5,123 | 2.625 | 3 |
[] |
no_license
|
use std::fs::canonicalize;
use std::future::Future;
use std::path::PathBuf;
use std::time::Duration;
use err_context::AnyError;
use futures::StreamExt;
use futures_retry::{ErrorHandler, FutureRetry, RetryPolicy};
use log::{debug, info, warn};
use tokio::fs::File;
use tokio::prelude::*;
use url::Url;
use walkdir::WalkDir;
use crate::config::ServerSession;
use crate::connector;
use crate::utils::IterUtils;
const MAX_ATTEMPTS: usize = 3;
/// Register a new account with `username` at the server `url`.
///
/// Prompts for the password on stdin (panics if the prompt itself fails)
/// and retries the request via [`retried`].
pub async fn register(url: &Url, username: String) -> Result<(), AnyError> {
    let pass = rpassword::prompt_password_stdout("Password: ").unwrap();
    debug!("Registering to {} with username {}", url, username);
    retried(move || connector::register(url, username.clone(), pass.clone())).await?;
    info!("Registered successfully!");
    Ok(())
}
/// Log in at `url` and persist the resulting session (server URL +
/// session id) as pretty TOML to `config_file`, creating parent
/// directories as needed.
///
/// Prompts for the password on stdin (panics if the prompt itself fails).
pub async fn login(
    url: &Url,
    device_id: String,
    username: String,
    config_file: &PathBuf,
) -> Result<(), AnyError> {
    let pass = rpassword::prompt_password_stdout("Password: ").unwrap();
    debug!(
        "Logging in at {} with username '{}' and device_id '{}'",
        url, username, device_id
    );
    let session_id =
        retried(move || connector::login(url, device_id.clone(), username.clone(), pass.clone()))
            .await?;
    debug!("Logged in, session ID: {}", session_id);
    // NOTE(review): `session_id` is used again after being moved into
    // `session`, so it is presumably a Copy type (e.g. a UUID) — confirm.
    let session = ServerSession {
        url: url.clone(),
        session_id,
    };
    debug!("Saving session to {:?}: {:?}", config_file, session);
    if let Some(parent) = config_file.parent() {
        tokio::fs::create_dir_all(parent).await?;
    }
    let mut file = File::create(config_file).await?;
    file.write_all(toml::to_string_pretty(&session)?.as_bytes())
        .await?;
    info!("Logged in successfully, session ID: {}", session_id);
    Ok(())
}
/// Upload all `filenames` (directories are expanded recursively to the
/// regular files they contain), running at most `parallelism` uploads
/// concurrently. Individual failures are collected; if any upload failed,
/// each error is logged and a single aggregate error is returned.
pub async fn upload_files(
    session: ServerSession,
    parallelism: usize,
    filenames: Vec<PathBuf>,
) -> Result<(), AnyError> {
    let filenames = unfold_dirs(filenames);
    let total_count = filenames.len();
    let futures = futures::stream::iter(
        filenames
            .into_iter()
            .map(move |path| upload_file(session.clone(), path)),
    );
    // buffer_unordered bounds concurrency at `parallelism` in-flight uploads.
    let results = futures
        .buffer_unordered(parallelism)
        .collect::<Vec<_>>()
        .await
        .collect_errors();
    match results {
        Ok(_) => {
            info!("Upload of {} files was successful!", total_count);
            Ok(())
        }
        Err(errs) => {
            debug!("Could not upload all files, errors: {:?}", errs);
            for err in errs {
                warn!("Error while uploading: {:?}", err)
            }
            Err(AnyError::from("Could not upload all files"))
        }
    }
}
/// Upload a single file: canonicalize the path, retry the request via
/// [`retried`], and log the server's verdict (success, hash mismatch or
/// bad request). Server-side rejections are logged but not treated as
/// errors by the caller.
async fn upload_file(session: ServerSession, path: PathBuf) -> Result<(), AnyError> {
    let path = canonicalize(path)?;
    debug!("Uploading {:?}", path);
    retried(|| connector::upload_file(session.clone(), path.clone()))
        .await
        .map(|r| {
            use connector::UploadFileResponse::*;
            match r {
                Success(_) => info!("File {:?} was uploaded", path),
                HashMismatch(err) => warn!("Upload of {:?} was not successful: {}", path, err),
                BadRequest(err) => warn!("Upload of {:?} was not successful: {}", path, err),
            }
        })
}
/// Fetch and log the list of devices registered on the server.
pub async fn list_devices(session: ServerSession) -> Result<(), AnyError> {
    let list = retried(move || connector::list_devices(session.clone())).await?;
    info!("Remote devices list: {:?}", list);
    Ok(())
}
/// Expand any directories in `filenames` into the regular files they
/// contain (recursively, without following symlinks and without crossing
/// filesystem boundaries); non-directory paths pass through unchanged.
/// Unreadable entries are logged and skipped.
fn unfold_dirs(filenames: Vec<PathBuf>) -> Vec<PathBuf> {
    let mut result = Vec::new();
    for path in filenames {
        if !path.is_dir() {
            result.push(path);
            continue;
        }
        let walker = WalkDir::new(path).follow_links(false).same_file_system(true);
        for entry in walker {
            match entry {
                Ok(entry) => {
                    let candidate = entry.path().to_path_buf();
                    if candidate.is_file() {
                        result.push(candidate);
                    }
                }
                Err(e) => {
                    warn!("Could not open {:?}: {}", e.path(), e);
                }
            }
        }
    }
    result
}
/// Run the fallible async operation produced by `f`, retrying it according
/// to [`RetryHandler`] (up to `MAX_ATTEMPTS` attempts with exponential
/// backoff). `FutureRetry` yields `(value, attempt_count)` on success and
/// `(error, attempt_count)` on failure; only the value/error is kept.
async fn retried<F, R>(f: impl FnMut() -> F + Unpin) -> Result<R, AnyError>
where
    F: Future<Output = Result<R, AnyError>>,
{
    Ok(FutureRetry::new(f, RetryHandler)
        .await
        .map_err(|(e, _)| e)?
        .0)
}
/// Retry policy used by [`retried`]: up to `MAX_ATTEMPTS` attempts with
/// exponential backoff (2, 4, 8, ... seconds); afterwards the error is
/// forwarded to the caller.
struct RetryHandler;
impl ErrorHandler<AnyError> for RetryHandler {
    type OutError = AnyError;
    fn handle(&mut self, attempt: usize, e: AnyError) -> RetryPolicy<Self::OutError> {
        if attempt < MAX_ATTEMPTS {
            // This handler guards every remote operation (register, login,
            // upload, list), so the message must not claim "downloading";
            // the original text was also ungrammatical ("attempts rest").
            debug!(
                "Operation failed, {} attempts left: {:?}",
                MAX_ATTEMPTS - attempt,
                e
            );
            // Exponential backoff: 2^attempt seconds.
            RetryPolicy::WaitRetry(Duration::from_secs(2u64.pow(attempt as u32)))
        } else {
            RetryPolicy::ForwardError(e)
        }
    }
}
| true |
a19f345d9c86e3759be38ff22b56793f71cd3d27
|
Rust
|
rapidclock/estahr
|
/src/strings/distance.rs
|
UTF-8
| 5,854 | 3.390625 | 3 |
[
"MIT"
] |
permissive
|
use std::cmp::{max, min};
/// Dispatch helper for the Levenshtein distance: materialises both
/// iterators, short-circuits when either side is empty, and always passes
/// the shorter sequence first to `levenshtein` (which sizes its DP row
/// from its first argument).
fn levenshtein_distance<A: PartialOrd>(it_a: &mut dyn Iterator<Item=A>, it_b: &mut dyn Iterator<Item=A>) -> usize {
    let vec_a: Vec<_> = it_a.collect();
    let vec_b: Vec<_> = it_b.collect();
    if vec_a.is_empty() {
        return vec_b.len();
    }
    if vec_b.is_empty() {
        return vec_a.len();
    }
    if vec_a.len() <= vec_b.len() {
        levenshtein(vec_a, vec_b)
    } else {
        levenshtein(vec_b, vec_a)
    }
}
/// Single-row dynamic-programming Levenshtein distance.
///
/// Precondition (guaranteed by `levenshtein_distance`): both sequences are
/// non-empty and `vec_a` is not longer than `vec_b`, so the cache holds
/// one DP row of `vec_a.len()` cells.
fn levenshtein<A: PartialOrd>(vec_a: Vec<A>, vec_b: Vec<A>) -> usize {
    let width = vec_a.len();
    // First DP row: cost of deleting the first j+1 items of vec_a.
    let mut cache: Vec<usize> = (1..=width).collect();
    for (row, item_b) in vec_b.iter().enumerate() {
        // `left` is the cell to the left of the current one, `diag` the
        // cell up-and-left; `cache[col]` still holds the cell above.
        let mut left = row + 1;
        let mut diag = row;
        for (col, item_a) in vec_a.iter().enumerate() {
            let up = cache[col];
            cache[col] = if item_a.eq(item_b) {
                diag
            } else {
                left.min(diag).min(up) + 1
            };
            diag = up;
            left = cache[col];
        }
    }
    cache[width - 1]
}
/// Smallest of three values.
fn min_3(a: usize, b: usize, c: usize) -> usize {
    a.min(b).min(c)
}
/// Calculates the Levenshtein distance (edit distance) between the two
/// strings: additions, substitutions and deletions, weighted equally.
///
/// Assumes an ascii string.
pub fn levenshtein_ascii(str_a: &str, str_b: &str) -> usize {
    let mut chars_a = str_a.chars();
    let mut chars_b = str_b.chars();
    levenshtein_distance(&mut chars_a, &mut chars_b)
}
/// Calculates the Levenshtein distance (edit distance) between the two
/// byte slices: additions, substitutions and deletions, weighted equally.
pub fn levenshtein_bytes(bytes_a: &[u8], bytes_b: &[u8]) -> usize {
    let mut iter_a = bytes_a.iter();
    let mut iter_b = bytes_b.iter();
    levenshtein_distance(&mut iter_a, &mut iter_b)
}
/// Calculates the Hamming distance between the two strings; extra
/// characters of the longer string each count as one difference.
///
/// Assumes an ascii string.
pub fn hamming_ascii(str_a: &str, str_b: &str) -> usize {
    // `hamming_distance` expects the shorter sequence first.
    let (short, long) = if str_a.len() <= str_b.len() {
        (str_a, str_b)
    } else {
        (str_b, str_a)
    };
    hamming_distance(&mut short.chars(), &mut long.chars())
}
/// Calculates the Hamming distance between the two byte slices; extra
/// bytes of the longer slice each count as one difference.
pub fn hamming_bytes(bytes_a: &[u8], bytes_b: &[u8]) -> usize {
    let (short, long) = if bytes_a.len() <= bytes_b.len() {
        (bytes_a, bytes_b)
    } else {
        (bytes_b, bytes_a)
    };
    hamming_distance(&mut short.iter(), &mut long.iter())
}
fn hamming_distance<A: PartialOrd>(it_a: &mut dyn Iterator<Item=A>, it_b: &mut dyn Iterator<Item=A>) -> usize {
let mut distance: usize = 0;
for item_a in it_a {
if let Some(item_b) = it_b.next() {
if item_a.ne(&item_b) {
distance += 1;
}
}
}
for _ in it_b {
distance += 1;
}
distance
}
/// Calculates the Jaro-Winkler distance between the two strings.
///
/// 0 indicates no distance and 1 is the max distance.
///
/// Assumes an ascii string.
pub fn jaro_winkler_ascii(str_a: &str, str_b: &str) -> f64 {
    let chars_a: Vec<char> = str_a.chars().collect();
    let chars_b: Vec<char> = str_b.chars().collect();
    jaro_winkler_distance(&chars_a, &chars_b)
}
/// Calculates the Jaro-Winkler distance between the two byte slices.
///
/// 0 indicates no distance and 1 is the max distance.
pub fn jaro_winkler_bytes(bytes_a: &[u8], bytes_b: &[u8]) -> f64 {
    jaro_winkler_distance(bytes_a, bytes_b)
}
/// Jaro-Winkler distance between two sequences, rounded to two decimals:
/// 0.0 means identical, 1.0 means maximally different.
///
/// Fix: the match-window size was computed as `(max(len) / 2) - 1` in
/// `usize` arithmetic, which underflows (panicking in debug builds) when
/// both sequences have length 1; `saturating_sub` clamps it to 0 instead,
/// so single-character inputs are compared directly.
fn jaro_winkler_distance<A: PartialEq>(it_a: &[A], it_b: &[A]) -> f64 {
    let len_a = it_a.len();
    let len_b = it_b.len();
    // Basic edge cases: two empty sequences are identical, one empty
    // sequence is maximally distant from a non-empty one.
    if len_a == 0 && len_b == 0 {
        return 0.0;
    } else if len_a == 0 || len_b == 0 {
        return 1.0;
    }
    let mut matched_a = vec![false; len_a];
    let mut matched_b = vec![false; len_b];
    let mut matches = 0;
    let mut transpositions = 0.0;
    // Two items match when equal and at most `search_size` positions apart.
    let search_size = (max(len_a, len_b) / 2).saturating_sub(1);
    // Matches calculation: greedily pair each item of `it_a` with the first
    // unmatched equal item of `it_b` inside the window.
    for i in 0..len_a {
        let start = i.saturating_sub(search_size);
        let end = min(i + search_size + 1, len_b);
        for j in start..end {
            if matched_b[j] || it_a[i].ne(&it_b[j]) {
                continue;
            }
            matched_a[i] = true;
            matched_b[j] = true;
            matches += 1;
            break;
        }
    }
    if matches == 0 {
        return 1.0;
    }
    // Transpositions: matched items that appear in a different order on the
    // two sides. Both sides carry exactly `matches` set flags, so the inner
    // scan of `matched_b` cannot run past the end.
    let mut k: usize = 0;
    for i in 0..len_a {
        if !matched_a[i] {
            continue;
        }
        while k < len_b && !matched_b[k] {
            k += 1;
        }
        if it_a[i].ne(&it_b[k]) {
            transpositions += 1.0;
        }
        k += 1;
    }
    transpositions /= 2.0;
    let jaro_number = ((matches as f64 / len_a as f64)
        + (matches as f64 / len_b as f64)
        + ((matches as f64 - transpositions) / matches as f64))
        / 3.0;
    // Winkler bonus: common prefix, capped at four items.
    let mut pref_len = 0;
    for i in 0..min(4, min(len_a, len_b)) {
        if it_a[i] == it_b[i] {
            pref_len += 1;
        } else {
            break;
        }
    }
    let jaro_winkler_number = jaro_number + (pref_len as f64 * 0.1 * (1.0 - jaro_number));
    round_two_digits(1.0 - jaro_winkler_number)
}
/// Round to two decimal places.
fn round_two_digits(num: f64) -> f64 {
    (num * 100.0).round() / 100.0
}
| true |
c54600160e5d680cd8c127a91ba9b9a5682ecbc4
|
Rust
|
Jonesey13/aoc-2020
|
/day_13/part_2/src/main.rs
|
UTF-8
| 1,437 | 3.28125 | 3 |
[] |
no_license
|
use std::path::Path;
use std::fs;
// AoC 2020 day 13 part 2: find a timestamp t such that bus i (at offset i
// in the schedule) departs at t + i for every listed bus. Uses the sieve
// approach: satisfy one constraint at a time, then step by the product of
// the periods satisfied so far (bus numbers are presumably pairwise
// coprime, as in the puzzle input — TODO confirm).
fn main() {
    let time_and_buses_string = fs::read_to_string(Path::new("time_and_buses.txt"))
        .expect("Time & buses file not found!");
    let mut time_and_buses_split = time_and_buses_string.split("\n");
    let time = time_and_buses_split.next().unwrap().parse::<usize>().expect("Time not an int!");
    let buses_string = time_and_buses_split.next().expect("No Buses String!");
    // "x" entries fail to parse and are dropped; the enumerate index is the
    // bus's required offset from the answer timestamp.
    let bus_constraints: Vec<BusConstraint> = buses_string.split(",")
        .enumerate()
        .filter_map(
            |(index, val)|
            val.parse::<usize>()
                .ok()
                .and_then(|val| Some(BusConstraint::new(index, val)))
        )
        .collect();
    // NOTE(review): the search starts from the part-1 `time` value rather
    // than 0, which assumes the answer is larger than `time` — confirm
    // against the puzzle input.
    let mut timestamp = time;
    let mut step = 1;
    for constraint in bus_constraints {
        timestamp += step;
        loop {
            if constraint.validate_time(timestamp) {
                // All previously satisfied constraints stay satisfied when
                // stepping by the product of their bus numbers.
                step = step * constraint.bus_number;
                break;
            } else {
                timestamp += step;
            }
        }
    }
    println!("{}", timestamp)
}
/// A bus line plus its required offset from the candidate timestamp: the
/// constraint holds when `time + time_offset` is a multiple of `bus_number`.
#[derive(Debug)]
struct BusConstraint {
    time_offset: usize,
    bus_number: usize
}
impl BusConstraint {
    /// Constraint for the bus at position `time_offset` in the schedule.
    fn new(time_offset: usize, bus_number: usize) -> Self {
        Self { time_offset, bus_number }
    }
    /// True when this bus departs exactly `time_offset` minutes after `time`.
    fn validate_time(&self, time: usize) -> bool {
        (time + self.time_offset) % self.bus_number == 0
    }
}
| true |
a420556b72ee81955d516ecad6e0d916993886ba
|
Rust
|
sia-services/foundation-rs
|
/oracle_derive/src/internals/ast.rs
|
UTF-8
| 4,306 | 3 | 3 |
[
"MIT"
] |
permissive
|
use syn;
use syn::punctuated::Punctuated;
use super::ctx::Ctxt;
use syn::token::Token;
/// A source data structure annotated with '#[derive(Query)]'
/// parsed into an internal representation.
pub struct Container<'a> {
    /// The struct or enum name (without generics).
    pub ident: &'a syn::Ident,
    /// The contents of the struct or enum.
    pub data: Data<'a>,
    /// Any generics on the struct or enum.
    pub generics: &'a syn::Generics,
    /// Original input
    pub original: &'a syn::DeriveInput,
}
/// The fields of a struct or enum.
/// Analogous to `syn::Data`.
pub enum Data<'a> {
    /// An enum: one entry per variant.
    Enum(Vec<Variant<'a>>),
    /// A struct together with its field style.
    Struct(Style, Vec<Field<'a>>),
}
/// A variant of an enum.
pub struct Variant<'a> {
    pub ident: &'a syn::Ident,
    pub style: Style,
    pub fields: Vec<Field<'a>>,
    pub original: &'a syn::Variant,
}
/// A field of a struct
pub struct Field<'a> {
    /// Named member for braced structs, positional index otherwise.
    pub member: syn::Member,
    /// Attributes attached to the field in the source.
    pub attrs: &'a Vec<syn::Attribute>,
    pub ty: &'a syn::Type,
    pub original: &'a syn::Field,
}
/// Shape of a struct's (or variant's) fields.
#[derive(Copy, Clone, PartialEq)]
pub enum Style {
    /// Named fields.
    Struct,
    /// Many unnamed fields.
    Tuple,
    /// One unnamed field.
    Newtype,
    /// No fields
    Unit,
}
impl<'a> Container<'a> {
    /// Convert the raw Syn ast into a parsed container object, collecting
    /// errors in `cx`.
    ///
    /// Returns `None` (after reporting via `cx`) for unions, which are not
    /// supported.
    pub fn from_ast(
        cx: &Ctxt,
        item: &'a syn::DeriveInput
    ) -> Option<Container<'a>> {
        let data = match &item.data {
            syn::Data::Enum(data) => Data::Enum(enum_from_ast(cx, &data.variants)),
            syn::Data::Struct(data) => {
                let (style, fields) = struct_from_ast(cx, &data.fields);
                Data::Struct(style, fields)
            }
            syn::Data::Union(_) => {
                cx.error_spanned_by(item, "Oracle does not support query for unions");
                return None;
            }
        };
        // No mutation needed here; the previous `let mut item` only served
        // a removed validation pass and raised an unused_mut warning.
        Some(Container {
            ident: &item.ident,
            data,
            generics: &item.generics,
            original: item,
        })
    }
}
impl<'a> Data<'a> {
    /// Iterate over every field: the struct's fields, or the fields of all
    /// enum variants flattened together.
    pub fn all_fields(&'a self) -> Box<dyn Iterator<Item=&'a Field<'a>> + 'a> {
        match self {
            Data::Struct(_, fields) => Box::new(fields.iter()),
            Data::Enum(variants) => {
                Box::new(variants.iter().flat_map(|variant| variant.fields.iter()))
            }
        }
    }
    /// True only for a braced struct with named fields.
    pub fn is_struct(&'a self) -> bool {
        matches!(self, Data::Struct(Style::Struct, _))
    }
}
/// Convert each syn enum variant into our internal [`Variant`].
fn enum_from_ast<'a>(
    cx: &Ctxt,
    variants: &'a Punctuated<syn::Variant, Token![,]>,
) -> Vec<Variant<'a>> {
    variants
        .iter()
        .map(|variant| {
            let (style, fields) = struct_from_ast(cx, &variant.fields);
            Variant {
                ident: &variant.ident,
                style,
                fields,
                original: variant,
            }
        })
        .collect()
}
/// Classify a field list into its [`Style`] and convert the fields.
/// A single unnamed field is treated as a newtype.
fn struct_from_ast<'a>(
    cx: &Ctxt,
    fields: &'a syn::Fields,
) -> (Style, Vec<Field<'a>>) {
    match fields {
        syn::Fields::Named(fields) => (
            Style::Struct,
            fields_from_ast(cx, &fields.named),
        ),
        syn::Fields::Unnamed(fields) if fields.unnamed.len() == 1 => (
            Style::Newtype,
            fields_from_ast(cx, &fields.unnamed),
        ),
        syn::Fields::Unnamed(fields) => (
            Style::Tuple,
            fields_from_ast(cx, &fields.unnamed),
        ),
        syn::Fields::Unit => (Style::Unit, Vec::new()),
    }
}
/// Convert syn fields into internal [`Field`]s; unnamed fields are keyed by
/// their position.
fn fields_from_ast<'a>(
    cx: &Ctxt,
    fields: &'a Punctuated<syn::Field, Token![,]>,
) -> Vec<Field<'a>> {
    fields
        .iter()
        .enumerate()
        .map(|(i, field)| {
            let attrs = &field.attrs;
            Field {
                member: match &field.ident {
                    Some(ident) => syn::Member::Named(ident.clone()),
                    None => syn::Member::Unnamed(i.into()),
                },
                attrs,
                ty: &field.ty,
                original: field,
            }
        })
        .collect()
}
| true |
de12ec731bd2747cafc43f60aeed1158ab976a24
|
Rust
|
cginternals/yage
|
/yage-core/src/geometry/geometry.rs
|
UTF-8
| 4,441 | 3.609375 | 4 |
[
"Apache-2.0",
"MIT"
] |
permissive
|
use crate::{
Context, GpuObject, Drawable, ResourceManager,
Primitive, Buffer, VertexAttribute, Material
};
///
/// Represents a renderable geometry
///
/// A geometry represents polygonal geometry that can be rendered
/// onto the screen. It consists of the primitives that make up
/// the parts of the geometry, their buffers, vertex attributes,
/// and materials.
///
/// Resources are registered via the `add_*` methods, which return indices
/// into the respective resource managers.
pub struct Geometry {
    buffers: ResourceManager<Buffer>, // Vertex buffers
    attributes: ResourceManager<VertexAttribute>, // Vertex attributes
    materials: ResourceManager<Material>, // Materials
    primitives: Vec<Primitive>, // Geometric primitives
}
impl Geometry {
    ///
    /// Create geometry.
    ///
    /// # Returns
    /// A new, empty instance of Geometry.
    ///
    pub fn new() -> Self {
        Self {
            buffers: ResourceManager::new(),
            attributes: ResourceManager::new(),
            materials: ResourceManager::new(),
            primitives: Vec::new(),
        }
    }
    ///
    /// Get buffers.
    ///
    /// # Returns
    /// Reference to resource manager for buffers.
    ///
    pub fn buffers(&self) -> &ResourceManager<Buffer> {
        &self.buffers
    }
    ///
    /// Get buffers.
    ///
    /// # Returns
    /// Mutable reference to resource manager for buffers.
    ///
    pub fn buffers_mut(&mut self) -> &mut ResourceManager<Buffer> {
        &mut self.buffers
    }
    ///
    /// Add buffer.
    ///
    /// # Parameters
    /// - `buffer`: Vertex buffer
    ///
    /// # Returns
    /// Index of vertex buffer (used to reference the buffer from vertex
    /// attributes and primitives).
    ///
    pub fn add_buffer(&mut self, buffer: Buffer) -> usize {
        self.buffers.add(buffer)
    }
    ///
    /// Get vertex attributes.
    ///
    /// # Returns
    /// Reference to resource manager for vertex attributes.
    ///
    pub fn vertex_attributes(&self) -> &ResourceManager<VertexAttribute> {
        &self.attributes
    }
    ///
    /// Get vertex attributes.
    ///
    /// # Returns
    /// Mutable reference to resource manager for vertex attributes.
    ///
    pub fn vertex_attributes_mut(&mut self) -> &mut ResourceManager<VertexAttribute> {
        &mut self.attributes
    }
    ///
    /// Add vertex attribute.
    ///
    /// # Parameters
    /// - `attribute`: Vertex attribute
    ///
    /// # Returns
    /// Index of vertex attribute.
    ///
    pub fn add_vertex_attribute(&mut self, attribute: VertexAttribute) -> usize {
        self.attributes.add(attribute)
    }
    ///
    /// Get materials.
    ///
    /// # Returns
    /// Reference to resource manager for materials.
    ///
    pub fn materials(&self) -> &ResourceManager<Material> {
        &self.materials
    }
    ///
    /// Get materials.
    ///
    /// # Returns
    /// Mutable reference to resource manager for materials.
    ///
    pub fn materials_mut(&mut self) -> &mut ResourceManager<Material> {
        &mut self.materials
    }
    ///
    /// Add material.
    ///
    /// # Parameters
    /// - `material`: Material
    ///
    /// # Returns
    /// Index of material.
    ///
    pub fn add_material(&mut self, material: Material) -> usize {
        self.materials.add(material)
    }
    ///
    /// Add primitive to geometry.
    ///
    /// # Parameters
    /// - `primitive`: Geometry primitive
    ///
    pub fn add_primitive(&mut self, primitive: Primitive) {
        self.primitives.push(primitive);
    }
}
impl GpuObject for Geometry {
    /// Initialize GPU-side state: buffers first, then each primitive's VAO,
    /// which references the attribute and buffer tables by index.
    fn init(&mut self, context: &Context) {
        // Initialize buffers
        for buffer in self.buffers.objects_mut() {
            buffer.init(context);
        }
        // Initialize primitives
        for primitive in &mut self.primitives {
            primitive.init_vao(
                context,
                &self.attributes,
                &self.buffers
            );
        }
    }
    /// Release GPU-side state for buffers and primitive VAOs.
    fn deinit(&mut self, context: &Context) {
        // De-initialize buffers
        for buffer in self.buffers.objects_mut() {
            buffer.deinit(context);
        }
        // De-initialize primitives
        for primitive in &mut self.primitives {
            primitive.deinit_vao(context);
        }
    }
}
impl Drawable for Geometry {
    fn draw(&mut self, context: &Context) {
        // Lazy initialization
        // NOTE(review): init() is invoked on every draw call; this assumes
        // GpuObject::init is cheap/idempotent when already initialized —
        // confirm in the Buffer/Primitive implementations.
        self.init(context);
        // Draw primitives
        for primitive in &mut self.primitives {
            primitive.draw(context);
        }
    }
}
| true |
6f172dde844b9906a86f99d250cc3824204f2ef3
|
Rust
|
barlog-m/yew-bootsrap-modal-example
|
/src/app.rs
|
UTF-8
| 1,395 | 2.578125 | 3 |
[] |
no_license
|
use yew::prelude::*;
use super::modal::ModalComponent;
/// Root component: renders a button that opens a Bootstrap modal.
pub struct App {
    // Link back to the component, used to build callbacks.
    link: ComponentLink<Self>,
    // Whether the modal is currently visible.
    is_modal_shown: bool,
}
/// Messages handled by [`App`].
pub enum AppMessage {
    ModalShow,
    ModalHide,
}
impl Component for App {
    type Message = AppMessage;
    type Properties = ();
    fn create(_props: Self::Properties, link: ComponentLink<Self>) -> Self {
        App { link, is_modal_shown: false }
    }
    // Toggle modal visibility; returning true triggers a re-render.
    fn update(&mut self, msg: Self::Message) -> ShouldRender {
        match msg {
            AppMessage::ModalShow => {
                self.is_modal_shown = true;
                true
            }
            AppMessage::ModalHide => {
                self.is_modal_shown = false;
                true
            }
        }
    }
    // No properties, so there is never anything to react to.
    fn change(&mut self, _props: Self::Properties) -> ShouldRender {
        false
    }
    fn view(&self) -> Html {
        let on_click_modal = self.link.callback(|_| AppMessage::ModalShow);
        html! {
            <>
                <ModalComponent is_shown={ self.is_modal_shown } on_hide=self.link.callback(|_| AppMessage::ModalHide) />
                <div class="container">
                    <button
                        type="button"
                        class="btn btn-primary mx-auto d-block mt-5"
                        onclick=on_click_modal>
                        { "Modal" }
                    </button>
                </div>
            </>
        }
    }
}
| true |
dbcf0668b28c913be4b665a9d7154e49dad7afe0
|
Rust
|
manarimo/ICFPC2021
|
/kenkoooo/no-wasm/src/bin/globalist-optimize.rs
|
UTF-8
| 3,626 | 2.609375 | 3 |
[
"MIT"
] |
permissive
|
use anyhow::Result;
use brute_force::PathRefExt;
use manarimo_lib::geometry::{
count_contained_edges, count_contained_points, count_valid_edges, dislike, Point,
};
use manarimo_lib::types::{Bonus, Pose, Problem};
use rand::prelude::*;
use std::collections::BTreeSet;
use std::path::Path;
/// Entry point: `globalist-optimize <problem> <solution> <output> <prohibited>`
/// where `<prohibited>` is a comma-separated list of vertex indices that
/// must not be moved.
fn main() -> Result<()> {
    dotenv::dotenv()?;
    env_logger::init();
    let args: Vec<String> = std::env::args().collect();
    let problem: Problem = args[1].parse_json()?;
    let solution: Pose = args[2].parse_json()?;
    // Parse the fixed-vertex list, skipping empty segments (e.g. a
    // trailing comma or an entirely empty argument).
    let mut prohibited = BTreeSet::new();
    for s in args[4].split(',') {
        let s = s.trim();
        if s.is_empty() {
            continue;
        }
        prohibited.insert(s.parse::<usize>()?);
    }
    solve(&problem, &solution, &args[3], &prohibited)?;
    Ok(())
}
/// Endless hill-climbing loop: perturb one movable vertex at a time by a
/// unit step, keep the move only if the pose stays valid, the dislike score
/// does not exceed the starting score, and the GLOBALIST cost improves;
/// every improvement is written to `output`.
///
/// Note: this function never returns `Ok` on its own — it is meant to run
/// until killed. The mojibake `¤t_state` (an HTML entity for
/// `&curren` fused with `t_state`) has been restored to `&current_state`
/// throughout.
fn solve<P: AsRef<Path>>(
    problem: &Problem,
    solution: &Pose,
    output: P,
    prohibited: &BTreeSet<usize>,
) -> Result<()> {
    let mut rng = StdRng::seed_from_u64(717);
    let hole: Vec<Point> = problem.hole.iter().map(Point::from).collect();
    let orig_pose: Vec<Point> = problem.figure.vertices.iter().map(Point::from).collect();
    let edges = problem.figure.edges.clone();
    let hole = &hole;
    let edges = &edges;
    let orig_pose = &orig_pose;
    let mut current_state: Vec<Point> = solution.vertices.iter().map(Point::from).collect();
    let mut current_cost = calc_globalist_cost(&current_state, edges, orig_pose);
    assert!(is_valid(&current_state, hole, edges), "Can not start");
    // The dislike score is only allowed to stay at or below its start value.
    let fixed_dislike = dislike(hole, &current_state);
    loop {
        let n = current_state.len();
        let select = rng.gen_range(0..n);
        if prohibited.contains(&select) {
            continue;
        }
        let dx = rng.gen_range(-1..=1);
        let dy = rng.gen_range(-1..=1);
        // NOTE(review): this skips the (0,0) no-op but also the diagonal
        // moves (1,1) and (-1,-1) — confirm that excluding those diagonals
        // is intentional.
        if dx == dy {
            continue;
        }
        let prev = current_state[select];
        current_state[select].x += dx;
        current_state[select].y += dy;
        if !is_valid(&current_state, hole, edges) {
            current_state[select] = prev;
            continue;
        }
        if dislike(hole, &current_state) > fixed_dislike {
            current_state[select] = prev;
            continue;
        }
        let new_cost = calc_globalist_cost(&current_state, edges, orig_pose);
        if new_cost < current_cost {
            log::info!("{} -> {}", current_cost, new_cost);
            current_cost = new_cost;
            let pose = Pose {
                vertices: current_state.iter().map(|p| [p.x, p.y]).collect(),
                bonuses: Some(vec![Bonus {
                    bonus: "GLOBALIST".to_string(),
                }]),
            };
            output.write_json(&pose)?;
        } else {
            current_state[select] = prev;
        }
    }
}
/// A pose is valid when every vertex lies inside the hole and every edge is
/// fully contained in the hole (the edge check short-circuits if the point
/// check already failed).
fn is_valid(solution: &[Point], hole: &[Point], edges: &[[usize; 2]]) -> bool {
    count_contained_points(hole, solution) == solution.len()
        && count_contained_edges(hole, solution, edges) == edges.len()
}
/// GLOBALIST cost: per edge, the absolute change of `Point::d` between the
/// original pose and the candidate, scaled by 1,000,000 and normalized by
/// the original value, summed over all edges.
/// NOTE(review): `d()` is presumably the squared integer distance (matching
/// the ICFPC 2021 spec's ratio formula) — confirm in manarimo_lib.
fn calc_globalist_cost(solution: &[Point], edges: &[[usize; 2]], orig_pose: &[Point]) -> f64 {
    edges
        .iter()
        .map(|&[from, to]| {
            let p = orig_pose[from];
            let q = orig_pose[to];
            let d1 = p.d(&q);
            let p = solution[from];
            let q = solution[to];
            let d2 = p.d(&q);
            let difference = (d1 - d2).abs();
            (difference * 1_000_000) as f64 / d1 as f64
        })
        .sum::<f64>()
}
| true |
d4b956bc08523ebeec0584095cf03670bb1daf26
|
Rust
|
eldruin/tm4c-hal
|
/tm4c123x-hal/src/timer.rs
|
UTF-8
| 5,235 | 2.78125 | 3 |
[
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0",
"MIT"
] |
permissive
|
//! Timers
use crate::{
hal::timer::{CountDown, Periodic},
sysctl::{self, Clocks},
};
#[rustfmt::skip]
use tm4c123x::{
TIMER0, TIMER1, TIMER2, TIMER3, TIMER4, TIMER5,
WTIMER0, WTIMER1, WTIMER2, WTIMER3, WTIMER4, WTIMER5,
};
use tm4c_hal::time::Hertz;
use void::Void;
/// Hardware timers
pub struct Timer<TIM> {
    // Owned timer peripheral.
    tim: TIM,
    // Frozen clock configuration, used to convert a frequency into ticks.
    clocks: Clocks,
    // Currently configured countdown frequency.
    timeout: Hertz,
}
/// Interrupt events
pub enum Event {
    /// Timer timed out / count down ended
    TimeOut,
}
// Generates, for each `PERIPHERAL: (constructor_name, PowerDomain)` pair,
// the `Periodic`/`CountDown` impls and the inherent `Timer` methods
// (constructor, listen/unlisten, free) for that timer peripheral.
macro_rules! hal {
    ($($TIM:ident: ($tim:ident, $powerDomain:ident),)+) => {
        $(
            impl Periodic for Timer<$TIM> {}
            impl CountDown for Timer<$TIM> {
                type Time = Hertz;
                #[allow(unused_unsafe)]
                fn start<T>(&mut self, timeout: T)
                where
                    T: Into<Hertz>,
                {
                    // Disable timer
                    self.tim.ctl.modify(|_, w|
                        w.taen().clear_bit()
                        .tben().clear_bit()
                    );
                    self.timeout = timeout.into();
                    let frequency = self.timeout.0;
                    // Reload value in system-clock ticks (integer division
                    // rounds down).
                    let ticks = self.clocks.sysclk.0 / frequency;
                    self.tim.tav.write(|w| unsafe { w.bits(ticks) });
                    self.tim.tailr.write(|w| unsafe { w.bits(ticks) });
                    // start counter
                    self.tim.ctl.modify(|_, w|
                        w.taen().set_bit()
                    );
                }
                // Non-blocking wait: WouldBlock until the timeout flag is
                // raised, then clear it and return Ok.
                fn wait(&mut self) -> nb::Result<(), Void> {
                    if self.tim.ris.read().tatoris().bit_is_clear () {
                        Err(nb::Error::WouldBlock)
                    } else {
                        self.tim.icr.write(|w| w.tatocint().set_bit());
                        Ok(())
                    }
                }
            }
            impl Timer<$TIM> {
                // XXX(why not name this `new`?) bummer: constructors need to have different names
                // even if the `$TIM` are non overlapping (compare to the `free` function below
                // which just works)
                /// Configures a TIM peripheral as a periodic count down timer
                pub fn $tim<T>(tim: $TIM, timeout: T,
                    pc: &sysctl::PowerControl,
                    clocks: &Clocks,
                ) -> Self
                where
                    T: Into<Hertz>,
                {
                    // power up
                    sysctl::control_power(
                        pc, sysctl::Domain::$powerDomain,
                        sysctl::RunMode::Run, sysctl::PowerState::On);
                    sysctl::reset(pc, sysctl::Domain::$powerDomain);
                    // Stop Timers
                    tim.ctl.write(|w|
                        w.taen().clear_bit()
                        .tben().clear_bit()
                        .tastall().set_bit()
                    );
                    // GPTMCFG = 0x0 (chained - 2x16 = 32bits) This
                    // will not force 32bits wide timer, this will
                    // really force the wider range to be used (32 for
                    // 16/32bits timers, 64 for 32/64).
                    tim.cfg.write(|w| w.cfg()._32_bit_timer());
                    tim.tamr.write(|w| w.tamr().period());
                    let mut timer = Timer {
                        tim,
                        clocks: *clocks,
                        timeout: Hertz(0),
                    };
                    timer.start(timeout);
                    timer
                }
                /// Starts listening for an `event`
                pub fn listen(&mut self, event: Event) {
                    match event {
                        Event::TimeOut => {
                            // Enable update event interrupt
                            self.tim.imr.modify(|_,w| w.tatoim().set_bit());
                        }
                    }
                }
                /// Stops listening for an `event`
                pub fn unlisten(&mut self, event: Event) {
                    match event {
                        Event::TimeOut => {
                            // Disable update event interrupt
                            self.tim.imr.modify(|_,w| w.tatoim().clear_bit());
                        }
                    }
                }
                /// Releases the TIM peripheral
                pub fn free(self) -> $TIM {
                    // pause counter
                    self.tim.ctl.write(|w|
                        w.taen().clear_bit()
                        .tben().clear_bit());
                    self.tim
                }
            }
        )+
    }
}
// Instantiate timer support for the six 16/32-bit and six 32/64-bit (wide)
// timer peripherals.
hal! {
    TIMER0: (timer0, Timer0),
    TIMER1: (timer1, Timer1),
    TIMER2: (timer2, Timer2),
    TIMER3: (timer3, Timer3),
    TIMER4: (timer4, Timer4),
    TIMER5: (timer5, Timer5),
    WTIMER0: (wtimer0, WideTimer0),
    WTIMER1: (wtimer1, WideTimer1),
    WTIMER2: (wtimer2, WideTimer2),
    WTIMER3: (wtimer3, WideTimer3),
    WTIMER4: (wtimer4, WideTimer4),
    WTIMER5: (wtimer5, WideTimer5),
}
| true |
42a0a160159ca0a8927627be96eea4fc8bc220b7
|
Rust
|
meow-chip/MeowSBI
|
/src/platform/qemu.rs
|
UTF-8
| 2,944 | 2.640625 | 3 |
[] |
no_license
|
use super::PlatformOps;
use crate::utils::clint::CLINT;
use crate::utils::uart::UART16550;
/// Platform description for the QEMU `virt` machine: the hart this instance
/// runs on, its 16550 UART and the CLINT (timer + software interrupts).
pub struct QEMU {
    hartid: usize,
    serial: UART16550,
    clint: CLINT,
}
impl PlatformOps for QEMU {
    /// Build the platform description from the flattened device tree:
    /// locate the CLINT and the ns16550 UART (base address, register
    /// shift/offset, clock frequency and baud rate), falling back to the
    /// QEMU `virt` defaults for anything missing.
    fn new(hartid: usize, fdt: fdt::FDT) -> Self {
        use core::convert::TryInto;
        let mut clint_addr = None;
        let mut uart_addr = None;
        let mut uart_shift: Option<usize> = None;
        let mut uart_offset: Option<usize> = None;
        let mut uart_clk: Option<usize> = None;
        let mut uart_baud: Option<usize> = None;
        crate::serial::early_print("Parsing FDT\n");
        for node in fdt.nodes() {
            if node.is_compatible_with("clint0") {
                // First address-sized cell of "reg" is the base address.
                let addr = node
                    .property("reg")
                    .map(|p| p.raw().split_at(core::mem::size_of::<usize>()).0);
                clint_addr = addr.map(|p| usize::from_be_bytes(p.try_into().unwrap()));
            // Fixed: the second compatible string was "na16550", which
            // matches no known device-tree binding; "ns16550" is the
            // standard non-"a" variant.
            } else if node.is_compatible_with("ns16550a") || node.is_compatible_with("ns16550") {
                let addr = node
                    .property("reg")
                    .map(|p| p.raw().split_at(core::mem::size_of::<usize>()).0);
                uart_addr = addr.map(|p| usize::from_be_bytes(p.try_into().unwrap()));
                uart_offset = node.property("reg-offset")
                    .and_then(|p| p.as_u32().ok())
                    .map(|r| r as _);
                uart_shift = node.property("reg-shift")
                    .and_then(|p| p.as_u32().ok())
                    .map(|r| r as _);
                uart_clk = node.property("clock-frequency")
                    .and_then(|p| p.as_u32().ok())
                    .map(|r| r as _);
                uart_baud = node.property("current-speed")
                    .and_then(|p| p.as_u32().ok())
                    .map(|r| r as _);
            }
        }
        QEMU {
            hartid,
            serial: UART16550::new(
                uart_addr.unwrap_or(0x10000000) + uart_offset.unwrap_or(0),
                uart_shift.unwrap_or(0),
                uart_clk.unwrap_or(11_059_200) as _,
                // Fixed: the baud argument previously passed `uart_clk`,
                // leaving the parsed "current-speed" (`uart_baud`) unused.
                uart_baud.unwrap_or(115200) as _,
            ),
            clint: CLINT::new(clint_addr.unwrap_or(0x2000000) as *mut u8, hartid),
        }
    }
    /// One-time hardware bring-up; hart 0 initializes the shared CLINT and
    /// the UART, every hart then sets up its own CLINT state.
    fn early_init(&self, _cold: bool) {
        if self.hartid == 0 {
            self.clint.setup_leader();
            self.serial.init();
        }
        // TODO: barrier here
        self.clint.setup();
    }
    fn set_timer(&self, instant: u64) {
        self.clint.set_timer(instant);
    }
    fn put_char(&self, c: u8) {
        self.serial.putchar(c)
    }
    fn get_char(&self) -> u8 {
        self.serial.getchar()
    }
    fn send_ipi(&self, hartid: usize) {
        self.clint.send_soft(hartid);
    }
    fn clear_ipi(&self) {
        self.clint.clear_soft();
    }
}
| true |
abf8186e9f3a8bdf262bf3d7469849fb51035199
|
Rust
|
nambrosini/adventofcode
|
/2022/src/day13.rs
|
UTF-8
| 3,289 | 3.265625 | 3 |
[] |
no_license
|
use itertools::Itertools;
use nom::branch::alt;
use nom::bytes::complete::tag;
use nom::character::complete::i32;
use nom::combinator::map;
use nom::multi::separated_list0;
use nom::sequence::delimited;
use nom::IResult;
use std::cmp::Ordering;
#[aoc_generator(day13)]
/// Parse the input: blank-line-separated pairs, each holding exactly two
/// packet lines, flattened into one `Vec<Packet>`.
pub fn generator(input: &str) -> Vec<Packet> {
    input
        .split("\n\n")
        .flat_map(|block| {
            let mut lines = block.lines();
            let left = parse_packet(lines.next().unwrap()).unwrap().1;
            let right = parse_packet(lines.next().unwrap()).unwrap().1;
            [left, right]
        })
        .collect()
}
#[aoc(day13, part1)]
/// Sum the 1-based indices of the packet pairs that are already in order.
pub fn part1(packets: &[Packet]) -> usize {
    let mut total = 0;
    for (pair_idx, pair) in packets.chunks(2).enumerate() {
        if pair[0] < pair[1] {
            total += pair_idx + 1;
        }
    }
    total
}
#[aoc(day13, part2)]
/// Insert the two divider packets `[[2]]` and `[[6]]`, sort everything, and
/// multiply the dividers' 1-based positions.
pub fn part2(packets: &[Packet]) -> usize {
    let divider_a = Packet::List(vec![Packet::List(vec![Packet::Int(2)])]);
    let divider_b = Packet::List(vec![Packet::List(vec![Packet::Int(6)])]);
    let mut all = packets.to_vec();
    all.push(divider_a.clone());
    all.push(divider_b.clone());
    all.sort();
    all.iter()
        .enumerate()
        .filter(|(_, p)| **p == divider_a || **p == divider_b)
        .map(|(i, _)| i + 1)
        .product()
}
/// A distress-signal packet: either a bare integer or a nested list.
/// `Clone` is required by the int-to-list promotion performed during
/// comparison (see `PartialOrd` below).
#[derive(Debug, Clone)]
pub enum Packet {
    Int(i32),
    List(Vec<Packet>),
}
/// Recursive-descent parser (nom): a packet is either a signed integer or a
/// bracketed, comma-separated — possibly empty — list of packets.
fn parse_packet(value: &str) -> IResult<&str, Packet> {
    alt((
        map(i32, Packet::Int),
        map(
            delimited(tag("["), separated_list0(tag(","), parse_packet), tag("]")),
            Packet::List,
        ),
    ))(value)
}
impl Eq for Packet {}
impl PartialEq<Self> for Packet {
    /// Two packets are equal when the total order says `Ordering::Equal`
    /// (`cmp` delegates to `partial_cmp(..).unwrap()`, so the execution
    /// trace is identical to comparing via `partial_cmp` directly).
    fn eq(&self, other: &Self) -> bool {
        self.cmp(other) == Ordering::Equal
    }
}
impl PartialOrd for Packet {
    /// Ordering rules from the puzzle: integers compare numerically, a bare
    /// integer is promoted to a one-element list when compared with a list,
    /// and lists compare element-wise with length as the tie-breaker.
    /// Every match arm produces `Some(..)`, so this never returns `None`.
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        match (self, other) {
            (Packet::Int(l), Packet::Int(r)) => l.partial_cmp(r),
            (Packet::List(_), Packet::Int(_)) => {
                // Wrap the bare int on the right in a list and retry.
                self.partial_cmp(&Packet::List(vec![other.clone()]))
            }
            (Packet::Int(_), Packet::List(_)) => {
                // Wrap the bare int on the left in a list and retry.
                Packet::List(vec![self.clone()]).partial_cmp(other)
            }
            (Packet::List(l), Packet::List(r)) => {
                // First differing element decides; otherwise shorter < longer.
                for (e1, e2) in l.iter().zip(r) {
                    if let Some(res) = e1.partial_cmp(e2) {
                        if res != Ordering::Equal {
                            return Some(res);
                        }
                    }
                }
                l.len().partial_cmp(&r.len())
            }
        }
    }
}
impl Ord for Packet {
    /// Total order delegating to `partial_cmp`, which never returns `None`
    /// (see the exhaustive match above), so the `unwrap` cannot panic.
    fn cmp(&self, other: &Self) -> Ordering {
        self.partial_cmp(other).unwrap()
    }
}
#[test]
fn test() {
let s = "[1,1,3,1,1]
[1,1,5,1,1]
[[1],[2,3,4]]
[[1],4]
[9]
[[8,7,6]]
[[4,4],4,4]
[[4,4],4,4,4]
[7,7,7,7]
[7,7,7]
[]
[3]
[[[]]]
[[]]
[1,[2,[3,[4,[5,6,7]]]],8,9]
[1,[2,[3,[4,[5,6,0]]]],8,9]";
assert_eq!(13, part1(&generator(s)));
}
#[test]
fn test2() {
let s = "[1,1,3,1,1]
[1,1,5,1,1]
[[1],[2,3,4]]
[[1],4]
[9]
[[8,7,6]]
[[4,4],4,4]
[[4,4],4,4,4]
[7,7,7,7]
[7,7,7]
[]
[3]
[[[]]]
[[]]
[1,[2,[3,[4,[5,6,7]]]],8,9]
[1,[2,[3,[4,[5,6,0]]]],8,9]";
assert_eq!(140, part2(&generator(s)));
}
| true |
9cf616f59d0dbe777abdcbda7dbbf6152f6ccd3c
|
Rust
|
agemor/sage
|
/src/tensor/ops/stats.rs
|
UTF-8
| 2,486 | 2.96875 | 3 |
[
"Apache-2.0"
] |
permissive
|
use crate::tensor::backend::{BinaryIndexOperation, BinaryOperation};
use crate::tensor::shape::{ToIndex, ToIndices};
use crate::tensor::Tensor;
impl Tensor {
    /// Sum over `axes`; `retain_axis` keeps each reduced axis as a size-1 dim.
    pub fn sum<Is>(&self, axes: Is, retain_axis: bool) -> Tensor
    where
        Is: ToIndices,
    {
        self.reduce(BinaryOperation::Add, axes, retain_axis)
    }
    /// Element-wise maximum over `axes`.
    pub fn max<Is>(&self, axes: Is, retain_axis: bool) -> Tensor
    where
        Is: ToIndices,
    {
        self.reduce(BinaryOperation::Max, axes, retain_axis)
    }
    /// Element-wise minimum over `axes`.
    pub fn min<Is>(&self, axes: Is, retain_axis: bool) -> Tensor
    where
        Is: ToIndices,
    {
        self.reduce(BinaryOperation::Min, axes, retain_axis)
    }
    /// Arithmetic mean over `axes`: sum divided by the number of reduced
    /// elements.
    pub fn mean<Is>(&self, axes: Is, retain_axis: bool) -> Tensor
    where
        Is: ToIndices,
    {
        let axes = axes.to_indices(self.order());
        // Number of elements collapsed by the reduction.
        let size = axes.iter().map(|a| self.shape[a]).product::<usize>();
        self.sum(axes, retain_axis) / size as f32
    }
    /// Sample variance over `axes` (Bessel's correction: divides by n - 1).
    /// NOTE(review): when the reduced size is 1 this divides by 0.0 in f32
    /// (yielding inf/NaN rather than panicking) — confirm intended.
    pub fn variance<Is>(&self, axes: Is, retain_axis: bool) -> Tensor
    where
        Is: ToIndices,
    {
        let axes = axes.to_indices(self.order());
        let size = axes.iter().map(|a| self.shape[a]).product::<usize>();
        // Mean is computed with retained axes so it broadcasts against self.
        (self - self.mean(axes.as_ref(), true))
            .pow(2.0)
            .sum(axes.as_ref(), retain_axis)
            / (size - 1) as f32
    }
    /// Index of the maximum along a single `axis`.
    pub fn argmax<I>(&self, axis: I, retain_axis: bool) -> Tensor
    where
        I: ToIndex,
    {
        self.reduce_index(BinaryIndexOperation::Max, axis, retain_axis)
    }
    /// Index of the minimum along a single `axis`.
    pub fn argmin<I>(&self, axis: I, retain_axis: bool) -> Tensor
    where
        I: ToIndex,
    {
        self.reduce_index(BinaryIndexOperation::Min, axis, retain_axis)
    }
}
#[cfg(test)]
mod test {
    use super::*;
    #[test]
    fn test_max_axis() {
        let a = Tensor::from_elem([3, 2, 5], 10.0);
        let b = Tensor::from_elem([3, 2, 5], 3.0);
        // Concatenate along axis 1 -> (3, 4, 5).
        let c = Tensor::concat(&[&a, &b], 1).unwrap();
        // Fixed: the reduction defined above is `max`, not `max_axis`.
        assert_eq!(c.max(1, true), Tensor::from_elem([3, 1, 5], 10.0));
    }
    #[test]
    fn test_argmax() {
        let a = Tensor::from_slice(
            [3, 5],
            &[
                0.37894, -1.43962, -0.03472, 1.50011, 1.10574, 1.20776, -0.74392, -0.10786,
                0.48039, -0.82024, -0.62761, -0.94768, 0.75950, 1.23026, 1.93393,
            ],
        );
        let b = Tensor::from_slice([5], &[1., 1., 2., 0., 2.]);
        // Fixed: `argmax` takes (axis, retain_axis); the old single-argument
        // call could not compile. `false` drops the reduced axis to match `b`.
        assert!(a.argmax(0, false).equals(&b, 0.001));
    }
}
| true |
61df68388397092a4c5c65488e9b14e51fdacb02
|
Rust
|
j4ckzh0u/usbip
|
/src/device.rs
|
UTF-8
| 17,513 | 2.671875 | 3 |
[
"MIT"
] |
permissive
|
use super::*;
/// Represent a USB device
#[derive(Clone, Default)]
pub struct UsbDevice {
pub path: String,
pub bus_id: String,
pub bus_num: u32,
pub dev_num: u32,
pub speed: u32,
pub vendor_id: u16,
pub product_id: u16,
pub device_bcd: u16,
pub device_class: u8,
pub device_subclass: u8,
pub device_protocol: u8,
pub configuration_value: u8,
pub num_configurations: u8,
pub interfaces: Vec<UsbInterface>,
pub(crate) ep0_in: UsbEndpoint,
pub(crate) ep0_out: UsbEndpoint,
// strings
pub(crate) string_pool: HashMap<u8, String>,
pub(crate) string_configuration: u8,
pub(crate) string_manufacturer: u8,
pub(crate) string_product: u8,
pub(crate) string_serial: u8,
}
impl UsbDevice {
pub fn new(index: u32) -> Self {
let mut res = Self {
path: format!("/sys/device/usbip/{}", index),
bus_id: format!("{}", index),
dev_num: index,
speed: UsbSpeed::High as u32,
ep0_in: UsbEndpoint {
address: 0x80,
attributes: EndpointAttributes::Control as u8,
max_packet_size: EP0_MAX_PACKET_SIZE,
interval: 0,
},
ep0_out: UsbEndpoint {
address: 0x00,
attributes: EndpointAttributes::Control as u8,
max_packet_size: EP0_MAX_PACKET_SIZE,
interval: 0,
},
// configured by default
configuration_value: 1,
num_configurations: 1,
..Self::default()
};
res.string_configuration = res.new_string("Default Configuration");
res.string_manufacturer = res.new_string("Manufacturer");
res.string_product = res.new_string("Product");
res.string_serial = res.new_string("Serial");
res
}
pub fn with_interface(
mut self,
interface_class: u8,
interface_subclass: u8,
interface_protocol: u8,
name: &str,
endpoints: Vec<UsbEndpoint>,
handler: Arc<Mutex<Box<dyn UsbInterfaceHandler + Send>>>,
) -> Self {
let string_interface = self.new_string(name);
let class_specific_descriptor = handler.lock().unwrap().get_class_specific_descriptor();
self.interfaces.push(UsbInterface {
interface_class,
interface_subclass,
interface_protocol,
endpoints,
string_interface,
class_specific_descriptor,
handler,
});
self
}
pub(crate) fn new_string(&mut self, s: &str) -> u8 {
for i in 1.. {
if self.string_pool.get(&i).is_none() {
self.string_pool.insert(i, s.to_string());
return i;
}
}
panic!("string poll exhausted")
}
    /// Resolve an endpoint address to its descriptor and owning interface.
    ///
    /// Returns `None` when no endpoint matches. The interface is `None` for
    /// the two control endpoints (EP0 IN/OUT), which belong to the device
    /// itself rather than to any interface.
    pub(crate) fn find_ep(&self, ep: u8) -> Option<(UsbEndpoint, Option<&UsbInterface>)> {
        if ep == self.ep0_in.address {
            Some((self.ep0_in, None))
        } else if ep == self.ep0_out.address {
            Some((self.ep0_out, None))
        } else {
            // Linear scan over every interface's endpoints; the address byte
            // already encodes direction + number, so plain equality suffices.
            for intf in &self.interfaces {
                for endpoint in &intf.endpoints {
                    if endpoint.address == ep {
                        return Some((*endpoint, Some(intf)));
                    }
                }
            }
            None
        }
    }
pub(crate) async fn write_dev<T: AsyncReadExt + AsyncWriteExt + Unpin>(
&self,
socket: &mut T,
) -> Result<()> {
socket_write_fixed_string(socket, &self.path, 256).await?;
socket_write_fixed_string(socket, &self.bus_id, 32).await?;
// fields
socket.write_u32(self.bus_num).await?;
socket.write_u32(self.dev_num).await?;
socket.write_u32(self.speed).await?;
socket.write_u16(self.vendor_id).await?;
socket.write_u16(self.product_id).await?;
socket.write_u16(self.device_bcd).await?;
socket.write_u8(self.device_class).await?;
socket.write_u8(self.device_subclass).await?;
socket.write_u8(self.device_protocol).await?;
socket.write_u8(self.configuration_value).await?;
socket.write_u8(self.num_configurations).await?;
socket.write_u8(self.interfaces.len() as u8).await?;
Ok(())
}
pub(crate) async fn write_dev_with_interfaces<T: AsyncReadExt + AsyncWriteExt + Unpin>(
&self,
socket: &mut T,
) -> Result<()> {
self.write_dev(socket).await?;
for interface in &self.interfaces {
socket.write_u8(interface.interface_class).await?;
socket.write_u8(interface.interface_subclass).await?;
socket.write_u8(interface.interface_protocol).await?;
// padding
socket.write_u8(0).await?;
}
Ok(())
}
pub(crate) async fn handle_urb<T: AsyncReadExt + AsyncWriteExt + Unpin>(
&self,
socket: &mut T,
ep: UsbEndpoint,
intf: Option<&UsbInterface>,
transfer_buffer_length: u32,
setup: [u8; 8],
) -> Result<Vec<u8>> {
use DescriptorType::*;
use Direction::*;
use EndpointAttributes::*;
use StandardRequest::*;
// parse setup
let setup_packet = SetupPacket::parse(&setup);
// read data from socket for OUT
let out_data = if let Out = ep.direction() {
let mut data = vec![0u8; transfer_buffer_length as usize];
socket.read_exact(&mut data).await?;
data
} else {
vec![]
};
match (FromPrimitive::from_u8(ep.attributes), ep.direction()) {
(Some(Control), In) => {
// control in
debug!("Control IN setup={:x?}", setup_packet);
match (
setup_packet.request_type,
FromPrimitive::from_u8(setup_packet.request),
) {
(0b10000000, Some(GetDescriptor)) => {
// high byte: type
match FromPrimitive::from_u16(setup_packet.value >> 8) {
Some(Device) => {
debug!("Get device descriptor");
let mut desc = vec![
0x12, // bLength
Device as u8, // bDescriptorType: Device
0x10,
0x02, // bcdUSB: USB 2.1
self.device_class, // bDeviceClass
self.device_subclass, // bDeviceSubClass
self.device_protocol, // bDeviceProtocol
self.ep0_in.max_packet_size as u8, // bMaxPacketSize0
self.vendor_id as u8, // idVendor
(self.vendor_id >> 8) as u8,
self.product_id as u8, // idProduct
(self.product_id >> 8) as u8,
self.device_bcd as u8, // bcdDevice
(self.device_bcd >> 8) as u8,
self.string_manufacturer, // iManufacturer
self.string_product, // iProduct
self.string_serial, // iSerial
self.num_configurations,
];
// requested len too short: wLength < real length
if setup_packet.length < desc.len() as u16 {
desc.resize(setup_packet.length as usize, 0);
}
return Ok(desc);
}
Some(BOS) => {
debug!("Get BOS descriptor");
let mut desc = vec![
0x05, // bLength
BOS as u8, // bDescriptorType: BOS
0x05, 0x00, // wTotalLength
0x00, // bNumCapabilities
];
// requested len too short: wLength < real length
if setup_packet.length < desc.len() as u16 {
desc.resize(setup_packet.length as usize, 0);
}
return Ok(desc);
}
Some(Configuration) => {
debug!("Get configuration descriptor");
let mut desc = vec![
0x09, // bLength
Configuration as u8, // bDescriptorType: Configuration
0x00,
0x00, // wTotalLength: to be filled below
self.interfaces.len() as u8, // bNumInterfaces
self.configuration_value, // bConfigurationValue
self.string_configuration, // iConfiguration
0x80, // bmAttributes Bus Powered
0x32, // bMaxPower 100mA
];
for (i, intf) in self.interfaces.iter().enumerate() {
let mut intf_desc = vec![
0x09, // bLength
Interface as u8, // bDescriptorType: Interface
i as u8, // bInterfaceNum
0x00, // bAlternateSettings
intf.endpoints.len() as u8, // bNumEndpoints
intf.interface_class, // bInterfaceClass
intf.interface_subclass, // bInterfaceSubClass
intf.interface_protocol, // bInterfaceProtocol
intf.string_interface, //iInterface
];
// class specific endpoint
let mut specific = intf.class_specific_descriptor.clone();
intf_desc.append(&mut specific);
// endpoint descriptors
for endpoint in &intf.endpoints {
let mut ep_desc = vec![
0x07, // bLength
Endpoint as u8, // bDescriptorType: Endpoint
endpoint.address, // bEndpointAddress
endpoint.attributes, // bmAttributes
endpoint.max_packet_size as u8,
(endpoint.max_packet_size >> 8) as u8, // wMaxPacketSize
endpoint.interval, // bInterval
];
intf_desc.append(&mut ep_desc);
}
desc.append(&mut intf_desc);
}
// length
let len = desc.len() as u16;
desc[2] = len as u8;
desc[3] = (len >> 8) as u8;
// requested len too short: wLength < real length
if setup_packet.length < desc.len() as u16 {
desc.resize(setup_packet.length as usize, 0);
}
return Ok(desc);
}
Some(String) => {
debug!("Get string descriptor");
let index = setup_packet.value as u8;
if index == 0 {
// language ids
let mut desc = vec![
4, // bLength
DescriptorType::String as u8, // bDescriptorType
0x09,
0x04, // bLANGID, en-US
];
// requested len too short: wLength < real length
if setup_packet.length < desc.len() as u16 {
desc.resize(setup_packet.length as usize, 0);
}
return Ok(desc);
} else {
let s = &self.string_pool[&index];
let bytes: Vec<u16> = s.encode_utf16().collect();
let mut desc = vec![
(2 + bytes.len() * 2) as u8, // bLength
DescriptorType::String as u8, // bDescriptorType
];
for byte in bytes {
desc.push(byte as u8);
desc.push((byte >> 8) as u8);
}
// requested len too short: wLength < real length
if setup_packet.length < desc.len() as u16 {
desc.resize(setup_packet.length as usize, 0);
}
return Ok(desc);
}
}
Some(DeviceQualifier) => {
debug!("Get device qualifier descriptor");
let mut desc = vec![
0x0A, // bLength
DeviceQualifier as u8, // bDescriptorType: Device Qualifier
0x10,
0x02, // bcdUSB USB 2.1
self.device_class, // bDeviceClass
self.device_subclass, // bDeviceSUbClass
self.device_protocol, // bDeviceProtocol
self.ep0_in.max_packet_size as u8, // bMaxPacketSize0
self.num_configurations, // bNumConfigurations
0x00, // reserved
];
// requested len too short: wLength < real length
if setup_packet.length < desc.len() as u16 {
desc.resize(setup_packet.length as usize, 0);
}
return Ok(desc);
}
_ => {
warn!("unknown desc type: {:x?}", setup_packet);
return Ok(vec![]);
}
}
}
_ if setup_packet.request_type & 0xF == 1 => {
// to interface
// see https://www.beyondlogic.org/usbnutshell/usb6.shtml
// only low 8 bits are valid
let intf = &self.interfaces[setup_packet.index as usize & 0xFF];
let mut handler = intf.handler.lock().unwrap();
let resp = handler.handle_urb(intf, ep, setup_packet, &out_data)?;
return Ok(resp);
}
_ => unimplemented!("control in"),
}
}
(Some(Control), Out) => {
// control out
debug!("Control OUT setup={:x?}", setup_packet);
}
(Some(_), _) => {
// others
let intf = intf.unwrap();
let mut handler = intf.handler.lock().unwrap();
let resp = handler.handle_urb(intf, ep, setup_packet, &out_data)?;
return Ok(resp);
}
_ => unimplemented!("transfer to {:?}", ep),
}
Ok(vec![])
}
}
| true |
36c8321ebc45e5e73c8cf6767064d9978ead5605
|
Rust
|
yorkart/collector-rs
|
/src/utils/file.rs
|
UTF-8
| 3,968 | 2.859375 | 3 |
[
"Apache-2.0"
] |
permissive
|
use std::io;
use std::fmt;
use std::fmt::Formatter;
use std::error;
use std::fs::File;
use std::io::Read;
use std::io::Seek;
use bytes::BytesMut;
use bytes::BufMut;
#[cfg(not(target_os = "windows"))]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct StatOS {
pub inode: u64,
pub device: u64,
}
#[cfg(not(target_os = "windows"))]
/// Return the inode/device pair identifying the file at `path`.
///
/// NOTE(review): the cfg covers every non-Windows target, but
/// `std::os::linux` only exists on Linux — macOS/BSD builds would need
/// `std::os::unix::fs::MetadataExt` (`ino()`/`dev()`) instead; confirm the
/// supported targets.
pub fn get_file_stat(path: &str) -> Result<StatOS, FSError> {
    use std::os::linux::fs::MetadataExt;
    // `File::open` borrows the path; no need to clone the &str first.
    let metadata = File::open(path)?.metadata()?;
    Ok(StatOS {
        // `st_ino()`/`st_dev()` already return u64; no casts required.
        inode: metadata.st_ino(),
        device: metadata.st_dev(),
    })
}
#[cfg(target_os = "windows")]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct StatOS {
pub idx_hi: u64,
pub idx_lo: u64,
pub vol: u64,
}
#[cfg(target_os = "windows")]
// NOTE(review): this cannot compile on Windows. The Windows variant of
// `StatOS` (declared above) has fields `idx_hi`/`idx_lo`/`vol`, but the
// struct is built here with `inode`/`device`; additionally
// `std::os::windows::fs::MetadataExt` has no `st_ino`/`st_dev` — those are
// the Linux accessor names. The file-index / volume-serial accessors are
// gated behind the unstable `windows_by_handle` feature; this needs a real
// fix on a Windows toolchain.
pub fn get_file_stat(path :&str) -> Result<StatOS, FSError> {
    use std::os::windows::fs::MetadataExt;
    let file = File::open(path.to_owned())?;
    let metadata = file.metadata()?;
    let stat_os = StatOS{
        inode: metadata.st_ino() as u64,
        device: metadata.st_dev(),
    };
    Ok(stat_os)
}
pub struct Reader {
file : File,
}
impl Reader {
pub fn new(path :&str) -> Result<Reader, FSError> {
let file = File::open(path.to_owned())?;
Ok(Reader{file})
}
pub fn watch(&self) {
loop {
}
}
pub fn read_chunk(&mut self, size: usize, byte_mut: &mut BytesMut) -> Option<usize> {
let len = self.file.metadata().unwrap().len();
println!("file len : {}", len);
let mut buffer = Vec::new();
unsafe {
buffer.reserve(size);
buffer.set_len(size);
}
//
// let mut buffer = [0; 1024];
match self.file.read(&mut buffer[..size]) {
Ok(0) => {
return None
}
Ok(len) => {
// println!("read len {} / {}", len, buffer.len());
byte_mut.reserve(size);
byte_mut.put_slice(&buffer[..len]);
//self.file.seek(io::SeekFrom::Current(0)).unwrap();
Some(len)
},
Err(e) => {
println!("error {:?}", e);
return None
},
}
}
}
#[derive(Debug)]
pub enum FSError {
Io(io::Error),
}
impl fmt::Display for FSError {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
match *self {
FSError::Io(ref err) => write!(f, "IO error: {}", err),
}
}
}
impl error::Error for FSError {
    /// Expose the wrapped I/O error so callers can walk the error chain.
    /// `source` supersedes the deprecated `description`/`cause` methods,
    /// whose default implementations now delegate appropriately.
    fn source(&self) -> Option<&(dyn error::Error + 'static)> {
        match *self {
            FSError::Io(ref err) => Some(err),
        }
    }
}
impl From<io::Error> for FSError {
fn from(e: io::Error) -> Self {
FSError::Io(e)
}
}
#[cfg(test)]
mod tests {
use super::*;
use std::thread;
#[test]
pub fn get_file_stat_test() {
let t = get_file_stat("/data/workspace/rust/collector-rs/log/collector-rs.log");
match t {
Ok(stat_os) => println!("{:?}", stat_os),
Err(e) => println!("error {:?}", e)
}
let mut reader = Reader::new("/data/workspace/rust/collector-rs/log/collector-rs.log").unwrap();
let mut bs = BytesMut::new();
loop {
match reader.read_chunk(104, &mut bs) {
Some(_) => {
let len = bs.len();
let a = bs.split_to(len);
for x in a {
print!("{}", x as char);
}
// println!(" ");
},
None => {
thread::sleep_ms(1000);
},
}
}
println!("END");
}
}
| true |
f2d858d39181675d7ce2a0a5aa4ded28a97789fc
|
Rust
|
Bastczuak/PiXr
|
/examples/network/main.rs
|
UTF-8
| 1,532 | 2.953125 | 3 |
[
"MIT"
] |
permissive
|
extern crate PiXr;
use PiXr::{run, Pix, PixGameLoop, PixMsgPack};
struct Game {
t: f32,
text: String,
chat: Vec<String>,
}
impl PixGameLoop for Game {
fn on_init(&mut self, pix: &mut Pix) -> Result<(), String> {
pix.open_socket(4055, true)?;
pix.screen(256, 240, "PiX Chat")
}
fn on_update(&mut self, pix: &mut Pix, dt: f32) -> Result<(), String> {
pix.clear(0);
for (y, message) in self.chat.iter().enumerate() {
pix.print(11.0, 0.0, y as f32 * 8.0, message.as_str())?;
}
let (w, h) = pix.dimension();
pix.line(1.0, 0.0, h - 10.0, w - 1.0, h - 10.0)?;
pix.print(14.0, 0.0, h - 9.0, self.text.as_str())?;
Ok(())
}
  /// Keyboard handling: Escape quits, Backspace deletes the last typed
  /// character, Return broadcasts the current input line and clears it.
  fn on_key_down(&mut self, pix: &mut Pix, key: String) -> Result<(), String> {
    match key.as_str() {
      "Escape" => pix.quit(),
      "Backspace" => {
        self.text.pop();
        Ok(())
      }
      "Return" => {
        // Broadcast to the whole LAN on the same port opened in on_init.
        pix.send("255.255.255.255", 4055, self.text.clone())?;
        self.text.clear();
        Ok(())
      }
      _ => Ok(()),
    }
  }
fn on_text_input(&mut self, pix: &mut Pix, text: String) -> Result<(), String> {
self.text.push_str(text.as_str());
Ok(())
}
fn on_receive(
&mut self,
pix: &mut Pix,
ip: String,
port: u16,
data: PixMsgPack,
) -> Result<(), String> {
let string: String = data.deserialize()?;
self.chat.push(string);
Ok(())
}
}
fn main() -> Result<(), String> {
run(Game {
t: 0.0,
text: String::from("Hello World"),
chat: Vec::new(),
})
}
| true |
a0c7acdccac7be9e803895f263880c48d826024c
|
Rust
|
dimforge/rapier
|
/examples3d/one_way_platforms3.rs
|
UTF-8
| 4,661 | 2.5625 | 3 |
[
"Apache-2.0"
] |
permissive
|
use rapier3d::prelude::*;
use rapier_testbed3d::Testbed;
struct OneWayPlatformHook {
platform1: ColliderHandle,
platform2: ColliderHandle,
}
impl PhysicsHooks for OneWayPlatformHook {
    /// Filter solver contacts so each platform only pushes from one side,
    /// and give accepted contacts a conveyor-belt surface velocity.
    fn modify_solver_contacts(&self, context: &mut ContactModificationContext) {
        // The allowed normal for the first platform is its local +y axis, and the
        // allowed normal for the second platform is its local -y axis.
        //
        // Now we have to be careful because the `manifold.local_n1` normal points
        // toward the outside of the shape of `context.co1`. So we need to flip the
        // allowed normal direction if the platform is in `context.collider2`.
        //
        // Therefore:
        // - If context.collider1 == self.platform1 then the allowed normal is +y.
        // - If context.collider2 == self.platform1 then the allowed normal is -y.
        // - If context.collider1 == self.platform2 then its allowed normal +y needs to be flipped to -y.
        // - If context.collider2 == self.platform2 then the allowed normal -y needs to be flipped to +y.
        let mut allowed_local_n1 = Vector::zeros();
        if context.collider1 == self.platform1 {
            allowed_local_n1 = Vector::y();
        } else if context.collider2 == self.platform1 {
            // Flip the allowed direction.
            allowed_local_n1 = -Vector::y();
        }
        if context.collider1 == self.platform2 {
            allowed_local_n1 = -Vector::y();
        } else if context.collider2 == self.platform2 {
            // Flip the allowed direction.
            allowed_local_n1 = Vector::y();
        }
        // Call the helper function that simulates one-way platforms.
        context.update_as_oneway_platform(&allowed_local_n1, 0.1);
        // Set the surface velocity of the accepted contacts.
        // NOTE(review): this condition pairs `collider1 == platform1` with
        // `collider2 == platform2`; the other two collider/platform pairings
        // fall into the +12.0 branch — confirm that is the intended conveyor
        // direction for both platforms.
        let tangent_velocity =
            if context.collider1 == self.platform1 || context.collider2 == self.platform2 {
                -12.0
            } else {
                12.0
            };
        for contact in context.solver_contacts.iter_mut() {
            contact.tangent_velocity.z = tangent_velocity;
        }
    }
}
pub fn init_world(testbed: &mut Testbed) {
/*
* World
*/
let mut bodies = RigidBodySet::new();
let mut colliders = ColliderSet::new();
let impulse_joints = ImpulseJointSet::new();
let multibody_joints = MultibodyJointSet::new();
/*
* Ground
*/
let rigid_body = RigidBodyBuilder::fixed();
let handle = bodies.insert(rigid_body);
let collider = ColliderBuilder::cuboid(9.0, 0.5, 25.0)
.translation(vector![0.0, 2.0, 30.0])
.active_hooks(ActiveHooks::MODIFY_SOLVER_CONTACTS);
let platform1 = colliders.insert_with_parent(collider, handle, &mut bodies);
let collider = ColliderBuilder::cuboid(9.0, 0.5, 25.0)
.translation(vector![0.0, -2.0, -30.0])
.active_hooks(ActiveHooks::MODIFY_SOLVER_CONTACTS);
let platform2 = colliders.insert_with_parent(collider, handle, &mut bodies);
/*
* Setup the one-way platform hook.
*/
let physics_hooks = OneWayPlatformHook {
platform1,
platform2,
};
/*
* Spawn cubes at regular intervals and apply a custom gravity
* depending on their position.
*/
testbed.add_callback(move |graphics, physics, _, run_state| {
if run_state.timestep_id % 50 == 0 && physics.bodies.len() <= 7 {
// Spawn a new cube.
let collider = ColliderBuilder::cuboid(1.0, 2.0, 1.5);
let body = RigidBodyBuilder::dynamic().translation(vector![0.0, 6.0, 20.0]);
let handle = physics.bodies.insert(body);
physics
.colliders
.insert_with_parent(collider, handle, &mut physics.bodies);
if let Some(graphics) = graphics {
graphics.add_body(handle, &physics.bodies, &physics.colliders);
}
}
for handle in physics.islands.active_dynamic_bodies() {
let body = physics.bodies.get_mut(*handle).unwrap();
if body.position().translation.y > 1.0 {
body.set_gravity_scale(1.0, false);
} else if body.position().translation.y < -1.0 {
body.set_gravity_scale(-1.0, false);
}
}
});
/*
* Set up the testbed.
*/
testbed.set_world_with_params(
bodies,
colliders,
impulse_joints,
multibody_joints,
vector![0.0, -9.81, 0.0],
physics_hooks,
);
testbed.look_at(point![-100.0, 0.0, 0.0], Point::origin());
}
| true |
18fe737d92473a4212e042a965cf43fb39048c66
|
Rust
|
wolfgang/tetrs
|
/src/game/brick_factory.rs
|
UTF-8
| 4,375 | 2.625 | 3 |
[] |
no_license
|
use crate::game::brick_provider::BrickDef;
pub const I_BLOCK: u8 = 1;
pub const O_BLOCK: u8 = 2;
pub const T_BLOCK: u8 = 3;
pub const J_BLOCK: u8 = 4;
pub const S_BLOCK: u8 = 5;
pub const Z_BLOCK: u8 = 6;
pub const L_BLOCK: u8 = 7;
pub fn i_block() -> BrickDef {
BrickDef {
brick_type: I_BLOCK,
bricklets:
vec![
from_strings(vec!["####"]),
from_strings(vec![
"..#.",
"..#.",
"..#.",
"..#."]),
from_strings(vec![
"....",
"####"]),
from_strings(vec![
".#..",
".#..",
".#..",
".#.."]),
],
}
}
pub fn o_block() -> BrickDef {
BrickDef {
brick_type: O_BLOCK,
bricklets: vec![
from_strings(vec![
"##",
"##"])
],
}
}
pub fn t_block() -> BrickDef {
BrickDef {
brick_type: T_BLOCK,
bricklets: vec![
from_strings(vec![
".#.",
"###"
]),
from_strings(vec![
".#.",
".##",
".#."
]),
from_strings(vec![
"...",
"###",
".#."
]),
from_strings(vec![
".#.",
"##.",
".#."
])
],
}
}
pub fn l_block() -> BrickDef {
BrickDef {
brick_type: L_BLOCK,
bricklets:
vec![
from_strings(vec![
"#..",
"###"
]),
from_strings(vec![
".##",
".#.",
".#."
]),
from_strings(vec![
"...",
"###",
"..#"
]),
from_strings(vec![
".#.",
".#.",
"##."
]),
],
}
}
pub fn j_block() -> BrickDef {
BrickDef {
brick_type: J_BLOCK,
bricklets:
vec![
from_strings(vec![
"..#",
"###"
]),
from_strings(vec![
".#.",
".#.",
".##"
]),
from_strings(vec![
"...",
"###",
"#.."
]),
from_strings(vec![
"##.",
".#.",
".#."
]),
],
}
}
pub fn s_block() -> BrickDef {
BrickDef {
brick_type: S_BLOCK,
bricklets:
vec![
from_strings(vec![
".##",
"##."
]),
from_strings(vec![
".#.",
".##",
"..#"
]),
from_strings(vec![
"...",
".##",
"##."
]),
from_strings(vec![
"#.",
"##",
".#"
]),
],
}
}
/// Z tetromino: spawn orientation plus its three rotations.
pub fn z_block() -> BrickDef {
    BrickDef {
        brick_type: Z_BLOCK,
        bricklets:
            vec![
                // Consistency fix: the spawn rotation's second row was
                // ".##." (4 columns wide); trimmed to 3 columns to match the
                // other rotations. `from_strings` only records '#', so the
                // produced coordinates are unchanged.
                from_strings(vec![
                    "##.",
                    ".##"
                ]),
                from_strings(vec![
                    "..#",
                    ".##",
                    ".#."
                ]),
                from_strings(vec![
                    "...",
                    "##.",
                    ".##"
                ]),
                from_strings(vec![
                    ".#",
                    "##",
                    "#."
                ]),
            ],
    }
}
// ###
//  #
// NOTE(review): unlike the blocks above, the flipped variants define a
// single orientation only (no rotation list) — presumably intentional;
// confirm with the callers.
pub fn t_block_flipped() -> BrickDef {
    BrickDef {
        brick_type: T_BLOCK,
        bricklets: vec![vec![(0, 0), (1, 0), (2, 0), (1, 1)]],
    }
}
// ###
// #
pub fn l_block_flipped() -> BrickDef {
    BrickDef {
        // NOTE(review): a horizontally flipped L is a J piece, which is
        // presumably why this reports `J_BLOCK` despite the function name —
        // confirm downstream code expects that.
        brick_type: J_BLOCK,
        bricklets: vec![vec![(0, 0), (1, 0), (2, 0), (0, 1)]],
    }
}
/// Convert an ASCII picture into bricklet coordinates: every '#' becomes its
/// (x, y) cell position; any other character is empty space. Coordinates are
/// emitted in row-major order.
fn from_strings(strings: Vec<&str>) -> Vec<(u8, u8)> {
    strings
        .iter()
        .enumerate()
        .flat_map(|(y, row)| {
            row.chars()
                .enumerate()
                .filter(|&(_, c)| c == '#')
                .map(move |(x, _)| (x as u8, y as u8))
        })
        .collect()
}
| true |
a445bebd9a5159f697f197bafbf1543924bc77b2
|
Rust
|
jiangzhe/mybin
|
/mybin-core/src/cmd/field_list.rs
|
UTF-8
| 1,988 | 2.71875 | 3 |
[] |
no_license
|
use crate::col::ColumnDefinition;
use crate::flag::CapabilityFlags;
use crate::packet::{EofPacket, ErrPacket};
use crate::Command;
use bytes::{Buf, Bytes, BytesMut};
use bytes_parser::error::{Error, Needed, Result};
use bytes_parser::{WriteBytesExt, WriteToBytes};
/// get column definitions of a table
///
/// deprecated, use SHOW COLUMNS instead
#[derive(Debug)]
pub struct ComFieldList {
pub cmd: Command,
pub table: String,
pub field_wildcard: String,
}
impl ComFieldList {
pub fn new<T: Into<String>, U: Into<String>>(table: T, field_wildcard: U) -> Self {
ComFieldList {
cmd: Command::FieldList,
table: table.into(),
field_wildcard: field_wildcard.into(),
}
}
}
impl WriteToBytes for ComFieldList {
    /// Serialize the COM_FIELD_LIST payload: the command byte, the table
    /// name terminated by a NUL byte, then the field wildcard running to the
    /// end of the packet (no terminator). Returns the bytes written.
    fn write_to(self, out: &mut BytesMut) -> Result<usize> {
        let mut len = 0;
        len += out.write_u8(self.cmd.to_byte())?;
        len += out.write_bytes(self.table.as_bytes())?;
        len += out.write_u8(0)?; // NUL terminator after the table name
        len += out.write_bytes(self.field_wildcard.as_bytes())?;
        Ok(len)
    }
}
#[derive(Debug)]
pub enum ComFieldListResponse {
ColDef(ColumnDefinition),
Err(ErrPacket),
Eof(EofPacket),
}
impl ComFieldListResponse {
    /// Decode one packet of a COM_FIELD_LIST response by inspecting the
    /// first payload byte: 0xff = ERR packet, 0xfe = EOF packet, anything
    /// else is parsed as a column definition.
    pub fn read_from(input: &mut Bytes, cap_flags: &CapabilityFlags, sql: bool) -> Result<Self> {
        if !input.has_remaining() {
            return Err(Error::InputIncomplete(Bytes::new(), Needed::Unknown));
        }
        match input[0] {
            0xff => {
                let err = ErrPacket::read_from(input, cap_flags, sql)?;
                Ok(ComFieldListResponse::Err(err))
            }
            0xfe => {
                // NOTE(review): in the MySQL protocol 0xfe marks EOF only
                // for short packets; a length-encoded value could start with
                // 0xfe as well — confirm callers never hit that case here.
                let eof = EofPacket::read_from(input, cap_flags)?;
                Ok(ComFieldListResponse::Eof(eof))
            }
            _ => {
                let col_def = ColumnDefinition::read_from(input, true)?;
                Ok(ComFieldListResponse::ColDef(col_def))
            }
        }
    }
}
| true |
37e6cdd5014167b8b7febbe4b5f2698f813d3598
|
Rust
|
khernyo/quirc-rs
|
/src/math.rs
|
UTF-8
| 3,602 | 2.984375 | 3 |
[
"ISC"
] |
permissive
|
use std::{f32, f64};
/// Rounds to the nearest integer, breaking ties toward the even integer
/// (IEEE 754 "roundTiesToEven"), matching C's `rint` under the default
/// floating-point rounding mode — see the exhaustive test below.
pub trait RoundToNearestFavorEven {
    // TODO Remove when Rust grows this rounding mode.
    fn round_to_nearest_favor_even(self) -> Self;
}
macro_rules! impl_round_to_nearest_favor_even {
    ($t:tt) => {
        impl RoundToNearestFavorEven for $t {
            fn round_to_nearest_favor_even(self) -> Self {
                // 1/EPSILON is the magnitude beyond which the float type can
                // no longer represent fractional values; adding and then
                // subtracting it forces the FPU to round to an integer using
                // the current rounding mode (round-to-nearest, ties-to-even
                // by default — NOTE(review): this assumes the default mode
                // has not been changed at runtime).
                let k = 1.0 / $t::EPSILON;
                let a = self.abs();
                if a < k {
                    // copysign restores the sign, preserving -0.0 and
                    // negative inputs that round to zero.
                    ((a + k) - k).copysign(self)
                } else {
                    // Values with |x| >= 1/EPSILON are already integers.
                    self
                }
            }
        }
    };
}
impl_round_to_nearest_favor_even!(f32);
impl_round_to_nearest_favor_even!(f64);
#[cfg(test)]
mod tests {
use std::os::raw::c_double;
use super::*;
extern "C" {
fn rint(x: c_double) -> c_double;
}
#[test]
#[allow(clippy::float_cmp)]
#[allow(clippy::cognitive_complexity)]
fn test_round_to_nearest_favor_even() {
assert!(f64::NAN.round_to_nearest_favor_even().is_nan());
assert_eq!(f64::INFINITY.round_to_nearest_favor_even(), f64::INFINITY);
assert_eq!(
f64::NEG_INFINITY.round_to_nearest_favor_even(),
f64::NEG_INFINITY
);
assert_eq!(0f64.round_to_nearest_favor_even(), 0f64);
assert_eq!((-0f64).round_to_nearest_favor_even(), -0f64);
assert_eq!(0.5.round_to_nearest_favor_even(), 0f64);
assert_eq!(1.5.round_to_nearest_favor_even(), 2f64);
assert_eq!(2.5.round_to_nearest_favor_even(), 2f64);
assert_eq!(3.5.round_to_nearest_favor_even(), 4f64);
assert_eq!((-0.5).round_to_nearest_favor_even(), -0f64);
assert_eq!((-1.5).round_to_nearest_favor_even(), -2f64);
assert_eq!((-2.5).round_to_nearest_favor_even(), -2f64);
assert_eq!((-3.5).round_to_nearest_favor_even(), -4f64);
assert_eq!(0.4.round_to_nearest_favor_even(), 0f64);
assert_eq!(1.4.round_to_nearest_favor_even(), 1f64);
assert_eq!(2.4.round_to_nearest_favor_even(), 2f64);
assert_eq!(3.4.round_to_nearest_favor_even(), 3f64);
assert_eq!((-0.4).round_to_nearest_favor_even(), -0f64);
assert_eq!((-1.4).round_to_nearest_favor_even(), -1f64);
assert_eq!((-2.4).round_to_nearest_favor_even(), -2f64);
assert_eq!((-3.4).round_to_nearest_favor_even(), -3f64);
assert_eq!(0.6.round_to_nearest_favor_even(), 1f64);
assert_eq!(1.6.round_to_nearest_favor_even(), 2f64);
assert_eq!(2.6.round_to_nearest_favor_even(), 3f64);
assert_eq!(3.6.round_to_nearest_favor_even(), 4f64);
assert_eq!((-0.6).round_to_nearest_favor_even(), -1f64);
assert_eq!((-1.6).round_to_nearest_favor_even(), -2f64);
assert_eq!((-2.6).round_to_nearest_favor_even(), -3f64);
assert_eq!((-3.6).round_to_nearest_favor_even(), -4f64);
}
#[test]
#[ignore] // Ignored because it's a long running test
fn test_round_to_nearest_favor_even_exhaustive() {
for i in 0..u32::max_value() {
unsafe {
let f: f32 = f32::from_bits(i);
let f_round: f32 = f.round_to_nearest_favor_even();
let f_round_bits: u32 = std::mem::transmute(f_round);
let f_rint: f32 = rint(f64::from(f)) as f32;
let f_rint_bits: u32 = std::mem::transmute(f_rint);
if f.is_nan() {
assert_eq!(f_round.is_nan(), f_rint.is_nan());
} else {
assert_eq!(f_round_bits, f_rint_bits);
}
}
}
}
}
| true |
0545f46743a2e1327ab40fa4d43e7a2ea70d1ea3
|
Rust
|
JamesMenetrey/rust-web-app
|
/src/domain/products/commands/set_product_title.rs
|
UTF-8
| 1,516 | 3.03125 | 3 |
[] |
no_license
|
/*! Contains the `SetProductTitleCommand`. */
use auto_impl::auto_impl;
use crate::domain::{
error::{
err_msg,
Error,
},
products::{
ProductId,
ProductStore,
},
Resolver,
};
pub type Result = ::std::result::Result<(), Error>;
/** Input for a `SetProductTitleCommand`. */
#[derive(Clone, Deserialize)]
pub struct SetProductTitle {
pub id: ProductId,
pub title: String,
}
/** Set a new title for a product. */
#[auto_impl(FnMut)]
pub trait SetProductTitleCommand {
fn set_product_title(&mut self, command: SetProductTitle) -> Result;
}
/** Default implementation for a `SetProductTitleCommand`.

Loads the product from the store, applies the new title, and persists the
updated product. Fails with "not found" if no product has the given id.
*/
pub(in crate::domain) fn set_product_title_command(
    store: impl ProductStore,
) -> impl SetProductTitleCommand {
    move |command: SetProductTitle| {
        debug!(
            "updating product `{}` title to {:?}",
            command.id, command.title
        );
        let product = {
            if let Some(mut product) = store.get_product(command.id)? {
                product.set_title(command.title)?;
                product
            } else {
                // `?` on a freshly built `Err` converts it via `From` and
                // returns early; the value after `?` is never reached.
                Err(err_msg("not found"))?
            }
        };
        store.set_product(product)?;
        info!("updated product `{}` title", command.id);
        Ok(())
    }
}
impl Resolver {
    /// Builds the default `SetProductTitleCommand` backed by this
    /// resolver's product store.
    pub fn set_product_title_command(&self) -> impl SetProductTitleCommand {
        set_product_title_command(self.products().product_store())
    }
}
| true |
6cd866f26f0e18cdd21805d90c06d1726e48e323
|
Rust
|
scottlamb/http-serve
|
/benches/file.rs
|
UTF-8
| 4,215 | 2.578125 | 3 |
[
"MIT",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
// Copyright (c) 2016-2018 The http-serve developers
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE.txt or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT.txt or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use criterion::{criterion_group, criterion_main, Criterion};
use http::{Request, Response};
use hyper::Body;
use once_cell::sync::Lazy;
use std::ffi::OsString;
use std::fs::File;
use std::io::Write;
use std::sync::Mutex;
use std::time::Duration;
use tempfile::TempDir;
type BoxedError = Box<dyn std::error::Error + Send + Sync>;
/// Hyper handler: serves the file currently named by `PATH`.
async fn serve(req: Request<Body>) -> Result<Response<Body>, BoxedError> {
    // Opening/stat-ing the file is blocking work, so keep it off the
    // async executor threads.
    let chunked = tokio::task::block_in_place::<_, Result<_, BoxedError>>(move || {
        let file = std::fs::File::open(&*PATH.lock().unwrap())?;
        Ok(http_serve::ChunkedReadFile::new(
            file,
            http::header::HeaderMap::new(),
        )?)
    })?;
    Ok(http_serve::serve(chunked, &req))
}
/// Returns the hostport of a newly created, never-destructed server.
fn new_server() -> String {
    // The bound address is handed back over a channel because the server
    // runs (and blocks forever) on its own dedicated thread.
    let (tx, rx) = std::sync::mpsc::channel();
    std::thread::spawn(move || {
        let make_svc = hyper::service::make_service_fn(|_conn| {
            futures_util::future::ok::<_, hyper::Error>(hyper::service::service_fn(serve))
        });
        let rt = tokio::runtime::Runtime::new().unwrap();
        // Enter the runtime so `Server::bind` can register I/O resources.
        let _guard = rt.enter();
        // Port 0: let the OS pick a free port.
        let addr = ([127, 0, 0, 1], 0).into();
        let srv = hyper::Server::bind(&addr).tcp_nodelay(true).serve(make_svc);
        let addr = srv.local_addr();
        // Send the address *before* blocking on the server future.
        tx.send(addr).unwrap();
        rt.block_on(srv).unwrap();
    });
    let addr = rx.recv().unwrap();
    format!("http://{}:{}", addr.ip(), addr.port())
}
/// Path of the file currently being served; set by `setup`.
static PATH: Lazy<Mutex<OsString>> = Lazy::new(|| Mutex::new(OsString::new()));
/// Base URL (`http://ip:port`) of the lazily-started benchmark server.
static SERVER: Lazy<String> = Lazy::new(new_server);
/// Sets up the server to serve a `kib`-KiB file, until the returned `TempDir`
/// goes out of scope and the file is deleted.
fn setup(kib: usize) -> TempDir {
    let dir = tempfile::tempdir().unwrap();
    let file_path = dir.path().join("f");
    {
        // Publish the path so `serve` picks it up on the next request.
        let shared = &mut *PATH.lock().unwrap();
        shared.clear();
        shared.push(&file_path);
    }
    // Fill the file with `kib` zeroed 1-KiB chunks.
    let mut file = File::create(file_path).unwrap();
    let zeros = [0u8; 1024];
    for _ in 0..kib {
        file.write_all(&zeros).unwrap();
    }
    dir
}
/// Benchmarks a full GET of a `kib`-KiB file.
fn serve_full_entity(b: &mut criterion::Bencher, kib: &usize) {
    let _tmpdir = setup(*kib);
    let client = reqwest::Client::new();
    let rt = tokio::runtime::Runtime::new().unwrap();
    let expected_len = 1024 * *kib;
    b.to_async(&rt).iter(|| async {
        let resp = client.get(&*SERVER).send().await.unwrap();
        assert_eq!(resp.status(), reqwest::StatusCode::OK);
        let body = resp.bytes().await.unwrap();
        assert_eq!(body.len(), expected_len);
    });
}
/// Benchmarks a ranged GET for only the final byte of a 1-MiB file.
fn serve_last_byte_1mib(b: &mut criterion::Bencher) {
    let _tmpdir = setup(1024);
    let client = reqwest::Client::new();
    let rt = tokio::runtime::Runtime::new().unwrap();
    b.to_async(&rt).iter(|| async {
        // `bytes=-1` is a suffix range: the last byte of the entity.
        let resp = client
            .get(&*SERVER)
            .header("Range", "bytes=-1")
            .send()
            .await
            .unwrap();
        assert_eq!(resp.status(), reqwest::StatusCode::PARTIAL_CONTENT);
        let body = resp.bytes().await.unwrap();
        assert_eq!(body.len(), 1);
    });
}
/// Registers all benchmark cases with criterion.
fn criterion_benchmark(c: &mut Criterion) {
    let mut group = c.benchmark_group("serve_full_entity");
    group.throughput(criterion::Throughput::Bytes(1024));
    group.bench_function("1kib", |b| serve_full_entity(b, &1));
    group.throughput(criterion::Throughput::Bytes(1024 * 1024));
    group.bench_function("1mib", |b| serve_full_entity(b, &1024));
    group.finish();
    c.bench_function("serve_last_byte_1mib", serve_last_byte_1mib);
}
criterion_group! {
    name = benches;
    // Tweak the config to run more quickly; http-serve has many bench cases.
    config = Criterion::default()
        .sample_size(10)
        .warm_up_time(Duration::from_millis(100))
        .measurement_time(Duration::from_secs(1));
    targets = criterion_benchmark
}
// Generates the `main` that runs the group above.
criterion_main!(benches);
| true |
454bcdefe462285936ebdd316f8e980d94713a82
|
Rust
|
ogoestcc/recommender
|
/src/models/recommender.rs
|
UTF-8
| 5,475 | 2.703125 | 3 |
[] |
no_license
|
use array_tool::vec::Intersect;
use std::{cmp::Ordering, collections::HashMap};
use super::{alert::Alert, user::User};
/// In-memory recommender over a set of users and alerts.
#[derive(Debug, Clone)]
pub struct Recommender {
    /// Users keyed by numeric user id.
    pub users: HashMap<u32, User>,
    /// Alerts keyed by alert id.
    pub alerts: HashMap<String, Alert>,
    // alerts: &'b mut HashMap<&'b str, &'b Alert>,
    // ratings: &'b Vec<Rating>,
}
/// PERSONALIZED RECOMMENDATION
impl Recommender {
    /// Jaccard similarity of two users' item lists: |A ∩ B| / |A ∪ B|.
    /// NOTE(review): assumes both lists are deduplicated — confirm upstream.
    pub fn users_similarity(left: &Vec<String>, right: Vec<String>) -> f32 {
        let right_size = right.len();
        let intersection: Vec<_> = left.intersect(right);
        intersection.len() as f32
            / (left.len() as f32 + right_size as f32 - intersection.len() as f32)
    }

    /// True when `user` has no rating recorded for `alert`
    /// (or no ratings at all).
    fn not_viewed(user: &User, alert: &Alert) -> bool {
        match &user.ratings {
            Some(ratings) => ratings.get(&alert.id).is_none(),
            None => true,
        }
    }

    /// True when the alert's product or provider appears in the user's
    /// preference list.
    fn include_preferences(user: &User, alert: &Alert) -> bool {
        user.preferences
            .iter()
            .any(|pref| *pref == alert.product || *pref == alert.provider)
    }

    /// Builds a comparator ordering items by the user's score for the
    /// referenced alert, highest score first.
    fn sort_alerts<'u, T: 'u>(
        user: &'u User,
        get_id: for<'a> fn(&'a T) -> &'a String,
        get_score: for<'a> fn(&'a T) -> f32,
    ) -> impl FnMut(&T, &T) -> Ordering + 'u {
        move |left, right| {
            let left = user.alert_score_by_id(get_id(left), get_score(left));
            let right = user.alert_score_by_id(get_id(right), get_score(right));
            let order = if left > right {
                Ordering::Greater
            } else if left < right {
                Ordering::Less
            } else {
                Ordering::Equal
            };
            // Reverse the ascending comparison so higher scores sort first.
            order.reverse()
        }
    }

    /// Content-based ranking: returns up to `alert_number` alerts ordered by
    /// the user's per-alert score.
    pub fn content_based(
        &self,
        user_id: u32,
        alert_number: u16,
        exclude_clause: Option<for<'r, 's> fn(&'r User, &'s Alert) -> bool>,
    ) -> Vec<&Alert> {
        // NOTE(review): the exclusion predicate is currently unused because
        // the filtering step below is commented out; the leading underscore
        // silences the warning while keeping the public signature intact.
        let _viewed_method = exclude_clause.unwrap_or(Recommender::not_viewed);
        // Unknown users get an empty recommendation list.
        let user = match self.users.get(&user_id) {
            Some(user) => user,
            None => return vec![],
        };
        let mut alerts: Vec<&Alert> = self.alerts.values().collect();
        // let mut alerts = alerts
        //     .iter()
        //     .filter(|alert| {
        //         let alert = alert.to_owned().to_owned();
        //         viewed_method(user, alert)
        //     })
        //     .collect::<Vec<_>>();
        alerts.sort_by(Recommender::sort_alerts::<&Alert>(
            user,
            |a| &a.id,
            |a| a.score.unwrap_or(0.),
        ));
        // `truncate` is a no-op when fewer alerts exist than requested,
        // replacing the previous manual `limit` computation.
        alerts.truncate(alert_number as usize);
        alerts
    }

    /// Collaborative filtering: scores each unseen alert as the
    /// similarity-weighted average of ratings from similar users.
    pub fn collaborative_filtering(&self, user_id: u32, alert_number: u16) -> Vec<&Alert> {
        let user = self.users.get(&user_id).unwrap();
        // Only consider alerts the user has not rated yet. (The redundant
        // `alerts.clone()` of the previous version is removed.)
        let candidates: Vec<_> = self
            .alerts
            .iter()
            .filter(|(_, alert)| Recommender::not_viewed(user, alert))
            .collect();
        let mut scored: Vec<_> = candidates
            .iter()
            .filter_map(|(alert_id, alert)| {
                // Users without a similarity table produce no scores.
                let similarity = user.similarity.as_ref()?;
                let mut score = 0.;
                let mut weight = 0.;
                for (similar_id, sim) in similarity {
                    let similar_user = self.users.get(similar_id).unwrap();
                    score += similar_user.alert_rating(alert_id) as f32 * sim;
                    weight += sim.abs();
                }
                // NOTE(review): `weight` may be 0 (empty similarity table),
                // yielding NaN — same as the original behavior.
                Some((alert, score / weight))
            })
            .collect();
        scored.sort_by(Recommender::sort_alerts::<(&&Alert, f32)>(
            user,
            |(a, _)| &a.id,
            |(_, s)| *s,
        ));
        scored.truncate(alert_number as usize);
        scored.into_iter().map(|(alert, _)| *alert).collect()
    }
}
/// NON-PERSONALIZED RECOMMENDATION
impl Recommender {
    /// Returns up to `alert_number` highest-ranked alerts (by the alerts'
    /// own `Ord`), optionally restricted to those matching `content`.
    pub fn top_n(&self, alert_number: u32, content: Option<String>) -> Vec<&Alert> {
        // A plain `filter` replaces the previous `filter_map` with an
        // explicit Some/None branch.
        let mut alerts: Vec<&Alert> = match &content {
            Some(content) => self
                .alerts
                .values()
                .filter(|alert| alert.filter_content(content))
                .collect(),
            None => self.alerts.values().collect(),
        };
        // Descending order: highest alerts first.
        alerts.sort_by(|a, b| b.cmp(a));
        // `truncate` is a no-op when fewer alerts exist than requested.
        alerts.truncate(alert_number as usize);
        alerts
    }
}
| true |
7c8dc9176cba3cd323e76a50a5c0e5a425d52e83
|
Rust
|
audunhalland/dyn-symbol
|
/src/lib.rs
|
UTF-8
| 18,751 | 3.78125 | 4 |
[] |
no_license
|
//!
//! Dynamic, plugin-based [Symbol](https://en.wikipedia.org/wiki/Symbol_(programming)) abstraction.
//!
//! A [Symbol] can be used as an _identifier_ in place of the more primitive workhorse [String].
//! There could be multiple reasons to do so:
//!
//! 1. Mixing of different domains in the same runtime code
//! 2. Handling of naming collisions in multiple namespaces
//! 3. Avoiding memory allocations for statically known namespaces
//! 4. Mix of static and dynamic allocation
//! 5. Associating metadata to the symbols themselves
//!
//! The main use case for symbols is as map keys for in-memory key/value stores.
//!
//! Note that there are probably more reasons _not_ to use symbols than to use them! In most cases, something like
//! `enum` or [String] will do just fine. But sometimes applications process a lot of semi-schematic external input,
//! and you just want Rust to work like any old dynamic programming language again.
//!
//! # Example use cases
//! * Namespaced XML/HTML attributes (in HTML, some are static and some are dynamic. i.e. `data-` attributes)
//! * Key/value stores for "anything"
//! * Some way to abstract away string interners? (this is untested)
//!
//! A [Symbol] is just one plain, non-generic type, that can represent all possible symbol values. It implements all traits to make it
//! usable as a key in maps.
//!
//! # What this crate does not do
//! * Serialization and deserialization of symbols. [Symbol] should not implement `serde` traits, ser/de should instead be handled by each namespace.
//! * Provide any namespaces.
//!
//! # Static symbols
//! Static symbols originate from a namespace where all possible values are statically known at compile time.
//! One instance of a static symbol requires no memory allocation.
//!
//! Creating a static namespace:
//!
//! ```
//! use dyn_symbol::*;
//!
//! struct MyStaticNS {
//! symbols: &'static [(&'static str, &'static str)],
//! }
//!
//! const MY_STATIC_NS: MyStaticNS = MyStaticNS {
//! symbols: &[
//! ("foo", "the first symbol!"),
//! ("bar", "the second symbol!")
//! ]
//! };
//!
//! impl dyn_symbol::namespace::Static for MyStaticNS {
//! fn namespace_name(&self) -> &str {
//! "my"
//! }
//!
//! fn symbol_name(&self, id: u32) -> &str {
//! self.symbols[id as usize].0
//! }
//! }
//!
//! // Define (and export) some symbol constants
//! pub const FOO: Symbol = Symbol::Static(&MY_STATIC_NS, 0);
//! pub const BAR: Symbol = Symbol::Static(&MY_STATIC_NS, 1);
//!
//! assert_eq!(FOO, FOO);
//! assert_eq!(FOO.clone(), FOO.clone());
//! assert_ne!(FOO, BAR);
//! assert_eq!(format!("{:?}", FOO), "my::foo");
//!
//! // We can find the originating namespace later:
//! assert!(FOO.downcast_static::<MyStaticNS>().is_some());
//!
//! // To implement special metadata-extraction (or similar functionality) for a namespace:
//! fn get_symbol_description(symbol: &Symbol) -> Result<&'static str, &'static str> {
//! if let Some((namespace, id)) = symbol.downcast_static::<MyStaticNS>() {
//! Ok(namespace.symbols[id as usize].1)
//! } else {
//! Err("not from this namespace :(")
//! }
//! }
//!
//! assert_eq!(get_symbol_description(&BAR).unwrap(), "the second symbol!");
//! ```
//!
//! For static symbols, the implementations of [Eq]/[Ord]/[Hash](std::hash::Hash) et. al use only the namespace's [type_id](std::any::Any::type_id)
//! plus the symbol's numerical `id`.
//!
//! Typically, the boilerplate code for a static namespace will be generated by macros or `build.rs`.
//!
//! # Dynamic symbols
//! Sometimes the values that a symbol can take are not known upfront. In this case we have to resort to memory allocation.
//! Dynamic symbols implement a different namespace trait: [namespace::Dynamic]. The type that implements this trait also
//! functions as the symbol _instance_ itself:
//!
//! ```
//! use dyn_symbol::*;
//!
//! // This symbol is string-based:
//! struct DynamicNS(String);
//!
//! impl namespace::Dynamic for DynamicNS {
//! fn namespace_name(&self) -> &str {
//! "dynamic"
//! }
//!
//! fn symbol_name(&self) -> &str {
//! &self.0
//! }
//!
//! fn dyn_clone(&self) -> Box<dyn namespace::Dynamic> {
//! Box::new(DynamicNS(self.0.clone()))
//! }
//!
//! /// Note: calling code should already have verified that these are indeed the same types, using `type_id`.
//! fn dyn_eq(&self, rhs: &dyn namespace::Dynamic) -> bool {
//! self.0 == rhs.downcast_ref::<DynamicNS>().unwrap().0
//! }
//!
//! fn dyn_cmp(&self, rhs: &dyn namespace::Dynamic) -> std::cmp::Ordering {
//! self.0.cmp(&rhs.downcast_ref::<DynamicNS>().unwrap().0)
//! }
//!
//! fn dyn_hash(&self, state: &mut dyn std::hash::Hasher) {
//! // we are now in `dyn` land, so the [std::hash::Hash] trait cannot be used:
//! state.write(self.0.as_bytes());
//! state.write_u8(0xff)
//! }
//! }
//!
//! let foo0 = Symbol::Dynamic(Box::new(DynamicNS("foo".into())));
//! let foo1 = Symbol::Dynamic(Box::new(DynamicNS("foo".into())));
//! let bar = Symbol::Dynamic(Box::new(DynamicNS("bar".into())));
//!
//! assert_eq!(foo0, foo1);
//! assert_eq!(foo0.clone(), foo1.clone());
//! assert_ne!(foo0, bar);
//! ```
//!
//! It is entirely up to the Dynamic implementation to consider what kind of symbols are considered equal.
//! The `Eq`/`Hash` symmetry need to hold, though.
//!
//! Dynamic symbols are supported as a companion to static symbols. If your application works mainly with dynamic symbols,
//! you should consider using a different keying mechanism, because of the inherent overhead/indirection/boxing of dynamic symbols.
//!
//! # Type system
//! This crate makes use of [Any](std::any::Any), and consideres namespaces sharing the same [TypeId](std::any::TypeId) to be the _same namespace_.
//! This could make code reuse a bit cumbersome. If one crate exports multiple namespaces, this can be solved by using const generics:
//!
//! ```
//! struct ReusableNamespace<const N: u8>;
//!
//! // impl<const N: u8> namespace::Static for ReusableNamespace<N> { ... }
//!
//! const NS_1: ReusableNamespace<1> = ReusableNamespace;
//! const NS_2: ReusableNamespace<2> = ReusableNamespace;
//!
//! // assert_ne!(NS_1.type_id(), NS_2.type_id());
//! ```
//!
//! This will cause the two namespaces to have differing `type_id`s.
//!
//!
use std::cmp::Ordering;
///
/// A symbol, with support for mixed static/dynamic allocation.
///
/// Equality, ordering and hashing are keyed on the originating namespace's
/// `TypeId` plus the symbol's identity within that namespace.
///
pub enum Symbol {
    /// Construct a Symbol originating from a static namespace.
    /// The first parameter is a trait object pointing back to the namespace,
    /// the second parameter is the symbol `id` within that namespace.
    Static(&'static dyn namespace::Static, u32),
    /// Construct a Symbol with dynamic origins. Dynamic namespaces are unbounded in size,
    /// so a memory allocation is needed. This encoding allows dynamic namespaces to support
    /// the same semantics that static namespaces do. Instead of just using a [String], we
    /// can also encode what kind of string it is.
    Dynamic(Box<dyn namespace::Dynamic>),
}
impl Symbol {
pub fn name(&self) -> &str {
match self {
Self::Static(ns, id) => ns.symbol_name(*id),
Self::Dynamic(instance) => instance.symbol_name(),
}
}
///
/// Get access to the associated namespace's `Any` representation.
/// its `type_id` may be used as a reflection tool to get to know about the Symbol's origin.
///
pub fn as_any(&self) -> &dyn std::any::Any {
match self {
Self::Static(ns, _) => ns.as_any(),
Self::Dynamic(instance) => instance.as_any(),
}
}
///
/// Try to downcast this Symbol's originating _static namespace_ to a concrete `&T`,
/// and if successful, return that concrete namespace along with the symbol's static id.
///
pub fn downcast_static<T: 'static>(&self) -> Option<(&T, u32)> {
match self {
Self::Static(ns, id) => ns.as_any().downcast_ref::<T>().map(|t| (t, *id)),
Self::Dynamic(_) => None,
}
}
///
/// Try to downcast this Symbol's _dynamic namespace_ as a `&T`.
///
/// Always fails for static namespaces.
///
pub fn downcast_dyn<T: 'static>(&self) -> Option<&T> {
match self {
Self::Static(_, _) => None,
Self::Dynamic(instance) => instance.as_any().downcast_ref::<T>(),
}
}
}
impl Clone for Symbol {
fn clone(&self) -> Self {
match self {
Self::Static(static_symbol, id) => Self::Static(*static_symbol, *id),
Self::Dynamic(instance) => Self::Dynamic(instance.dyn_clone()),
}
}
}
impl std::fmt::Debug for Symbol {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match self {
Self::Static(ns, id) => {
write!(f, "{}::{}", ns.namespace_name(), ns.symbol_name(*id))
}
Self::Dynamic(instance) => {
write!(
f,
"{}::{}",
instance.namespace_name(),
instance.symbol_name()
)
}
}
}
}
impl PartialEq for Symbol {
fn eq(&self, rhs: &Symbol) -> bool {
match (self, rhs) {
(Self::Static(this_ns, this_id), Self::Static(rhs_ns, rhs_id)) => {
*this_id == *rhs_id && this_ns.type_id() == rhs_ns.type_id()
}
(Self::Dynamic(this), Self::Dynamic(rhs)) => {
this.type_id() == rhs.type_id() && this.dyn_eq(rhs.as_ref())
}
_ => false,
}
}
}
impl Eq for Symbol {}
impl Ord for Symbol {
fn cmp(&self, rhs: &Symbol) -> Ordering {
match (self, rhs) {
(Self::Static(this_ns, this_id), Self::Static(rhs_ns, rhs_id)) => {
let this_type_id = this_ns.type_id();
let rhs_type_id = rhs_ns.type_id();
if this_type_id == rhs_type_id {
this_id.cmp(&rhs_id)
} else {
this_type_id.cmp(&rhs_type_id)
}
}
(Self::Dynamic(this), Self::Dynamic(rhs)) => {
let this_type_id = this.type_id();
let rhs_type_id = rhs.type_id();
if this_type_id == rhs_type_id {
this.dyn_cmp(rhs.as_ref())
} else {
this_type_id.cmp(&rhs_type_id)
}
}
(Self::Static(_, _), Self::Dynamic(_)) => Ordering::Less,
(Self::Dynamic(_), Self::Static(_, _)) => Ordering::Greater,
}
}
}
impl PartialOrd for Symbol {
fn partial_cmp(&self, other: &Symbol) -> Option<std::cmp::Ordering> {
Some(self.cmp(other))
}
}
impl std::hash::Hash for Symbol {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
match self {
Self::Static(ns, id) => {
ns.type_id().hash(state);
state.write_u32(*id)
}
Self::Dynamic(dynamic_sym) => {
dynamic_sym.type_id().hash(state);
dynamic_sym.dyn_hash(state)
}
}
}
}
pub mod namespace {
    //!
    //! Namespace traits that must be implemented by symbol providers.
    //!
    use downcast_rs::*;
    ///
    /// A static namespace. Symbols in a static namespace are identified with an `id` encoded as a `u32`.
    ///
    pub trait Static: Send + Sync + Downcast {
        ///
        /// The namespace's name, used for [Debug][std::fmt::Debug].
        ///
        fn namespace_name(&self) -> &str;
        ///
        /// A symbol's name, used for [Debug][std::fmt::Debug].
        ///
        fn symbol_name(&self, id: u32) -> &str;
    }
    ///
    /// A dynamic namespace. A dynamic symbol instance is tied to `Self`.
    ///
    pub trait Dynamic: Send + Sync + Downcast {
        ///
        /// The namespace's name, used for [Debug][std::fmt::Debug].
        ///
        fn namespace_name(&self) -> &str;
        ///
        /// The symbol name, used for [Debug][std::fmt::Debug].
        ///
        fn symbol_name(&self) -> &str;
        ///
        /// Clone this dynamic symbol. Must return a new symbol instance that is `eq` to `&self`.
        ///
        fn dyn_clone(&self) -> Box<dyn Dynamic>;
        ///
        /// Dynamic [eq](std::cmp::PartialEq::eq). `rhs` can be unconditionally downcasted to `Self`.
        ///
        fn dyn_eq(&self, rhs: &dyn Dynamic) -> bool;
        ///
        /// Dynamic [cmp](std::cmp::Ord::cmp). `rhs` can be unconditionally downcasted to `Self`.
        ///
        fn dyn_cmp(&self, rhs: &dyn Dynamic) -> std::cmp::Ordering;
        ///
        /// Dynamic [hash](std::hash::Hash::hash). Must be consistent with
        /// [`dyn_eq`](Dynamic::dyn_eq): equal symbols must feed identical
        /// bytes to `state`.
        ///
        fn dyn_hash(&self, state: &mut dyn std::hash::Hasher);
    }
    impl_downcast!(Dynamic);
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::hash::{BuildHasher, Hash, Hasher};
    // Two static namespaces distinguished by the const generic `N`,
    // giving them distinct TypeIds.
    mod _static {
        use super::*;
        pub struct ClassN<const N: u8> {
            class_name: &'static str,
            names: &'static [&'static str],
        }
        impl<const N: u8> namespace::Static for ClassN<N> {
            fn namespace_name(&self) -> &str {
                self.class_name
            }
            fn symbol_name(&self, id: u32) -> &str {
                self.names[id as usize]
            }
        }
        pub const STATIC_NS_CLASS_A: ClassN<1> = ClassN {
            class_name: "A",
            names: &["0", "1"],
        };
        pub const STATIC_NS_CLASS_B: ClassN<2> = ClassN {
            class_name: "B",
            names: &["0"],
        };
    }
    // String-backed dynamic namespaces, again split by const generic `N`.
    mod dynamic {
        use super::*;
        pub struct TestDynamic<const N: u8>(pub String, &'static str);
        impl<const N: u8> namespace::Dynamic for TestDynamic<N> {
            fn namespace_name(&self) -> &str {
                self.1
            }
            fn symbol_name(&self) -> &str {
                &self.0
            }
            fn dyn_clone(&self) -> Box<dyn namespace::Dynamic> {
                Box::new(TestDynamic::<N>(self.0.clone(), self.1))
            }
            fn dyn_eq(&self, rhs: &dyn namespace::Dynamic) -> bool {
                self.0 == rhs.downcast_ref::<TestDynamic<N>>().unwrap().0
            }
            fn dyn_cmp(&self, rhs: &dyn namespace::Dynamic) -> std::cmp::Ordering {
                self.0.cmp(&rhs.downcast_ref::<TestDynamic<N>>().unwrap().0)
            }
            fn dyn_hash(&self, state: &mut dyn std::hash::Hasher) {
                state.write(self.0.as_bytes());
                state.write_u8(0xff)
            }
        }
        pub fn sym0(str: &str) -> Symbol {
            Symbol::Dynamic(Box::new(TestDynamic::<0>(str.into(), "dyn0")))
        }
        pub fn sym1(str: &str) -> Symbol {
            Symbol::Dynamic(Box::new(TestDynamic::<1>(str.into(), "dyn1")))
        }
    }
    const STATIC_A_0: Symbol = Symbol::Static(&_static::STATIC_NS_CLASS_A, 0);
    const STATIC_A_1: Symbol = Symbol::Static(&_static::STATIC_NS_CLASS_A, 1);
    const STATIC_B_0: Symbol = Symbol::Static(&_static::STATIC_NS_CLASS_B, 0);
    // Shares one RandomState so equal symbols must hash to equal values.
    struct TestState {
        random_state: std::collections::hash_map::RandomState,
    }
    impl TestState {
        pub fn new() -> Self {
            Self {
                random_state: std::collections::hash_map::RandomState::new(),
            }
        }
        // NOTE(review): asserting *unequal* hashes for unequal symbols is not
        // guaranteed in principle (collisions are possible), but is
        // deterministic for these fixtures.
        fn assert_hash_match(&self, a: &Symbol, b: &Symbol, should_equal: bool) {
            let mut hasher_a = self.random_state.build_hasher();
            let mut hasher_b = self.random_state.build_hasher();
            a.hash(&mut hasher_a);
            b.hash(&mut hasher_b);
            if should_equal {
                assert_eq!(hasher_a.finish(), hasher_b.finish())
            } else {
                assert_ne!(hasher_a.finish(), hasher_b.finish())
            }
        }
        // Checks eq/cmp/hash agree that `a` and `b` are equal.
        fn assert_full_eq(&self, a: &Symbol, b: &Symbol) {
            assert_eq!(a, b);
            assert_eq!(a.cmp(b), Ordering::Equal);
            self.assert_hash_match(a, b, true)
        }
        // Checks eq/cmp/hash agree that `a` and `b` differ.
        fn assert_full_ne(&self, a: &Symbol, b: &Symbol) {
            assert_ne!(a, b);
            assert_ne!(a.cmp(b), Ordering::Equal);
            self.assert_hash_match(a, b, false)
        }
    }
    #[test]
    fn test_symbol_size_of() {
        let u_size = std::mem::size_of::<usize>();
        // This size_of Symbol is computed like this:
        // It's at least two words, because of `dyn`.
        // it's more than two words because it needs to encode the A/B enum value.
        // on 64-bit arch it should be 3 words, because it contains an `u32` too,
        // and that should be encoded within the same machine word as the enum discriminant..
        // I think...
        let expected_word_size = match u_size {
            8 => 3 * u_size,
            // 4 => 4, Perhaps?
            _ => panic!("untested word size"),
        };
        assert_eq!(std::mem::size_of::<Symbol>(), expected_word_size);
    }
    #[test]
    fn test_debug() {
        assert_eq!(format!("{:?}", STATIC_A_0), "A::0");
        assert_eq!(format!("{:?}", STATIC_A_1), "A::1");
        assert_eq!(format!("{:?}", STATIC_B_0), "B::0");
        assert_eq!(format!("{:?}", dynamic::sym0("foo")), "dyn0::foo");
        assert_eq!(format!("{:?}", dynamic::sym1("bar")), "dyn1::bar");
    }
    #[test]
    fn test_equality() {
        let test_state = TestState::new();
        test_state.assert_full_eq(&STATIC_A_0, &STATIC_A_0);
        test_state.assert_full_eq(&STATIC_A_1, &STATIC_A_1);
        test_state.assert_full_eq(&STATIC_B_0, &STATIC_B_0);
        test_state.assert_full_ne(&STATIC_A_0, &STATIC_A_1);
        test_state.assert_full_ne(&STATIC_A_1, &STATIC_B_0);
        test_state.assert_full_eq(&dynamic::sym0("foo"), &dynamic::sym0("foo"));
    }
    #[test]
    fn test_inequality() {
        let test_state = TestState::new();
        test_state.assert_full_ne(&STATIC_A_0, &STATIC_A_1);
        test_state.assert_full_ne(&STATIC_A_0, &STATIC_B_0);
        test_state.assert_full_ne(&dynamic::sym0("foo"), &dynamic::sym0("bar"));
        test_state.assert_full_ne(&dynamic::sym0("foo"), &dynamic::sym1("foo"));
    }
    #[test]
    fn test_ord() {
        assert_ne!(STATIC_A_0.cmp(&STATIC_A_1), Ordering::Equal);
        assert_ne!(STATIC_A_0.cmp(&STATIC_B_0), Ordering::Equal);
        assert_ne!(STATIC_A_1.cmp(&STATIC_B_0), Ordering::Equal);
    }
}
| true |
3fc89be3621243b93081c49ec6eb8ddfbde91afd
|
Rust
|
xbsura/arrow-datafusion
|
/ballista/rust/client/src/lib.rs
|
UTF-8
| 4,659 | 2.578125 | 3 |
[
"Apache-2.0",
"JSON",
"NTP",
"CC-BY-4.0",
"LicenseRef-scancode-protobuf",
"BSD-3-Clause",
"CC-BY-3.0",
"OpenSSL",
"Zlib",
"CC0-1.0",
"LLVM-exception",
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"ZPL-2.1",
"BSL-1.0",
"LicenseRef-scancode-public-domain",
"BSD-2-Clause"
] |
permissive
|
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//! Ballista is a distributed compute platform primarily implemented in Rust, and powered by Apache Arrow and
//! DataFusion. It is built on an architecture that allows other programming languages (such as Python, C++, and
//! Java) to be supported as first-class citizens without paying a penalty for serialization costs.
//!
//! The foundational technologies in Ballista are:
//!
//! - [Apache Arrow](https://arrow.apache.org/) memory model and compute kernels for efficient processing of data.
//! - [Apache Arrow Flight Protocol](https://arrow.apache.org/blog/2019/10/13/introducing-arrow-flight/) for efficient
//! data transfer between processes.
//! - [Google Protocol Buffers](https://developers.google.com/protocol-buffers) for serializing query plans.
//! - [Docker](https://www.docker.com/) for packaging up executors along with user-defined code.
//!
//! Ballista can be deployed as a standalone cluster and also supports [Kubernetes](https://kubernetes.io/). In either
//! case, the scheduler can be configured to use [etcd](https://etcd.io/) as a backing store to (eventually) provide
//! redundancy in the case of a scheduler failing.
//!
//! ## Starting a cluster
//!
//! There are numerous ways to start a Ballista cluster, including support for Docker and
//! Kubernetes. For full documentation, refer to the
//! [DataFusion User Guide](https://github.com/apache/arrow-datafusion/tree/master/docs/user-guide)
//!
//! A simple way to start a local cluster for testing purposes is to use cargo to install
//! the scheduler and executor crates.
//!
//! ```bash
//! cargo install ballista-scheduler
//! cargo install ballista-executor
//! ```
//!
//! With these crates installed, it is now possible to start a scheduler process.
//!
//! ```bash
//! RUST_LOG=info ballista-scheduler
//! ```
//!
//! The scheduler will bind to port 50050 by default.
//!
//! Next, start an executor processes in a new terminal session with the specified concurrency
//! level.
//!
//! ```bash
//! RUST_LOG=info ballista-executor -c 4
//! ```
//!
//! The executor will bind to port 50051 by default. Additional executors can be started by
//! manually specifying a bind port. For example:
//!
//! ```bash
//! RUST_LOG=info ballista-executor --bind-port 50052 -c 4
//! ```
//!
//! ## Executing a query
//!
//! Ballista provides a `BallistaContext` as a starting point for creating queries. DataFrames can be created
//! by invoking the `read_csv`, `read_parquet`, and `sql` methods.
//!
//! The following example runs a simple aggregate SQL query against a CSV file from the
//! [New York Taxi and Limousine Commission](https://www1.nyc.gov/site/tlc/about/tlc-trip-record-data.page)
//! data set.
//!
//! ```no_run
//! use ballista::prelude::*;
//! use datafusion::arrow::util::pretty;
//! use datafusion::prelude::CsvReadOptions;
//!
//! #[tokio::main]
//! async fn main() -> Result<()> {
//! // create configuration
//! let config = BallistaConfig::builder()
//! .set("ballista.shuffle.partitions", "4")
//! .build()?;
//!
//! // connect to Ballista scheduler
//! let ctx = BallistaContext::remote("localhost", 50050, &config);
//!
//! // register csv file with the execution context
//! ctx.register_csv(
//! "tripdata",
//! "/path/to/yellow_tripdata_2020-01.csv",
//! CsvReadOptions::new(),
//! )?;
//!
//! // execute the query
//! let df = ctx.sql(
//! "SELECT passenger_count, MIN(fare_amount), MAX(fare_amount), AVG(fare_amount), SUM(fare_amount)
//! FROM tripdata
//! GROUP BY passenger_count
//! ORDER BY passenger_count",
//! )?;
//!
//! // collect the results and print them to stdout
//! let results = df.collect().await?;
//! pretty::print_batches(&results)?;
//! Ok(())
//! }
//! ```
pub mod columnar_batch;
pub mod context;
pub mod prelude;
| true |
9e9ef78d7c71e53cc600f59d387ab91a54a1c9b2
|
Rust
|
ytitov/json_to_csv_tables
|
/src/parts.rs
|
UTF-8
| 9,763 | 2.9375 | 3 |
[] |
no_license
|
use clap::Clap;
use serde_json::Value;
use std::collections::{BTreeMap, HashMap};
use std::fmt;
use std::fs::File;
use std::io::prelude::*;
use std::io::BufReader;
pub mod err;
pub mod table;
pub use table::Table;
// Command-line options. (Plain `//` comments are used on purpose: a `///`
// doc comment would become clap's generated help text for each field.)
#[derive(Clap, Debug, Clone)]
#[clap(version = "0.1.0", author = "Yuri Titov <[email protected]>")]
pub struct Opts {
    // Path of the input JSON file to convert.
    pub in_file: String,
    // Directory where the generated CSV files are written.
    pub out_folder: String,
    #[clap(short, long, default_value = "ROOT", about = "The root table")]
    pub root_table_name: String,
    // Suffix appended to generated id/foreign-key column names.
    #[clap(short, long, default_value = "_ID")]
    pub column_id_postfix: String,
    #[clap(long, about = "Add a column to the table inside the given csv file")]
    pub add_column_name: Option<String>,
    #[clap(
        long,
        about = "Number of json objects to scan before writing to disk, if not specified, the full file is scanned into memory"
    )]
    pub json_buf_size: Option<usize>,
}
/// Summary of an existing CSV file: its header columns (name -> position)
/// and how many lines it contains (including the header line).
struct CsvFileInfo {
    pub columns: BTreeMap<String, u16>,
    pub lines_in_file: usize,
}
impl From<&File> for CsvFileInfo {
    /// Reads the header columns from the first line and counts all lines.
    fn from(file: &File) -> Self {
        let reader = BufReader::new(file);
        let mut columns: BTreeMap<String, u16> = BTreeMap::new();
        let mut lines_seen: usize = 0;
        for line in reader.lines() {
            let line = match line {
                Ok(line) => line,
                // On a read error, report whatever was seen so far.
                Err(_) => break,
            };
            if lines_seen == 0 {
                // First line is the header; duplicate column names keep
                // their first position (`or_insert`).
                for (idx, col) in line.trim().split(',').enumerate() {
                    columns.entry(col.to_owned()).or_insert(idx as u16);
                }
            }
            lines_seen += 1;
        }
        CsvFileInfo {
            columns,
            lines_in_file: lines_seen,
        }
    }
}
/// Opens `filepath` for appending, creating it first if it does not exist.
///
/// Returns `CsvError::CouldNotCreate` when the file can neither be opened
/// nor created.
fn find_or_create_file(filepath: &str) -> Result<File, err::CsvError> {
    use std::fs::OpenOptions;
    use std::path::Path;
    let path = Path::new(filepath);
    // `create(true)` + `append(true)` opens-or-creates atomically in one
    // call, removing the open-then-`File::create` race of the previous
    // two-step version (behavior is otherwise identical: `create` only
    // matters when the file does not yet exist, so nothing is truncated).
    OpenOptions::new()
        .write(true)
        .append(true)
        .create(true)
        .open(path)
        .map_err(|why| {
            err::CsvError::CouldNotCreate(format!("{}, because {}", path.display(), why))
        })
}
fn get_csv_file_info(fname: &str) -> CsvFileInfo {
use std::io::prelude::*;
use std::path::Path;
let path = Path::new(fname);
//let display = path.display();
let mut columns: BTreeMap<String, u16> = BTreeMap::new();
let file = match File::open(&path) {
Err(_why) => {
//println!("INFO: Did not see file {}, will create one", display);
return CsvFileInfo {
columns,
lines_in_file: 0,
};
}
Ok(file) => file,
};
let f = BufReader::new(file);
let mut num: usize = 0;
for line in f.lines() {
match &line {
Ok(line) => {
if num == 0 {
let first_line = String::from(line);
let cols: Vec<&str> = first_line.trim().split(",").collect();
let mut idx = 0;
for col in cols {
columns.entry(col.to_owned()).or_insert(idx);
idx += 1;
}
}
num += 1;
}
Err(_) => {
return CsvFileInfo {
columns,
lines_in_file: num,
};
}
}
}
return CsvFileInfo {
columns,
lines_in_file: num,
};
}
/// Flattened relational view of the scanned JSON documents: one `Table` per
/// nested object/array path, plus the options controlling the conversion.
#[derive(Debug)]
pub struct Schema {
    // key: (depth, table name)
    // NOTE(review): the comment above looks stale — keys appear to be plain
    // underscore-joined table paths (see `walk_props`); confirm before relying on it.
    pub data: HashMap<String, Table>,
    pub opts: Opts,
}
impl fmt::Display for Schema {
    /// Render every table on its own line. Iteration order is unspecified
    /// because the backing store is a `HashMap`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // `writeln!` replaces `write!(f, "{}\n", ..)` (clippy: write_with_newline),
        // and the pointless trailing `write!(f, "")` becomes `Ok(())`.
        for table in self.data.values() {
            writeln!(f, "{}", table)?;
        }
        Ok(())
    }
}
impl Schema {
    /// Create an empty schema that will be populated by `walk_props`.
    pub fn new(opts: Opts) -> Self {
        Schema {
            data: HashMap::new(),
            opts,
        }
    }
    /// Register `table_name` if it has not been seen before.
    pub fn create_table(&mut self, table_name: String) {
        if !self.data.contains_key(&table_name) {
            let t = Table::new(&table_name, &self.opts);
            self.data.insert(table_name, t);
        }
    }
    /// Total number of rows a table holds: rows already flushed to disk
    /// (`row_offset`) plus the rows still buffered in memory.
    ///
    /// Panics if the table does not exist; `walk_props` always creates the
    /// table before querying it.
    pub fn get_num_table_rows(&mut self, tables: &[String]) -> usize {
        let table_name = tables.join("_");
        if let Some(t) = self.data.get_mut(&table_name) {
            t.rows.len() + t.row_offset
        } else {
            panic!("set_row could not find the table, by this point this should not happen");
        }
    }
    /// Append `row` to the table addressed by the `tables` path.
    ///
    /// Panics if the table does not exist (see `get_num_table_rows`).
    pub fn add_table_row(
        &mut self,
        tables: &[String],
        row: BTreeMap<String, Value>,
    ) -> Result<(), err::CsvError> {
        let table_name = tables.join("_");
        if let Some(t) = self.data.get_mut(&table_name) {
            t.add_row(row)?;
            Ok(())
        } else {
            // BUG FIX: `panic!(format!(..))` panics with a non-literal format
            // string — deprecated since 2018 and a hard error in edition 2021.
            // The message itself is unchanged.
            panic!(
                "set_row could not find the table ({:?})\n {:?}",
                tables, &row
            );
        }
    }
    /// Derive the foreign-key column name for a parent table.
    fn as_fk(&self, s: &str) -> String {
        format!("{}{}", s, self.opts.column_id_postfix)
    }
    /// Recursively walk a JSON value, flattening nested objects and arrays
    /// into tables named by their path (segments joined with '_').
    ///
    /// Returns `Some((column, value))` for scalar leaves so the caller can
    /// collect them into a row, and `None` for containers (which are written
    /// into their own tables, linked to the parent via a foreign-key column).
    pub fn walk_props(
        &mut self,
        parents: Vec<String>,
        val: Value,
    ) -> Result<Option<(String, Value)>, err::CsvError> {
        match val {
            Value::Object(obj) => {
                self.create_table(parents.join("_"));
                let mut row_values = BTreeMap::new();
                for (key, val) in obj {
                    let mut p = parents.clone();
                    p.push(key);
                    if let Some((key, value)) = self.walk_props(p, val)? {
                        row_values.insert(key, value);
                    }
                }
                if parents.len() > 1 {
                    // Link this row back to the parent table's current row.
                    let grand_parents = parents[..parents.len() - 1].to_vec();
                    let grand_parent_name = grand_parents.join("_");
                    row_values.insert(
                        self.as_fk(&grand_parent_name),
                        Value::from(self.get_num_table_rows(&grand_parents)),
                    );
                }
                self.add_table_row(&parents, row_values)?;
                Ok(None)
            }
            Value::Array(arr) => {
                self.create_table(parents.join("_"));
                for val in arr {
                    let mut row_values = BTreeMap::new();
                    if let Some((key, value)) = self.walk_props(parents.clone(), val)? {
                        row_values.insert(key, value);
                    }
                    // Containers inside the array write their own rows, so a
                    // row is only created here for scalar elements.
                    if !row_values.is_empty() {
                        if parents.len() > 1 {
                            let grand_parents = parents[..parents.len() - 1].to_vec();
                            let grand_parent_name = grand_parents.join("_");
                            row_values.insert(
                                self.as_fk(&grand_parent_name),
                                Value::from(self.get_num_table_rows(&grand_parents)),
                            );
                        }
                        self.add_table_row(&parents, row_values)?;
                    }
                }
                Ok(None)
            }
            other_value => {
                // Scalar leaf: the last path segment is the column name.
                let key = &parents[parents.len() - 1];
                Ok(Some((key.to_owned(), other_value)))
            }
        }
    }
    /// Write every table out as a CSV file, consuming the schema.
    pub fn export_csv(self) -> Result<(), err::CsvError> {
        for (_, table) in self.data {
            table.export_csv(&self.opts)?;
        }
        Ok(())
    }
    /// Read the input file line by line (one JSON document per line), flatten
    /// each document, and finally export all tables to CSV.
    ///
    /// When `json_buf_size` is set, buffered rows are flushed to disk every
    /// time that many documents have been scanned, bounding memory use.
    pub fn process_file(mut self) -> std::result::Result<(), Box<dyn std::error::Error>> {
        let f = File::open(&self.opts.in_file)?;
        let f = BufReader::new(f);
        let mut num_lines_read = 0;
        for line in f.lines() {
            match serde_json::from_str(&line?) {
                Ok::<Value, _>(val) => {
                    self.walk_props(vec![String::from(&self.opts.root_table_name)], val)?;
                    num_lines_read += 1;
                }
                Err(e) => {
                    // A malformed line is skipped rather than aborting the run.
                    println!("WARNING: {}, skipping this json string.", e);
                }
            }
            if let Some(json_buf_size) = self.opts.json_buf_size {
                if num_lines_read >= json_buf_size {
                    for (_, table) in &mut self.data {
                        table.flush_to_file(&self.opts)?;
                    }
                    num_lines_read = 0;
                }
            }
        }
        self.export_csv()?;
        Ok(())
    }
}
| true |
e17a2e609b57088d4ee59d095283a11828ab13c2
|
Rust
|
brandon515/Chess-Bot
|
/src/chess.rs
|
UTF-8
| 14,523 | 2.890625 | 3 |
[] |
no_license
|
use rustc_serialize::json;
use std::vec::Vec;
use std::collections::HashMap;
use std::fs::{
File,
OpenOptions,
};
use std::io;
use std::io::{
Write,
Read,
};
/// Full game state: the board, the captured pieces, and the two players.
#[derive(RustcDecodable,RustcEncodable)]
struct ChessMap {
    // Move history; never written anywhere visible in this file.
    pub moves: Vec<String>,
    // Square (0oXY encoding, see the coordinate notes below) -> piece string
    // of the form "<w|b><type>", e.g. "wpawn".
    board: HashMap<u64, String>,
    // Pieces removed from the board, in the order they were taken.
    pub discarded_pieces: Vec<String>,
    pub player_white: String,
    pub player_black: String,
}
/**
*The coordinate system is a standard chess board but in the octal numbering system
 *The origin is in the lower left corner.
* 0oXY
* X is the letter starting with a=0 and h=7
* Y is the number-1, that is Vertical_Coordinate=Y+1
* So for example A4 would be 0o03 and E1 would be 0o40
***/
impl ChessMap {
    /// Set up a standard chess starting position for the two named players.
    pub fn new(white_name: String, black_name: String) -> ChessMap {
        let mut new_board = HashMap::new();
        // Pawns: white on rank 2 (y = 1), black on rank 7 (y = 6), one per file.
        for letter in 0..8 {
            new_board.insert(0o01 + (letter * 0o10), "wpawn".to_string());
            new_board.insert(0o06 + (letter * 0o10), "bpawn".to_string());
        }
        // White back rank (y = 0), files a..h.
        new_board.insert(0o00, "wrook".to_string());
        new_board.insert(0o10, "wknight".to_string());
        new_board.insert(0o20, "wbishop".to_string());
        new_board.insert(0o30, "wqueen".to_string());
        new_board.insert(0o40, "wking".to_string());
        new_board.insert(0o50, "wbishop".to_string());
        new_board.insert(0o60, "wknight".to_string());
        new_board.insert(0o70, "wrook".to_string());
        // Black back rank (y = 7).
        new_board.insert(0o07, "brook".to_string());
        new_board.insert(0o17, "bknight".to_string());
        new_board.insert(0o27, "bbishop".to_string());
        new_board.insert(0o37, "bqueen".to_string());
        new_board.insert(0o47, "bking".to_string());
        new_board.insert(0o57, "bbishop".to_string());
        new_board.insert(0o67, "bknight".to_string());
        new_board.insert(0o77, "brook".to_string());
        ChessMap {
            player_black: black_name,
            player_white: white_name,
            discarded_pieces: Vec::new(),
            moves: Vec::new(),
            board: new_board,
        }
    }
    /// Return every square the piece at `location` may legally move to.
    /// Off-board or empty squares yield an empty vector.
    pub fn legal_moves(&self, location: u64) -> Vec<u64> {
        let mut valid_moves = Vec::new();
        if location >= 0o100 {
            return valid_moves;
        }
        let piece = match self.board.get(&location) {
            Some(x) => x,
            None => return valid_moves,
        };
        // Piece strings are "<alignment><type>", e.g. "wpawn".
        let alignment = &piece[0..1];
        let piece_type = &piece[1..piece.len()];
        if piece_type == "rook" {
            valid_moves = self.search_cross(alignment, location);
        } else if piece_type == "knight" {
            // The eight L-shaped knight offsets replace eight copy-pasted
            // `match` blocks from the original.
            for &(dx, dy) in [
                (-2, 1),
                (-2, -1),
                (-1, 2),
                (-1, -2),
                (1, 2),
                (1, -2),
                (2, 1),
                (2, -1),
            ]
            .iter()
            {
                if let Some(square) = self.check_relative_space(alignment, location, dx, dy) {
                    valid_moves.push(square);
                }
            }
        } else if piece_type == "bishop" {
            valid_moves = self.search_x(alignment, location);
        } else if piece_type == "pawn" {
            let (_, vertical_place) = ChessMap::unpack_horizontal_vertical(location);
            // White pawns advance upward (+1); black pawns — or a white pawn
            // already on the last rank (promotion is not modelled) — move down.
            let vertical_offset: i64 = if alignment == "w" && vertical_place + 1 < 0o10 {
                1
            } else {
                -1
            };
            // Straight advance. BUG FIX: the old code could generate an
            // off-board/wrapped forward square; `check_relative_space` bounds-
            // checks it. NOTE(review): like the original, this still allows
            // stepping forward onto an enemy-occupied square, which real pawns
            // cannot do — preserved to keep the change minimal.
            if let Some(forward) =
                self.check_relative_space(alignment, location, 0, vertical_offset)
            {
                valid_moves.push(forward);
            }
            // Diagonal captures. BUG FIX: the original `else if` checked only
            // ONE diagonal per pawn; both sides must be considered, and only
            // occupied squares are capturable.
            for &dx in [-1i64, 1].iter() {
                if let Some(diag) =
                    self.check_relative_space(alignment, location, dx, vertical_offset)
                {
                    if self.board.contains_key(&diag) {
                        valid_moves.push(diag);
                    }
                }
            }
        } else if piece_type == "queen" {
            valid_moves.extend(self.search_cross(alignment, location));
            valid_moves.extend(self.search_x(alignment, location));
        } else if piece_type == "king" {
            let mut potential_moves = self.check_immediate_moves(alignment, location);
            if potential_moves.is_empty() {
                return valid_moves;
            }
            // Remove every square covered by an enemy piece so the king never
            // steps into check.
            for (location_scan, piece) in self.board.iter() {
                let alignment_scan = &piece[0..1];
                if alignment_scan == alignment {
                    continue;
                }
                // Recursing into legal_moves for the enemy king would not
                // terminate, so kings use their raw one-step coverage.
                let covered = if &piece[1..piece.len()] == "king" {
                    self.check_immediate_moves(alignment_scan, *location_scan)
                } else {
                    self.legal_moves(*location_scan)
                };
                for square in covered {
                    if let Some(index) = potential_moves.iter().position(|&m| m == square) {
                        potential_moves.remove(index);
                    }
                }
            }
            valid_moves.extend(potential_moves);
        }
        valid_moves
    }
    /// Move a piece, returning whether the move was legal and performed.
    /// A captured piece is appended to `discarded_pieces`.
    pub fn move_piece(&mut self, current_location: u64, new_location: u64) -> bool {
        if !self.legal_moves(current_location).contains(&new_location) {
            return false;
        }
        let piece = match self.board.remove(&current_location) {
            Some(x) => x,
            None => return false,
        };
        // BUG FIX: the old code discarded the *moving* piece and re-inserted
        // the previous occupant. A capture must discard the occupant and let
        // the mover take the square.
        if let Some(captured) = self.board.insert(new_location, piece) {
            self.discarded_pieces.push(captured);
        }
        true
    }
    /// Persist the game as JSON to a file named "<white>_<black>".
    pub fn save(chess_board: ChessMap) -> io::Result<()> {
        let data = json::encode(&chess_board).unwrap();
        // BUG FIX: without `.create(true)` the save failed whenever the file
        // did not already exist (write+truncate alone never creates).
        let mut file = OpenOptions::new()
            .write(true)
            .truncate(true)
            .create(true)
            .open(format!("{}_{}", chess_board.player_white, chess_board.player_black))?;
        file.write_all(data.as_bytes())
    }
    /// Load a previously saved game; `None` when the file is missing,
    /// implausibly short, or not valid JSON.
    pub fn from_file(player_white: String, player_black: String) -> Option<ChessMap> {
        let mut file = OpenOptions::new()
            .read(true)
            .open(format!("{}_{}", player_white, player_black))
            .ok()?;
        let mut data = String::new();
        match file.read_to_string(&mut data) {
            // Anything shorter than 3 bytes cannot be a saved game.
            Ok(bytes) if bytes >= 3 => json::decode(&data).ok(),
            _ => None,
        }
    }
    /// Rook-style moves: rays in the four cardinal directions.
    fn search_cross(&self, alignment: &str, location: u64) -> Vec<u64> {
        let mut valid_moves = Vec::new();
        valid_moves.extend(self.search_ray(alignment, location, 1, 0)); // right
        valid_moves.extend(self.search_ray(alignment, location, -1, 0)); // left
        valid_moves.extend(self.search_ray(alignment, location, 0, 1)); // above
        valid_moves.extend(self.search_ray(alignment, location, 0, -1)); // below
        valid_moves
    }
    /// Bishop-style moves: rays along the four diagonals.
    fn search_x(&self, alignment: &str, location: u64) -> Vec<u64> {
        let mut valid_moves = Vec::new();
        valid_moves.extend(self.search_ray(alignment, location, 1, 1)); // upper right
        valid_moves.extend(self.search_ray(alignment, location, -1, 1)); // upper left
        valid_moves.extend(self.search_ray(alignment, location, 1, -1)); // lower right
        valid_moves.extend(self.search_ray(alignment, location, -1, -1)); // lower left
        valid_moves
    }
    /// A square can be moved onto unless it holds a friendly piece or any
    /// king (kings are never captured directly).
    fn space_valid(&self, alignment: &str, location: u64) -> bool {
        match self.board.get(&location) {
            Some(piece) => &piece[0..1] != alignment && &piece[1..piece.len()] != "king",
            None => true,
        }
    }
    /// Remove the piece at `location` (if any) and add it to the discard pile.
    fn kill_piece(&mut self, location: u64) -> bool {
        match self.board.remove(&location) {
            Some(piece) => {
                self.discarded_pieces.push(piece);
                true
            }
            None => false,
        }
    }
    /// Split an 0oXY square into its (file, rank) octal digits.
    fn unpack_horizontal_vertical(location: u64) -> (u64, u64) {
        (location / 0o10, location % 0o10)
    }
    /// Same as `unpack_horizontal_vertical`, but signed for offset arithmetic.
    fn unpack_horizontal_vertical_signed(location: u64) -> (i64, i64) {
        let (horizontal_place, vertical_place) = ChessMap::unpack_horizontal_vertical(location);
        (horizontal_place as i64, vertical_place as i64)
    }
    /// Walk outward from `location` one (step_x, step_y) at a time, collecting
    /// squares until the board edge or the first occupied square (which is
    /// included when capturable — rays cannot pass through pieces).
    fn search_ray(&self, alignment: &str, location: u64, step_x: i64, step_y: i64) -> Vec<u64> {
        let mut valid_moves: Vec<u64> = Vec::new();
        for step in 1..8 {
            let (horizontal_place, vertical_place) =
                ChessMap::unpack_horizontal_vertical_signed(location);
            let horizontal_scan = horizontal_place + (step_x * step);
            let vertical_scan = vertical_place + (step_y * step);
            if horizontal_scan < 0 || horizontal_scan >= 0o10 {
                return valid_moves;
            }
            if vertical_scan < 0 || vertical_scan >= 0o10 {
                return valid_moves;
            }
            let location_scan = ((horizontal_scan * 0o10) + vertical_scan) as u64;
            if self.space_valid(alignment, location_scan) {
                valid_moves.push(location_scan);
            }
            if self.board.contains_key(&location_scan) {
                return valid_moves;
            }
        }
        valid_moves
    }
    /// Return `location` offset by (x_offset, y_offset) when that square is on
    /// the board and may be moved onto; `None` otherwise.
    fn check_relative_space(
        &self,
        alignment: &str,
        location: u64,
        x_offset: i64,
        y_offset: i64,
    ) -> Option<u64> {
        let (horizontal_place, vertical_place) =
            ChessMap::unpack_horizontal_vertical_signed(location);
        let horizontal_scan = horizontal_place + x_offset;
        let vertical_scan = vertical_place + y_offset;
        if horizontal_scan < 0 || horizontal_scan >= 0o10 {
            return None;
        }
        if vertical_scan < 0 || vertical_scan >= 0o10 {
            return None;
        }
        let location_scan = ((horizontal_scan * 0o10) + vertical_scan) as u64;
        if self.space_valid(alignment, location_scan) {
            Some(location_scan)
        } else {
            None
        }
    }
    /// All on-board, enterable squares in the 3x3 neighbourhood of `location`
    /// (the centre square filters itself out via `space_valid`, since it holds
    /// the friendly king).
    fn check_immediate_moves(&self, alignment: &str, location: u64) -> Vec<u64> {
        let mut potential_moves: Vec<u64> = Vec::new();
        for dx in -1..2 {
            for dy in -1..2 {
                if let Some(square) = self.check_relative_space(alignment, location, dx, dy) {
                    potential_moves.push(square);
                }
            }
        }
        potential_moves
    }
}
#[test]
fn legal_rook_test() {
    let mut board = ChessMap::new("blah".to_string(), "didles".to_string());
    // Clear the pawn in front of the h8 rook so it can slide down the file.
    assert!(board.kill_piece(0o76), "Not able to kill pawn in front of rook");
    let rook_moves = board.legal_moves(0o77);
    assert_eq!(rook_moves.len(), 6);
    for square in [0o76, 0o75, 0o74, 0o73, 0o72, 0o71].iter() {
        assert!(rook_moves.contains(square));
    }
}
#[test]
fn legal_bishop_test() {
    let mut board = ChessMap::new("blah".to_string(), "asdfa".to_string());
    // Open the c8 bishop's diagonal by removing the b7 pawn.
    assert!(board.kill_piece(0o16), "Not able to kill pawn next to bishop");
    let bishop_moves = board.legal_moves(0o27);
    assert_eq!(bishop_moves.len(), 2);
    assert!(bishop_moves.contains(&0o05));
    assert!(bishop_moves.contains(&0o16));
}
#[test]
fn legal_knight_test() {
    // Knights jump over the starting pawns, so no setup is needed.
    let board = ChessMap::new("blah".to_string(), "daf".to_string());
    let knight_moves = board.legal_moves(0o60);
    assert_eq!(knight_moves.len(), 2);
    assert!(knight_moves.contains(&0o52));
    assert!(knight_moves.contains(&0o72));
}
#[test]
fn legal_pawn_test() {
    let board = ChessMap::new("blah".to_string(), "asd".to_string());
    // From its starting square the b2 pawn can only step forward one square.
    let pawn_moves = board.legal_moves(0o11);
    assert_eq!(pawn_moves.len(), 1);
    assert!(pawn_moves.contains(&0o12));
}
#[test]
fn legal_king_test() {
    let mut board = ChessMap::new("ahlsdf".to_string(), "asdf".to_string());
    // Boxed in by its own pieces, the black king starts with no moves.
    assert!(board.legal_moves(0o47).is_empty());
    // Freeing e7 gives the king exactly that one square.
    assert!(board.kill_piece(0o46));
    let moves = board.legal_moves(0o47);
    assert_eq!(moves.len(), 1);
    assert_eq!(*(moves.get(0).unwrap()), 0o46);
    // A white queen covering e7 puts the square back off limits.
    assert!(board.kill_piece(0o31));
    assert!(board.move_piece(0o30, 0o35));
    assert!(board.legal_moves(0o47).is_empty());
}
#[test]
fn kill_piece_test() {
    let mut test_board = ChessMap::new("blah".to_string(), "asdf".to_string());
    // assert!(..) instead of assert_eq!(.., true) (clippy: bool_assert_comparison).
    assert!(test_board.kill_piece(0o76));
    assert_eq!(test_board.discarded_pieces.len(), 1);
    // 0o76 is h7, which starts with a black pawn.
    assert!(test_board.discarded_pieces.contains(&("bpawn".to_string())));
}
#[test]
fn move_piece_test() {
    let mut board = ChessMap::new("absldf".to_string(), "asldf".to_string());
    // Clear h2, slide the h1 rook up the file, then capture on h7.
    assert!(board.kill_piece(0o71));
    assert!(board.move_piece(0o70, 0o75));
    assert!(board.move_piece(0o75, 0o76));
    // Two pieces are off the board: the killed pawn and the captured one.
    assert_eq!(board.discarded_pieces.len(), 2);
}
#[test]
fn save_load_test() {
    let player_white = "test1".to_string();
    let player_black = "test2".to_string();
    let mut test_board = ChessMap::new(player_white.clone(), player_black.clone());
    assert!(test_board.kill_piece(0o77));
    assert_eq!(test_board.discarded_pieces.len(), 1);
    // Round-trip the game through the "test1_test2" file on disk.
    ChessMap::save(test_board).unwrap();
    // `load_board` is never mutated, so `mut` was a needless warning.
    let load_board = ChessMap::from_file(player_white.clone(), player_black.clone()).unwrap();
    assert_eq!(load_board.discarded_pieces.len(), 1);
    assert_eq!(*(load_board.discarded_pieces.get(0).unwrap()), "brook");
}
| true |
b087f00078a7ef846d11d165003601a6a18c5c8e
|
Rust
|
LaurentMazare/tch-rs
|
/src/tensor/iter.rs
|
UTF-8
| 1,495 | 2.921875 | 3 |
[
"MIT",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use super::Tensor;
use crate::TchError;
/// Lazy element-wise iterator over a 1-dimensional tensor.
pub struct Iter<T> {
    index: i64, // next element index to yield
    len: i64, // total number of elements (the tensor's single dimension)
    content: Tensor, // shallow clone of the tensor being iterated
    phantom: std::marker::PhantomData<T>, // pins the yielded element type T
}
impl Tensor {
    /// Returns a typed iterator over the elements of a 1-dimensional tensor.
    ///
    /// # Errors
    /// Fails when the tensor does not have exactly one dimension.
    pub fn iter<T>(&self) -> Result<Iter<T>, TchError> {
        let len = self.size1()?;
        Ok(Iter {
            index: 0,
            len,
            content: self.shallow_clone(),
            phantom: std::marker::PhantomData,
        })
    }
}
impl std::iter::Iterator for Iter<i64> {
    type Item = i64;

    /// Yield the next scalar, or `None` once the tensor is exhausted.
    fn next(&mut self) -> Option<Self::Item> {
        if self.index < self.len {
            let value = self.content.int64_value(&[self.index]);
            self.index += 1;
            Some(value)
        } else {
            None
        }
    }
}
impl std::iter::Iterator for Iter<f64> {
    type Item = f64;

    /// Yield the next scalar, or `None` once the tensor is exhausted.
    fn next(&mut self) -> Option<Self::Item> {
        if self.index < self.len {
            let value = self.content.double_value(&[self.index]);
            self.index += 1;
            Some(value)
        } else {
            None
        }
    }
}
impl std::iter::Sum for Tensor {
    /// Sum owned tensors; an empty iterator sums to the scalar 0.
    fn sum<I: Iterator<Item = Tensor>>(mut iter: I) -> Tensor {
        if let Some(first) = iter.next() {
            iter.fold(first, |acc, x| x + acc)
        } else {
            Tensor::from(0.)
        }
    }
}
impl<'a> std::iter::Sum<&'a Tensor> for Tensor {
    /// Sum borrowed tensors without consuming them; empty sums to scalar 0.
    fn sum<I: Iterator<Item = &'a Tensor>>(mut iter: I) -> Tensor {
        if let Some(first) = iter.next() {
            iter.fold(first.shallow_clone(), |acc, x| x + acc)
        } else {
            Tensor::from(0.)
        }
    }
}
| true |
f45df52385f9b9352a1d90b0f3a7ae9d5a49428c
|
Rust
|
nikomatsakis/optopodi
|
/src/metrics.rs
|
UTF-8
| 3,973 | 2.921875 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
use anyhow::Error;
use async_trait::async_trait;
use fehler::throws;
use serde::Deserialize;
use tokio::sync::mpsc::{Receiver, Sender};
use crate::util;
mod export_to_sheets;
mod list_repos;
mod print;
/// A source of tabular data: reports its column names and streams rows
/// (vectors of cell strings) through the provided channel.
#[async_trait]
pub trait Producer {
    /// Names of the columns, in the order cells appear in each row.
    fn column_names(&self) -> Vec<String>;
    /// Produce rows, sending each one down `tx`; consumes the producer.
    async fn producer_task(self, tx: Sender<Vec<String>>) -> Result<(), String>;
}
/// A sink for tabular data emitted by a `Producer`.
#[async_trait]
pub trait Consumer {
    /// Drain rows from `rx` until the channel closes; `column_names`
    /// mirrors the producer's column order.
    async fn consume(
        self,
        rx: &mut Receiver<Vec<String>>,
        column_names: Vec<String>,
    ) -> Result<(), String>;
}
pub use export_to_sheets::ExportToSheets;
pub use list_repos::ListReposForOrg;
pub use print::Print;
/// Fetch every repository of the organization via the REST API, following
/// pagination through `util::accumulate_pages`.
#[throws]
async fn all_repos(org: &octocrab::orgs::OrgHandler<'_>) -> Vec<octocrab::models::Repository> {
    util::accumulate_pages(|page| org.list_repos().page(page).send()).await?
}
// ==================== GQL Query Structures =========================
/// Generic GraphQL response envelope: the typed payload plus any in-band
/// errors GraphQL reports alongside partial data.
#[derive(Deserialize, Debug)]
pub struct Response<T> {
    data: T,
    errors: Option<Vec<serde_json::Value>>,
}
/// Pagination cursor info returned by GitHub's GraphQL connections.
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct PageInfo {
    has_next_page: bool,
    end_cursor: String,
}
// Mirror of the JSON shape returned by the "all repos" GraphQL query:
// data.organization.repositories.edges[].node.name (+ pageInfo).
#[derive(Deserialize, Debug)]
struct AllReposData {
    organization: Organization,
}
#[derive(Deserialize, Debug)]
struct Organization {
    repositories: Repositories,
}
#[derive(Deserialize, Debug)]
struct Repositories {
    edges: Vec<Node>,
    #[serde(rename = "pageInfo")]
    page_info: PageInfo,
}
// One edge of the GraphQL connection; wraps the actual repository node.
#[derive(Deserialize, Debug)]
struct Node {
    node: RepoNode,
}
// Only the repository name is queried today (see get_query_str_all_repos).
#[derive(Deserialize, Debug)]
struct RepoNode {
    name: String,
}
// ==================== GQL Query Functions =========================
/// returns a list of relevant data (only) for each repositories under the given organization.
///
/// Note: Currently, the only repo data we're even using is its name. This will likely change over time.
/// Mutate the query found in `metrics::get_query_str_all_repos` to add any relevant data necessary.
#[throws]
async fn all_repos_graphql(org: &str) -> Vec<String> {
let octo = octocrab::instance();
let mut query_string = get_query_str_all_repos(&org, None);
let mut repos: Vec<String> = vec![];
loop {
let response: Response<AllReposData> = octo.graphql(&query_string).await?;
let repos_data = response.data.organization.repositories;
repos.extend(
repos_data
.edges
.iter()
.map(|edge| edge.node.name.to_owned()),
);
if repos_data.page_info.has_next_page {
query_string = get_query_str_all_repos(&org, Some(&repos_data.page_info.end_cursor));
} else {
break;
}
}
repos
}
/// utility function used for pagination with the GraphQL "get all repositories for organization" query
///
/// # Arguments
/// - `org` — The name of the GitHub Organization to retrieve all the repositories for
/// - `after_cursor` — An optional cursor value to start at (provided by the `pageInfo` property in the previous page)
///
/// Feel free to explore in the [GitHub GraphQL Explorer]
///
/// [GitHub GraphQL Explorer]: https://docs.github.com/en/graphql/overview/explorer
fn get_query_str_all_repos(org: &str, after_cursor: Option<&str>) -> String {
    // Only paginated follow-up requests carry an `after:` clause.
    let after_clause = match after_cursor {
        Some(cursor) => format!(r#", after:"{}""#, cursor),
        None => String::new(),
    };
    format!(
        r#"query {{
    organization(login:"{org_name}"){{
        repositories(first:100{after_clause}){{
            edges {{
                node {{
                    name
                }}
            }}
            pageInfo {{
                hasNextPage
                endCursor
            }}
        }}
    }}
}}"#,
        org_name = org,
        after_clause = after_clause,
    )
}
| true |
c42a650fca50f63c6e685f50bb94dc19fe1caadf
|
Rust
|
adisney3000/Rust-SCSI
|
/src/bin/parse_raw_sense.rs
|
UTF-8
| 624 | 2.734375 | 3 |
[
"MIT"
] |
permissive
|
//#![warn(missing_docs)]
use scsi::Sense;
use std::io;
use std::io::BufRead;
/// Read whitespace-separated hex byte values from stdin, parse them into a
/// buffer, and print the decoded SCSI sense data in debug and display form.
fn main() {
    let mut buffer: Vec<u8> = Vec::new();
    let stdin = io::stdin();
    let stdin_handle = stdin.lock();
    // Stops at end of input or at the first unreadable line.
    let mut iter = stdin_handle.lines();
    while let Some(Ok(line)) = iter.next() {
        for token in line.split_whitespace() {
            // `token` is already a &str; the old `&token` borrow was needless.
            buffer.push(u8::from_str_radix(token, 16).expect("Invalid hex value"));
        }
    }
    let sense: Sense = Sense::from_buf(&buffer).expect("Invalid sense data!");
    println!("== Debug Print ==");
    println!("{:#?}", sense);
    // `println!()` is the idiomatic blank line (was `println!("")`).
    println!();
    println!("== Sense Print ==");
    println!("{}", sense);
}
| true |
c5a5518929cf81f6a8a25d43f603be981345e530
|
Rust
|
cmeissl/wayland-rs
|
/wayland-commons/src/map.rs
|
UTF-8
| 8,595 | 3.046875 | 3 |
[
"MIT"
] |
permissive
|
//! Wayland objects map
use crate::{Interface, MessageGroup, NoMessage};
use std::cmp::Ordering;
/// Limit separating server-created from client-created objects IDs in the namespace
///
/// Ids strictly below this value belong to the client id space; ids at or
/// above it belong to the server id space (see `ObjectMap`).
pub const SERVER_ID_LIMIT: u32 = 0xFF00_0000;
/// A trait representing the metadata a wayland implementation
/// may attach to an object.
///
/// `Clone` is required because the metadata is duplicated into every
/// child object derived from its parent.
pub trait ObjectMetadata: Clone {
    /// Create the metadata for a child object
    ///
    /// Mostly needed for client side, to propagate the event queues
    fn child(&self) -> Self;
}
// Unit metadata: children carry no information either.
impl ObjectMetadata for () {
    fn child(&self) {}
}
/// The representation of a protocol object
///
/// The two `childs_from_*` fields are function pointers (not closures) so
/// the struct stays `Clone` without boxing; they let the library build the
/// correct child object when a message carries a `new_id` argument.
#[derive(Clone)]
pub struct Object<Meta: ObjectMetadata> {
    /// Interface name of this object
    pub interface: &'static str,
    /// Version of this object
    pub version: u32,
    /// Description of the requests of this object
    pub requests: &'static [crate::wire::MessageDesc],
    /// Description of the events of this object
    pub events: &'static [crate::wire::MessageDesc],
    /// Metadata associated to this object (ex: its event queue client side)
    pub meta: Meta,
    /// A function which, from an opcode, a version, and the Meta, creates a child
    /// object associated with this event if any
    pub childs_from_events: fn(u16, u32, &Meta) -> Option<Object<Meta>>,
    /// A function which, from an opcode, a version, and the Meta, creates a child
    /// object associated with this request if any
    pub childs_from_requests: fn(u16, u32, &Meta) -> Option<Object<Meta>>,
}
// Manual Debug impl: the `childs_from_*` function pointers carry no useful
// Debug output, so they are deliberately omitted from the representation.
impl<Meta: ObjectMetadata + std::fmt::Debug> std::fmt::Debug for Object<Meta> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("Object")
            .field("interface", &self.interface)
            .field("version", &self.version)
            .field("requests", &self.requests)
            .field("events", &self.events)
            .field("meta", &self.meta)
            .finish()
    }
}
impl<Meta: ObjectMetadata> Object<Meta> {
    /// Create an Object corresponding to given interface and version
    pub fn from_interface<I: Interface>(version: u32, meta: Meta) -> Object<Meta> {
        Object {
            interface: I::NAME,
            version,
            requests: I::Request::MESSAGES,
            events: I::Event::MESSAGES,
            meta,
            // Monomorphised shims over the interface's message groups (see
            // the free function `childs_from` below).
            childs_from_events: childs_from::<I::Event, Meta>,
            childs_from_requests: childs_from::<I::Request, Meta>,
        }
    }
    /// Create an optional `Object` corresponding to the possible `new_id` associated
    /// with given event opcode
    pub fn event_child(&self, opcode: u16) -> Option<Object<Meta>> {
        (self.childs_from_events)(opcode, self.version, &self.meta)
    }
    /// Create an optional `Object` corresponding to the possible `new_id` associated
    /// with given request opcode
    pub fn request_child(&self, opcode: u16) -> Option<Object<Meta>> {
        (self.childs_from_requests)(opcode, self.version, &self.meta)
    }
    /// Check whether this object is of given interface
    ///
    /// Compares interface *names*, not types.
    pub fn is_interface<I: Interface>(&self) -> bool {
        // TODO: we might want to be more robust than that
        self.interface == I::NAME
    }
    /// Create a placeholder object that will be filled-in by the message logic
    ///
    /// The empty interface name and version 0 mark it as not yet filled in;
    /// its child factories use `NoMessage` and thus never produce children.
    pub fn placeholder(meta: Meta) -> Object<Meta> {
        Object {
            interface: "",
            version: 0,
            requests: &[],
            events: &[],
            meta,
            childs_from_events: childs_from::<NoMessage, Meta>,
            childs_from_requests: childs_from::<NoMessage, Meta>,
        }
    }
}
// Monomorphic shim stored behind the `childs_from_*` function pointers in
// `Object`; simply defers to the message group's `child` constructor.
fn childs_from<M: MessageGroup, Meta: ObjectMetadata>(
    opcode: u16,
    version: u32,
    meta: &Meta,
) -> Option<Object<Meta>> {
    M::child(opcode, version, meta)
}
/// A holder for the object store of a connection
///
/// Keeps track of which object id is associated to which
/// interface object, and which is currently unused.
#[derive(Default, Debug)]
pub struct ObjectMap<Meta: ObjectMetadata> {
    // Objects with ids in 1..SERVER_ID_LIMIT, stored at index id - 1.
    client_objects: Vec<Option<Object<Meta>>>,
    // Objects with ids >= SERVER_ID_LIMIT, stored at index id - SERVER_ID_LIMIT.
    server_objects: Vec<Option<Object<Meta>>>,
}
impl<Meta: ObjectMetadata> ObjectMap<Meta> {
    /// Create a new empty object map
    pub fn new() -> ObjectMap<Meta> {
        ObjectMap { client_objects: Vec::new(), server_objects: Vec::new() }
    }
    /// Find an object in the store
    ///
    /// Id 0 is the wayland "null object" and never resolves to anything.
    pub fn find(&self, id: u32) -> Option<Object<Meta>> {
        if id == 0 {
            None
        } else if id >= SERVER_ID_LIMIT {
            // Server ids are stored offset by SERVER_ID_LIMIT.
            self.server_objects.get((id - SERVER_ID_LIMIT) as usize).and_then(Clone::clone)
        } else {
            // Client ids start at 1, hence the -1 offset into the Vec.
            self.client_objects.get((id - 1) as usize).and_then(Clone::clone)
        }
    }
    /// Remove an object from the store
    ///
    /// Does nothing if the object didn't previously exists
    ///
    /// The slot is kept (set to None) so later ids keep their indices; it
    /// can be reused by the `*_insert_new` methods.
    pub fn remove(&mut self, id: u32) {
        if id == 0 {
            // nothing
        } else if id >= SERVER_ID_LIMIT {
            if let Some(place) = self.server_objects.get_mut((id - SERVER_ID_LIMIT) as usize) {
                *place = None;
            }
        } else if let Some(place) = self.client_objects.get_mut((id - 1) as usize) {
            *place = None;
        }
    }
    /// Insert given object for given id
    ///
    /// Can fail if the requested id is not the next free id of this store.
    /// (In which case this is a protocol error)
    // -- The lint is allowed because fixing it would be a breaking change --
    #[allow(clippy::result_unit_err)]
    pub fn insert_at(&mut self, id: u32, object: Object<Meta>) -> Result<(), ()> {
        if id == 0 {
            Err(())
        } else if id >= SERVER_ID_LIMIT {
            insert_in_at(&mut self.server_objects, (id - SERVER_ID_LIMIT) as usize, object)
        } else {
            insert_in_at(&mut self.client_objects, (id - 1) as usize, object)
        }
    }
    /// Allocate a new id for an object in the client namespace
    pub fn client_insert_new(&mut self, object: Object<Meta>) -> u32 {
        insert_in(&mut self.client_objects, object) + 1
    }
    /// Allocate a new id for an object in the server namespace
    pub fn server_insert_new(&mut self, object: Object<Meta>) -> u32 {
        insert_in(&mut self.server_objects, object) + SERVER_ID_LIMIT
    }
    /// Mutably access an object of the map
    // -- The lint is allowed because fixing it would be a breaking change --
    #[allow(clippy::result_unit_err)]
    pub fn with<T, F: FnOnce(&mut Object<Meta>) -> T>(&mut self, id: u32, f: F) -> Result<T, ()> {
        if id == 0 {
            Err(())
        } else if id >= SERVER_ID_LIMIT {
            if let Some(&mut Some(ref mut obj)) =
                self.server_objects.get_mut((id - SERVER_ID_LIMIT) as usize)
            {
                Ok(f(obj))
            } else {
                Err(())
            }
        } else if let Some(&mut Some(ref mut obj)) = self.client_objects.get_mut((id - 1) as usize)
        {
            Ok(f(obj))
        } else {
            Err(())
        }
    }
    /// Mutably access all objects of the map in sequence
    ///
    /// The callback receives each object's full (unoffset) protocol id.
    pub fn with_all<F: FnMut(u32, &mut Object<Meta>)>(&mut self, mut f: F) {
        for (id, place) in self.client_objects.iter_mut().enumerate() {
            if let Some(ref mut obj) = *place {
                f(id as u32 + 1, obj);
            }
        }
        for (id, place) in self.server_objects.iter_mut().enumerate() {
            if let Some(ref mut obj) = *place {
                f(id as u32 + SERVER_ID_LIMIT, obj);
            }
        }
    }
}
// insert a new object in a store at the first free place,
// returning the zero-based slot index it was stored at
fn insert_in<Meta: ObjectMetadata>(
    store: &mut Vec<Option<Object<Meta>>>,
    object: Object<Meta>,
) -> u32 {
    // Reuse the first vacated slot if one exists; otherwise grow the store.
    if let Some(free_slot) = store.iter().position(Option::is_none) {
        store[free_slot] = Some(object);
        free_slot as u32
    } else {
        store.push(Some(object));
        (store.len() - 1) as u32
    }
}
// insert an object at a given place in a store; fails when the id skips
// ahead of the store or when the slot is already occupied
fn insert_in_at<Meta: ObjectMetadata>(
    store: &mut Vec<Option<Object<Meta>>>,
    id: usize,
    object: Object<Meta>,
) -> Result<(), ()> {
    match id.cmp(&store.len()) {
        // Ids must be allocated densely: skipping ahead is a protocol error.
        Ordering::Greater => Err(()),
        Ordering::Equal => {
            store.push(Some(object));
            Ok(())
        }
        Ordering::Less => {
            let previous = &mut store[id];
            // `is_some()` replaces `!previous.is_none()` (clippy: nonminimal_bool).
            if previous.is_some() {
                return Err(());
            }
            *previous = Some(object);
            Ok(())
        }
    }
}
| true |
74119e22087d27e1d04f671f89f8a49257ef26e9
|
Rust
|
philippeitis/isbn-rs
|
/src/lib.rs
|
UTF-8
| 23,728 | 3.15625 | 3 |
[
"MIT"
] |
permissive
|
//! A library for handling [International Standard Book Number], or ISBNs.
//!
//! # Examples
//!
//! ```
//! use isbn2::{Isbn10, Isbn13};
//!
//! let isbn_10 = Isbn10::new([8, 9, 6, 6, 2, 6, 1, 2, 6, 4]).unwrap();
//! assert_eq!(isbn_10.hyphenate().unwrap().as_str(), "89-6626-126-4");
//! assert_eq!(isbn_10.registration_group(), Ok("Korea, Republic"));
//! assert_eq!("89-6626-126-4".parse(), Ok(isbn_10));
//!
//! let isbn_13 = Isbn13::new([9, 7, 8, 1, 4, 9, 2, 0, 6, 7, 6, 6, 5]).unwrap();
//! assert_eq!(isbn_13.hyphenate().unwrap().as_str(), "978-1-4920-6766-5");
//! assert_eq!(isbn_13.registration_group(), Ok("English language"));
//! assert_eq!("978-1-4920-6766-5".parse(), Ok(isbn_13));
//! ```
//!
//! [International Standard Book Number]: https://www.isbn-international.org/
#![cfg_attr(not(feature = "runtime-ranges"), no_std)]
#![deny(clippy::missing_errors_doc)]
#![deny(clippy::if_not_else)]
#[cfg(feature = "runtime-ranges")]
pub mod range;
#[cfg(feature = "runtime-ranges")]
pub use range::IsbnRange;
#[cfg(feature = "serialize")]
use serde::{Deserialize, Serialize};
use core::char;
use core::fmt;
use core::num::ParseIntError;
use core::str::FromStr;
use arrayvec::{ArrayString, ArrayVec, CapacityError};
pub type IsbnResult<T> = Result<T, IsbnError>;
include!(concat!(env!("OUT_DIR"), "/generated.rs"));
/// One registration-group (or EAN.UCC) table entry: the group's display name
/// and how many digits its segment occupies in the ISBN.
struct Group<'a> {
    name: &'a str,
    segment_length: usize,
}
/// Shared hyphenation / registration-group logic for ISBN-10 and ISBN-13.
///
/// Implementors expose the raw digit segments; the lookups are driven by the
/// tables generated at build time (included from `generated.rs`).
trait IsbnObject {
    /// Look up the EAN.UCC group for this ISBN's prefix.
    fn ean_ucc_group(&self) -> Result<Group, IsbnError> {
        Isbn::get_ean_ucc_group(self.prefix_element(), self.segment(0))
    }
    /// Render the ISBN with hyphens after the two given digit positions.
    fn hyphenate_with(&self, hyphen_at: [usize; 2]) -> ArrayString<17>;
    fn trait_hyphenate(&self) -> Result<ArrayString<17>, IsbnError> {
        // First resolve the registration-group length, then use it to locate
        // the registrant segment; the two lengths fix both hyphen positions.
        let registration_group_segment_length = self.ean_ucc_group()?.segment_length;
        let registrant_segment_length = Isbn::get_registration_group(
            self.prefix_element(),
            self.group_prefix(registration_group_segment_length),
            self.segment(registration_group_segment_length),
        )?
        .segment_length;
        let hyphen_at = [
            registration_group_segment_length,
            registration_group_segment_length + registrant_segment_length,
        ];
        Ok(self.hyphenate_with(hyphen_at))
    }
    fn trait_registration_group(&self) -> Result<&str, IsbnError> {
        let registration_group_segment_length = self.ean_ucc_group()?.segment_length;
        Ok(Isbn::get_registration_group(
            self.prefix_element(),
            self.group_prefix(registration_group_segment_length),
            self.segment(registration_group_segment_length),
        )?
        .name)
    }
    /// The GS1 prefix element (e.g. 978/979 for an ISBN-13).
    fn prefix_element(&self) -> u16;
    /// The digit segment starting after `base` digits.
    fn segment(&self, base: usize) -> u32;
    /// The first `length` digits of the group portion, as a number.
    fn group_prefix(&self, length: usize) -> u32;
}
/// An International Standard Book Number, either ISBN10 or ISBN13.
///
/// # Examples
///
/// ```
/// use isbn2::{Isbn, Isbn10, Isbn13};
///
/// let isbn_10 = Isbn::_10(Isbn10::new([8, 9, 6, 6, 2, 6, 1, 2, 6, 4]).unwrap());
/// let isbn_13 = Isbn::_13(Isbn13::new([9, 7, 8, 1, 4, 9, 2, 0, 6, 7, 6, 6, 5]).unwrap());
///
/// assert_eq!("89-6626-126-4".parse(), Ok(isbn_10));
/// assert_eq!("978-1-4920-6766-5".parse(), Ok(isbn_13));
/// ```
#[derive(Debug, PartialEq, Clone, Eq)]
#[cfg_attr(feature = "serialize", derive(Serialize, Deserialize))]
pub enum Isbn {
    _10(Isbn10),
    _13(Isbn13),
}
impl Isbn {
    /// Hyphenate an ISBN into its parts:
    ///
    /// * GS1 Prefix (ISBN-13 only)
    /// * Registration group
    /// * Registrant
    /// * Publication
    /// * Check digit
    ///
    /// ```
    /// use isbn2::{Isbn, Isbn10, Isbn13};
    ///
    /// let isbn_10 = Isbn::_10(Isbn10::new([8, 9, 6, 6, 2, 6, 1, 2, 6, 4]).unwrap());
    /// let isbn_13 = Isbn::_13(Isbn13::new([9, 7, 8, 1, 4, 9, 2, 0, 6, 7, 6, 6, 5]).unwrap());
    ///
    /// assert_eq!(isbn_10.hyphenate().unwrap().as_str(), "89-6626-126-4");
    /// assert_eq!(isbn_13.hyphenate().unwrap().as_str(), "978-1-4920-6766-5");
    /// ```
    /// # Errors
    /// If the ISBN is not valid, as determined by the current ISBN rules, an error will be
    /// returned.
    pub fn hyphenate(&self) -> Result<ArrayString<17>, IsbnError> {
        // Delegate to the concrete representation.
        match self {
            Isbn::_10(isbn) => isbn.hyphenate(),
            Isbn::_13(isbn) => isbn.hyphenate(),
        }
    }
    /// Retrieve the name of the registration group.
    ///
    /// ```
    /// use isbn2::{Isbn, Isbn10, Isbn13};
    ///
    /// let isbn_10 = Isbn::_10(Isbn10::new([8, 9, 6, 6, 2, 6, 1, 2, 6, 4]).unwrap());
    /// let isbn_13 = Isbn::_13(Isbn13::new([9, 7, 8, 1, 4, 9, 2, 0, 6, 7, 6, 6, 5]).unwrap());
    ///
    /// assert_eq!(isbn_10.registration_group(), Ok("Korea, Republic"));
    /// assert_eq!(isbn_13.registration_group(), Ok("English language"));
    /// ```
    ///
    /// # Errors
    /// If the ISBN is not valid, as determined by the current ISBN rules, an error will be
    /// returned.
    pub fn registration_group(&self) -> Result<&str, IsbnError> {
        match self {
            Isbn::_10(isbn) => isbn.registration_group(),
            Isbn::_13(isbn) => isbn.registration_group(),
        }
    }
}
/// A borrowed International Standard Book Number, referencing either an
/// [`Isbn10`] or an [`Isbn13`] without taking ownership.
///
/// # Examples
///
/// ```
/// use isbn2::{Isbn10, IsbnRef};
///
/// let isbn_10 = Isbn10::new([8, 9, 6, 6, 2, 6, 1, 2, 6, 4]).unwrap();
/// let isbn_ref = IsbnRef::from(&isbn_10);
/// assert_eq!(IsbnRef::_10(&isbn_10), isbn_ref);
/// ```
#[derive(Debug, PartialEq, Clone, Eq)]
#[cfg_attr(feature = "serialize", derive(Serialize, Deserialize))]
pub enum IsbnRef<'a> {
    _10(&'a Isbn10),
    _13(&'a Isbn13),
}
/// Borrows the concrete ISBN held inside an [`Isbn`].
impl<'a> From<&'a Isbn> for IsbnRef<'a> {
    fn from(isbn: &'a Isbn) -> Self {
        match isbn {
            Isbn::_10(isbn) => isbn.into(),
            Isbn::_13(isbn) => isbn.into()
        }
    }
}
impl<'a> From<&'a Isbn10> for IsbnRef<'a> {
    fn from(isbn: &'a Isbn10) -> Self {
        IsbnRef::_10(isbn)
    }
}
impl<'a> From<&'a Isbn13> for IsbnRef<'a> {
    fn from(isbn: &'a Isbn13) -> Self {
        IsbnRef::_13(isbn)
    }
}
impl fmt::Display for Isbn {
    /// Formats the wrapped ISBN's digits without any hyphens.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Isbn::_10(isbn) => isbn.fmt(f),
            Isbn::_13(isbn) => isbn.fmt(f),
        }
    }
}
impl From<Isbn10> for Isbn {
    fn from(isbn10: Isbn10) -> Isbn {
        Self::_10(isbn10)
    }
}
impl From<Isbn13> for Isbn {
    fn from(isbn13: Isbn13) -> Isbn {
        Self::_13(isbn13)
    }
}
impl FromStr for Isbn {
    type Err = IsbnError;
    /// Parses a hyphenated or bare ISBN of either length.
    fn from_str(s: &str) -> Result<Isbn, IsbnError> {
        let mut parser = Parser::new(s)?;
        parser.read_isbn()
    }
}
/// Used to convert ISBN digits into chars, excluding the last digit of ISBN10.
fn convert_isbn_body(d: u8) -> char {
    // Callers only pass 0-9; anything larger would panic here.
    char::from_digit(u32::from(d), 10).unwrap()
}
/// Used to convert ISBN digits into chars, including the last digit of ISBN10.
const fn convert_isbn10_check(d: u8) -> char {
    // The value 10 renders as the ISBN-10 check character 'X'; out-of-range
    // values also fall back to 'X'.
    match d {
        0..=9 => (b'0' + d) as char,
        _ => 'X',
    }
}
/// 10-digit ISBN format.
#[derive(Debug, PartialEq, Eq, Copy, Clone, Hash)]
#[cfg_attr(feature = "serialize", derive(Serialize, Deserialize))]
pub struct Isbn10 {
    // Ten digits 0-9; the final (check) digit may also be 10, rendered 'X'.
    digits: [u8; 10],
}
impl IsbnObject for Isbn10 {
    /// Applies two hyphens in the middle.
    fn hyphenate_with(&self, hyphen_at: [usize; 2]) -> ArrayString<17> {
        let mut hyphenated = ArrayString::new();
        // A hyphen is inserted before each digit index listed in `hyphen_at`.
        for (i, &digit) in self.digits[0..9].iter().enumerate() {
            if hyphen_at.contains(&i) {
                hyphenated.push('-')
            }
            hyphenated.push(convert_isbn_body(digit));
        }
        // The check digit is always separated by a final hyphen.
        hyphenated.push('-');
        hyphenated.push(convert_isbn10_check(self.digits[9]));
        hyphenated
    }
    fn prefix_element(&self) -> u16 {
        // ISBN-10s implicitly belong to the 978 GS1 prefix, encoded one hex
        // nibble per decimal digit.
        0x978
    }
    fn segment(&self, base: usize) -> u32 {
        // Packs up to seven digits starting at `base` into a decimal number,
        // treating positions past the end of the array as zero.
        (0..7).fold(0, |s, i| {
            s + u32::from(*self.digits.get(base + i).unwrap_or(&0)) * 10_u32.pow(6 - i as u32)
        })
    }
    fn group_prefix(&self, length: usize) -> u32 {
        // Nibble-packs the first `length` digits — presumably the key format
        // expected by the generated range tables; confirm against generated.rs.
        let mut digits = 0;
        for &digit in &self.digits[..length] {
            digits = (digits << 4) | digit as u32;
        }
        digits
    }
}
impl Isbn10 {
    /// Creates a new ISBN10 code from 10 digits. Verifies that the checksum is correct,
    /// and that no digits are out of bounds.
    ///
    /// # Examples
    ///
    /// ```
    /// use isbn2::Isbn10;
    ///
    /// let isbn10 = Isbn10::new([8, 9, 6, 6, 2, 6, 1, 2, 6, 4]).unwrap();
    /// ```
    /// # Errors
    /// If any of the first nine digits exceed nine, or the tenth digit exceeds 10, an error
    /// will be returned. If the check digit is not correct for the ISBN, an error will also
    /// be returned.
    pub fn new(digits: [u8; 10]) -> IsbnResult<Isbn10> {
        // Only the check digit may be 10 (which displays as 'X').
        if digits[..9].iter().any(|&digit| digit > 9) || digits[9] > 10 {
            Err(IsbnError::DigitTooLarge)
        } else if Isbn10::calculate_check_digit(&digits) == digits[9] {
            Ok(Isbn10 { digits })
        } else {
            Err(IsbnError::InvalidChecksum)
        }
    }
    /// Convert ISBN-13 to ISBN-10, if applicable.
    ///
    /// ```
    /// use isbn2::{Isbn10, Isbn13};
    ///
    /// let isbn_13 = Isbn13::new([9, 7, 8, 1, 4, 9, 2, 0, 6, 7, 6, 6, 5]).unwrap();
    /// assert_eq!(Isbn10::try_from(isbn_13), "1-4920-6766-0".parse());
    /// ```
    /// # Errors
    /// If the ISBN13 does not have a 978 prefix, it can not be downcast to an ISBN10, and an
    /// error will be returned.
    pub fn try_from(isbn13: Isbn13) -> IsbnResult<Self> {
        if isbn13.digits[..3] == [9, 7, 8] {
            // Drop the 978 prefix, keep the nine payload digits, and
            // recompute the mod-11 check digit for the shorter form.
            let mut a = [0; 10];
            a[..9].clone_from_slice(&isbn13.digits[3..12]);
            a[9] = Isbn10::calculate_check_digit(&a);
            Ok(Isbn10 { digits: a })
        } else {
            Err(IsbnError::InvalidConversion)
        }
    }
    // Mod-11 check digit: weights run 10 down to 2 over the first nine
    // digits; the result is 11 - (sum mod 11), with multiples of 11 mapping
    // to 0 and a result of 10 rendered as 'X'.
    fn calculate_check_digit(digits: &[u8; 10]) -> u8 {
        let sum: usize = digits[..9]
            .iter()
            .enumerate()
            .map(|(i, &d)| d as usize * (10 - i))
            .sum();
        let sum_m = (sum % 11) as u8;
        if sum_m == 0 {
            0
        } else {
            11 - sum_m
        }
    }
    /// Hyphenate an ISBN-10 into its parts:
    ///
    /// * Registration group
    /// * Registrant
    /// * Publication
    /// * Check digit
    ///
    /// ```
    /// use isbn2::Isbn10;
    ///
    /// let isbn_10 = Isbn10::new([8, 9, 6, 6, 2, 6, 1, 2, 6, 4]).unwrap();
    /// assert_eq!(isbn_10.hyphenate().unwrap().as_str(), "89-6626-126-4");
    /// ```
    /// # Errors
    /// If the ISBN is not valid, as determined by the current ISBN rules, an error will be
    /// returned.
    pub fn hyphenate(&self) -> Result<ArrayString<17>, IsbnError> {
        self.trait_hyphenate()
    }
    /// Retrieve the name of the registration group.
    ///
    /// ```
    /// use isbn2::Isbn10;
    ///
    /// let isbn_10 = Isbn10::new([8, 9, 6, 6, 2, 6, 1, 2, 6, 4]).unwrap();
    /// assert_eq!(isbn_10.registration_group(), Ok("Korea, Republic"));
    /// ```
    /// # Errors
    /// If the ISBN is not valid, as determined by the current ISBN rules, an error will be
    /// returned.
    pub fn registration_group(&self) -> Result<&str, IsbnError> {
        self.trait_registration_group()
    }
}
impl fmt::Display for Isbn10 {
    /// Renders the ten digits without separators; the check digit may be `X`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut out = ArrayString::<10>::new();
        for &digit in &self.digits[..9] {
            out.push(convert_isbn_body(digit));
        }
        out.push(convert_isbn10_check(self.digits[9]));
        f.write_str(&out)
    }
}
impl FromStr for Isbn10 {
    type Err = IsbnError;
    /// Parses exactly ten digits (hyphens/spaces ignored) as an ISBN-10.
    fn from_str(s: &str) -> Result<Isbn10, IsbnError> {
        let mut parser = Parser::new(s)?;
        if parser.digits.len() != 10 {
            return Err(IsbnError::InvalidLength);
        }
        parser.read_isbn10()
    }
}
/// 13-digit ISBN format.
#[derive(Debug, PartialEq, Eq, Copy, Clone, Hash)]
#[cfg_attr(feature = "serialize", derive(Serialize, Deserialize))]
pub struct Isbn13 {
    // Thirteen decimal digits, each 0-9 (the EAN-13 form has no 'X').
    digits: [u8; 13],
}
impl IsbnObject for Isbn13 {
    // Renders "prefix-group-registrant-publication-check"; the two interior
    // hyphens go at the digit indices supplied by the range lookup.
    fn hyphenate_with(&self, hyphen_at: [usize; 2]) -> ArrayString<17> {
        let mut hyphenated = ArrayString::new();
        for &digit in &self.digits[0..3] {
            hyphenated.push(convert_isbn_body(digit))
        }
        hyphenated.push('-');
        for (i, &digit) in self.digits[3..12].iter().enumerate() {
            if hyphen_at.contains(&i) {
                hyphenated.push('-')
            }
            hyphenated.push(convert_isbn_body(digit));
        }
        hyphenated.push('-');
        hyphenated.push(convert_isbn_body(self.digits[12]))
;
        hyphenated
    }
    fn prefix_element(&self) -> u16 {
        // Pack the three GS1 prefix digits one hex nibble each (978 -> 0x978).
        ((self.digits[0] as u16) << 8) | ((self.digits[1] as u16) << 4) | (self.digits[2] as u16)
    }
    fn segment(&self, base: usize) -> u32 {
        // NOTE(review): this window covers digits base+3..base+8 with weights
        // 10^6..10^1 — one digit fewer than the ISBN-10 implementation's
        // seven-digit window. Presumably this matches the generated
        // range-table keys; confirm against generated.rs.
        (3..9).fold(0, |s, i| {
            s + u32::from(*self.digits.get(base + i).unwrap_or(&0)) * 10_u32.pow(9 - i as u32)
        })
    }
    fn group_prefix(&self, length: usize) -> u32 {
        // Nibble-packs the `length` digits that follow the GS1 prefix.
        let mut digits = 0;
        for &digit in &self.digits[3..length + 3] {
            digits = (digits << 4) | digit as u32;
        }
        digits
    }
}
impl Isbn13 {
    /// Creates a new ISBN13 code from 13 digits. Verifies that the checksum is correct,
    /// and that no digits are out of bounds.
    ///
    /// # Examples
    ///
    /// ```
    /// use isbn2::Isbn13;
    ///
    /// let isbn13 = Isbn13::new([9, 7, 8, 1, 4, 9, 2, 0, 6, 7, 6, 6, 5]).unwrap();
    /// ```
    /// # Errors
    /// If any of the digits exceed nine, an error will be returned. If the check digit is not
    /// correct for the ISBN, an error will also be returned.
    pub fn new(digits: [u8; 13]) -> IsbnResult<Isbn13> {
        if digits.iter().any(|&digit| digit > 9) {
            Err(IsbnError::DigitTooLarge)
        } else if Isbn13::calculate_check_digit(&digits) == digits[12] {
            Ok(Isbn13 { digits })
        } else {
            Err(IsbnError::InvalidChecksum)
        }
    }
    // EAN-13 check digit: weights alternate 1 and 3 across the first twelve
    // digits; the check digit is (10 - sum mod 10), with multiples of 10
    // mapping to 0.
    fn calculate_check_digit(digits: &[u8; 13]) -> u8 {
        let mut sum = 0;
        for i in 0..6 {
            sum += u16::from(digits[i * 2] + 3 * digits[i * 2 + 1]);
        }
        let sum_m = (sum % 10) as u8;
        if sum_m == 0 {
            0
        } else {
            10 - sum_m
        }
    }
    /// Hyphenate an ISBN-13 into its parts:
    ///
    /// * GS1 Prefix
    /// * Registration group
    /// * Registrant
    /// * Publication
    /// * Check digit
    ///
    /// ```
    /// use isbn2::Isbn13;
    ///
    /// let isbn_13 = Isbn13::new([9, 7, 8, 1, 4, 9, 2, 0, 6, 7, 6, 6, 5]).unwrap();
    /// assert_eq!(isbn_13.hyphenate().unwrap().as_str(), "978-1-4920-6766-5");
    /// ```
    /// # Errors
    /// If the ISBN is not valid, as determined by the current ISBN rules, an error will be
    /// returned.
    pub fn hyphenate(&self) -> Result<ArrayString<17>, IsbnError> {
        self.trait_hyphenate()
    }
    /// Retrieve the name of the registration group.
    ///
    /// ```
    /// use isbn2::Isbn13;
    ///
    /// let isbn_13 = Isbn13::new([9, 7, 8, 1, 4, 9, 2, 0, 6, 7, 6, 6, 5]).unwrap();
    /// assert_eq!(isbn_13.registration_group(), Ok("English language"));
    /// ```
    /// # Errors
    /// If the ISBN is not valid, as determined by the current ISBN rules, an error will be
    /// returned.
    pub fn registration_group(&self) -> Result<&str, IsbnError> {
        self.trait_registration_group()
    }
}
impl fmt::Display for Isbn13 {
    /// Renders the thirteen digits without separators.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut out = ArrayString::<13>::new();
        for &digit in &self.digits {
            out.push(convert_isbn_body(digit));
        }
        f.write_str(&out)
    }
}
impl From<Isbn10> for Isbn13 {
    /// Upcasts by prepending the 978 prefix and recomputing the check digit.
    fn from(isbn10: Isbn10) -> Isbn13 {
        let mut digits = [9, 7, 8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
        digits[3..12].clone_from_slice(&isbn10.digits[..9]);
        digits[12] = Isbn13::calculate_check_digit(&digits);
        Isbn13 { digits }
    }
}
impl FromStr for Isbn13 {
    type Err = IsbnError;
    /// Parses exactly thirteen digits (hyphens/spaces ignored) as an ISBN-13.
    fn from_str(s: &str) -> Result<Isbn13, IsbnError> {
        let mut parser = Parser::new(s)?;
        if parser.digits.len() != 13 {
            return Err(IsbnError::InvalidLength);
        }
        parser.read_isbn13()
    }
}
/// An error which can be returned when parsing an ISBN.
#[derive(Debug, PartialEq)]
pub enum IsbnError {
    /// The given string is too short or too long to be an ISBN.
    InvalidLength,
    /// Encountered an invalid digit while parsing.
    InvalidDigit,
    /// Encountered an invalid ISBN registration group.
    InvalidGroup,
    /// Encountered a range not defined for use at this time.
    UndefinedRange,
    /// Failed to validate checksum.
    InvalidChecksum,
    /// Failed to convert to ISBN10.
    InvalidConversion,
    /// One or more supplied digits were too large.
    DigitTooLarge,
}
impl fmt::Display for IsbnError {
    /// Human-readable description of the parse/validation failure.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let message = match self {
            IsbnError::InvalidLength => {
                "The given string is too short or too long to be an ISBN."
            }
            IsbnError::InvalidDigit => "Encountered an invalid digit while parsing.",
            IsbnError::InvalidGroup => "Encountered an invalid ISBN registration group.",
            IsbnError::UndefinedRange => "Encountered a range not defined for use at this time.",
            IsbnError::InvalidChecksum => "Failed to validate checksum.",
            IsbnError::InvalidConversion => "Failed to convert to ISBN10.",
            IsbnError::DigitTooLarge => {
                "A supplied digit was larger than 9, or the ISBN10 check digit was larger than 10."
            }
        };
        f.write_str(message)
    }
}
// Numeric parse failures surface as invalid digits.
impl From<ParseIntError> for IsbnError {
    fn from(_: ParseIntError) -> Self {
        IsbnError::InvalidDigit
    }
}
// Overflowing the 13-digit buffer means the input was too long.
impl From<CapacityError<u8>> for IsbnError {
    fn from(_: CapacityError<u8>) -> Self {
        IsbnError::InvalidLength
    }
}
/// Digit accumulator used while parsing ISBN strings.
#[derive(Debug, Clone)]
struct Parser {
    digits: ArrayVec<u8, 13>,
}
impl Parser {
    /// Collects the digits of `s`, skipping `-` and space separators.
    ///
    /// `X` (representing the value 10) is accepted only as the tenth
    /// character, and no further digits may follow it.
    ///
    /// # Errors
    /// Returns `IsbnError::InvalidDigit` for any other character and
    /// `IsbnError::InvalidLength` when more than 13 digits are supplied.
    pub fn new<S: AsRef<str>>(s: S) -> Result<Parser, IsbnError> {
        let mut digits = ArrayVec::new();
        let mut has_x = false;
        for c in s.as_ref().chars() {
            match c {
                // Separators are ignored wherever they appear.
                '-' | ' ' => {}
                'X' => {
                    if digits.len() == 9 {
                        has_x = true;
                        digits.push(10);
                    } else {
                        return Err(IsbnError::InvalidDigit);
                    }
                }
                '0'..='9' => {
                    if has_x {
                        // 'X' is only valid as the final (check) digit.
                        return Err(IsbnError::InvalidDigit);
                    } else {
                        digits.try_push(c.to_digit(10).unwrap() as u8)?
                    }
                }
                _ => return Err(IsbnError::InvalidDigit),
            }
        }
        Ok(Parser { digits })
    }
    /// Interprets the collected digits as either ISBN format based on count.
    fn read_isbn(&mut self) -> Result<Isbn, IsbnError> {
        match self.digits.len() {
            10 => self.read_isbn10().map(Isbn::_10),
            13 => self.read_isbn13().map(Isbn::_13),
            _ => Err(IsbnError::InvalidLength),
        }
    }
    /// Reads an ISBN13 from self. Requires that length is checked beforehand.
    fn read_isbn13(&mut self) -> Result<Isbn13, IsbnError> {
        let mut digits = [0; 13];
        digits.clone_from_slice(&self.digits);
        let check_digit = Isbn13::calculate_check_digit(&digits);
        if check_digit == digits[12] {
            Ok(Isbn13 { digits })
        } else {
            // Bug fix: a checksum mismatch was previously reported as
            // `InvalidDigit`; use the dedicated variant, consistent with
            // `Isbn13::new`.
            Err(IsbnError::InvalidChecksum)
        }
    }
    /// Reads an ISBN10 from self. Requires that length is checked beforehand.
    fn read_isbn10(&mut self) -> Result<Isbn10, IsbnError> {
        let mut digits = [0; 10];
        digits.clone_from_slice(&self.digits);
        let check_digit = Isbn10::calculate_check_digit(&digits);
        if check_digit == digits[9] {
            Ok(Isbn10 { digits })
        } else {
            // Bug fix: consistent with `Isbn10::new` (was `InvalidDigit`).
            Err(IsbnError::InvalidChecksum)
        }
    }
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_from_str_isbn10() {
// Wikipedia ISBN-10 check digit calculation example
assert!(Isbn::from_str("0-306-40615-2").is_ok());
// Wikipedia ISBN-10 check digit calculation invalid example
assert!(Isbn::from_str("99999-999-9-X").is_err());
// Wikipedia Registrant element examples
assert!(Isbn::from_str("99921-58-10-7").is_ok());
assert!(Isbn::from_str("9971-5-0210-0").is_ok());
assert!(Isbn::from_str("9971-5-0210-0").is_ok());
assert!(Isbn::from_str("960-425-059-0").is_ok());
assert!(Isbn::from_str("80-902734-1-6").is_ok());
assert!(Isbn::from_str("85-359-0277-5").is_ok());
assert!(Isbn::from_str("1-84356-028-3").is_ok());
assert!(Isbn::from_str("0-684-84328-5").is_ok());
assert!(Isbn::from_str("0-8044-2957-X").is_ok());
assert!(Isbn::from_str("0-85131-041-9").is_ok());
assert!(Isbn::from_str("0-943396-04-2").is_ok());
assert!(Isbn::from_str("0-9752298-0-X").is_ok());
}
#[test]
fn test_hyphens_no_panic() {
assert!(Isbn::from_str("0-9752298-0-X").unwrap().hyphenate().is_ok());
assert!(Isbn::from_str("978-3-16-148410-0")
.unwrap()
.hyphenate()
.is_ok());
}
#[test]
fn test_from_str_isbn13() {
// Wikipedia Example
assert!(Isbn13::from_str("978-3-16-148410-0").is_ok());
// Wikipedia ISBN-13 check digit calculation example
assert!(Isbn13::from_str("978-0-306-40615-7").is_ok());
}
#[test]
fn test_invalid_isbn_strings_no_panic() {
assert!(Isbn::from_str("L").is_err());
assert!(Isbn::from_str("").is_err());
assert!(Isbn::from_str("01234567890123456789").is_err());
assert!(Isbn::from_str("ⱧňᚥɂᛢĞžᚪ©ᛟƚ¶G").is_err());
assert!(Isbn10::from_str("").is_err());
assert!(Isbn10::from_str("01234567890").is_err());
assert!(Isbn10::from_str("01234567X9").is_err());
assert!(Isbn10::from_str("012345678").is_err());
assert!(Isbn13::from_str("").is_err());
assert!(Isbn13::from_str("012345678901X").is_err());
assert!(Isbn13::from_str("01234567890X2").is_err());
assert!(Isbn13::from_str("012345678").is_err());
assert!(Isbn13::from_str("0123456789012345").is_err());
}
#[test]
fn test_isbns_do_not_accept_larger_digits() {
let mut a = [10; 10];
// Everything except check digit must be <= 9.
a[9] = Isbn10::calculate_check_digit(&a);
assert!(Isbn10::new(a).is_err());
// Check digit can be 10.
assert!(Isbn10::new([0, 9, 7, 5, 2, 2, 9, 8, 0, 10]).is_ok());
// Check digits which are larger than 10 are implicitly handled by
// the fact that calculate_check_digit returns a number from 0 to 10.
let mut a = [10; 13];
a[12] = Isbn13::calculate_check_digit(&a);
assert!(Isbn13::new(a).is_err());
}
}
| true |
3f0f9f3b26eabd4683365588c0a2708d68a6eb10
|
Rust
|
Mimikkk/leetcode-rust
|
/src/tests/utils.rs
|
UTF-8
| 220 | 2.765625 | 3 |
[] |
no_license
|
#[test]
fn sorted_should_return_new_sorted_vec() {
    use crate::utils::Sorted;
    // `sorted` must yield an ordered copy rather than mutating in place.
    let unsorted = vec![4, 3, 2, 1];
    assert_eq!(unsorted.sorted(), vec![1, 2, 3, 4], "Should return new sorted vec");
}
| true |
edcc54f604c7bb2d176c3fe961c1cc29e0ef69c0
|
Rust
|
TheooAndre/rust-beginner-projects
|
/src/pythagorean_triples_checker.rs
|
UTF-8
| 3,008 | 3.796875 | 4 |
[] |
no_license
|
use std::io::{self, Write};
/// Checks for a pythagorean triple
/// # Examples
/// ```
/// # use rust_beginner_projects::pythagorean_triples_checker::check;
/// assert_eq!(check(Some((3, 4, 5))), true);
/// assert_eq!(check(Some((1, 2, 3))), false);
/// ```
/// # Parameters
/// `nums` - An optional tuple containing the sides of a triangle
/// - `Some(side_a, side_b, hypotenuse)`: Program will check if it is a pythagorean triple and exit right after
/// - `None`: Program will prompt the user for input until the user exits
/// # Returns
/// `bool` - If passed parameters, retuns whether it was a pythagorean triple or not
pub fn check(nums: Option<(i32, i32, i32)>) -> bool {
    println!("Pythagorian Triple Checker");
    // If sides come from the parameters, exit after one pass of the loop;
    // if they come from the prompt, loop until the user quits (via `prompt`,
    // which exits the process on bad input).
    let mut from_parameters = false;
    let mut is_triple = false;
    while !from_parameters {
        let (side_a, side_b, side_c) = match nums {
            Some(sides) => {
                from_parameters = true;
                sides
            }
            None => prompt(),
        };
        // a^2 + b^2 == c^2
        if side_a.pow(2) + side_b.pow(2) == side_c.pow(2) {
            println!("It's a Pythagorian Triple");
            is_triple = true;
        } else {
            println!("It's NOT a Pythagorian Triple");
        }
    }
    is_triple
}
/// Gets input from the user
/// # Quits
/// Will exit if it gets a non-integer input
pub fn prompt() -> (i32, i32, i32) {
    // The three reads differed only in their labels; share one helper
    // instead of triplicating the read/parse/exit logic.
    let side_a = read_side("Side A: ", "Side A is invalid!");
    let side_b = read_side("Side B: ", "Side B is invalid!");
    let side_c = read_side("Hypotenuse: ", "The Hypotenuse is invalid!");
    (side_a, side_b, side_c)
}
/// Prompts with `label`, reads one line from stdin, and parses it as `i32`.
/// Prints `error_message` and exits the process when parsing fails.
fn read_side(label: &str, error_message: &str) -> i32 {
    let mut input = String::new();
    print!("{}", label);
    io::stdout().flush().expect("Failed to flush the screen");
    io::stdin()
        .read_line(&mut input)
        .expect("Failed to read from stdin");
    match input.trim().parse() {
        Ok(value) => value,
        Err(_) => {
            println!("{}", error_message);
            std::process::exit(1);
        }
    }
}
| true |
06b2c02e7f5eb88111d1c39b973378587c997ec3
|
Rust
|
dogewanwan/rust-simple-benchmarker
|
/src/observation_logger.rs
|
UTF-8
| 2,120 | 3.109375 | 3 |
[
"MIT"
] |
permissive
|
use chrono::{DateTime, Utc};
use std::fmt::{Formatter, Display};
impl PreciseTime for DateTime<Utc> {
    fn now() -> Self {
        Utc::now()
    }
    // NOTE(review): despite the name, this also returns true when the two
    // instants are equal (`<=`, not `<`) — confirm that is intended.
    fn is_before(&self, time: &DateTime<Utc>) -> bool {
        self <= &time
    }
}
/// Abstraction over the time source used by `SimpleLogger`.
pub trait PreciseTime {
    /// The current instant.
    fn now() -> Self;
    /// Whether `self` occurred at or before `time`.
    fn is_before(&self, time: &DateTime<Utc>) -> bool;
}
/// Collects request start/end observations; a pair whose second element is
/// `None` is a request that started but did not complete successfully.
pub struct SimpleLogger<Time: PreciseTime> {
    // Finished observations: (start, Some(end)) on success, (start, None) on error.
    vectors: Vec<(Time, Option<Time>)>,
    // The in-flight observation, if a request has started but not yet ended.
    current: Option<(Time, Option<Time>)>
}
impl Display for SimpleLogger<DateTime<Utc>> {
    /// Prints the overall requests-per-second across the observed window,
    /// or "No results" when no request completed.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        // Only requests with a recorded end time count toward throughput.
        let request_len = self.vectors
            .iter()
            .filter(|(_, x)| x.is_some())
            .count();
        // Window start: earliest start time (failed requests included).
        let min_time = self.vectors
            .iter()
            .map(|(x, _)| x)
            .cloned()
            .min();
        // Window end: latest successful end time.
        let max_time = self.vectors
            .iter()
            .filter_map(|(_, y)| y.clone())
            .max();
        if let (Some(x), Some(y)) = (min_time, max_time) {
            let seconds = y - x;
            // NOTE(review): `num_seconds` truncates sub-second windows to 0,
            // making `rps` infinite — confirm whether millisecond precision
            // is wanted here.
            let rps = request_len as f64 / (seconds.num_seconds() as f64);
            write!(f, "RPS: {}", rps)
        } else {
            write!(f, "No results")
        }
    }
}
impl<T: PreciseTime> Default for SimpleLogger<T> {
    /// An empty logger with no recorded observations.
    fn default() -> Self {
        SimpleLogger {
            vectors: Vec::new(),
            current: None,
        }
    }
}
impl<Time: PreciseTime, Error> ObservationLogger<Error> for SimpleLogger<Time> {
    fn log_start_of_request(&mut self) {
        // Any unfinished observation is silently replaced.
        self.current = Some((Time::now(), None));
    }
    fn log_end_of_request(&mut self, error: Option<Error>) {
        if let Some((start, _)) = self.current.take() {
            // A failed request keeps its start time but records no end time.
            let end = if error.is_none() { Some(Time::now()) } else { None };
            self.vectors.push((start, end));
        }
    }
    fn merge(mut self, other: Self) -> Self {
        self.vectors.extend(other.vectors);
        self
    }
}
/// Sink for per-request timing observations.
pub trait ObservationLogger<Error> {
    /// Marks the start of a request.
    fn log_start_of_request(&mut self);
    /// Marks the end of a request; `Some(error)` records a failure.
    fn log_end_of_request(&mut self, error: Option<Error>);
    /// Combines the observations of two loggers.
    fn merge(self, other: Self) -> Self;
}
| true |
79c53d5dd724dea357e56e6bf52daf1a3a676301
|
Rust
|
dellams/devcamp7-leap
|
/dna/course/zomes/courses/code/src/section/entry.rs
|
UTF-8
| 2,023 | 2.859375 | 3 |
[] |
no_license
|
use hdk::{
entry_definition::ValidatingEntryType,
holochain_core_types::{dna::entry_types::Sharing, validation::EntryValidationData},
holochain_json_api::{error::JsonError, json::JsonString},
holochain_persistence_api::cas::content::Address,
};
use holochain_entry_utils::HolochainEntry;
/// A titled course section entry, tied to its course anchor and its own anchor.
#[derive(Serialize, Deserialize, Debug, DefaultJson, Clone)]
pub struct Section {
    pub title: String,
    pub course_anchor_address: Address,
    pub timestamp: u64,
    pub anchor_address: Address,
}
impl Section {
    /// Builds a `Section` from its component fields.
    pub fn new(
        title: String,
        course_anchor_address: Address,
        timestamp: u64,
        anchor_address: Address,
    ) -> Self {
        // Field-init shorthand replaces the redundant `field: field` bindings.
        Section {
            title,
            course_anchor_address,
            timestamp,
            anchor_address,
        }
    }
}
impl HolochainEntry for Section {
    /// Entry-type name under which sections are committed.
    fn entry_type() -> String {
        String::from("section")
    }
}
/// Entry definition for `Section`: publicly shared, entry-only validation
/// package, with (currently stubbed) create/modify/delete validation.
pub fn entry_def() -> ValidatingEntryType {
    entry!(
        name: Section::entry_type(),
        description: "this is the definition of section",
        sharing: Sharing::Public,
        validation_package: || {
            hdk::ValidationPackageDefinition::Entry
        },
        validation: | validation_data: hdk::EntryValidationData<Section>| {
            // All three lifecycle operations currently accept unconditionally.
            match validation_data {
                EntryValidationData::Create { .. } => {
                    // TODO: implement validation
                    Ok(())
                },
                EntryValidationData::Modify { .. } => {
                    // TODO: implement validation
                    Ok(())
                },
                EntryValidationData::Delete { .. } => {
                    // TODO: implement validation
                    Ok(())
                }
            }
        },
        // Since now Section entry is a data entry that is hidden behind the SectionAnchor,
        // there won't be any links that it has.
        links:[]
    )
}
| true |
cf03d7342df3ac3cced29c7a806b264a638ef686
|
Rust
|
evq/atsaml11xxx
|
/src/adc/seqstatus/mod.rs
|
UTF-8
| 1,641 | 2.875 | 3 |
[] |
no_license
|
#[doc = r" Value read from the register"]
pub struct R {
    bits: u8,
}
impl super::SEQSTATUS {
    #[doc = r" Reads the contents of the register"]
    #[inline]
    pub fn read(&self) -> R {
        // Capture the hardware register once; the field accessors on `R`
        // then decode this snapshot without further register reads.
        R {
            bits: self.register.get(),
        }
    }
}
/// Value of the field
pub struct SEQSTATER {
    bits: u8,
}
impl SEQSTATER {
    /// Value of the field as raw bits
    #[inline]
    pub fn bits(&self) -> u8 {
        self.bits
    }
}
/// Value of the field
pub struct SEQBUSYR {
    bits: bool,
}
impl SEQBUSYR {
    /// Value of the field as raw bits
    #[inline]
    pub fn bit(&self) -> bool {
        self.bits
    }
    /// Returns `true` if the bit is clear (0)
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
    /// Returns `true` if the bit is set (1)
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
}
impl R {
    /// Value of the register as raw bits
    #[inline]
    pub fn bits(&self) -> u8 {
        self.bits
    }
    /// Bits 0:4 - Sequence State
    #[inline]
    pub fn seqstate(&self) -> SEQSTATER {
        // The field occupies the low five bits (offset 0, mask 0x1f).
        SEQSTATER {
            bits: self.bits & 0x1f,
        }
    }
    /// Bit 7 - Sequence Busy
    #[inline]
    pub fn seqbusy(&self) -> SEQBUSYR {
        // The field is the single most-significant bit.
        SEQBUSYR {
            bits: (self.bits >> 7) & 1 != 0,
        }
    }
}
| true |
48c3bef54cfe3e76420f28ed6e95a42a3802d83f
|
Rust
|
dmit/tachibana
|
/src/vec.rs
|
UTF-8
| 2,700 | 3.59375 | 4 |
[
"CC0-1.0",
"LicenseRef-scancode-public-domain"
] |
permissive
|
use std::ops::{Add, Div, Mul, Neg, Sub};
/// A three-component `f32` vector.
#[derive(Clone, Copy, Debug)]
pub struct Vec3 {
    pub x: f32,
    pub y: f32,
    pub z: f32,
}
impl Vec3 {
    /// The zero vector (0, 0, 0).
    pub const ZERO: Vec3 = Vec3 { x: 0., y: 0., z: 0. };
    /// The all-ones vector (1, 1, 1).
    pub const ONE: Vec3 = Vec3 { x: 1., y: 1., z: 1. };
    /// Applies `f` to each component, yielding a new vector.
    #[inline]
    pub fn map<F: Fn(f32) -> f32>(&self, f: F) -> Vec3 {
        Vec3 {
            x: f(self.x),
            y: f(self.y),
            z: f(self.z),
        }
    }
    /// Squared Euclidean length (no square root).
    #[inline]
    pub fn squared_length(self) -> f32 {
        self.dot(self)
    }
    /// Euclidean length.
    #[inline]
    pub fn length(self) -> f32 {
        self.squared_length().sqrt()
    }
    /// Unit-length vector in the same direction; components are NaN for the
    /// zero vector.
    #[inline]
    pub fn unit(self) -> Vec3 {
        self / self.length()
    }
    /// Dot (inner) product.
    #[inline]
    pub fn dot(&self, rhs: Vec3) -> f32 {
        self.x * rhs.x + self.y * rhs.y + self.z * rhs.z
    }
    /// Cross product.
    #[inline]
    pub fn cross(&self, rhs: Vec3) -> Vec3 {
        Vec3 {
            x: self.y * rhs.z - self.z * rhs.y,
            y: self.z * rhs.x - self.x * rhs.z,
            z: self.x * rhs.y - self.y * rhs.x,
        }
    }
}
impl Add<Vec3> for Vec3 {
    type Output = Vec3;
    /// Componentwise addition.
    #[inline]
    fn add(self, rhs: Vec3) -> Self::Output {
        Vec3 {
            x: self.x + rhs.x,
            y: self.y + rhs.y,
            z: self.z + rhs.z,
        }
    }
}
impl Sub<Vec3> for Vec3 {
    type Output = Vec3;
    /// Componentwise subtraction.
    #[inline]
    fn sub(self, rhs: Vec3) -> Self::Output {
        Vec3 {
            x: self.x - rhs.x,
            y: self.y - rhs.y,
            z: self.z - rhs.z,
        }
    }
}
impl Neg for Vec3 {
    type Output = Vec3;
    /// Componentwise negation.
    #[inline]
    fn neg(self) -> Self::Output {
        self.map(|c| -c)
    }
}
impl Mul<Vec3> for Vec3 {
    type Output = Vec3;
    /// Componentwise (Hadamard) multiplication.
    #[inline]
    fn mul(self, rhs: Vec3) -> Self::Output {
        Vec3 {
            x: self.x * rhs.x,
            y: self.y * rhs.y,
            z: self.z * rhs.z,
        }
    }
}
impl Div<Vec3> for Vec3 {
    type Output = Vec3;
    /// Componentwise division.
    #[inline]
    fn div(self, rhs: Vec3) -> Self::Output {
        Vec3 {
            x: self.x / rhs.x,
            y: self.y / rhs.y,
            z: self.z / rhs.z,
        }
    }
}
impl Mul<f32> for Vec3 {
    type Output = Vec3;
    /// Scalar multiplication.
    #[inline]
    fn mul(self, rhs: f32) -> Self::Output {
        self.map(|c| c * rhs)
    }
}
impl Div<f32> for Vec3 {
    type Output = Vec3;
    /// Scalar division.
    #[inline]
    fn div(self, rhs: f32) -> Self::Output {
        self.map(|c| c / rhs)
    }
}
| true |
85fbf5a7adb82600e8bdec97c2c9ea055c8e7a97
|
Rust
|
wlindley/advent2018
|
/sixth/one/src/main.rs
|
UTF-8
| 7,346 | 3.171875 | 3 |
[
"MIT"
] |
permissive
|
use std::cmp;
use std::collections::HashMap;
use std::fs::File;
use std::io::prelude::*;
use std::io::BufReader;
fn main() {
    let points = load_points();
    let areas = calculate_areas(points);
    // Track the seed point whose finite region covers the most cells.
    let mut best_point = Point::new(0, 0);
    let mut best_area = std::i32::MIN;
    for (candidate, area) in areas {
        if area > best_area {
            best_area = area;
            best_point = candidate;
        }
    }
    println!("Point {:?} has largest area: {}", best_point, best_area);
}
/// An integer grid coordinate.
#[derive(Debug, PartialEq, Eq, Clone, Hash)]
struct Point {
    x: i32,
    y: i32,
}
impl Point {
    fn new(x: i32, y: i32) -> Point {
        Point { x, y }
    }
    /// Manhattan (taxicab) distance to `other`.
    fn distance(&self, other: &Point) -> i32 {
        let dx = (other.x - self.x).abs();
        let dy = (other.y - self.y).abs();
        dx + dy
    }
}
/// Axis-aligned bounding box, inclusive of both corners.
#[derive(Debug, PartialEq, Eq, Clone)]
struct Rect {
    top_left: Point,
    bottom_right: Point,
}
impl Rect {
    fn new(x: i32, y: i32, x2: i32, y2: i32) -> Rect {
        Rect {
            top_left: Point::new(x, y),
            bottom_right: Point::new(x2, y2),
        }
    }
    /// The smallest rectangle containing every point in `points`.
    fn enclosing(points: &Vec<Point>) -> Rect {
        // Start inverted (MAX/MIN) so any real point shrinks the bounds.
        let seed = Rect::new(std::i32::MAX, std::i32::MAX, std::i32::MIN, std::i32::MIN);
        points.iter().fold(seed, |acc, p| {
            Rect::new(
                cmp::min(acc.top_left.x, p.x),
                cmp::min(acc.top_left.y, p.y),
                cmp::max(acc.bottom_right.x, p.x),
                cmp::max(acc.bottom_right.y, p.y),
            )
        })
    }
    /// Iterates every contained point in row-major order.
    fn points(&self) -> PointIterator {
        PointIterator {
            rect: self.clone(),
            cur: self.top_left.clone(),
        }
    }
    /// True when `point` lies on any edge of the rectangle.
    fn on_border(&self, point: &Point) -> bool {
        point.x == self.top_left.x
            || point.x == self.bottom_right.x
            || point.y == self.top_left.y
            || point.y == self.bottom_right.y
    }
}
/// Row-major cursor over the points of a `Rect`.
struct PointIterator {
    rect: Rect,
    cur: Point,
}
impl Iterator for PointIterator {
    type Item = Point;
    fn next(&mut self) -> Option<Self::Item> {
        // Once the cursor walks past the last row, iteration is done.
        if self.cur.y > self.rect.bottom_right.y {
            return None;
        }
        let current = self.cur.clone();
        // Advance left-to-right, wrapping to the next row at the right edge.
        if self.cur.x < self.rect.bottom_right.x {
            self.cur.x += 1;
        } else {
            self.cur.x = self.rect.top_left.x;
            self.cur.y += 1;
        }
        Some(current)
    }
}
/// Counts, for each seed point, how many cells of the enclosing rectangle are
/// strictly closest to it. Seeds whose region touches the rectangle border
/// extend infinitely and are pinned to `std::i32::MIN` as a sentinel.
fn calculate_areas(points: Vec<Point>) -> HashMap<Point, i32> {
    let rect = Rect::enclosing(&points);
    let mut results: HashMap<Point, i32> = HashMap::new();
    for point in rect.points() {
        // Cells equidistant from two or more seeds count for nobody.
        match find_closest(&point, &points) {
            Option::None => continue,
            Option::Some(closest) => {
                let area = results.entry(closest.clone()).or_insert(0);
                if rect.on_border(&point) {
                    // Border contact means the region is unbounded.
                    *area = std::i32::MIN;
                } else if *area != std::i32::MIN {
                    // Bug fix: interior cells previously kept incrementing a
                    // region already marked infinite, corrupting the sentinel.
                    *area += 1;
                }
            }
        }
    }
    results
}
fn find_closest<'a>(probe: &Point, points: &'a Vec<Point>) -> Option<&'a Point> {
let mut distances = HashMap::new();
for p in points {
let dist = probe.distance(p);
if distances.contains_key(&dist) {
distances.insert(dist, Option::None);
} else {
distances.insert(dist, Option::Some(p));
}
}
let mut closest = std::i32::MAX;
for (&dist, _) in &distances {
closest = cmp::min(dist, closest);
}
return *distances.entry(closest).or_default();
}
/// Reads one `x, y` pair per line from `input.txt`.
///
/// Panics when the file is missing or any line is malformed.
fn load_points() -> Vec<Point> {
    let file = File::open("input.txt").expect("could not find file");
    let reader = BufReader::new(&file);
    reader
        .lines()
        .map(|line| {
            let line = line.unwrap();
            let mut parts = line.split(',').map(str::trim);
            Point {
                x: parts.next().unwrap().parse().unwrap(),
                y: parts.next().unwrap().parse().unwrap(),
            }
        })
        .collect()
}
#[cfg(test)]
mod tests {
    use super::*;
    // Bounding boxes for degenerate, corner-pair, and interior-point inputs.
    #[test]
    fn test_rect_enclosing() {
        assert_eq!(
            Rect::new(0, 0, 0, 0),
            Rect::enclosing(&vec![Point::new(0, 0)])
        );
        assert_eq!(
            Rect::new(0, 0, 2, 2),
            Rect::enclosing(&vec![Point::new(0, 0), Point::new(2, 2)])
        );
        assert_eq!(
            Rect::new(1, 2, 3, 4),
            Rect::enclosing(&vec![Point::new(3, 4), Point::new(1, 2)])
        );
        assert_eq!(
            Rect::new(1, 2, 3, 4),
            Rect::enclosing(&vec![Point::new(3, 2), Point::new(1, 4)])
        );
        assert_eq!(
            Rect::new(1, 2, 3, 4),
            Rect::enclosing(&vec![Point::new(3, 2), Point::new(2, 3), Point::new(1, 4)])
        );
    }
    // Iteration order must be row-major, covering every contained point.
    #[test]
    fn test_points() {
        assert_eq!(
            vec![Point::new(0, 0), Point::new(1, 0)],
            Rect::new(0, 0, 1, 0).points().collect::<Vec<Point>>()
        );
        assert_eq!(
            vec![
                Point::new(0, 0),
                Point::new(1, 0),
                Point::new(0, 1),
                Point::new(1, 1)
            ],
            Rect::new(0, 0, 1, 1).points().collect::<Vec<Point>>()
        );
    }
    #[test]
    fn test_on_border() {
        let rect = Rect::new(0, 0, 5, 5);
        assert_eq!(true, rect.on_border(&Point::new(0, 0)));
        assert_eq!(true, rect.on_border(&Point::new(5, 5)));
        assert_eq!(true, rect.on_border(&Point::new(2, 0)));
        assert_eq!(true, rect.on_border(&Point::new(0, 3)));
        assert_eq!(false, rect.on_border(&Point::new(1, 1)));
        assert_eq!(false, rect.on_border(&Point::new(2, 3)));
    }
    #[test]
    fn test_distance() {
        assert_eq!(1, Point::new(0, 0).distance(&Point::new(1, 0)));
        assert_eq!(1, Point::new(0, 0).distance(&Point::new(0, 1)));
        assert_eq!(2, Point::new(0, 0).distance(&Point::new(1, 1)));
        assert_eq!(7, Point::new(1, 2).distance(&Point::new(5, -1)));
    }
    // Ties in distance (the third case) must resolve to no closest point.
    #[test]
    fn test_find_closest() {
        assert_eq!(Point::new(0, 0), *find_closest(&Point::new(0, 0), &vec![Point::new(0, 0), Point::new(2, 2)]).unwrap());
        assert_eq!(Point::new(2, 2), *find_closest(&Point::new(2, 1), &vec![Point::new(0, 0), Point::new(2, 2)]).unwrap());
        assert_eq!(Option::None, find_closest(&Point::new(1, 1), &vec![Point::new(0, 0), Point::new(2, 2)]));
    }
    // Only finite (non-border) regions are asserted on; border-touching
    // seeds carry sentinel values.
    #[test]
    fn test_calculate_areas() {
        let areas = calculate_areas(vec![
            Point::new(1, 1), //inf
            Point::new(1, 6), //inf
            Point::new(8, 3), //inf
            Point::new(3, 4), // 9
            Point::new(5, 5), //17
            Point::new(8, 9), //inf
        ]);
        assert_eq!(9, areas[&Point::new(3, 4)]);
        assert_eq!(17, areas[&Point::new(5, 5)]);
        let areas = calculate_areas(vec![
            Point::new(0, 0), //inf
            Point::new(0, 8), //inf
            Point::new(8, 0), //inf
            Point::new(8, 8), //inf
            Point::new(4, 4), //23
        ]);
        assert_eq!(25, areas[&Point::new(4, 4)]);
    }
}
| true |
4568cd74d37017cb2a1e09a40c0d6dc7a5b089f1
|
Rust
|
winksaville/fuchsia
|
/third_party/rust_crates/vendor/tokio/tests/enumerate.rs
|
UTF-8
| 508 | 2.5625 | 3 |
[
"BSD-3-Clause",
"MIT"
] |
permissive
|
extern crate futures;
extern crate tokio;
extern crate tokio_executor;
extern crate tokio_timer;
use futures::sync::mpsc;
use tokio::util::StreamExt;
// Verifies that `StreamExt::enumerate` pairs each received item with its
// 0-based index, using a producer thread feeding a bounded channel.
#[test]
fn enumerate() {
    use futures::*;
    let (mut tx, rx) = mpsc::channel(1);
    // `move` is required: the closure reassigns `tx` and outlives this stack
    // frame, so the sender must be captured by value to satisfy the
    // `'static` bound on `thread::spawn`; borrowing a local would not compile.
    std::thread::spawn(move || {
        for i in 0..5 {
            tx = tx.send(i * 2).wait().unwrap();
        }
    });
    let result = rx.enumerate().collect();
    assert_eq!(
        result.wait(),
        Ok(vec![(0, 0), (1, 2), (2, 4), (3, 6), (4, 8)])
    );
}
| true |
84975ba85886a35f09d0c422c8355396cba5d0a6
|
Rust
|
Miniwoffer/gitfuse-rs
|
/src/filesystem/filesystem_entry.rs
|
UTF-8
| 8,678 | 3.078125 | 3 |
[
"MIT"
] |
permissive
|
use filesystem::error_codes;
use git2::{Oid, Repository, Tree, TreeEntry};
use std::os::raw::c_int;
use std::str::Split;
use fuse::FileType;
#[derive(PartialEq)]
#[derive(Debug)]
/// In-memory node of the FUSE view over a git tree: either a directory with
/// `children`, or a regular file optionally backed by a git blob.
pub struct FilesystemEntry {
    pub name: String,
    pub file_type: FileType,
    // Backing git object id; `None` for entries created purely in memory.
    pub oid: Option<Oid>,
    // Index of this entry's full path in the global inode table.
    pub ino: usize,
    pub children: Vec<FilesystemEntry>,
    // Size in bytes (blob content length for files, 0 for directories).
    pub size: u64,
    // git filemode bits (e.g. 0o100644).
    pub file_mode: i32,
    //write functionality
    // In-memory buffer for pending writes; present for regular files only.
    pub content: Option<Vec<u8>>,
    // NOTE(review): `write`/`write_mode` are set by write paths defined
    // elsewhere — their exact semantics are not visible in this file chunk.
    pub write: bool,
    pub write_mode: u32,
}
/// Flat (oid, name, filemode) triple collected while serialising a
/// directory back into a git tree.
struct GitEntry {
    pub oid: Oid,
    pub name: String,
    pub file_mode: i32,
}
/// Operations on [`FilesystemEntry`]: construction from git objects, child
/// lookup/mutation by path or name, and serialisation back into a repository.
impl FilesystemEntry {
    /// Creates a fresh in-memory entry (no backing git object) and registers
    /// its full path in the `inodes` table; `ino` is its index in that table.
    pub fn new(file_type: FileType, name: String, path: String, inodes: &mut Vec<String>, file_mode: i32) -> Self {
        inodes.push(path + "/" + name.as_str());
        Self {
            name,
            file_type,
            oid: None,
            ino: inodes.len() - 1,
            children: Vec::new(),
            size: 0u64,
            // Only regular files carry a write buffer.
            content: match file_type {
                FileType::RegularFile => Some(Vec::new()),
                _ => None,
            },
            write: false,
            write_mode: 0,
            file_mode,
        }
    }
    /// Appends `file` as a direct child and returns a reference to it, or
    /// returns `None` (discarding `file`) when a child with that name exists.
    pub fn add(&mut self, file: FilesystemEntry) -> Option<&FilesystemEntry> {
        if self.index(file.name.as_str()).is_some() {
            return None;
        }
        self.children.push(file);
        Some(self.children.last().unwrap())
    }
    /// Removes the direct child called `name` if its type matches
    /// `file_type`.
    ///
    /// Errors: `ENOENT` when no such child exists, `ENOTDIR` on a type
    /// mismatch.
    pub fn remove(
        &mut self,
        name: &str,
        file_type: FileType,
        inodes: &mut Vec<String>,
    ) -> Result<(), c_int> {
        let index = match self.children.iter().position(|c| c.name == name) {
            Some(i) => i,
            None => return Err(error_codes::ENOENT),
        };
        if self.children[index].file_type != file_type {
            return Err(error_codes::ENOTDIR);
        }
        // NOTE(review): removing from `inodes` shifts the indices of every
        // later entry while their `ino` fields keep the old values — verify
        // that callers renumber, otherwise inode lookups go stale.
        inodes.remove(self.children[index].ino);
        self.children.remove(index);
        Ok(())
    }
    /// Resolves a `/`-separated path relative to this entry; the empty path
    /// resolves to `self`.
    pub fn get_path(&self, path: &str) -> Option<&FilesystemEntry> {
        if path.is_empty() {
            return Some(self);
        }
        // Split off the leading component; `rest` is "" for a leaf name.
        // ('/' is a single byte, so byte slicing is safe here.)
        let (name, rest) = match path.find('/') {
            Some(pos) => (&path[..pos], &path[pos + 1..]),
            None => (path, ""),
        };
        self.index(name)?.get_path(rest)
    }
    /// Mutable variant of [`FilesystemEntry::get_path`].
    pub fn get_path_mut(&mut self, path: &str) -> Option<&mut FilesystemEntry> {
        if path.is_empty() {
            return Some(self);
        }
        let (name, rest) = match path.find('/') {
            Some(pos) => (&path[..pos], &path[pos + 1..]),
            None => (path, ""),
        };
        self.index_mut(name)?.get_path_mut(rest)
    }
    /// Looks up a direct child by name.
    pub fn index(&self, index: &str) -> Option<&FilesystemEntry> {
        self.children.iter().find(|c| c.name == index)
    }
    /// Mutable lookup of a direct child by name.
    pub fn index_mut(&mut self, index: &str) -> Option<&mut FilesystemEntry> {
        self.children.iter_mut().find(|c| c.name == index)
    }
    /// Builds a directory entry — and, recursively, all of its children —
    /// from a git `Tree` object, registering every path in `inodes`.
    pub fn from_tree(
        tree: &Tree,
        repo: &Repository,
        name: String,
        mut path: String,
        inodes: &mut Vec<String>,
        file_mode: i32,
    ) -> FilesystemEntry {
        if !path.is_empty() {
            path += "/";
        }
        let mut children = Vec::new();
        for entry in tree {
            children.push(FilesystemEntry::from_tree_entry(
                &entry,
                repo,
                path.clone() + name.as_str(),
                inodes,
            ));
        }
        // Children are registered first, then the directory itself.
        inodes.push(path + name.as_str());
        Self {
            name,
            file_type: FileType::Directory,
            oid: Some(tree.id()),
            ino: inodes.len() - 1,
            children,
            size: 0u64,
            content: None,
            write: false,
            write_mode: 0,
            file_mode,
        }
    }
    /// Converts one git `TreeEntry` into a filesystem entry: blobs become
    /// regular files, sub-trees become directories.
    pub fn from_tree_entry(
        tree_entry: &TreeEntry,
        repo: &Repository,
        path: String,
        inodes: &mut Vec<String>,
    ) -> FilesystemEntry {
        let name: String = tree_entry.name().unwrap().to_owned();
        let file_mode = tree_entry.filemode();
        let object = tree_entry.to_object(repo).unwrap();
        let mut full_path = path.clone();
        if !full_path.is_empty() {
            full_path += "/";
        }
        full_path += name.as_str();
        let oid = object.id();
        // Blob => regular file whose size is the blob's content length.
        if let Ok(blob) = object.clone().into_blob() {
            let size = blob.content().len() as u64;
            inodes.push(full_path);
            return FilesystemEntry {
                name,
                file_type: FileType::RegularFile,
                oid: Some(oid),
                ino: inodes.len() - 1,
                children: Vec::new(),
                size,
                content: Some(Vec::new()),
                write: false,
                write_mode: 0,
                file_mode,
            };
        }
        match object.as_tree() {
            Some(t) => FilesystemEntry::from_tree(t, repo, name, path, inodes, file_mode),
            // Neither blob nor sub-tree: represent it as an empty directory.
            None => {
                inodes.push(full_path);
                FilesystemEntry {
                    name,
                    file_type: FileType::Directory,
                    oid: Some(oid),
                    ino: inodes.len() - 1,
                    children: Vec::new(),
                    size: 0,
                    content: None,
                    write: false,
                    write_mode: 0,
                    file_mode,
                }
            }
        }
    }
    /// Serialises this entry back into the repository: regular files return
    /// their existing blob oid, directories are written as trees. Other
    /// file types yield `None` and are skipped by their parent directory.
    pub fn to_git_object(&self, repo: &mut Repository) -> Option<Oid> {
        match self.file_type {
            FileType::RegularFile => self.oid,
            FileType::Directory => {
                let mut entries = Vec::new();
                for child in &self.children {
                    let oid = match child.to_git_object(repo) {
                        Some(oid) => oid,
                        None => continue,
                    };
                    entries.push(GitEntry {
                        name: child.name.clone(),
                        oid,
                        file_mode: child.file_mode,
                    });
                }
                // git cannot keep an empty directory, so store a placeholder
                // `.gitfs` blob to keep the tree alive.
                if entries.is_empty() {
                    let oid = match repo.blob(&[0u8, 0]) {
                        Ok(oid) => oid,
                        // `panic!(err)` with a non-string payload is
                        // deprecated (an error in the 2021 edition).
                        Err(e) => panic!("{}", e),
                    };
                    println!("{:?}", oid);
                    entries.push(GitEntry {
                        name: ".gitfs".to_owned(),
                        oid,
                        file_mode: 0o100000,
                    });
                }
                match repo.treebuilder(None) {
                    Ok(mut tb) => {
                        for entry in entries {
                            // NOTE(review): insert failures are deliberately
                            // ignored; consider propagating them instead of
                            // writing a partial tree.
                            let _ = tb.insert(entry.name, entry.oid, entry.file_mode);
                        }
                        Some(tb.write().unwrap())
                    }
                    Err(e) => panic!("{}", e),
                }
            }
            _ => None,
        }
    }
}
| true |
2eff298ba11c320c6ac2361f09f6f52e8e2d387a
|
Rust
|
wezm/dslite2svd
|
/crates/tm4c123x/src/uart0/im/mod.rs
|
UTF-8
| 15,079 | 2.78125 | 3 |
[
"0BSD",
"BSD-3-Clause"
] |
permissive
|
#[doc = r" Value read from the register"]
pub struct R {
    // Raw 32-bit snapshot taken from the IM (interrupt mask) register.
    bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
    // Staged 32-bit value to be committed to the IM register.
    bits: u32,
}
// svd2rust-style accessors for the UART0 IM (interrupt mask) register.
impl super::IM {
    #[doc = r" Modifies the contents of the register"]
    #[inline]
    pub fn modify<F>(&self, f: F)
    where
        for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
    {
        // Read-modify-write: the writer is seeded with the current hardware
        // value so fields the closure does not touch are preserved.
        let bits = self.register.get();
        let r = R { bits: bits };
        let mut w = W { bits: bits };
        f(&r, &mut w);
        self.register.set(w.bits);
    }
    #[doc = r" Reads the contents of the register"]
    #[inline]
    pub fn read(&self) -> R {
        R { bits: self.register.get() }
    }
    #[doc = r" Writes to the register"]
    #[inline]
    pub fn write<F>(&self, f: F)
    where
        F: FnOnce(&mut W) -> &mut W,
    {
        // Unlike `modify`, the writer starts from the reset value, so any
        // field not set by the closure is written as its reset state.
        let mut w = W::reset_value();
        f(&mut w);
        self.register.set(w.bits);
    }
    #[doc = r" Writes the reset value to the register"]
    #[inline]
    pub fn reset(&self) {
        self.write(|w| w)
    }
}
#[doc = r" Value of the field"]
pub struct CTSMIMR {
bits: bool,
}
impl CTSMIMR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct RXIMR {
bits: bool,
}
impl RXIMR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct TXIMR {
bits: bool,
}
impl TXIMR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct RTIMR {
bits: bool,
}
impl RTIMR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct FEIMR {
bits: bool,
}
impl FEIMR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct PEIMR {
bits: bool,
}
impl PEIMR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct BEIMR {
bits: bool,
}
impl BEIMR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct OEIMR {
bits: bool,
}
impl OEIMR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct _9BITIMR {
bits: bool,
}
impl _9BITIMR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Proxy"]
pub struct _CTSMIMW<'a> {
w: &'a mut W,
}
impl<'a> _CTSMIMW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 1;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _RXIMW<'a> {
w: &'a mut W,
}
impl<'a> _RXIMW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 4;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _TXIMW<'a> {
w: &'a mut W,
}
impl<'a> _TXIMW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 5;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _RTIMW<'a> {
w: &'a mut W,
}
impl<'a> _RTIMW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 6;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _FEIMW<'a> {
w: &'a mut W,
}
impl<'a> _FEIMW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 7;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _PEIMW<'a> {
w: &'a mut W,
}
impl<'a> _PEIMW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 8;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _BEIMW<'a> {
w: &'a mut W,
}
impl<'a> _BEIMW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 9;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _OEIMW<'a> {
w: &'a mut W,
}
impl<'a> _OEIMW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 10;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct __9BITIMW<'a> {
w: &'a mut W,
}
impl<'a> __9BITIMW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 12;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
impl R {
#[doc = r" Value of the register as raw bits"]
#[inline]
pub fn bits(&self) -> u32 {
self.bits
}
#[doc = "Bit 1 - UART Clear to Send Modem Interrupt Mask"]
#[inline]
pub fn ctsmim(&self) -> CTSMIMR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 1;
((self.bits >> OFFSET) & MASK as u32) != 0
};
CTSMIMR { bits }
}
#[doc = "Bit 4 - UART Receive Interrupt Mask"]
#[inline]
pub fn rxim(&self) -> RXIMR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 4;
((self.bits >> OFFSET) & MASK as u32) != 0
};
RXIMR { bits }
}
#[doc = "Bit 5 - UART Transmit Interrupt Mask"]
#[inline]
pub fn txim(&self) -> TXIMR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 5;
((self.bits >> OFFSET) & MASK as u32) != 0
};
TXIMR { bits }
}
#[doc = "Bit 6 - UART Receive Time-Out Interrupt Mask"]
#[inline]
pub fn rtim(&self) -> RTIMR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 6;
((self.bits >> OFFSET) & MASK as u32) != 0
};
RTIMR { bits }
}
#[doc = "Bit 7 - UART Framing Error Interrupt Mask"]
#[inline]
pub fn feim(&self) -> FEIMR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 7;
((self.bits >> OFFSET) & MASK as u32) != 0
};
FEIMR { bits }
}
#[doc = "Bit 8 - UART Parity Error Interrupt Mask"]
#[inline]
pub fn peim(&self) -> PEIMR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 8;
((self.bits >> OFFSET) & MASK as u32) != 0
};
PEIMR { bits }
}
#[doc = "Bit 9 - UART Break Error Interrupt Mask"]
#[inline]
pub fn beim(&self) -> BEIMR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 9;
((self.bits >> OFFSET) & MASK as u32) != 0
};
BEIMR { bits }
}
#[doc = "Bit 10 - UART Overrun Error Interrupt Mask"]
#[inline]
pub fn oeim(&self) -> OEIMR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 10;
((self.bits >> OFFSET) & MASK as u32) != 0
};
OEIMR { bits }
}
#[doc = "Bit 12 - 9-Bit Mode Interrupt Mask"]
#[inline]
pub fn _9bitim(&self) -> _9BITIMR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 12;
((self.bits >> OFFSET) & MASK as u32) != 0
};
_9BITIMR { bits }
}
}
impl W {
#[doc = r" Reset value of the register"]
#[inline]
pub fn reset_value() -> W {
W { bits: 0 }
}
#[doc = r" Writes raw bits to the register"]
#[inline]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
#[doc = "Bit 1 - UART Clear to Send Modem Interrupt Mask"]
#[inline]
pub fn ctsmim(&mut self) -> _CTSMIMW {
_CTSMIMW { w: self }
}
#[doc = "Bit 4 - UART Receive Interrupt Mask"]
#[inline]
pub fn rxim(&mut self) -> _RXIMW {
_RXIMW { w: self }
}
#[doc = "Bit 5 - UART Transmit Interrupt Mask"]
#[inline]
pub fn txim(&mut self) -> _TXIMW {
_TXIMW { w: self }
}
#[doc = "Bit 6 - UART Receive Time-Out Interrupt Mask"]
#[inline]
pub fn rtim(&mut self) -> _RTIMW {
_RTIMW { w: self }
}
#[doc = "Bit 7 - UART Framing Error Interrupt Mask"]
#[inline]
pub fn feim(&mut self) -> _FEIMW {
_FEIMW { w: self }
}
#[doc = "Bit 8 - UART Parity Error Interrupt Mask"]
#[inline]
pub fn peim(&mut self) -> _PEIMW {
_PEIMW { w: self }
}
#[doc = "Bit 9 - UART Break Error Interrupt Mask"]
#[inline]
pub fn beim(&mut self) -> _BEIMW {
_BEIMW { w: self }
}
#[doc = "Bit 10 - UART Overrun Error Interrupt Mask"]
#[inline]
pub fn oeim(&mut self) -> _OEIMW {
_OEIMW { w: self }
}
#[doc = "Bit 12 - 9-Bit Mode Interrupt Mask"]
#[inline]
pub fn _9bitim(&mut self) -> __9BITIMW {
__9BITIMW { w: self }
}
}
| true |
ba8a793c44d40ff7fd1962b0b02116f693ae3e1b
|
Rust
|
triptych/Oxygengine
|
/oxygengine-ignite/src/main.rs
|
UTF-8
| 11,437 | 2.6875 | 3 |
[
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
use cargo_metadata::MetadataCommand;
use clap::{App, Arg, SubCommand};
use serde::Deserialize;
use std::{
collections::HashMap,
env::{current_dir, current_exe, vars},
fs::{copy, create_dir_all, read_dir, read_to_string, write},
io::{Error, ErrorKind, Result},
path::Path,
process::{Command, Stdio},
};
/// Lifecycle hooks around project creation and building; presets can attach
/// a note template and a script to each.
enum ActionType {
    PreCreate,
    PostCreate,
    PreBuild,
    PostBuild,
}
/// Contents of a preset's `<preset>.toml` manifest: message templates shown
/// to the user and command templates executed at each lifecycle stage.
#[derive(Default, Deserialize)]
struct PresetManifest {
    // Note templates printed before/after create/build.
    #[serde(default)]
    pub notes: Actions,
    // Command-line templates executed before/after create/build.
    #[serde(default)]
    pub scripts: Actions,
}
impl PresetManifest {
    /// Prints the note template registered for `action`, if any.
    pub fn print_note(&self, action: ActionType) {
        if let Some(text) = self.notes.format(action) {
            println!("{}", text);
        }
    }
    /// Runs the script registered for `action` inside `wkdir`.
    ///
    /// Every `~$VAR$~` placeholder is replaced with the value of environment
    /// variable `VAR`, then the command line is split on the literal `<|>`
    /// separator (first token = program, rest = arguments). The child
    /// inherits stdio; a spawn failure is propagated via `?`, but the exit
    /// status itself is not inspected.
    pub fn execute_script(&self, action: ActionType, wkdir: &Path) -> Result<()> {
        if let Some(mut text) = self.scripts.format(action) {
            for (key, value) in vars() {
                text = text.replace(&format!("~${}$~", key), &value);
            }
            let parts = text
                .split("<|>")
                .map(|part| part.trim())
                .collect::<Vec<_>>();
            // NOTE(review): an empty script string would make `parts[0]`
            // panic — assumes manifests never register empty scripts.
            let output = Command::new(parts[0])
                .args(&parts[1..])
                .envs(vars().map(|(k, v)| (k, v)))
                .current_dir(wkdir)
                .stdin(Stdio::inherit())
                .stdout(Stdio::inherit())
                .stderr(Stdio::inherit())
                .output();
            println!("{}", parts.join(" "));
            output?;
        }
        Ok(())
    }
}
/// One optional template per lifecycle stage, plus a substitution dictionary
/// applied as `~%KEY%~` placeholders when a template is formatted.
#[derive(Default, Deserialize)]
struct Actions {
    pub precreate: Option<String>,
    pub postcreate: Option<String>,
    pub prebuild: Option<String>,
    pub postbuild: Option<String>,
    // Filled in at runtime (never from TOML): placeholder name -> value.
    #[serde(skip)]
    pub dictionary: HashMap<String, String>,
}
impl Actions {
    /// Picks the template registered for `action` and substitutes every
    /// `~%KEY%~` placeholder with its value from `dictionary`.
    /// Returns `None` when no template is registered for that action.
    pub fn format(&self, action: ActionType) -> Option<String> {
        let template = match action {
            ActionType::PreCreate => self.precreate.as_ref(),
            ActionType::PostCreate => self.postcreate.as_ref(),
            ActionType::PreBuild => self.prebuild.as_ref(),
            ActionType::PostBuild => self.postbuild.as_ref(),
        }?;
        let mut rendered = template.clone();
        for (key, value) in &self.dictionary {
            let placeholder = format!("~%{}%~", key);
            rendered = rendered.replace(&placeholder, value);
        }
        Some(rendered)
    }
}
fn main() -> Result<()> {
let meta = MetadataCommand::new().exec();
let mut root_path = if let Ok(meta) = meta {
meta.packages
.iter()
.find_map(|p| {
if p.name == env!("CARGO_PKG_NAME") {
Some(p.manifest_path.clone())
} else {
None
}
})
.unwrap_or_else(|| current_exe().unwrap())
} else {
current_exe()?
};
root_path.pop();
let presets_path = root_path.join("presets");
let presets_list = read_dir(&presets_path)?
.filter_map(|entry| {
let path = entry.unwrap().path();
if path.is_dir() {
Some(path.file_name().unwrap().to_str().unwrap().to_owned())
} else {
None
}
})
.collect::<Vec<_>>();
if presets_list.is_empty() {
return Err(Error::new(
ErrorKind::NotFound,
"There are no presets installed - consider reinstalling oxygengine-ignite, it might be corrupted",
));
}
let matches = App::new(env!("CARGO_PKG_NAME"))
.version(env!("CARGO_PKG_VERSION"))
.author(env!("CARGO_PKG_AUTHORS"))
.about(env!("CARGO_PKG_DESCRIPTION"))
.subcommand(
SubCommand::with_name("new")
.about("Create new Oxygen Engine project")
.arg(
Arg::with_name("id")
.value_name("ID")
.help("Project ID")
.takes_value(true)
.required(true),
)
.arg(
Arg::with_name("destination")
.short("d")
.long("destination")
.value_name("PATH")
.help("Project destination path")
.takes_value(true)
.required(false),
)
.arg(
Arg::with_name("preset")
.short("p")
.long("preset")
.help(&format!("Project preset ({})", presets_list.join(", ")))
.takes_value(false)
.required(false)
.default_value(presets_list.last().unwrap()),
)
.arg(
Arg::with_name("dont-build")
.long("dont-build")
.help("Prepare project and exit without building it")
.takes_value(false)
.required(false),
)
.arg(
Arg::with_name("quiet")
.short("q")
.long("quiet")
.help("Don't show progress information")
.takes_value(false)
.required(false),
),
)
.subcommand(
SubCommand::with_name("pack")
.about("Pack assets for Oxygen Engine")
.arg(
Arg::with_name("input")
.short("i")
.long("input")
.value_name("PATH")
.help("Assets root folder")
.takes_value(true)
.required(true),
)
.arg(
Arg::with_name("output")
.short("o")
.long("output")
.value_name("PATH")
.help("Asset pack output file")
.takes_value(true)
.required(true),
)
.arg(
Arg::with_name("quiet")
.short("q")
.long("quiet")
.help("Don't show progress information")
.takes_value(false)
.required(false),
),
)
.get_matches();
if let Some(matches) = matches.subcommand_matches("new") {
let id = matches.value_of("id").unwrap();
let destination = matches.value_of("destination");
let preset = matches.value_of("preset").unwrap();
let dont_build = matches.is_present("dont-build");
let quiet = matches.is_present("quiet");
let preset_path = presets_path.join(preset);
if !preset_path.exists() {
return Err(Error::new(
ErrorKind::NotFound,
format!("Preset not found: {} (in path: {:?})", preset, preset_path),
));
}
let mut destination_path = if let Some(destination) = destination {
destination.into()
} else {
current_dir()?
};
destination_path.push(id);
if let Err(err) = create_dir_all(&destination_path) {
if err.kind() != ErrorKind::AlreadyExists {
return Err(Error::new(
ErrorKind::Other,
format!("Could not create directory: {:?}", destination_path),
));
}
}
let preset_manifest_path = presets_path.join(format!("{}.toml", preset));
let preset_manifest = if preset_manifest_path.exists() {
let contents = read_to_string(&preset_manifest_path)?;
if let Ok(mut manifest) = toml::from_str::<PresetManifest>(&contents) {
manifest.notes.dictionary.insert(
"IGNITE_DESTINATION_PATH".to_owned(),
destination_path.to_str().unwrap().to_owned(),
);
manifest.scripts.dictionary = manifest.notes.dictionary.clone();
Some(manifest)
} else {
None
}
} else {
None
};
if !quiet {
println!("Make project: {:?}", &destination_path);
if let Some(preset_manifest) = &preset_manifest {
preset_manifest.print_note(ActionType::PreCreate);
}
println!("* Prepare project structure...");
}
if let Some(preset_manifest) = &preset_manifest {
preset_manifest.execute_script(ActionType::PreCreate, &destination_path)?;
}
copy_dir(&preset_path, &destination_path, &id)?;
if let Some(preset_manifest) = &preset_manifest {
preset_manifest.execute_script(ActionType::PostCreate, &destination_path)?;
}
if !quiet {
println!(" Done!");
if let Some(preset_manifest) = &preset_manifest {
preset_manifest.print_note(ActionType::PostCreate);
}
}
if !dont_build {
if !quiet {
if let Some(preset_manifest) = &preset_manifest {
preset_manifest.print_note(ActionType::PreBuild);
}
println!("* Build rust project...");
}
if let Some(preset_manifest) = &preset_manifest {
preset_manifest.execute_script(ActionType::PreBuild, &destination_path)?;
}
Command::new("cargo")
.arg("build")
.current_dir(&destination_path)
.output()?;
if let Some(preset_manifest) = &preset_manifest {
preset_manifest.execute_script(ActionType::PostBuild, &destination_path)?;
}
if !quiet {
println!(" Done!");
if let Some(preset_manifest) = &preset_manifest {
preset_manifest.print_note(ActionType::PostBuild);
}
}
}
} else if let Some(matches) = matches.subcommand_matches("pack") {
let input = matches.value_of("input").unwrap();
let output = matches.value_of("output").unwrap();
let quiet = matches.is_present("quiet");
oxygengine_build_tools::pack::pack_assets_and_write_to_file(input, output, quiet)?;
}
Ok(())
}
/// Recursively copies the preset tree at `from` into `to`, substituting the
/// `~%IGNITE_ID%~` placeholder with `id` in every UTF-8 text file.
/// Non-UTF-8 files are copied verbatim; a non-directory `from` is a no-op.
fn copy_dir(from: &Path, to: &Path, id: &str) -> Result<()> {
    if !from.is_dir() {
        return Ok(());
    }
    for entry in read_dir(from)? {
        let source = entry?.path();
        let name = source.file_name().unwrap();
        if source.is_dir() {
            let target = to.join(name);
            create_dir_all(&target)?;
            copy_dir(&source, &target, id)?;
        } else if source.is_file() {
            let target = to.join(name);
            match read_to_string(&source) {
                // Text file: rewrite the project-id placeholder while copying.
                Ok(text) => write(target, text.replace("~%IGNITE_ID%~", id))?,
                // Binary (non-UTF-8) file: plain byte-for-byte copy.
                Err(_) => {
                    copy(&source, target)?;
                }
            }
        }
    }
    Ok(())
}
| true |
d16a9af1c925e15e0f1a81e7a2053ba1633eee57
|
Rust
|
dimforge/nalgebra
|
/src/linalg/balancing.rs
|
UTF-8
| 2,745 | 3.078125 | 3 |
[
"Apache-2.0"
] |
permissive
|
//! Functions for balancing a matrix.
use simba::scalar::RealField;
use std::ops::{DivAssign, MulAssign};
use crate::allocator::Allocator;
use crate::base::dimension::Dim;
use crate::base::{Const, DefaultAllocator, OMatrix, OVector};
/// Applies in-place a modified Parlett and Reinsch matrix balancing with 2-norm to the matrix and returns
/// the corresponding diagonal transformation.
///
/// See <https://arxiv.org/pdf/1401.5766.pdf>
pub fn balance_parlett_reinsch<T: RealField, D: Dim>(matrix: &mut OMatrix<T, D, D>) -> OVector<T, D>
where
    DefaultAllocator: Allocator<T, D, D> + Allocator<T, D>,
{
    assert!(matrix.is_square(), "Unable to balance a non-square matrix.");
    let dim = matrix.shape_generic().0;
    // Scale factors are powers of the radix (2), so applying them is exact in
    // binary floating point and does not perturb the eigenvalues.
    let radix: T = crate::convert(2.0f64);
    let mut d = OVector::from_element_generic(dim, Const::<1>, T::one());
    let mut converged = false;
    // Sweep rows/columns repeatedly until no scaling was applied in a pass.
    while !converged {
        converged = true;
        for i in 0..dim.value() {
            let mut n_col = matrix.column(i).norm_squared();
            let mut n_row = matrix.row(i).norm_squared();
            let mut f = T::one();
            // `s` keeps the pre-scaling squared norms for the convergence test.
            let s = n_col.clone() + n_row.clone();
            n_col = n_col.sqrt();
            n_row = n_row.sqrt();
            // A zero row or column cannot be balanced; leave it untouched.
            if n_col.clone().is_zero() || n_row.clone().is_zero() {
                continue;
            }
            // Grow/shrink `f` by powers of the radix until the column and row
            // norms are within a factor of `radix` of each other.
            while n_col.clone() < n_row.clone() / radix.clone() {
                n_col *= radix.clone();
                n_row /= radix.clone();
                f *= radix.clone();
            }
            while n_col.clone() >= n_row.clone() * radix.clone() {
                n_col /= radix.clone();
                n_row *= radix.clone();
                f /= radix.clone();
            }
            let eps: T = crate::convert(0.95);
            // Only apply the similarity scaling if it reduces the combined
            // squared norm by at least the factor `eps` (avoids cycling).
            #[allow(clippy::suspicious_operation_groupings)]
            if n_col.clone() * n_col + n_row.clone() * n_row < eps * s {
                converged = false;
                d[i] *= f.clone();
                matrix.column_mut(i).mul_assign(f.clone());
                matrix.row_mut(i).div_assign(f.clone());
            }
        }
    }
    d
}
/// Computes in-place `D * m * D.inverse()`, where `D` is the matrix with diagonal `d`.
pub fn unbalance<T: RealField, D: Dim>(m: &mut OMatrix<T, D, D>, d: &OVector<T, D>)
where
    DefaultAllocator: Allocator<T, D, D> + Allocator<T, D>,
{
    assert!(m.is_square(), "Unable to unbalance a non-square matrix.");
    assert_eq!(m.nrows(), d.len(), "Unbalancing: mismatched dimensions.");
    // Element-wise: m[i, j] <- m[i, j] * d[i] / d[j], column by column.
    for j in 0..d.len() {
        let mut col = m.column_mut(j);
        let denom = T::one() / d[j].clone();
        for i in 0..d.len() {
            col[i] *= d[i].clone() * denom.clone();
        }
    }
}
| true |
06fc77f6b098a290c738e2a56c90869de8338982
|
Rust
|
emkay/insertion_sort
|
/src/lib.rs
|
UTF-8
| 488 | 3.328125 | 3 |
[] |
no_license
|
mod insertion_sort;
#[cfg(test)]
mod test {
    // End-to-end check: a shuffled vector comes back sorted ascending and
    // with its original length preserved.
    #[test]
    fn it_works() {
        use super::insertion_sort;
        let vec = vec![3, 1, 2, 8, 4, 7, 5];
        let sorted = insertion_sort::sort(vec);
        assert_eq!(sorted.len(), 7);
        assert_eq!(sorted[0], 1);
        assert_eq!(sorted[1], 2);
        assert_eq!(sorted[2], 3);
        assert_eq!(sorted[3], 4);
        assert_eq!(sorted[4], 5);
        assert_eq!(sorted[5], 7);
        assert_eq!(sorted[6], 8);
    }
}
| true |
0b8398bd32e3a121be71d74707ba65e6ab921fc7
|
Rust
|
iosmanthus/regular-language-utils
|
/src/re.rs
|
UTF-8
| 6,779 | 2.875 | 3 |
[] |
no_license
|
use crate::ast::Ast;
use crate::nfa::{Nfa, Transition};
use maplit::{hashmap, hashset};
use std::rc::Rc;
use ReOperator::*;
use ReToken::*;
#[derive(Clone, Copy, PartialEq, Debug)]
/// Regular-expression operators understood by the parser. `Concat` is the
/// implicit juxtaposition operator; `Left`/`Right` are the grouping parens.
pub enum ReOperator {
    Concat,
    Alter,
    Star,
    Left,
    Right,
}
impl ReOperator {
    /// Binding strength used by the shunting-yard parser; higher binds
    /// tighter. Parentheses sit at the bottom so they never trigger reductions.
    pub fn priority(self) -> i32 {
        match self {
            Star => 3,
            Concat => 2,
            Alter => 1,
            Left | Right => 0,
        }
    }
    /// Pops this operator's operands off `ctx` and pushes back a single AST
    /// node applying the operator to them, preserving their original order.
    pub fn eval(self, ctx: &mut Vec<Ast<ReToken>>) {
        let arity = match self {
            Concat | Alter => 2,
            Star => 1,
            Left | Right => 0,
        };
        // Operands come off the stack in reverse, so flip them back.
        let mut operands: Vec<_> = (0..arity)
            .map(|_| Rc::new(ctx.pop().unwrap()))
            .collect();
        operands.reverse();
        let children = if operands.is_empty() {
            None
        } else {
            Some(operands)
        };
        ctx.push(Ast::new(Operator(self), children));
    }
}
/// A lexed regular-expression token: either a literal symbol or an operator.
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ReToken {
    /// A literal character from the input alphabet.
    Symbol(char),
    /// A regular-expression operator.
    Operator(ReOperator),
}
impl ReToken {
    /// Classifies a raw character: the metacharacters `*`, `|`, `(`, `)`
    /// become operators, everything else is a plain symbol.
    pub fn new(c: char) -> Self {
        match c {
            '*' => Operator(Star),
            '|' => Operator(Alter),
            '(' => Operator(Left),
            ')' => Operator(Right),
            _ => Symbol(c),
        }
    }
    /// Returns `true` if the token is an operator (i.e. not a symbol).
    pub fn is_operator(self) -> bool {
        !self.is_symbol()
    }
    /// Returns `true` if the token is a literal symbol.
    pub fn is_symbol(self) -> bool {
        // `matches!` replaces the manual `if let … { true } else { false }`.
        matches!(self, Symbol(_))
    }
}
/// A parsed regular expression, stored as an operator/symbol AST.
#[derive(Clone, Debug, PartialEq)]
pub struct Re {
    ast: Ast<ReToken>,
}
impl Re {
    /// Parses `pattern` with an operator-precedence (shunting-yard style)
    /// algorithm, inserting implicit `Concat` operators between adjacent
    /// operands.
    ///
    /// # Panics
    /// Panics on malformed patterns (e.g. unbalanced parentheses or an empty
    /// pattern) via the internal `unwrap`s — NOTE(review): no graceful error
    /// path exists; confirm inputs are pre-validated by callers.
    pub fn new(pattern: &str) -> Self {
        // `ops` is the pending-operator stack, `asts` the operand stack.
        let mut ops: Vec<ReOperator> = vec![];
        let mut asts: Vec<Ast<ReToken>> = vec![];
        // let `prev` be a `(` to push the first symbol into stack
        let mut prev = Operator(Left);
        for c in pattern.chars() {
            // Construct a token from a char
            let mut c = ReToken::new(c);
            let mut temp = None;
            if c.is_symbol() {
                match prev {
                    // After `|` or `(` a symbol starts a new operand.
                    Operator(Alter) | Operator(Left) => {
                        asts.push(Ast::new(c, None));
                    }
                    // Otherwise two operands are adjacent: stash the symbol in
                    // `temp` and process an implicit Concat first (below).
                    _ => {
                        temp = Some(c);
                        c = Operator(Concat);
                    }
                }
                prev = if temp.is_none() { c } else { temp.unwrap() }
            }
            if c.is_operator() {
                if let Operator(func) = c {
                    match func {
                        Left => ops.push(func),
                        _ => {
                            // Reduce every stacked operator with priority >= ours,
                            // stopping (and discarding) at an opening paren.
                            while !ops.is_empty()
                                && func.priority() <= ops.last().unwrap().priority()
                            {
                                let func = ops.pop().unwrap();
                                if let Left = func {
                                    break;
                                }
                                func.eval(&mut asts);
                            }
                            if func != Right {
                                ops.push(func);
                            }
                            // For an implicit Concat, now push the deferred symbol.
                            if func == Concat {
                                asts.push(Ast::new(temp.unwrap(), None));
                            } else {
                                prev = c;
                            }
                        }
                    }
                }
            }
        }
        // Reduce whatever operators remain.
        while !ops.is_empty() {
            let func = ops.pop().unwrap();
            func.eval(&mut asts);
        }
        Re {
            ast: asts[0].clone(),
        }
    }
    // Borrow the underlying AST (used by the NFA conversion).
    fn ast(&self) -> &Ast<ReToken> {
        &self.ast
    }
}
impl From<Re> for Nfa<usize, char> {
    /// Thompson-style construction: recursively converts the AST into an NFA,
    /// allocating fresh state ids from a shared counter.
    fn from(re: Re) -> Self {
        use ReOperator::*;
        use ReToken::*;
        // `id` is the next unused state id; each case advances it by however
        // many fresh states it allocates.
        fn from(ast: &Ast<ReToken>, id: &mut usize) -> Nfa<usize, char> {
            match ast.token() {
                &Symbol(a) => {
                    // Two fresh states joined by one symbol transition.
                    let result = Nfa::new(
                        *id,
                        hashset! {*id+1},
                        hashmap! {
                            (*id,Transition::Symbol(a)) => hashset! {*id+1}
                        },
                    );
                    // Consume two state id
                    *id += 2;
                    result
                }
                Operator(Concat) => {
                    let children = ast.children().unwrap();
                    let (l, r) = (from(&children[0], id), from(&children[1], id));
                    // Concatenation reuses existing states.
                    l.concat(r)
                }
                Operator(Alter) => {
                    let children = ast.children().unwrap();
                    let (l, r) = (from(&children[0], id), from(&children[1], id));
                    // Union allocates one extra state.
                    let result = l.union(r, *id);
                    *id += 1;
                    result
                }
                Operator(Star) => {
                    let children = ast.children().unwrap();
                    let leaf = from(&children[0], id);
                    // Star allocates two extra states.
                    let result = leaf.star(*id, *id + 1);
                    *id += 2;
                    result
                }
                // Parentheses never survive into the finished AST.
                _ => unreachable!(),
            }
        }
        from(re.ast(), &mut 0)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_re_parse() {
        // `(1*2)|3` must parse as Alter(Concat(Star(1), 2), 3).
        let re = Re::new("(1*2)|3");
        let ast = Ast::new(
            Operator(Alter),
            Some(vec![
                Rc::new(Ast::new(
                    Operator(Concat),
                    Some(vec![
                        Rc::new(Ast::new(
                            Operator(Star),
                            Some(vec![Rc::new(Ast::new(Symbol('1'), None))]),
                        )),
                        Rc::new(Ast::new(Symbol('2'), None)),
                    ]),
                )),
                Rc::new(Ast::new(Symbol('3'), None)),
            ]),
        );
        assert_eq!(Re { ast }, re);
    }
    #[test]
    fn test_nfa_from_re() {
        use crate::re::Re;
        // Expected Thompson construction for `a*b*` with deterministic ids.
        let start = 2;
        let accept_states = hashset! {7};
        let transitions = hashmap! {
            (0,Transition::Symbol('a')) => hashset!{1},
            (4,Transition::Symbol('b')) => hashset!{5},
            (6,Transition::Epsilon) => hashset!{4,7},
            (1,Transition::Epsilon) => hashset!{2},
            (5,Transition::Epsilon) => hashset!{6},
            (2,Transition::Epsilon) => hashset!{0,3},
            (3,Transition::Epsilon) => hashset!{6},
        };
        assert_eq!(
            Nfa::new(start, accept_states, transitions),
            Nfa::from(Re::new("a*b*"))
        );
    }
}
| true |
48599e09f880e21259945e6f7a2b48599ac35002
|
Rust
|
sepiggy/rust-basic-datacamp
|
/ch03/demo01variables/src/main.rs
|
UTF-8
| 1,139 | 3.640625 | 4 |
[] |
no_license
|
// Constants require an explicit type and are inlined at their use sites.
// NOTE(review): only referenced from the commented-out `mut` demo below.
const MAX_POINTS: i32 = 100_000;
//region mut
// fn main() {
// let mut x = 5;
// println!("The value of x is {}", x);
// println!("The max points is {}", MAX_POINTS);
// x = 100;
// }
//endregion
//region shadowing
// fn main() {
// let x = 5;
// let x = x + 1;
// let x = x * 2;
// println!("The value of x is {}", x);
// let spaces = " ";
// let spaces = spaces.len();
// println!("The value of spaces is {}", spaces);
// }
//endregion
//region 类型标注
// fn main() {
// let guess = "42".parse().expect("Not a number");
// println!("{}", guess);
// }
//endregion
//region 标量类型
// fn main() {
// let x = 2.0;
// let y: f32 = 3.0;
// }
//endregion
//region Tuple
// fn main() {
// let tup = (500, 6.4, 1);
// // 访问Tuple中的元素
// println!("{}, {}, {}", tup.0, tup.1, tup.2);
//
// // Tuple的解构
// let (x, y, z) = tup;
// println!("{}, {}, {}", x, y, z);
// }
//endregion
//region 数组
fn main() {
    // Arrays are fixed-size and live on the stack.
    let arr = [1, 2, 3, 4, 5];
    // `[3; 5]` builds [3, 3, 3, 3, 3], shadowing the previous binding.
    let arr = [3; 5];
    println!("{}", arr[4]);
}
| true |
783bd379299c84b142c9b3df973b79f4e65359d2
|
Rust
|
samwho/rust-lisp-with-traits
|
/examples/map_reduce.rs
|
UTF-8
| 243 | 2.859375 | 3 |
[] |
no_license
|
use lisp::prelude::*;
/// Returns its argument incremented by one.
fn add_one(a: i32) -> i32 {
    1 + a
}
fn main() {
    let v = vec![1, 2, 3];
    // Lisp-style evaluation: apply `add_one` to every element of `v`.
    let res = eval((map, add_one, v));
    // let res = eval((reduce, 0, add, (map, add_one, vec![1, 2, 3])));
    println!("{:?}", res);
}
| true |
b631dd0a70e28e731bf5f9a3effeeba8665516c3
|
Rust
|
humantree/rusty-boy
|
/src/registers.rs
|
UTF-8
| 1,942 | 3.359375 | 3 |
[
"MIT"
] |
permissive
|
use self::RegisterPair::*;
use std::ops::{Index, IndexMut};
/// Names for the individual 8-bit registers.
#[derive(Clone, Copy, Debug)]
pub enum Register { A, B, C, D, E, H, L }
/// Names for the 16-bit register pairs.
#[derive(Clone, Copy, Debug)]
pub enum RegisterPair { BC, DE, HL }
/// The CPU's 8-bit register file.
#[derive(Debug)]
pub struct Registers {
    pub a: u8,
    pub b: u8,
    pub c: u8,
    pub d: u8,
    pub e: u8,
    pub h: u8,
    pub l: u8,
}
impl Registers {
    /// Creates a register file with every register zeroed.
    pub fn new() -> Registers {
        Registers { a: 0, b: 0, c: 0, d: 0, e: 0, h: 0, l: 0 }
    }
    /// Reads a 16-bit register pair (first register is the high byte).
    pub fn pair(&self, rp: RegisterPair) -> u16 {
        let (hi, lo) = match rp {
            RegisterPair::BC => (self.b, self.c),
            RegisterPair::DE => (self.d, self.e),
            RegisterPair::HL => (self.h, self.l),
        };
        (u16::from(hi) << 8) | u16::from(lo)
    }
    /// Writes a 16-bit value into a register pair (high byte first).
    pub fn set_pair(&mut self, rp: RegisterPair, rhs: u16) {
        let hi = (rhs >> 8) as u8;
        let lo = rhs as u8;
        match rp {
            RegisterPair::BC => { self.b = hi; self.c = lo; }
            RegisterPair::DE => { self.d = hi; self.e = lo; }
            RegisterPair::HL => { self.h = hi; self.l = lo; }
        }
    }
}
impl Index<Register> for Registers {
    type Output = u8;
    // Read a register by name, e.g. `regs[Register::A]`.
    fn index(&self, r: Register) -> &u8 {
        use self::Register::*;
        match r {
            A => &self.a,
            B => &self.b,
            C => &self.c,
            D => &self.d,
            E => &self.e,
            H => &self.h,
            L => &self.l,
        }
    }
}
impl IndexMut<Register> for Registers {
    // Write a register by name, e.g. `regs[Register::A] = 0xFF`.
    fn index_mut(&mut self, r: Register) -> &mut u8 {
        use self::Register::*;
        match r {
            A => &mut self.a,
            B => &mut self.b,
            C => &mut self.c,
            D => &mut self.d,
            E => &mut self.e,
            H => &mut self.h,
            L => &mut self.l,
        }
    }
}
| true |
790589bd8ce2519715243f81fe218e8a242e4cac
|
Rust
|
microsoft/synthetic-data-showcase
|
/packages/core/src/utils/reporting/processing_stopped_error.rs
|
UTF-8
| 891 | 2.71875 | 3 |
[
"MIT",
"LicenseRef-scancode-generic-cla"
] |
permissive
|
use std::fmt::{Debug, Display, Formatter, Result as FmtResult};
#[cfg(feature = "pyo3")]
use pyo3::exceptions::PyIOError;
#[cfg(feature = "pyo3")]
use pyo3::prelude::*;
/// Indicates that a processing step that reports progress
/// has been stopped
#[derive(Default)]
pub struct ProcessingStoppedError;
impl Display for ProcessingStoppedError {
    fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
        f.write_str("processing has been stopped")
    }
}
impl Debug for ProcessingStoppedError {
    // Debug output intentionally mirrors the Display message.
    fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
        Display::fmt(self, f)
    }
}
#[cfg(feature = "pyo3")]
impl From<ProcessingStoppedError> for PyErr {
    // When the pyo3 bindings are enabled, a stop surfaces to Python as an
    // IOError carrying the Display message.
    fn from(err: ProcessingStoppedError) -> PyErr {
        PyIOError::new_err(err.to_string())
    }
}
/// Result that wraps something that can be stopped
pub type StoppableResult<T> = Result<T, ProcessingStoppedError>;
| true |
13e037782860e94b96b534dde49917678208954d
|
Rust
|
germangb/lua-rs
|
/src/error.rs
|
UTF-8
| 1,439 | 3 | 3 |
[
"MIT"
] |
permissive
|
use ffi;
use std::error::Error as StdError;
use std::{fmt, io, io::ErrorKind};
/// All error conditions surfaced by this Lua binding.
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum Error {
    /// Error during UTF-8 encoding/decoding
    Utf8,
    /// Error during IO
    Io(io::ErrorKind),
    /// Lua program execution error
    Runtime,
    /// Malformed lua syntax
    Syntax,
    /// Internal memory error
    Memory,
    /// Garbage collector error
    Gc,
    /// Type error
    Type,
}
impl Error {
    /// Maps a raw Lua status code (`LUA_ERR*`) to an [`Error`].
    ///
    /// # Panics
    /// `unreachable!` on any other code — including `LUA_OK`; callers must
    /// only pass failing status codes. NOTE(review): confirm no other status
    /// (e.g. `LUA_ERRERR` or `LUA_YIELD`) can reach this point.
    #[inline]
    pub fn from_lua_result(res: ::std::os::raw::c_int) -> Error {
        match res as _ {
            ffi::LUA_ERRSYNTAX => Error::Syntax,
            ffi::LUA_ERRRUN => Error::Runtime,
            ffi::LUA_ERRMEM => Error::Memory,
            ffi::LUA_ERRGCMM => Error::Gc,
            _ => unreachable!(),
        }
    }
}
impl From<io::Error> for Error {
    // Keeps only the `io::ErrorKind`; the original error message is dropped.
    fn from(err: io::Error) -> Self {
        Error::Io(err.kind())
    }
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
Error::Utf8 => write!(f, "UTF-8 error"),
Error::Io(e) => write!(f, "IO error: {:?}", e),
Error::Runtime => write!(f, "Runtime error"),
Error::Syntax => write!(f, "Syntax error"),
Error::Memory => write!(f, "Memory error"),
Error::Gc => write!(f, "Garbage collector error"),
Error::Type => write!(f, "Unexpected type"),
}
}
}
| true |
2fd087746c69b6890403a4f2904340a135bca4aa
|
Rust
|
zerosign/envit
|
/src/de.rs
|
UTF-8
| 5,110 | 3.4375 | 3 |
[
"MIT"
] |
permissive
|
use std::{
cmp::Ordering,
collections::{BinaryHeap, HashMap, HashSet},
io::{BufRead, Cursor},
iter::FromIterator,
};
/// A single parsed environment entry: a key path and its raw value.
#[derive(Debug, Clone)]
pub(crate) struct EnvPair {
    // Key path split on `key_sep`, e.g. "FOO_BAR" -> ["FOO", "BAR"].
    fields: Vec<String>,
    // Everything after the first `kv_sep`.
    value: String,
}
impl EnvPair {
    /// Parses one `KEY<kv_sep>VALUE` line.
    ///
    /// Returns `None` for comment lines (after trimming, starting with
    /// `comment`) and for lines containing no `kv_sep` at all.
    #[inline]
    pub fn from_str<'a>(line: &'a str, comment: char, kv_sep: char, key_sep: &str) -> Option<Self> {
        let line = line.trim();
        if line.starts_with(comment) {
            return None;
        }
        // Split into at most two pieces: key path and value. Working on the
        // iterator directly avoids the intermediate Vec<String> and the
        // extra clone of `value` the previous version performed.
        let mut parts = line.splitn(2, kv_sep);
        match (parts.next(), parts.next()) {
            (Some(key), Some(value)) => {
                let fields = key.split(key_sep).map(str::to_string).collect();
                Some(Self {
                    fields,
                    value: value.to_string(),
                })
            }
            _ => None,
        }
    }
}
impl PartialEq for EnvPair {
    // Equality considers only the key path; `value` is deliberately ignored,
    // consistent with the ordering impls.
    #[inline]
    fn eq(&self, other: &EnvPair) -> bool {
        self.fields.eq(&other.fields)
    }
}
impl Eq for EnvPair {}
impl PartialOrd for EnvPair {
    // Delegates to `Ord` so the two orderings can never disagree.
    fn partial_cmp(&self, other: &EnvPair) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
impl Ord for EnvPair {
    // Lexicographic order over the key path; this is what sorts entries in
    // the BinaryHeap used by `Envs::from_reader`.
    fn cmp(&self, other: &EnvPair) -> Ordering {
        self.fields.cmp(&other.fields)
    }
}
/// This abstraction give us contract that
/// if the parent not exists, we could create only the branch
/// This save us to do combinatoric search for a parent sets
/// in the segment
///
/// the parent shouldn't be empty even it's first node.
///
/// [0, 1, 2] -> Segment { parent : [0, 1], branch: [2] }
///
/// we could savely said that if hole is exists, then create a new node based on it.
/// when traversing Segment will gradually move hole to parent.
///
/// example:
///
/// [[0, 1, 2], [0, 1, 3], [0, 1, 4, 5], [0, 1, 4, 6], [0, 1, 7, 8], [0, 1, 7, 9], [0, 1, 7, 10], [0, 11, 12, 13]]
///
/// hole: [0, 1], parent: [], leaf: 2
/// hole: [], parent: [0, 1], leaf: 3
/// hole: [], parent: [0, 1, 4], leaf: 6
/// hole: [7], parent: [0, 1], leaf: 8
/// hole: [], parent: [0, 1, 7], leaf: 9
/// hole: [], parent: [0, 1, 7], leaf: 10
/// hole: [11, 12], parent: [0], leaf: 13
///
#[derive(Debug, Clone)]
pub(crate) struct Segment {
    // Field indices with no existing parent node yet (see examples above).
    hole: Vec<usize>,
    // Longest already-known prefix of the key path.
    parent: Vec<usize>,
    // Index of the final (leaf) field of the path.
    leaf: usize,
}
/// Parsed environment, with field names interned into an index table.
#[derive(Debug, Clone)]
pub struct Envs<'a> {
    // Interned field names: index -> name (filled by `from_reader`).
    reverse: Vec<&'a str>,
    // Tree structure over `reverse` indices — NOTE(review): never populated
    // by the current (unfinished) implementation.
    indices: Vec<Segment>,
    // Raw values — NOTE(review): also never populated yet.
    data: Vec<&'a str>,
}
impl<'a> Default for Envs<'a> {
    /// An empty `Envs`. `Vec::new` performs no allocation, so this is free;
    /// the previous `Vec::with_capacity(0)` was an unidiomatic spelling of
    /// the same thing.
    #[inline]
    fn default() -> Self {
        Self {
            reverse: Vec::new(),
            indices: Vec::new(),
            data: Vec::new(),
        }
    }
}
impl<'a> Envs<'a> {
    /// Parses from an in-memory string by wrapping it in a `Cursor`.
    #[inline]
    pub fn from_str<'c>(
        raw: &'c str,
        comment: char,
        kv_sep: char,
        key_sep: &str,
    ) -> Result<Self, ()> {
        Self::from_reader(Cursor::new(raw), comment, kv_sep, key_sep)
    }
    /// Parses env-style lines from `reader`.
    ///
    /// NOTE(review): this function is unfinished — it builds the interned
    /// name table (sorted via the `BinaryHeap` of `EnvPair`s), discards it,
    /// and always returns `Err(())`. `value` is bound but never used.
    pub fn from_reader<'c, R>(
        reader: R,
        comment: char,
        kv_sep: char,
        key_sep: &str,
    ) -> Result<Self, ()>
    where
        R: BufRead,
    {
        // Heap-ordering by key path groups entries with shared prefixes.
        let data: BinaryHeap<EnvPair> = BinaryHeap::from_iter(reader.lines().filter_map(|r| {
            r.ok()
                .as_ref()
                .and_then(move |line| EnvPair::from_str(line, comment, kv_sep, key_sep))
        }));
        let mut inner = Self::default();
        // Maps a field name to its interned index in `inner.reverse`.
        let mut reverse_idx = HashMap::<String, usize>::new();
        //
        // env pair fields will always direct to a leaf node in the branch/tree
        // so, the only possible parent node would be fields[0-k-1] or
        // any subsequences fields that exists in the parent sets
        //
        // logic:
        // sets : {}
        // fields : [0 ... ii]
        //
        // sets : {[0 ... ii-1]}
        // fields : [0 ... ii-1 ... k]
        //
        // sets : {[0 ... ii-1], [ii-1 ... k-1], [0 ... k-1]}
        //
        // example:
        // [[0, 1, 2], [0, 1, 3], [0, 1, 4, 5], [0, 1, 4, 6], [0, 1, 7, 8], [0, 1, 7, 9], [0, 1, 7, 10], [0, 11, 12, 13]]
        //
        let mut parents = HashSet::<Vec<usize>>::new();
        let mut idx = 0;
        for EnvPair { fields, value } in data.iter() {
            let mut indices: Vec<usize> = Vec::new();
            for field in fields {
                // Intern the field name, allocating the next index on miss.
                let ridx = match reverse_idx.get(field) {
                    Some(ridx) => *ridx,
                    _ => {
                        let old_idx = idx;
                        reverse_idx.insert(field.to_string(), idx);
                        inner.reverse.push(field);
                        idx += 1;
                        old_idx
                    }
                };
                indices.push(ridx);
                // Record every leftmost prefix as a known parent path.
                if !parents.contains(indices.as_slice()) {
                    parents.insert(indices.clone());
                }
            }
            // do combinatoric checks for each leftmost subsequences &
        }
        Err(())
    }
}
| true |
1ef9a92601374134789f461e7cf8e898e170d1a9
|
Rust
|
delventhalz/pirate-talk
|
/processor-rust/src/handler.rs
|
UTF-8
| 1,998 | 2.703125 | 3 |
[
"CC-BY-4.0",
"Apache-2.0"
] |
permissive
|
use std::iter::repeat;
use sawtooth_sdk::processor::handler::ApplyError;
use sawtooth_sdk::processor::handler::TransactionContext;
use sawtooth_sdk::processor::handler::TransactionHandler;
use sawtooth_sdk::messages::processor::TpProcessRequest;
/// Translates a message into "pirate talk": prefixes it with "YA" plus a run
/// of R's, appends a run of !'s, and upper-cases everything. The number of
/// R's and !'s grows with the message length (len/3 + 1 and len/5 + 1).
fn piratify(msg: String) -> String {
    // `str::repeat` replaces the hand-rolled `iter::repeat().take().collect()`.
    let rs = "r".repeat(msg.len() / 3 + 1);
    let excls = "!".repeat(msg.len() / 5 + 1);
    format!("ya{} {}{}", rs, msg, excls).to_uppercase()
}
/// Transaction handler for the "pirate-talk" Sawtooth transaction family.
pub struct PirateHandler {
    family_name: String,
    family_versions: Vec<String>,
    namespaces: Vec<String>,
}
impl PirateHandler {
    /// Builds a handler registered under family "pirate-talk", version 0.0,
    /// owning the "aaaaaa" state namespace.
    pub fn new() -> PirateHandler {
        let family_name = String::from("pirate-talk");
        let family_versions = vec![String::from("0.0")];
        let namespaces = vec![String::from("aaaaaa")];
        PirateHandler { family_name, family_versions, namespaces }
    }
}
impl TransactionHandler for PirateHandler {
    fn family_name(&self) -> String {
        self.family_name.clone()
    }
    fn family_versions(&self) -> Vec<String> {
        self.family_versions.clone()
    }
    fn namespaces(&self) -> Vec<String> {
        self.namespaces.clone()
    }
    /// Pirate-translates the UTF-8 payload and stores it in state at an
    /// address derived from the transaction signature.
    fn apply(
        &self,
        txn: &TpProcessRequest,
        context: &mut TransactionContext,
    ) -> Result<(), ApplyError> {
        let signature = txn.get_signature();
        // Reject payloads that are not valid UTF-8.
        let message = match String::from_utf8(txn.get_payload().to_vec()) {
            Err(e) => return Err(ApplyError::InvalidTransaction(e.to_string())),
            Ok(payload) => payload
        };
        // NOTE(review): assumes the signature is at least 96 chars long;
        // `signature[96..]` panics otherwise — confirm Sawtooth guarantees this.
        let uuid = signature[96..].to_string();
        // Address = 6-char namespace + 32 filler chars + signature suffix.
        let filler = repeat("a").take(32).collect::<String>();
        let address = "aaaaaa".to_string() + &filler + &uuid;
        let pirate_message = piratify(message);
        println!("{}", pirate_message);
        match context.set_state(&address, pirate_message.as_bytes()) {
            Err(e) => Err(ApplyError::InternalError(e.to_string())),
            Ok(_) => Ok(())
        }
    }
}
| true |
556feee412cb401cb9ad76ebc1580f0956ddb2d3
|
Rust
|
iCodeIN/slabmap
|
/src/lib.rs
|
UTF-8
| 21,550 | 3.953125 | 4 |
[
"MIT",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
/*! This crate provides the type [`SlabMap`].
[`SlabMap`] is HashMap-like collection that automatically determines the key.
# Examples
```
use slabmap::SlabMap;
let mut s = SlabMap::new();
let key_a = s.insert("aaa");
let key_b = s.insert("bbb");
assert_eq!(s[key_a], "aaa");
assert_eq!(s[key_b], "bbb");
for (key, value) in &s {
println!("{} -> {}", key, value);
}
assert_eq!(s.remove(key_a), Some("aaa"));
assert_eq!(s.remove(key_a), None);
```
*/
use std::{fmt::Debug, iter::FusedIterator, mem::replace};
/**
A fast HashMap-like collection that automatically determines the key.
*/
#[derive(Clone)]
pub struct SlabMap<T> {
    // Dense storage; vacant slots are threaded into a free list.
    entries: Vec<Entry<T>>,
    // Head of the vacant free list (INVALID_INDEX when there is none).
    next_vacant_idx: usize,
    // Number of occupied entries.
    len: usize,
    // Vacant slots not yet coalesced into runs; 0 means iteration is
    // fully optimized.
    non_optimized: usize,
}
// Sentinel marking "no vacant slot".
const INVALID_INDEX: usize = usize::MAX;
#[derive(Clone)]
enum Entry<T> {
    Occupied(T),
    // First slot of a coalesced vacant run; `vacant_body_len` body slots
    // follow before the run's tail.
    VacantHead { vacant_body_len: usize },
    // Last slot of a run (or an isolated vacant slot), linking to the next
    // vacant slot in the free list.
    VacantTail { next_vacant_idx: usize },
}
impl<T> SlabMap<T> {
    /// Constructs a new, empty SlabMap<T>.
    /// The SlabMap will not allocate until elements are pushed onto it.
    #[inline]
    pub fn new() -> Self {
        Self {
            entries: Vec::new(),
            next_vacant_idx: INVALID_INDEX,
            len: 0,
            non_optimized: 0,
        }
    }
    /// Constructs a new, empty SlabMap<T> with the specified capacity.
    #[inline]
    pub fn with_capacity(capacity: usize) -> Self {
        Self {
            entries: Vec::with_capacity(capacity),
            next_vacant_idx: INVALID_INDEX,
            len: 0,
            non_optimized: 0,
        }
    }
    /// Returns the number of elements the SlabMap can hold without reallocating.
    #[inline]
    pub fn capacity(&self) -> usize {
        self.entries.capacity()
    }
    /// Reserves capacity for at least additional more elements to be inserted in the given SlabMap<T>.
    ///
    /// # Panics
    /// Panics if the new capacity overflows usize.
    #[inline]
    pub fn reserve(&mut self, additional: usize) {
        self.entries.reserve(self.entries_additional(additional));
    }
    /// Reserves the minimum capacity for exactly additional more elements to be inserted in the given SlabMap<T>.
    ///
    /// # Panics
    /// Panics if the new capacity overflows usize.
    #[inline]
    pub fn reserve_exact(&mut self, additional: usize) {
        self.entries
            .reserve_exact(self.entries_additional(additional));
    }
    // Translates "room for `additional` more values" into extra `entries`
    // slots, crediting slots that are allocated but currently vacant.
    #[inline]
    fn entries_additional(&self, additional: usize) -> usize {
        additional.saturating_sub(self.entries.len() - self.len)
    }
    /// Returns the number of elements in the SlabMap.
    ///
    /// # Examples
    /// ```
    /// use slabmap::SlabMap;
    ///
    /// let mut s = SlabMap::new();
    /// assert_eq!(s.len(), 0);
    ///
    /// let key1 = s.insert(10);
    /// let key2 = s.insert(15);
    ///
    /// assert_eq!(s.len(), 2);
    ///
    /// s.remove(key1);
    /// assert_eq!(s.len(), 1);
    ///
    /// s.remove(key2);
    /// assert_eq!(s.len(), 0);
    /// ```
    #[inline]
    pub fn len(&self) -> usize {
        self.len
    }
    /// Returns true if the SlabMap contains no elements.
    ///
    /// # Examples
    /// ```
    /// use slabmap::SlabMap;
    ///
    /// let mut s = SlabMap::new();
    /// assert_eq!(s.is_empty(), true);
    ///
    /// let key = s.insert("a");
    /// assert_eq!(s.is_empty(), false);
    ///
    /// s.remove(key);
    /// assert_eq!(s.is_empty(), true);
    /// ```
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len == 0
    }
    /// Returns a reference to the value corresponding to the key.
    ///
    /// # Examples
    /// ```
    /// use slabmap::SlabMap;
    ///
    /// let mut s = SlabMap::new();
    /// let key = s.insert(100);
    ///
    /// assert_eq!(s.get(key), Some(&100));
    /// assert_eq!(s.get(key + 1), None);
    /// ```
    #[inline]
    pub fn get(&self, key: usize) -> Option<&T> {
        // Out-of-range keys (via `?`) and vacant slots both read as `None`.
        if let Entry::Occupied(value) = self.entries.get(key)? {
            Some(value)
        } else {
            None
        }
    }
    /// Returns a mutable reference to the value corresponding to the key.
    #[inline]
    pub fn get_mut(&mut self, key: usize) -> Option<&mut T> {
        if let Entry::Occupied(value) = self.entries.get_mut(key)? {
            Some(value)
        } else {
            None
        }
    }
    /// Returns true if the SlabMap contains a value for the specified key.
    ///
    /// # Examples
    /// ```
    /// use slabmap::SlabMap;
    ///
    /// let mut s = SlabMap::new();
    /// let key = s.insert(100);
    ///
    /// assert_eq!(s.contains_key(key), true);
    /// assert_eq!(s.contains_key(key + 1), false);
    /// ```
    #[inline]
    pub fn contains_key(&self, key: usize) -> bool {
        self.get(key).is_some()
    }
    /// Inserts a value into the SlabMap.
    ///
    /// Returns the key associated with the value.
    ///
    /// # Examples
    /// ```
    /// use slabmap::SlabMap;
    ///
    /// let mut s = SlabMap::new();
    /// let key_abc = s.insert("abc");
    /// let key_xyz = s.insert("xyz");
    ///
    /// assert_eq!(s[key_abc], "abc");
    /// assert_eq!(s[key_xyz], "xyz");
    /// ```
    pub fn insert(&mut self, value: T) -> usize {
        self.insert_with_key(|_| value)
    }
    /// Inserts a value given by `f` into the SlabMap. The key to be associated with the value is passed to `f`.
    ///
    /// Returns the key associated with the value.
    ///
    /// # Examples
    /// ```
    /// use slabmap::SlabMap;
    ///
    /// let mut s = SlabMap::new();
    /// let key = s.insert_with_key(|key| format!("my key is {}", key));
    ///
    /// assert_eq!(s[key], format!("my key is {}", key));
    /// ```
    #[inline]
    pub fn insert_with_key(&mut self, f: impl FnOnce(usize) -> T) -> usize {
        let idx;
        if self.next_vacant_idx < self.entries.len() {
            // Reuse the first slot on the vacant free list.
            idx = self.next_vacant_idx;
            self.next_vacant_idx = match self.entries[idx] {
                Entry::VacantHead { vacant_body_len } => {
                    // Head of a vacant run: the adjacent slot becomes the new
                    // head while body entries remain; either way the next
                    // vacant slot is `idx + 1`.
                    if vacant_body_len > 0 {
                        self.entries[idx + 1] = Entry::VacantHead {
                            vacant_body_len: vacant_body_len - 1,
                        };
                    }
                    idx + 1
                }
                Entry::VacantTail { next_vacant_idx } => next_vacant_idx,
                Entry::Occupied(_) => unreachable!(),
            };
            self.entries[idx] = Entry::Occupied(f(idx));
            self.non_optimized = self.non_optimized.saturating_sub(1);
        } else {
            // No vacant slot: append at the end.
            idx = self.entries.len();
            self.entries.push(Entry::Occupied(f(idx)));
        }
        self.len += 1;
        idx
    }
    /// Removes a key from the SlabMap, returning the value at the key if the key was previously in the SlabMap.
    ///
    /// # Examples
    /// ```
    /// use slabmap::SlabMap;
    ///
    /// let mut s = SlabMap::new();
    /// let key = s.insert("a");
    /// assert_eq!(s.remove(key), Some("a"));
    /// assert_eq!(s.remove(key), None);
    /// ```
    pub fn remove(&mut self, key: usize) -> Option<T> {
        let is_last = key + 1 == self.entries.len();
        let e = self.entries.get_mut(key)?;
        if !matches!(e, Entry::Occupied(..)) {
            return None;
        }
        self.len -= 1;
        let e = if is_last {
            // Removing the trailing entry shrinks storage instead of growing
            // the vacant list.
            self.entries.pop().unwrap()
        } else {
            // Swap in a vacant slot linked onto the head of the free list.
            let e = replace(
                e,
                Entry::VacantTail {
                    next_vacant_idx: self.next_vacant_idx,
                },
            );
            self.next_vacant_idx = key;
            self.non_optimized += 1;
            e
        };
        if self.is_empty() {
            // Nothing occupied remains: a full reset re-optimizes for free.
            self.clear();
        }
        if let Entry::Occupied(value) = e {
            Some(value)
        } else {
            // Occupancy was checked above.
            unreachable!()
        }
    }
    /// Clears the SlabMap, removing all values and optimize free spaces.
    ///
    /// # Examples
    /// ```
    /// use slabmap::SlabMap;
    ///
    /// let mut s = SlabMap::new();
    /// s.insert(1);
    /// s.insert(2);
    ///
    /// s.clear();
    ///
    /// assert_eq!(s.is_empty(), true);
    /// ```
    pub fn clear(&mut self) {
        // Dropping all entries also empties the vacant list, returning the
        // map to its fully optimized state.
        self.entries.clear();
        self.len = 0;
        self.next_vacant_idx = INVALID_INDEX;
        self.non_optimized = 0;
    }
    /// Clears the SlabMap, returning all values as an iterator and optimize free spaces.
    ///
    /// # Examples
    /// ```
    /// use slabmap::SlabMap;
    ///
    /// let mut s = SlabMap::new();
    /// s.insert(10);
    /// s.insert(20);
    ///
    /// let d: Vec<_> = s.drain().collect();
    ///
    /// assert_eq!(s.is_empty(), true);
    /// assert_eq!(d, vec![10, 20]);
    /// ```
    pub fn drain(&mut self) -> Drain<T> {
        // Reset bookkeeping up front; the returned `Drain` owns removal of
        // the underlying entries.
        let len = self.len;
        self.len = 0;
        self.next_vacant_idx = INVALID_INDEX;
        self.non_optimized = 0;
        Drain {
            iter: self.entries.drain(..),
            len,
        }
    }
/// Retains only the elements specified by the predicate and optimize free spaces.
///
/// # Examples
/// ```
/// use slabmap::SlabMap;
///
/// let mut s = SlabMap::new();
/// s.insert(10);
/// s.insert(15);
/// s.insert(20);
/// s.insert(25);
///
/// s.retain(|_idx, value| *value % 2 == 0);
///
/// let value: Vec<_> = s.values().cloned().collect();
/// assert_eq!(value, vec![10, 20]);
/// ```
pub fn retain(&mut self, f: impl FnMut(usize, &mut T) -> bool) {
let mut f = f;
let mut idx = 0;
let mut idx_vacant_start = 0;
self.next_vacant_idx = INVALID_INDEX;
while let Some(e) = self.entries.get_mut(idx) {
match e {
Entry::VacantTail { .. } => {
idx += 1;
}
Entry::VacantHead { vacant_body_len } => {
idx += *vacant_body_len + 2;
}
Entry::Occupied(value) => {
if f(idx, value) {
self.merge_vacant(idx_vacant_start, idx);
idx += 1;
idx_vacant_start = idx;
} else {
self.entries[idx] = Entry::VacantTail {
next_vacant_idx: INVALID_INDEX,
};
idx += 1;
}
}
}
}
self.entries.truncate(idx_vacant_start);
self.non_optimized = 0;
}
    /// Optimizing the free space for speeding up iterations.
    ///
    /// If the free space has already been optimized, this method does nothing and completes with O(1).
    ///
    /// # Examples
    /// ```
    /// use slabmap::SlabMap;
    /// use std::time::Instant;
    ///
    /// let mut s = SlabMap::new();
    /// const COUNT: usize = 1000000;
    /// for i in 0..COUNT {
    ///     s.insert(i);
    /// }
    /// let keys: Vec<_> = s.keys().take(COUNT - 1).collect();
    /// for key in keys {
    ///     s.remove(key);
    /// }
    ///
    /// s.optimize(); // if comment out this line, `s.values().sum()` to be slow.
    ///
    /// let begin = Instant::now();
    /// let sum: usize = s.values().sum();
    /// println!("sum : {}", sum);
    /// println!("duration : {} ms", (Instant::now() - begin).as_millis());
    /// ```
    pub fn optimize(&mut self) {
        if !self.is_optimized() {
            // `retain` with an always-true predicate rebuilds the vacant runs.
            self.retain(|_, _| true);
        }
    }
    #[inline]
    fn is_optimized(&self) -> bool {
        // `non_optimized` counts vacant slots not yet coalesced into runs.
        self.non_optimized == 0
    }
    // Coalesces entries in `start..end` into a single vacant run
    // (head + body + tail) and links it onto the vacant free list.
    fn merge_vacant(&mut self, start: usize, end: usize) {
        if start < end {
            if start < end - 1 {
                self.entries[start] = Entry::VacantHead {
                    vacant_body_len: end - start - 2,
                }
            }
            self.entries[end - 1] = Entry::VacantTail {
                next_vacant_idx: self.next_vacant_idx,
            };
            self.next_vacant_idx = start;
        }
    }
    /// Gets an iterator over the entries of the SlabMap, sorted by key.
    ///
    /// If you make a large number of [`remove`](SlabMap::remove) calls, [`optimize`](SlabMap::optimize) should be called before calling this function.
    #[inline]
    pub fn iter(&self) -> Iter<T> {
        // `len` lets the iterator report an exact `size_hint`.
        Iter {
            iter: self.entries.iter().enumerate(),
            len: self.len,
        }
    }
    /// Gets a mutable iterator over the entries of the slab, sorted by key.
    ///
    /// If you make a large number of [`remove`](SlabMap::remove) calls, [`optimize`](SlabMap::optimize) should be called before calling this function.
    #[inline]
    pub fn iter_mut(&mut self) -> IterMut<T> {
        IterMut {
            iter: self.entries.iter_mut().enumerate(),
            len: self.len,
        }
    }
    /// Gets an iterator over the keys of the SlabMap, in sorted order.
    ///
    /// If you make a large number of [`remove`](SlabMap::remove) calls, [`optimize`](SlabMap::optimize) should be called before calling this function.
    #[inline]
    pub fn keys(&self) -> Keys<T> {
        Keys(self.iter())
    }
    /// Gets an iterator over the values of the SlabMap.
    ///
    /// If you make a large number of [`remove`](SlabMap::remove) calls, [`optimize`](SlabMap::optimize) should be called before calling this function.
    #[inline]
    pub fn values(&self) -> Values<T> {
        Values(self.iter())
    }
    /// Gets a mutable iterator over the values of the SlabMap.
    ///
    /// If you make a large number of [`remove`](SlabMap::remove) calls, [`optimize`](SlabMap::optimize) should be called before calling this function.
    #[inline]
    pub fn values_mut(&mut self) -> ValuesMut<T> {
        ValuesMut(self.iter_mut())
    }
}
impl<T> Default for SlabMap<T> {
    // Same as `SlabMap::new()`.
    #[inline]
    fn default() -> Self {
        Self::new()
    }
}
impl<T: Debug> Debug for SlabMap<T> {
    // Renders like a map: `{key: value, ...}`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_map().entries(self.iter()).finish()
    }
}
impl<T> std::ops::Index<usize> for SlabMap<T> {
    type Output = T;
    // Panics with "out of index." when the key is vacant or out of range.
    #[inline]
    fn index(&self, index: usize) -> &Self::Output {
        self.get(index).expect("out of index.")
    }
}
impl<T> std::ops::IndexMut<usize> for SlabMap<T> {
    // Panics with "out of index." when the key is vacant or out of range.
    #[inline]
    fn index_mut(&mut self, index: usize) -> &mut Self::Output {
        self.get_mut(index).expect("out of index.")
    }
}
impl<T> IntoIterator for SlabMap<T> {
    type Item = T;
    type IntoIter = IntoIter<T>;
    // Consumes the map, yielding owned values in key order.
    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        IntoIter {
            iter: self.entries.into_iter(),
            len: self.len,
        }
    }
}
impl<'a, T> IntoIterator for &'a SlabMap<T> {
    type Item = (usize, &'a T);
    type IntoIter = Iter<'a, T>;
    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}
impl<'a, T> IntoIterator for &'a mut SlabMap<T> {
    type Item = (usize, &'a mut T);
    type IntoIter = IterMut<'a, T>;
    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        self.iter_mut()
    }
}
/// An owning iterator over the values of a SlabMap.
///
/// This struct is created by the `into_iter` method on [`SlabMap`] (provided by the IntoIterator trait).
pub struct IntoIter<T> {
iter: std::vec::IntoIter<Entry<T>>,
len: usize,
}
impl<T> Iterator for IntoIter<T> {
type Item = T;
#[inline]
fn next(&mut self) -> Option<Self::Item> {
let mut e_opt = self.iter.next();
while let Some(e) = e_opt {
e_opt = match e {
Entry::Occupied(value) => {
self.len -= 1;
return Some(value);
}
Entry::VacantHead { vacant_body_len } => self.iter.nth(vacant_body_len + 1),
Entry::VacantTail { .. } => self.iter.next(),
}
}
None
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
(self.len, Some(self.len))
}
#[inline]
fn count(self) -> usize
where
Self: Sized,
{
self.len
}
}
/// A draining iterator for SlabMap<T>.
///
/// This struct is created by the [`drain`](SlabMap::drain) method on [`SlabMap`].
pub struct Drain<'a, T> {
iter: std::vec::Drain<'a, Entry<T>>,
len: usize,
}
impl<'a, T> Iterator for Drain<'a, T> {
type Item = T;
#[inline]
fn next(&mut self) -> Option<Self::Item> {
let mut e_opt = self.iter.next();
while let Some(e) = e_opt {
e_opt = match e {
Entry::Occupied(value) => {
self.len -= 1;
return Some(value);
}
Entry::VacantHead { vacant_body_len } => self.iter.nth(vacant_body_len + 1),
Entry::VacantTail { .. } => self.iter.next(),
}
}
None
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
(self.len, Some(self.len))
}
#[inline]
fn count(self) -> usize
where
Self: Sized,
{
self.len
}
}
/// An iterator over the entries of a SlabMap.
///
/// This struct is created by the [`iter`](SlabMap::iter) method on [`SlabMap`].
pub struct Iter<'a, T> {
    iter: std::iter::Enumerate<std::slice::Iter<'a, Entry<T>>>,
    // Occupied entries not yet yielded; keeps `size_hint` exact.
    len: usize,
}
impl<'a, T> Iterator for Iter<'a, T> {
    type Item = (usize, &'a T);
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        let mut e_opt = self.iter.next();
        while let Some(e) = e_opt {
            e_opt = match e {
                (key, Entry::Occupied(value)) => {
                    self.len -= 1;
                    return Some((key, value));
                }
                // Skip a coalesced vacant run (body + tail) in one `nth` call.
                (_, Entry::VacantHead { vacant_body_len }) => self.iter.nth(*vacant_body_len + 1),
                (_, Entry::VacantTail { .. }) => self.iter.next(),
            }
        }
        None
    }
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        (self.len, Some(self.len))
    }
    #[inline]
    fn count(self) -> usize
    where
        Self: Sized,
    {
        self.len
    }
}
impl<'a, T> FusedIterator for Iter<'a, T> {}
impl<'a, T> ExactSizeIterator for Iter<'a, T> {}
/// A mutable iterator over the entries of a SlabMap.
///
/// This struct is created by the [`iter_mut`](SlabMap::iter_mut) method on [`SlabMap`].
pub struct IterMut<'a, T> {
    iter: std::iter::Enumerate<std::slice::IterMut<'a, Entry<T>>>,
    // Occupied entries not yet yielded; keeps `size_hint` exact.
    len: usize,
}
impl<'a, T> Iterator for IterMut<'a, T> {
    type Item = (usize, &'a mut T);
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        let mut e_opt = self.iter.next();
        while let Some(e) = e_opt {
            e_opt = match e {
                (key, Entry::Occupied(value)) => {
                    self.len -= 1;
                    return Some((key, value));
                }
                // Skip a coalesced vacant run (body + tail) in one `nth` call.
                (_, Entry::VacantHead { vacant_body_len }) => self.iter.nth(*vacant_body_len + 1),
                (_, Entry::VacantTail { .. }) => self.iter.next(),
            }
        }
        None
    }
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        (self.len, Some(self.len))
    }
    #[inline]
    fn count(self) -> usize
    where
        Self: Sized,
    {
        self.len
    }
}
impl<'a, T> FusedIterator for IterMut<'a, T> {}
impl<'a, T> ExactSizeIterator for IterMut<'a, T> {}
/// An iterator over the keys of a SlabMap.
///
/// This struct is created by the [`keys`](SlabMap::keys) method on [`SlabMap`].
pub struct Keys<'a, T>(Iter<'a, T>);
impl<'a, T> Iterator for Keys<'a, T> {
    type Item = usize;
    // Every method delegates to the entry iterator, keeping only the key.
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        self.0.next().map(|(k, _)| k)
    }
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.0.size_hint()
    }
    #[inline]
    fn count(self) -> usize
    where
        Self: Sized,
    {
        self.0.count()
    }
}
impl<'a, T> FusedIterator for Keys<'a, T> {}
impl<'a, T> ExactSizeIterator for Keys<'a, T> {}
/// An iterator over the values of a SlabMap.
///
/// This struct is created by the [`values`](SlabMap::values) method on [`SlabMap`].
pub struct Values<'a, T>(Iter<'a, T>);
impl<'a, T> Iterator for Values<'a, T> {
    type Item = &'a T;
    // Every method delegates to the entry iterator, keeping only the value.
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        self.0.next().map(|(_, v)| v)
    }
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.0.size_hint()
    }
    #[inline]
    fn count(self) -> usize
    where
        Self: Sized,
    {
        self.0.count()
    }
}
impl<'a, T> FusedIterator for Values<'a, T> {}
impl<'a, T> ExactSizeIterator for Values<'a, T> {}
/// A mutable iterator over the values of a SlabMap.
///
/// This struct is created by the [`values_mut`](SlabMap::values_mut) method on [`SlabMap`].
pub struct ValuesMut<'a, T>(IterMut<'a, T>);
impl<'a, T> Iterator for ValuesMut<'a, T> {
    type Item = &'a mut T;
    // Every method delegates to the entry iterator, keeping only the value.
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        self.0.next().map(|(_, v)| v)
    }
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.0.size_hint()
    }
    #[inline]
    fn count(self) -> usize
    where
        Self: Sized,
    {
        self.0.count()
    }
}
impl<'a, T> FusedIterator for ValuesMut<'a, T> {}
impl<'a, T> ExactSizeIterator for ValuesMut<'a, T> {}
| true |
c0680c9ab3f248a3bd0ac1a643a6085d3a8b02a0
|
Rust
|
jonhoo/bystander
|
/src/help_queue.rs
|
UTF-8
| 7,656 | 2.890625 | 3 |
[] |
no_license
|
use crate::OperationRecordBox;
use std::sync::atomic::{AtomicPtr, Ordering};
/// A singly-linked queue node. `value` and `enq_id` are `None` only for the
/// sentinel node that `head`/`tail` initially point at.
struct Node<T> {
    value: Option<T>,
    next: AtomicPtr<Self>,
    enq_id: Option<usize>,
}
impl<T> Node<T> {
    /// Heap-allocates a node holding `value`, tagged with the id of the
    /// enqueuing thread, and leaks it as a raw pointer (nodes are never freed).
    fn new(value: T, enq_id: usize) -> *mut Self {
        Box::into_raw(Box::new(Self {
            value: Some(value),
            next: AtomicPtr::new(std::ptr::null_mut()),
            enq_id: Some(enq_id),
        }))
    }
    /// The empty marker node used as the initial head/tail.
    fn sentinel() -> Self {
        Self {
            value: None,
            next: AtomicPtr::new(std::ptr::null_mut()),
            enq_id: None,
        }
    }
}
/// Per-thread operation descriptor: announces a (possibly still pending)
/// enqueue so that other threads can help it complete.
struct OpDesc<T> {
    // Priority ticket taken from max_phase()+1; None until the slot is used.
    phase: Option<u64>,
    pending: bool,
    // Currently always true: enqueue is the only helpable operation.
    enqueue: bool,
    // The node to link in; Some while an enqueue is announced.
    node: Option<*mut Node<T>>,
}
/// Operations are linear in N.
pub(crate) struct WaitFreeHelpQueue<T, const N: usize> {
    head: AtomicPtr<Node<T>>,
    tail: AtomicPtr<Node<T>>,
    // One announcement slot per participating thread id (0..N).
    state: [AtomicPtr<OpDesc<T>>; N],
}
impl<T, const N: usize> WaitFreeHelpQueue<T, N>
where
    T: Copy + PartialEq + Eq,
{
    /// Creates an empty queue: head and tail share one leaked sentinel node
    /// and every thread slot starts with a completed (non-pending) descriptor.
    pub(crate) fn new() -> Self {
        use std::convert::TryInto;
        let sentinel = Box::into_raw(Box::new(Node::sentinel()));
        let head = AtomicPtr::new(sentinel);
        let tail = AtomicPtr::new(sentinel);
        // TODO: Once consts can depend on T, make this constant instead of going via Vec
        let state: [AtomicPtr<OpDesc<T>>; N] = (0..N)
            .map(|_| {
                AtomicPtr::new(Box::into_raw(Box::new(OpDesc {
                    phase: None,
                    pending: false,
                    enqueue: true,
                    node: None,
                })))
            })
            .collect::<Vec<_>>()
            .try_into()
            .expect("gave N elements");
        Self { head, tail, state }
    }
    /// Enqueues `value` on behalf of thread `id`: publishes an operation
    /// descriptor, then helps all operations up to its phase so the enqueue
    /// completes even if this thread stalls afterwards.
    pub(crate) fn enqueue(&self, id: usize, value: T) {
        // Take a phase ticket later than every currently announced operation.
        let phase = self.max_phase().map_or(0, |p| p + 1);
        self.state[id].store(
            Box::into_raw(Box::new(OpDesc {
                phase: Some(phase),
                pending: true,
                enqueue: true,
                node: Some(Node::new(value, id)),
            })),
            Ordering::SeqCst,
        );
        self.help(phase);
        self.help_finish_enq();
    }
    /// Returns a copy of the front value without removing it, or `None` when
    /// the queue is empty (head's successor is null).
    pub(crate) fn peek(&self) -> Option<T> {
        // Safety: we never deallocate.
        let node = unsafe { &*self.head.load(Ordering::SeqCst) };
        let next = node.next.load(Ordering::SeqCst);
        if next.is_null() {
            None
        } else {
            Some(unsafe { &*next }.value.expect("not a sentinel Node"))
        }
    }
    /// Pops the front element only if it still equals `front`. Fails when the
    /// queue is empty, the front changed, or a concurrent remover won the CAS.
    pub(crate) fn try_remove_front(&self, front: T) -> Result<(), ()> {
        let curr_head_ptr = self.head.load(Ordering::SeqCst);
        // Safety: nodes are leaked, never freed, so the pointer stays valid.
        let curr_head = unsafe { &*curr_head_ptr };
        let next = curr_head.next.load(Ordering::SeqCst);
        if next.is_null() || (unsafe { &*next }.value.expect("not a sentinel node")) != front {
            return Err(());
        }
        match self
            .head
            .compare_exchange(curr_head_ptr, next, Ordering::SeqCst, Ordering::Relaxed)
        {
            Ok(_) => {
                self.help_finish_enq();
                // TODO: is this needed?
                curr_head.next.store(std::ptr::null_mut(), Ordering::SeqCst);
                Ok(())
            }
            Err(_) => Err(()),
        }
    }
    /// Helps every announced pending operation whose ticket is <= `phase`.
    fn help(&self, phase: u64) {
        for (id, desc_atomic) in self.state.iter().enumerate() {
            let desc_ptr = desc_atomic.load(Ordering::SeqCst);
            let desc = unsafe { &*desc_ptr };
            if desc.pending && desc.phase.unwrap_or(0) <= phase {
                // This operation needs help.
                // Currently, the only helpable operation is enqueue.
                if desc.enqueue {
                    self.help_enq(id, phase)
                }
            }
        }
    }
    /// Drives thread `id`'s announced enqueue until it is no longer pending:
    /// links the announced node after the current tail, then fixes up `tail`.
    fn help_enq(&self, id: usize, phase: u64) {
        while self.is_still_pending(id, phase) {
            let last_ptr = self.tail.load(Ordering::SeqCst);
            let last = unsafe { &*last_ptr };
            let next_ptr = last.next.load(Ordering::SeqCst);
            if last_ptr != self.tail.load(Ordering::SeqCst) {
                // Tail was concurrently updated.
                continue;
            }
            if !next_ptr.is_null() {
                // Tail is not up to date -- help update it.
                self.help_finish_enq();
                continue;
            }
            if !self.is_still_pending(id, phase) {
                // Phase is already over.
                // TODO: Can this just return?
                continue;
            }
            // We know we have a consistent (tail, tail.next) pair, and that it likely still needs
            // to be updated, so let's try to actually execute the to-be-enqueued node from the
            // enqueuing thread's descriptor.
            let curr_desc_ptr = self.state[id].load(Ordering::SeqCst);
            let curr_desc = unsafe { &*curr_desc_ptr };
            if !curr_desc.pending {
                // TODO: Can we continue? Can we assert this is still pending?
            }
            debug_assert!(curr_desc.enqueue);
            if last
                .next
                .compare_exchange(
                    next_ptr,
                    curr_desc
                        .node
                        .expect("node should always be Some for pending enqueue"),
                    Ordering::SeqCst,
                    Ordering::Relaxed,
                )
                .is_ok()
            {
                self.help_finish_enq();
                return;
            }
        }
    }
    /// Completes a half-finished enqueue: marks the owning thread's descriptor
    /// as no longer pending and swings `tail` over the already-linked node.
    fn help_finish_enq(&self) {
        let last_ptr = self.tail.load(Ordering::SeqCst);
        let last = unsafe { &*last_ptr };
        let next_ptr = last.next.load(Ordering::SeqCst);
        if next_ptr.is_null() {
            // Tail pointer is already up to date, so nothing to do.
            return;
        }
        let next = unsafe { &*next_ptr };
        let id = next.enq_id.expect("next is never the sentinel");
        let cur_desc_ptr = self.state[id].load(Ordering::SeqCst);
        let cur_desc = unsafe { &*cur_desc_ptr };
        if last_ptr != self.tail.load(Ordering::SeqCst) {
            // Tail pointer has already been updated.
            return;
        }
        if cur_desc.node.unwrap_or_else(std::ptr::null_mut) != next_ptr {
            // Owner of the next node is now working on a subsequent operation,
            // the enqueue must have finished.
            return;
        }
        // This is really just setting pending = false.
        let new_desc_ptr = Box::into_raw(Box::new(OpDesc {
            phase: cur_desc.phase,
            pending: false,
            enqueue: true,
            node: cur_desc.node,
        }));
        let _ = self.state[id].compare_exchange(
            cur_desc_ptr,
            new_desc_ptr,
            Ordering::SeqCst,
            Ordering::Relaxed,
        );
        let _ = self
            .tail
            .compare_exchange(last_ptr, next_ptr, Ordering::SeqCst, Ordering::Relaxed);
    }
    /// Largest phase ticket currently announced by any thread slot.
    fn max_phase(&self) -> Option<u64> {
        self.state
            .iter()
            .filter_map(|s| unsafe { (&*s.load(Ordering::SeqCst)).phase })
            .max()
    }
    /// True while thread `id`'s announced operation with ticket <= `phase`
    /// has not yet been completed by anyone.
    fn is_still_pending(&self, id: usize, phase: u64) -> bool {
        let state = unsafe { &*self.state[id].load(Ordering::SeqCst) };
        state.pending && state.phase.unwrap_or(0) <= phase
    }
}
// A wait-free queue.
pub(crate) type HelpQueue<LF, const N: usize> = WaitFreeHelpQueue<*const OperationRecordBox<LF>, N>;
| true |
4ba8978b21eeb26fe6d4fdf3ef94a566b811a3c6
|
Rust
|
nsclass/raft-rs
|
/datadriven/src/lib.rs
|
UTF-8
| 2,534 | 2.78125 | 3 |
[
"Apache-2.0"
] |
permissive
|
/*!
# datadriven
**datadriven** is a tool for testing. Ported from [cockroachdb/datadriven](https://github.com/cockroachdb/datadriven)
To execute data-driven tests, pass the path of the test file as well as a
function which can interpret and execute whatever commands are present in
the test file. The framework invokes the function, passing it information
about the test case in a TestData struct.
The function must return the actual results of the case, which
run_test() compares with the expected results. If the two are not
equal, the test is marked to fail.
[run_test()](fn.run_test.html) will run the test on a file or given folder
Recommended usage:
for test function `test_func_001`, place the testdata in `src/testdata/test_func_001`, `run_test(src/testdata/test_func_001, func_001)`
for test function `test_func_002`, place the testdata in `src/testdata/test_func_002`, `run_test(src/testdata/test_func_002, func_002)`
or just run a file `run_test(src/testdata/data.txt, func_002)`
and so on.
The path tree looks like the following:
```text
.
├── Cargo.toml
└── src
├── datadriven.rs
├── lib.rs
└── testdata
├── data.txt
├── test_func_001
│ ├── data_001.txt
│ └── data_002.txt
└── test_func_002
├── data_001.txt
└── data_002.txt
```
The comparison is done by [similar-asserts](https://docs.rs/similar-asserts/1.1.0/similar_asserts/)
The difference between [cockroachdb/datadriven](https://github.com/cockroachdb/datadriven)
1. no rewrite
2. no subtest
*/
#![deny(missing_docs)]
mod datadriven;
mod line_sparser;
mod test_data;
mod test_data_reader;
pub use self::datadriven::run_test;
pub use self::datadriven::walk;
pub use self::test_data::CmdArg;
pub use self::test_data::TestData;
use anyhow::Result;
use slog::Drain;
use std::fs::read_dir;
use std::io;
use std::path::PathBuf;
use slog::o;
#[allow(dead_code)]
/// Builds a terminal logger: full-format output over an async drain.
fn default_logger() -> slog::Logger {
    let decorator = slog_term::TermDecorator::new().build();
    let formatted = slog_term::FullFormat::new(decorator).build().fuse();
    let async_drain = slog_async::Async::new(formatted).build().fuse();
    slog::Logger::root(async_drain, o!())
}
/// Returns the entries of `path` when it is a readable directory, otherwise
/// treats `path` itself as a single file and returns it alone.
fn get_dirs_or_file(path: &str) -> Result<Vec<PathBuf>> {
    match read_dir(path) {
        Ok(dir) => {
            let entries = dir
                .map(|entry| entry.map(|e| e.path()))
                .collect::<Result<Vec<_>, io::Error>>()?;
            Ok(entries)
        }
        // Not a directory (or unreadable): fall back to the plain path.
        Err(_) => Ok(vec![PathBuf::from(path)]),
    }
}
| true |
5543f83a4fb62c1eb82aacbea9bc8202ec3e483a
|
Rust
|
TheCowKingmoo/Practice_Problems
|
/Rust/phone_combo/src/main.rs
|
UTF-8
| 1,495 | 3.71875 | 4 |
[] |
no_license
|
// Entry point: placeholder only — the combination helpers below are not
// wired up to main yet.
fn main() {
    println!("Hello, world!");
}
fn letter_combinations(digits: String) -> Vec<String> {
let chars: Vec<char> = s.chars().collect();
let mut string_array: Vec<String> = Vec::new();
for i in 0..chars.len() {
let current_char: char = chars[i];
letter_vec: Vec<char> = digit_char_to_letter_vec(current_char);
if string_array.len() == 0 {
for j in letter_vec.len() {
string_array.push(letter_vec[j]);
}
} else {
string_array = apply_char_vec_to_string_vec(string_array, letter_vec);
}
}
return string_array;
}
/// Appends every character of `char_vec` to every string of `string_vec`,
/// producing all `string_vec.len() * char_vec.len()` combinations in order.
///
/// Fixes the original, which did not compile: parameters and return type were
/// untyped and `String + char` is not a valid addition.
fn apply_char_vec_to_string_vec(string_vec: Vec<String>, char_vec: Vec<char>) -> Vec<String> {
    let mut new_string_vec: Vec<String> = Vec::with_capacity(string_vec.len() * char_vec.len());
    for current_string in &string_vec {
        for &current_char in &char_vec {
            // Clone the prefix and extend it with one candidate letter.
            let mut new_string = current_string.clone();
            new_string.push(current_char);
            new_string_vec.push(new_string);
        }
    }
    new_string_vec
}
/// Maps a phone keypad digit ('2'..='9') to its letters.
///
/// Digits without letters and non-digit characters map to an empty vec.
/// Fixes the original, which did not compile: it returned fixed-size arrays
/// where `Vec<char>` was declared and used a non-existent `null`.
fn digit_char_to_letter_vec(input: char) -> Vec<char> {
    match input {
        '2' => vec!['a', 'b', 'c'],
        '3' => vec!['d', 'e', 'f'],
        '4' => vec!['g', 'h', 'i'],
        '5' => vec!['j', 'k', 'l'],
        '6' => vec!['m', 'n', 'o'],
        '7' => vec!['p', 'q', 'r', 's'],
        '8' => vec!['t', 'u', 'v'],
        '9' => vec!['w', 'x', 'y', 'z'],
        // '0', '1', and anything else carry no letters.
        _ => vec![],
    }
}
| true |
357086db9a089b59d58c94ac692d543963c2c733
|
Rust
|
charlescerisier/tempest
|
/tempest/src/service/cli.rs
|
UTF-8
| 4,682 | 2.8125 | 3 |
[
"MIT"
] |
permissive
|
use crate::service::config::{get_topology_config, TopologyConfig};
use structopt::StructOpt;
/// PackageOpt provides an interface for
/// parsing Topology Package command line arguments
#[derive(Debug, Clone, StructOpt)]
#[structopt(name = "Tempest Package", about = "Topology cli options")]
pub(crate) struct PackageOpt {
    #[structopt(short = "h", long = "host", default_value = "0.0.0.0")]
    /// Topology host (defaults to binding all interfaces)
    pub host: String,
    #[structopt(short = "p", long = "port", default_value = "8765")]
    /// Topology port
    pub port: String,
    #[structopt(long = "agent_host", default_value = "0.0.0.0")]
    /// Agent host
    pub agent_host: String,
    #[structopt(long = "agent_port", default_value = "7654")]
    /// Agent port
    pub agent_port: String,
    #[structopt(short = "g", long = "graceful_shutdown", default_value = "30000")]
    /// Graceful shutdown milliseconds
    pub graceful_shutdown: u64,
    // The selected sub-command (standalone / topology / task).
    #[structopt(subcommand)]
    pub cmd: PackageCmd,
}
impl PackageOpt {
    /// Returns the `{host}:{port}` address of the topology server.
    pub fn host_port(&self) -> String {
        format!("{}:{}", self.host, self.port)
    }
    /// Loads the `Topology.toml` configuration referenced by the active
    /// sub-command; `Ok(None)` when no config path was supplied.
    pub fn get_config(&self) -> Result<Option<TopologyConfig>, config::ConfigError> {
        // Every sub-command carries the same optional config argument.
        let cfg = match &self.cmd {
            PackageCmd::Task(opt) => &opt.config,
            PackageCmd::Topology(opt) => &opt.config,
            PackageCmd::Standalone(opt) => &opt.config,
        };
        if let Some(ConfigOpt::Config { path: Some(path) }) = cfg {
            // Parse errors propagate; a readable config yields Some(config).
            get_topology_config(path).map(Some)
        } else {
            Ok(None)
        }
    }
}
/// Package sub-commands
#[derive(Debug, Clone, StructOpt)]
pub(crate) enum PackageCmd {
    /// Standalone option for running a topology
    /// and all tasks as a single, multi-threaded process
    #[structopt(name = "standalone")]
    Standalone(StandaloneOpt),
    /// Run a topology (source & pipeline) process
    #[structopt(name = "topology")]
    Topology(TopologyOpt),
    /// Run a task process by name
    #[structopt(name = "task")]
    Task(TaskOpt),
}
/// Sub-command for running a Standalone process
#[derive(Debug, Clone, StructOpt)]
pub(crate) struct StandaloneOpt {
    #[structopt(subcommand)]
    /// Topology.toml config option sub-command
    pub config: Option<ConfigOpt>,
}
/// Sub-command for running a Topology process
#[derive(Debug, Clone, StructOpt)]
pub(crate) struct TopologyOpt {
    #[structopt(subcommand)]
    /// Topology.toml config
    pub config: Option<ConfigOpt>,
}
/// Sub-command for running a Task process
#[derive(Default, Debug, Clone, StructOpt)]
pub(crate) struct TaskOpt {
    #[structopt(short = "n", long = "name")]
    /// Name of the topology task to run
    pub name: String,
    // Not currently implemented
    // #[structopt(short = "w", long = "workers")]
    // /// Number of workers to spin up for this task per node
    // pub workers: Option<u64>,
    #[structopt(short = "i", long = "poll_interval")]
    /// Poll interval milliseconds
    pub poll_interval: Option<u64>,
    #[structopt(short = "c", long = "poll_count")]
    /// Number of messages to read per poll
    pub poll_count: Option<u16>,
    #[structopt(short = "b", long = "max_backoff")]
    /// Max backoff wait in milliseconds
    pub max_backoff: Option<u64>,
    #[structopt(subcommand)]
    /// Topology.toml config
    pub config: Option<ConfigOpt>,
}
/// Config option
#[derive(Debug, Clone, StructOpt)]
pub(crate) enum ConfigOpt {
    #[structopt(name = "config")]
    /// Topology.toml config
    Config {
        #[structopt(short = "p", long = "path")]
        // Optional filesystem path to a Topology.toml file.
        path: Option<String>,
        // TODO: implement toml string loading
        // #[structopt(short = "t", long = "toml")]
        // /// Parse this String as a toml table
        // toml: Option<String>,
    },
}
/// Command options for configuring an Agent.
///
#[derive(Default, Debug, Clone, StructOpt)]
pub(crate) struct AgentOpt {
    #[structopt(short = "h", long = "host", default_value = "0.0.0.0")]
    /// Agent host
    pub host: String,
    #[structopt(short = "p", long = "port", default_value = "7654")]
    /// Agent port
    pub port: String,
}
impl AgentOpt {
pub fn new(host: String, port: String) -> Self {
AgentOpt { host, port }
}
/// Returns the `{host}:{port}`
pub fn host_port(&self) -> String {
format!("{}:{}", self.host, self.port)
}
}
| true |
e157a0aa626655ff7f0647bf24ad233629124dfe
|
Rust
|
frankegoesdown/LeetCode-in-Go
|
/Algorithms/0210.course-schedule-ii/course-schedule-ii.go
|
UTF-8
| 1,108 | 3.078125 | 3 |
[
"MIT"
] |
permissive
|
package problem0210
// findOrder returns one order in which all numCourses courses can be taken,
// or nil when the prerequisite graph contains a cycle (LeetCode 210).
func findOrder(numCourses int, prerequisites [][]int) []int {
	adjacency, indegree := build(numCourses, prerequisites)
	return search(adjacency, indegree)
}
// build constructs the course graph from the prerequisite pairs.
// next[i] lists the courses that have course i as a prerequisite;
// pre[i] counts how many prerequisites course i has.
func build(num int, requires [][]int) (next [][]int, pre []int) {
	next = make([][]int, num)
	pre = make([]int, num)
	for _, r := range requires {
		course, requirement := r[0], r[1]
		next[requirement] = append(next[requirement], course)
		pre[course]++
	}
	return next, pre
}
// search produces a topological order of the courses (Kahn's algorithm with
// a linear scan per step), returning nil if a cycle makes ordering impossible.
func search(next [][]int, pre []int) []int {
	n := len(pre)
	order := make([]int, n)
	for pos := 0; pos < n; pos++ {
		// Pick the first course that has no unfinished prerequisites.
		candidate := -1
		for j := 0; j < n; j++ {
			if pre[j] == 0 {
				candidate = j
				break
			}
		}
		// Every remaining course still has prerequisites: a cycle exists.
		if candidate == -1 {
			return nil
		}
		// Mark the course as taken so it is never selected again.
		pre[candidate] = -1
		// Completing it satisfies one prerequisite of each dependent course.
		for _, c := range next[candidate] {
			pre[c]--
		}
		order[pos] = candidate
	}
	return order
}
| true |
dd9cb0278706bb4bc94a70007cec6942888ada3e
|
Rust
|
joserbl/llama
|
/libllama/src/cpu/instructions_arm/program_status.rs
|
UTF-8
| 2,529 | 2.625 | 3 |
[
"BSD-3-Clause"
] |
permissive
|
use cpu;
use cpu::Cpu;
use cpu::decoder_arm as arm;
/// MRS: copies a program status register into general-purpose register `rd`.
///
/// When the R bit is set the current mode's SPSR is read; otherwise the CPSR.
pub fn mrs(cpu: &mut Cpu, data: arm::mrs::InstrDesc) -> cpu::InstrStatus {
    // Condition field failed: the instruction is a no-op.
    if !cpu::cond_passed(bf!(data.cond), &cpu.cpsr) {
        return cpu::InstrStatus::InBlock;
    }
    let rd = bf!(data.rd);
    let r_bit = bf!(data.r_bit);
    if r_bit == 1 {
        cpu.regs[rd as usize] = cpu.get_current_spsr().raw();
    } else {
        cpu.regs[rd as usize] = cpu.cpsr.raw();
    }
    cpu::InstrStatus::InBlock
}
/// Shared MSR implementation: writes an immediate or register value into the
/// CPSR or SPSR, limited to the bytes selected by the field mask.
pub fn instr_msr(cpu: &mut Cpu, data: arm::msr_1::InstrDesc, immediate: bool) -> cpu::InstrStatus {
    if !cpu::cond_passed(bf!(data.cond), &cpu.cpsr) {
        return cpu::InstrStatus::InBlock;
    }
    let field_mask = bf!(data.field_mask);
    let shifter_operand = bf!(data.shifter_operand);
    let val = if immediate {
        // Immediate form: 8-bit value rotated right by twice the rotate field.
        let immed_8 = bits!(shifter_operand, 0 => 7);
        let rotate_imm = bits!(shifter_operand, 8 => 11);
        immed_8.rotate_right(rotate_imm * 2)
    } else {
        // Register form: the low 4 bits select the source register.
        cpu.regs[bits!(shifter_operand, 0 => 3) as usize]
    };
    // Bit groups of the PSR: reserved, flag (NZCVQ), privileged (mode low
    // bits), and the Thumb state bit.
    let unalloc_mask = 0x07FFFF00u32;
    let user_mask = 0xF8000000u32;
    let priv_mask = 0x0000000Fu32;
    let state_mask = 0x00000020u32;
    if val & unalloc_mask != 0 {
        error!("Attempted to set reserved PSR bits through MSR instruction!");
    }
    // Each set field_mask bit enables writing one byte of the PSR.
    let mut byte_mask = 0u32;
    byte_mask |= if bit!(field_mask, 0) == 1 { 0x000000FF } else { 0 };
    byte_mask |= if bit!(field_mask, 1) == 1 { 0x0000FF00 } else { 0 };
    byte_mask |= if bit!(field_mask, 2) == 1 { 0x00FF0000 } else { 0 };
    byte_mask |= if bit!(field_mask, 3) == 1 { 0xFF000000 } else { 0 };
    if bf!(data.r_bit) == 0 {
        // CPSR
        // TODO: Check privileges
        let cleared_cpsr = cpu.cpsr.raw() & !byte_mask;
        cpu.cpsr.set_raw(cleared_cpsr | (val & byte_mask));
        if bit!(field_mask, 0) == 1 {
            // CPU mode may have been changed
            cpu.regs.swap(cpu::Mode::from_num(bf!((cpu.cpsr).mode)));
        }
    } else {
        // SPSR
        // Writes to the SPSR are restricted to the flag/priv/state bits.
        let spsr = cpu.get_current_spsr();
        byte_mask &= user_mask | priv_mask | state_mask;
        let cleared_spsr = spsr.raw() & !byte_mask;
        spsr.set_raw(cleared_spsr | (val & byte_mask));
    }
    cpu::InstrStatus::InBlock
}
/// MSR (immediate operand form).
pub fn msr_1(cpu: &mut Cpu, data: arm::msr_1::InstrDesc) -> cpu::InstrStatus {
    instr_msr(cpu, data, true)
}
/// MSR (register operand form): re-wraps the raw encoding as an msr_1
/// descriptor since both forms share the same field layout here.
pub fn msr_2(cpu: &mut Cpu, data: arm::msr_2::InstrDesc) -> cpu::InstrStatus {
    instr_msr(cpu, arm::msr_1::InstrDesc::new(data.raw()), false)
}
| true |
802e764425de24e3ef9c40fe8159bade01027dd6
|
Rust
|
IThawk/rust-project
|
/rust-master/src/test/run-pass/intrinsics/intrinsic-move-val-cleanups.rs
|
UTF-8
| 6,986 | 2.953125 | 3 |
[
"MIT",
"LicenseRef-scancode-other-permissive",
"Apache-2.0",
"BSD-3-Clause",
"BSD-2-Clause",
"NCSA"
] |
permissive
|
// run-pass
#![allow(unused_unsafe)]
#![allow(unreachable_code)]
// ignore-emscripten no threads support
#![allow(stable_features)]
// This test is checking that the move_val_init intrinsic is
// respecting cleanups for both of its argument expressions.
//
// In other words, if either DEST or SOURCE in
//
// `intrinsics::move_val_init(DEST, SOURCE)
//
// introduce temporaries that require cleanup, and SOURCE panics, then
// make sure the cleanups still occur.
#![feature(core_intrinsics, sync_poison)]
use std::cell::RefCell;
use std::intrinsics;
use std::sync::{Arc, LockResult, Mutex, MutexGuard};
use std::thread;
// A log entry is a message plus a sequence number used to assert ordering.
type LogEntry = (&'static str, i32);
type Guarded = RefCell<Vec<LogEntry>>;
#[derive(Clone)]
struct Log(Arc<Mutex<Guarded>>);
// Holding an `Acquired` proves the log's mutex is currently held.
struct Acquired<'a>(MutexGuard<'a, Guarded>);
// Expected outcome of a test: whether the mutex ended up poisoned, plus the
// exact sequence of log entries.
type LogState = (MutexWas, &'static [LogEntry]);
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
enum MutexWas { Poisoned, NotPoisoned }
impl Log {
    fn lock(&self) -> LockResult<MutexGuard<RefCell<Vec<LogEntry>>>> { self.0.lock() }
    fn acquire(&self) -> Acquired { Acquired(self.0.lock().unwrap()) }
}
impl<'a> Acquired<'a> {
    fn log(&self, s: &'static str, i: i32) { self.0.borrow_mut().push((s, i)); }
}
// test1: the guard is dropped before the panic, so the mutex stays unpoisoned.
const TEST1_EXPECT: LogState = (MutexWas::NotPoisoned,
                                &[("double-check non-poisoning path", 1)
                                  ]);
fn test1(log: Log) {
    {
        let acq = log.acquire();
        acq.log("double-check non-poisoning path", 1);
    }
    panic!("every test ends in a panic");
}
// test2: panicking while the guard is held poisons the mutex.
const TEST2_EXPECT: LogState = (MutexWas::Poisoned,
                                &[("double-check poisoning path", 1),
                                  ("and multiple log entries", 2),
                                  ]);
fn test2(log: Log) {
    let acq = log.acquire();
    acq.log("double-check poisoning path", 1);
    acq.log("and multiple log entries", 2);
    panic!("every test ends in a panic");
}
// Appends its entry to the log when dropped; used to observe cleanup order.
struct LogOnDrop<'a>(&'a Acquired<'a>, &'static str, i32);
impl<'a> Drop for LogOnDrop<'a> {
    fn drop(&mut self) {
        self.0.log(self.1, self.2);
    }
}
// test3: destructors run during unwinding (in reverse declaration order) and
// can still log.
const TEST3_EXPECT: LogState = (MutexWas::Poisoned,
                                &[("double-check destructors can log", 1),
                                  ("drop d2", 2),
                                  ("drop d1", 3),
                                  ]);
fn test3(log: Log) {
    let acq = log.acquire();
    acq.log("double-check destructors can log", 1);
    let _d1 = LogOnDrop(&acq, "drop d1", 3);
    let _d2 = LogOnDrop(&acq, "drop d2", 2);
    panic!("every test ends in a panic");
}
// The *real* tests of panic-handling for move_val_init intrinsic
// start here.
// test4: both argument expressions run; their temporaries drop in order, then
// the overwritten destinations drop at scope exit.
const TEST4_EXPECT: LogState = (MutexWas::Poisoned,
                                &[("neither arg panics", 1),
                                  ("drop temp LOD", 2),
                                  ("drop temp LOD", 3),
                                  ("drop dest_b", 4),
                                  ("drop dest_a", 5),
                                  ]);
fn test4(log: Log) {
    let acq = log.acquire();
    acq.log("neither arg panics", 1);
    let mut dest_a = LogOnDrop(&acq, "a will be overwritten, not dropped", 0);
    let mut dest_b = LogOnDrop(&acq, "b will be overwritten, not dropped", 0);
    unsafe {
        intrinsics::move_val_init({ LogOnDrop(&acq, "drop temp LOD", 2); &mut dest_a },
                                  LogOnDrop(&acq, "drop dest_a", 5));
        intrinsics::move_val_init(&mut dest_b, { LogOnDrop(&acq, "drop temp LOD", 3);
                                                 LogOnDrop(&acq, "drop dest_b", 4) });
    }
    panic!("every test ends in a panic");
}
// Check that move_val_init(PANIC, SOURCE_EXPR) never evaluates SOURCE_EXPR
const TEST5_EXPECT: LogState = (MutexWas::Poisoned,
                                &[("first arg panics", 1),
                                  ("drop orig dest_a", 2),
                                  ]);
fn test5(log: Log) {
    let acq = log.acquire();
    acq.log("first arg panics", 1);
    let mut _dest_a = LogOnDrop(&acq, "drop orig dest_a", 2);
    unsafe {
        intrinsics::move_val_init({ panic!("every test ends in a panic") },
                                  LogOnDrop(&acq, "we never get here", 0));
    }
}
// Check that move_val_init(DEST_EXPR, PANIC) cleans up temps from DEST_EXPR.
const TEST6_EXPECT: LogState = (MutexWas::Poisoned,
                                &[("second arg panics", 1),
                                  ("drop temp LOD", 2),
                                  ("drop orig dest_a", 3),
                                  ]);
fn test6(log: Log) {
    let acq = log.acquire();
    acq.log("second arg panics", 1);
    let mut dest_a = LogOnDrop(&acq, "drop orig dest_a", 3);
    unsafe {
        intrinsics::move_val_init({ LogOnDrop(&acq, "drop temp LOD", 2); &mut dest_a },
                                  { panic!("every test ends in a panic"); });
    }
}
// Check that move_val_init(DEST_EXPR, COMPLEX_PANIC) cleans up temps from COMPLEX_PANIC.
const TEST7_EXPECT: LogState = (MutexWas::Poisoned,
                                &[("second arg panics", 1),
                                  ("drop temp LOD", 2),
                                  ("drop temp LOD", 3),
                                  ("drop orig dest_a", 4),
                                  ]);
fn test7(log: Log) {
    let acq = log.acquire();
    acq.log("second arg panics", 1);
    let mut dest_a = LogOnDrop(&acq, "drop orig dest_a", 4);
    unsafe {
        intrinsics::move_val_init({ LogOnDrop(&acq, "drop temp LOD", 2); &mut dest_a },
                                  { LogOnDrop(&acq, "drop temp LOD", 3);
                                    panic!("every test ends in a panic"); });
    }
}
// All test cases with their expected (poisoning, log) outcomes.
const TEST_SUITE: &'static [(&'static str, fn (Log), LogState)] =
    &[("test1", test1, TEST1_EXPECT),
      ("test2", test2, TEST2_EXPECT),
      ("test3", test3, TEST3_EXPECT),
      ("test4", test4, TEST4_EXPECT),
      ("test5", test5, TEST5_EXPECT),
      ("test6", test6, TEST6_EXPECT),
      ("test7", test7, TEST7_EXPECT),
      ];
fn main() {
    for &(name, test, expect) in TEST_SUITE {
        let log = Log(Arc::new(Mutex::new(RefCell::new(Vec::new()))));
        // Run each case on its own thread; a Join error proves it panicked.
        let ret = { let log = log.clone(); thread::spawn(move || test(log)).join() };
        assert!(ret.is_err(), "{} must end with panic", name);
        {
            // Compare both the poisoning state and the exact log contents.
            let l = log.lock();
            match l {
                Ok(acq) => {
                    assert_eq!((MutexWas::NotPoisoned, &acq.borrow()[..]), expect);
                    println!("{} (unpoisoned) log: {:?}", name, *acq);
                }
                Err(e) => {
                    let acq = e.into_inner();
                    assert_eq!((MutexWas::Poisoned, &acq.borrow()[..]), expect);
                    println!("{} (poisoned) log: {:?}", name, *acq);
                }
            }
        }
    }
}
| true |
1f2b02b5630f40dd1e06e3305bec1d2bbba21c94
|
Rust
|
kannanvijayan-zz/binjs-ref
|
/crates/binjs_io/src/bytes/varnum.rs
|
UTF-8
| 2,282 | 3.296875 | 3 |
[
"MIT"
] |
permissive
|
use std;
use std::io::{Read, Write};
/// Extension trait for writing a `u32` in the variable-length encoding:
/// each byte stores 7 payload bits in its high bits, with the lowest bit as
/// a continuation flag (1 = more bytes follow).
pub trait WriteVarNum {
    /// Writes `num`, returning the number of bytes emitted.
    fn write_varnum(&mut self, num: u32) -> Result<usize, std::io::Error>;
}
/// Extension trait for reading a `u32` in the variable-length encoding.
pub trait ReadVarNum {
    /// Convenience wrapper around `read_varnum` returning the value directly.
    fn read_varnum_2(&mut self) -> Result<u32, std::io::Error>;
    /// Reads a varnum into `num`, returning the number of bytes consumed.
    fn read_varnum(&mut self, num: &mut u32) -> Result<usize, std::io::Error>;
}
impl<T> WriteVarNum for T where T: Write {
    fn write_varnum(&mut self, mut value: u32) -> Result<usize, std::io::Error> {
        // A u32 needs at most ceil(32 / 7) = 5 bytes.
        let mut bytes = Vec::with_capacity(5);
        loop {
            // Low 7 bits of the value become the high 7 bits of the byte.
            let mut byte = ((value & 0x7F) << 1) as u8;
            if value > 0x7F {
                // More payload remains: set the continuation flag.
                byte |= 1;
            }
            bytes.push(byte);
            value >>= 7;
            if value == 0 {
                break
            }
        }
        // write_all rather than write: a short write would silently truncate
        // the number and corrupt the stream.
        self.write_all(&bytes)?;
        Ok(bytes.len())
    }
}
impl<T> ReadVarNum for T where T: Read {
    fn read_varnum_2(&mut self) -> Result<u32, std::io::Error> {
        let mut result = 0;
        self.read_varnum(&mut result)?;
        Ok(result)
    }
    fn read_varnum(&mut self, num: &mut u32) -> Result<usize, std::io::Error> {
        let mut bytes = 0;
        let mut result : u32 = 0;
        let mut shift : u32 = 0;
        let mut buf : [u8;1] = [0];
        loop {
            debug_assert!(shift < 32);
            let read = self.read(&mut buf)?;
            if read == 0 {
                // EOF in the middle of a number. The previous code looped
                // forever here (read keeps returning 0) or re-decoded a stale
                // byte; fail explicitly instead.
                return Err(std::io::Error::new(
                    std::io::ErrorKind::UnexpectedEof,
                    "stream ended in the middle of a varnum",
                ));
            }
            bytes += read;
            let byte = buf[0];
            // Payload bits accumulate little-endian, 7 bits per byte.
            result |= (byte as u32 >> 1) << shift;
            shift += 7;
            if byte & 1 == 0 {
                // Continuation flag clear: this was the final byte.
                *num = result;
                return Ok(bytes);
            }
        }
    }
}
// Round-trip test: encode a spread of values and check that decoding yields
// the same value and consumes exactly the bytes that were written.
#[test]
fn test_varnum() {
    use std::io::Cursor;
    // Produce a reasonably unbiased sample of numbers.
    for i in 1..5 {
        let mut start = i;
        for num in &[3, 5, 7, 11, 13] {
            start *= *num;
            println!("test_varnum, testing with {}", start);
            let mut encoded = vec![];
            let encoded_bytes = encoded.write_varnum(start).unwrap();
            assert_eq!(encoded_bytes, encoded.len());
            println!("test_varnum, encoded as {:?}", encoded);
            let mut decoded : u32 = 0;
            let decoded_bytes = Cursor::new(encoded).read_varnum(&mut decoded).unwrap();
            assert_eq!(start, decoded);
            assert_eq!(encoded_bytes, decoded_bytes);
        }
    }
}
| true |
f8ea646004a8eda14e352f0a51f2e9f1a7bc4a32
|
Rust
|
Cannonbait/RustRaytracer
|
/src/main.rs
|
UTF-8
| 2,806 | 3.109375 | 3 |
[] |
no_license
|
use std::fs::File;
use std::io::prelude::*;
mod point;
mod shape;
use point::*;
use shape::*;
const WIDTH: usize = 100;
const HEIGHT: usize = 100;
/// Horizontal field of view, in degrees.
const FOV: usize = 90;
const FILENAME: &str = "render";
pub type Number = u32;
pub type Area = Vec<Vec<Number>>;
/// Distance from the viewpoint to the image plane that yields the configured
/// field of view: half the larger image dimension divided by tan(FOV/2).
///
/// Fix: `FOV` is in degrees but `tan()` expects radians, so the original
/// computed tan(45 rad) instead of tan(45°); convert with `to_radians()`.
fn calculate_fov() -> f32 {
    let half_extent = std::cmp::max(WIDTH, HEIGHT) as f32 / 2.0;
    let half_fov = (FOV as f32 / 2.0).to_radians();
    half_extent / half_fov.tan()
}
/// Renders a hard-coded scene (one circle, two rectangles) to `render.ppm`
/// and converts the result to `render.png`.
fn main() {
    //Create matrix with default background
    let mut render_area = setup_render_matrix();
    //For each pixel, calculate intersection and set colour
    let circle = Circle {
        pos: Point3 {
            x: 3.0,
            y: 5.0,
            z: 6.0,
        },
        radius: 10.0,
        colour: 14,
    };
    let rectangle = Rectangle {
        pos: Point3 {
            x: 3.0,
            y: 5.0,
            z: 7.0,
        },
        width: 20.0,
        height: 20.0,
        colour: 13,
    };
    let ructangle = Rectangle {
        pos: Point3 {
            x: 7.0,
            y: 5.0,
            z: 8.0,
        },
        width: 20.0,
        height: 20.0,
        colour: 10,
    };
    let distance = calculate_fov();
    // Later objects in this list overwrite earlier hits (no depth testing).
    let objects: Vec<Box<dyn Intersectable>> =
        vec![Box::new(ructangle), Box::new(circle), Box::new(rectangle)];
    // Camera sits centred on the image, `distance` in front of the plane.
    let viewpoint = Point3 {
        x: (WIDTH as f32) / 2f32,
        y: (HEIGHT as f32) / 2f32,
        z: distance,
    };
    //For each pixel in render
    for y in 0..HEIGHT {
        for x in 0..WIDTH {
            //Calculate ray
            let ray = Point3 {
                x: (x as f32) - viewpoint.x,
                y: (y as f32) - viewpoint.y,
                z: distance,
            };
            for o in objects.iter() {
                if let Some(color) = o.intersects(&viewpoint, &ray) {
                    render_area[x][y] = color;
                }
            }
        }
    }
    render_to_file(render_area, FILENAME).expect("Unable to save image");
    open_and_save_as_png(FILENAME)
}
/// Allocates the HEIGHT x WIDTH pixel buffer, zero-initialised (background).
fn setup_render_matrix() -> Area {
    let blank_row = vec![0; WIDTH];
    (0..HEIGHT).map(|_| blank_row.clone()).collect()
}
/// Writes `matrix` to `{name}.ppm` as an ASCII "P2" greymap with max value 15.
fn render_to_file(matrix: Area, name: &str) -> std::io::Result<()> {
    let mut file = File::create(format!("{}.ppm", name))?;
    // Header: magic number, dimensions, maximum grey value.
    file.write_all(b"P2\n")?;
    let num_columns = matrix.len();
    let num_rows = matrix[0].len();
    file.write_all(format!("{} {} \n", num_columns, num_rows).as_bytes())?;
    file.write_all(b"15\n")?;
    // One line of space-separated values per row.
    for row in &matrix {
        let cells: Vec<String> = row.iter().map(|c| c.to_string()).collect();
        file.write_all(format!("{}\n", cells.join(" ")).as_bytes())?;
    }
    Ok(())
}
/// Re-encodes `{name}.ppm` as `{name}.png` via the `image` crate.
fn open_and_save_as_png(name: &str) {
    let source = format!("{}.ppm", name);
    let target = format!("{}.png", name);
    image::open(source).unwrap().save(target).unwrap();
}
| true |
94e09cfe8103f30d8979c3235e70e166a5469461
|
Rust
|
HerringtonDarkholme/leetcode
|
/src/1320_minimum_distance.rs
|
UTF-8
| 1,919 | 3.515625 | 4 |
[] |
no_license
|
/*
So this problem is to produce two subsequences that have minimum total target value.
A brute force solution has exponential complexity 2**n.
But we know we are to assign a sequence of elements to 2 bins so that the target value is minimized. This looks like a dynamic programming.
If we know some min distances to type a word with two fingers, given a new char, what's the new distance?
We can maintain two fingers' previous typed chars. And the next min distance is solely determined by **the last chars two fingers typed respectively.**
This gives rise to dp cache: a matrix representing the last char of two finger.
*/
impl Solution {
    /// Minimum total finger travel to type `word` with two fingers on the
    /// 6-column A-Z grid; each finger's first placement costs nothing.
    pub fn minimum_distance(word: String) -> i32 {
        let word: Vec<_> = word.chars().collect();
        if word.len() <= 2 {
            // With at most two chars, each finger gets its own starting key.
            return 0;
        }
        // dp[k]: min cost so far, given that one finger just typed word[i-1]
        // and the other finger's last char is word[k-1] (k == 0 means the
        // other finger has not been placed yet).
        let mut dp = vec![
            compute_dist(word[0], word[1]), 0
        ];
        for i in 2..word.len() {
            let c = word[i];
            let prev = word[i - 1];
            let dist = compute_dist(prev, c);
            let mut next = vec![];
            let mut min = i32::max_value();
            for (j, &d) in dp.iter().enumerate() {
                // Same finger keeps typing: pay prev -> c whatever j is.
                next.push(d + dist);
                if j == 0 {
                    // Other finger is unplaced: putting it on c is free.
                    min = min.min(d);
                } else {
                    // Other finger moves from its last char to c.
                    min = min.min(d + compute_dist(word[j - 1], c));
                }
            }
            // New state where the idle finger now rests on word[i-1].
            next.push(min);
            dp = next;
        }
        dp.into_iter().fold(i32::max_value(), i32::min)
    }
}
/// Manhattan distance between two letters laid out on a 6-column A-Z grid.
fn compute_dist(c1: char, c2: char) -> i32 {
    let index = |c: char| c as i32 - 'A' as i32;
    let (a, b) = (index(c1), index(c2));
    let (row_a, col_a) = (a / 6, a % 6);
    let (row_b, col_b) = (b / 6, b % 6);
    (row_a - row_b).abs() + (col_a - col_b).abs()
}
| true |
ee5b7f46d59342da40895f009d7a9613a24cb6eb
|
Rust
|
metasyn/aoc
|
/2021/src/bin/day01.rs
|
UTF-8
| 1,226 | 3.3125 | 3 |
[
"Unlicense"
] |
permissive
|
mod util;
use std::io::Result;
// Advent of Code 2021, day 1: count increases in the depth measurements.
fn main() -> Result<()> {
    let input: Vec<i32> = util::load_file_split("input/day01.txt")
        .unwrap()
        .iter()
        .filter(|x| !x.is_empty())
        .map(|x| x.parse::<i32>().unwrap())
        .collect();
    // Worked example from the puzzle description.
    let sample = vec![199, 200, 208, 210, 200, 207, 240, 269, 260, 263];
    // Part 1
    let ans = count_increases(&sample);
    println!("{} should be 7", ans);
    let ans = count_increases(&input);
    println!("the answer is {}", ans);
    // Part 2: compare sums over a sliding window of three measurements.
    let ans = count_increases_sliding(&sample, 3);
    println!("{} shoud be 5", ans);
    let ans = count_increases_sliding(&input, 3);
    println!("the answer is {}", ans);
    return Ok(());
}
/// Counts how many elements are strictly greater than their predecessor.
///
/// Rewritten from a manual loop that compared `Option<&i32>` values (which
/// only worked because `None` never reached the comparison) to an idiomatic
/// `windows(2)` scan. The parameter is generalised to `&[i32]`; existing
/// `&Vec<i32>` callers still work via deref coercion.
fn count_increases(vec: &[i32]) -> i32 {
    // Slices shorter than 2 produce no windows, so empty and single-element
    // inputs naturally return 0.
    vec.windows(2).filter(|pair| pair[1] > pair[0]).count() as i32
}
/// Counts increases between sums of consecutive sliding windows of `window`
/// elements.
fn count_increases_sliding(vec: &Vec<i32>, window: usize) -> i32 {
    let sums: Vec<i32> = vec.windows(window).map(|w| w.iter().sum()).collect();
    count_increases(&sums)
}
| true |
e52daaba7d804a549315c55759f47b45aed30a3c
|
Rust
|
z2oh/lase
|
/src/components/laser.rs
|
UTF-8
| 592 | 2.953125 | 3 |
[
"Unlicense"
] |
permissive
|
//! This component holds properties about a laser.
use amethyst::ecs::prelude::{Component, DenseVecStorage};
use crate::vector::StorageTy;
pub struct Laser {
    // TODO: is this idiomatic? I should probably be using amethyst's color
    // types.
    /// The color of the laser. This is currently applied as a tint over an all
    /// white sprite.
    pub color: (f32, f32, f32),
    // Length of the laser — presumably in world units; confirm against the
    // systems that read it.
    pub len: StorageTy,
}
impl Component for Laser {
    // TODO: investigate storage types. This component in particular should
    // probably use `VecStorage`.
    type Storage = DenseVecStorage<Self>;
}
| true |
07088a54163450e54ff6a50c50cb655097f781a7
|
Rust
|
hassanin/azure-sdk-for-rust
|
/sdk/identity/src/token_credentials/managed_identity_credentials.rs
|
UTF-8
| 2,580 | 2.578125 | 3 |
[
"LicenseRef-scancode-generic-cla",
"MIT"
] |
permissive
|
use azure_core::errors::AzureError;
use azure_core::{TokenCredential, TokenResponse};
use chrono::{DateTime, Utc};
use oauth2::AccessToken;
use serde::Deserialize;
use url::Url;
use std::str;
const MSI_ENDPOINT_ENV_KEY: &str = "IDENTITY_ENDPOINT";
const MSI_SECRET_ENV_KEY: &str = "IDENTITY_HEADER";
const MSI_API_VERSION: &str = "2019-08-01";
/// Attempts authentication using a managed identity that has been assigned to the deployment environment.
///
/// This authentication type works in Azure VMs, App Service and Azure Functions applications, as well as the Azure Cloud Shell
///
/// Built up from docs at [https://docs.microsoft.com/en-us/azure/app-service/overview-managed-identity#using-the-rest-protocol](https://docs.microsoft.com/en-us/azure/app-service/overview-managed-identity#using-the-rest-protocol)
pub struct ManagedIdentityCredential;

#[async_trait::async_trait]
impl TokenCredential for ManagedIdentityCredential {
    /// Requests an access token for `resource` from the managed-identity
    /// endpoint, returning an error if the endpoint secret is missing or the
    /// HTTP exchange fails.
    async fn get_token(&self, resource: &str) -> Result<TokenResponse, AzureError> {
        // Prefer the endpoint advertised via the environment (App Service /
        // Functions); fall back to the fixed IMDS address used inside VMs.
        let msi_endpoint = std::env::var(MSI_ENDPOINT_ENV_KEY)
            .unwrap_or_else(|_| "http://169.254.169.254/metadata/identity/oauth2/token".to_owned());
        let query_items = vec![("api-version", MSI_API_VERSION), ("resource", resource)];
        let msi_endpoint_url = Url::parse_with_params(&msi_endpoint, &query_items)
            .map_err(|error| AzureError::GenericErrorWithText(error.to_string()))?;
        // The identity header secret is mandatory for this flow; fail fast
        // with a descriptive message when it is absent.
        let msi_secret = std::env::var(MSI_SECRET_ENV_KEY).map_err(|_| {
            AzureError::GenericErrorWithText(format!(
                "Missing environment variable {}",
                MSI_SECRET_ENV_KEY
            ))
        })?;
        let client = reqwest::Client::new();
        // "Metadata: true" is required by the endpoint; "X-IDENTITY-HEADER"
        // carries the secret per the REST protocol.
        let res_body = client
            .get(msi_endpoint_url)
            .header("Metadata", "true")
            .header("X-IDENTITY-HEADER", msi_secret)
            .send()
            .await
            .map_err(|e| AzureError::GenericErrorWithText(e.to_string()))?
            .text()
            .await
            .map_err(|e| AzureError::GenericErrorWithText(e.to_string()))?;
        let token_response = serde_json::from_str::<MsiTokenResponse>(&res_body)
            .map_err(|_| AzureError::GenericError)?;
        Ok(TokenResponse::new(
            token_response.access_token,
            token_response.expires_on,
        ))
    }
}

/// Wire format of the managed-identity token endpoint's JSON response.
#[derive(Debug, Clone, Deserialize)]
struct MsiTokenResponse {
    pub access_token: AccessToken,
    pub expires_on: DateTime<Utc>,
    pub token_type: String,
    pub resource: String,
}
| true |
c59e946bd37c360cde5d74584ff7d24cd7083e52
|
Rust
|
bytebuddha/cli-table
|
/src/error.rs
|
UTF-8
| 565 | 3.390625 | 3 |
[
"LicenseRef-scancode-unknown-license-reference",
"MIT",
"Apache-2.0"
] |
permissive
|
use std::fmt;
/// Errors returned by functions in this crate
#[derive(Debug)]
pub enum Error {
    /// Returned when there is a mismatch in number of columns in different rows of a [`Table`](struct.Table.html)
    MismatchedColumns,
}

impl std::error::Error for Error {}

impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Map each variant to its user-facing message, then emit it once.
        let message = match self {
            Error::MismatchedColumns => {
                "All rows in the table should have same number of cells/columns"
            }
        };
        f.write_str(message)
    }
}
| true |
8731a9bf870580e2380923086e45f6185938039a
|
Rust
|
preston-evans98/bitcoin-warp
|
/shared/src/merkle_tree.rs
|
UTF-8
| 2,814 | 3.078125 | 3 |
[
"MIT"
] |
permissive
|
use warp_crypto::merkleize;
pub use crate::hashes::MerkleRoot;
use crate::TxID;
// #[derive(Serializable, Deserializable, Debug)]
// pub struct MerkleRoot {
// root: [u8; 32],
// }
// impl PartialEq for MerkleRoot {
// fn eq(&self, other: &Self) -> bool {
// self.root == other.root
// }
// }
// struct MerkleNode (
// Vec<u8>
// );
// impl MerkleNode {
// pub fn new()
// }
impl MerkleRoot {
    /// Computes the merkle root of the given transaction ids.
    pub fn from_vec(txids: Vec<&TxID>) -> MerkleRoot {
        MerkleRoot::from(merkle_root(txids.iter().map(|h| *h)))
    }
    /// Computes the merkle root from any exact-size iterator over txids.
    pub fn from_iter<'a, I: ExactSizeIterator<Item = &'a TxID>>(iter: I) -> MerkleRoot {
        MerkleRoot::from(merkle_root(iter))
    }
    /// Borrows the raw 32-byte root.
    pub fn root(&self) -> &[u8; 32] {
        self.inner()
    }
}
/// Calculates the merkle root of a list of hashes inline
/// into the allocated slice.
///
/// In most cases, you'll want to use [merkle_root] instead.
/// Adapted from https://github.com/rust-bitcoin/rust-bitcoin/blob/master/src/util/hash.rs
pub fn merkle_root_inline(data: &mut [[u8; 32]]) -> [u8; 32] {
    // By convention an empty list hashes to all zeroes.
    if data.is_empty() {
        return [0u8; 32];
    }
    // Collapse the slice level by level (instead of recursing) until a
    // single root remains at index 0.
    let mut len = data.len();
    while len > 1 {
        let next_len = (len + 1) / 2;
        for idx in 0..next_len {
            let left = 2 * idx;
            // For an odd count the final element is paired with itself.
            let right = std::cmp::min(left + 1, len - 1);
            data[idx] = merkleize(&data[left], &data[right]);
        }
        len = next_len;
    }
    data[0]
}
/// Creates a merkle tree from an iterator over u256.
///
///Adapted from https://github.com/rust-bitcoin/rust-bitcoin/blob/master/src/util/hash.rs
pub fn merkle_root<'a, I: ExactSizeIterator<Item = &'a TxID>>(mut iter: I) -> [u8; 32] {
    // Base case: by convention an empty list hashes to all zeroes.
    if iter.len() == 0 {
        return [0u8; 32];
    }
    // If the vec contains only the coinbase, the merkle root is the coinbase
    if iter.len() == 1 {
        return iter.next().unwrap().to_le_bytes().clone();
    }
    // Recursion
    // Hash the leaves pairwise into a scratch buffer, then collapse it in
    // place. An odd trailing leaf is paired with itself (`unwrap_or(hash1)`).
    let half_len = iter.len() / 2 + iter.len() % 2;
    let mut alloc = Vec::with_capacity(half_len);
    while let Some(hash1) = iter.next() {
        let hash2 = iter.next().unwrap_or(hash1);
        alloc.push(merkleize(hash1.to_le_bytes(), hash2.to_le_bytes()))
    }
    merkle_root_inline(&mut alloc)
}
// impl MerkleRoot {
// pub fn new() -> MerkleRoot {
// MerkleRoot { root: u256::new() }
// }
// pub fn update(&mut self, hash: &u256) {
// self.hashes.push(hash)
// }
// pub fn matches(&self, other: &u256) -> bool {
// &self.root == other
// }
// pub fn finish(&mut self) {
// if self.hashes.len() == 1 {
// self.root = self.hashes.pop().expect("Merkle tree must have one hash");
// return;
// }
// }
// }
| true |
4273464c098f64e1e36d0c1155bb684a4902c224
|
Rust
|
hderms/kefka
|
/src/database.rs
|
UTF-8
| 756 | 2.671875 | 3 |
[] |
no_license
|
use sled::{IVec, Result as SledResult};
use crate::NodeConfig;
#[derive(Clone)]
pub struct Database {
    // Handle to the underlying sled embedded key-value store; cloning the
    // struct clones the handle, not the data.
    pub db: sled::Db,
}

impl Database {
    /// Opens (or creates) the sled database at `node_config.db_path`, tuned
    /// for high throughput.
    ///
    /// # Panics
    /// Panics if the database cannot be opened.
    // NOTE(review): cache capacity is 10 GB and flush interval is 1 s —
    // confirm these defaults suit every deployment target.
    pub fn default(node_config: NodeConfig) -> Database {
        let db_path = node_config.db_path;
        let config = sled::Config::default()
            .path(db_path)
            .cache_capacity(10_000_000_000)
            .flush_every_ms(Some(1000))
            .mode(sled::Mode::HighThroughput);
        let tree = config.open().expect("open");
        Database { db: tree }
    }
    /// Looks up `key`, returning `Ok(None)` when it is absent.
    pub fn query(&self, key: &[u8]) -> SledResult<Option<IVec>> {
        self.db.get(key)
    }
    /// Inserts `key` -> `value`, returning the previous value if any.
    pub fn insert(&self, key: &[u8], value: &[u8]) -> SledResult<Option<IVec>> {
        self.db.insert(key, value)
    }
}
| true |
d91189c5fc21f77e17d84c8d86fa281b97bfd5a5
|
Rust
|
CPSSD/cerberus
|
/libcerberus/examples/end-to-end.rs
|
UTF-8
| 2,110 | 2.578125 | 3 |
[
"MIT"
] |
permissive
|
extern crate cerberus;
#[macro_use]
extern crate error_chain;
use cerberus::*;
struct TestMapper;

impl Map for TestMapper {
    type Key = String;
    type Value = String;
    /// Emits (`word`, "test") for every whitespace-separated word of the input.
    fn map<E>(&self, input: MapInputKV, mut emitter: E) -> Result<()>
    where
        E: EmitIntermediate<Self::Key, Self::Value>,
    {
        // `try_for_each` short-circuits on the first emit error, exactly
        // like the original `for` loop with `?`.
        input
            .value
            .split_whitespace()
            .try_for_each(|word| emitter.emit(word.to_owned(), "test".to_owned()))
    }
}
struct TestReducer;

impl Reduce<String, String> for TestReducer {
    type Output = String;
    /// Concatenates all intermediate values for a key into a single string.
    fn reduce<E>(&self, input: IntermediateInputKV<String, String>, mut emitter: E) -> Result<()>
    where
        E: EmitFinal<Self::Output>,
    {
        // `concat` joins the strings with no separator, matching the
        // original fold-with-`+` accumulation.
        emitter.emit(input.values.concat())?;
        Ok(())
    }
}
struct TestPartitioner;

impl Partition<String, String> for TestPartitioner {
    /// Splits the keyspace in two around 'm' (lowercase keys) / 'M' (all
    /// other keys): partition 1 for characters above the threshold,
    /// partition 0 otherwise. Empty keys are an error.
    fn partition(&self, input: PartitionInputKV<String, String>) -> Result<u64> {
        let first_char = input
            .key
            .chars()
            .nth(0)
            .chain_err(|| "Cannot partition key of empty string.")?;
        let threshold = if first_char.is_lowercase() { 'm' } else { 'M' };
        Ok(u64::from(first_char > threshold))
    }
}
/// Builds the registry from the example mapper/reducer/partitioner and
/// hands control to the cerberus framework.
fn run() -> Result<()> {
    let test_mapper = TestMapper;
    let test_reducer = TestReducer;
    let test_partitioner = TestPartitioner;
    let matches = cerberus::parse_command_line();
    let registry = UserImplRegistryBuilder::new_no_combiner()
        .mapper(&test_mapper)
        .reducer(&test_reducer)
        .partitioner(&test_partitioner)
        .build()
        .chain_err(|| "Error building UserImplRegistry.")?;
    // NOTE: the source dump showed "®istry" here — mojibake for "&registry".
    cerberus::run(&matches, &registry)
}

// Macro to generate a quick error_chain main function.
// https://github.com/rust-lang-nursery/error-chain/blob/master/examples/quickstart.rs
quick_main!(run);
| true |
314af3d908ec32973decca42825d7fb7c9e47bbb
|
Rust
|
sugyan/leetcode
|
/problems/0856-score-of-parentheses/lib.rs
|
UTF-8
| 934 | 3.609375 | 4 |
[] |
no_license
|
pub struct Solution;

impl Solution {
    /// Scores a balanced parentheses string (LeetCode 856): "()" is worth 1,
    /// "AB" is A + B, and "(A)" is 2 * A.
    pub fn score_of_parentheses(s: String) -> i32 {
        // Stack of partial scores, one frame per open nesting level; the
        // bottom frame accumulates the final answer.
        let mut scores = vec![0i32];
        for byte in s.bytes() {
            match byte {
                b'(' => scores.push(0),
                b')' => {
                    let inner = scores.pop().unwrap();
                    // An empty pair scores 1; a wrapped group doubles.
                    let gained = if inner == 0 { 1 } else { 2 * inner };
                    *scores.last_mut().unwrap() += gained;
                }
                _ => unreachable!(),
            }
        }
        scores[0]
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Worked examples from the LeetCode 856 problem statement.
    #[test]
    fn example_1() {
        assert_eq!(1, Solution::score_of_parentheses(String::from("()")));
    }
    #[test]
    fn example_2() {
        assert_eq!(2, Solution::score_of_parentheses(String::from("(())")));
    }
    #[test]
    fn example_3() {
        assert_eq!(2, Solution::score_of_parentheses(String::from("()()")));
    }
}
| true |
0ab54e803919e6e78eacd61875fd494f598ec23c
|
Rust
|
activeledger/SDK-Rust
|
/src/key/import.rs
|
UTF-8
| 6,299 | 2.84375 | 3 |
[
"MIT"
] |
permissive
|
/*
* MIT License (MIT)
* Copyright (c) 2019 Activeledger
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
//! # Importer
//!
//! The importer module is used to import keys from files.
//!
//! The file passed to the importer must be JSON that matches the expected structure.
//!
//! Currently RSA and EC (SECP256K1) keys can be imported.
//!
//! ## Examples
//! This example will use an RSA key as an example but other keys should
//! use the same process. Examples will be provided if this is not the case.
//! Alternative functions will be listed at the end of the example.
//! ```
//! # use activeledger::key::import;
//! let rsa_key_path = "/path/to/key.json";
//!
//! # let rsa_key_path = "./testfiles/rsa.json";
//!
//! let rsa = import::import_rsa(&rsa_key_path).unwrap();
//! ```
//! The other functions for key importing are:
//!
//! ```
//! # use activeledger::key::import;
//! # let ec_key_path = "./testfiles/ec.json";
//! import::import_ec(ec_key_path).unwrap();
//! ```
//!
//! ## File Structure
//! The file you import should have the following structure, otherwise the import will fail.
//! ```JSON
//! {
//! "name":"",
//! "type":"",
//! "pem": {
//! "private": "",
//! "public":""
//! }
//! }
//! ```
extern crate serde_json;
use std::fs::File;
use std::io::Read;
use std::path::Path;
use std::str;
use crate::key::Pkcs8pem;
use super::error::{KeyError, KeyResult};
use super::EllipticCurve;
use super::RSA;
/// Key material parsed from an import file: the key's name plus its
/// public/private PEM pair.
struct ImportData {
    name: String,
    pkcs8pem: Pkcs8pem,
}
/// Import an RSA key from the specified file.
///
/// The document must be a JSON file of the expected structure else importing will fail.
/// An template of the structure is provided below if you are importing a file not exported
/// from this SDK.
///
/// # Example
/// ```
/// use activeledger::key::import;
///
/// let rsa_key_path = "/path/to/key.json";
/// # let rsa_key_path = "./testfiles/rsa.json";
/// let rsa = import::import_rsa(&rsa_key_path).unwrap();
/// ```
///
/// ## File Structure
/// The file you import should have the following structure, otherwise the import will fail.
/// ```JSON
/// {
/// "name":"",
/// "type":"",
/// "pem": {
/// "private": "",
/// "public":""
/// }
/// }
/// ```
pub fn import_rsa(path: &str) -> KeyResult<RSA> {
    // The expected type is matched against the raw JSON value, which keeps
    // its quotes — hence "\"rsa\"".
    import(path, "\"rsa\"").map(|data| RSA::create_from_pem(&data.name, &data.pkcs8pem))
}
/// Import an EC (SECP256K1) key from the specified file.
///
/// The document must be a JSON file of the expected structure else importing will fail.
/// An template of the structure is provided below if you are importing a file not exported
/// from this SDK.
///
/// # Example
/// ```
/// use activeledger::key::import;
/// let ec_key_path = "/path/to/key.json";
/// # let ec_key_path = "./testfiles/ec.json";
///
/// let ec = import::import_ec(&ec_key_path).unwrap();
/// ```
///
/// ## File Structure
/// The file you import should have the following structure, otherwise the import will fail.
/// ```JSON
/// {
/// "name":"",
/// "type":"",
/// "pem": {
/// "private": "",
/// "public":""
/// }
/// }
/// ```
pub fn import_ec(path: &str) -> KeyResult<EllipticCurve> {
    // The expected type is matched against the raw JSON value, which keeps
    // its quotes — hence "\"ec\"".
    import(path, "\"ec\"")
        .map(|data| EllipticCurve::create_from_pem(&data.name, &data.pkcs8pem))
}
/// Handle opening the file and returning the contents as JSON
///
/// Error codes: 4000 = file could not be opened, 4001 = unreadable or
/// malformed contents, 4002 = key type mismatch.
fn import(path: &str, expected_type: &str) -> KeyResult<ImportData> {
    let path = Path::new(path);
    let mut file = File::open(&path).map_err(|_| KeyError::ImportError(4000))?;

    let mut contents = String::new();
    file.read_to_string(&mut contents)
        .map_err(|_| KeyError::ImportError(4001))?;

    let data_obj: serde_json::Value =
        serde_json::from_str(&contents).map_err(|_| KeyError::ImportError(4001))?;

    let name = data_obj["name"]
        .as_str()
        .ok_or(KeyError::ImportError(4001))?;
    let pem_public = data_obj["pem"]["public"]
        .as_str()
        .ok_or(KeyError::ImportError(4001))?;
    let pem_private = data_obj["pem"]["private"]
        .as_str()
        .ok_or(KeyError::ImportError(4001))?;

    // `to_string` on a JSON string keeps its quotes, which is why callers
    // pass e.g. "\"rsa\"" as the expected type.
    if data_obj["type"].to_string() != expected_type {
        return Err(KeyError::ImportError(4002));
    }

    Ok(ImportData {
        name: name.to_string(),
        pkcs8pem: Pkcs8pem {
            public: pem_public.to_string(),
            private: pem_private.to_string(),
        },
    })
}
#[cfg(test)]
mod tests {
    use crate::key::import;
    // Smoke tests: importing the checked-in fixture files must succeed.
    #[test]
    fn import_rsa() {
        import::import_rsa("./testfiles/rsa.json").unwrap();
    }
    #[test]
    fn import_ec() {
        import::import_ec("./testfiles/ec.json").unwrap();
    }
}
| true |
e906bbbcb9063809e7e37ed875a9e14d043288b9
|
Rust
|
brianjd/merit
|
/humanize/src/lib.rs
|
UTF-8
| 8,543 | 3.84375 | 4 |
[
"MIT"
] |
permissive
|
//! Formatter for human readable number or struct
//!
//! This crate exposes `humanize` fn to format numbers to human readable string
//!
//! # Quick Start
//!
//! Add `humanize` to your `Cargo.toml` as as a dependency.
//!
//! # Examples
//!
//! ```rust
//! use humanize::*;
//!
//! fn main() {
//! let opts = HumanizeOptions::builder().build().unwrap();
//! let human_readable = 1234;
//! assert_eq!(human_readable.humanize(opts), Some("1.23K".to_string()))
//! }
//! ```
//!
use derive_builder::Builder;
/// Options to pass to humanize function
#[derive(Debug, Builder)]
pub struct HumanizeOptions {
    /// Step between consecutive units (e.g. 1000 for SI-style suffixes).
    #[builder(default = "1000")]
    denominator: usize,
    /// Number of decimal places to keep.
    #[builder(default = "2")]
    precision: usize,
    /// Keep trailing zero decimals ("1.0K") instead of dropping them ("1K").
    #[builder(default = "false")]
    keep_zero: bool,
    /// Separator between the integer and fractional parts.
    #[builder(default = r#"".""#)]
    decimal_separator: &'static str,
    /// Lowercase the unit suffix ("1k" instead of "1K").
    #[builder(default = "false")]
    lower_case: bool,
    /// Insert a space between the number and the suffix.
    #[builder(default = "false")]
    space: bool,
    /// Unit suffixes, smallest to largest; values past the largest unit are
    /// clamped to the last entry.
    #[builder(default = r#"vec!["", "K", "M", "B", "T", "P", "E"]"#)]
    units: Vec<&'static str>,
}
impl HumanizeOptions {
    /// Create a builder for HumanizeOptions
    pub fn builder() -> HumanizeOptionsBuilder {
        HumanizeOptionsBuilder::default()
    }
}

impl AsRef<HumanizeOptions> for HumanizeOptions {
    // Lets callers pass either `HumanizeOptions` or `&HumanizeOptions` to
    // `humanize`, which accepts `T: AsRef<HumanizeOptions>`.
    fn as_ref(&self) -> &HumanizeOptions {
        self
    }
}

/// Trait that can be implemented for any type.
pub trait Humanize {
    /// formats a type to human readable string
    fn humanize<T: AsRef<HumanizeOptions>>(&self, opts: T) -> Option<String>;
}
// Implements `Humanize` for unsigned integer types.
macro_rules! impl_humanize_u {
    (for $($t: ty)*) => ($(
        impl Humanize for $t {
            /// Formats the value as e.g. "1.23K" according to `opts`.
            fn humanize<T: AsRef<HumanizeOptions>>(&self, opts: T) -> Option<String> {
                let opts = opts.as_ref();
                let denominator = opts.denominator as f64;
                // Divide down until the value fits under one denominator,
                // counting how many unit steps were taken.
                let mut val: f64 = *self as f64;
                let mut unit = 0;
                while val >= denominator {
                    val /= denominator;
                    unit += 1;
                }
                // BUGFIX: `>=` instead of `>` — when `unit == units.len()`
                // the old code fell through to `units[unit]` and panicked
                // with an out-of-bounds index; clamp to the last unit.
                let mut suffix: String = if unit >= opts.units.len() {
                    opts.units.last().unwrap().to_string()
                } else {
                    opts.units[unit].to_owned()
                };
                if opts.lower_case {
                    suffix = suffix.to_lowercase();
                }
                // Drop the decimals entirely when they round to zero, unless
                // the caller asked to keep them.
                let fract = (val.fract() * (10.0f64).powi(opts.precision as i32)).round()
                    / 10.0f64.powi(opts.precision as i32);
                let precision: usize = if fract == 0.0 && !opts.keep_zero {
                    0
                } else {
                    opts.precision as usize
                };
                let space = if opts.space { " " } else { "" };
                let mut formatted: String = format!("{:.*}{}{}", precision, val, space, suffix);
                if opts.decimal_separator != "." {
                    formatted = formatted.replace(".", opts.decimal_separator);
                }
                Some(formatted)
            }
        }
    )*)
}
// Implements `Humanize` for signed integer types by delegating to the
// unsigned implementation on the magnitude.
macro_rules! impl_humanize_i {
    (for $($t: ty)*) => ($(
        impl Humanize for $t {
            /// Formats the value, prefixing "-" when negative.
            fn humanize<T: AsRef<HumanizeOptions>>(&self, _opts: T) -> Option<String> {
                let opts: &HumanizeOptions = _opts.as_ref();
                let sign = if *self < 0 { "-" } else { "" };
                // BUGFIX: `unsigned_abs` instead of `abs` — `abs()` panics in
                // debug builds (and wraps in release) on the minimum value,
                // e.g. i64::MIN; `unsigned_abs` returns the true magnitude.
                Some(format!("{}{}", sign, (self.unsigned_abs() as u64).humanize(opts).unwrap()))
            }
        }
    )*)
}
// Implements `Humanize` for floating-point types by delegating to the
// unsigned-integer implementation on the truncated magnitude.
macro_rules! impl_humanize_f {
    (for $($t: ty)*) => ($(
        impl Humanize for $t {
            fn humanize<T: AsRef<HumanizeOptions>>(&self, _opts: T) -> Option<String>{
                let opts: &HumanizeOptions = _opts.as_ref();
                let sign = if *self < 0.0 { "-" } else { "" };
                // NOTE(review): the `as u64` cast truncates the fractional
                // part before humanizing (e.g. 0.9 renders as "0") — confirm
                // this is the intended behavior for sub-unit values.
                Some(format!("{}{}", sign, (self.abs() as u64).humanize(opts).unwrap()))
            }
        }
    )*)
}

// Wire the implementations up for the primitive numeric types.
impl_humanize_u!(for usize u8 u16 u32 u64);
impl_humanize_i!(for isize i8 i16 i32 i64);
impl_humanize_f!(for f32 f64);
#[cfg(test)]
mod tests {
    use super::{Humanize, HumanizeOptions};
    // Default options: denominator 1000, precision 2, suffixes "", K, M, B, T…
    #[test]
    fn test_usize() {
        let opt = HumanizeOptions::builder().build().unwrap();
        assert_eq!(100.humanize(&opt), Some("100".to_owned()));
        assert_eq!(1000.humanize(&opt), Some("1K".to_owned()));
        assert_eq!(1000000.humanize(&opt), Some("1M".to_owned()));
        assert_eq!(1000000000.humanize(&opt), Some("1B".to_owned()));
        assert_eq!(1000000000000u64.humanize(&opt), Some("1T".to_owned()));
    }
    #[test]
    fn test_isize() {
        let opt = HumanizeOptions::builder().build().unwrap();
        assert_eq!((-100).humanize(&opt), Some("-100".to_string()));
        assert_eq!((100).humanize(&opt), Some("100".to_owned()));
        assert_eq!((-1000).humanize(&opt), Some("-1K".to_owned()));
        assert_eq!((-1000000).humanize(&opt), Some("-1M".to_owned()));
        assert_eq!((-1000000000).humanize(&opt), Some("-1B".to_owned()));
        assert_eq!((-1000000000000i64).humanize(&opt), Some("-1T".to_owned()));
    }
    // Floats are truncated to their integer magnitude before humanizing.
    #[test]
    fn test_floats() {
        let opt = HumanizeOptions::builder().build().unwrap();
        assert_eq!((-100f32).humanize(&opt), Some("-100".to_string()));
        assert_eq!((100f32).humanize(&opt), Some("100".to_owned()));
        assert_eq!((-1000f32).humanize(&opt), Some("-1K".to_owned()));
        assert_eq!((-1000000f32).humanize(&opt), Some("-1M".to_owned()));
        assert_eq!((-1000000000f32).humanize(&opt), Some("-1B".to_owned()));
        assert_eq!((-1000000000000f64).humanize(&opt), Some("-1T".to_owned()));
        assert_eq!((-12345.678f32).humanize(&opt), Some("-12.35K".to_owned()))
    }
    #[test]
    fn test_lowercase_suffix() {
        let opt = HumanizeOptions::builder().lower_case(true).build().unwrap();
        assert_eq!(1000.humanize(&opt), Some("1k".to_owned()));
        assert_eq!(1000000.humanize(&opt), Some("1m".to_owned()));
        assert_eq!(1000000000.humanize(&opt), Some("1b".to_owned()));
        assert_eq!(1000000000000u64.humanize(&opt), Some("1t".to_owned()));
    }
    #[test]
    fn test_precision() {
        let value = 12345.6789;
        let mut opts = HumanizeOptions::builder();
        assert_eq!(
            value.humanize(&opts.precision(0usize).build().unwrap()),
            Some("12K".to_owned())
        );
        assert_eq!(
            value.humanize(&opts.precision(1usize).build().unwrap()),
            Some("12.3K".to_owned())
        );
        assert_eq!(
            value.humanize(&opts.precision(2usize).build().unwrap()),
            Some("12.35K".to_owned())
        );
        assert_eq!(
            value.humanize(&opts.precision(3usize).build().unwrap()),
            Some("12.345K".to_owned())
        );
    }
    // `keep_zero` controls whether all-zero decimals are printed or dropped.
    #[test]
    fn test_precision_with_zero() {
        let mut opt_builder = HumanizeOptions::builder();
        let opt = opt_builder.precision(1usize).build().unwrap();
        assert_eq!(1010000000.humanize(&opt), Some("1B".to_owned()));
        assert_eq!(1060000000.humanize(&opt), Some("1.1B".to_owned()));
        assert_eq!(1810000000.humanize(&opt), Some("1.8B".to_owned()));
        let opt = opt_builder
            .keep_zero(true)
            .precision(1usize)
            .build()
            .unwrap();
        assert_eq!(1010000000.humanize(&opt), Some("1.0B".to_owned()));
        assert_eq!(1060000000.humanize(&opt), Some("1.1B".to_owned()));
        assert_eq!(1810000000.humanize(&opt), Some("1.8B".to_owned()));
        let opt = opt_builder
            .keep_zero(false)
            .precision(2usize)
            .build()
            .unwrap();
        assert_eq!(1001000000.humanize(&opt), Some("1B".to_owned()));
        assert_eq!(1060000000.humanize(&opt), Some("1.06B".to_owned()));
        assert_eq!(1810000000.humanize(&opt), Some("1.81B".to_owned()));
        let opt = opt_builder
            .precision(2usize)
            .keep_zero(true)
            .build()
            .unwrap();
        assert_eq!(1001000000.humanize(&opt), Some("1.00B".to_owned()));
        assert_eq!(1060000000.humanize(&opt), Some("1.06B".to_owned()));
        assert_eq!(1810000000.humanize(&opt), Some("1.81B".to_owned()));
        let opt = opt_builder
            .keep_zero(false)
            .precision(3usize)
            .build()
            .unwrap();
        assert_eq!(1000100000.humanize(&opt), Some("1B".to_owned()));
        assert_eq!(1060000000.humanize(&opt), Some("1.060B".to_owned()));
        assert_eq!(1813450000.humanize(&opt), Some("1.813B".to_owned()));
        let opt = opt_builder
            .keep_zero(true)
            .precision(3usize)
            .build()
            .unwrap();
        assert_eq!(1000100000.humanize(&opt), Some("1.000B".to_owned()));
        assert_eq!(1060000000.humanize(&opt), Some("1.060B".to_owned()));
        assert_eq!(1813450000.humanize(&opt), Some("1.813B".to_owned()));
    }
    #[test]
    fn test_decimal_separator() {
        let value = 12345.6789;
        let opt = HumanizeOptions::builder()
            .decimal_separator("_")
            .build()
            .unwrap();
        assert_eq!(value.humanize(&opt), Some("12_35K".to_owned()));
    }
    // Values past the largest custom unit are clamped to the last suffix.
    #[test]
    fn test_units() {
        let value = 123450.6789;
        let opt = HumanizeOptions::builder()
            .units(vec!["m", "km"])
            .build()
            .unwrap();
        assert_eq!(value.humanize(&opt), Some("123.45km".to_owned()));
    }
}
| true |
e5e36e5a0f0eced4ee5646d7091ce661485df81f
|
Rust
|
InteractiveComputerGraphics/higher_order_embedded_fem
|
/fenris/tests/unit_tests/reorder.rs
|
UTF-8
| 999 | 2.796875 | 3 |
[
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] |
permissive
|
use fenris::reorder::{cuthill_mckee, reverse_cuthill_mckee};
use fenris::sparse::CsrMatrix;
use nalgebra::DMatrix;
#[test]
fn cuthill_mckee_basic_examples() {
    // Basic example
    {
        let matrix = DMatrix::from_row_slice(4, 4, &[1, 0, 1, 1, 0, 1, 0, 1, 1, 0, 1, 0, 1, 1, 0, 1]);
        let pattern = CsrMatrix::from(&matrix).sparsity_pattern();
        let perm = cuthill_mckee(&pattern);
        assert_eq!(perm.perm(), &[1, 3, 0, 2]);
        // Reverse Cuthill-McKee must equal plain Cuthill-McKee with the
        // permutation reversed.
        let mut rcm_expected_perm = perm.clone();
        rcm_expected_perm.reverse();
        assert_eq!(&reverse_cuthill_mckee(&pattern), &rcm_expected_perm);
    }
    // Diagonal pattern
    // Note that the "standard" CM algorithm
    // (comment truncated in the original — presumably: keeps an already
    // diagonal/banded pattern in its original order, as asserted below.)
    {
        let matrix = DMatrix::from_row_slice(4, 4, &[1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1]);
        let pattern = CsrMatrix::from(&matrix).sparsity_pattern();
        let perm = cuthill_mckee(&pattern);
        assert_eq!(perm.perm(), &[0, 1, 2, 3]);
    }
    // TODO: Property-based tests
}
| true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.