{ // 获取包含Hugging Face文本的span元素 const spans = link.querySelectorAll('span.whitespace-nowrap, span.hidden.whitespace-nowrap'); spans.forEach(span => { if (span.textContent && span.textContent.trim().match(/Hugging\s*Face/i)) { span.textContent = 'AI快站'; } }); }); // 替换logo图片的alt属性 document.querySelectorAll('img[alt*="Hugging"], img[alt*="Face"]').forEach(img => { if (img.alt.match(/Hugging\s*Face/i)) { img.alt = 'AI快站 logo'; } }); } // 替换导航栏中的链接 function replaceNavigationLinks() { // 已替换标记,防止重复运行 if (window._navLinksReplaced) { return; } // 已经替换过的链接集合,防止重复替换 const replacedLinks = new Set(); // 只在导航栏区域查找和替换链接 const headerArea = document.querySelector('header') || document.querySelector('nav'); if (!headerArea) { return; } // 在导航区域内查找链接 const navLinks = headerArea.querySelectorAll('a'); navLinks.forEach(link => { // 如果已经替换过,跳过 if (replacedLinks.has(link)) return; const linkText = link.textContent.trim(); const linkHref = link.getAttribute('href') || ''; // 替换Spaces链接 - 仅替换一次 if ( (linkHref.includes('/spaces') || linkHref === '/spaces' || linkText === 'Spaces' || linkText.match(/^s*Spacess*$/i)) && linkText !== 'OCR模型免费转Markdown' && linkText !== 'OCR模型免费转Markdown' ) { link.textContent = 'OCR模型免费转Markdown'; link.href = 'https://fast360.xyz'; link.setAttribute('target', '_blank'); link.setAttribute('rel', 'noopener noreferrer'); replacedLinks.add(link); } // 删除Posts链接 else if ( (linkHref.includes('/posts') || linkHref === '/posts' || linkText === 'Posts' || linkText.match(/^s*Postss*$/i)) ) { if (link.parentNode) { link.parentNode.removeChild(link); } replacedLinks.add(link); } // 替换Docs链接 - 仅替换一次 else if ( (linkHref.includes('/docs') || linkHref === '/docs' || linkText === 'Docs' || linkText.match(/^s*Docss*$/i)) && linkText !== '模型下载攻略' ) { link.textContent = '模型下载攻略'; link.href = '/'; replacedLinks.add(link); } // 删除Enterprise链接 else if ( (linkHref.includes('/enterprise') || linkHref === '/enterprise' || linkText === 'Enterprise' || linkText.match(/^s*Enterprises*$/i)) ) { if (link.parentNode) { link.parentNode.removeChild(link); } replacedLinks.add(link); } }); // 查找可能嵌套的Spaces和Posts文本 const textNodes = []; function findTextNodes(element) { if (element.nodeType === Node.TEXT_NODE) { const text = element.textContent.trim(); if (text === 'Spaces' || text === 'Posts' || text === 'Enterprise') { textNodes.push(element); } } else { for (const child of element.childNodes) { findTextNodes(child); } } } // 只在导航区域内查找文本节点 findTextNodes(headerArea); // 替换找到的文本节点 textNodes.forEach(node => { const text = node.textContent.trim(); if (text === 'Spaces') { node.textContent = node.textContent.replace(/Spaces/g, 'OCR模型免费转Markdown'); } else if (text === 'Posts') { // 删除Posts文本节点 if (node.parentNode) { node.parentNode.removeChild(node); } } else if (text === 'Enterprise') { // 删除Enterprise文本节点 if (node.parentNode) { node.parentNode.removeChild(node); } } }); // 标记已替换完成 window._navLinksReplaced = true; } // 替换代码区域中的域名 function replaceCodeDomains() { // 特别处理span.hljs-string和span.njs-string元素 document.querySelectorAll('span.hljs-string, span.njs-string, span[class*="hljs-string"], span[class*="njs-string"]').forEach(span => { if (span.textContent && span.textContent.includes('huggingface.co')) { span.textContent = span.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 替换hljs-string类的span中的域名(移除多余的转义符号) document.querySelectorAll('span.hljs-string, span[class*="hljs-string"]').forEach(span => { if (span.textContent && span.textContent.includes('huggingface.co')) { span.textContent = 
span.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 替换pre和code标签中包含git clone命令的域名 document.querySelectorAll('pre, code').forEach(element => { if (element.textContent && element.textContent.includes('git clone')) { const text = element.innerHTML; if (text.includes('huggingface.co')) { element.innerHTML = text.replace(/huggingface.co/g, 'aifasthub.com'); } } }); // 处理特定的命令行示例 document.querySelectorAll('pre, code').forEach(element => { const text = element.innerHTML; if (text.includes('huggingface.co')) { // 针对git clone命令的专门处理 if (text.includes('git clone') || text.includes('GIT_LFS_SKIP_SMUDGE=1')) { element.innerHTML = text.replace(/huggingface.co/g, 'aifasthub.com'); } } }); // 特别处理模型下载页面上的代码片段 document.querySelectorAll('.flex.border-t, .svelte_hydrator, .inline-block').forEach(container => { const content = container.innerHTML; if (content && content.includes('huggingface.co')) { container.innerHTML = content.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 特别处理模型仓库克隆对话框中的代码片段 try { // 查找包含"Clone this model repository"标题的对话框 const cloneDialog = document.querySelector('.svelte_hydration_boundary, [data-target="MainHeader"]'); if (cloneDialog) { // 查找对话框中所有的代码片段和命令示例 const codeElements = cloneDialog.querySelectorAll('pre, code, span'); codeElements.forEach(element => { if (element.textContent && element.textContent.includes('huggingface.co')) { if (element.innerHTML.includes('huggingface.co')) { element.innerHTML = element.innerHTML.replace(/huggingface.co/g, 'aifasthub.com'); } else { element.textContent = element.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } } }); } // 更精确地定位克隆命令中的域名 document.querySelectorAll('[data-target]').forEach(container => { const codeBlocks = container.querySelectorAll('pre, code, span.hljs-string'); codeBlocks.forEach(block => { if (block.textContent && block.textContent.includes('huggingface.co')) { if (block.innerHTML.includes('huggingface.co')) { block.innerHTML = block.innerHTML.replace(/huggingface.co/g, 'aifasthub.com'); } else { block.textContent = block.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } } }); }); } catch (e) { // 错误处理但不打印日志 } } // 当DOM加载完成后执行替换 if (document.readyState === 'loading') { document.addEventListener('DOMContentLoaded', () => { replaceHeaderBranding(); replaceNavigationLinks(); replaceCodeDomains(); // 只在必要时执行替换 - 3秒后再次检查 setTimeout(() => { if (!window._navLinksReplaced) { console.log('[Client] 3秒后重新检查导航链接'); replaceNavigationLinks(); } }, 3000); }); } else { replaceHeaderBranding(); replaceNavigationLinks(); replaceCodeDomains(); // 只在必要时执行替换 - 3秒后再次检查 setTimeout(() => { if (!window._navLinksReplaced) { console.log('[Client] 3秒后重新检查导航链接'); replaceNavigationLinks(); } }, 3000); } // 增加一个MutationObserver来处理可能的动态元素加载 const observer = new MutationObserver(mutations => { // 检查是否导航区域有变化 const hasNavChanges = mutations.some(mutation => { // 检查是否存在header或nav元素变化 return Array.from(mutation.addedNodes).some(node => { if (node.nodeType === Node.ELEMENT_NODE) { // 检查是否是导航元素或其子元素 if (node.tagName === 'HEADER' || node.tagName === 'NAV' || node.querySelector('header, nav')) { return true; } // 检查是否在导航元素内部 let parent = node.parentElement; while (parent) { if (parent.tagName === 'HEADER' || parent.tagName === 'NAV') { return true; } parent = parent.parentElement; } } return false; }); }); // 只在导航区域有变化时执行替换 if (hasNavChanges) { // 重置替换状态,允许再次替换 window._navLinksReplaced = false; replaceHeaderBranding(); replaceNavigationLinks(); } }); // 开始观察document.body的变化,包括子节点 if (document.body) { 
observer.observe(document.body, { childList: true, subtree: true }); } else { document.addEventListener('DOMContentLoaded', () => { observer.observe(document.body, { childList: true, subtree: true }); }); } })(); \", response);\n Ok(Response::with((Status::Ok, response, mime)))\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1127,"cells":{"blob_id":{"kind":"string","value":"1b11341c511b87dd9b770d99be27b805aaba22e9"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"nikolabr/base64rs"},"path":{"kind":"string","value":"/src/lib.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":3224,"string":"3,224"},"score":{"kind":"number","value":3.375,"string":"3.375"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"pub mod base64 {\n static BASE64_TABLE: &str = \"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=\";\n\n fn get_ascii(ch: u8) -> u8 {\n match BASE64_TABLE.chars().position(|x| x == ch as char) {\n None => 0,\n Some(n) => n as u8\n }\n }\n\n fn get_b64_char(val: u8) -> char { \n match BASE64_TABLE.chars().nth((val & 0x3F) as usize) { \n None => '0', \n Some(x) => x\n }\n }\n\n pub fn is_valid_b64(data: &String) -> bool { \n if (data.len() == 0) || (data.len() % 4 != 0) { \n return false\n }\n else { \n let char_is_b64 = |x| { (0x30..=0x39).contains(x) || (0x41..=0x90).contains(x) \n || (0x61..=0x80).contains(x) || *x == 0x2F || *x == 0x2B || *x == 0x3D };\n return data.as_bytes().iter().all(char_is_b64)\n }\n }\n\n fn encode_chunk(chunk: &[u8]) -> [char; 4]{\n let mut tmp : [char; 4] = ['0'; 4];\n\n tmp[0] = get_b64_char(chunk[0] >> 2);\n tmp[1] = get_b64_char((chunk[0] << 4) | (chunk[1] >> 4));\n tmp[2] = get_b64_char((chunk[1] << 2) | (chunk[2] >> 6));\n tmp[3] = get_b64_char(chunk[2]);\n\n tmp\n }\n\n fn add_padding(chunk: &[u8]) -> [char; 4]{\n let mut tmp : [char; 4] = ['0'; 4];\n tmp[0] = get_b64_char(chunk[0] >> 2);\n\n match chunk.len() {\n 1 => {\n tmp[1] = get_b64_char((chunk[0] << 4) | 0x00); \n tmp[2] = '=';\n }\n 2 => {\n tmp[1] = get_b64_char((chunk[0] << 4) | (chunk[1] >> 4));\n tmp[2] = get_b64_char((chunk[1] << 2) | (chunk[2] >> 6));\n }\n _ => panic!(\"Invalid padding!\")\n };\n\n tmp[3] = '=';\n tmp\n\n }\n\n fn decode_chunk(chunk: &[u8]) -> Vec{\n let mut tmp : Vec = Vec::new();\n\n tmp.push((get_ascii(chunk[0]) << 2) | (get_ascii(chunk[1]) >> 4));\n if chunk[2] != '=' as u8 {\n tmp.push((get_ascii(chunk[1]) << 4) | (get_ascii(chunk[2]) >> 2));\n if chunk[3] != '=' as u8 {\n tmp.push((get_ascii(chunk[2]) << 6) | get_ascii(chunk[3]));\n }\n }\n\n tmp\n }\n\n pub fn encode(data: &Vec) -> String {\n let mut res = String::new();\n let chunks = data.chunks_exact(3);\n let remainder = chunks.remainder(); // For loop will consume the iterator, so the remainder must be copied\n\n for chunk in chunks {\n res.extend(encode_chunk(chunk)); \n }\n if !remainder.is_empty() { \n res.extend(add_padding(remainder));\n }\n\n res\n }\n\n pub fn decode(data: &String) -> Result, String> {\n match is_valid_b64(data){\n true => {\n let mut res: Vec = Vec::new();\n let bytes = data.as_bytes();\n let chunks = bytes.chunks(4);\n\n for chunk in chunks {\n res.extend(decode_chunk(chunk)); \n }\n return Ok(res);\n },\n false => {\n return Err(\"Input is not Base64!\".to_string());\n }\n }; \n 
}\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1128,"cells":{"blob_id":{"kind":"string","value":"810107b60d4368049d58d73dd702c5b8dc9f4921"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"arothstein/sandbox-rust"},"path":{"kind":"string","value":"/the-book/03/shadowing/src/main.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":798,"string":"798"},"score":{"kind":"number","value":4.375,"string":"4.375"},"int_score":{"kind":"number","value":4,"string":"4"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"fn main() {\n let x = 5;\n\n // We can shadow a variable by using the same variable’s name and repeating the use of the let keyword as follows:\n let x = x + 1;\n\n let x = x * 2;\n\n println!(\"The value of x is: {}\", x);\n\n // Shadowing is different from marking a variable as mut, because we’ll get a compile-time error if we\n // accidentally try to reassign to this variable without using the let keyword. By using let, we can perform\n // a few transformations on a value but have the variable be immutable after those transformations have been completed.\n\n // The other difference between mut and shadowing is that because we’re effectively creating a new\n // variable when we use the let keyword again, we can change the type of the value but reuse the same name.\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1129,"cells":{"blob_id":{"kind":"string","value":"14e00dc3fb6c55fcec1bb334ebc82ba020b5af9b"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"EFanZh/n-body"},"path":{"kind":"string","value":"/src/basic_renderer.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":1785,"string":"1,785"},"score":{"kind":"number","value":2.8125,"string":"2.8125"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"use crate::configuration::Color;\nuse crate::renderer::Renderer;\nuse cgmath::Vector2;\nuse itertools::izip;\nuse wasm_bindgen::JsValue;\nuse web_sys::CanvasRenderingContext2d;\n\npub struct BasicRenderer {\n canvas_context: CanvasRenderingContext2d,\n body_colors: Vec,\n trail_widths: Vec,\n}\n\nimpl BasicRenderer {\n pub fn new(\n canvas_context: CanvasRenderingContext2d,\n width: f64,\n height: f64,\n body_colors: Vec,\n trail_widths: Vec,\n ) -> BasicRenderer {\n canvas_context.set_global_composite_operation(\"screen\").unwrap();\n canvas_context.set_fill_style(&JsValue::from_str(\"black\"));\n canvas_context.fill_rect(-width * 0.5, -height * 0.5, width, height);\n\n BasicRenderer {\n canvas_context,\n body_colors: body_colors.iter().map(|c| c.to_rgba()).collect(),\n trail_widths,\n }\n }\n}\n\nimpl Renderer for BasicRenderer {\n fn render(&mut self, position_histories: &[Vec>]) {\n for (position_history, color, trail_width) in izip!(position_histories, &self.body_colors, &self.trail_widths) {\n if position_history.len() > 1 {\n self.canvas_context.set_stroke_style(&JsValue::from_str(&color));\n self.canvas_context.set_line_width(*trail_width);\n\n self.canvas_context.begin_path();\n\n let (first_position, rest_positions) = position_history.split_first().unwrap();\n\n 
self.canvas_context.move_to(first_position.x, first_position.y);\n\n for position in rest_positions {\n self.canvas_context.line_to(position.x, position.y);\n }\n\n self.canvas_context.stroke();\n }\n }\n }\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1130,"cells":{"blob_id":{"kind":"string","value":"e23266534b9a41311fe49104eec99a6650a9eaea"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"Vicfred/codeforces-rust"},"path":{"kind":"string","value":"/insomnia_cure_148A.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":1055,"string":"1,055"},"score":{"kind":"number","value":2.828125,"string":"2.828125"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":["BSD-3-Clause"],"string":"[\n \"BSD-3-Clause\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"// https://codeforces.com/problemset/problem/148/A\n// implementation, simulation, simple math\nuse std::io;\n\nfn main() {\n let mut k = String::new();\n\n io::stdin()\n .read_line(&mut k)\n .unwrap();\n\n let k: i64 = k.trim().parse().unwrap();\n\n let mut l = String::new();\n\n io::stdin()\n .read_line(&mut l)\n .unwrap();\n\n let l: i64 = l.trim().parse().unwrap();\n\n let mut m = String::new();\n\n io::stdin()\n .read_line(&mut m)\n .unwrap();\n\n let m: i64 = m.trim().parse().unwrap();\n\n let mut n = String::new();\n\n io::stdin()\n .read_line(&mut n)\n .unwrap();\n\n let n: i64 = n.trim().parse().unwrap();\n\n let mut d = String::new();\n\n io::stdin()\n .read_line(&mut d)\n .unwrap();\n\n let d: i64 = d.trim().parse().unwrap();\n\n let mut dragons = 0;\n\n for idx in 1..d+1 {\n if idx % k == 0\n || idx % l == 0\n || idx % m == 0\n || idx % n == 0 {\n dragons += 1;\n }\n }\n\n println!(\"{}\", dragons);\n}\n\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1131,"cells":{"blob_id":{"kind":"string","value":"b31eb9662d953601504e039d375052a60afc0190"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"geom3trik/fft_resample"},"path":{"kind":"string","value":"/examples/demo.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":1768,"string":"1,768"},"score":{"kind":"number","value":2.703125,"string":"2.703125"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"\nuse std::{error::Error, fs::File, io::BufReader, usize};\n\nuse fft_resample::fft_upsample;\nuse hound::{WavReader, WavSpec, SampleFormat, WavWriter};\n\nfn main() -> Result<(), hound::Error> {\n // Replace with path to test file\n let path = \"C:/Users/Setup/Music/file_example_WAV_5MG.wav\";\n let mut reader = WavReader::open(path)?;\n let spec = reader.spec();\n let mut data = Vec::with_capacity((spec.channels as usize) * (reader.duration() as usize));\n match (spec.bits_per_sample, spec.sample_format) {\n (16, SampleFormat::Int) => {\n for sample in reader.samples::() {\n data.push((sample? as f32) / (0x7fffi32 as f32));\n }\n }\n (24, SampleFormat::Int) => {\n for sample in reader.samples::() {\n let val = (sample? as f32) / (0x00ff_ffffi32 as f32);\n data.push(val);\n }\n }\n (32, SampleFormat::Int) => {\n for sample in reader.samples::() {\n data.push((sample? 
as f32) / (0x7fff_ffffi32 as f32));\n }\n }\n (32, SampleFormat::Float) => {\n for sample in reader.samples::() {\n data.push(sample?);\n }\n }\n _ => return Err(hound::Error::Unsupported),\n }\n\n let upsample_length = (data.len() as f32 / 44100.0) * 48000.0;\n\n let resampled_buffer = fft_upsample(&data, upsample_length.round() as usize, spec.channels as usize);\n\n let mut writer = WavWriter::create(\"test3.wav\", spec)?;\n\n for t in 0..resampled_buffer.len() {\n let sample = resampled_buffer[t];\n let amplitude = i16::MAX as f32;\n writer.write_sample((sample * amplitude) as i16)?;\n }\n writer.finalize()?;\n\n Ok(())\n}"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1132,"cells":{"blob_id":{"kind":"string","value":"4c05ac454e5ce39285e5543013d9bd27fae4b091"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"SeijiEmery/subliminal"},"path":{"kind":"string","value":"/structural_parser/src/text_style.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":641,"string":"641"},"score":{"kind":"number","value":3.5,"string":"3.5"},"int_score":{"kind":"number","value":4,"string":"4"},"detected_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"\npub struct TextStyle {\n font: Option,\n color: Option,\n size: Option,\n italic: Option,\n bold: Option,\n}\nimpl Default for TextStyle {\n fn default () -> TextStyle {\n TextStyle { font: None, color: None, size: None, italic: false, bold: false }\n }\n}\nimpl BitOr for TextStyle {\n type Output = Self;\n fn bitor (self, rhs: Self) -> Self {\n TextStyle {\n font: font.or(rhs.font),\n color: color.or(rhs.font),\n size: size.or(rhs.size),\n italic: italic.or(rhs.italic),\n bold: bold.or(rhs.bold),\n }\n }\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1133,"cells":{"blob_id":{"kind":"string","value":"77058d7ec4c60610a551e6b77abc64b8a98b6227"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"mchesser/platformer"},"path":{"kind":"string","value":"/src/map.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":2903,"string":"2,903"},"score":{"kind":"number","value":3.078125,"string":"3.078125"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"use std::{fs::File, io::Read, path::Path};\n\nuse anyhow::Context;\nuse macroquad::prelude::{Rect, Vec2};\n\nuse crate::tiles::{TileInfo, TileSet};\n\npub struct Map {\n pub width: usize,\n pub height: usize,\n tiles: Vec,\n tileset: TileSet,\n}\n\nimpl Map {\n /// Loads a map from a file\n pub fn load_map(path: &Path, tileset: TileSet) -> anyhow::Result {\n static VERSION: u8 = 1;\n static MAGIC_ID: [u8; 3] = *b\"MAP\";\n\n let mut file =\n File::open(path).with_context(|| format!(\"failed to open: {}\", path.display()))?;\n\n let mut header = [0; 12];\n // Load header into the buffer\n match file.read(&mut header) {\n Ok(n) if n == 12 => {}\n _ => anyhow::bail!(\"Could not read file header\"),\n }\n\n // Check the magic id\n if &header[0..3] != &MAGIC_ID {\n anyhow::bail!(\"Invalid magic id\");\n }\n\n // Check the version number\n if header[3] != VERSION {\n anyhow::bail!(\"Invalid map version\");\n }\n\n // Get the width and 
height of the map\n let width = u32::from_le_bytes(header[4..8].try_into().unwrap()) as usize;\n let height = u32::from_le_bytes(header[8..12].try_into().unwrap()) as usize;\n\n // Read the tiles\n let length = width * height * 2;\n let mut tile_buffer = vec![0; length];\n match file.read(&mut tile_buffer) {\n Ok(n) if n == length => {}\n Ok(n) => anyhow::bail!(\"Invalid number of tiles, expected: {length}, but found: {n}\"),\n _ => anyhow::bail!(\"Could not load map tiles\"),\n }\n\n let tiles =\n tile_buffer.chunks(2).map(|x| u16::from_le_bytes(x.try_into().unwrap())).collect();\n\n Ok(Self { tiles, width, height, tileset })\n }\n\n pub fn size(&self) -> Vec2 {\n let tile_size = self.tile_size() as f32;\n Vec2::new(self.width as f32 * tile_size, self.height as f32 * tile_size)\n }\n\n pub fn tile_size(&self) -> i32 {\n self.tileset.tile_size\n }\n\n pub fn tile_info_at(&self, x: usize, y: usize) -> TileInfo {\n self.tileset.id(self.get(x, y))\n }\n\n fn get(&self, x: usize, y: usize) -> u16 {\n assert!(x < self.width);\n assert!(y < self.height);\n self.tiles[x + y * self.width]\n }\n\n pub fn draw(&self, camera: Vec2) {\n for tile_x in 0..self.width {\n for tile_y in 0..self.height {\n let x = (tile_x * self.tile_size() as usize) as f32;\n let y = (tile_y * self.tile_size() as usize) as f32;\n let dest_rect = Rect::new(\n x - camera.x,\n y - camera.y,\n self.tile_size() as f32,\n self.tile_size() as f32,\n );\n self.tileset.draw(self.get(tile_x, tile_y), dest_rect);\n }\n }\n }\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1134,"cells":{"blob_id":{"kind":"string","value":"84b82fbb5622e37e3ebdfbf0163a4f74cbfcf4b0"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"HappyCodingRust/async_executors"},"path":{"kind":"string","value":"/src/tokio_ct.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":5475,"string":"5,475"},"score":{"kind":"number","value":2.84375,"string":"2.84375"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":["Unlicense"],"string":"[\n \"Unlicense\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"use\n{\n\tcrate :: { SpawnHandle, LocalSpawnHandle, JoinHandle, join_handle::InnerJh } ,\n\tstd :: { sync::Arc, future::Future, sync::atomic::AtomicBool } ,\n\ttokio :: { task::LocalSet, runtime::{ Runtime } } ,\n\tfutures_task :: { FutureObj, LocalFutureObj, Spawn, LocalSpawn, SpawnError } ,\n};\n\n\n/// An executor that uses a [`tokio::runtime::Runtime`] with the [current thread](tokio::runtime::Builder::new_current_thread)\n/// and a [`tokio::task::LocalSet`]. Can spawn `!Send` futures.\n///\n/// ## Creation of the runtime\n///\n/// You must use [`TokioCtBuilder`](crate::TokioCtBuilder) to create the executor.\n///\n/// ```\n/// // Make sure to set the `tokio_ct` feature on async_executors.\n/// //\n/// use\n/// {\n/// async_executors :: { TokioCt, TokioCtBuilder, LocalSpawnHandleExt } ,\n/// tokio :: { runtime::Builder } ,\n/// std :: { rc::Rc } ,\n/// };\n///\n/// // You must use the builder. 
This guarantees that TokioCt is always backed by a single threaded runtime.\n/// // You can set other configurations by calling `tokio_builder()` on TokioCtBuilder, so you get\n/// // access to the `tokio::runtime::Builder`.\n/// //\n/// let exec = TokioCtBuilder::new().build().expect( \"create tokio runtime\" );\n///\n/// // block_on takes a &self, so if you need to `async move`,\n/// // just clone it for use inside the async block.\n/// //\n/// exec.block_on( async\n/// {\n/// let not_send = async { let rc = Rc::new(()); };\n///\n/// // We can spawn !Send futures here.\n/// //\n/// let join_handle = exec.spawn_handle_local( not_send ).expect( \"spawn\" );\n///\n/// join_handle.await;\n/// });\n///```\n///\n/// ## Unwind Safety.\n///\n/// When a future spawned on this wrapper panics, the panic will be caught by tokio in the poll function.\n///\n/// You must only spawn futures to this API that are unwind safe. Tokio will wrap spawned tasks in\n/// [`std::panic::AssertUnwindSafe`] and wrap the poll invocation with [`std::panic::catch_unwind`].\n///\n/// They reason that this is fine because they require `Send + 'static` on the task. As far\n/// as I can tell this is wrong. Unwind safety can be circumvented in several ways even with\n/// `Send + 'static` (eg. `parking_lot::Mutex` is `Send + 'static` but `!UnwindSafe`).\n///\n/// You should make sure that if your future panics, no code that lives on after the panic,\n/// nor any destructors called during the unwind can observe data in an inconsistent state.\n///\n/// Note: the future running from within `block_on` as opposed to `spawn` does not exhibit this behavior and will panic\n/// the current thread.\n///\n/// Note that these are logic errors, not related to the class of problems that cannot happen\n/// in safe rust (memory safety, undefined behavior, unsoundness, data races, ...). See the relevant\n/// [catch_unwind RFC](https://github.com/rust-lang/rfcs/blob/master/text/1236-stabilize-catch-panic.md)\n/// and it's discussion threads for more info as well as the documentation of [std::panic::UnwindSafe]\n/// for more information.\n///\n//\n#[ derive( Debug, Clone ) ]\n//\n#[ cfg_attr( nightly, doc(cfg( feature = \"tokio_ct\" )) ) ]\n//\npub struct TokioCt\n{\n\tpub(crate) exec : Arc< Runtime > ,\n\tpub(crate) local : Arc< LocalSet > ,\n}\n\n\n\nimpl TokioCt\n{\n\t/// This is the entry point for this executor. Once this call returns, no remaining tasks shall be polled anymore.\n\t/// However the tasks stay in the executor, so if you make a second call to `block_on` with a new task, the older\n\t/// tasks will start making progress again.\n\t///\n\t/// For simplicity, it's advised to just create top level task that you run through `block_on` and make sure your\n\t/// program is done when it returns.\n\t///\n\t/// See: [tokio::runtime::Runtime::block_on]\n\t///\n\t/// ## Panics\n\t///\n\t/// This function will panic if it is called from an async context, including but not limited to making a nested\n\t/// call. 
It will also panic if the provided future panics.\n\t//\n\tpub fn block_on< F: Future >( &self, f: F ) -> F::Output\n\t{\n\t\tself.exec.block_on( self.local.run_until( f ) )\n\t}\n}\n\n\nimpl Spawn for TokioCt\n{\n\tfn spawn_obj( &self, future: FutureObj<'static, ()> ) -> Result<(), SpawnError>\n\t{\n\t\t// We drop the JoinHandle, so the task becomes detached.\n\t\t//\n\t\tlet _ = self.local.spawn_local( future );\n\n\t\tOk(())\n\t}\n}\n\n\n\nimpl LocalSpawn for TokioCt\n{\n\tfn spawn_local_obj( &self, future: LocalFutureObj<'static, ()> ) -> Result<(), SpawnError>\n\t{\n\t\t// We drop the JoinHandle, so the task becomes detached.\n\t\t//\n\t\tlet _ = self.local.spawn_local( future );\n\n\t\tOk(())\n\t}\n}\n\n\n\nimpl SpawnHandle for TokioCt\n{\n\tfn spawn_handle_obj( &self, future: FutureObj<'static, Out> ) -> Result, SpawnError>\n\t{\n\t\tOk( JoinHandle{ inner: InnerJh::Tokio\n\t\t{\n\t\t\thandle : self.exec.spawn( future ) ,\n\t\t\tdetached: AtomicBool::new( false ) ,\n\t\t}})\n\t}\n}\n\n\n\nimpl LocalSpawnHandle for TokioCt\n{\n\tfn spawn_handle_local_obj( &self, future: LocalFutureObj<'static, Out> ) -> Result, SpawnError>\n\t{\n\t\tOk( JoinHandle{ inner: InnerJh::Tokio\n\t\t{\n\t\t\thandle : self.local.spawn_local( future ) ,\n\t\t\tdetached: AtomicBool::new( false ) ,\n\t\t}})\n\n\t}\n}\n\n\n\n#[ cfg(test) ]\n//\nmod tests\n{\n\tuse super::*;\n\n\t// It's important that this is not Send, as we allow spawning !Send futures on it.\n\t//\n\tstatic_assertions::assert_not_impl_any!( TokioCt: Send, Sync );\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1135,"cells":{"blob_id":{"kind":"string","value":"7e852c5803efe0d60fab202ff6f01b44360f7835"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"rovangju/vacuum-robot-simulator"},"path":{"kind":"string","value":"/src/geometry/vector.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":1738,"string":"1,738"},"score":{"kind":"number","value":3.796875,"string":"3.796875"},"int_score":{"kind":"number","value":4,"string":"4"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"use std::cmp;\nuse std::fmt;\nuse std::ops;\n\nuse math::{Angle, Scalar};\n\n#[derive(Debug, Clone, Copy)]\npub struct Vector {\n pub x: Scalar,\n pub y: Scalar,\n}\n\nimpl fmt::Display for Vector {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n write!(f, \"({}, {})\", self.x, self.y)\n }\n}\n\nimpl ops::Add for Vector {\n type Output = Vector;\n\n fn add(self, other: Vector) -> Vector {\n Vector::new(self.x + other.x, self.y + other.y)\n }\n}\n\nimpl ops::Sub for Vector {\n type Output = Vector;\n\n fn sub(self, other: Vector) -> Vector {\n Vector::new(self.x - other.x, self.y - other.y)\n }\n}\n\nimpl ops::Mul for Vector {\n type Output = Vector;\n\n fn mul(self, s: Scalar) -> Vector {\n Vector::new(self.x * s, self.y * s)\n }\n}\n\nimpl cmp::PartialEq for Vector {\n fn eq(&self, other: &Vector) -> bool {\n self.x == other.x && self.y == other.y\n }\n}\n\nimpl Vector {\n pub fn new(x: Scalar, y: Scalar) -> Vector {\n Vector { x, y }\n }\n\n pub fn from_angle(angle: Angle) -> Vector {\n // 0° is in forward direction (along Y-axis)\n Vector {\n x: -angle.sin(),\n y: angle.cos(),\n }\n }\n\n pub fn length(&self) -> Scalar {\n (self.x.powi(2) + self.y.powi(2)).sqrt()\n }\n\n pub fn dot(&self, q: Vector) -> Scalar {\n self.x * q.x + q.y * self.y\n }\n\n pub fn 
cross(&self, q: Vector) -> Scalar {\n self.x * q.y - q.x * self.y\n }\n\n pub fn angle(&self) -> Scalar {\n -self.x.atan2(self.y)\n }\n\n pub fn rotate(&self, angle: Angle) -> Vector {\n let c = angle.cos();\n let s = angle.sin();\n Vector::new(c * self.x - s * self.y, s * self.x + c * self.y)\n }\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1136,"cells":{"blob_id":{"kind":"string","value":"d36dd1e963fe5fee1c70aba9b5486e97a945cf83"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"Vanille-N/rask"},"path":{"kind":"string","value":"/src/parse/mod.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":2833,"string":"2,833"},"score":{"kind":"number","value":2.671875,"string":"2.671875"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"mod build;\nmod lex;\nmod split;\nmod util;\n\npub use build::build;\npub use lex::distribute_lex as lex;\npub use split::split;\nuse std::rc::Rc;\npub use util::*;\n\npub fn parse(src: &str) -> Vec, ParseErr>> {\n let symbols = split(src);\n if let Err(e) = symbols {\n return vec![Err(e)];\n }\n let tokens = lex(&symbols.unwrap());\n if let Err(e) = tokens {\n return vec![Err(e)];\n }\n build(&tokens.unwrap())\n}\n\n#[cfg(test)]\n#[cfg_attr(tarpaulin, skip)]\nmod integrate {\n const ASSETS: [&str; 9] = [\n \"sort\",\n \"set-construct\",\n \"word-count\",\n \"printer\",\n \"interprete\",\n \"unification\",\n \"timer\",\n \"sprintf\",\n \"matrix\",\n ];\n use super::*;\n use crate::source;\n\n #[test]\n fn read_sources() {\n for file in ASSETS.iter() {\n let prog = source(&(\"assets/\".to_owned() + *file)).unwrap();\n let symbols = split(&prog[..]);\n if let Err(e) = symbols {\n panic!(\"Could not split {} properly: {:?}\", file, e);\n }\n let symbols = symbols.ok().unwrap();\n let tokens = lex(&symbols);\n if let Err(e) = tokens {\n panic!(\"Could not tokenize {} properly: {:?}\", file, e);\n }\n let tokens = tokens.ok().unwrap();\n let exprs = build(&tokens);\n for expr in exprs.iter() {\n if let Err(e) = expr {\n match e {\n ParseErr::MismatchedOpenBrace(n)\n | ParseErr::MismatchedOpenParen(n)\n | ParseErr::MismatchedCloseBrace(n)\n | ParseErr::MismatchedCloseParen(n) => panic!(\n \"Could not build {} properly: {:?}\\nContext: {:?}\",\n file,\n e,\n &tokens[n - 5..n + 5]\n ),\n e => panic!(\"Could not build {} properly: {:?}\", file, e),\n }\n }\n }\n }\n }\n\n #[test]\n fn failures() {\n assert_eq!(\n *parse(\"(\")[0].as_ref().err().unwrap(),\n ParseErr::MismatchedOpenParen(0)\n );\n assert_eq!(\n *parse(\"#\")[0].as_ref().err().unwrap(),\n ParseErr::LoneNumbersign\n );\n assert_eq!(source(\"nofile\"), None);\n assert_eq!(\n *parse(\"abc |# x\")[0].as_ref().err().unwrap(),\n ParseErr::NoCommentStart\n );\n assert_eq!(\n *parse(\"x #| abc\")[0].as_ref().err().unwrap(),\n ParseErr::UnterminatedComment\n );\n assert_eq!(\n *parse(\"\\\"abc\")[0].as_ref().err().unwrap(),\n ParseErr::UnterminatedString(1)\n );\n 
}\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1137,"cells":{"blob_id":{"kind":"string","value":"2b1e6aba5adabab5c7179b13fd62d2395bc06ca0"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"rust-lang/rust-analyzer"},"path":{"kind":"string","value":"/crates/ide-assists/src/handlers/generate_default_from_enum_variant.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":3765,"string":"3,765"},"score":{"kind":"number","value":2.859375,"string":"2.859375"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":["Apache-2.0","MIT"],"string":"[\n \"Apache-2.0\",\n \"MIT\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"use ide_db::{famous_defs::FamousDefs, RootDatabase};\nuse syntax::ast::{self, AstNode, HasName};\n\nuse crate::{AssistContext, AssistId, AssistKind, Assists};\n\n// Assist: generate_default_from_enum_variant\n//\n// Adds a Default impl for an enum using a variant.\n//\n// ```\n// enum Version {\n// Undefined,\n// Minor$0,\n// Major,\n// }\n// ```\n// ->\n// ```\n// enum Version {\n// Undefined,\n// Minor,\n// Major,\n// }\n//\n// impl Default for Version {\n// fn default() -> Self {\n// Self::Minor\n// }\n// }\n// ```\npub(crate) fn generate_default_from_enum_variant(\n acc: &mut Assists,\n ctx: &AssistContext<'_>,\n) -> Option<()> {\n let variant = ctx.find_node_at_offset::()?;\n let variant_name = variant.name()?;\n let enum_name = variant.parent_enum().name()?;\n if !matches!(variant.kind(), ast::StructKind::Unit) {\n cov_mark::hit!(test_gen_default_on_non_unit_variant_not_implemented);\n return None;\n }\n\n if existing_default_impl(&ctx.sema, &variant).is_some() {\n cov_mark::hit!(test_gen_default_impl_already_exists);\n return None;\n }\n\n let target = variant.syntax().text_range();\n acc.add(\n AssistId(\"generate_default_from_enum_variant\", AssistKind::Generate),\n \"Generate `Default` impl from this enum variant\",\n target,\n |edit| {\n let start_offset = variant.parent_enum().syntax().text_range().end();\n let buf = format!(\n r#\"\n\nimpl Default for {enum_name} {{\n fn default() -> Self {{\n Self::{variant_name}\n }}\n}}\"#,\n );\n edit.insert(start_offset, buf);\n },\n )\n}\n\nfn existing_default_impl(\n sema: &'_ hir::Semantics<'_, RootDatabase>,\n variant: &ast::Variant,\n) -> Option<()> {\n let variant = sema.to_def(variant)?;\n let enum_ = variant.parent_enum(sema.db);\n let krate = enum_.module(sema.db).krate();\n\n let default_trait = FamousDefs(sema, krate).core_default_Default()?;\n let enum_type = enum_.ty(sema.db);\n\n if enum_type.impls_trait(sema.db, default_trait, &[]) {\n Some(())\n } else {\n None\n }\n}\n\n#[cfg(test)]\nmod tests {\n use crate::tests::{check_assist, check_assist_not_applicable};\n\n use super::*;\n\n #[test]\n fn test_generate_default_from_variant() {\n check_assist(\n generate_default_from_enum_variant,\n r#\"\n//- minicore: default\nenum Variant {\n Undefined,\n Minor$0,\n Major,\n}\n\"#,\n r#\"\nenum Variant {\n Undefined,\n Minor,\n Major,\n}\n\nimpl Default for Variant {\n fn default() -> Self {\n Self::Minor\n }\n}\n\"#,\n );\n }\n\n #[test]\n fn test_generate_default_already_implemented() {\n cov_mark::check!(test_gen_default_impl_already_exists);\n check_assist_not_applicable(\n generate_default_from_enum_variant,\n r#\"\n//- minicore: default\nenum Variant {\n Undefined,\n Minor$0,\n Major,\n}\n\nimpl Default for Variant {\n fn 
default() -> Self {\n Self::Minor\n }\n}\n\"#,\n );\n }\n\n #[test]\n fn test_add_from_impl_no_element() {\n cov_mark::check!(test_gen_default_on_non_unit_variant_not_implemented);\n check_assist_not_applicable(\n generate_default_from_enum_variant,\n r#\"\n//- minicore: default\nenum Variant {\n Undefined,\n Minor(u32)$0,\n Major,\n}\n\"#,\n );\n }\n\n #[test]\n fn test_generate_default_from_variant_with_one_variant() {\n check_assist(\n generate_default_from_enum_variant,\n r#\"\n//- minicore: default\nenum Variant { Undefi$0ned }\n\"#,\n r#\"\nenum Variant { Undefined }\n\nimpl Default for Variant {\n fn default() -> Self {\n Self::Undefined\n }\n}\n\"#,\n );\n }\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1138,"cells":{"blob_id":{"kind":"string","value":"b06e4f1d03551cd08f93419ec93a7285cf13e6e8"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"xgillard/ddo"},"path":{"kind":"string","value":"/ddo/examples/max2sat/errors.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":1981,"string":"1,981"},"score":{"kind":"number","value":2.6875,"string":"2.6875"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"// Copyright 2020 Xavier Gillard\n//\n// Permission is hereby granted, free of charge, to any person obtaining a copy of\n// this software and associated documentation files (the \"Software\"), to deal in\n// the Software without restriction, including without limitation the rights to\n// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of\n// the Software, and to permit persons to whom the Software is furnished to do so,\n// subject to the following conditions:\n//\n// The above copyright notice and this permission notice shall be included in all\n// copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS\n// FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR\n// COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER\n// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN\n// CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n//! This module contains the definition of the errors that can be triggered when\n//! parsing an instance of the max2sat problem.\n\nuse std::num::ParseIntError;\n\n\n/// This enumeration simply groups the kind of errors that might occur when parsing a\n/// instance file. There can be io errors (file unavailable ?), format error\n/// (e.g. the file is not an instance but contains the text of your next paper), \n/// or parse int errors (which are actually a variant of the format error since it tells \n/// you that the parser expected an integer number but got ... 
something else).\n#[derive(Debug, thiserror::Error)]\npub enum Error {\n /// There was an io related error\n #[error(\"io error {0}\")]\n Io(#[from] std::io::Error),\n /// The parser expected to read something that was an integer but got some garbage\n #[error(\"parse int {0}\")]\n ParseInt(#[from] ParseIntError),\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1139,"cells":{"blob_id":{"kind":"string","value":"27cbbbb07c1dc63f818962407e4be6ed5ab15d49"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"CryZe/livesplit-lite-core"},"path":{"kind":"string","value":"/src/segment.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":309,"string":"309"},"score":{"kind":"number","value":3.09375,"string":"3.09375"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"use std::fmt;\n\n#[derive(Clone)]\npub struct Segment {\n pub name: String,\n}\n\nimpl fmt::Display for Segment {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n write!(f, \"{}\", self.name)\n }\n}\n\nimpl Segment {\n pub fn new(name: String) -> Segment {\n Segment { name: name }\n }\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1140,"cells":{"blob_id":{"kind":"string","value":"6121cd16bfb95c5752f517480a371dfc984682df"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"alexbispo/rust_the_book"},"path":{"kind":"string","value":"/cap03/temperatures_converter/src/main.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":1664,"string":"1,664"},"score":{"kind":"number","value":3.75,"string":"3.75"},"int_score":{"kind":"number","value":4,"string":"4"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"use std::io;\n\nfn main() {\n println!(\"Temperature converter!\");\n println!(\"===============================================================\");\n\n loop {\n println!(\"Quit: q \");\n println!(\"Fahrenheit to Celsius: f \");\n println!(\"Celsius to Fahrenheit: c \");\n\n let mut user_input = String::new();\n\n io::stdin()\n .read_line(&mut user_input)\n .expect(\"Something was bad!\");\n\n println!(\"\");\n\n let user_input = user_input.trim();\n\n if user_input == \"q\" {\n println!(\"Bye!\");\n break;\n }\n\n let inputs: Vec<&str> = user_input.trim().split(' ').collect();\n\n if inputs.len() != 2 {\n println!(\"Sorry! Invalid option.\\n\");\n continue;\n }\n\n let origin_temperature = inputs[0];\n\n let temperature: f32 = match inputs[1].trim().parse() {\n Ok(num) => num,\n Err(_) => {\n println!(\"Sorry! Invalid option.\\n\");\n continue;\n }\n };\n\n if origin_temperature == \"f\" {\n let converted_temperature = (temperature - 32.0) * (5.0/9.0);\n\n println!(\"{} fahrenheit to celsius is {:.2}\", temperature, converted_temperature);\n } else if origin_temperature == \"c\" {\n let converted_temperature = (temperature * (9.0/5.0)) + 32.0;\n\n println!(\"{} celsius to fahrenheit is {:.2}\", temperature, converted_temperature);\n } else {\n println!(\"Sorry! 
Invalid option.\\n\");\n continue;\n }\n\n println!(\"\");\n }\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1141,"cells":{"blob_id":{"kind":"string","value":"70d343ea2276c78f5e6b20ef9090473d65964fd8"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"cbrewster/alcova"},"path":{"kind":"string","value":"/alcova/src/live_view.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":5393,"string":"5,393"},"score":{"kind":"number","value":2.578125,"string":"2.578125"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"use crate::{\n live_socket::{ClientMessage, LiveSocketContext, SocketViewMessage},\n LiveSocket, LiveTemplate,\n};\nuse actix::{Actor, ActorContext, Addr, Context, Handler, Message};\nuse actix_web::{HttpRequest, HttpResponse, Responder};\nuse jsonwebtoken::{encode, EncodingKey, Header};\nuse serde::{de::DeserializeOwned, Deserialize, Serialize};\n\n#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, Serialize, Deserialize)]\n#[serde(transparent)]\npub struct LiveViewId(pub usize);\n\npub type LiveViewContext = Context>;\n\npub(crate) fn signing_secret() -> String {\n std::env::var(\"ALCOVA_SECRET_KEY\").unwrap_or_else(|_| {\n warn!(\"No secret key set! Using unsecure default\");\n \"secret\".into()\n })\n}\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\npub(crate) struct Claims {\n exp: u64,\n pub(crate) data: T,\n}\n\nimpl Claims {\n fn new(minutes: u64, data: T) -> Self {\n let exp = std::time::SystemTime::now()\n .duration_since(std::time::SystemTime::UNIX_EPOCH)\n .unwrap()\n .as_secs()\n + (minutes * 60);\n Self { exp, data }\n }\n}\n\npub trait LiveView: Sized + Unpin + 'static {\n type Template: LiveTemplate + Unpin;\n type SessionData: Serialize + DeserializeOwned;\n\n fn name() -> &'static str;\n\n fn mount(socket_ctx: &LiveSocketContext, session: Self::SessionData) -> Self;\n\n fn started(&mut self, _ctx: &mut LiveViewContext) {}\n\n fn handle_event(&mut self, _event: &str, _value: &str, _ctx: &mut LiveViewContext) {}\n\n fn template(&self) -> Self::Template;\n\n fn to_string(&self, session: &Self::SessionData) -> String {\n let key = signing_secret();\n\n // TODO: Not sure how we should handle tokens expiring. 
Maybe reload the page on the\n // client?\n let claims = Claims::new(60, session);\n\n let token = encode(\n &Header::default(),\n &claims,\n &EncodingKey::from_secret(key.as_bytes()),\n )\n .unwrap();\n self.template()\n .render_with_wrapper(Self::name(), token.as_str())\n }\n\n fn to_response(self, session: Self::SessionData) -> LiveViewResponse {\n LiveViewResponse {\n live_view: self,\n session,\n }\n }\n}\n\npub trait LiveMessage: Message {}\n\npub struct LiveViewResponse {\n live_view: T,\n session: T::SessionData,\n}\n\nimpl Responder for LiveViewResponse\nwhere\n T: LiveView,\n{\n type Error = actix_web::Error;\n type Future = futures::future::Ready>;\n\n fn respond_to(self, _req: &HttpRequest) -> Self::Future {\n let body = self.live_view.to_string(&self.session);\n\n // Create response and set content type\n futures::future::ready(Ok(HttpResponse::Ok().body(body)))\n }\n}\n\n#[derive(Message, Debug, Deserialize)]\n#[rtype(result = \"()\")]\npub enum LiveViewMessage {\n ClientAction(LiveViewAction),\n Stop,\n}\n\n#[derive(Debug, Deserialize)]\npub struct LiveViewAction {\n action: String,\n value: Option,\n}\n\npub struct LiveViewActor {\n id: LiveViewId,\n pub view: T,\n socket: Addr,\n old_template: Option,\n}\n\nimpl LiveViewActor {\n pub fn new(\n id: LiveViewId,\n socket: Addr,\n context: &LiveSocketContext,\n session: T::SessionData,\n ) -> Self {\n LiveViewActor {\n id,\n view: T::mount(context, session),\n socket,\n old_template: None,\n }\n }\n\n pub fn send_changes(&mut self) {\n let template = self.view.template();\n let message = ClientMessage::Changes(template.changes(self.old_template.as_ref().unwrap()));\n self.old_template = Some(template);\n self.socket.do_send(SocketViewMessage { message });\n }\n}\n\nimpl Actor for LiveViewActor\nwhere\n T: LiveView + Unpin + 'static,\n{\n type Context = Context;\n\n fn started(&mut self, ctx: &mut Self::Context) {\n self.view.started(ctx);\n let template = self.view.template();\n let message = ClientMessage::Template {\n template: template.render(),\n id: self.id,\n };\n self.old_template = Some(template);\n self.socket.do_send(SocketViewMessage { message });\n }\n}\n\nimpl Handler for LiveViewActor\nwhere\n T: LiveView + Unpin + 'static,\n{\n type Result = ();\n\n fn handle(&mut self, msg: LiveViewMessage, ctx: &mut Self::Context) -> Self::Result {\n match msg {\n LiveViewMessage::ClientAction(LiveViewAction { action, value }) => {\n let value = value.unwrap_or(String::new());\n self.view.handle_event(&action, &value, ctx);\n self.send_changes();\n }\n LiveViewMessage::Stop => ctx.stop(),\n }\n }\n}\n\npub trait LiveHandler\nwhere\n Self: LiveView,\n{\n fn handle(&mut self, msg: M, ctx: &mut LiveViewContext);\n}\n\nimpl Handler for LiveViewActor\nwhere\n T: LiveView + Unpin + LiveHandler + 'static,\n M: LiveMessage,\n{\n type Result = ();\n\n fn handle(&mut self, msg: M, ctx: &mut Self::Context) -> Self::Result {\n self.view.handle(msg, ctx);\n self.send_changes();\n 
}\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1142,"cells":{"blob_id":{"kind":"string","value":"ebedcc11fadf54c9cb0f39de30e39b06574346c1"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"ypoluektovich/tmux-interface-rs"},"path":{"kind":"string","value":"/src/commands/windows_and_panes/select_pane.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":4684,"string":"4,684"},"score":{"kind":"number","value":2.953125,"string":"2.953125"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":["MIT","LicenseRef-scancode-unknown-license-reference"],"string":"[\n \"MIT\",\n \"LicenseRef-scancode-unknown-license-reference\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"use crate::commands::constants::*;\nuse crate::{Error, TmuxCommand, TmuxOutput};\nuse std::borrow::Cow;\n\n/// Make pane `target-pane` the active pane in window `target-window`\n///\n/// # Manual\n///\n/// tmux ^3.1:\n/// ```text\n/// tmux select-pane [-DdeLlMmRUZ] [-T title] [-t target-pane]\n/// (alias: selectp)\n/// ```\n///\n/// tmux ^2.6:\n/// ```text\n/// tmux select-pane [-DdeLlMmRU] [-T title] [-t target-pane]\n/// (alias: selectp)\n/// ```\n///\n/// tmux ^2.1:\n/// ```text\n/// tmux select-pane [-DdegLlMmRU] [-P style] [-t target-pane]\n/// (alias: selectp)\n/// ```\n///\n/// tmux ^2.0:\n/// ```text\n/// tmux select-pane [-DdeLlRU] [-t target-pane]\n/// (alias: selectp)\n/// ```\n///\n/// tmux ^1.5:\n/// ```text\n/// tmux select-pane [-DLlRU] [-t target-pane]\n/// (alias: selectp)\n/// ```\n///\n/// tmux ^1.3:\n/// ```text\n/// tmux select-pane [-DLRU] [-t target-pane]\n/// (alias: selectp)\n/// ```\n///\n/// tmux ^1.0:\n/// ```text\n/// tmux select-pane [-t target-pane]\n/// (alias: selectp)\n/// ```\n///\n/// tmux ^0.8:\n/// ```text\n/// tmux select-pane [-p pane-index] [-t target-window]\n/// (alias: selectp)\n/// ```\n#[derive(Debug, Clone)]\npub struct SelectPane<'a>(pub TmuxCommand<'a>);\n\nimpl<'a> Default for SelectPane<'a> {\n fn default() -> Self {\n Self(TmuxCommand {\n cmd: Some(Cow::Borrowed(SELECT_PANE)),\n ..Default::default()\n })\n }\n}\n\nimpl<'a> SelectPane<'a> {\n pub fn new() -> Self {\n Default::default()\n }\n\n /// `[-D]` - pane below\n #[cfg(feature = \"tmux_1_3\")]\n pub fn down(&mut self) -> &mut Self {\n self.0.push_flag(D_UPPERCASE_KEY);\n self\n }\n\n /// `[-d]` - disable input\n #[cfg(feature = \"tmux_2_0\")]\n pub fn disable(&mut self) -> &mut Self {\n self.0.push_flag(D_LOWERCASE_KEY);\n self\n }\n\n /// `[-e]` - enable input\n #[cfg(feature = \"tmux_2_0\")]\n pub fn enable(&mut self) -> &mut Self {\n self.0.push_flag(E_LOWERCASE_KEY);\n self\n }\n\n /// `[-g]` - show the current pane style\n #[cfg(feature = \"tmux_2_1\")]\n pub fn show_style(&mut self) -> &mut Self {\n self.0.push_flag(G_LOWERCASE_KEY);\n self\n }\n\n /// `[-L]` - pane left\n #[cfg(feature = \"tmux_1_3\")]\n pub fn left(&mut self) -> &mut Self {\n self.0.push_flag(L_UPPERCASE_KEY);\n self\n }\n\n /// `[-l]` - equivalent to last-pane command\n #[cfg(feature = \"tmux_1_5\")]\n pub fn last(&mut self) -> &mut Self {\n self.0.push_flag(L_LOWERCASE_KEY);\n self\n }\n\n /// `[-M]` - clear marked pane\n #[cfg(feature = \"tmux_2_1\")]\n pub fn set_marked(&mut self) -> &mut Self {\n self.0.push_flag(M_UPPERCASE_KEY);\n self\n }\n\n /// `[-m]` - set marked pane\n #[cfg(feature = \"tmux_2_1\")]\n pub fn clear_marked(&mut self) -> &mut 
Self {\n self.0.push_flag(M_LOWERCASE_KEY);\n self\n }\n\n /// `[-R]` - pane right\n #[cfg(feature = \"tmux_1_3\")]\n pub fn right(&mut self) -> &mut Self {\n self.0.push_flag(R_UPPERCASE_KEY);\n self\n }\n\n /// `[-U]` - pane above\n #[cfg(feature = \"tmux_1_3\")]\n pub fn up(&mut self) -> &mut Self {\n self.0.push_flag(U_UPPERCASE_KEY);\n self\n }\n\n /// `[-Z]` - keep the window zoomed if it was zoomed\n #[cfg(feature = \"tmux_3_1\")]\n pub fn keep_zoomed(&mut self) -> &mut Self {\n self.0.push_flag(Z_UPPERCASE_KEY);\n self\n }\n\n /// `[-P style]` - set the style for a single pane\n #[cfg(feature = \"tmux_2_1\")]\n pub fn style>>(&mut self, style: S) -> &mut Self {\n self.0.push_option(P_UPPERCASE_KEY, style);\n self\n }\n\n /// `[-T title]` - title\n #[cfg(feature = \"tmux_2_6\")]\n pub fn title>>(&mut self, title: S) -> &mut Self {\n self.0.push_option(T_UPPERCASE_KEY, title);\n self\n }\n\n /// `[-t target-pane]` - target-pane\n #[cfg(feature = \"tmux_1_0\")]\n pub fn target_pane>>(&mut self, target_pane: S) -> &mut Self {\n self.0.push_option(T_LOWERCASE_KEY, target_pane);\n self\n }\n\n pub fn output(&self) -> Result {\n self.0.output()\n }\n}\n\nimpl<'a> From> for SelectPane<'a> {\n fn from(item: TmuxCommand<'a>) -> Self {\n Self(TmuxCommand {\n bin: item.bin,\n cmd: Some(Cow::Borrowed(SELECT_PANE)),\n ..Default::default()\n })\n }\n}\n\nimpl<'a> From<&TmuxCommand<'a>> for SelectPane<'a> {\n fn from(item: &TmuxCommand<'a>) -> Self {\n Self(TmuxCommand {\n bin: item.bin.clone(),\n cmd: Some(Cow::Borrowed(SELECT_PANE)),\n ..Default::default()\n })\n }\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1143,"cells":{"blob_id":{"kind":"string","value":"1b436e87e29d2332e959e3d751d15137af83a859"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"Ryan1729/rote"},"path":{"kind":"string","value":"/libs/move_mod/src/move_mod.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":1600,"string":"1,600"},"score":{"kind":"number","value":3.09375,"string":"3.09375"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":["Apache-2.0","MIT"],"string":"[\n \"Apache-2.0\",\n \"MIT\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"use macros::{fmt_display, ord};\n\n#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]\npub enum Move {\n Up,\n Down,\n Left,\n Right,\n ToLineStart,\n ToLineEnd,\n ToBufferStart,\n ToBufferEnd,\n ToPreviousLikelyEditLocation,\n ToNextLikelyEditLocation,\n}\nuse Move::*;\n\nfmt_display!(for Move: r#move in \"{}\", match r#move {\n Up => \"^\",\n Down => \"v\",\n Left => \"<\",\n Right => \">\",\n ToLineStart => \"Line<\",\n ToLineEnd => \"Line>\",\n ToBufferStart => \"Buffer<\",\n ToBufferEnd => \"Buffer>\",\n ToPreviousLikelyEditLocation => \"Edit<\",\n ToNextLikelyEditLocation => \"Edit>\",\n});\n\nmacro_rules! 
to_num {\n ($m: expr) => {\n match $m {\n Up => 0,\n Down => 1,\n Left => 2,\n Right => 3,\n ToLineStart => 4,\n ToLineEnd => 5,\n ToBufferStart => 6,\n ToBufferEnd => 7,\n ToPreviousLikelyEditLocation => 8,\n ToNextLikelyEditLocation => 9,\n }\n };\n}\n\nord!(for Move: r#move, other in to_num!(r#move).cmp(&to_num!(other)));\n\nimpl std::ops::Not for Move {\n type Output = Move;\n\n fn not(self) -> Self::Output {\n match self {\n Up => Down,\n Down => Up,\n Left => Right,\n Right => Left,\n ToLineStart => ToLineEnd,\n ToLineEnd => ToLineStart,\n ToBufferStart => ToBufferEnd,\n ToBufferEnd => ToBufferStart,\n ToPreviousLikelyEditLocation => ToNextLikelyEditLocation,\n ToNextLikelyEditLocation => ToPreviousLikelyEditLocation,\n }\n }\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1144,"cells":{"blob_id":{"kind":"string","value":"c8d35e534af7e67f65945615931fceb055007d00"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"mishaszu/RUST-INTRO"},"path":{"kind":"string","value":"/src/ex1/match_statement/src/basic_matching.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":328,"string":"328"},"score":{"kind":"number","value":3.15625,"string":"3.15625"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"fn match_test() {\n let country_code = 1000;\n let country = match country_code {\n 44 => \"UK\",\n 46 => \"Sweden\",\n 1...999 => \"unknown\",\n _ => \"invalid\",\n };\n\n println!(\"the country for this code is: {}\", country);\n}\n\npub fn run() {\n println!(\"Running basic maching\");\n match_test();\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1145,"cells":{"blob_id":{"kind":"string","value":"40d6a584f86d61530d0458014433319562701614"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"rust-lang/crates.io"},"path":{"kind":"string","value":"/src/email.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":9186,"string":"9,186"},"score":{"kind":"number","value":2.796875,"string":"2.796875"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":["MIT","Apache-2.0"],"string":"[\n \"MIT\",\n \"Apache-2.0\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"use std::path::PathBuf;\nuse std::sync::Mutex;\n\nuse crate::util::errors::{server_error, AppResult};\n\nuse crate::config;\nuse crate::Env;\nuse lettre::message::header::ContentType;\nuse lettre::transport::file::FileTransport;\nuse lettre::transport::smtp::authentication::{Credentials, Mechanism};\nuse lettre::transport::smtp::SmtpTransport;\nuse lettre::{Message, Transport};\nuse rand::distributions::{Alphanumeric, DistString};\n\n#[derive(Debug)]\npub struct Emails {\n backend: EmailBackend,\n}\n\nimpl Emails {\n /// Create a new instance detecting the backend from the environment. 
This will either connect\n /// to a SMTP server or store the emails on the local filesystem.\n pub fn from_environment(config: &config::Server) -> Self {\n let backend = match (\n dotenvy::var(\"MAILGUN_SMTP_LOGIN\"),\n dotenvy::var(\"MAILGUN_SMTP_PASSWORD\"),\n dotenvy::var(\"MAILGUN_SMTP_SERVER\"),\n ) {\n (Ok(login), Ok(password), Ok(server)) => EmailBackend::Smtp {\n server,\n login,\n password,\n },\n _ => EmailBackend::FileSystem {\n path: \"/tmp\".into(),\n },\n };\n\n if config.base.env == Env::Production && !matches!(backend, EmailBackend::Smtp { .. }) {\n panic!(\"only the smtp backend is allowed in production\");\n }\n\n Self { backend }\n }\n\n /// Create a new test backend that stores all the outgoing emails in memory, allowing for tests\n /// to later assert the mails were sent.\n pub fn new_in_memory() -> Self {\n Self {\n backend: EmailBackend::Memory {\n mails: Mutex::new(Vec::new()),\n },\n }\n }\n\n /// Attempts to send a confirmation email.\n pub fn send_user_confirm(&self, email: &str, user_name: &str, token: &str) -> AppResult<()> {\n // Create a URL with token string as path to send to user\n // If user clicks on path, look email/user up in database,\n // make sure tokens match\n\n let subject = \"Please confirm your email address\";\n let body = format!(\n \"Hello {}! Welcome to Crates.io. Please click the\nlink below to verify your email address. Thank you!\\n\nhttps://{}/confirm/{}\",\n user_name,\n crate::config::domain_name(),\n token\n );\n\n self.send(email, subject, &body)\n }\n\n /// Attempts to send an ownership invitation.\n pub fn send_owner_invite(\n &self,\n email: &str,\n user_name: &str,\n crate_name: &str,\n token: &str,\n ) -> AppResult<()> {\n let subject = \"Crate ownership invitation\";\n let body = format!(\n \"{user_name} has invited you to become an owner of the crate {crate_name}!\\n\nVisit https://{domain}/accept-invite/{token} to accept this invitation,\nor go to https://{domain}/me/pending-invites to manage all of your crate ownership invitations.\",\n domain = crate::config::domain_name()\n );\n\n self.send(email, subject, &body)\n }\n\n /// Attempts to send an API token exposure notification email\n pub fn send_token_exposed_notification(\n &self,\n email: &str,\n url: &str,\n reporter: &str,\n source: &str,\n token_name: &str,\n ) -> AppResult<()> {\n let subject = \"Exposed API token found\";\n let mut body = format!(\n \"{reporter} has notified us that your crates.io API token {token_name}\\n\nhas been exposed publicly. We have revoked this token as a precaution.\\n\nPlease review your account at https://{domain} to confirm that no\\n\nunexpected changes have been made to your settings or crates.\\n\n\\n\nSource type: {source}\\n\",\n domain = crate::config::domain_name()\n );\n if url.is_empty() {\n body.push_str(\"\\nWe were not informed of the URL where the token was found.\\n\");\n } else {\n body.push_str(&format!(\"\\nURL where the token was found: {url}\\n\"));\n }\n self.send(email, subject, &body)\n }\n\n /// This is supposed to be used only during tests, to retrieve the messages stored in the\n /// \"memory\" backend. 
It's not cfg'd away because our integration tests need to access this.\n pub fn mails_in_memory(&self) -> Option> {\n if let EmailBackend::Memory { mails } = &self.backend {\n Some(mails.lock().unwrap().clone())\n } else {\n None\n }\n }\n\n fn send(&self, recipient: &str, subject: &str, body: &str) -> AppResult<()> {\n // The message ID is normally generated by the SMTP server, but if we let it generate the\n // ID there will be no way for the crates.io application to know the ID of the message it\n // just sent, as it's not included in the SMTP response.\n //\n // Our support staff needs to know the message ID to be able to find misdelivered emails.\n // Because of that we're generating a random message ID, hoping the SMTP server doesn't\n // replace it when it relays the message.\n let message_id = format!(\n \"<{}@{}>\",\n Alphanumeric.sample_string(&mut rand::thread_rng(), 32),\n crate::config::domain_name(),\n );\n\n let email = Message::builder()\n .message_id(Some(message_id.clone()))\n .to(recipient.parse()?)\n .from(self.sender_address().parse()?)\n .subject(subject)\n .header(ContentType::TEXT_PLAIN)\n .body(body.to_string())?;\n\n match &self.backend {\n EmailBackend::Smtp {\n server,\n login,\n password,\n } => {\n SmtpTransport::relay(server)\n .and_then(|transport| {\n transport\n .credentials(Credentials::new(login.clone(), password.clone()))\n .authentication(vec![Mechanism::Plain])\n .build()\n .send(&email)\n })\n .map_err(|error| {\n error!(?error, \"Failed to send email\");\n server_error(\"Failed to send the email\")\n })?;\n\n info!(?message_id, ?subject, \"Email sent\");\n }\n EmailBackend::FileSystem { path } => {\n let id = FileTransport::new(path).send(&email).map_err(|error| {\n error!(?error, \"Failed to send email\");\n server_error(\"Email file could not be generated\")\n })?;\n\n info!(\n path = ?path.join(format!(\"{id}.eml\")),\n ?subject,\n \"Email sent\"\n );\n }\n EmailBackend::Memory { mails } => {\n mails.lock().unwrap().push(StoredEmail {\n to: recipient.into(),\n subject: subject.into(),\n body: body.into(),\n });\n }\n }\n\n Ok(())\n }\n\n fn sender_address(&self) -> &str {\n match &self.backend {\n EmailBackend::Smtp { login, .. } => login,\n EmailBackend::FileSystem { .. } => \"test@localhost\",\n EmailBackend::Memory { .. } => \"test@localhost\",\n }\n }\n}\n\nenum EmailBackend {\n /// Backend used in production to send mails using SMTP.\n Smtp {\n server: String,\n login: String,\n password: String,\n },\n /// Backend used locally during development, will store the emails in the provided directory.\n FileSystem { path: PathBuf },\n /// Backend used during tests, will keep messages in memory to allow tests to retrieve them.\n Memory { mails: Mutex> },\n}\n\n// Custom Debug implementation to avoid showing the SMTP password.\nimpl std::fmt::Debug for EmailBackend {\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n match self {\n EmailBackend::Smtp { server, login, .. } => {\n // The password field is *intentionally* not included\n f.debug_struct(\"Smtp\")\n .field(\"server\", server)\n .field(\"login\", login)\n .finish()?;\n }\n EmailBackend::FileSystem { path } => {\n f.debug_struct(\"FileSystem\").field(\"path\", path).finish()?;\n }\n EmailBackend::Memory { .. 
} => f.write_str(\"Memory\")?,\n }\n Ok(())\n }\n}\n\n#[derive(Debug, Clone)]\npub struct StoredEmail {\n pub to: String,\n pub subject: String,\n pub body: String,\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n\n #[test]\n fn sending_to_invalid_email_fails() {\n let emails = Emails::new_in_memory();\n\n assert_err!(emails.send(\n \"String.Format(\\\"{0}.{1}@live.com\\\", FirstName, LastName)\",\n \"test\",\n \"test\",\n ));\n }\n\n #[test]\n fn sending_to_valid_email_succeeds() {\n let emails = Emails::new_in_memory();\n\n assert_ok!(emails.send(\"someone@example.com\", \"test\", \"test\"));\n }\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1146,"cells":{"blob_id":{"kind":"string","value":"0264df150e0ef52391e318759d05ad4079086336"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"michaelherger/librespot"},"path":{"kind":"string","value":"/core/src/util.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":589,"string":"589"},"score":{"kind":"number","value":3.09375,"string":"3.09375"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":["LicenseRef-scancode-warranty-disclaimer","MIT"],"string":"[\n \"LicenseRef-scancode-warranty-disclaimer\",\n \"MIT\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"use std::mem;\n\npub trait Seq {\n fn next(&self) -> Self;\n}\n\nmacro_rules! impl_seq {\n ($($ty:ty)*) => { $(\n impl Seq for $ty {\n fn next(&self) -> Self { (*self).wrapping_add(1) }\n }\n )* }\n}\n\nimpl_seq!(u8 u16 u32 u64 usize);\n\n#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord, Default)]\npub struct SeqGenerator(T);\n\nimpl SeqGenerator {\n pub fn new(value: T) -> Self {\n SeqGenerator(value)\n }\n\n pub fn get(&mut self) -> T {\n let value = self.0.next();\n mem::replace(&mut self.0, value)\n }\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1147,"cells":{"blob_id":{"kind":"string","value":"3e59f3b73d6bfb037621cc2624509352efe28e92"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"aheart/hearth"},"path":{"kind":"string","value":"/src/metrics/mod.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":1523,"string":"1,523"},"score":{"kind":"number","value":2.671875,"string":"2.671875"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":["MIT","LicenseRef-scancode-unknown-license-reference","Apache-2.0"],"string":"[\n \"MIT\",\n \"LicenseRef-scancode-unknown-license-reference\",\n \"Apache-2.0\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"pub mod aggregator;\nmod cpu;\nmod disk;\npub mod hub;\nmod la;\nmod metric_buffer;\nmod network;\nmod ram;\nmod space;\n\nuse std::time::SystemTime;\n\n#[derive(PartialEq, Debug)]\npub enum Metrics {\n Cpu(cpu::CpuMetrics),\n Disk(disk::DiskMetrics),\n La(la::LaMetrics),\n Net(network::NetMetrics),\n Ram(ram::RamMetrics),\n Space(space::SpaceMetrics),\n}\n\n/// Interface for Metric Plugins that possess the knowledge of retrieving raw metric data and\n/// processing this raw data into structured Metric key value pairs.\npub trait MetricPlugin: Send + 'static {\n /// Returns a command that should be run in order to retrieve raw data\n fn get_query(&self) -> &str;\n\n /// Transforms raw data into a HashMap of metrics\n fn 
process_data(&mut self, raw_data: &str, timestamp: &SystemTime) -> Metrics;\n\n /// Returns a HashMap with keys and empty values\n fn empty_metrics(&self) -> Metrics;\n}\n\n/// Creates all possible metric plugins and returns them as a HashMap\nfn metric_plugin_factory(\n disk: &str,\n filesystem: &str,\n network_interface: &str,\n) -> Vec> {\n let metric_plugins: Vec> = vec![\n Box::new(cpu::CpuMetricPlugin::new()),\n Box::new(ram::RamMetricPlugin::new()),\n Box::new(la::LoadAverageMetricPlugin::new()),\n Box::new(disk::DiskMetricPlugin::new(disk)),\n Box::new(network::NetworkMetricPlugin::new(network_interface)),\n Box::new(space::SpaceMetricPlugin::new(filesystem)),\n ];\n\n metric_plugins\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1148,"cells":{"blob_id":{"kind":"string","value":"81754a6d5d7adb8cc231319c4b1e3c1837370eda"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"lorenzoditucci/calyx"},"path":{"kind":"string","value":"/calyx/src/passes/group_to_invoke.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":4197,"string":"4,197"},"score":{"kind":"number","value":2.828125,"string":"2.828125"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"use std::rc::Rc;\n\nuse itertools::Itertools;\n\nuse crate::analysis::ReadWriteSet;\nuse crate::ir::RRC;\nuse crate::ir::{\n self,\n traversal::{Action, Named, VisResult, Visitor},\n};\n\n/// Transform groups that are structurally invoking components into equivalent\n/// [ir::Invoke] statements.\n///\n/// For a group to meet the requirements of this pass, it must\n/// 1. Only use unguarded assignments\n/// 2. Only assign to input ports of one component\n/// 3. Assign `1'd1` to the @go port of the component, and\n/// 4. 
Depend directly on the @done port of the component for its done\n/// condition.\n#[derive(Default)]\npub struct GroupToInvoke;\n\nimpl Named for GroupToInvoke {\n fn name() -> &'static str {\n \"group2invoke\"\n }\n\n fn description() -> &'static str {\n \"covert groups that structurally invoke one component into invoke statements\"\n }\n}\n\n/// Construct an [ir::Invoke] from an [ir::Group] that has been validated by this pass.\nfn construct_invoke(\n assigns: &[ir::Assignment],\n comp: RRC,\n) -> ir::Control {\n let mut inputs = Vec::new();\n let mut outputs = Vec::new();\n\n let cell_is_parent = |port: &ir::Port| -> bool {\n if let ir::PortParent::Cell(cell_wref) = &port.parent {\n Rc::ptr_eq(&cell_wref.upgrade(), &comp)\n } else {\n false\n }\n };\n\n for assign in assigns {\n // If the cell's port is being used as a source, add the dst to\n // outputs\n if cell_is_parent(&assign.src.borrow())\n && assign.src != comp.borrow().get_with_attr(\"done\")\n {\n let name = assign.src.borrow().name.clone();\n outputs.push((name, Rc::clone(&assign.dst)));\n }\n // If the cell's port is being used as a dest, add the source to\n // inputs\n if cell_is_parent(&assign.dst.borrow())\n && assign.dst != comp.borrow().get_with_attr(\"go\")\n {\n let name = assign.dst.borrow().name.clone();\n inputs.push((name, Rc::clone(&assign.src)));\n }\n }\n\n ir::Control::invoke(comp, inputs, outputs)\n}\n\nimpl Visitor for GroupToInvoke {\n fn enable(\n &mut self,\n s: &mut ir::Enable,\n _comp: &mut ir::Component,\n _sigs: &ir::LibrarySignatures,\n ) -> VisResult {\n let group = s.group.borrow();\n\n // There should be exactly one component being written to in the\n // group.\n let mut writes =\n ReadWriteSet::write_set(&group.assignments).collect_vec();\n if writes.len() != 1 {\n return Ok(Action::Continue);\n }\n\n // Component must define a @go/@done interface\n let cell = writes.pop().unwrap();\n let maybe_go_port = cell.borrow().find_with_attr(\"go\");\n let maybe_done_port = cell.borrow().find_with_attr(\"done\");\n if maybe_go_port.is_none() || maybe_done_port.is_none() {\n return Ok(Action::Continue);\n }\n\n let go_port = maybe_go_port.unwrap();\n let mut go_multi_write = false;\n let done_port = maybe_done_port.unwrap();\n let mut done_multi_write = false;\n for assign in &group.assignments {\n // All assignments should be unguaraded.\n if !assign.guard.is_true() {\n return Ok(Action::Continue);\n }\n // @go port should have exactly one write and the src should be 1.\n if assign.dst == go_port {\n if go_multi_write {\n return Ok(Action::Continue);\n }\n if !go_multi_write && assign.src.borrow().is_constant(1, 1) {\n go_multi_write = true;\n }\n }\n // @done port should have exactly one read and the dst should be\n // group's done signal.\n if assign.src == done_port {\n if done_multi_write {\n return Ok(Action::Continue);\n }\n if !done_multi_write && assign.dst == group.get(\"done\") {\n done_multi_write = true;\n }\n }\n }\n\n Ok(Action::Change(construct_invoke(&group.assignments, cell)))\n 
}\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1149,"cells":{"blob_id":{"kind":"string","value":"b93dae2a656f75b8afab53af8cbad710419bc1ae"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"Sophie-Williams/GameRoom-Bot"},"path":{"kind":"string","value":"/src/command.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":836,"string":"836"},"score":{"kind":"number","value":3.09375,"string":"3.09375"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"use std::string::String;\n\nuse discord::model::{Message, ChannelId, User};\n\n#[derive(Debug)]\npub struct Command {\n user: User,\n channel_id: ChannelId,\n command: String,\n args: Vec,\n}\n\nimpl Command {\n pub fn parse(message: &Message) -> Command {\n let mut args: Vec = message.content.split_whitespace().map(|s| String::from(s)).collect();\n Command {\n user: message.author.clone(),\n channel_id: message.channel_id.clone(),\n command: args.remove(0),\n args: args,\n }\n }\n \n pub fn user(&self) -> &User {\n &self.user\n }\n pub fn channel_id(&self) -> &ChannelId {\n &self.channel_id\n }\n pub fn command(&self) -> &str {\n &*self.command\n }\n pub fn args(&self) -> &Vec {\n &self.args\n }\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1150,"cells":{"blob_id":{"kind":"string","value":"4177bdbb65c162c1eac67135c784f12bd836fda9"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"tinaun/playbot_ng_serenity"},"path":{"kind":"string","value":"/src/context.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":3842,"string":"3,842"},"score":{"kind":"number","value":3.046875,"string":"3.046875"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"use serenity;\nuse serenity::model::{\n channel::Message,\n id::{ChannelId, UserId},\n};\n\nuse threadpool::ThreadPool;\n\nuse regex::Regex;\nuse std::rc::Rc;\n\ntype SendFn = fn(&ThreadPool, ChannelId, &str) -> serenity::Result<()>;\n\n#[derive(Clone)]\npub struct Context<'a> {\n body: &'a str,\n is_directly_addressed: bool,\n send_fn: SendFn,\n source: UserId,\n source_nickname: &'a str,\n target: ChannelId,\n client: &'a Message,\n pool: &'a ThreadPool,\n current_nickname: Rc,\n}\n\nimpl<'a> Context<'a> {\n pub fn new(pool: &'a ThreadPool, message: &'a Message) -> Option {\n lazy_static! 
{\n static ref MENTION: Regex = Regex::new(r\"<@[0-9]*>\").unwrap();\n }\n\n let mut body = &message.content[..];\n let id = serenity::CACHE.read().user.id;\n\n let current_nickname = Rc::new(serenity::CACHE.read().user.name.to_owned());\n\n let source_nickname = &message.author.name;\n\n let source = message.author.id;\n\n let target = message.channel_id;\n\n let is_directly_addressed = {\n if body.starts_with(current_nickname.as_str()) {\n let new_body = body[current_nickname.len()..].trim_left();\n let has_separator = new_body.starts_with(\":\") || new_body.starts_with(\",\");\n\n if has_separator {\n body = new_body[1..].trim_left();\n }\n\n has_separator\n } else {\n let mentioned = message.mentions_user_id(id);\n \n if mentioned {\n let mention = MENTION\n .captures(body)\n .and_then(|cap| cap.get(0))\n .unwrap();\n\n body = body[mention.end()..].trim_left();\n }\n\n mentioned\n }\n };\n\n let send_fn: SendFn = |_pool, channel_id, msg| { channel_id.say(msg).map(|_| ()) }; \n\n Some(Self {\n client: message,\n pool,\n body,\n send_fn,\n source,\n source_nickname,\n target,\n is_directly_addressed,\n current_nickname\n })\n }\n\n pub fn body(&self) -> &'a str {\n self.body\n }\n\n /// Wether the message was aimed directetly at the bot,\n /// either via private message or by prefixing a channel message with\n /// the bot's name, followed by ',' or ':'.\n pub fn is_directly_addressed(&self) -> bool {\n self.is_directly_addressed\n }\n\n pub fn is_ctcp(&self) -> bool {\n false\n }\n\n pub fn reply>(&self, message: S) {\n let message = message.as_ref();\n eprintln!(\"Replying: {:?}\", message);\n for line in message.lines() {\n if line.len() > 2000 {\n let _ = (self.send_fn)(self.pool, self.target, \"<<>>\");\n continue;\n }\n let _ = (self.send_fn)(self.pool, self.target, line);\n }\n }\n\n pub fn source(&self) -> UserId {\n self.source\n }\n\n pub fn source_nickname(&self) -> &'a str {\n self.source_nickname\n }\n\n pub fn current_nickname(&self) -> Rc {\n self.current_nickname.clone()\n }\n\n pub fn inline_contexts<'b>(&'b self) -> impl Iterator> + 'b {\n lazy_static! {\n static ref INLINE_CMD: Regex = Regex::new(r\"\\{(.*?)}\").unwrap();\n }\n\n let body = if self.is_directly_addressed() { \"\" } else { self.body };\n\n let contexts = INLINE_CMD\n .captures_iter(body)\n .flat_map(|caps| caps.get(1))\n .map(move |body| Context {\n body: body.as_str(),\n .. 
self.clone()\n });\n \n Box::new(contexts)\n }\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1151,"cells":{"blob_id":{"kind":"string","value":"dee11345a693a0d28811763657a85ef4b000e0b2"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"Larusso/unity-version-manager"},"path":{"kind":"string","value":"/uvm_core/src/unity/version/mod.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":24312,"string":"24,312"},"score":{"kind":"number","value":2.59375,"string":"2.59375"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"use crate::unity::Installation;\nuse log::{debug, info};\nuse regex::Regex;\nuse semver;\nuse serde::{self, Deserialize, Deserializer, Serialize, Serializer};\nuse std::cmp::Ordering;\nuse std::convert::{AsMut, AsRef, From, TryFrom};\nuse std::fmt;\nuse std::path::{Path, PathBuf};\nuse std::result;\nuse std::str::FromStr;\nmod error;\nmod hash;\npub use error::{Result, VersionError};\npub mod manifest;\npub mod module;\n\nuse crate::sys::unity::version as version_impl;\n\npub use self::hash::all_versions;\nuse self::hash::UnityHashError;\n\npub use self::version_impl::read_version_from_path;\n\n#[derive(PartialEq, Eq, Ord, Hash, Debug, Clone, Copy, Deserialize)]\npub enum VersionType {\n Alpha,\n Beta,\n Patch,\n Final,\n}\n\nimpl PartialOrd for VersionType {\n fn partial_cmp(&self, other: &VersionType) -> Option {\n Some(self.cmp(other))\n }\n}\n\n#[derive(Eq, Debug, Clone, Hash, PartialOrd)]\npub struct Version {\n base: semver::Version,\n release_type: VersionType,\n revision: u64,\n hash: Option,\n}\n\nimpl Ord for Version {\n fn cmp(&self, other: &Version) -> Ordering {\n self.base\n .cmp(&other.base)\n .then(self.release_type.cmp(&other.release_type))\n .then(self.revision.cmp(&other.revision))\n }\n}\n\nimpl Serialize for Version {\n fn serialize(&self, serializer: S) -> result::Result\n where\n S: Serializer,\n {\n let s = self.to_string();\n serializer.serialize_str(&s)\n }\n}\n\nimpl<'de> Deserialize<'de> for Version {\n fn deserialize(deserializer: D) -> result::Result\n where\n D: Deserializer<'de>,\n {\n let s = String::deserialize(deserializer)?;\n Version::from_str(&s).map_err(serde::de::Error::custom)\n }\n}\n\nimpl Version {\n pub fn new(\n major: u64,\n minor: u64,\n patch: u64,\n release_type: VersionType,\n revision: u64,\n ) -> Version {\n let base = semver::Version::new(major, minor, patch);\n Version {\n base,\n release_type,\n revision,\n hash: None,\n }\n }\n\n pub fn from_path>(path: P) -> Result {\n version_impl::read_version_from_path(path)\n }\n\n pub fn a(major: u64, minor: u64, patch: u64, revision: u64) -> Version {\n let base = semver::Version::new(major, minor, patch);\n Version {\n base,\n release_type: VersionType::Alpha,\n revision,\n hash: None,\n }\n }\n\n pub fn b(major: u64, minor: u64, patch: u64, revision: u64) -> Version {\n let base = semver::Version::new(major, minor, patch);\n Version {\n base,\n release_type: VersionType::Beta,\n revision,\n hash: None,\n }\n }\n\n pub fn p(major: u64, minor: u64, patch: u64, revision: u64) -> Version {\n let base = semver::Version::new(major, minor, patch);\n Version {\n base,\n release_type: VersionType::Patch,\n revision,\n hash: None,\n }\n }\n\n pub fn f(major: u64, minor: u64, patch: u64, revision: 
u64) -> Version {\n let base = semver::Version::new(major, minor, patch);\n Version {\n base,\n release_type: VersionType::Final,\n revision,\n hash: None,\n }\n }\n\n pub fn release_type(&self) -> &VersionType {\n &self.release_type\n }\n\n pub fn version_hash(&self) -> Result {\n self.hash\n .as_ref()\n .map(|h| h.to_owned())\n .ok_or_else(|| VersionError::HashMissing {\n source: UnityHashError::Other,\n version: self.to_string(),\n })\n .or_else(|_err| {\n hash::hash_for_version(self).map_err(|source| VersionError::HashMissing {\n source,\n version: self.to_string(),\n })\n })\n }\n\n pub fn major(&self) -> u64 {\n self.base.major\n }\n\n pub fn minor(&self) -> u64 {\n self.base.minor\n }\n\n pub fn patch(&self) -> u64 {\n self.base.patch\n }\n\n pub fn revision(&self) -> u64 {\n self.revision\n }\n\n #[cfg(unix)]\n pub fn find_version_in_file>(path: P) -> Result {\n use std::process::{Command, Stdio};\n\n let path = path.as_ref();\n debug!(\"find unity version in Unity executable {}\", path.display());\n\n let child = Command::new(\"strings\")\n .arg(\"--\")\n .arg(path)\n .stdout(Stdio::piped())\n .stderr(Stdio::piped())\n .spawn()?;\n\n let output = child.wait_with_output()?;\n\n if !output.status.success() {\n return Err(VersionError::ExecutableContainsNoVersion(\n path.display().to_string(),\n ));\n }\n\n let version = Version::from_str(&String::from_utf8_lossy(&output.stdout))?;\n debug!(\"found version {}\", &version);\n Ok(version)\n }\n\n pub fn base(&self) -> &semver::Version {\n &self.base\n }\n\n pub fn as_semver(&self) -> semver::Version {\n let mut v = self.base.clone();\n if self.release_type != VersionType::Final {\n v.pre = semver::Prerelease::new(&format!(\"{}.{}\", self.release_type, self.revision))\n .unwrap();\n }\n v\n }\n\n pub fn set_version_hash>(&mut self, hash: Option) {\n self.hash = hash.map(|s| s.as_ref().to_owned());\n }\n\n pub fn has_version_hash(&self) -> bool {\n self.hash.is_some()\n }\n}\n\nimpl PartialEq for Version {\n fn eq(&self, other: &Self) -> bool {\n let eq = self.base == other.base && self.release_type == other.release_type && self.revision == other.revision;\n if self.hash.is_some() && other.hash.is_some() {\n return eq && self.hash == other.hash\n }\n eq\n }\n}\n\nimpl From<(u64, u64, u64, u64)> for Version {\n fn from(tuple: (u64, u64, u64, u64)) -> Version {\n let (major, minor, patch, revision) = tuple;\n Version::f(major, minor, patch, revision)\n }\n}\n\nimpl TryFrom for Version {\n type Error = VersionError;\n\n fn try_from(path: PathBuf) -> Result {\n Version::from_path(path)\n }\n}\n\nimpl TryFrom<&Path> for Version {\n type Error = VersionError;\n\n fn try_from(path: &Path) -> Result {\n Version::from_path(path)\n }\n}\n\nimpl fmt::Display for VersionType {\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n if f.alternate() {\n match *self {\n VersionType::Final => write!(f, \"final\"),\n VersionType::Patch => write!(f, \"patch\"),\n VersionType::Beta => write!(f, \"beta\"),\n VersionType::Alpha => write!(f, \"alpha\"),\n }\n } else {\n match *self {\n VersionType::Final => write!(f, \"f\"),\n VersionType::Patch => write!(f, \"p\"),\n VersionType::Beta => write!(f, \"b\"),\n VersionType::Alpha => write!(f, \"a\"),\n }\n }\n }\n}\n\nimpl Default for VersionType {\n fn default() -> VersionType {\n VersionType::Final\n }\n}\n\nimpl fmt::Display for Version {\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n write!(\n f,\n \"{}{}{}\",\n self.base,\n self.release_type.to_string(),\n self.revision\n )\n 
}\n}\n\nimpl AsRef for Version {\n fn as_ref(&self) -> &Self {\n self\n }\n}\n\nimpl AsMut for Version {\n fn as_mut(&mut self) -> &mut Self {\n self\n }\n}\n\nimpl FromStr for Version {\n type Err = VersionError;\n\n fn from_str(s: &str) -> Result {\n let version_pattern =\n Regex::new(r\"([0-9]{1,4})\\.([0-9]{1,4})\\.([0-9]{1,4})(f|p|b|a)([0-9]{1,4})( \\(([a-z0-9]{12})\\)|/([a-z0-9]{12}))?\").unwrap();\n match version_pattern.captures(s) {\n Some(caps) => {\n let major: u64 = caps.get(1).map_or(\"0\", |m| m.as_str()).parse().unwrap();\n let minor: u64 = caps.get(2).map_or(\"0\", |m| m.as_str()).parse().unwrap();\n let patch: u64 = caps.get(3).map_or(\"0\", |m| m.as_str()).parse().unwrap();\n\n let release_type = match caps.get(4).map_or(\"\", |m| m.as_str()) {\n \"f\" => Some(VersionType::Final),\n \"p\" => Some(VersionType::Patch),\n \"b\" => Some(VersionType::Beta),\n \"a\" => Some(VersionType::Alpha),\n _ => None,\n };\n\n let revision: u64 = caps.get(5).map_or(\"0\", |m| m.as_str()).parse().unwrap();\n let hash = caps.get(7).or(caps.get(8)).map(|m| m.as_str().to_owned());\n let base = semver::Version::new(major, minor, patch);\n Ok(Version {\n base,\n revision,\n release_type: release_type.unwrap(),\n hash: hash,\n })\n }\n None => Err(VersionError::ParsingFailed(s.to_string())),\n }\n }\n}\n\nimpl FromStr for VersionType {\n type Err = VersionError;\n\n fn from_str(s: &str) -> Result {\n match s {\n \"f\" => Ok(VersionType::Final),\n \"p\" => Ok(VersionType::Patch),\n \"b\" => Ok(VersionType::Beta),\n \"a\" => Ok(VersionType::Alpha),\n \"final\" => Ok(VersionType::Final),\n \"patch\" => Ok(VersionType::Patch),\n \"beta\" => Ok(VersionType::Beta),\n \"alpha\" => Ok(VersionType::Alpha),\n _ => Err(VersionError::VersionTypeParsingFailed(s.to_string())),\n }\n }\n}\n\nimpl From for Version {\n fn from(item: Installation) -> Self {\n item.version_owned()\n }\n}\n\npub fn fetch_matching_version>(\n versions: I,\n version_req: semver::VersionReq,\n release_type: VersionType,\n) -> Result {\n versions\n .filter(|version| {\n let semver_version = if version.release_type() < &release_type {\n debug!(\n \"version {} release type is smaller than specified type {:#}\",\n version, release_type\n );\n let mut semver_version = version.base().clone();\n semver_version.pre = semver::Prerelease::new(&format!(\n \"{}.{}\",\n version.release_type, version.revision\n ))\n .unwrap();\n semver_version\n } else {\n let b = version.base().clone();\n debug!(\n \"use base semver version {} of {} for comparison\",\n b, version\n );\n b\n };\n\n let is_match = version_req.matches(&semver_version);\n if is_match {\n info!(\"version {} is a match\", version);\n } else {\n info!(\"version {} is not a match\", version);\n }\n\n is_match\n })\n .max()\n .ok_or_else(|| VersionError::NoMatch(version_req.to_string()))\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n\n macro_rules! invalid_version_input {\n ($($name:ident: $input:expr),*) => {\n $(\n #[test]\n fn $name() {\n let version_string = $input;\n let version = Version::from_str(version_string);\n assert!(version.is_err(), \"invalid input returns None\")\n }\n )*\n };\n }\n\n macro_rules! valid_version_input {\n ($($name:ident: $input:expr),*) => {\n $(\n #[test]\n fn $name() {\n let version_string = $input;\n let version = Version::from_str(version_string);\n assert!(version.is_ok(), \"valid input returns a version\")\n }\n )*\n };\n }\n\n invalid_version_input! 
{\n when_version_is_empty: \"dsd\",\n when_version_is_a_random_string: \"sdfrersdfgsdf\",\n when_version_is_a_short_version: \"1.2\",\n when_version_is_semver: \"1.2.3\",\n when_version_contains_unknown_release_type: \"1.2.3g2\"\n }\n\n valid_version_input! {\n when_version_has_single_digits: \"1.2.3f4\",\n when_version_has_long_digits: \"0.0.0f43\",\n when_version_has_only_zero_digits: \"0.0.0f0\",\n when_version_has_optional_hash_project_settings_style: \"2020.3.38f1 (8f5fde82e2dc)\",\n when_version_has_optional_hash_unity_hub_style: \"2020.3.38f1/8f5fde82e2dc\"\n }\n\n #[test]\n fn parse_version_string_with_valid_input() {\n let version_string = \"1.2.3f4\";\n let version = Version::from_str(version_string);\n assert!(version.is_ok(), \"valid input returns a version\")\n }\n\n #[test]\n fn splits_version_string_into_components() {\n let version_string = \"1.2.3f4\";\n let version = Version::from_str(version_string).ok().unwrap();\n\n assert!(version.base.major == 1, \"parse correct major component\");\n assert!(version.base.minor == 2, \"parse correct minor component\");\n assert!(version.base.patch == 3, \"parse correct patch component\");\n\n assert_eq!(version.release_type, VersionType::Final);\n assert!(version.revision == 4, \"parse correct revision component\");\n assert!(version.hash.is_none(), \"parse correct optional hash\")\n }\n\n #[test]\n fn splits_version_string_into_components_with_hash() {\n let version_string = \"1.2.3f4 (abcdefghijkm)\";\n let version = Version::from_str(version_string).ok().unwrap();\n\n assert!(version.base.major == 1, \"parse correct major component\");\n assert!(version.base.minor == 2, \"parse correct minor component\");\n assert!(version.base.patch == 3, \"parse correct patch component\");\n\n assert_eq!(version.release_type, VersionType::Final);\n assert!(version.revision == 4, \"parse correct revision component\");\n assert!(version.hash.unwrap() == \"abcdefghijkm\", \"parse correct optional hash\")\n }\n\n #[test]\n fn splits_version_string_into_components_with_hash_unity_hub_style() {\n let version_string = \"1.2.3f4/abcdefghijkm\";\n let version = Version::from_str(version_string).ok().unwrap();\n\n assert!(version.base.major == 1, \"parse correct major component\");\n assert!(version.base.minor == 2, \"parse correct minor component\");\n assert!(version.base.patch == 3, \"parse correct patch component\");\n\n assert_eq!(version.release_type, VersionType::Final);\n assert!(version.revision == 4, \"parse correct revision component\");\n assert!(version.hash.unwrap() == \"abcdefghijkm\", \"parse correct optional hash\")\n }\n\n #[test]\n fn orders_version_final_release_greater_than_patch() {\n let version_a = Version::from_str(\"1.2.3f4\").ok().unwrap();\n let version_b = Version::from_str(\"1.2.3p4\").ok().unwrap();\n assert_eq!(Ordering::Greater, version_a.cmp(&version_b));\n }\n\n #[test]\n fn orders_version_patch_release_greater_than_beta() {\n let version_a = Version::from_str(\"1.2.3p4\").ok().unwrap();\n let version_b = Version::from_str(\"1.2.3b4\").ok().unwrap();\n assert_eq!(Ordering::Greater, version_a.cmp(&version_b));\n }\n\n #[test]\n fn orders_version_final_release_greater_than_beta() {\n let version_a = Version::from_str(\"1.2.3f4\").ok().unwrap();\n let version_b = Version::from_str(\"1.2.3b4\").ok().unwrap();\n assert_eq!(Ordering::Greater, version_a.cmp(&version_b));\n }\n\n #[test]\n fn orders_version_all_equak() {\n let version_a = Version::from_str(\"1.2.3f4\").ok().unwrap();\n let version_b = 
Version::from_str(\"1.2.3f4\").ok().unwrap();\n assert_eq!(Ordering::Equal, version_a.cmp(&version_b));\n }\n\n #[test]\n fn orders_version_major_smaller() {\n let version_a = Version::from_str(\"1.2.3f4\").ok().unwrap();\n let version_b = Version::from_str(\"0.2.3f4\").ok().unwrap();\n assert_eq!(Ordering::Greater, version_a.cmp(&version_b));\n }\n\n #[test]\n fn orders_version_minor_smaller() {\n let version_a = Version::from_str(\"1.2.3f4\").ok().unwrap();\n let version_b = Version::from_str(\"1.1.3f4\").ok().unwrap();\n assert_eq!(Ordering::Greater, version_a.cmp(&version_b));\n }\n\n #[test]\n fn orders_version_patch_smaller() {\n let version_a = Version::from_str(\"1.2.3f4\").ok().unwrap();\n let version_b = Version::from_str(\"1.2.2f4\").ok().unwrap();\n assert_eq!(Ordering::Greater, version_a.cmp(&version_b));\n }\n\n #[test]\n fn orders_version_revision_smaller() {\n let version_a = Version::from_str(\"1.2.3f4\").ok().unwrap();\n let version_b = Version::from_str(\"1.2.3f3\").ok().unwrap();\n assert_eq!(Ordering::Greater, version_a.cmp(&version_b));\n }\n\n #[test]\n fn fetch_hash_for_known_version() {\n let version = Version::f(2017, 1, 0, 2);\n assert_eq!(\n version.version_hash().unwrap(),\n String::from(\"66e9e4bfc850\")\n );\n }\n\n #[test]\n fn compares_versions() {\n let version_a = Version::from_str(\"1.2.3f4\").ok().unwrap();\n let version_b = Version::from_str(\"1.2.3f4\").ok().unwrap();\n assert_eq!(version_a, version_b, \"testing version equality\");\n\n let version_c = Version::from_str(\"1.2.3f4\").ok().unwrap();\n let version_d = Version::from_str(\"1.2.3f5\").ok().unwrap();\n assert_ne!(version_c, version_d, \"testing version nonequality\"); \n\n let version_c = Version::from_str(\"1.2.3f4\").ok().unwrap();\n let version_d = Version::from_str(\"1.2.3f4/1234567890ab\").ok().unwrap();\n assert_eq!(version_c, version_d, \"testing version equality when one version has hash other not\"); \n\n let version_c = Version::from_str(\"1.2.3f4/0987654321ab\").ok().unwrap();\n let version_d = Version::from_str(\"1.2.3f4/1234567890ab\").ok().unwrap();\n assert_ne!(version_c, version_d, \"testing version equality when one version hash is different\"); \n }\n\n #[cfg(unix)]\n #[test]\n fn reads_version_from_binary_file() {\n use std::io::Write;\n use tempfile::Builder;\n\n let mut test_file = Builder::new()\n .prefix(\"version_binary\")\n .rand_bytes(5)\n .tempfile()\n .unwrap();\n\n let version = \"2018.2.1f2\";\n let version_hash = \"dft74dsds844\";\n\n //Some known result patterns\n let test_value_1 = format!(\"Unity {}\\n\", version);\n let test_value_2 = format!(\"{}_{}\\n\", version, version_hash);\n let test_value_3 = format!(\"{} ({})\\n\", version, version_hash);\n let test_value_4 = format!(\"Mozilla/5.0 (MacIntel; ) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.94 Safari/537.36 Unity/{} (unity3d.com;\\n\", version);\n let test_value_5 = format!(\"Invalid serialized file version. File: \\\"%s\\\". Expected version: {}. 
Actual version: %s.\\n\", version);\n let test_value_6 = format!(\n \"UnityPlayer/{} (UnityWebRequest/1.0, libcurl/7.52.0-DEV)\\n\",\n version\n );\n\n let f = test_file.as_file_mut();\n let random_bytes: Vec = (0..2048).map(|_| rand::random::()).collect();\n\n f.write_all(&random_bytes).unwrap();\n f.write_all(test_value_1.as_bytes()).unwrap();\n f.write_all(&random_bytes).unwrap();\n f.write_all(test_value_2.as_bytes()).unwrap();\n f.write_all(&random_bytes).unwrap();\n f.write_all(test_value_3.as_bytes()).unwrap();\n f.write_all(&random_bytes).unwrap();\n f.write_all(test_value_4.as_bytes()).unwrap();\n f.write_all(&random_bytes).unwrap();\n f.write_all(test_value_5.as_bytes()).unwrap();\n f.write_all(&random_bytes).unwrap();\n f.write_all(test_value_6.as_bytes()).unwrap();\n f.write_all(&random_bytes).unwrap();\n\n let v = Version::find_version_in_file(test_file.path()).unwrap();\n assert_eq!(v, Version::f(2018, 2, 1, 2));\n }\n\n #[cfg(unix)]\n #[test]\n fn fails_to_read_version_from_binary_file_if_verion_can_not_be_found() {\n use std::io::Write;\n use tempfile::Builder;\n\n let mut test_file = Builder::new()\n .prefix(\"version_binary\")\n .rand_bytes(5)\n .tempfile()\n .unwrap();\n\n let f = test_file.as_file_mut();\n let random_bytes: Vec = (0..8000).map(|_| rand::random::()).collect();\n\n f.write_all(&random_bytes).unwrap();\n let v = Version::find_version_in_file(test_file.path());\n assert!(v.is_err());\n }\n\n #[test]\n fn fetch_hash_for_unknown_version_yields_none() {\n let version = Version::f(2080, 2, 0, 2);\n assert!(version.version_hash().is_err());\n }\n\n proptest! {\n #[test]\n fn doesnt_crash(ref s in \"\\\\PC*\") {\n let _ = Version::from_str(s);\n }\n\n #[test]\n fn parses_all_valid_versions(ref s in r\"[0-9]{1,4}\\.[0-9]{1,4}\\.[0-9]{1,4}[fpb][0-9]{1,4}\") {\n Version::from_str(s).ok().unwrap();\n }\n\n #[test]\n fn parses_version_back_to_original(major in 0u64..9999, minor in 0u64..9999, patch in 0u64..9999, revision in 0u64..9999 ) {\n let v1 = Version {\n base: semver::Version::new(major, minor, patch),\n revision,\n release_type: VersionType::Final,\n hash: None\n };\n\n let v2 = Version::from_str(&format!(\"{:04}.{:04}.{:04}f{:04}\", major, minor, patch, revision)).ok().unwrap();\n prop_assert_eq!(v1, v2);\n }\n\n #[test]\n fn create_version_from_tuple(major in 0u64..9999, minor in 0u64..9999, patch in 0u64..9999, revision in 0u64..9999 ) {\n let v1 = Version {\n base: semver::Version::new(major, minor, patch),\n revision,\n release_type: VersionType::Final,\n hash: None\n };\n\n let v2:Version = (major, minor, patch, revision).into();\n prop_assert_eq!(v1, v2);\n }\n\n #[test]\n fn create_version_final_versions(major in 0u64..9999, minor in 0u64..9999, patch in 0u64..9999, revision in 0u64..9999 ) {\n let v1 = Version {\n base: semver::Version::new(major, minor, patch),\n revision,\n release_type: VersionType::Final,\n hash: None\n };\n\n let v2:Version = Version::f(major, minor, patch, revision);\n prop_assert_eq!(v1, v2);\n }\n\n #[test]\n fn create_version_beta_versions(major in 0u64..9999, minor in 0u64..9999, patch in 0u64..9999, revision in 0u64..9999 ) {\n let v1 = Version {\n base: semver::Version::new(major, minor, patch),\n revision,\n release_type: VersionType::Beta,\n hash: None\n };\n\n let v2:Version = Version::b(major, minor, patch, revision);\n prop_assert_eq!(v1, v2);\n }\n\n #[test]\n fn create_version_alpha_versions(major in 0u64..9999, minor in 0u64..9999, patch in 0u64..9999, revision in 0u64..9999 ) {\n let v1 = Version {\n base: 
semver::Version::new(major, minor, patch),\n revision,\n release_type: VersionType::Alpha,\n hash: None\n };\n\n let v2:Version = Version::a(major, minor, patch, revision);\n prop_assert_eq!(v1, v2);\n }\n\n #[test]\n fn create_version_patch_versions(major in 0u64..9999, minor in 0u64..9999, patch in 0u64..9999, revision in 0u64..9999 ) {\n let v1 = Version {\n base: semver::Version::new(major, minor, patch),\n revision,\n release_type: VersionType::Patch,\n hash: None\n };\n\n let v2:Version = Version::p(major, minor, patch, revision);\n prop_assert_eq!(v1, v2);\n }\n }\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1152,"cells":{"blob_id":{"kind":"string","value":"3a3e6db3ec6b081684634f6365f9e1bffc59153d"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"thchittenden/rust-kernel"},"path":{"kind":"string","value":"/src/interrupt/timer.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":1688,"string":"1,688"},"score":{"kind":"number","value":2.9375,"string":"2.9375"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"#![allow(dead_code)] // Constants.\nuse util::asm;\n\nconst TIMER_CHAN0: u16 = 0x0040;\nconst TIMER_CHAN1: u16 = 0x0041;\nconst TIMER_CHAN2: u16 = 0x0042;\nconst TIMER_COMM: u16 = 0x0043;\n\n/// The timer frequency in hertz.\nconst TIMER_FREQ: u32 = 1_193_182;\n\n/// The desired interrupt frequency in hertz.\nconst INT_FREQ: u32 = 1_000;\n\n/// The timer divider.\nconst TIMER_DIV: u32 = TIMER_FREQ / INT_FREQ;\n\n/// x86 timer commands.\nbitflags! {\n flags TimerCommand: u8 {\n const Binary = 0b0000_0000,\n const BCD = 0b0000_0001,\n const Mode0 = 0b0000_0000, // Interrupt on terminal count.\n const Mode1 = 0b0000_0010, // Hardware one shot.\n const Mode2 = 0b0000_0100, // Rate generator.\n const Mode3 = 0b0000_0110, // Square wave.\n const Mode4 = 0b0000_1000, // Software strobe.\n const Mode5 = 0b0000_1010, // Hardware strobe.\n const LoOnly = 0b0001_0000,\n const HiOnly = 0b0010_0000,\n const LoHi = 0b0011_0000,\n const Chan0 = 0b0000_0000,\n const Chan1 = 0b0100_0000,\n const Chan2 = 0b1000_0000,\n }\n}\n\n/// Initializes the timer and sets the default frequency.\npub fn init_timer() {\n set_frequency(INT_FREQ);\n}\n\n/// Sets the frequency of the timer.\n///\n/// # Panics\n///\n/// Panics if the requested frequency cannot be set.\npub fn set_frequency(req_freq: u32) {\n let div = TIMER_FREQ / req_freq;\n assert!(div <= u16::max_value() as u32);\n let div_lo = getbyte!(div, 0);\n let div_hi = getbyte!(div, 1);\n let command = (Binary | Mode3 | LoHi | Chan0).bits;\n asm::outb8(TIMER_COMM, command);\n asm::outb8(TIMER_CHAN0, div_lo);\n asm::outb8(TIMER_CHAN0, div_hi);\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1153,"cells":{"blob_id":{"kind":"string","value":"991a1ed94869dde2f068b53d42c6ba8b34ee28c3"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"sirxyzzy/ilbm"},"path":{"kind":"string","value":"/src/lib.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":4464,"string":"4,464"},"score":{"kind":"number","value":2.84375,"string":"2.84375"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list 
like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"#[macro_use]\nextern crate log;\npub mod iff;\nmod bytes;\nmod compression;\nmod read;\n\nuse iff::ChunkId;\nuse thiserror::Error;\nuse std::path::Path;\n\n/// Global settings when reading image files\npub struct ReadOptions {\n pub read_pixels: bool,\n pub page_scale: bool,\n}\n\n/// Main entry point\npub fn read_from_file>(file: P, options: ReadOptions) -> Result {\n read::read_file(file, options)\n}\n\n/// Custom errors for ilbm library\n#[derive(Error, Debug)]\npub enum IlbmError {\n #[error(\"invalid header (expected {expected:?}, found {actual:?})\")]\n InvalidHeader {\n expected: String,\n actual: String,\n },\n\n #[error(\"invalid data: {0}\")]\n InvalidData (\n String\n ),\n\n #[error(\"File does not contain image data\")]\n NoImage,\n\n #[error(\"No planes, possibly a color map with no image data\")]\n NoPlanes,\n\n #[error(\"File does not contain image header (FORM.BMHD)\")]\n NoHeader,\n\n #[error(\"Color map of map_size {map_size:?} has no entry for {index:?}\")]\n NoMapEntry{ index: usize, map_size: usize},\n\n #[error(\"Unexpected end of image data\")]\n NoData,\n\n #[error(\"{0} not supported\")]\n NotSupported(String),\n\n #[error(\"IO Error\")]\n Io {\n #[from]\n source: std::io::Error\n },\n}\n\n/// Standardize my result Errors\npub type Result = std::result::Result;\n\n#[derive(Debug,Clone,Copy, PartialEq)]\npub enum Masking {\n NoMask, \n HasMask,\n HasTransparentColor,\n Lasso\n}\n\nimpl Default for Masking {\n fn default() -> Self { Masking::NoMask }\n}\n\nfn as_masking(v: u8) -> Masking {\n match v {\n 0 => Masking::NoMask,\n 1 => Masking::HasMask,\n 2 => Masking::HasTransparentColor,\n 3 => Masking::Lasso,\n x => {\n error!(\"Masking value of {} unsupported, mapping to None\", x);\n Masking::NoMask\n }\n }\n}\n\n/// Display mode, aka ModeID is Amiga specific, and quite complex\n/// in terms of interpretation. 
However, our usage is pretty trivial\n// It comes from the CAMG chunk \n#[derive(Copy, Debug, Clone, Default)]\npub struct DisplayMode (u32);\n\nimpl DisplayMode {\n pub fn is_ham(&self) -> bool {self.0 & 0x800 != 0} \n pub fn is_halfbrite(&self) -> bool {self.0 & 0x80 != 0}\n\n pub fn new(mode: u32) -> DisplayMode {\n DisplayMode(mode)\n }\n \n pub fn ham() -> DisplayMode {\n DisplayMode(0x800)\n }\n}\n\nimpl std::fmt::Display for DisplayMode {\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {\n if self.is_ham() { \n write!(f, \"0x{:X} HAM\", self.0)\n } else if self.is_halfbrite() { \n write!(f, \"0x{:X} HALF\", self.0) \n } else {\n write!(f, \"0x{:X}\", self.0)\n }\n }\n}\n\n#[derive(Copy, Debug, Clone, Default)]\npub struct RgbValue (u8, u8, u8);\n\n/// This is an amalgam of information drawn from\n/// various chunks in the ILBM, mapped to more native\n/// types such as usize for u16, and enums for masking\n#[derive(Debug, Default)]\npub struct IlbmImage {\n pub size: Size2D,\n pub map_size: usize,\n pub planes: usize,\n pub masking: Masking,\n pub compression: bool,\n pub display_mode: DisplayMode,\n pub dpi: Size2D,\n pub pixel_aspect: Size2D,\n pub transparent_color: usize, // Actually a color index\n pub page_size: Size2D,\n\n /// RGB data triples\n /// Left to right in row, then top to bottom\n /// so indexes look like y * width + x where\n /// y=0 is the top \n pub pixels: Vec\n}\n\nimpl std::fmt::Display for IlbmImage {\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {\n let compressed = if self.compression { \"Comp\" } else { \"\" };\n write!(f, \"{} dpi:{} p:{} {} {:?} map:{} mode:{} aspect:{} trans:{} page:{}\",\n self.size, self.dpi, self.planes,\n compressed, self.masking, self.map_size, self.display_mode, \n self.pixel_aspect, self.transparent_color, self.page_size)\n }\n}\n\n#[derive(Debug, Copy, Clone, Default)]\npub struct Size2D (usize,usize);\n\nimpl Size2D {\n pub fn width(&self) -> usize {self.0}\n pub fn height(&self) -> usize {self.1}\n}\n\nimpl std::fmt::Display for Size2D {\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {\n write!(f, \"{}x{}\", self.width(), self.height()) \n }\n}\n\n#[derive(Debug, Clone)]\nstruct ColorMap {\n colors: Vec\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1154,"cells":{"blob_id":{"kind":"string","value":"98e4d999ddc132ac5e4863dcddad3bb61ae8a932"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"crlf0710/modern-web"},"path":{"kind":"string","value":"/mweb/src/classical/lexer.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":61619,"string":"61,619"},"score":{"kind":"number","value":3.234375,"string":"3.234375"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"use crate::utils::U8SpanRef;\nuse thiserror::Error;\n\npub mod ascii_char {\n #[derive(Copy, Clone, PartialEq)]\n enum AsciiCharCategory {\n Alphabetic,\n Digit,\n Symbol,\n InlineWhitespace,\n LineFeedWhitespace,\n Invalid,\n }\n\n fn ascii_char_category(ch: u8) -> AsciiCharCategory {\n match ch {\n 0x09 | 0xC | b' ' => AsciiCharCategory::InlineWhitespace,\n 0xA | 0xD => AsciiCharCategory::LineFeedWhitespace,\n b'A'..=b'Z' | b'a'..=b'z' => AsciiCharCategory::Alphabetic,\n 
b'0'..=b'9' => AsciiCharCategory::Digit,\n 0x0..=0x8 | 0xB | 0xE..=0x1F | 0x7F..=0xFF => AsciiCharCategory::Invalid,\n _ => AsciiCharCategory::Symbol,\n }\n }\n\n #[allow(dead_code)]\n pub fn is_invalid_char(ch: u8) -> bool {\n ascii_char_category(ch) == AsciiCharCategory::Invalid\n }\n\n pub fn is_inline_whitespace_char(ch: u8) -> bool {\n let category = ascii_char_category(ch);\n category == AsciiCharCategory::InlineWhitespace\n }\n\n pub fn is_whitespace_char(ch: u8) -> bool {\n let category = ascii_char_category(ch);\n category == AsciiCharCategory::InlineWhitespace\n || category == AsciiCharCategory::LineFeedWhitespace\n }\n\n #[allow(dead_code)]\n pub fn is_alphanumeric_char(ch: u8) -> bool {\n let category = ascii_char_category(ch);\n category == AsciiCharCategory::Alphabetic || category == AsciiCharCategory::Digit\n }\n\n pub fn is_numeric_char(ch: u8) -> bool {\n let category = ascii_char_category(ch);\n category == AsciiCharCategory::Digit\n }\n\n pub fn is_id_start(ch: u8) -> bool {\n match ch {\n b'A'..=b'Z' | b'a'..=b'z' | b'_' => true,\n _ => false,\n }\n }\n\n pub fn is_id_continue(ch: u8) -> bool {\n match ch {\n b'A'..=b'Z' | b'a'..=b'z' | b'0'..=b'9' | b'_' => true,\n _ => false,\n }\n }\n\n pub fn is_punct_char(ch: u8) -> bool {\n let category = ascii_char_category(ch);\n category == AsciiCharCategory::Symbol\n }\n\n pub fn is_octal_digit(ch: u8) -> bool {\n match ch {\n b'0'..=b'7' => true,\n _ => false,\n }\n }\n\n pub fn is_hex_digit(ch: u8) -> bool {\n match ch {\n b'0'..=b'9' | b'A'..=b'F' | b'a'..=b'f' => true,\n _ => false,\n }\n }\n}\n\npub mod ascii_str {\n use std::fmt::{self, Debug};\n use thiserror::Error;\n #[repr(transparent)]\n #[derive(PartialEq)]\n pub struct AsciiStr(pub [u8]);\n\n impl AsciiStr {\n pub fn try_split_ending_substr(&self, bytes: &Self) -> (&Self, Option<&Self>) {\n if (self.0).ends_with(&bytes.0) {\n let pos = self.0.len() - bytes.0.len();\n unsafe { std::mem::transmute((&(self.0)[..pos], Some(&(self.0)[pos..]))) }\n } else {\n (self, None)\n }\n }\n }\n\n #[derive(Error, Debug)]\n #[error(\"not 7-bit ascii string\")]\n pub struct NotAsciiStrError;\n\n pub fn from_bytes(bytes: &[u8]) -> Result<&AsciiStr, NotAsciiStrError> {\n for &byte in bytes {\n if byte >= 0x80 {\n return Err(NotAsciiStrError);\n }\n }\n unsafe { Ok(std::mem::transmute(bytes)) }\n }\n\n impl<'x> Debug for &'x AsciiStr {\n fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {\n let str = std::str::from_utf8(&self.0).map_err(|_| fmt::Error)?;\n write!(fmt, \"{:?}\", str).map_err(|_| fmt::Error)?;\n Ok(())\n }\n }\n}\n\nuse bitflags::bitflags;\n\nbitflags! 
{\n pub struct LexModeSet : u8 {\n const NOTHING = 0;\n const COMMENT = 0x1;\n const LIMBO = 0x2;\n const MODULE_NAME = 0x4;\n const STRING_LITERAL = 0x8;\n const PASCAL_TEXT = 0x10;\n const TEX_TEXT = 0x20;\n const DEFINITION_TEXT = 0x40;\n const INLINE_PASCAL_TEXT = 0x80;\n }\n}\n\nimpl LexModeSet {\n // workaround for https://github.com/bitflags/bitflags/issues/180\n const fn const_or(self, other: LexModeSet) -> Self {\n LexModeSet::from_bits_truncate(self.bits() | other.bits())\n }\n const fn contains_mode(&self, mode: LexMode) -> bool {\n (self.bits & mode.0) != 0\n }\n}\n\n#[derive(Copy, Clone, PartialEq, Eq)]\npub struct LexMode(u8);\n\nuse std::fmt;\n\nimpl fmt::Debug for LexMode {\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n let mode_text = match *self {\n LexMode::LIMBO => \"Limbo\",\n LexMode::TEX_TEXT => \"TeXText\",\n LexMode::MODULE_NAME => \"ModuleName\",\n LexMode::PASCAL_TEXT => \"PascalText\",\n LexMode::COMMENT => \"Comment\",\n LexMode::STRING_LITERAL => \"StrLiteral\",\n LexMode::DEFINITION_TEXT => \"DefinitionText\",\n LexMode::INLINE_PASCAL_TEXT => \"InlinePascalText\",\n _ => unreachable!(),\n };\n write!(f, \"{}\", mode_text).map_err(|_| fmt::Error)?;\n Ok(())\n }\n}\n\nimpl LexMode {\n pub const LIMBO: LexMode = LexMode(LexModeSet::LIMBO.bits);\n pub const TEX_TEXT: LexMode = LexMode(LexModeSet::TEX_TEXT.bits);\n pub const MODULE_NAME: LexMode = LexMode(LexModeSet::MODULE_NAME.bits);\n pub const PASCAL_TEXT: LexMode = LexMode(LexModeSet::PASCAL_TEXT.bits);\n pub const COMMENT: LexMode = LexMode(LexModeSet::COMMENT.bits);\n pub const STRING_LITERAL: LexMode = LexMode(LexModeSet::STRING_LITERAL.bits);\n pub const DEFINITION_TEXT: LexMode = LexMode(LexModeSet::DEFINITION_TEXT.bits);\n pub const INLINE_PASCAL_TEXT: LexMode = LexMode(LexModeSet::INLINE_PASCAL_TEXT.bits);\n}\n\npub mod control_code {\n use super::token::BoxedTokenList;\n use super::LexModeSet;\n\n #[derive(Copy, Clone)]\n pub enum SpecialHandling {\n None,\n GroupTitle,\n ModuleName,\n MacroDefinition,\n FormatDefinition,\n OctalConst,\n HexConst,\n ControlTextUpToAtGT,\n WarnAndIgnore, // occurred in xetex.web:9057\n }\n\n #[derive(Copy, Clone, PartialEq, Debug)]\n pub enum ControlCodeKind {\n EscapedAt,\n DefineModule,\n DefineStarredModule,\n DefineMacro,\n DefineFormat,\n DefineProgram,\n ModuleName,\n OctalConst,\n HexConst,\n StringPoolChecksum,\n MetaCommentBegin,\n MetaCommentEnd,\n ProgramAdjacent,\n ForceIndex,\n ForceIndexMono,\n ForceIndexStyle9,\n ForceHBox,\n ForceVerbatim,\n ForceEOL,\n UnderlineFlag,\n NoUnderlineFlag,\n FormatThinSpace,\n FormatLineBreak,\n FormatSuggestLineBreak,\n FormatLineBreakLarge,\n FormatNoLineBreak,\n FormatInvisibleSemicolon,\n HiddenEndOfModuleName,\n Ignored,\n }\n\n #[derive(Debug, PartialEq)]\n pub struct ControlCode<'x> {\n pub kind: ControlCodeKind,\n pub param: Option>,\n }\n\n pub struct ControlCodeInfoRecord {\n pub selector: &'static [u8],\n pub kind: ControlCodeKind,\n pub special_handling: SpecialHandling,\n pub terminating_modes: LexModeSet,\n pub appliable_modes: LexModeSet,\n }\n\n pub const CONTROL_CODE_DATA: &'static [ControlCodeInfoRecord] = &[\n ControlCodeInfoRecord {\n selector: b\"@\",\n kind: ControlCodeKind::EscapedAt,\n special_handling: SpecialHandling::None,\n terminating_modes: LexModeSet::NOTHING,\n appliable_modes: LexModeSet::COMMENT\n .const_or(LexModeSet::LIMBO)\n .const_or(LexModeSet::MODULE_NAME)\n .const_or(LexModeSet::PASCAL_TEXT)\n .const_or(LexModeSet::STRING_LITERAL)\n .const_or(LexModeSet::TEX_TEXT)\n 
.const_or(LexModeSet::DEFINITION_TEXT)\n .const_or(LexModeSet::INLINE_PASCAL_TEXT),\n },\n ControlCodeInfoRecord {\n selector: b\" \\t\\r\\n\",\n kind: ControlCodeKind::DefineModule,\n special_handling: SpecialHandling::None,\n terminating_modes: LexModeSet::LIMBO\n .const_or(LexModeSet::PASCAL_TEXT)\n .const_or(LexModeSet::TEX_TEXT)\n .const_or(LexModeSet::DEFINITION_TEXT)\n .const_or(LexModeSet::INLINE_PASCAL_TEXT),\n appliable_modes: LexModeSet::LIMBO\n .const_or(LexModeSet::PASCAL_TEXT)\n .const_or(LexModeSet::TEX_TEXT)\n .const_or(LexModeSet::DEFINITION_TEXT)\n .const_or(LexModeSet::INLINE_PASCAL_TEXT),\n },\n ControlCodeInfoRecord {\n selector: b\"*\",\n kind: ControlCodeKind::DefineStarredModule,\n special_handling: SpecialHandling::GroupTitle,\n terminating_modes: LexModeSet::LIMBO\n .const_or(LexModeSet::PASCAL_TEXT)\n .const_or(LexModeSet::TEX_TEXT)\n .const_or(LexModeSet::DEFINITION_TEXT),\n appliable_modes: LexModeSet::LIMBO\n .const_or(LexModeSet::PASCAL_TEXT)\n .const_or(LexModeSet::TEX_TEXT)\n .const_or(LexModeSet::DEFINITION_TEXT),\n },\n ControlCodeInfoRecord {\n selector: b\"dD\",\n kind: ControlCodeKind::DefineMacro,\n special_handling: SpecialHandling::MacroDefinition,\n terminating_modes: LexModeSet::PASCAL_TEXT\n .const_or(LexModeSet::TEX_TEXT)\n .const_or(LexModeSet::DEFINITION_TEXT),\n appliable_modes: LexModeSet::PASCAL_TEXT\n .const_or(LexModeSet::TEX_TEXT)\n .const_or(LexModeSet::DEFINITION_TEXT),\n },\n ControlCodeInfoRecord {\n selector: b\"fF\",\n kind: ControlCodeKind::DefineFormat,\n special_handling: SpecialHandling::FormatDefinition,\n terminating_modes: LexModeSet::PASCAL_TEXT\n .const_or(LexModeSet::TEX_TEXT)\n .const_or(LexModeSet::DEFINITION_TEXT),\n appliable_modes: LexModeSet::PASCAL_TEXT\n .const_or(LexModeSet::TEX_TEXT)\n .const_or(LexModeSet::DEFINITION_TEXT),\n },\n ControlCodeInfoRecord {\n selector: b\"pP\",\n kind: ControlCodeKind::DefineProgram,\n special_handling: SpecialHandling::None,\n terminating_modes: LexModeSet::PASCAL_TEXT\n .const_or(LexModeSet::TEX_TEXT)\n .const_or(LexModeSet::DEFINITION_TEXT),\n appliable_modes: LexModeSet::PASCAL_TEXT\n .const_or(LexModeSet::TEX_TEXT)\n .const_or(LexModeSet::DEFINITION_TEXT),\n },\n ControlCodeInfoRecord {\n selector: b\"<\",\n kind: ControlCodeKind::ModuleName,\n special_handling: SpecialHandling::ModuleName,\n terminating_modes: LexModeSet::TEX_TEXT.const_or(LexModeSet::DEFINITION_TEXT),\n appliable_modes: LexModeSet::PASCAL_TEXT\n .const_or(LexModeSet::DEFINITION_TEXT)\n .const_or(LexModeSet::TEX_TEXT),\n },\n ControlCodeInfoRecord {\n selector: b\"\\'\",\n kind: ControlCodeKind::OctalConst,\n special_handling: SpecialHandling::OctalConst,\n terminating_modes: LexModeSet::NOTHING,\n appliable_modes: LexModeSet::PASCAL_TEXT\n .const_or(LexModeSet::DEFINITION_TEXT)\n .const_or(LexModeSet::TEX_TEXT)\n .const_or(LexModeSet::INLINE_PASCAL_TEXT),\n },\n ControlCodeInfoRecord {\n selector: b\"\\\"\",\n kind: ControlCodeKind::HexConst,\n special_handling: SpecialHandling::HexConst,\n terminating_modes: LexModeSet::NOTHING,\n appliable_modes: LexModeSet::PASCAL_TEXT\n .const_or(LexModeSet::DEFINITION_TEXT)\n .const_or(LexModeSet::TEX_TEXT)\n .const_or(LexModeSet::INLINE_PASCAL_TEXT)\n .const_or(LexModeSet::COMMENT /*xetex.web:8641*/),\n },\n ControlCodeInfoRecord {\n selector: b\"$\",\n kind: ControlCodeKind::StringPoolChecksum,\n special_handling: SpecialHandling::None,\n terminating_modes: LexModeSet::NOTHING,\n appliable_modes: LexModeSet::PASCAL_TEXT\n .const_or(LexModeSet::DEFINITION_TEXT)\n 
.const_or(LexModeSet::INLINE_PASCAL_TEXT),\n },\n ControlCodeInfoRecord {\n selector: b\"{\",\n kind: ControlCodeKind::MetaCommentBegin,\n special_handling: SpecialHandling::None,\n terminating_modes: LexModeSet::NOTHING,\n appliable_modes: LexModeSet::PASCAL_TEXT\n .const_or(LexModeSet::DEFINITION_TEXT)\n .const_or(LexModeSet::INLINE_PASCAL_TEXT),\n },\n ControlCodeInfoRecord {\n selector: b\"}\",\n kind: ControlCodeKind::MetaCommentEnd,\n special_handling: SpecialHandling::None,\n terminating_modes: LexModeSet::NOTHING,\n appliable_modes: LexModeSet::PASCAL_TEXT\n .const_or(LexModeSet::DEFINITION_TEXT)\n .const_or(LexModeSet::INLINE_PASCAL_TEXT),\n },\n ControlCodeInfoRecord {\n selector: b\"&\",\n kind: ControlCodeKind::ProgramAdjacent,\n special_handling: SpecialHandling::None,\n terminating_modes: LexModeSet::NOTHING,\n appliable_modes: LexModeSet::PASCAL_TEXT\n .const_or(LexModeSet::DEFINITION_TEXT)\n .const_or(LexModeSet::INLINE_PASCAL_TEXT),\n },\n ControlCodeInfoRecord {\n selector: b\"^\",\n kind: ControlCodeKind::ForceIndex,\n special_handling: SpecialHandling::ControlTextUpToAtGT,\n terminating_modes: LexModeSet::NOTHING,\n appliable_modes: LexModeSet::PASCAL_TEXT\n .const_or(LexModeSet::DEFINITION_TEXT)\n .const_or(LexModeSet::TEX_TEXT)\n .const_or(LexModeSet::INLINE_PASCAL_TEXT),\n },\n ControlCodeInfoRecord {\n selector: b\".\",\n kind: ControlCodeKind::ForceIndexMono,\n special_handling: SpecialHandling::ControlTextUpToAtGT,\n terminating_modes: LexModeSet::NOTHING,\n appliable_modes: LexModeSet::PASCAL_TEXT\n .const_or(LexModeSet::DEFINITION_TEXT)\n .const_or(LexModeSet::TEX_TEXT)\n .const_or(LexModeSet::INLINE_PASCAL_TEXT),\n },\n ControlCodeInfoRecord {\n selector: b\":\",\n kind: ControlCodeKind::ForceIndexStyle9,\n special_handling: SpecialHandling::ControlTextUpToAtGT,\n terminating_modes: LexModeSet::NOTHING,\n appliable_modes: LexModeSet::PASCAL_TEXT\n .const_or(LexModeSet::DEFINITION_TEXT)\n .const_or(LexModeSet::TEX_TEXT)\n .const_or(LexModeSet::INLINE_PASCAL_TEXT),\n },\n ControlCodeInfoRecord {\n selector: b\"t\",\n kind: ControlCodeKind::ForceHBox,\n special_handling: SpecialHandling::ControlTextUpToAtGT,\n terminating_modes: LexModeSet::NOTHING,\n appliable_modes: LexModeSet::PASCAL_TEXT\n .const_or(LexModeSet::DEFINITION_TEXT)\n .const_or(LexModeSet::INLINE_PASCAL_TEXT),\n },\n ControlCodeInfoRecord {\n selector: b\"=\",\n kind: ControlCodeKind::ForceVerbatim,\n special_handling: SpecialHandling::ControlTextUpToAtGT,\n terminating_modes: LexModeSet::NOTHING,\n appliable_modes: LexModeSet::PASCAL_TEXT\n .const_or(LexModeSet::DEFINITION_TEXT)\n .const_or(LexModeSet::INLINE_PASCAL_TEXT),\n },\n ControlCodeInfoRecord {\n selector: b\"\\\\\",\n kind: ControlCodeKind::ForceEOL,\n special_handling: SpecialHandling::None,\n terminating_modes: LexModeSet::NOTHING,\n appliable_modes: LexModeSet::PASCAL_TEXT\n .const_or(LexModeSet::DEFINITION_TEXT)\n .const_or(LexModeSet::INLINE_PASCAL_TEXT),\n },\n ControlCodeInfoRecord {\n selector: b\"!\",\n kind: ControlCodeKind::UnderlineFlag,\n special_handling: SpecialHandling::None,\n terminating_modes: LexModeSet::NOTHING,\n appliable_modes: LexModeSet::PASCAL_TEXT\n .const_or(LexModeSet::DEFINITION_TEXT)\n .const_or(LexModeSet::TEX_TEXT)\n .const_or(LexModeSet::INLINE_PASCAL_TEXT),\n },\n ControlCodeInfoRecord {\n selector: b\"?\",\n kind: ControlCodeKind::NoUnderlineFlag,\n special_handling: SpecialHandling::None,\n terminating_modes: LexModeSet::NOTHING,\n appliable_modes: LexModeSet::PASCAL_TEXT\n 
.const_or(LexModeSet::DEFINITION_TEXT)\n .const_or(LexModeSet::TEX_TEXT)\n .const_or(LexModeSet::INLINE_PASCAL_TEXT),\n },\n ControlCodeInfoRecord {\n selector: b\",\",\n kind: ControlCodeKind::FormatThinSpace,\n special_handling: SpecialHandling::None,\n terminating_modes: LexModeSet::NOTHING,\n appliable_modes: LexModeSet::PASCAL_TEXT\n .const_or(LexModeSet::DEFINITION_TEXT)\n .const_or(LexModeSet::INLINE_PASCAL_TEXT),\n },\n ControlCodeInfoRecord {\n selector: b\"/\",\n kind: ControlCodeKind::FormatLineBreak,\n special_handling: SpecialHandling::None,\n terminating_modes: LexModeSet::NOTHING,\n appliable_modes: LexModeSet::PASCAL_TEXT\n .const_or(LexModeSet::DEFINITION_TEXT)\n .const_or(LexModeSet::INLINE_PASCAL_TEXT),\n },\n ControlCodeInfoRecord {\n selector: b\"|\",\n kind: ControlCodeKind::FormatSuggestLineBreak,\n special_handling: SpecialHandling::None,\n terminating_modes: LexModeSet::NOTHING,\n appliable_modes: LexModeSet::PASCAL_TEXT\n .const_or(LexModeSet::DEFINITION_TEXT)\n .const_or(LexModeSet::INLINE_PASCAL_TEXT),\n },\n ControlCodeInfoRecord {\n selector: b\"#\",\n kind: ControlCodeKind::FormatLineBreakLarge,\n special_handling: SpecialHandling::None,\n terminating_modes: LexModeSet::NOTHING,\n appliable_modes: LexModeSet::PASCAL_TEXT\n .const_or(LexModeSet::DEFINITION_TEXT)\n .const_or(LexModeSet::INLINE_PASCAL_TEXT),\n },\n ControlCodeInfoRecord {\n selector: b\"+\",\n kind: ControlCodeKind::FormatNoLineBreak,\n special_handling: SpecialHandling::None,\n terminating_modes: LexModeSet::NOTHING,\n appliable_modes: LexModeSet::PASCAL_TEXT\n .const_or(LexModeSet::DEFINITION_TEXT)\n .const_or(LexModeSet::INLINE_PASCAL_TEXT),\n },\n ControlCodeInfoRecord {\n selector: b\";\",\n kind: ControlCodeKind::FormatInvisibleSemicolon,\n special_handling: SpecialHandling::None,\n terminating_modes: LexModeSet::NOTHING,\n appliable_modes: LexModeSet::PASCAL_TEXT\n .const_or(LexModeSet::DEFINITION_TEXT)\n .const_or(LexModeSet::INLINE_PASCAL_TEXT),\n },\n ControlCodeInfoRecord {\n selector: b\"z\",\n kind: ControlCodeKind::Ignored,\n special_handling: SpecialHandling::WarnAndIgnore,\n terminating_modes: LexModeSet::NOTHING,\n appliable_modes: LexModeSet::PASCAL_TEXT,\n },\n ];\n pub fn get_control_code_info_record_for_selector(\n selector: u8,\n ) -> Option<&'static ControlCodeInfoRecord> {\n use once_cell::sync::Lazy;\n static CONTROL_CODE_TABLE: Lazy<[Option<&'static ControlCodeInfoRecord>; 256]> =\n Lazy::new(|| {\n let mut table = [None; 256];\n for item in CONTROL_CODE_DATA.iter() {\n for &ch in item.selector.iter() {\n assert!(table[ch as usize].is_none());\n table[ch as usize] = Some(item);\n }\n }\n table\n });\n CONTROL_CODE_TABLE[selector as usize]\n }\n}\n\npub mod operator {\n #[derive(Clone, PartialEq, Debug)]\n pub enum Operator {\n Plus,\n Subtract,\n Dereference,\n Equal,\n NotEqual,\n GreaterThan,\n LessThan,\n GreaterEq,\n LessEq,\n Multiply,\n Divide,\n Assign,\n }\n}\n\npub mod punctuation {\n use super::operator::Operator;\n\n #[derive(Clone, PartialEq, Debug)]\n pub enum Punctuation {\n Op(Operator),\n LParen,\n RParen,\n LBracket,\n RBracket,\n RangeUntil,\n WithType,\n ArgumentSeparator,\n EndOfStatement,\n DotOrEndOfProgram,\n DefineAs,\n Dollar,\n Backslash, /*xetex.web:24446*/\n }\n\n pub struct PunctuationInfo {\n pub literal: &'static [u8],\n pub kind: Punctuation,\n }\n\n pub const PUNCTUATION_TABLE: &[PunctuationInfo] = &[\n PunctuationInfo {\n literal: b\"..\",\n kind: Punctuation::RangeUntil,\n },\n PunctuationInfo {\n literal: b\":=\",\n kind: 
Punctuation::Op(Operator::Assign),\n },\n PunctuationInfo {\n literal: b\"<>\",\n kind: Punctuation::Op(Operator::NotEqual),\n },\n PunctuationInfo {\n literal: b\"==\",\n kind: Punctuation::DefineAs,\n },\n PunctuationInfo {\n literal: b\">=\",\n kind: Punctuation::Op(Operator::GreaterEq),\n },\n PunctuationInfo {\n literal: b\"<=\",\n kind: Punctuation::Op(Operator::LessEq),\n },\n PunctuationInfo {\n literal: b\">\",\n kind: Punctuation::Op(Operator::GreaterThan),\n },\n PunctuationInfo {\n literal: b\"<\",\n kind: Punctuation::Op(Operator::LessThan),\n },\n PunctuationInfo {\n literal: b\":\",\n kind: Punctuation::WithType,\n },\n PunctuationInfo {\n literal: b\"^\",\n kind: Punctuation::Op(Operator::Dereference),\n },\n PunctuationInfo {\n literal: b\"(\",\n kind: Punctuation::LParen,\n },\n PunctuationInfo {\n literal: b\")\",\n kind: Punctuation::RParen,\n },\n PunctuationInfo {\n literal: b\"[\",\n kind: Punctuation::LBracket,\n },\n PunctuationInfo {\n literal: b\"]\",\n kind: Punctuation::RBracket,\n },\n PunctuationInfo {\n literal: b\",\",\n kind: Punctuation::ArgumentSeparator,\n },\n PunctuationInfo {\n literal: b\";\",\n kind: Punctuation::EndOfStatement,\n },\n PunctuationInfo {\n literal: b\".\",\n kind: Punctuation::DotOrEndOfProgram,\n },\n PunctuationInfo {\n literal: b\"$\",\n kind: Punctuation::Dollar,\n },\n PunctuationInfo {\n literal: b\"=\",\n kind: Punctuation::Op(Operator::Equal),\n },\n PunctuationInfo {\n literal: b\"+\",\n kind: Punctuation::Op(Operator::Plus),\n },\n PunctuationInfo {\n literal: b\"-\",\n kind: Punctuation::Op(Operator::Subtract),\n },\n PunctuationInfo {\n literal: b\"*\",\n kind: Punctuation::Op(Operator::Multiply),\n },\n PunctuationInfo {\n literal: b\"/\",\n kind: Punctuation::Op(Operator::Divide),\n },\n PunctuationInfo {\n literal: b\"\\\\\",\n kind: Punctuation::Backslash,\n },\n ];\n}\n\npub mod literal {\n use super::ascii_str::AsciiStr;\n use super::token::BoxedTokenList;\n\n #[derive(Debug, PartialEq)]\n pub enum Literal<'x> {\n IntegerU32(u32),\n RealF64(f64),\n StringLiteral(&'x AsciiStr),\n PreprocessedStringLiteral(BoxedTokenList<'x>),\n }\n}\n\n#[derive(Error, Debug)]\npub enum LexError {\n #[error(\"Unexpected EOF reached before proper finish\")]\n UnexpectedEOF,\n #[error(\"Not 7-bit ascii byte occurred\")]\n Not7BitAscii(#[from] ascii_str::NotAsciiStrError),\n #[error(\"Invalid control code\")]\n InvalidControlCodeChar { control_code: u8, pos: usize },\n #[error(\"Unrecognized symbol starting with '{0}'\")]\n UnrecognizedPunctuation(char),\n #[error(\"Control code used where it's not usable\")]\n ControlCodeInNonApplicableMode,\n #[error(\"Control code character '{0}' used where it's not usable\")]\n ControlCodeCharInNonApplicableMode(char),\n #[error(\"Integer literal overflow: {0} with radix {1}\")]\n IntegerLiteralOverflow(String, u32),\n #[error(\"Float literal lex error: {0}\")]\n FloatLiteralLexError(String),\n #[error(\"Numeric literal not properly finished\")]\n NumericLiteralNotProperlyFinished,\n #[error(\"Control text not properly finished with @>\")]\n ControlTextNotProperlyFinished,\n #[error(\"Group title not properly finished with .\")]\n GroupTitleNotProperlyFinished,\n #[error(\"Inline program fragment not properly finished\")]\n InlineProgFragmentNotProperlyFinished,\n #[error(\"Comment not properly finished with }}\")]\n CommentNotProperlyFinished,\n #[error(\"Comment nesting too deep\")]\n CommentNestingTooDeep,\n #[error(\"String literal not properly finished with \\'\")]\n 
StringLiteralNotProperlyFinished,\n #[error(\"String literal not properly finished with \\\"\")]\n PreprocessedStringLiteralNotProperlyFinished,\n #[error(\"Any lex error!\")]\n AnyLexError,\n}\n\npub enum LexControlFlowNewItem {\n Module,\n Definition,\n ProgramText,\n}\n\npub enum LexControlFlow<'x> {\n Continue(U8SpanRef<'x>),\n Finish(U8SpanRef<'x>),\n StartNew(LexControlFlowNewItem, LexMode, U8SpanRef<'x>),\n ModuleNameInlineProgAbort(U8SpanRef<'x>),\n}\n\npub mod token {\n use super::ascii_char::{is_hex_digit, is_inline_whitespace_char, is_octal_digit};\n use super::ascii_str::{self, AsciiStr};\n use super::control_code::ControlCode;\n use super::literal::Literal;\n use super::punctuation::Punctuation;\n use super::{LexControlFlow, LexControlFlowNewItem, LexError, LexMode};\n use crate::utils::U8SpanRef;\n\n #[derive(Debug, PartialEq)]\n pub enum Token<'x> {\n CtrlCode(ControlCode<'x>),\n WS,\n MacroParamMark,\n IdentOrKw(&'x AsciiStr),\n Punct(Punctuation),\n Literal(Literal<'x>),\n Comment(BoxedTokenList<'x>),\n\n InlineProgramFragment(BoxedTokenList<'x>),\n TextFragment(&'x AsciiStr),\n\n ModuleNameInlineProgAbort,\n }\n\n pub type TokenList<'x> = Vec<Token<'x>>;\n pub type BoxedTokenList<'x> = Box<Vec<Token<'x>>>;\n\n fn continue_or_finish(l: U8SpanRef<'_>) -> LexControlFlow<'_> {\n if l.is_empty() {\n LexControlFlow::Finish(l)\n } else {\n LexControlFlow::Continue(l)\n }\n }\n\n fn switch_mode<'x>(control_code: &ControlCode<'x>, l: U8SpanRef<'x>) -> LexControlFlow<'x> {\n use super::control_code::ControlCodeKind;\n match control_code.kind {\n ControlCodeKind::DefineModule | ControlCodeKind::DefineStarredModule => {\n LexControlFlow::StartNew(LexControlFlowNewItem::Module, LexMode::TEX_TEXT, l)\n }\n ControlCodeKind::DefineMacro | ControlCodeKind::DefineFormat => {\n LexControlFlow::StartNew(\n LexControlFlowNewItem::Definition,\n LexMode::DEFINITION_TEXT,\n l,\n )\n }\n ControlCodeKind::DefineProgram | ControlCodeKind::ModuleName => {\n LexControlFlow::StartNew(\n LexControlFlowNewItem::ProgramText,\n LexMode::PASCAL_TEXT,\n l,\n )\n }\n _ => unreachable! 
{},\n }\n }\n\n pub const CONTROL_CODE_PREFIX: u8 = b'@';\n pub const INLINE_PROGRAM_FRAGMENT: u8 = b'|';\n pub const ESCAPE_CHARACTER: u8 = b'\\\\';\n pub const START_OF_COMMENT: u8 = b'{';\n pub const END_OF_COMMENT: u8 = b'}';\n\n pub const LINE_FEED: u8 = b'\\n';\n pub const CARRIAGE_RETURN: u8 = b'\\r';\n\n pub const SIMPLE_ESCAPED_ATAIL: &'static [u8] = b\"@@\";\n pub const END_OF_CONTROL_TEXT: &'static [u8] = b\"@>\";\n\n pub const START_OF_MACRO_DEFINITION: &'static [u8] = b\"@d\";\n pub const START_OF_FORMAT_DEFINITION: &'static [u8] = b\"@f\";\n\n pub const MODULE_NAME_INLINE_PROGFRAG_ABORT: &'static [u8] = b\"...@>\";\n\n pub fn lex_u32_literal_with_radix(l: &[u8], radix: usize) -> Result {\n use std::str::from_utf8;\n let str = from_utf8(l).unwrap();\n if let Ok(v) = u32::from_str_radix(str, radix as u32) {\n Ok(Literal::IntegerU32(v))\n } else {\n Err(LexError::IntegerLiteralOverflow(\n str.to_owned(),\n radix as u32,\n ))\n }\n }\n\n pub fn lex_f64_literal(l: &[u8]) -> Result {\n use std::str::{from_utf8, FromStr};\n let str = from_utf8(l).unwrap();\n if let Ok(v) = f64::from_str(str) {\n Ok(Literal::RealF64(v))\n } else {\n Err(LexError::FloatLiteralLexError(str.to_owned()))\n }\n }\n\n pub fn lex_numeric_literal(l: U8SpanRef<'_>) -> Result<(Literal, U8SpanRef<'_>), LexError> {\n use super::ascii_char::is_numeric_char;\n let count_int = l\n .bytes()\n .iter()\n .copied()\n .take_while(|&ch| is_numeric_char(ch))\n .count();\n let has_dot = count_int > 0 && l.bytes()[count_int..].starts_with(b\".\");\n let count_fraction = if has_dot {\n l.bytes()[count_int + 1..]\n .iter()\n .copied()\n .take_while(|&ch| is_numeric_char(ch))\n .count()\n } else {\n 0\n };\n if has_dot && count_fraction > 0 {\n let (numeric, rest) = l.split_at(count_int + 1 + count_fraction);\n let literal = lex_f64_literal(numeric)?;\n Ok((literal, rest))\n } else if count_int > 0 {\n let (numeric, rest) = l.split_at(count_int);\n let literal = lex_u32_literal_with_radix(numeric, 10)?;\n Ok((literal, rest))\n } else {\n Err(LexError::NumericLiteralNotProperlyFinished)\n }\n }\n\n fn lex_maybe_whitespace<'x>(l: U8SpanRef<'x>) -> (&'x [u8], U8SpanRef<'x>) {\n use super::ascii_char::is_whitespace_char;\n let pos = l\n .bytes()\n .iter()\n .copied()\n .take_while(|&ch| is_whitespace_char(ch))\n .count();\n l.split_at(pos)\n }\n\n fn lex_identifier<'x>(l: U8SpanRef<'x>) -> (Option<&'x [u8]>, U8SpanRef<'x>) {\n use super::ascii_char::{is_id_continue, is_id_start};\n let pos = l\n .bytes()\n .iter()\n .copied()\n .enumerate()\n .take_while(|&(n, ch)| {\n if n == 0 {\n is_id_start(ch)\n } else {\n is_id_continue(ch)\n }\n })\n .count();\n\n if pos == 0 {\n (None, l)\n } else {\n let (head, rest) = l.split_at(pos);\n (Some(head), rest)\n }\n }\n\n fn lex_punct<'x>(l: U8SpanRef<'x>) -> (Option, U8SpanRef<'x>) {\n use super::punctuation::PUNCTUATION_TABLE;\n for table_item in PUNCTUATION_TABLE {\n if l.starts_with(table_item.literal) {\n let literal_len = table_item.literal.len();\n let (_, rest) = l.split_at(literal_len);\n return (Some(table_item.kind.clone()), rest);\n }\n }\n (None, l)\n }\n\n fn lex_control_code_rest<'x>(\n l: U8SpanRef<'x>,\n mode: LexMode,\n ) -> Result<(ControlCode<'x>, U8SpanRef<'x>, bool), LexError> {\n use super::control_code::get_control_code_info_record_for_selector;\n use super::control_code::SpecialHandling;\n let selector = l.front_cloned().ok_or_else(|| LexError::UnexpectedEOF)?;\n\n let control_code_info =\n get_control_code_info_record_for_selector(selector).ok_or_else(|| {\n 
LexError::InvalidControlCodeChar {\n control_code: selector,\n pos: l.pos(),\n }\n })?;\n\n if !control_code_info.appliable_modes.contains_mode(mode) {\n return Err(LexError::ControlCodeCharInNonApplicableMode(\n selector as char,\n ));\n }\n\n let is_terminator = control_code_info.terminating_modes.contains_mode(mode);\n\n let rest = l.range(1..);\n let (control_code, rest) = match control_code_info.special_handling {\n SpecialHandling::None => {\n let control_code = ControlCode {\n kind: control_code_info.kind,\n param: None,\n };\n (control_code, rest)\n }\n SpecialHandling::GroupTitle => {\n let group_title_start = rest\n .bytes()\n .iter()\n .take_while(|&&ch| is_inline_whitespace_char(ch))\n .count();\n let group_title_end =\n memchr::memchr2(b'.', b'\\n', rest.bytes()).unwrap_or(rest.len());\n\n let control_text_end;\n if !rest.range(group_title_end..).starts_with(b\".\") {\n eprintln!(\n \"WARN: module group title not finished with dot character, continuing.\"\n );\n control_text_end = group_title_end;\n //return Err(LexError::GroupTitleNotProperlyFinished);\n } else {\n control_text_end = group_title_end + 1;\n }\n let group_title_text =\n ascii_str::from_bytes(&rest.bytes()[group_title_start..group_title_end])?;\n let control_code = ControlCode {\n kind: control_code_info.kind,\n param: Some(Box::new(vec![Token::TextFragment(group_title_text)])),\n };\n (control_code, rest.range(control_text_end..))\n }\n SpecialHandling::ModuleName => {\n let mode = LexMode::MODULE_NAME;\n let mut data = rest;\n let mut tokens = vec![];\n 'module_name_loop: loop {\n use super::control_code::ControlCodeKind;\n let (token, control_flow) = lex_token(data, mode)?;\n match control_flow {\n LexControlFlow::Continue(rest_data) => {\n data = rest_data;\n match token {\n Token::CtrlCode(ControlCode {\n kind: ControlCodeKind::HiddenEndOfModuleName,\n ..\n }) => {\n break 'module_name_loop;\n }\n _ => {\n tokens.push(token);\n }\n }\n }\n LexControlFlow::Finish(..) => {\n return Err(LexError::UnexpectedEOF);\n }\n LexControlFlow::StartNew(..) => {\n return Err(LexError::ControlCodeInNonApplicableMode);\n }\n LexControlFlow::ModuleNameInlineProgAbort(..) => {\n return Err(LexError::ControlCodeInNonApplicableMode);\n }\n }\n }\n let control_code = ControlCode {\n kind: control_code_info.kind,\n param: Some(Box::new(tokens)),\n };\n (control_code, data)\n }\n SpecialHandling::FormatDefinition | SpecialHandling::MacroDefinition => {\n let mode = LexMode::DEFINITION_TEXT;\n let mut data = rest;\n let mut tokens = vec![];\n 'definition_loop: loop {\n if data.starts_with(START_OF_MACRO_DEFINITION)\n || data.starts_with(START_OF_FORMAT_DEFINITION)\n {\n break 'definition_loop;\n }\n let (token, control_flow) = lex_token(data, mode)?;\n match control_flow {\n LexControlFlow::Continue(rest_data) => {\n data = rest_data;\n tokens.push(token);\n }\n LexControlFlow::Finish(rest_data) => {\n data = rest_data;\n tokens.push(token);\n break 'definition_loop;\n }\n LexControlFlow::StartNew(..) => {\n break 'definition_loop;\n }\n LexControlFlow::ModuleNameInlineProgAbort(..) 
=> {\n return Err(LexError::ControlCodeInNonApplicableMode);\n }\n }\n }\n let control_code = ControlCode {\n kind: control_code_info.kind,\n param: Some(Box::new(tokens)),\n };\n (control_code, data)\n }\n SpecialHandling::OctalConst => {\n let octal_digit_count = rest\n .bytes()\n .iter()\n .copied()\n .take_while(|&ch| is_octal_digit(ch))\n .count();\n let (octal_digits, rest) = rest.split_at(octal_digit_count);\n let literal = lex_u32_literal_with_radix(octal_digits, 8)?;\n let control_code = ControlCode {\n kind: control_code_info.kind,\n param: Some(Box::new(vec![Token::Literal(literal)])),\n };\n (control_code, rest)\n }\n SpecialHandling::HexConst => {\n let hex_digit_count = rest\n .bytes()\n .iter()\n .copied()\n .take_while(|&ch| is_hex_digit(ch))\n .count();\n let (hex_digits, rest) = rest.split_at(hex_digit_count);\n let literal = lex_u32_literal_with_radix(hex_digits, 16)?;\n let control_code = ControlCode {\n kind: control_code_info.kind,\n param: Some(Box::new(vec![Token::Literal(literal)])),\n };\n (control_code, rest)\n }\n SpecialHandling::ControlTextUpToAtGT => {\n let control_text_len = memchr::memchr3(\n CONTROL_CODE_PREFIX,\n LINE_FEED,\n CARRIAGE_RETURN,\n rest.bytes(),\n )\n .unwrap_or(rest.len());\n if !rest\n .range(control_text_len..)\n .starts_with(END_OF_CONTROL_TEXT)\n {\n return Err(LexError::ControlTextNotProperlyFinished);\n }\n let control_code = ControlCode {\n kind: control_code_info.kind,\n param: Some(Box::new(vec![Token::TextFragment(ascii_str::from_bytes(\n &rest.bytes()[..control_text_len],\n )?)])),\n };\n (\n control_code,\n rest.range(control_text_len + END_OF_CONTROL_TEXT.len()..),\n )\n }\n SpecialHandling::WarnAndIgnore => {\n use super::control_code::ControlCodeKind;\n eprintln!(\n \"WARN: %{} occurred in the web file, ignoring.\",\n selector as char\n );\n let control_code = ControlCode {\n kind: ControlCodeKind::Ignored,\n param: None,\n };\n (control_code, rest)\n }\n };\n Ok((control_code, rest, is_terminator))\n }\n\n pub fn lex_comment_rest<'x>(\n l: U8SpanRef<'x>,\n ) -> Result<(Token<'x>, LexControlFlow<'x>), LexError> {\n let mode = LexMode::COMMENT;\n let mut l = l;\n let mut tokens = vec![];\n\n let mut level = 1usize;\n 'comment_loop: loop {\n if l.starts_with(b\"\\\\\") {\n let (head, rest) = l.split_at(2);\n if l.len() >= 2 {\n let escaped_fragment = Token::TextFragment(ascii_str::from_bytes(head)?);\n tokens.push(escaped_fragment);\n l = rest;\n } else {\n return Err(LexError::CommentNotProperlyFinished);\n }\n } else if l.starts_with(b\"{\") {\n let (head, rest) = l.split_at(1);\n let fragment = Token::TextFragment(ascii_str::from_bytes(head)?);\n tokens.push(fragment);\n level = level\n .checked_add(1)\n .ok_or(LexError::CommentNestingTooDeep)?;\n l = rest;\n } else if l.starts_with(b\"}\") {\n let (head, rest) = l.split_at(1);\n level -= 1;\n if level != 0 {\n let fragment = Token::TextFragment(ascii_str::from_bytes(head)?);\n tokens.push(fragment);\n }\n l = rest;\n if level == 0 {\n break 'comment_loop;\n }\n } else {\n let (token, control_flow) = lex_token(l, mode)?;\n match control_flow {\n LexControlFlow::Continue(rest_data) => {\n l = rest_data;\n tokens.push(token);\n }\n LexControlFlow::Finish(..) => {\n return Err(LexError::UnexpectedEOF);\n }\n LexControlFlow::StartNew(..) => {\n return Err(LexError::ControlCodeInNonApplicableMode);\n }\n LexControlFlow::ModuleNameInlineProgAbort(..) 
=> {\n return Err(LexError::ControlCodeInNonApplicableMode);\n }\n }\n }\n }\n let token = Token::Comment(Box::new(tokens));\n Ok((token, continue_or_finish(l)))\n }\n\n fn lex_string_literal_rest<'x>(\n l: U8SpanRef<'x>,\n ) -> Result<(Token<'x>, LexControlFlow<'x>), LexError> {\n // fixme: properly parse string literal\n let text_end = memchr::memchr2(b'\\'', b'\\n', l.bytes()).unwrap_or(l.len());\n if !l.range(text_end..).starts_with(b\"\\'\") {\n return Err(LexError::StringLiteralNotProperlyFinished);\n }\n let literal_end = text_end + 1;\n let literal_text = ascii_str::from_bytes(&l.bytes()[..text_end])?;\n let token = Token::Literal(Literal::StringLiteral(literal_text));\n Ok((token, continue_or_finish(l.range(literal_end..))))\n }\n\n fn lex_preprocessed_string_literal_rest<'x>(\n l: U8SpanRef<'x>,\n ) -> Result<(Token<'x>, LexControlFlow<'x>), LexError> {\n // fixme: properly parse string literal\n let text_end = memchr::memchr2(b'\\\"', b'\\n', l.bytes()).unwrap_or(l.len());\n if !l.range(text_end..).starts_with(b\"\\\"\") {\n return Err(LexError::PreprocessedStringLiteralNotProperlyFinished);\n }\n let literal_end = text_end + 1;\n let mut tokens = vec![];\n tokens.push(Token::TextFragment(ascii_str::from_bytes(\n &l.bytes()[..text_end],\n )?));\n let token = Token::Literal(Literal::PreprocessedStringLiteral(Box::new(tokens)));\n Ok((token, continue_or_finish(l.range(literal_end..))))\n }\n\n fn lex_inline_prog_rest<'x>(\n l: U8SpanRef<'x>,\n parent_mode: LexMode,\n ) -> Result<(Token<'x>, LexControlFlow<'x>), LexError> {\n let mode = LexMode::INLINE_PASCAL_TEXT;\n let mut data = l;\n let mut tokens = vec![];\n 'inline_prog_loop: loop {\n if data.starts_with(b\"|\") {\n data = data.range(1..);\n break 'inline_prog_loop;\n } else {\n let (token, control_flow) = lex_token(data, mode)?;\n match control_flow {\n LexControlFlow::Continue(rest_data) => {\n data = rest_data;\n tokens.push(token);\n }\n LexControlFlow::ModuleNameInlineProgAbort(rest_data)\n if parent_mode == LexMode::MODULE_NAME =>\n {\n data = rest_data;\n tokens.push(token);\n break 'inline_prog_loop;\n }\n _ => {\n return Err(LexError::InlineProgFragmentNotProperlyFinished);\n }\n }\n }\n }\n let token = Token::InlineProgramFragment(Box::new(tokens));\n Ok((token, continue_or_finish(data)))\n }\n\n pub fn lex_token<'x>(\n l: U8SpanRef<'x>,\n mode: LexMode,\n ) -> Result<(Token<'x>, LexControlFlow<'x>), LexError> {\n let (l_is_empty, first_ch) = match l.front_cloned() {\n Some(ch) => (false, ch),\n None => (true, 0),\n };\n match mode {\n LexMode::LIMBO | LexMode::TEX_TEXT if l_is_empty => {\n let empty = ascii_str::from_bytes(l.bytes())?;\n return Ok((Token::TextFragment(empty), LexControlFlow::Finish(l)));\n }\n LexMode::DEFINITION_TEXT | LexMode::PASCAL_TEXT if l_is_empty => {\n return Ok((Token::WS, LexControlFlow::Finish(l)));\n }\n _ if l_is_empty => {\n return Err(LexError::UnexpectedEOF);\n }\n LexMode::LIMBO\n | LexMode::TEX_TEXT\n | LexMode::PASCAL_TEXT\n | LexMode::INLINE_PASCAL_TEXT\n | LexMode::DEFINITION_TEXT\n | LexMode::COMMENT\n if first_ch == CONTROL_CODE_PREFIX =>\n {\n let rest = l.range(1..);\n let (control_code, rest, is_terminator) = lex_control_code_rest(rest, mode)?;\n if !is_terminator {\n return Ok((Token::CtrlCode(control_code), continue_or_finish(rest)));\n } else {\n let new_mode = switch_mode(&control_code, rest);\n return Ok((Token::CtrlCode(control_code), new_mode));\n }\n }\n LexMode::MODULE_NAME if first_ch == CONTROL_CODE_PREFIX => {\n use super::control_code::ControlCodeKind;\n if 
l.starts_with(SIMPLE_ESCAPED_ATAIL) {\n let control_code = ControlCode {\n kind: ControlCodeKind::EscapedAt,\n param: None,\n };\n return Ok((\n Token::CtrlCode(control_code),\n continue_or_finish(l.range(2..)),\n ));\n } else if l.starts_with(END_OF_CONTROL_TEXT) {\n let control_code = ControlCode {\n kind: ControlCodeKind::HiddenEndOfModuleName,\n param: None,\n };\n return Ok((\n Token::CtrlCode(control_code),\n continue_or_finish(l.range(2..)),\n ));\n } else {\n return Err(LexError::ControlCodeInNonApplicableMode);\n }\n }\n LexMode::LIMBO | LexMode::TEX_TEXT | LexMode::MODULE_NAME | LexMode::COMMENT\n if first_ch == INLINE_PROGRAM_FRAGMENT =>\n {\n let rest = l.range(1..);\n return lex_inline_prog_rest(rest, mode);\n }\n LexMode::LIMBO | LexMode::TEX_TEXT | LexMode::MODULE_NAME | LexMode::COMMENT => {\n use memchr::{memchr, memchr2};\n debug_assert_ne!(first_ch, CONTROL_CODE_PREFIX);\n debug_assert_ne!(first_ch, INLINE_PROGRAM_FRAGMENT);\n let text_len = if mode == LexMode::LIMBO {\n memchr(CONTROL_CODE_PREFIX, l.bytes())\n } else if mode != LexMode::COMMENT {\n memchr2(CONTROL_CODE_PREFIX, INLINE_PROGRAM_FRAGMENT, l.bytes())\n } else {\n let count = l\n .bytes()\n .iter()\n .take_while(|&&ch| {\n ch != CONTROL_CODE_PREFIX\n && ch != INLINE_PROGRAM_FRAGMENT\n && ch != ESCAPE_CHARACTER\n && ch != START_OF_COMMENT\n && ch != END_OF_COMMENT\n })\n .count();\n Some(count)\n }\n .unwrap_or_else(|| l.len());\n let (text, rest) = l.split_at(text_len);\n let text = ascii_str::from_bytes(text)?;\n return Ok((Token::TextFragment(text), continue_or_finish(rest)));\n }\n LexMode::PASCAL_TEXT | LexMode::DEFINITION_TEXT | LexMode::INLINE_PASCAL_TEXT => {\n use super::ascii_char;\n\n debug_assert!(first_ch != CONTROL_CODE_PREFIX);\n if ascii_char::is_whitespace_char(first_ch) {\n let (_, rest) = lex_maybe_whitespace(l);\n return Ok((Token::WS, continue_or_finish(rest)));\n } else if ascii_char::is_id_start(first_ch) {\n let (id, rest) = lex_identifier(l);\n let id = id.expect(\"\");\n return Ok((\n Token::IdentOrKw(ascii_str::from_bytes(id)?),\n continue_or_finish(rest),\n ));\n } else if first_ch == b'{' {\n let rest = l.range(1..);\n return lex_comment_rest(rest);\n } else if first_ch == b'\\'' {\n let rest = l.range(1..);\n return lex_string_literal_rest(rest);\n } else if first_ch == b'\\\"' {\n let rest = l.range(1..);\n return lex_preprocessed_string_literal_rest(rest);\n } else if first_ch == b'#' {\n let rest = l.range(1..);\n return Ok((Token::MacroParamMark, continue_or_finish(rest)));\n } else if mode == LexMode::INLINE_PASCAL_TEXT\n && first_ch == b'.'\n && l.starts_with(MODULE_NAME_INLINE_PROGFRAG_ABORT)\n {\n return Ok((\n Token::ModuleNameInlineProgAbort,\n LexControlFlow::ModuleNameInlineProgAbort(l),\n ));\n } else if ascii_char::is_punct_char(first_ch) {\n let (punct, rest) = lex_punct(l);\n let punct =\n punct.ok_or_else(|| LexError::UnrecognizedPunctuation(first_ch as char))?;\n return Ok((Token::Punct(punct), continue_or_finish(rest)));\n } else if ascii_char::is_numeric_char(first_ch) {\n let (numeric, rest) = lex_numeric_literal(l)?;\n return Ok((Token::Literal(numeric), continue_or_finish(rest)));\n } else {\n unimplemented!(\"{:?}\", first_ch);\n }\n }\n _ => unimplemented!(),\n }\n }\n}\n\npub struct LexerRawBuf<'x> {\n mode: LexMode,\n data: U8SpanRef<'x>,\n}\n\n#[derive(Default)]\npub struct LexerLimboBuf<'x> {\n pub(crate) limbo_tokens: token::TokenList<'x>,\n}\n\npub struct LexerModuleBuf<'x> {\n pub(crate) module_type: token::Token<'x>,\n pub(crate) text_in_tex: 
token::TokenList<'x>,\n pub(crate) definitions: token::TokenList<'x>,\n pub(crate) code_in_pascal: token::TokenList<'x>,\n}\n\n#[derive(Clone, Copy)]\nenum LexerInternalState {\n LimboDirty,\n LimboFilledModuleDirty,\n LimboFilledEOF,\n LimboTakenModuleDirty,\n ModuleFilledNextModuleDirty,\n ModuleFilledEOF,\n EOF,\n}\n\npub struct WEBLexer<'x> {\n raw_buf: LexerRawBuf<'x>,\n state: LexerInternalState,\n limbo_buf: Option>,\n module_buf: Option>,\n next_module_buf: Option>,\n}\n\nimpl<'x> WEBLexer<'x> {\n pub fn new(data: &'x [u8]) -> Self {\n let raw_buf = LexerRawBuf {\n mode: LexMode::LIMBO,\n data: U8SpanRef::new(data),\n };\n let limbo_buf = Some(Default::default());\n let state = LexerInternalState::LimboDirty;\n let module_buf = None;\n let next_module_buf = None;\n WEBLexer {\n raw_buf,\n state,\n limbo_buf,\n module_buf,\n next_module_buf,\n }\n }\n\n fn refill(&mut self) -> Result<(), LexError> {\n let mut output_module;\n match self.state {\n LexerInternalState::LimboDirty => {\n output_module = None;\n }\n LexerInternalState::LimboTakenModuleDirty\n | LexerInternalState::LimboFilledModuleDirty => {\n output_module = Some(self.module_buf.as_mut().unwrap());\n }\n LexerInternalState::ModuleFilledNextModuleDirty => {\n output_module = Some(self.next_module_buf.as_mut().unwrap());\n }\n LexerInternalState::LimboFilledEOF\n | LexerInternalState::ModuleFilledEOF\n | LexerInternalState::EOF => {\n return Ok(());\n }\n }\n let mut pending_token = None;\n 'outer: loop {\n let output_tokenlist;\n if let Some(module) = &mut output_module {\n output_tokenlist = match self.raw_buf.mode {\n LexMode::TEX_TEXT => &mut module.text_in_tex,\n LexMode::DEFINITION_TEXT => &mut module.definitions,\n LexMode::PASCAL_TEXT => &mut module.code_in_pascal,\n _ => unreachable!(),\n };\n } else {\n assert!(self.raw_buf.mode == LexMode::LIMBO);\n output_tokenlist = &mut self.limbo_buf.as_mut().unwrap().limbo_tokens;\n }\n\n if let Some(token) = pending_token.take() {\n output_tokenlist.push(token);\n }\n\n 'inner: loop {\n let (token, control_flow) = token::lex_token(self.raw_buf.data, self.raw_buf.mode)?;\n match control_flow {\n LexControlFlow::Continue(rest_data) => {\n output_tokenlist.push(token);\n self.raw_buf.data = rest_data;\n continue 'inner;\n }\n LexControlFlow::Finish(rest_data) => {\n output_tokenlist.push(token);\n self.raw_buf.data = rest_data;\n self.state = match self.state {\n LexerInternalState::LimboDirty => LexerInternalState::LimboFilledEOF,\n LexerInternalState::LimboTakenModuleDirty => {\n LexerInternalState::ModuleFilledEOF\n }\n LexerInternalState::LimboFilledModuleDirty\n | LexerInternalState::ModuleFilledNextModuleDirty\n | LexerInternalState::LimboFilledEOF\n | LexerInternalState::ModuleFilledEOF\n | LexerInternalState::EOF => unreachable!(),\n };\n break 'outer;\n }\n LexControlFlow::StartNew(\n LexControlFlowNewItem::Module,\n new_mode,\n rest_data,\n ) => {\n self.raw_buf.mode = new_mode;\n self.raw_buf.data = rest_data;\n let new_module = LexerModuleBuf {\n module_type: token,\n text_in_tex: Default::default(),\n definitions: Default::default(),\n code_in_pascal: Default::default(),\n };\n self.state = match self.state {\n LexerInternalState::LimboDirty => {\n assert!(self.module_buf.is_none());\n self.module_buf = Some(new_module);\n LexerInternalState::LimboFilledModuleDirty\n }\n LexerInternalState::LimboTakenModuleDirty => {\n assert!(self.next_module_buf.is_none());\n self.next_module_buf = Some(new_module);\n LexerInternalState::ModuleFilledNextModuleDirty\n }\n 
LexerInternalState::LimboFilledModuleDirty\n | LexerInternalState::ModuleFilledNextModuleDirty\n | LexerInternalState::LimboFilledEOF\n | LexerInternalState::ModuleFilledEOF\n | LexerInternalState::EOF => unreachable!(),\n };\n break 'outer;\n }\n LexControlFlow::StartNew(\n LexControlFlowNewItem::Definition,\n new_mode,\n rest_data,\n )\n | LexControlFlow::StartNew(\n LexControlFlowNewItem::ProgramText,\n new_mode,\n rest_data,\n ) => {\n assert!(pending_token.is_none());\n pending_token = Some(token);\n self.raw_buf.mode = new_mode;\n self.raw_buf.data = rest_data;\n continue 'outer;\n }\n LexControlFlow::ModuleNameInlineProgAbort(..) => {\n unreachable!();\n }\n }\n }\n }\n Ok(())\n }\n\n pub fn lex_limbo(&mut self) -> Result<Option<LexerLimboBuf<'x>>, LexError> {\n self.refill()?;\n let result;\n self.state = match self.state {\n LexerInternalState::LimboDirty | LexerInternalState::LimboTakenModuleDirty => unreachable!(),\n LexerInternalState::LimboFilledModuleDirty => {\n result = self.limbo_buf.take();\n LexerInternalState::LimboTakenModuleDirty\n }\n LexerInternalState::LimboFilledEOF => {\n result = self.limbo_buf.take();\n LexerInternalState::EOF\n }\n LexerInternalState::ModuleFilledNextModuleDirty\n | LexerInternalState::ModuleFilledEOF\n | LexerInternalState::EOF => {\n result = None;\n self.state\n }\n };\n Ok(result)\n }\n\n pub fn lex_module(&mut self) -> Result<Option<LexerModuleBuf<'x>>, LexError> {\n self.refill()?;\n let result;\n self.state = match self.state {\n LexerInternalState::LimboDirty | LexerInternalState::LimboTakenModuleDirty => unreachable!(),\n LexerInternalState::LimboFilledModuleDirty | LexerInternalState::LimboFilledEOF => {\n // must be called in the wrong order.\n unreachable!();\n }\n LexerInternalState::ModuleFilledNextModuleDirty => {\n use std::mem::swap;\n result = self.module_buf.take();\n swap(&mut self.module_buf, &mut self.next_module_buf);\n LexerInternalState::LimboTakenModuleDirty\n }\n LexerInternalState::ModuleFilledEOF => {\n result = self.module_buf.take();\n LexerInternalState::EOF\n }\n LexerInternalState::EOF => {\n result = None;\n self.state\n }\n };\n Ok(result)\n }\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1155,"cells":{"blob_id":{"kind":"string","value":"24ec11896677f646b439567f03f5daa166f81c60"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"unovor/frame"},"path":{"kind":"string","value":"/asn1_der-0.6.3/src/types/boolean.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":702,"string":"702"},"score":{"kind":"number","value":2.671875,"string":"2.671875"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":["BSD-2-Clause","MIT"],"string":"[\n \"BSD-2-Clause\",\n \"MIT\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"use ::{ Asn1DerError, types::{ FromDerObject, IntoDerObject }, der::{ DerObject, DerTag} };\n\n\nimpl FromDerObject for bool {\n\tfn from_der_object(der_object: DerObject) -> Result<Self, Asn1DerError> {\n\t\tif der_object.tag != DerTag::Boolean { return Err(Asn1DerError::InvalidTag) }\n\t\t\n\t\tmatch der_object.value.data.as_slice() {\n\t\t\t&[0x00u8] => Ok(false),\n\t\t\t&[0xffu8] => Ok(true),\n\t\t\t_ => return Err(Asn1DerError::InvalidEncoding)\n\t\t}\n\t}\n}\nimpl IntoDerObject for bool {\n\tfn into_der_object(self) -> DerObject {\n\t\tDerObject::new(DerTag::Boolean, match self {\n\t\t\ttrue => vec![0xffu8],\n\t\t\tfalse => vec![0x00u8]\n\t\t}.into())\n\t}\n\t\n\tfn 
serialized_len(&self) -> usize {\n\t\tDerObject::compute_serialized_len(1)\n\t}\n}"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1156,"cells":{"blob_id":{"kind":"string","value":"88d58e93555a8aad6ff888974be5d5196671451d"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"esavier/keynesis"},"path":{"kind":"string","value":"/src/passport/block/content.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":7809,"string":"7,809"},"score":{"kind":"number","value":2.828125,"string":"2.828125"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":["MIT","Apache-2.0","LicenseRef-scancode-unknown-license-reference"],"string":"[\n \"MIT\",\n \"Apache-2.0\",\n \"LicenseRef-scancode-unknown-license-reference\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"use crate::passport::block::{EntryError, EntrySlice, EntryType, Hash, Hasher};\nuse std::{\n convert::TryInto as _,\n fmt::{self, Formatter},\n iter::FusedIterator,\n ops::Deref,\n};\nuse thiserror::Error;\n\n#[derive(Ord, PartialOrd, Eq, PartialEq, Clone, Hash)]\npub struct Content(Box<[u8]>);\n\n#[derive(Ord, PartialOrd, Eq, PartialEq)]\npub(crate) struct ContentMut<'a>(&'a mut Vec<u8>);\n\n#[derive(Ord, PartialOrd, Eq, PartialEq, Copy, Clone, Hash)]\npub struct ContentSlice<'a>(&'a [u8]);\n\n#[derive(Debug, Error)]\npub enum ContentError {\n #[error(\"Content's max size has been reached, cannot add the entry\")]\n MaxSizeReached,\n\n #[error(\"The content has {extra} bytes we do not know what they are for, it could the buffer was truncated\")]\n InvalidLength { extra: usize },\n\n #[error(\"Invalid entry\")]\n Entry(\n #[from]\n #[source]\n EntryError,\n ),\n}\n\npub struct ContentSliceIter<'a>(&'a [u8]);\n\nimpl Content {\n pub const MAX_SIZE: usize = u16::MAX as usize;\n\n pub fn as_slice(&self) -> ContentSlice<'_> {\n ContentSlice(&self.0)\n }\n\n pub fn iter(&self) -> ContentSliceIter<'_> {\n self.as_slice().iter()\n }\n\n pub fn hash(&self) -> Hash {\n self.as_slice().hash()\n }\n}\n\nimpl<'a> ContentMut<'a> {\n pub(crate) fn new(bytes: &'a mut Vec<u8>) -> Self {\n Self(bytes)\n }\n\n #[cfg(test)]\n fn into_content(self) -> Content {\n Content(self.0.to_owned().into_boxed_slice())\n }\n\n pub(crate) fn push(&mut self, entry: EntrySlice<'_>) -> Result<(), ContentError> {\n let current_size = self.0.len();\n let needed_size = current_size + entry.as_ref().len();\n if needed_size > Content::MAX_SIZE {\n return Err(ContentError::MaxSizeReached);\n }\n self.0.extend_from_slice(entry.as_ref());\n Ok(())\n }\n}\n\nimpl<'a> ContentSlice<'a> {\n pub fn iter(&self) -> ContentSliceIter<'a> {\n ContentSliceIter(self.0)\n }\n\n pub fn to_content(&self) -> Content {\n Content(self.0.to_vec().into_boxed_slice())\n }\n\n pub fn from_slice_unchecked(slice: &'a [u8]) -> Self {\n Self(slice)\n }\n\n pub fn try_from_slice(slice: &'a [u8]) -> Result<Self, ContentError> {\n if slice.len() > Content::MAX_SIZE {\n return Err(ContentError::MaxSizeReached);\n }\n\n let content = Self(slice);\n\n let mut slice = content.0;\n\n while slice.len() >= 2 {\n let entry_type =\n EntryType::try_from_u16(u16::from_be_bytes(slice[..2].try_into().unwrap()))?;\n let size = entry_type.size(&slice[2..]);\n\n let _ = EntrySlice::try_from_slice(&slice[..size])?;\n slice = &slice[size..];\n }\n\n if slice.is_empty() {\n Ok(content)\n } else {\n Err(ContentError::InvalidLength { extra: slice.len() })\n }\n }\n\n pub fn 
hash(&self) -> Hash {\n Hasher::hash(self.0)\n }\n}\n\nimpl<'a> IntoIterator for ContentSlice<'a> {\n type IntoIter = ContentSliceIter<'a>;\n type Item = EntrySlice<'a>;\n\n fn into_iter(self) -> Self::IntoIter {\n self.iter()\n }\n}\n\nimpl<'a> Iterator for ContentSliceIter<'a> {\n type Item = EntrySlice<'a>;\n\n fn next(&mut self) -> Option<Self::Item> {\n if self.0.is_empty() {\n None\n } else {\n let entry_type =\n EntryType::try_from_u16(u16::from_be_bytes(self.0[..2].try_into().unwrap()))\n .unwrap();\n let size = entry_type.size(&self.0[2..]);\n\n let entry = EntrySlice::from_slice_unchecked(&self.0[..size]);\n self.0 = &self.0[size..];\n\n Some(entry)\n }\n }\n\n fn size_hint(&self) -> (usize, Option<usize>) {\n if self.0.is_empty() {\n (0, Some(0))\n } else {\n (1, None)\n }\n }\n}\n\nimpl<'a> FusedIterator for ContentSliceIter<'a> {}\n\nimpl<'a> fmt::Debug for ContentSlice<'a> {\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n f.debug_list().entries(self.iter()).finish()\n }\n}\n\nimpl fmt::Debug for Content {\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n f.debug_list().entries(self.iter()).finish()\n }\n}\n\nimpl<'a> AsRef<[u8]> for ContentSlice<'a> {\n fn as_ref(&self) -> &[u8] {\n &self.0\n }\n}\n\nimpl<'a> Deref for ContentMut<'a> {\n type Target = [u8];\n fn deref(&self) -> &Self::Target {\n &self.0\n }\n}\n\nimpl<'a> Deref for ContentSlice<'a> {\n type Target = [u8];\n fn deref(&self) -> &Self::Target {\n &self.0\n }\n}\n\nimpl Deref for Content {\n type Target = [u8];\n fn deref(&self) -> &Self::Target {\n &self.0\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n use crate::{\n key::{curve25519, ed25519::PublicKey},\n passport::block::{Entry, EntryMut},\n Seed,\n };\n use quickcheck::{Arbitrary, Gen};\n\n impl Arbitrary for Content {\n fn arbitrary(g: &mut Gen) -> Self {\n let max = usize::arbitrary(g) % 12;\n let mut bytes = Vec::with_capacity(1024);\n let mut content = ContentMut::new(&mut bytes);\n\n for _ in 0..max {\n let entry = Entry::arbitrary(g);\n match content.push(entry.as_slice()) {\n Ok(()) => (),\n Err(ContentError::MaxSizeReached) => break,\n Err(error) => {\n // another error occurred, it should not happen but\n // better ready than sorry\n unreachable!(&error)\n }\n }\n }\n\n content.into_content()\n }\n }\n\n /// test to make sure we detect the limit of the Content\n /// when using `push`\n #[test]\n fn too_long_fail() {\n let content = [0; Content::MAX_SIZE + 1];\n\n match ContentSlice::try_from_slice(&content) {\n Err(ContentError::MaxSizeReached) => (),\n Err(error) => panic!(\"Didn't expect this error: {:?}\", error),\n Ok(_) => panic!(\"Content should have failed with too long error\"),\n }\n }\n\n #[test]\n fn test_shared_entry_only() {\n let mut rng = quickcheck::Gen::new(1024);\n\n let max = 1;\n let mut bytes = Vec::with_capacity(1024);\n let mut content = ContentMut::new(&mut bytes);\n\n for _ in 0..max {\n let mut entry_bytes = Vec::with_capacity(1024);\n let key = curve25519::SecretKey::arbitrary(&mut rng);\n let mut builder = EntryMut::new_set_shared_key(&mut entry_bytes, &key.public_key());\n let passphrase = Option::::arbitrary(&mut rng);\n let mut entry_rng = Seed::arbitrary(&mut rng).into_rand_chacha();\n\n let count = u8::arbitrary(&mut rng) % 12 + 1;\n for _ in 0..count {\n builder\n .share_with(\n &mut entry_rng,\n &key,\n &PublicKey::arbitrary(&mut rng),\n &passphrase,\n )\n .expect(\"valid share to this key\");\n }\n\n let entry = builder.finalize().expect(\"valid key sharing entry\");\n match content.push(entry) {\n Ok(()) => 
(),\n Err(ContentError::MaxSizeReached) => break,\n Err(error) => {\n // another error occurred, it should not happen but\n // better ready than sorry\n unreachable!(&error)\n }\n }\n }\n\n let _ = content.into_content();\n }\n\n #[quickcheck]\n fn decode_slice(content: Content) -> bool {\n ContentSlice::try_from_slice(&content.0).unwrap();\n true\n }\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1157,"cells":{"blob_id":{"kind":"string","value":"fe2b27935168a910385475d8c99f7a74ebb18257"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"kyle-rader/advent_of_code"},"path":{"kind":"string","value":"/src/rust/aoc/src/auth.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":2264,"string":"2,264"},"score":{"kind":"number","value":2.875,"string":"2.875"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"use std::fs::{self, File};\r\nuse std::io::Write;\r\nuse std::path::PathBuf;\r\n\r\nuse crate::aoc_client::AocClient;\r\nuse crate::cookies::aoc_session_token_first;\r\nuse anyhow::anyhow;\r\nuse directories::ProjectDirs;\r\n\r\npub fn login(token: Option) -> anyhow::Result<()> {\r\n // Get\r\n let token = match token {\r\n Some(token) => {\r\n println!(\"📝 Using token provided on CLI\");\r\n token\r\n }\r\n None => {\r\n println!(\"🍪 Using token from FireFox cookies\");\r\n aoc_session_token_first()?\r\n }\r\n };\r\n\r\n // Test\r\n let client = AocClient::new(&token);\r\n let user_name = client.user_name()?;\r\n println!(\"✅ Token works!\");\r\n\r\n // Save\r\n let cache_file = save_token(&token)?;\r\n println!(\"💾 Token saved at {}\", &cache_file.display());\r\n\r\n println!(\"🚀 Welcome, {user_name}! Happy solving 🎉\");\r\n Ok(())\r\n}\r\n\r\npub fn logout() -> anyhow::Result<()> {\r\n let cache_file = cache_file()?;\r\n if cache_file.exists() {\r\n fs::remove_file(cache_file)?;\r\n println!(\"🗑️ token cache removed\");\r\n } else {\r\n println!(\"🔎 no token cache found\");\r\n }\r\n\r\n Ok(())\r\n}\r\n\r\npub fn get_token() -> anyhow::Result {\r\n let cache_file = cache_file()?;\r\n\r\n if !cache_file.exists() {\r\n println!(\"⚠️ Attempting to auto login\");\r\n login(None)?\r\n }\r\n\r\n match fs::read_to_string(&cache_file) {\r\n Ok(token) => Ok(token),\r\n Err(err) => Err(anyhow!(\r\n \"❌ {err}\\nUnable to read token. 
(Make sure you have run the `login` command)\"\r\n )),\r\n }\r\n}\r\n\r\nfn save_token(token: &String) -> anyhow::Result<PathBuf> {\r\n let cache_file = cache_file()?;\r\n let mut file = File::create(&cache_file)?;\r\n file.write_all(token.as_bytes())?;\r\n Ok(cache_file)\r\n}\r\n\r\nfn cache_file() -> anyhow::Result<PathBuf> {\r\n let Some(project_dir) = ProjectDirs::from(\"com\", \"advent_of_code\", \"aoc_cli\") else { return Err(anyhow!(\"Could not get project directory\")) };\r\n let cache_dir = project_dir.cache_dir();\r\n\r\n if !cache_dir.exists() {\r\n fs::create_dir_all(cache_dir)?;\r\n }\r\n\r\n Ok(cache_dir.join(\"aoc.cache\"))\r\n}\r\n\r\n// todo: function to retrieve cached token\r\n// todo: logout\r\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1158,"cells":{"blob_id":{"kind":"string","value":"e84f63686921b6e2c38786095c792e852a256097"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"nparthas/project_euler"},"path":{"kind":"string","value":"/src/q1_50/q16.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":458,"string":"458"},"score":{"kind":"number","value":2.953125,"string":"2.953125"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"extern crate num;\nextern crate num_bigint;\n\nuse self::num_bigint::{BigInt, ToBigInt};\nuse std::ops::Mul;\n\n\npub fn q16() -> i64 {\n\n let num_str: String = {\n let mut num: BigInt = 2.to_bigint().unwrap();\n for _ in 1..1000 {\n num = num.mul(2);\n }\n num.to_str_radix(10)\n };\n\n let mut dig_sum: i64 = 0;\n\n for c in num_str.chars() {\n dig_sum += c.to_digit(10).unwrap() as i64;\n }\n\n return dig_sum;\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1159,"cells":{"blob_id":{"kind":"string","value":"da72f242775804c301adbff5e55ce4d923f6a425"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"adjivas/vt100-rust"},"path":{"kind":"string","value":"/tests/csi.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":13668,"string":"13,668"},"score":{"kind":"number","value":2.703125,"string":"2.703125"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"extern crate vt100;\n\nmod support;\nuse support::TestHelpers;\n\n#[test]\nfn absolute_movement() {\n let mut screen = vt100::Screen::new(24, 80);\n assert_eq!(screen.cursor_position(), (0, 0));\n\n screen.assert_process(b\"\\x1b[10;10H\");\n assert_eq!(screen.cursor_position(), (9, 9));\n\n screen.assert_process(b\"\\x1b[d\");\n assert_eq!(screen.cursor_position(), (0, 9));\n\n screen.assert_process(b\"\\x1b[15d\");\n assert_eq!(screen.cursor_position(), (14, 9));\n\n screen.assert_process(b\"\\x1b[H\");\n assert_eq!(screen.cursor_position(), (0, 0));\n\n screen.assert_process(b\"\\x1b[8H\");\n assert_eq!(screen.cursor_position(), (7, 0));\n\n screen.assert_process(b\"\\x1b[15G\");\n assert_eq!(screen.cursor_position(), (7, 14));\n\n screen.assert_process(b\"\\x1b[G\");\n assert_eq!(screen.cursor_position(), (7, 0));\n\n screen.assert_process(b\"\\x1b[0;0H\");\n assert_eq!(screen.cursor_position(), (0, 0));\n\n screen.assert_process(b\"\\x1b[1;1H\");\n 
assert_eq!(screen.cursor_position(), (0, 0));\n\n screen.assert_process(b\"\\x1b[500;500H\");\n assert_eq!(screen.cursor_position(), (23, 79));\n}\n\n#[test]\nfn relative_movement() {\n let mut screen = vt100::Screen::new(24, 80);\n assert_eq!(screen.cursor_position(), (0, 0));\n\n screen.assert_process(b\"\\x1b[C\");\n assert_eq!(screen.cursor_position(), (0, 1));\n\n screen.assert_process(b\"\\x1b[C\");\n assert_eq!(screen.cursor_position(), (0, 2));\n\n screen.assert_process(b\"\\x1b[20C\");\n assert_eq!(screen.cursor_position(), (0, 22));\n\n screen.assert_process(b\"\\x1b[D\");\n assert_eq!(screen.cursor_position(), (0, 21));\n\n screen.assert_process(b\"\\x1b[D\");\n assert_eq!(screen.cursor_position(), (0, 20));\n\n screen.assert_process(b\"\\x1b[9D\");\n assert_eq!(screen.cursor_position(), (0, 11));\n\n screen.assert_process(b\"\\x1b[500C\");\n assert_eq!(screen.cursor_position(), (0, 79));\n\n screen.assert_process(b\"\\x1b[500D\");\n assert_eq!(screen.cursor_position(), (0, 0));\n\n screen.assert_process(b\"\\x1b[B\");\n assert_eq!(screen.cursor_position(), (1, 0));\n\n screen.assert_process(b\"\\x1b[B\");\n assert_eq!(screen.cursor_position(), (2, 0));\n\n screen.assert_process(b\"\\x1b[20B\");\n assert_eq!(screen.cursor_position(), (22, 0));\n\n screen.assert_process(b\"\\x1b[A\");\n assert_eq!(screen.cursor_position(), (21, 0));\n\n screen.assert_process(b\"\\x1b[A\");\n assert_eq!(screen.cursor_position(), (20, 0));\n\n screen.assert_process(b\"\\x1b[9A\");\n assert_eq!(screen.cursor_position(), (11, 0));\n\n screen.assert_process(b\"\\x1b[500B\");\n assert_eq!(screen.cursor_position(), (23, 0));\n\n screen.assert_process(b\"\\x1b[500A\");\n assert_eq!(screen.cursor_position(), (0, 0));\n}\n\n#[test]\nfn ed() {\n let mut screen = vt100::Screen::new(24, 80);\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\");\n\n screen.assert_process(b\"foo\\x1b[5;5Hbar\\x1b[10;10Hbaz\\x1b[20;20Hquux\");\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"foo\\n\\n\\n\\n bar\\n\\n\\n\\n\\n baz\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n quux\\n\\n\\n\\n\\n\");\n\n screen.assert_process(b\"\\x1b[10;12H\\x1b[0J\");\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"foo\\n\\n\\n\\n bar\\n\\n\\n\\n\\n ba\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\");\n\n screen.assert_process(b\"\\x1b[5;7H\\x1b[1J\");\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"\\n\\n\\n\\n r\\n\\n\\n\\n\\n ba\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\");\n\n screen.assert_process(b\"\\x1b[7;7H\\x1b[2J\");\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\");\n\n screen.assert_process(b\"\\x1b[2J\\x1b[H\");\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\");\n\n screen.assert_process(b\"foo\\x1b[5;5Hbar\\x1b[10;10Hbaz\\x1b[20;20Hquux\");\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"foo\\n\\n\\n\\n bar\\n\\n\\n\\n\\n baz\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n quux\\n\\n\\n\\n\\n\");\n\n screen.assert_process(b\"\\x1b[10;12H\\x1b[J\");\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"foo\\n\\n\\n\\n bar\\n\\n\\n\\n\\n ba\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\");\n\n screen.assert_process(b\"\\x1b[2J\\x1b[H\");\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\");\n\n 
screen.assert_process(b\"foo\\x1b[5;5Hbar\\x1b[10;10Hbaz\\x1b[20;20Hquux\");\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"foo\\n\\n\\n\\n bar\\n\\n\\n\\n\\n baz\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n quux\\n\\n\\n\\n\\n\");\n\n screen.assert_process(b\"\\x1b[10;12H\\x1b[?0J\");\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"foo\\n\\n\\n\\n bar\\n\\n\\n\\n\\n ba\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\");\n\n screen.assert_process(b\"\\x1b[5;7H\\x1b[?1J\");\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"\\n\\n\\n\\n r\\n\\n\\n\\n\\n ba\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\");\n\n screen.assert_process(b\"\\x1b[7;7H\\x1b[?2J\");\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\");\n\n screen.assert_process(b\"\\x1b[2J\\x1b[H\");\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\");\n\n screen.assert_process(b\"foo\\x1b[5;5Hbar\\x1b[10;10Hbaz\\x1b[20;20Hquux\");\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"foo\\n\\n\\n\\n bar\\n\\n\\n\\n\\n baz\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n quux\\n\\n\\n\\n\\n\");\n\n screen.assert_process(b\"\\x1b[10;12H\\x1b[?J\");\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"foo\\n\\n\\n\\n bar\\n\\n\\n\\n\\n ba\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\");\n}\n\n#[test]\nfn el() {\n let mut screen = vt100::Screen::new(24, 80);\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\");\n\n screen.assert_process(b\"foo\\x1b[5;5Hbarbar\\x1b[10;10Hbazbaz\\x1b[20;20Hquux\");\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"foo\\n\\n\\n\\n barbar\\n\\n\\n\\n\\n bazbaz\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n quux\\n\\n\\n\\n\\n\");\n\n screen.assert_process(b\"\\x1b[5;8H\\x1b[0K\");\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"foo\\n\\n\\n\\n bar\\n\\n\\n\\n\\n bazbaz\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n quux\\n\\n\\n\\n\\n\");\n\n screen.assert_process(b\"\\x1b[10;13H\\x1b[1K\");\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"foo\\n\\n\\n\\n bar\\n\\n\\n\\n\\n baz\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n quux\\n\\n\\n\\n\\n\");\n\n screen.assert_process(b\"\\x1b[20;22H\\x1b[2K\");\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"foo\\n\\n\\n\\n bar\\n\\n\\n\\n\\n baz\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\");\n\n screen.assert_process(b\"\\x1b[1;2H\\x1b[K\");\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"f\\n\\n\\n\\n bar\\n\\n\\n\\n\\n baz\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\");\n\n screen.assert_process(b\"\\x1b[2J\\x1b[H\");\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\");\n\n screen.assert_process(b\"foo\\x1b[5;5Hbarbar\\x1b[10;10Hbazbaz\\x1b[20;20Hquux\");\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"foo\\n\\n\\n\\n barbar\\n\\n\\n\\n\\n bazbaz\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n quux\\n\\n\\n\\n\\n\");\n\n screen.assert_process(b\"\\x1b[5;8H\\x1b[?0K\");\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"foo\\n\\n\\n\\n bar\\n\\n\\n\\n\\n bazbaz\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n quux\\n\\n\\n\\n\\n\");\n\n screen.assert_process(b\"\\x1b[10;13H\\x1b[?1K\");\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"foo\\n\\n\\n\\n bar\\n\\n\\n\\n\\n baz\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n quux\\n\\n\\n\\n\\n\");\n\n screen.assert_process(b\"\\x1b[20;22H\\x1b[?2K\");\n assert_eq!(screen.window_contents(0, 0, 23, 79), 
\"foo\\n\\n\\n\\n bar\\n\\n\\n\\n\\n baz\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\");\n\n screen.assert_process(b\"\\x1b[1;2H\\x1b[?K\");\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"f\\n\\n\\n\\n bar\\n\\n\\n\\n\\n baz\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\");\n}\n\n#[test]\nfn ich_dch_ech() {\n let mut screen = vt100::Screen::new(24, 80);\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\");\n\n screen.assert_process(b\"\\x1b[10;10Hfoobar\");\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"\\n\\n\\n\\n\\n\\n\\n\\n\\n foobar\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\");\n\n screen.assert_process(b\"\\x1b[10;12H\\x1b[3@\");\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"\\n\\n\\n\\n\\n\\n\\n\\n\\n fo obar\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\");\n assert_eq!(screen.cursor_position(), (9, 11));\n\n screen.assert_process(b\"\\x1b[4P\");\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"\\n\\n\\n\\n\\n\\n\\n\\n\\n fobar\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\");\n assert_eq!(screen.cursor_position(), (9, 11));\n\n screen.assert_process(b\"\\x1b[100@\");\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"\\n\\n\\n\\n\\n\\n\\n\\n\\n fo\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\");\n assert_eq!(screen.cursor_position(), (9, 11));\n\n screen.assert_process(b\"obar\");\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"\\n\\n\\n\\n\\n\\n\\n\\n\\n foobar\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\");\n assert_eq!(screen.cursor_position(), (9, 15));\n\n screen.assert_process(b\"\\x1b[10;12H\\x1b[100P\");\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"\\n\\n\\n\\n\\n\\n\\n\\n\\n fo\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\");\n assert_eq!(screen.cursor_position(), (9, 11));\n\n screen.assert_process(b\"obar\");\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"\\n\\n\\n\\n\\n\\n\\n\\n\\n foobar\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\");\n assert_eq!(screen.cursor_position(), (9, 15));\n\n screen.assert_process(b\"\\x1b[10;13H\\x1b[X\");\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"\\n\\n\\n\\n\\n\\n\\n\\n\\n foo ar\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\");\n assert_eq!(screen.cursor_position(), (9, 12));\n\n screen.assert_process(b\"\\x1b[10;11H\\x1b[4X\");\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"\\n\\n\\n\\n\\n\\n\\n\\n\\n f r\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\");\n assert_eq!(screen.cursor_position(), (9, 10));\n\n screen.assert_process(b\"\\x1b[10;11H\\x1b[400X\");\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"\\n\\n\\n\\n\\n\\n\\n\\n\\n f\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\");\n assert_eq!(screen.cursor_position(), (9, 10));\n}\n\n#[test]\nfn il_dl() {\n let mut screen = vt100::Screen::new(24, 80);\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\");\n\n screen.assert_process(b\"\\x1b[10;10Hfoobar\\x1b[3D\");\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"\\n\\n\\n\\n\\n\\n\\n\\n\\n foobar\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\");\n assert_eq!(screen.cursor_position(), (9, 12));\n\n screen.assert_process(b\"\\x1b[L\");\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n foobar\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\");\n assert_eq!(screen.cursor_position(), (9, 12));\n\n screen.assert_process(b\"\\x1b[3L\");\n assert_eq!(screen.window_contents(0, 0, 23, 79), 
\"\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n foobar\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\");\n assert_eq!(screen.cursor_position(), (9, 12));\n\n screen.assert_process(b\"\\x1b[500L\");\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\");\n assert_eq!(screen.cursor_position(), (9, 12));\n\n screen.assert_process(b\"\\x1b[10;10Hfoobar\\x1b[3D\\x1b[6A\");\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"\\n\\n\\n\\n\\n\\n\\n\\n\\n foobar\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\");\n assert_eq!(screen.cursor_position(), (3, 12));\n\n screen.assert_process(b\"\\x1b[M\");\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"\\n\\n\\n\\n\\n\\n\\n\\n foobar\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\");\n assert_eq!(screen.cursor_position(), (3, 12));\n\n screen.assert_process(b\"\\x1b[3M\");\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"\\n\\n\\n\\n\\n foobar\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\");\n assert_eq!(screen.cursor_position(), (3, 12));\n\n screen.assert_process(b\"\\x1b[500M\");\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\");\n assert_eq!(screen.cursor_position(), (3, 12));\n}\n\n#[test]\nfn scroll() {\n let mut screen = vt100::Screen::new(24, 80);\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\");\n\n screen.assert_process(b\"1\\r\\n2\\r\\n3\\r\\n4\\r\\n5\\r\\n6\\r\\n7\\r\\n8\\r\\n9\\r\\n10\\r\\n11\\r\\n12\\r\\n13\\r\\n14\\r\\n15\\r\\n16\\r\\n17\\r\\n18\\r\\n19\\r\\n20\\r\\n21\\r\\n22\\r\\n23\\r\\n24\");\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"1\\n2\\n3\\n4\\n5\\n6\\n7\\n8\\n9\\n10\\n11\\n12\\n13\\n14\\n15\\n16\\n17\\n18\\n19\\n20\\n21\\n22\\n23\\n24\\n\");\n\n screen.assert_process(b\"\\x1b[15;15H\");\n assert_eq!(screen.cursor_position(), (14, 14));\n\n screen.assert_process(b\"\\x1b[S\");\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"2\\n3\\n4\\n5\\n6\\n7\\n8\\n9\\n10\\n11\\n12\\n13\\n14\\n15\\n16\\n17\\n18\\n19\\n20\\n21\\n22\\n23\\n24\\n\\n\");\n assert_eq!(screen.cursor_position(), (14, 14));\n\n screen.assert_process(b\"\\x1b[3S\");\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"5\\n6\\n7\\n8\\n9\\n10\\n11\\n12\\n13\\n14\\n15\\n16\\n17\\n18\\n19\\n20\\n21\\n22\\n23\\n24\\n\\n\\n\\n\\n\");\n assert_eq!(screen.cursor_position(), (14, 14));\n\n screen.assert_process(b\"\\x1b[T\");\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"\\n5\\n6\\n7\\n8\\n9\\n10\\n11\\n12\\n13\\n14\\n15\\n16\\n17\\n18\\n19\\n20\\n21\\n22\\n23\\n24\\n\\n\\n\\n\");\n assert_eq!(screen.cursor_position(), (14, 14));\n\n screen.assert_process(b\"\\x1b[5T\");\n assert_eq!(screen.window_contents(0, 0, 23, 79), \"\\n\\n\\n\\n\\n\\n5\\n6\\n7\\n8\\n9\\n10\\n11\\n12\\n13\\n14\\n15\\n16\\n17\\n18\\n19\\n20\\n21\\n22\\n\");\n assert_eq!(screen.cursor_position(), (14, 
14));\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1160,"cells":{"blob_id":{"kind":"string","value":"9aa78b2edba994bb6f3f7e5fdf7b37e563bdb3d7"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"dermesser/leveldb-rs"},"path":{"kind":"string","value":"/src/asyncdb.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":9918,"string":"9,918"},"score":{"kind":"number","value":3.046875,"string":"3.046875"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"use std::collections::hash_map::HashMap;\nuse std::path::Path;\n\nuse crate::{Options, Result, Status, StatusCode, WriteBatch, DB};\n\nuse tokio::sync::mpsc;\nuse tokio::sync::oneshot;\nuse tokio::task::{spawn_blocking, JoinHandle};\n\nconst CHANNEL_BUFFER_SIZE: usize = 32;\n\n#[derive(Clone, Copy)]\npub struct SnapshotRef(usize);\n\n/// A request sent to the database thread.\nenum Request {\n Close,\n Put { key: Vec, val: Vec },\n Delete { key: Vec },\n Write { batch: WriteBatch, sync: bool },\n Flush,\n GetAt { snapshot: SnapshotRef, key: Vec },\n Get { key: Vec },\n GetSnapshot,\n DropSnapshot { snapshot: SnapshotRef },\n CompactRange { from: Vec, to: Vec },\n}\n\n/// A response received from the database thread.\nenum Response {\n OK,\n Error(Status),\n Value(Option>),\n Snapshot(SnapshotRef),\n}\n\n/// Contains both a request and a back-channel for the reply.\nstruct Message {\n req: Request,\n resp_channel: oneshot::Sender,\n}\n\n/// `AsyncDB` makes it easy to use LevelDB in a tokio runtime.\n/// The methods follow very closely the main API (see `DB` type). Iteration is not yet implemented.\n///\n/// TODO: Make it work in other runtimes as well. 
This is a matter of adapting the blocking thread\n/// mechanism as well as the channel types.\npub struct AsyncDB {\n jh: JoinHandle<()>,\n send: mpsc::Sender,\n}\n\nimpl AsyncDB {\n /// Create a new or open an existing database.\n pub fn new>(name: P, opts: Options) -> Result {\n let db = DB::open(name, opts)?;\n let (send, recv) = mpsc::channel(CHANNEL_BUFFER_SIZE);\n let jh = spawn_blocking(move || AsyncDB::run_server(db, recv));\n Ok(AsyncDB { jh, send })\n }\n\n pub async fn close(&self) -> Result<()> {\n let r = self.process_request(Request::Close).await?;\n match r {\n Response::OK => Ok(()),\n Response::Error(s) => Err(s),\n _ => Err(Status {\n code: StatusCode::AsyncError,\n err: \"Wrong response type in AsyncDB.\".to_string(),\n }),\n }\n }\n\n pub async fn put(&self, key: Vec, val: Vec) -> Result<()> {\n let r = self.process_request(Request::Put { key, val }).await?;\n match r {\n Response::OK => Ok(()),\n Response::Error(s) => Err(s),\n _ => Err(Status {\n code: StatusCode::AsyncError,\n err: \"Wrong response type in AsyncDB.\".to_string(),\n }),\n }\n }\n pub async fn delete(&self, key: Vec) -> Result<()> {\n let r = self.process_request(Request::Delete { key }).await?;\n match r {\n Response::OK => Ok(()),\n Response::Error(s) => Err(s),\n _ => Err(Status {\n code: StatusCode::AsyncError,\n err: \"Wrong response type in AsyncDB.\".to_string(),\n }),\n }\n }\n pub async fn write(&self, batch: WriteBatch, sync: bool) -> Result<()> {\n let r = self.process_request(Request::Write { batch, sync }).await?;\n match r {\n Response::OK => Ok(()),\n Response::Error(s) => Err(s),\n _ => Err(Status {\n code: StatusCode::AsyncError,\n err: \"Wrong response type in AsyncDB.\".to_string(),\n }),\n }\n }\n pub async fn flush(&self) -> Result<()> {\n let r = self.process_request(Request::Flush).await?;\n match r {\n Response::OK => Ok(()),\n Response::Error(s) => Err(s),\n _ => Err(Status {\n code: StatusCode::AsyncError,\n err: \"Wrong response type in AsyncDB.\".to_string(),\n }),\n }\n }\n pub async fn get(&self, key: Vec) -> Result>> {\n let r = self.process_request(Request::Get { key }).await?;\n match r {\n Response::Value(v) => Ok(v),\n Response::Error(s) => Err(s),\n _ => Err(Status {\n code: StatusCode::AsyncError,\n err: \"Wrong response type in AsyncDB.\".to_string(),\n }),\n }\n }\n pub async fn get_at(&self, snapshot: SnapshotRef, key: Vec) -> Result>> {\n let r = self\n .process_request(Request::GetAt { snapshot, key })\n .await?;\n match r {\n Response::Value(v) => Ok(v),\n Response::Error(s) => Err(s),\n _ => Err(Status {\n code: StatusCode::AsyncError,\n err: \"Wrong response type in AsyncDB.\".to_string(),\n }),\n }\n }\n pub async fn get_snapshot(&self) -> Result {\n let r = self.process_request(Request::GetSnapshot).await?;\n match r {\n Response::Snapshot(sr) => Ok(sr),\n _ => Err(Status {\n code: StatusCode::AsyncError,\n err: \"Wrong response type in AsyncDB.\".to_string(),\n }),\n }\n }\n /// As snapshots returned by `AsyncDB::get_snapshot()` are sort-of \"weak references\" to an\n /// actual snapshot, they need to be dropped explicitly.\n pub async fn drop_snapshot(&self, snapshot: SnapshotRef) -> Result<()> {\n let r = self\n .process_request(Request::DropSnapshot { snapshot })\n .await?;\n match r {\n Response::OK => Ok(()),\n _ => Err(Status {\n code: StatusCode::AsyncError,\n err: \"Wrong response type in AsyncDB.\".to_string(),\n }),\n }\n }\n pub async fn compact_range(&self, from: Vec, to: Vec) -> Result<()> {\n let r = self\n 
.process_request(Request::CompactRange { from, to })\n .await?;\n match r {\n Response::OK => Ok(()),\n Response::Error(s) => Err(s),\n _ => Err(Status {\n code: StatusCode::AsyncError,\n err: \"Wrong response type in AsyncDB.\".to_string(),\n }),\n }\n }\n\n async fn process_request(&self, req: Request) -> Result {\n let (tx, rx) = oneshot::channel();\n let m = Message {\n req,\n resp_channel: tx,\n };\n if let Err(e) = self.send.send(m).await {\n return Err(Status {\n code: StatusCode::AsyncError,\n err: e.to_string(),\n });\n }\n let resp = rx.await;\n match resp {\n Err(e) => Err(Status {\n code: StatusCode::AsyncError,\n err: e.to_string(),\n }),\n Ok(r) => Ok(r),\n }\n }\n\n fn run_server(mut db: DB, mut recv: mpsc::Receiver) {\n let mut snapshots = HashMap::new();\n let mut snapshot_counter: usize = 0;\n\n while let Some(message) = recv.blocking_recv() {\n match message.req {\n Request::Close => {\n message.resp_channel.send(Response::OK).ok();\n recv.close();\n return;\n }\n Request::Put { key, val } => {\n let ok = db.put(&key, &val);\n send_response(message.resp_channel, ok);\n }\n Request::Delete { key } => {\n let ok = db.delete(&key);\n send_response(message.resp_channel, ok);\n }\n Request::Write { batch, sync } => {\n let ok = db.write(batch, sync);\n send_response(message.resp_channel, ok);\n }\n Request::Flush => {\n let ok = db.flush();\n send_response(message.resp_channel, ok);\n }\n Request::GetAt { snapshot, key } => {\n let snapshot_id = snapshot.0;\n if let Some(snapshot) = snapshots.get(&snapshot_id) {\n let ok = db.get_at(&snapshot, &key);\n match ok {\n Err(e) => {\n message.resp_channel.send(Response::Error(e)).ok();\n }\n Ok(v) => {\n message.resp_channel.send(Response::Value(v)).ok();\n }\n };\n } else {\n message\n .resp_channel\n .send(Response::Error(Status {\n code: StatusCode::AsyncError,\n err: \"Unknown snapshot reference: this is a bug\".to_string(),\n }))\n .ok();\n }\n }\n Request::Get { key } => {\n let r = db.get(&key);\n message.resp_channel.send(Response::Value(r)).ok();\n }\n Request::GetSnapshot => {\n snapshots.insert(snapshot_counter, db.get_snapshot());\n let sref = SnapshotRef(snapshot_counter);\n snapshot_counter += 1;\n message.resp_channel.send(Response::Snapshot(sref)).ok();\n }\n Request::DropSnapshot { snapshot } => {\n snapshots.remove(&snapshot.0);\n send_response(message.resp_channel, Ok(()));\n }\n Request::CompactRange { from, to } => {\n let ok = db.compact_range(&from, &to);\n send_response(message.resp_channel, ok);\n }\n }\n }\n }\n}\n\nfn send_response(ch: oneshot::Sender, result: Result<()>) {\n if let Err(e) = result {\n ch.send(Response::Error(e)).ok();\n } else {\n ch.send(Response::OK).ok();\n }\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1161,"cells":{"blob_id":{"kind":"string","value":"35e0b54b4037574ec6d98644f5f8c7d0568c401c"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"Joxx0r/RustRevEngine"},"path":{"kind":"string","value":"/src/misc/input.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":2512,"string":"2,512"},"score":{"kind":"number","value":3.078125,"string":"3.078125"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"use crate::math::vec::Vec2;\nuse glfw::{Key, Action, MouseButton};\nuse crate::misc::camera::Camera;\nuse 
crate::misc::camera::Camera_Movement::*;\n\npub struct input_state {\n pub left_mouse_button_down: bool,\n pub mouse_position: Vec2,\n pub forward_key_down: bool,\n pub back_key_down: bool,\n pub left_key_down: bool,\n pub right_key_down: bool,\n pub esc_button_down: bool\n}\nimpl input_state {\n pub fn default() -> input_state {\n return input_state{\n left_mouse_button_down: false,\n mouse_position: Vec2::default(),\n forward_key_down: false,\n back_key_down: false,\n left_key_down: false,\n right_key_down: false,\n esc_button_down: false\n }\n }\n}\n\npub fn calculate_input(window: &mut glfw::Window) -> input_state {\n let mut state: input_state = input_state::default();\n state.forward_key_down = window.get_key(Key::W) == Action::Press;\n state.back_key_down = window.get_key(Key::S) == Action::Press;\n state.left_key_down = window.get_key(Key::A) == Action::Press;\n state.right_key_down = window.get_key(Key::D) == Action::Press;\n state.esc_button_down = window.get_key(Key::Escape) == Action::Press;\n state.left_mouse_button_down = window.get_mouse_button(MouseButton::Button1) == Action::Press;\n state.mouse_position = Vec2::new_tuple_f64(window.get_cursor_pos());\n state\n}\n\npub unsafe fn process_input(window: &mut glfw::Window, delta_time: f32, camera: &mut Camera, state:input_state) {\n if state.esc_button_down {\n window.set_should_close(true)\n }\n if state.forward_key_down {\n camera.ProcessKeyboard(FORWARD, delta_time);\n }\n if state.back_key_down {\n camera.ProcessKeyboard(BACKWARD, delta_time);\n }\n if state.left_key_down {\n camera.ProcessKeyboard(LEFT, delta_time);\n }\n if state.right_key_down {\n camera.ProcessKeyboard( RIGHT, delta_time);\n }\n\n static mut prev_frame_mouse_button_down: bool = false;\n static mut old_mouse_position:Vec2 = Vec2::default();\n if(state.left_mouse_button_down) {\n if(prev_frame_mouse_button_down) {\n let delta = Vec2::new_tuple_f32((state.mouse_position.x - old_mouse_position.x, -1.0 * (state.mouse_position.y - old_mouse_position.y)));\n camera.ProcessMouseMovement(delta.x, delta.y, false);\n }\n old_mouse_position = state.mouse_position;\n }\n prev_frame_mouse_button_down = state.left_mouse_button_down;\n}\n\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1162,"cells":{"blob_id":{"kind":"string","value":"dca7e0facdb300a9c8f85b455b55f5bd534dd19a"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"hubris-lang/hubris"},"path":{"kind":"string","value":"/src/hubris_rt/src/lib.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":493,"string":"493"},"score":{"kind":"number","value":3.125,"string":"3.125"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"use std::rc::Rc;\nuse std::mem::transmute;\n\nstruct ObjValue {\n ptr: *mut usize,\n}\n\npub struct Obj(Rc);\n\nimpl Obj {\n pub fn from(t: T) -> Obj {\n unsafe {\n let boxed_val = Box::new(t);\n\n let val = ObjValue {\n ptr: transmute(Box::into_raw(boxed_val)),\n };\n\n Obj(Rc::new(val))\n }\n }\n\n pub fn unbox(&self) -> &T {\n let ptr: *mut usize = self.0.ptr;\n unsafe { transmute(ptr) }\n 
}\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1163,"cells":{"blob_id":{"kind":"string","value":"00c5e51dfc68f6afdf5222d95fb4ccfe2d728d48"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"bollo35/cryptopals"},"path":{"kind":"string","value":"/src/bin/p040.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":3181,"string":"3,181"},"score":{"kind":"number","value":3.09375,"string":"3.09375"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"extern crate ooga;\nuse ooga::rsa::Rsa;\nextern crate openssl;\n\nuse openssl::bn::{BigNum, BigNumContext};\nuse std::ops::{Add, Div, Mul, Sub};\n\nfn main() {\n\tlet rsa0 = Rsa::new();\n\tlet rsa1 = Rsa::new();\n\tlet rsa2 = Rsa::new();\n\n\tlet msg = \"Never gonna give you up! Never gonna let you down!\".to_string();\n\n\tlet ct0 = BigNum::from_slice(&rsa0.enc_str(msg.clone()).unwrap()).unwrap();\n\tlet ct1 = BigNum::from_slice(&rsa1.enc_str(msg.clone()).unwrap()).unwrap();\n\tlet ct2 = BigNum::from_slice(&rsa2.enc_str(msg.clone()).unwrap()).unwrap();\n\n\tlet og = bncbrt(BigNum::from_slice(&ct0.to_vec()).unwrap());\n\tprintln!(\"og: {:?}\", String::from_utf8(og.to_vec()));\n\n\tprintln!(\"C0: {:?}\", ct0);\n\tprintln!(\"C1: {:?}\", ct1);\n\tprintln!(\"C2: {:?}\", ct2);\n\tprintln!();\n\tprintln!();\n\tprintln!();\n\n\tlet (e0, n0) = rsa0.get_pubkey();\n\tlet (e1, n1) = rsa1.get_pubkey();\n\tlet (e2, n2) = rsa2.get_pubkey();\n\n\tprintln!(\"n0 == n1: {}\", n0 == n1);\n\tprintln!(\"n2 == n1: {}\", n2 == n1);\n\tprintln!(\"n0 == n2: {}\", n0 == n2);\n\n\tprintln!(\"e0: {}\", e0);\n\tprintln!(\"e1: {}\", e1);\n\tprintln!(\"e2: {}\", e2);\n\n\t// N0 = n1 * n2\n\tlet N0 = n1.mul(&n2);\n\n\t// N1 = n0 * n2\n\tlet N1 = n0.mul(&n2);\n\n\t// N2 = n0 * n1\n\tlet N2 = n0.mul(&n1);\n\n\tlet mut bnctx = BigNumContext::new().unwrap();\n\t// a0 = invmod(N0, n0)\n\tlet mut a0 = BigNum::new().unwrap();\n\ta0.mod_inverse(&N0, &n0, &mut bnctx).unwrap();\n\n\t// a1 = invmod(N1, n1)\n\tlet mut a1 = BigNum::new().unwrap();\n\ta1.mod_inverse(&N1, &n1, &mut bnctx).unwrap();\n\n\t// a2 = invmod(N2, n2)\n\tlet mut a2 = BigNum::new().unwrap();\n\ta2.mod_inverse(&N2, &n2, &mut bnctx).unwrap();\n\n\t// p0 = c0 * N0 * a0\n\tlet p0 = ct0.mul(&N0).mul(&a0);\n\t// p1 = c1 * N1 * a1\n\tlet p1 = ct1.mul(&N1).mul(&a1);\n\t// p2 = c2 * N2 * a2\n\tlet p2 = ct2.mul(&N2).mul(&a2);\n\n\t// In the instructions, they say that you don't need to take the result\n\t// modulo N_012 but that doesn't make sense.\n\t// The interesting thing is that if you have a message that's smaller\n\t// than N, and you know e = 3, you could just take the cubed root\n\t// without the chinese remainder theorem. I don't quite get the point\n\t// of this exercise. Ah, I guess the only thing I can think of is if\n\t// you have a message that gets broken into chunks, then you could\n\t// do this still? 
I don't know.\n\tlet mut m_e = BigNum::new().unwrap();\n\tm_e.mod_add(&p0.add(&p1), &p2, &n0.mul(&n1).mul(&n2), &mut bnctx).unwrap();\n\tprintln!(\"m^e: {}\", m_e);\n\n\tlet m = bncbrt(BigNum::from_slice(&m_e.to_vec()).unwrap());\n\n\tlet m_3 = m.mul(&m).mul(&m);\n\n\tif m != m_e {\n\t\tprintln!(\"m_e - m'_3 = {}\", m_e.sub(&m_3));\n\t}\n\n\tlet msg = String::from_utf8(m.to_vec());\n\n\tprintln!(\"Recovered message: {:?}\", msg);\n}\n\nfn bncbrt(n: BigNum) -> BigNum {\n\tlet one = BigNum::from_u32(1).unwrap();\n\tlet two = BigNum::from_u32(2).unwrap();\n\t// let's do a binary search...\n\tlet mut high = BigNum::from_slice(&n.to_vec()).unwrap();\n\tlet mut low = BigNum::from_u32(0).unwrap();\n\n\tlet mut guess = high.add(&low).div(&two);\n\n\tlet mut cube = guess.mul(&guess).mul(&guess);\n\n\twhile cube != n && high != low {\n\t\tif cube > n {\n\t\t\thigh = guess.sub(&one);\n\t\t} else {\n\t\t\tlow = guess.add(&one);\n\t\t}\n\t\tguess = high.add(&low).div(&two);\n\t\tcube = guess.mul(&guess).mul(&guess);\n\t}\n\tguess\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1164,"cells":{"blob_id":{"kind":"string","value":"9bd1c7e7f17365d5f76e9cea6cf3c85305130196"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"ushkarev/rusty-kms"},"path":{"kind":"string","value":"/src/key_store/key/tags.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":942,"string":"942"},"score":{"kind":"number","value":3.125,"string":"3.125"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"use crate::key_store::errors::*;\nuse crate::key_store::tag::Tag;\nuse crate::key_store::key::{Key, State};\n\nimpl Key {\n pub fn add_tag(&mut self, tag: Tag) -> Result<(), AddTagError> {\n if let State::PendingDeletion(_) = self.state {\n return Err(AddTagError::InvalidState);\n }\n if let Some(existing_tag) = self.tags.iter_mut().find(|t| t.key() == tag.key()) {\n *existing_tag = tag;\n } else {\n self.tags.push(tag);\n }\n Ok(())\n }\n\n pub fn remove_tag(&mut self, tag_key: &str) -> Result {\n if let State::PendingDeletion(_) = self.state {\n return Err(RemoveTagError::InvalidState);\n }\n // TODO: should removing a non-existant tag fail?\n self.tags.iter()\n .position(|t| t.key() == tag_key)\n .map(|index| self.tags.remove(index))\n .ok_or(RemoveTagError::NotFound)\n }\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1165,"cells":{"blob_id":{"kind":"string","value":"9294846647301ee2e68c1e8190d55b981ccf53fd"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"leptonyu/salak.rs"},"path":{"kind":"string","value":"/src/lib.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":10594,"string":"10,594"},"score":{"kind":"number","value":3.671875,"string":"3.671875"},"int_score":{"kind":"number","value":4,"string":"4"},"detected_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"//! Salak is a multi layered configuration loader and zero-boilerplate configuration parser, with many predefined sources.\n//!\n//! 1. [About](#about)\n//! 2. [Quick Start](#quick-start)\n//! 3. [Features](#features)\n//! * [Predefined Sources](#predefined-sources)\n//! 
* [Key Convention](#key-convention)\n//! * [Value Placeholder Parsing](#value-placeholder-parsing)\n//! * [Attributes For Derive](#attributes-for-derive)\n//! * [Reload Configuration](#reload-configuration)\n//! * [Resource Factory](#resource-factory)\n//!\n//! ## About\n//! `salak` is a multi layered configuration loader with many predefined sources. Also it\n//! is a zero-boilerplate configuration parser which provides an auto-derive procedure macro\n//! to derive [`FromEnvironment`] so that we can parse configuration structs without any additional codes.\n//!\n//! ## Quick Start\n//! A simple example of `salak`:\n//!\n//! ```\n//! use salak::*;\n//!\n//! #[derive(Debug, FromEnvironment)]\n//! #[salak(prefix = \"config\")]\n//! struct Config {\n//! #[salak(default = false)]\n//! verbose: bool,\n//! optional: Option,\n//! #[salak(name = \"val\")]\n//! value: i64,\n//! }\n//! let env = Salak::builder()\n//! .set(\"config.val\", \"2021\")\n//! .build()\n//! .unwrap();\n//! let config = env.get::().unwrap();\n//! assert_eq!(2021, config.value);\n//! assert_eq!(None, config.optional);\n//! assert_eq!(false, config.verbose);\n//! ```\n//!\n//! ## Features\n//!\n//! #### Predefined Sources\n//! Predefined sources has the following order, [`Salak`] will find by sequence of these orders,\n//! if the property with specified key is found at the current source, than return immediately. Otherwise,\n//! it will search the next source.\n//!\n//! 1. Random source provides a group of keys can return random values.\n//! * `random.u8`\n//! * `random.u16`\n//! * `random.u32`\n//! * `random.u64`\n//! * `random.u128`\n//! * `random.usize`\n//! * `random.i8`\n//! * `random.i16`\n//! * `random.i32`\n//! * `random.i64`\n//! * `random.i128`\n//! * `random.isize`\n//! 2. Custom arguments source. [`SalakBuilder::set()`] can set a single kv,\n//! and [`SalakBuilder::set_args()`] can set a group of kvs.\n//! 3. System environment source. Implemented by [`source::system_environment`].\n//! 4. Profile specified file source, eg. `app-dev.toml`, supports reloading.\n//! 5. No profile file source, eg. `app.toml`, supports reloading.\n//! 6. Custom sources, which can register by [`Salak::register()`].\n//!\n//! #### Key Convention\n//! Key is used for search configuration from [`Environment`], normally it is represented by string.\n//! Key is a group of SubKey separated by dot(`.`), and SubKey is a name or a name followed by index.\n//! 1. SubKey Format (`[a-z][_a-z0-9]+(\\[[0-9]+\\])*`)\n//! * `a`\n//! * `a0`\n//! * `a_b`\n//! * `a[0]`\n//! * `a[0][0]`\n//! 2. Key Format (`SubKey(\\.SubKey)*`)\n//! * `a`\n//! * `a.b`\n//! * `a.val[0]`\n//! * `a_b[0]`\n//!\n//! #### Value Placeholder Parsing\n//! 1. Placeholder Format\n//! * `${key}` => Get value of `key`.\n//! * `${key:default}` => Get value of `key`, if not exists return `default`.\n//! 2. Escape Format\n//! * `\\$\\{key\\}` => Return `${key}`.\n//! * `$`, `\\`, `{`, `}` must use escape format.\n//!\n//! #### Attributes For Derive\n//! `salak` supports some attributes for automatically derive [`FromEnvironment`].\n//! All attributes have format `#[salak(..)]`, eg. `#[salak(default = \"default value\")]`.\n//! 1. Struct Header Attribute.\n//! * `#[salak(prefix = \"salak.application\")]`, has this attr will auto implement [`PrefixedFromEnvironment`].\n//! 2. Struct Field Attribute.\n//! * `#[salak(default = \"value\")]`, this attr can specify default value.\n//! * `#[salak(name = \"key\")]`, this attr can specify property key, default convension is use field name.\n//! 
* `#[salak(desc = \"Field Description\")]`, this attr can be describe this property.\n//!\n//! #### Reload Configuration\n//! `salak` supports reload configurations. Since in rust mutable\n//! and alias can't be used together, here we introduce a wrapper\n//! [`wrapper::IORef`] for updating values when reloading.\n//!\n//! #### Resource Factory\n//! [`Resource`] defines a standard way to create instance. [`Factory`] provides functions to initialize resource\n//! and cache resource. Please refer to [salak_factory](https://docs.rs/salak_factory) for resource usage.\n//! Feature 'app' should be open for this feature.\n//!\n#![cfg_attr(docsrs, feature(doc_cfg))]\n#![warn(\n anonymous_parameters,\n missing_copy_implementations,\n missing_debug_implementations,\n missing_docs,\n nonstandard_style,\n rust_2018_idioms,\n single_use_lifetimes,\n trivial_casts,\n trivial_numeric_casts,\n unreachable_pub,\n unused_extern_crates,\n unused_qualifications,\n variant_size_differences\n)]\n\nuse parking_lot::Mutex;\n\n#[cfg(feature = \"derive\")]\nuse crate::derive::KeyDesc;\n#[cfg(feature = \"derive\")]\nmod derive;\n#[cfg(feature = \"derive\")]\n#[cfg_attr(docsrs, doc(cfg(feature = \"derive\")))]\npub use crate::derive::{\n AutoDeriveFromEnvironment, DescFromEnvironment, PrefixedFromEnvironment, SalakDescContext,\n};\nuse raw_ioref::IORefT;\n/// Auto derive [`FromEnvironment`] for struct.\n#[cfg(feature = \"derive\")]\n#[cfg_attr(docsrs, doc(cfg(feature = \"derive\")))]\npub use salak_derive::FromEnvironment;\n/// Auto derive [`Service`] for struct.\n#[cfg(all(feature = \"derive\", feature = \"app\"))]\n#[cfg_attr(docsrs, doc(cfg(all(feature = \"derive\", feature = \"app\"))))]\npub use salak_derive::Service;\nuse source_raw::PropertyRegistryInternal;\n\n#[cfg(feature = \"args\")]\n#[cfg_attr(docsrs, doc(cfg(feature = \"args\")))]\nmod args;\n#[cfg(feature = \"args\")]\n#[cfg_attr(docsrs, doc(cfg(feature = \"args\")))]\npub use crate::args::AppInfo;\n\nmod err;\nmod raw;\nuse crate::raw::SubKey;\npub use crate::raw::{IsProperty, Property};\nmod raw_ioref;\nmod raw_vec;\nuse crate::env::PREFIX;\npub use crate::env::{Salak, SalakBuilder};\nmod env;\nmod raw_enum;\n\npub use crate::err::PropertyError;\npub use crate::raw_enum::EnumProperty;\n\nmod source_map;\n#[cfg(feature = \"rand\")]\n#[cfg_attr(docsrs, doc(cfg(feature = \"rand\")))]\nmod source_rand;\nmod source_raw;\n#[cfg(feature = \"toml\")]\n#[cfg_attr(docsrs, doc(cfg(feature = \"toml\")))]\nmod source_toml;\n#[cfg(feature = \"yaml\")]\n#[cfg_attr(docsrs, doc(cfg(feature = \"yaml\")))]\nmod source_yaml;\n\nuse crate::source::Key;\nuse crate::source::SubKeys;\n\n#[cfg(feature = \"app\")]\n#[cfg_attr(docsrs, doc(cfg(feature = \"app\")))]\nmod app;\n#[cfg(feature = \"app\")]\n#[cfg_attr(docsrs, doc(cfg(feature = \"app\")))]\npub use crate::app::*;\n\n#[cfg(test)]\n#[macro_use(quickcheck)]\nextern crate quickcheck_macros;\n\n/// Salak wrapper for configuration parsing.\n///\n/// Wrapper can determine extra behavior for parsing.\n/// Such as check empty of vec or update when reloading.\npub mod wrapper {\n pub use crate::raw_ioref::IORef;\n pub use crate::raw_vec::NonEmptyVec;\n}\n\n/// Salak sources.\n///\n/// This mod exports all pub sources.\npub mod source {\n\n #[cfg(feature = \"args\")]\n #[cfg_attr(docsrs, doc(cfg(feature = \"args\")))]\n pub(crate) use crate::args::from_args;\n pub use crate::raw::Key;\n pub use crate::raw::SubKeys;\n pub use crate::source_map::system_environment;\n pub use crate::source_map::HashMapSource;\n}\n\npub(crate) type 
Res = Result;\npub(crate) type Void = Res<()>;\n\n/// A property source defines how to load properties.\n/// `salak` has some predefined sources, user can\n/// provide custom source by implementing this trait.\n///\n/// Sources provided by `salak`.\n///\n/// * hashmap source\n/// * std::env source\n/// * toml source\n/// * yaml source\npub trait PropertySource: Send + Sync {\n /// [`PropertySource`] name.\n fn name(&self) -> &str;\n\n /// Get property by key.\n fn get_property(&self, key: &Key<'_>) -> Option>;\n\n /// Get all subkeys with given key.\n ///\n /// Subkeys are keys without dot('.').\n /// This method is unstable, and will be simplified by hidding\n /// Key and SubKeys.\n fn get_sub_keys<'a>(&'a self, key: &Key<'_>, sub_keys: &mut SubKeys<'a>);\n\n /// Check whether the [`PropertySource`] is empty.\n /// Empty source will be ignored when registering to `salak`.\n fn is_empty(&self) -> bool;\n\n /// Reload source, if nothing changes, then return none.\n #[inline]\n fn reload_source(&self) -> Res>> {\n Ok(None)\n }\n}\n\n/// Environment defines interface for getting values, and reloading\n/// configurations.\n///\n/// The implementor of this trait is [`Salak`].\npub trait Environment {\n /// Get value by key.\n /// * `key` - Configuration key.\n ///\n /// Require means is if the value `T` is not found,\n /// then error will be returned. But if you try to get\n /// `Option`, then not found will return `None`.\n fn require(&self, key: &str) -> Res;\n\n /// Reload configuration. If reloading is completed,\n /// all values wrapped by [`wrapper::IORef`] will be updated.\n ///\n /// Currently, this feature is unstable, the returned bool\n /// value means reloading is completed without error.\n fn reload(&self) -> Res;\n\n #[cfg(feature = \"derive\")]\n #[cfg_attr(docsrs, doc(cfg(feature = \"derive\")))]\n #[inline]\n /// Get value with predefined key.\n ///\n /// [`PrefixedFromEnvironment`] can be auto derives by\n /// [`salak_derive::FromEnvironment`] macro. 
It provides\n /// a standard key for getting value `T`.\n fn get(&self) -> Res {\n self.require::(T::prefix())\n }\n}\n\n/// Context for implementing [`FromEnvironment`].\n#[allow(missing_debug_implementations)]\npub struct SalakContext<'a> {\n registry: &'a PropertyRegistryInternal<'a>,\n iorefs: &'a Mutex>>,\n key: &'a mut Key<'a>,\n}\n\n/// Parsing value from environment by [`SalakContext`].\npub trait FromEnvironment: Sized {\n /// Generate object from [`SalakContext`].\n /// * `val` - Property value can be parsed from.\n /// * `env` - Context.\n ///\n /// ```no_run\n /// use salak::*;\n /// pub struct Config {\n /// key: String\n /// }\n /// impl FromEnvironment for Config {\n /// fn from_env(\n /// val: Option>,\n /// env: &mut SalakContext<'_>,\n /// ) -> Result {\n /// Ok(Self{\n /// key: env.require_def(\"key\", None)?,\n /// })\n /// }\n /// }\n ///\n /// ```\n fn from_env(val: Option>, env: &mut SalakContext<'_>) -> Res;\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1166,"cells":{"blob_id":{"kind":"string","value":"803d47198803d026358b00492feed23c3c720724"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"trevershick/uni"},"path":{"kind":"string","value":"/src/main.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":1356,"string":"1,356"},"score":{"kind":"number","value":3.03125,"string":"3.03125"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"#[macro_use]\nextern crate simple_error;\n#[macro_use]\nextern crate clap;\n\nuse std::error::Error;\nuse std::process::exit;\nuse std::result::Result;\n\nuse clap::App;\nuse hex;\n\nfn utf8_to_utf16(unicode_bytes: Vec) -> Result> {\n if unicode_bytes.len() != 2 {\n bail!(\"only handle arrays of 2\");\n }\n\n let mut wide = unicode_bytes[0] as u16;\n wide <<= 8;\n wide += unicode_bytes[1] as u16;\n return match String::from_utf16(&[wide]) {\n Ok(v) => Ok(v),\n Err(x) => Err(Box::new(x)),\n };\n}\n\nfn main() {\n let matches = App::new(\"uni\")\n .version(\"1.0.0\")\n .about(\"Convert unicode hex to unicode character\")\n .args_from_usage(\"... 'A sequence of utf16 hex values, i.e. 
30CE B0AB'\")\n .get_matches();\n\n let hex_values = values_t!(matches, \"hex_vals\", String).unwrap();\n\n let mut bad: Vec = Vec::new();\n for hex_value in hex_values {\n let decoded = hex::decode(&hex_value);\n match decoded {\n Ok(unicode_bytes) => match utf8_to_utf16(unicode_bytes) {\n Ok(v) => print!(\"{}\", v),\n Err(_) => bad.push(hex_value),\n },\n Err(_) => bad.push(hex_value),\n };\n }\n\n for b in &bad {\n eprintln!(\"bad code {}\", b);\n }\n if bad.len() > 0 {\n exit(1)\n }\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1167,"cells":{"blob_id":{"kind":"string","value":"9f50f066cda213a8677e45e00a4d365757896afe"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"embed-rs/stm32f7x6"},"path":{"kind":"string","value":"/src/ethernet_mmc/mmccr/mod.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":19766,"string":"19,766"},"score":{"kind":"number","value":2.921875,"string":"2.921875"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"#[doc = r\" Value read from the register\"]\npub struct R {\n bits: u32,\n}\n#[doc = r\" Value to write to the register\"]\npub struct W {\n bits: u32,\n}\nimpl super::MMCCR {\n #[doc = r\" Modifies the contents of the register\"]\n #[inline]\n pub fn modify(&self, f: F)\n where\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n {\n let bits = self.register.get();\n let r = R { bits: bits };\n let mut w = W { bits: bits };\n f(&r, &mut w);\n self.register.set(w.bits);\n }\n #[doc = r\" Reads the contents of the register\"]\n #[inline]\n pub fn read(&self) -> R {\n R {\n bits: self.register.get(),\n }\n }\n #[doc = r\" Writes to the register\"]\n #[inline]\n pub fn write(&self, f: F)\n where\n F: FnOnce(&mut W) -> &mut W,\n {\n let mut w = W::reset_value();\n f(&mut w);\n self.register.set(w.bits);\n }\n #[doc = r\" Writes the reset value to the register\"]\n #[inline]\n pub fn reset(&self) {\n self.write(|w| w)\n }\n}\n#[doc = \"Possible values of the field `CR`\"]\n#[derive(Clone, Copy, Debug, PartialEq)]\npub enum CRR {\n #[doc = \"Reset all counters. 
Cleared automatically\"]\n RESET,\n #[doc = r\" Reserved\"]\n _Reserved(bool),\n}\nimpl CRR {\n #[doc = r\" Returns `true` if the bit is clear (0)\"]\n #[inline]\n pub fn bit_is_clear(&self) -> bool {\n !self.bit()\n }\n #[doc = r\" Returns `true` if the bit is set (1)\"]\n #[inline]\n pub fn bit_is_set(&self) -> bool {\n self.bit()\n }\n #[doc = r\" Value of the field as raw bits\"]\n #[inline]\n pub fn bit(&self) -> bool {\n match *self {\n CRR::RESET => true,\n CRR::_Reserved(bits) => bits,\n }\n }\n #[allow(missing_docs)]\n #[doc(hidden)]\n #[inline]\n pub fn _from(value: bool) -> CRR {\n match value {\n true => CRR::RESET,\n i => CRR::_Reserved(i),\n }\n }\n #[doc = \"Checks if the value of the field is `RESET`\"]\n #[inline]\n pub fn is_reset(&self) -> bool {\n *self == CRR::RESET\n }\n}\n#[doc = \"Possible values of the field `CSR`\"]\n#[derive(Clone, Copy, Debug, PartialEq)]\npub enum CSRR {\n #[doc = \"Counters roll over to zero after reaching the maximum value\"]\n DISABLED,\n #[doc = \"Counters do not roll over to zero after reaching the maximum value\"]\n ENABLED,\n}\nimpl CSRR {\n #[doc = r\" Returns `true` if the bit is clear (0)\"]\n #[inline]\n pub fn bit_is_clear(&self) -> bool {\n !self.bit()\n }\n #[doc = r\" Returns `true` if the bit is set (1)\"]\n #[inline]\n pub fn bit_is_set(&self) -> bool {\n self.bit()\n }\n #[doc = r\" Value of the field as raw bits\"]\n #[inline]\n pub fn bit(&self) -> bool {\n match *self {\n CSRR::DISABLED => false,\n CSRR::ENABLED => true,\n }\n }\n #[allow(missing_docs)]\n #[doc(hidden)]\n #[inline]\n pub fn _from(value: bool) -> CSRR {\n match value {\n false => CSRR::DISABLED,\n true => CSRR::ENABLED,\n }\n }\n #[doc = \"Checks if the value of the field is `DISABLED`\"]\n #[inline]\n pub fn is_disabled(&self) -> bool {\n *self == CSRR::DISABLED\n }\n #[doc = \"Checks if the value of the field is `ENABLED`\"]\n #[inline]\n pub fn is_enabled(&self) -> bool {\n *self == CSRR::ENABLED\n }\n}\n#[doc = \"Possible values of the field `ROR`\"]\n#[derive(Clone, Copy, Debug, PartialEq)]\npub enum RORR {\n #[doc = \"MMC counters do not reset on read\"]\n DISABLED,\n #[doc = \"MMC counters reset to zero after read\"]\n ENABLED,\n}\nimpl RORR {\n #[doc = r\" Returns `true` if the bit is clear (0)\"]\n #[inline]\n pub fn bit_is_clear(&self) -> bool {\n !self.bit()\n }\n #[doc = r\" Returns `true` if the bit is set (1)\"]\n #[inline]\n pub fn bit_is_set(&self) -> bool {\n self.bit()\n }\n #[doc = r\" Value of the field as raw bits\"]\n #[inline]\n pub fn bit(&self) -> bool {\n match *self {\n RORR::DISABLED => false,\n RORR::ENABLED => true,\n }\n }\n #[allow(missing_docs)]\n #[doc(hidden)]\n #[inline]\n pub fn _from(value: bool) -> RORR {\n match value {\n false => RORR::DISABLED,\n true => RORR::ENABLED,\n }\n }\n #[doc = \"Checks if the value of the field is `DISABLED`\"]\n #[inline]\n pub fn is_disabled(&self) -> bool {\n *self == RORR::DISABLED\n }\n #[doc = \"Checks if the value of the field is `ENABLED`\"]\n #[inline]\n pub fn is_enabled(&self) -> bool {\n *self == RORR::ENABLED\n }\n}\n#[doc = \"Possible values of the field `MCF`\"]\n#[derive(Clone, Copy, Debug, PartialEq)]\npub enum MCFR {\n #[doc = \"All MMC counters update normally\"]\n UNFROZEN,\n #[doc = \"All MMC counters frozen to their current value\"]\n FROZEN,\n}\nimpl MCFR {\n #[doc = r\" Returns `true` if the bit is clear (0)\"]\n #[inline]\n pub fn bit_is_clear(&self) -> bool {\n !self.bit()\n }\n #[doc = r\" Returns `true` if the bit is set (1)\"]\n #[inline]\n pub fn 
bit_is_set(&self) -> bool {\n self.bit()\n }\n #[doc = r\" Value of the field as raw bits\"]\n #[inline]\n pub fn bit(&self) -> bool {\n match *self {\n MCFR::UNFROZEN => false,\n MCFR::FROZEN => true,\n }\n }\n #[allow(missing_docs)]\n #[doc(hidden)]\n #[inline]\n pub fn _from(value: bool) -> MCFR {\n match value {\n false => MCFR::UNFROZEN,\n true => MCFR::FROZEN,\n }\n }\n #[doc = \"Checks if the value of the field is `UNFROZEN`\"]\n #[inline]\n pub fn is_unfrozen(&self) -> bool {\n *self == MCFR::UNFROZEN\n }\n #[doc = \"Checks if the value of the field is `FROZEN`\"]\n #[inline]\n pub fn is_frozen(&self) -> bool {\n *self == MCFR::FROZEN\n }\n}\n#[doc = \"Possible values of the field `MCP`\"]\n#[derive(Clone, Copy, Debug, PartialEq)]\npub enum MCPR {\n #[doc = \"MMC counters will be preset to almost full or almost half. Cleared automatically\"]\n PRESET,\n #[doc = r\" Reserved\"]\n _Reserved(bool),\n}\nimpl MCPR {\n #[doc = r\" Returns `true` if the bit is clear (0)\"]\n #[inline]\n pub fn bit_is_clear(&self) -> bool {\n !self.bit()\n }\n #[doc = r\" Returns `true` if the bit is set (1)\"]\n #[inline]\n pub fn bit_is_set(&self) -> bool {\n self.bit()\n }\n #[doc = r\" Value of the field as raw bits\"]\n #[inline]\n pub fn bit(&self) -> bool {\n match *self {\n MCPR::PRESET => true,\n MCPR::_Reserved(bits) => bits,\n }\n }\n #[allow(missing_docs)]\n #[doc(hidden)]\n #[inline]\n pub fn _from(value: bool) -> MCPR {\n match value {\n true => MCPR::PRESET,\n i => MCPR::_Reserved(i),\n }\n }\n #[doc = \"Checks if the value of the field is `PRESET`\"]\n #[inline]\n pub fn is_preset(&self) -> bool {\n *self == MCPR::PRESET\n }\n}\n#[doc = \"Possible values of the field `MCFHP`\"]\n#[derive(Clone, Copy, Debug, PartialEq)]\npub enum MCFHPR {\n #[doc = \"When MCP is set, MMC counters are preset to almost-half value 0x7FFF_FFF0\"]\n ALMOSTHALF,\n #[doc = \"When MCP is set, MMC counters are preset to almost-full value 0xFFFF_FFF0\"]\n ALMOSTFULL,\n}\nimpl MCFHPR {\n #[doc = r\" Returns `true` if the bit is clear (0)\"]\n #[inline]\n pub fn bit_is_clear(&self) -> bool {\n !self.bit()\n }\n #[doc = r\" Returns `true` if the bit is set (1)\"]\n #[inline]\n pub fn bit_is_set(&self) -> bool {\n self.bit()\n }\n #[doc = r\" Value of the field as raw bits\"]\n #[inline]\n pub fn bit(&self) -> bool {\n match *self {\n MCFHPR::ALMOSTHALF => false,\n MCFHPR::ALMOSTFULL => true,\n }\n }\n #[allow(missing_docs)]\n #[doc(hidden)]\n #[inline]\n pub fn _from(value: bool) -> MCFHPR {\n match value {\n false => MCFHPR::ALMOSTHALF,\n true => MCFHPR::ALMOSTFULL,\n }\n }\n #[doc = \"Checks if the value of the field is `ALMOSTHALF`\"]\n #[inline]\n pub fn is_almost_half(&self) -> bool {\n *self == MCFHPR::ALMOSTHALF\n }\n #[doc = \"Checks if the value of the field is `ALMOSTFULL`\"]\n #[inline]\n pub fn is_almost_full(&self) -> bool {\n *self == MCFHPR::ALMOSTFULL\n }\n}\n#[doc = \"Values that can be written to the field `CR`\"]\npub enum CRW {\n #[doc = \"Reset all counters. Cleared automatically\"]\n RESET,\n}\nimpl CRW {\n #[allow(missing_docs)]\n #[doc(hidden)]\n #[inline]\n pub fn _bits(&self) -> bool {\n match *self {\n CRW::RESET => true,\n }\n }\n}\n#[doc = r\" Proxy\"]\npub struct _CRW<'a> {\n w: &'a mut W,\n}\nimpl<'a> _CRW<'a> {\n #[doc = r\" Writes `variant` to the field\"]\n #[inline]\n pub fn variant(self, variant: CRW) -> &'a mut W {\n {\n self.bit(variant._bits())\n }\n }\n #[doc = \"Reset all counters. 
Cleared automatically\"]\n #[inline]\n pub fn reset(self) -> &'a mut W {\n self.variant(CRW::RESET)\n }\n #[doc = r\" Sets the field bit\"]\n pub fn set_bit(self) -> &'a mut W {\n self.bit(true)\n }\n #[doc = r\" Clears the field bit\"]\n pub fn clear_bit(self) -> &'a mut W {\n self.bit(false)\n }\n #[doc = r\" Writes raw bits to the field\"]\n #[inline]\n pub fn bit(self, value: bool) -> &'a mut W {\n const MASK: bool = true;\n const OFFSET: u8 = 0;\n self.w.bits &= !((MASK as u32) << OFFSET);\n self.w.bits |= ((value & MASK) as u32) << OFFSET;\n self.w\n }\n}\n#[doc = \"Values that can be written to the field `CSR`\"]\npub enum CSRW {\n #[doc = \"Counters roll over to zero after reaching the maximum value\"]\n DISABLED,\n #[doc = \"Counters do not roll over to zero after reaching the maximum value\"]\n ENABLED,\n}\nimpl CSRW {\n #[allow(missing_docs)]\n #[doc(hidden)]\n #[inline]\n pub fn _bits(&self) -> bool {\n match *self {\n CSRW::DISABLED => false,\n CSRW::ENABLED => true,\n }\n }\n}\n#[doc = r\" Proxy\"]\npub struct _CSRW<'a> {\n w: &'a mut W,\n}\nimpl<'a> _CSRW<'a> {\n #[doc = r\" Writes `variant` to the field\"]\n #[inline]\n pub fn variant(self, variant: CSRW) -> &'a mut W {\n {\n self.bit(variant._bits())\n }\n }\n #[doc = \"Counters roll over to zero after reaching the maximum value\"]\n #[inline]\n pub fn disabled(self) -> &'a mut W {\n self.variant(CSRW::DISABLED)\n }\n #[doc = \"Counters do not roll over to zero after reaching the maximum value\"]\n #[inline]\n pub fn enabled(self) -> &'a mut W {\n self.variant(CSRW::ENABLED)\n }\n #[doc = r\" Sets the field bit\"]\n pub fn set_bit(self) -> &'a mut W {\n self.bit(true)\n }\n #[doc = r\" Clears the field bit\"]\n pub fn clear_bit(self) -> &'a mut W {\n self.bit(false)\n }\n #[doc = r\" Writes raw bits to the field\"]\n #[inline]\n pub fn bit(self, value: bool) -> &'a mut W {\n const MASK: bool = true;\n const OFFSET: u8 = 1;\n self.w.bits &= !((MASK as u32) << OFFSET);\n self.w.bits |= ((value & MASK) as u32) << OFFSET;\n self.w\n }\n}\n#[doc = \"Values that can be written to the field `ROR`\"]\npub enum RORW {\n #[doc = \"MMC counters do not reset on read\"]\n DISABLED,\n #[doc = \"MMC counters reset to zero after read\"]\n ENABLED,\n}\nimpl RORW {\n #[allow(missing_docs)]\n #[doc(hidden)]\n #[inline]\n pub fn _bits(&self) -> bool {\n match *self {\n RORW::DISABLED => false,\n RORW::ENABLED => true,\n }\n }\n}\n#[doc = r\" Proxy\"]\npub struct _RORW<'a> {\n w: &'a mut W,\n}\nimpl<'a> _RORW<'a> {\n #[doc = r\" Writes `variant` to the field\"]\n #[inline]\n pub fn variant(self, variant: RORW) -> &'a mut W {\n {\n self.bit(variant._bits())\n }\n }\n #[doc = \"MMC counters do not reset on read\"]\n #[inline]\n pub fn disabled(self) -> &'a mut W {\n self.variant(RORW::DISABLED)\n }\n #[doc = \"MMC counters reset to zero after read\"]\n #[inline]\n pub fn enabled(self) -> &'a mut W {\n self.variant(RORW::ENABLED)\n }\n #[doc = r\" Sets the field bit\"]\n pub fn set_bit(self) -> &'a mut W {\n self.bit(true)\n }\n #[doc = r\" Clears the field bit\"]\n pub fn clear_bit(self) -> &'a mut W {\n self.bit(false)\n }\n #[doc = r\" Writes raw bits to the field\"]\n #[inline]\n pub fn bit(self, value: bool) -> &'a mut W {\n const MASK: bool = true;\n const OFFSET: u8 = 2;\n self.w.bits &= !((MASK as u32) << OFFSET);\n self.w.bits |= ((value & MASK) as u32) << OFFSET;\n self.w\n }\n}\n#[doc = \"Values that can be written to the field `MCF`\"]\npub enum MCFW {\n #[doc = \"All MMC counters update normally\"]\n UNFROZEN,\n #[doc = \"All MMC 
counters frozen to their current value\"]\n FROZEN,\n}\nimpl MCFW {\n #[allow(missing_docs)]\n #[doc(hidden)]\n #[inline]\n pub fn _bits(&self) -> bool {\n match *self {\n MCFW::UNFROZEN => false,\n MCFW::FROZEN => true,\n }\n }\n}\n#[doc = r\" Proxy\"]\npub struct _MCFW<'a> {\n w: &'a mut W,\n}\nimpl<'a> _MCFW<'a> {\n #[doc = r\" Writes `variant` to the field\"]\n #[inline]\n pub fn variant(self, variant: MCFW) -> &'a mut W {\n {\n self.bit(variant._bits())\n }\n }\n #[doc = \"All MMC counters update normally\"]\n #[inline]\n pub fn unfrozen(self) -> &'a mut W {\n self.variant(MCFW::UNFROZEN)\n }\n #[doc = \"All MMC counters frozen to their current value\"]\n #[inline]\n pub fn frozen(self) -> &'a mut W {\n self.variant(MCFW::FROZEN)\n }\n #[doc = r\" Sets the field bit\"]\n pub fn set_bit(self) -> &'a mut W {\n self.bit(true)\n }\n #[doc = r\" Clears the field bit\"]\n pub fn clear_bit(self) -> &'a mut W {\n self.bit(false)\n }\n #[doc = r\" Writes raw bits to the field\"]\n #[inline]\n pub fn bit(self, value: bool) -> &'a mut W {\n const MASK: bool = true;\n const OFFSET: u8 = 3;\n self.w.bits &= !((MASK as u32) << OFFSET);\n self.w.bits |= ((value & MASK) as u32) << OFFSET;\n self.w\n }\n}\n#[doc = \"Values that can be written to the field `MCP`\"]\npub enum MCPW {\n #[doc = \"MMC counters will be preset to almost full or almost half. Cleared automatically\"]\n PRESET,\n}\nimpl MCPW {\n #[allow(missing_docs)]\n #[doc(hidden)]\n #[inline]\n pub fn _bits(&self) -> bool {\n match *self {\n MCPW::PRESET => true,\n }\n }\n}\n#[doc = r\" Proxy\"]\npub struct _MCPW<'a> {\n w: &'a mut W,\n}\nimpl<'a> _MCPW<'a> {\n #[doc = r\" Writes `variant` to the field\"]\n #[inline]\n pub fn variant(self, variant: MCPW) -> &'a mut W {\n {\n self.bit(variant._bits())\n }\n }\n #[doc = \"MMC counters will be preset to almost full or almost half. 
Cleared automatically\"]\n #[inline]\n pub fn preset(self) -> &'a mut W {\n self.variant(MCPW::PRESET)\n }\n #[doc = r\" Sets the field bit\"]\n pub fn set_bit(self) -> &'a mut W {\n self.bit(true)\n }\n #[doc = r\" Clears the field bit\"]\n pub fn clear_bit(self) -> &'a mut W {\n self.bit(false)\n }\n #[doc = r\" Writes raw bits to the field\"]\n #[inline]\n pub fn bit(self, value: bool) -> &'a mut W {\n const MASK: bool = true;\n const OFFSET: u8 = 4;\n self.w.bits &= !((MASK as u32) << OFFSET);\n self.w.bits |= ((value & MASK) as u32) << OFFSET;\n self.w\n }\n}\n#[doc = \"Values that can be written to the field `MCFHP`\"]\npub enum MCFHPW {\n #[doc = \"When MCP is set, MMC counters are preset to almost-half value 0x7FFF_FFF0\"]\n ALMOSTHALF,\n #[doc = \"When MCP is set, MMC counters are preset to almost-full value 0xFFFF_FFF0\"]\n ALMOSTFULL,\n}\nimpl MCFHPW {\n #[allow(missing_docs)]\n #[doc(hidden)]\n #[inline]\n pub fn _bits(&self) -> bool {\n match *self {\n MCFHPW::ALMOSTHALF => false,\n MCFHPW::ALMOSTFULL => true,\n }\n }\n}\n#[doc = r\" Proxy\"]\npub struct _MCFHPW<'a> {\n w: &'a mut W,\n}\nimpl<'a> _MCFHPW<'a> {\n #[doc = r\" Writes `variant` to the field\"]\n #[inline]\n pub fn variant(self, variant: MCFHPW) -> &'a mut W {\n {\n self.bit(variant._bits())\n }\n }\n #[doc = \"When MCP is set, MMC counters are preset to almost-half value 0x7FFF_FFF0\"]\n #[inline]\n pub fn almost_half(self) -> &'a mut W {\n self.variant(MCFHPW::ALMOSTHALF)\n }\n #[doc = \"When MCP is set, MMC counters are preset to almost-full value 0xFFFF_FFF0\"]\n #[inline]\n pub fn almost_full(self) -> &'a mut W {\n self.variant(MCFHPW::ALMOSTFULL)\n }\n #[doc = r\" Sets the field bit\"]\n pub fn set_bit(self) -> &'a mut W {\n self.bit(true)\n }\n #[doc = r\" Clears the field bit\"]\n pub fn clear_bit(self) -> &'a mut W {\n self.bit(false)\n }\n #[doc = r\" Writes raw bits to the field\"]\n #[inline]\n pub fn bit(self, value: bool) -> &'a mut W {\n const MASK: bool = true;\n const OFFSET: u8 = 5;\n self.w.bits &= !((MASK as u32) << OFFSET);\n self.w.bits |= ((value & MASK) as u32) << OFFSET;\n self.w\n }\n}\nimpl R {\n #[doc = r\" Value of the register as raw bits\"]\n #[inline]\n pub fn bits(&self) -> u32 {\n self.bits\n }\n #[doc = \"Bit 0 - CR\"]\n #[inline]\n pub fn cr(&self) -> CRR {\n CRR::_from({\n const MASK: bool = true;\n const OFFSET: u8 = 0;\n ((self.bits >> OFFSET) & MASK as u32) != 0\n })\n }\n #[doc = \"Bit 1 - CSR\"]\n #[inline]\n pub fn csr(&self) -> CSRR {\n CSRR::_from({\n const MASK: bool = true;\n const OFFSET: u8 = 1;\n ((self.bits >> OFFSET) & MASK as u32) != 0\n })\n }\n #[doc = \"Bit 2 - ROR\"]\n #[inline]\n pub fn ror(&self) -> RORR {\n RORR::_from({\n const MASK: bool = true;\n const OFFSET: u8 = 2;\n ((self.bits >> OFFSET) & MASK as u32) != 0\n })\n }\n #[doc = \"Bit 3 - MCF\"]\n #[inline]\n pub fn mcf(&self) -> MCFR {\n MCFR::_from({\n const MASK: bool = true;\n const OFFSET: u8 = 3;\n ((self.bits >> OFFSET) & MASK as u32) != 0\n })\n }\n #[doc = \"Bit 4 - MCP\"]\n #[inline]\n pub fn mcp(&self) -> MCPR {\n MCPR::_from({\n const MASK: bool = true;\n const OFFSET: u8 = 4;\n ((self.bits >> OFFSET) & MASK as u32) != 0\n })\n }\n #[doc = \"Bit 5 - MCFHP\"]\n #[inline]\n pub fn mcfhp(&self) -> MCFHPR {\n MCFHPR::_from({\n const MASK: bool = true;\n const OFFSET: u8 = 5;\n ((self.bits >> OFFSET) & MASK as u32) != 0\n })\n }\n}\nimpl W {\n #[doc = r\" Reset value of the register\"]\n #[inline]\n pub fn reset_value() -> W {\n W { bits: 0 }\n }\n #[doc = r\" Writes raw bits to the 
register\"]\n #[inline]\n pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {\n self.bits = bits;\n self\n }\n #[doc = \"Bit 0 - CR\"]\n #[inline]\n pub fn cr(&mut self) -> _CRW {\n _CRW { w: self }\n }\n #[doc = \"Bit 1 - CSR\"]\n #[inline]\n pub fn csr(&mut self) -> _CSRW {\n _CSRW { w: self }\n }\n #[doc = \"Bit 2 - ROR\"]\n #[inline]\n pub fn ror(&mut self) -> _RORW {\n _RORW { w: self }\n }\n #[doc = \"Bit 3 - MCF\"]\n #[inline]\n pub fn mcf(&mut self) -> _MCFW {\n _MCFW { w: self }\n }\n #[doc = \"Bit 4 - MCP\"]\n #[inline]\n pub fn mcp(&mut self) -> _MCPW {\n _MCPW { w: self }\n }\n #[doc = \"Bit 5 - MCFHP\"]\n #[inline]\n pub fn mcfhp(&mut self) -> _MCFHPW {\n _MCFHPW { w: self }\n }\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1168,"cells":{"blob_id":{"kind":"string","value":"e293abe837acee93c3c204016fac4b18bdcef1cf"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"astro/rust-lpc43xx"},"path":{"kind":"string","value":"/src/scu/pintsel0/mod.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":30148,"string":"30,148"},"score":{"kind":"number","value":2.53125,"string":"2.53125"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"#[doc = r\" Value read from the register\"]\npub struct R {\n bits: u32,\n}\n#[doc = r\" Value to write to the register\"]\npub struct W {\n bits: u32,\n}\nimpl super::PINTSEL0 {\n #[doc = r\" Modifies the contents of the register\"]\n #[inline]\n pub fn modify(&self, f: F)\n where\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n {\n let bits = self.register.get();\n let r = R { bits: bits };\n let mut w = W { bits: bits };\n f(&r, &mut w);\n self.register.set(w.bits);\n }\n #[doc = r\" Reads the contents of the register\"]\n #[inline]\n pub fn read(&self) -> R {\n R {\n bits: self.register.get(),\n }\n }\n #[doc = r\" Writes to the register\"]\n #[inline]\n pub fn write(&self, f: F)\n where\n F: FnOnce(&mut W) -> &mut W,\n {\n let mut w = W::reset_value();\n f(&mut w);\n self.register.set(w.bits);\n }\n #[doc = r\" Writes the reset value to the register\"]\n #[inline]\n pub fn reset(&self) {\n self.write(|w| w)\n }\n}\n#[doc = r\" Value of the field\"]\npub struct INTPIN0R {\n bits: u8,\n}\nimpl INTPIN0R {\n #[doc = r\" Value of the field as raw bits\"]\n #[inline]\n pub fn bits(&self) -> u8 {\n self.bits\n }\n}\n#[doc = \"Possible values of the field `PORTSEL0`\"]\n#[derive(Clone, Copy, Debug, PartialEq)]\npub enum PORTSEL0R {\n #[doc = \"GPIO Port 0\"]\n GPIO_PORT_0,\n #[doc = \"GPIO Port 1\"]\n GPIO_PORT_1,\n #[doc = \"GPIO Port 2\"]\n GPIO_PORT_2,\n #[doc = \"GPIO Port 3\"]\n GPIO_PORT_3,\n #[doc = \"GPIO Port 4\"]\n GPIO_PORT_4,\n #[doc = \"GPIO Port 5\"]\n GPIO_PORT_5,\n #[doc = \"GPIO Port 6\"]\n GPIO_PORT_6,\n #[doc = \"GPIO Port 7\"]\n GPIO_PORT_7,\n}\nimpl PORTSEL0R {\n #[doc = r\" Value of the field as raw bits\"]\n #[inline]\n pub fn bits(&self) -> u8 {\n match *self {\n PORTSEL0R::GPIO_PORT_0 => 0,\n PORTSEL0R::GPIO_PORT_1 => 1,\n PORTSEL0R::GPIO_PORT_2 => 2,\n PORTSEL0R::GPIO_PORT_3 => 3,\n PORTSEL0R::GPIO_PORT_4 => 4,\n PORTSEL0R::GPIO_PORT_5 => 5,\n PORTSEL0R::GPIO_PORT_6 => 6,\n PORTSEL0R::GPIO_PORT_7 => 7,\n }\n }\n #[allow(missing_docs)]\n #[doc(hidden)]\n #[inline]\n pub fn _from(value: u8) -> PORTSEL0R {\n match value {\n 0 => 
PORTSEL0R::GPIO_PORT_0,\n 1 => PORTSEL0R::GPIO_PORT_1,\n 2 => PORTSEL0R::GPIO_PORT_2,\n 3 => PORTSEL0R::GPIO_PORT_3,\n 4 => PORTSEL0R::GPIO_PORT_4,\n 5 => PORTSEL0R::GPIO_PORT_5,\n 6 => PORTSEL0R::GPIO_PORT_6,\n 7 => PORTSEL0R::GPIO_PORT_7,\n _ => unreachable!(),\n }\n }\n #[doc = \"Checks if the value of the field is `GPIO_PORT_0`\"]\n #[inline]\n pub fn is_gpio_port_0(&self) -> bool {\n *self == PORTSEL0R::GPIO_PORT_0\n }\n #[doc = \"Checks if the value of the field is `GPIO_PORT_1`\"]\n #[inline]\n pub fn is_gpio_port_1(&self) -> bool {\n *self == PORTSEL0R::GPIO_PORT_1\n }\n #[doc = \"Checks if the value of the field is `GPIO_PORT_2`\"]\n #[inline]\n pub fn is_gpio_port_2(&self) -> bool {\n *self == PORTSEL0R::GPIO_PORT_2\n }\n #[doc = \"Checks if the value of the field is `GPIO_PORT_3`\"]\n #[inline]\n pub fn is_gpio_port_3(&self) -> bool {\n *self == PORTSEL0R::GPIO_PORT_3\n }\n #[doc = \"Checks if the value of the field is `GPIO_PORT_4`\"]\n #[inline]\n pub fn is_gpio_port_4(&self) -> bool {\n *self == PORTSEL0R::GPIO_PORT_4\n }\n #[doc = \"Checks if the value of the field is `GPIO_PORT_5`\"]\n #[inline]\n pub fn is_gpio_port_5(&self) -> bool {\n *self == PORTSEL0R::GPIO_PORT_5\n }\n #[doc = \"Checks if the value of the field is `GPIO_PORT_6`\"]\n #[inline]\n pub fn is_gpio_port_6(&self) -> bool {\n *self == PORTSEL0R::GPIO_PORT_6\n }\n #[doc = \"Checks if the value of the field is `GPIO_PORT_7`\"]\n #[inline]\n pub fn is_gpio_port_7(&self) -> bool {\n *self == PORTSEL0R::GPIO_PORT_7\n }\n}\n#[doc = r\" Value of the field\"]\npub struct INTPIN1R {\n bits: u8,\n}\nimpl INTPIN1R {\n #[doc = r\" Value of the field as raw bits\"]\n #[inline]\n pub fn bits(&self) -> u8 {\n self.bits\n }\n}\n#[doc = \"Possible values of the field `PORTSEL1`\"]\n#[derive(Clone, Copy, Debug, PartialEq)]\npub enum PORTSEL1R {\n #[doc = \"GPIO Port 0\"]\n GPIO_PORT_0,\n #[doc = \"GPIO Port 1\"]\n GPIO_PORT_1,\n #[doc = \"GPIO Port 2\"]\n GPIO_PORT_2,\n #[doc = \"GPIO Port 3\"]\n GPIO_PORT_3,\n #[doc = \"GPIO Port 4\"]\n GPIO_PORT_4,\n #[doc = \"GPIO Port 5\"]\n GPIO_PORT_5,\n #[doc = \"GPIO Port 6\"]\n GPIO_PORT_6,\n #[doc = \"GPIO Port 7\"]\n GPIO_PORT_7,\n}\nimpl PORTSEL1R {\n #[doc = r\" Value of the field as raw bits\"]\n #[inline]\n pub fn bits(&self) -> u8 {\n match *self {\n PORTSEL1R::GPIO_PORT_0 => 0,\n PORTSEL1R::GPIO_PORT_1 => 1,\n PORTSEL1R::GPIO_PORT_2 => 2,\n PORTSEL1R::GPIO_PORT_3 => 3,\n PORTSEL1R::GPIO_PORT_4 => 4,\n PORTSEL1R::GPIO_PORT_5 => 5,\n PORTSEL1R::GPIO_PORT_6 => 6,\n PORTSEL1R::GPIO_PORT_7 => 7,\n }\n }\n #[allow(missing_docs)]\n #[doc(hidden)]\n #[inline]\n pub fn _from(value: u8) -> PORTSEL1R {\n match value {\n 0 => PORTSEL1R::GPIO_PORT_0,\n 1 => PORTSEL1R::GPIO_PORT_1,\n 2 => PORTSEL1R::GPIO_PORT_2,\n 3 => PORTSEL1R::GPIO_PORT_3,\n 4 => PORTSEL1R::GPIO_PORT_4,\n 5 => PORTSEL1R::GPIO_PORT_5,\n 6 => PORTSEL1R::GPIO_PORT_6,\n 7 => PORTSEL1R::GPIO_PORT_7,\n _ => unreachable!(),\n }\n }\n #[doc = \"Checks if the value of the field is `GPIO_PORT_0`\"]\n #[inline]\n pub fn is_gpio_port_0(&self) -> bool {\n *self == PORTSEL1R::GPIO_PORT_0\n }\n #[doc = \"Checks if the value of the field is `GPIO_PORT_1`\"]\n #[inline]\n pub fn is_gpio_port_1(&self) -> bool {\n *self == PORTSEL1R::GPIO_PORT_1\n }\n #[doc = \"Checks if the value of the field is `GPIO_PORT_2`\"]\n #[inline]\n pub fn is_gpio_port_2(&self) -> bool {\n *self == PORTSEL1R::GPIO_PORT_2\n }\n #[doc = \"Checks if the value of the field is `GPIO_PORT_3`\"]\n #[inline]\n pub fn is_gpio_port_3(&self) -> bool {\n *self == 
PORTSEL1R::GPIO_PORT_3\n }\n #[doc = \"Checks if the value of the field is `GPIO_PORT_4`\"]\n #[inline]\n pub fn is_gpio_port_4(&self) -> bool {\n *self == PORTSEL1R::GPIO_PORT_4\n }\n #[doc = \"Checks if the value of the field is `GPIO_PORT_5`\"]\n #[inline]\n pub fn is_gpio_port_5(&self) -> bool {\n *self == PORTSEL1R::GPIO_PORT_5\n }\n #[doc = \"Checks if the value of the field is `GPIO_PORT_6`\"]\n #[inline]\n pub fn is_gpio_port_6(&self) -> bool {\n *self == PORTSEL1R::GPIO_PORT_6\n }\n #[doc = \"Checks if the value of the field is `GPIO_PORT_7`\"]\n #[inline]\n pub fn is_gpio_port_7(&self) -> bool {\n *self == PORTSEL1R::GPIO_PORT_7\n }\n}\n#[doc = r\" Value of the field\"]\npub struct INTPIN2R {\n bits: u8,\n}\nimpl INTPIN2R {\n #[doc = r\" Value of the field as raw bits\"]\n #[inline]\n pub fn bits(&self) -> u8 {\n self.bits\n }\n}\n#[doc = \"Possible values of the field `PORTSEL2`\"]\n#[derive(Clone, Copy, Debug, PartialEq)]\npub enum PORTSEL2R {\n #[doc = \"GPIO Port 0\"]\n GPIO_PORT_0,\n #[doc = \"GPIO Port 1\"]\n GPIO_PORT_1,\n #[doc = \"GPIO Port 2\"]\n GPIO_PORT_2,\n #[doc = \"GPIO Port 3\"]\n GPIO_PORT_3,\n #[doc = \"GPIO Port 4\"]\n GPIO_PORT_4,\n #[doc = \"GPIO Port 5\"]\n GPIO_PORT_5,\n #[doc = \"GPIO Port 6\"]\n GPIO_PORT_6,\n #[doc = \"GPIO Port 7\"]\n GPIO_PORT_7,\n}\nimpl PORTSEL2R {\n #[doc = r\" Value of the field as raw bits\"]\n #[inline]\n pub fn bits(&self) -> u8 {\n match *self {\n PORTSEL2R::GPIO_PORT_0 => 0,\n PORTSEL2R::GPIO_PORT_1 => 1,\n PORTSEL2R::GPIO_PORT_2 => 2,\n PORTSEL2R::GPIO_PORT_3 => 3,\n PORTSEL2R::GPIO_PORT_4 => 4,\n PORTSEL2R::GPIO_PORT_5 => 5,\n PORTSEL2R::GPIO_PORT_6 => 6,\n PORTSEL2R::GPIO_PORT_7 => 7,\n }\n }\n #[allow(missing_docs)]\n #[doc(hidden)]\n #[inline]\n pub fn _from(value: u8) -> PORTSEL2R {\n match value {\n 0 => PORTSEL2R::GPIO_PORT_0,\n 1 => PORTSEL2R::GPIO_PORT_1,\n 2 => PORTSEL2R::GPIO_PORT_2,\n 3 => PORTSEL2R::GPIO_PORT_3,\n 4 => PORTSEL2R::GPIO_PORT_4,\n 5 => PORTSEL2R::GPIO_PORT_5,\n 6 => PORTSEL2R::GPIO_PORT_6,\n 7 => PORTSEL2R::GPIO_PORT_7,\n _ => unreachable!(),\n }\n }\n #[doc = \"Checks if the value of the field is `GPIO_PORT_0`\"]\n #[inline]\n pub fn is_gpio_port_0(&self) -> bool {\n *self == PORTSEL2R::GPIO_PORT_0\n }\n #[doc = \"Checks if the value of the field is `GPIO_PORT_1`\"]\n #[inline]\n pub fn is_gpio_port_1(&self) -> bool {\n *self == PORTSEL2R::GPIO_PORT_1\n }\n #[doc = \"Checks if the value of the field is `GPIO_PORT_2`\"]\n #[inline]\n pub fn is_gpio_port_2(&self) -> bool {\n *self == PORTSEL2R::GPIO_PORT_2\n }\n #[doc = \"Checks if the value of the field is `GPIO_PORT_3`\"]\n #[inline]\n pub fn is_gpio_port_3(&self) -> bool {\n *self == PORTSEL2R::GPIO_PORT_3\n }\n #[doc = \"Checks if the value of the field is `GPIO_PORT_4`\"]\n #[inline]\n pub fn is_gpio_port_4(&self) -> bool {\n *self == PORTSEL2R::GPIO_PORT_4\n }\n #[doc = \"Checks if the value of the field is `GPIO_PORT_5`\"]\n #[inline]\n pub fn is_gpio_port_5(&self) -> bool {\n *self == PORTSEL2R::GPIO_PORT_5\n }\n #[doc = \"Checks if the value of the field is `GPIO_PORT_6`\"]\n #[inline]\n pub fn is_gpio_port_6(&self) -> bool {\n *self == PORTSEL2R::GPIO_PORT_6\n }\n #[doc = \"Checks if the value of the field is `GPIO_PORT_7`\"]\n #[inline]\n pub fn is_gpio_port_7(&self) -> bool {\n *self == PORTSEL2R::GPIO_PORT_7\n }\n}\n#[doc = r\" Value of the field\"]\npub struct INTPIN3R {\n bits: u8,\n}\nimpl INTPIN3R {\n #[doc = r\" Value of the field as raw bits\"]\n #[inline]\n pub fn bits(&self) -> u8 {\n self.bits\n }\n}\n#[doc = \"Possible values 
of the field `PORTSEL3`\"]\n#[derive(Clone, Copy, Debug, PartialEq)]\npub enum PORTSEL3R {\n #[doc = \"GPIO Port 0\"]\n GPIO_PORT_0,\n #[doc = \"GPIO Port 1\"]\n GPIO_PORT_1,\n #[doc = \"GPIO Port 2\"]\n GPIO_PORT_2,\n #[doc = \"GPIO Port 3\"]\n GPIO_PORT_3,\n #[doc = \"GPIO Port 4\"]\n GPIO_PORT_4,\n #[doc = \"GPIO Port 5\"]\n GPIO_PORT_5,\n #[doc = \"GPIO Port 6\"]\n GPIO_PORT_6,\n #[doc = \"GPIO Port 7\"]\n GPIO_PORT_7,\n}\nimpl PORTSEL3R {\n #[doc = r\" Value of the field as raw bits\"]\n #[inline]\n pub fn bits(&self) -> u8 {\n match *self {\n PORTSEL3R::GPIO_PORT_0 => 0,\n PORTSEL3R::GPIO_PORT_1 => 1,\n PORTSEL3R::GPIO_PORT_2 => 2,\n PORTSEL3R::GPIO_PORT_3 => 3,\n PORTSEL3R::GPIO_PORT_4 => 4,\n PORTSEL3R::GPIO_PORT_5 => 5,\n PORTSEL3R::GPIO_PORT_6 => 6,\n PORTSEL3R::GPIO_PORT_7 => 7,\n }\n }\n #[allow(missing_docs)]\n #[doc(hidden)]\n #[inline]\n pub fn _from(value: u8) -> PORTSEL3R {\n match value {\n 0 => PORTSEL3R::GPIO_PORT_0,\n 1 => PORTSEL3R::GPIO_PORT_1,\n 2 => PORTSEL3R::GPIO_PORT_2,\n 3 => PORTSEL3R::GPIO_PORT_3,\n 4 => PORTSEL3R::GPIO_PORT_4,\n 5 => PORTSEL3R::GPIO_PORT_5,\n 6 => PORTSEL3R::GPIO_PORT_6,\n 7 => PORTSEL3R::GPIO_PORT_7,\n _ => unreachable!(),\n }\n }\n #[doc = \"Checks if the value of the field is `GPIO_PORT_0`\"]\n #[inline]\n pub fn is_gpio_port_0(&self) -> bool {\n *self == PORTSEL3R::GPIO_PORT_0\n }\n #[doc = \"Checks if the value of the field is `GPIO_PORT_1`\"]\n #[inline]\n pub fn is_gpio_port_1(&self) -> bool {\n *self == PORTSEL3R::GPIO_PORT_1\n }\n #[doc = \"Checks if the value of the field is `GPIO_PORT_2`\"]\n #[inline]\n pub fn is_gpio_port_2(&self) -> bool {\n *self == PORTSEL3R::GPIO_PORT_2\n }\n #[doc = \"Checks if the value of the field is `GPIO_PORT_3`\"]\n #[inline]\n pub fn is_gpio_port_3(&self) -> bool {\n *self == PORTSEL3R::GPIO_PORT_3\n }\n #[doc = \"Checks if the value of the field is `GPIO_PORT_4`\"]\n #[inline]\n pub fn is_gpio_port_4(&self) -> bool {\n *self == PORTSEL3R::GPIO_PORT_4\n }\n #[doc = \"Checks if the value of the field is `GPIO_PORT_5`\"]\n #[inline]\n pub fn is_gpio_port_5(&self) -> bool {\n *self == PORTSEL3R::GPIO_PORT_5\n }\n #[doc = \"Checks if the value of the field is `GPIO_PORT_6`\"]\n #[inline]\n pub fn is_gpio_port_6(&self) -> bool {\n *self == PORTSEL3R::GPIO_PORT_6\n }\n #[doc = \"Checks if the value of the field is `GPIO_PORT_7`\"]\n #[inline]\n pub fn is_gpio_port_7(&self) -> bool {\n *self == PORTSEL3R::GPIO_PORT_7\n }\n}\n#[doc = r\" Proxy\"]\npub struct _INTPIN0W<'a> {\n w: &'a mut W,\n}\nimpl<'a> _INTPIN0W<'a> {\n #[doc = r\" Writes raw bits to the field\"]\n #[inline]\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n const MASK: u8 = 31;\n const OFFSET: u8 = 0;\n self.w.bits &= !((MASK as u32) << OFFSET);\n self.w.bits |= ((value & MASK) as u32) << OFFSET;\n self.w\n }\n}\n#[doc = \"Values that can be written to the field `PORTSEL0`\"]\npub enum PORTSEL0W {\n #[doc = \"GPIO Port 0\"]\n GPIO_PORT_0,\n #[doc = \"GPIO Port 1\"]\n GPIO_PORT_1,\n #[doc = \"GPIO Port 2\"]\n GPIO_PORT_2,\n #[doc = \"GPIO Port 3\"]\n GPIO_PORT_3,\n #[doc = \"GPIO Port 4\"]\n GPIO_PORT_4,\n #[doc = \"GPIO Port 5\"]\n GPIO_PORT_5,\n #[doc = \"GPIO Port 6\"]\n GPIO_PORT_6,\n #[doc = \"GPIO Port 7\"]\n GPIO_PORT_7,\n}\nimpl PORTSEL0W {\n #[allow(missing_docs)]\n #[doc(hidden)]\n #[inline]\n pub fn _bits(&self) -> u8 {\n match *self {\n PORTSEL0W::GPIO_PORT_0 => 0,\n PORTSEL0W::GPIO_PORT_1 => 1,\n PORTSEL0W::GPIO_PORT_2 => 2,\n PORTSEL0W::GPIO_PORT_3 => 3,\n PORTSEL0W::GPIO_PORT_4 => 4,\n PORTSEL0W::GPIO_PORT_5 => 5,\n 
PORTSEL0W::GPIO_PORT_6 => 6,\n PORTSEL0W::GPIO_PORT_7 => 7,\n }\n }\n}\n#[doc = r\" Proxy\"]\npub struct _PORTSEL0W<'a> {\n w: &'a mut W,\n}\nimpl<'a> _PORTSEL0W<'a> {\n #[doc = r\" Writes `variant` to the field\"]\n #[inline]\n pub fn variant(self, variant: PORTSEL0W) -> &'a mut W {\n {\n self.bits(variant._bits())\n }\n }\n #[doc = \"GPIO Port 0\"]\n #[inline]\n pub fn gpio_port_0(self) -> &'a mut W {\n self.variant(PORTSEL0W::GPIO_PORT_0)\n }\n #[doc = \"GPIO Port 1\"]\n #[inline]\n pub fn gpio_port_1(self) -> &'a mut W {\n self.variant(PORTSEL0W::GPIO_PORT_1)\n }\n #[doc = \"GPIO Port 2\"]\n #[inline]\n pub fn gpio_port_2(self) -> &'a mut W {\n self.variant(PORTSEL0W::GPIO_PORT_2)\n }\n #[doc = \"GPIO Port 3\"]\n #[inline]\n pub fn gpio_port_3(self) -> &'a mut W {\n self.variant(PORTSEL0W::GPIO_PORT_3)\n }\n #[doc = \"GPIO Port 4\"]\n #[inline]\n pub fn gpio_port_4(self) -> &'a mut W {\n self.variant(PORTSEL0W::GPIO_PORT_4)\n }\n #[doc = \"GPIO Port 5\"]\n #[inline]\n pub fn gpio_port_5(self) -> &'a mut W {\n self.variant(PORTSEL0W::GPIO_PORT_5)\n }\n #[doc = \"GPIO Port 6\"]\n #[inline]\n pub fn gpio_port_6(self) -> &'a mut W {\n self.variant(PORTSEL0W::GPIO_PORT_6)\n }\n #[doc = \"GPIO Port 7\"]\n #[inline]\n pub fn gpio_port_7(self) -> &'a mut W {\n self.variant(PORTSEL0W::GPIO_PORT_7)\n }\n #[doc = r\" Writes raw bits to the field\"]\n #[inline]\n pub fn bits(self, value: u8) -> &'a mut W {\n const MASK: u8 = 7;\n const OFFSET: u8 = 5;\n self.w.bits &= !((MASK as u32) << OFFSET);\n self.w.bits |= ((value & MASK) as u32) << OFFSET;\n self.w\n }\n}\n#[doc = r\" Proxy\"]\npub struct _INTPIN1W<'a> {\n w: &'a mut W,\n}\nimpl<'a> _INTPIN1W<'a> {\n #[doc = r\" Writes raw bits to the field\"]\n #[inline]\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n const MASK: u8 = 31;\n const OFFSET: u8 = 8;\n self.w.bits &= !((MASK as u32) << OFFSET);\n self.w.bits |= ((value & MASK) as u32) << OFFSET;\n self.w\n }\n}\n#[doc = \"Values that can be written to the field `PORTSEL1`\"]\npub enum PORTSEL1W {\n #[doc = \"GPIO Port 0\"]\n GPIO_PORT_0,\n #[doc = \"GPIO Port 1\"]\n GPIO_PORT_1,\n #[doc = \"GPIO Port 2\"]\n GPIO_PORT_2,\n #[doc = \"GPIO Port 3\"]\n GPIO_PORT_3,\n #[doc = \"GPIO Port 4\"]\n GPIO_PORT_4,\n #[doc = \"GPIO Port 5\"]\n GPIO_PORT_5,\n #[doc = \"GPIO Port 6\"]\n GPIO_PORT_6,\n #[doc = \"GPIO Port 7\"]\n GPIO_PORT_7,\n}\nimpl PORTSEL1W {\n #[allow(missing_docs)]\n #[doc(hidden)]\n #[inline]\n pub fn _bits(&self) -> u8 {\n match *self {\n PORTSEL1W::GPIO_PORT_0 => 0,\n PORTSEL1W::GPIO_PORT_1 => 1,\n PORTSEL1W::GPIO_PORT_2 => 2,\n PORTSEL1W::GPIO_PORT_3 => 3,\n PORTSEL1W::GPIO_PORT_4 => 4,\n PORTSEL1W::GPIO_PORT_5 => 5,\n PORTSEL1W::GPIO_PORT_6 => 6,\n PORTSEL1W::GPIO_PORT_7 => 7,\n }\n }\n}\n#[doc = r\" Proxy\"]\npub struct _PORTSEL1W<'a> {\n w: &'a mut W,\n}\nimpl<'a> _PORTSEL1W<'a> {\n #[doc = r\" Writes `variant` to the field\"]\n #[inline]\n pub fn variant(self, variant: PORTSEL1W) -> &'a mut W {\n {\n self.bits(variant._bits())\n }\n }\n #[doc = \"GPIO Port 0\"]\n #[inline]\n pub fn gpio_port_0(self) -> &'a mut W {\n self.variant(PORTSEL1W::GPIO_PORT_0)\n }\n #[doc = \"GPIO Port 1\"]\n #[inline]\n pub fn gpio_port_1(self) -> &'a mut W {\n self.variant(PORTSEL1W::GPIO_PORT_1)\n }\n #[doc = \"GPIO Port 2\"]\n #[inline]\n pub fn gpio_port_2(self) -> &'a mut W {\n self.variant(PORTSEL1W::GPIO_PORT_2)\n }\n #[doc = \"GPIO Port 3\"]\n #[inline]\n pub fn gpio_port_3(self) -> &'a mut W {\n self.variant(PORTSEL1W::GPIO_PORT_3)\n }\n #[doc = \"GPIO Port 4\"]\n #[inline]\n pub fn 
gpio_port_4(self) -> &'a mut W {\n self.variant(PORTSEL1W::GPIO_PORT_4)\n }\n #[doc = \"GPIO Port 5\"]\n #[inline]\n pub fn gpio_port_5(self) -> &'a mut W {\n self.variant(PORTSEL1W::GPIO_PORT_5)\n }\n #[doc = \"GPIO Port 6\"]\n #[inline]\n pub fn gpio_port_6(self) -> &'a mut W {\n self.variant(PORTSEL1W::GPIO_PORT_6)\n }\n #[doc = \"GPIO Port 7\"]\n #[inline]\n pub fn gpio_port_7(self) -> &'a mut W {\n self.variant(PORTSEL1W::GPIO_PORT_7)\n }\n #[doc = r\" Writes raw bits to the field\"]\n #[inline]\n pub fn bits(self, value: u8) -> &'a mut W {\n const MASK: u8 = 7;\n const OFFSET: u8 = 13;\n self.w.bits &= !((MASK as u32) << OFFSET);\n self.w.bits |= ((value & MASK) as u32) << OFFSET;\n self.w\n }\n}\n#[doc = r\" Proxy\"]\npub struct _INTPIN2W<'a> {\n w: &'a mut W,\n}\nimpl<'a> _INTPIN2W<'a> {\n #[doc = r\" Writes raw bits to the field\"]\n #[inline]\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n const MASK: u8 = 31;\n const OFFSET: u8 = 16;\n self.w.bits &= !((MASK as u32) << OFFSET);\n self.w.bits |= ((value & MASK) as u32) << OFFSET;\n self.w\n }\n}\n#[doc = \"Values that can be written to the field `PORTSEL2`\"]\npub enum PORTSEL2W {\n #[doc = \"GPIO Port 0\"]\n GPIO_PORT_0,\n #[doc = \"GPIO Port 1\"]\n GPIO_PORT_1,\n #[doc = \"GPIO Port 2\"]\n GPIO_PORT_2,\n #[doc = \"GPIO Port 3\"]\n GPIO_PORT_3,\n #[doc = \"GPIO Port 4\"]\n GPIO_PORT_4,\n #[doc = \"GPIO Port 5\"]\n GPIO_PORT_5,\n #[doc = \"GPIO Port 6\"]\n GPIO_PORT_6,\n #[doc = \"GPIO Port 7\"]\n GPIO_PORT_7,\n}\nimpl PORTSEL2W {\n #[allow(missing_docs)]\n #[doc(hidden)]\n #[inline]\n pub fn _bits(&self) -> u8 {\n match *self {\n PORTSEL2W::GPIO_PORT_0 => 0,\n PORTSEL2W::GPIO_PORT_1 => 1,\n PORTSEL2W::GPIO_PORT_2 => 2,\n PORTSEL2W::GPIO_PORT_3 => 3,\n PORTSEL2W::GPIO_PORT_4 => 4,\n PORTSEL2W::GPIO_PORT_5 => 5,\n PORTSEL2W::GPIO_PORT_6 => 6,\n PORTSEL2W::GPIO_PORT_7 => 7,\n }\n }\n}\n#[doc = r\" Proxy\"]\npub struct _PORTSEL2W<'a> {\n w: &'a mut W,\n}\nimpl<'a> _PORTSEL2W<'a> {\n #[doc = r\" Writes `variant` to the field\"]\n #[inline]\n pub fn variant(self, variant: PORTSEL2W) -> &'a mut W {\n {\n self.bits(variant._bits())\n }\n }\n #[doc = \"GPIO Port 0\"]\n #[inline]\n pub fn gpio_port_0(self) -> &'a mut W {\n self.variant(PORTSEL2W::GPIO_PORT_0)\n }\n #[doc = \"GPIO Port 1\"]\n #[inline]\n pub fn gpio_port_1(self) -> &'a mut W {\n self.variant(PORTSEL2W::GPIO_PORT_1)\n }\n #[doc = \"GPIO Port 2\"]\n #[inline]\n pub fn gpio_port_2(self) -> &'a mut W {\n self.variant(PORTSEL2W::GPIO_PORT_2)\n }\n #[doc = \"GPIO Port 3\"]\n #[inline]\n pub fn gpio_port_3(self) -> &'a mut W {\n self.variant(PORTSEL2W::GPIO_PORT_3)\n }\n #[doc = \"GPIO Port 4\"]\n #[inline]\n pub fn gpio_port_4(self) -> &'a mut W {\n self.variant(PORTSEL2W::GPIO_PORT_4)\n }\n #[doc = \"GPIO Port 5\"]\n #[inline]\n pub fn gpio_port_5(self) -> &'a mut W {\n self.variant(PORTSEL2W::GPIO_PORT_5)\n }\n #[doc = \"GPIO Port 6\"]\n #[inline]\n pub fn gpio_port_6(self) -> &'a mut W {\n self.variant(PORTSEL2W::GPIO_PORT_6)\n }\n #[doc = \"GPIO Port 7\"]\n #[inline]\n pub fn gpio_port_7(self) -> &'a mut W {\n self.variant(PORTSEL2W::GPIO_PORT_7)\n }\n #[doc = r\" Writes raw bits to the field\"]\n #[inline]\n pub fn bits(self, value: u8) -> &'a mut W {\n const MASK: u8 = 7;\n const OFFSET: u8 = 21;\n self.w.bits &= !((MASK as u32) << OFFSET);\n self.w.bits |= ((value & MASK) as u32) << OFFSET;\n self.w\n }\n}\n#[doc = r\" Proxy\"]\npub struct _INTPIN3W<'a> {\n w: &'a mut W,\n}\nimpl<'a> _INTPIN3W<'a> {\n #[doc = r\" Writes raw bits to the field\"]\n #[inline]\n pub 
unsafe fn bits(self, value: u8) -> &'a mut W {\n const MASK: u8 = 31;\n const OFFSET: u8 = 24;\n self.w.bits &= !((MASK as u32) << OFFSET);\n self.w.bits |= ((value & MASK) as u32) << OFFSET;\n self.w\n }\n}\n#[doc = \"Values that can be written to the field `PORTSEL3`\"]\npub enum PORTSEL3W {\n #[doc = \"GPIO Port 0\"]\n GPIO_PORT_0,\n #[doc = \"GPIO Port 1\"]\n GPIO_PORT_1,\n #[doc = \"GPIO Port 2\"]\n GPIO_PORT_2,\n #[doc = \"GPIO Port 3\"]\n GPIO_PORT_3,\n #[doc = \"GPIO Port 4\"]\n GPIO_PORT_4,\n #[doc = \"GPIO Port 5\"]\n GPIO_PORT_5,\n #[doc = \"GPIO Port 6\"]\n GPIO_PORT_6,\n #[doc = \"GPIO Port 7\"]\n GPIO_PORT_7,\n}\nimpl PORTSEL3W {\n #[allow(missing_docs)]\n #[doc(hidden)]\n #[inline]\n pub fn _bits(&self) -> u8 {\n match *self {\n PORTSEL3W::GPIO_PORT_0 => 0,\n PORTSEL3W::GPIO_PORT_1 => 1,\n PORTSEL3W::GPIO_PORT_2 => 2,\n PORTSEL3W::GPIO_PORT_3 => 3,\n PORTSEL3W::GPIO_PORT_4 => 4,\n PORTSEL3W::GPIO_PORT_5 => 5,\n PORTSEL3W::GPIO_PORT_6 => 6,\n PORTSEL3W::GPIO_PORT_7 => 7,\n }\n }\n}\n#[doc = r\" Proxy\"]\npub struct _PORTSEL3W<'a> {\n w: &'a mut W,\n}\nimpl<'a> _PORTSEL3W<'a> {\n #[doc = r\" Writes `variant` to the field\"]\n #[inline]\n pub fn variant(self, variant: PORTSEL3W) -> &'a mut W {\n {\n self.bits(variant._bits())\n }\n }\n #[doc = \"GPIO Port 0\"]\n #[inline]\n pub fn gpio_port_0(self) -> &'a mut W {\n self.variant(PORTSEL3W::GPIO_PORT_0)\n }\n #[doc = \"GPIO Port 1\"]\n #[inline]\n pub fn gpio_port_1(self) -> &'a mut W {\n self.variant(PORTSEL3W::GPIO_PORT_1)\n }\n #[doc = \"GPIO Port 2\"]\n #[inline]\n pub fn gpio_port_2(self) -> &'a mut W {\n self.variant(PORTSEL3W::GPIO_PORT_2)\n }\n #[doc = \"GPIO Port 3\"]\n #[inline]\n pub fn gpio_port_3(self) -> &'a mut W {\n self.variant(PORTSEL3W::GPIO_PORT_3)\n }\n #[doc = \"GPIO Port 4\"]\n #[inline]\n pub fn gpio_port_4(self) -> &'a mut W {\n self.variant(PORTSEL3W::GPIO_PORT_4)\n }\n #[doc = \"GPIO Port 5\"]\n #[inline]\n pub fn gpio_port_5(self) -> &'a mut W {\n self.variant(PORTSEL3W::GPIO_PORT_5)\n }\n #[doc = \"GPIO Port 6\"]\n #[inline]\n pub fn gpio_port_6(self) -> &'a mut W {\n self.variant(PORTSEL3W::GPIO_PORT_6)\n }\n #[doc = \"GPIO Port 7\"]\n #[inline]\n pub fn gpio_port_7(self) -> &'a mut W {\n self.variant(PORTSEL3W::GPIO_PORT_7)\n }\n #[doc = r\" Writes raw bits to the field\"]\n #[inline]\n pub fn bits(self, value: u8) -> &'a mut W {\n const MASK: u8 = 7;\n const OFFSET: u8 = 29;\n self.w.bits &= !((MASK as u32) << OFFSET);\n self.w.bits |= ((value & MASK) as u32) << OFFSET;\n self.w\n }\n}\nimpl R {\n #[doc = r\" Value of the register as raw bits\"]\n #[inline]\n pub fn bits(&self) -> u32 {\n self.bits\n }\n #[doc = \"Bits 0:4 - Pint interrupt 0: Select the pin number within the GPIO port selected by the PORTSEL0 bit in this register.\"]\n #[inline]\n pub fn intpin0(&self) -> INTPIN0R {\n let bits = {\n const MASK: u8 = 31;\n const OFFSET: u8 = 0;\n ((self.bits >> OFFSET) & MASK as u32) as u8\n };\n INTPIN0R { bits }\n }\n #[doc = \"Bits 5:7 - Pin interrupt 0: Select the port for the pin number to be selected in the INTPIN0 bits of this register.\"]\n #[inline]\n pub fn portsel0(&self) -> PORTSEL0R {\n PORTSEL0R::_from({\n const MASK: u8 = 7;\n const OFFSET: u8 = 5;\n ((self.bits >> OFFSET) & MASK as u32) as u8\n })\n }\n #[doc = \"Bits 8:12 - Pint interrupt 1: Select the pin number within the GPIO port selected by the PORTSEL1 bit in this register.\"]\n #[inline]\n pub fn intpin1(&self) -> INTPIN1R {\n let bits = {\n const MASK: u8 = 31;\n const OFFSET: u8 = 8;\n ((self.bits >> OFFSET) & MASK as u32) 
as u8\n };\n INTPIN1R { bits }\n }\n #[doc = \"Bits 13:15 - Pin interrupt 1: Select the port for the pin number to be selected in the INTPIN1 bits of this register.\"]\n #[inline]\n pub fn portsel1(&self) -> PORTSEL1R {\n PORTSEL1R::_from({\n const MASK: u8 = 7;\n const OFFSET: u8 = 13;\n ((self.bits >> OFFSET) & MASK as u32) as u8\n })\n }\n #[doc = \"Bits 16:20 - Pint interrupt 2: Select the pin number within the GPIO port selected by the PORTSEL2 bit in this register.\"]\n #[inline]\n pub fn intpin2(&self) -> INTPIN2R {\n let bits = {\n const MASK: u8 = 31;\n const OFFSET: u8 = 16;\n ((self.bits >> OFFSET) & MASK as u32) as u8\n };\n INTPIN2R { bits }\n }\n #[doc = \"Bits 21:23 - Pin interrupt 2: Select the port for the pin number to be selected in the INTPIN2 bits of this register.\"]\n #[inline]\n pub fn portsel2(&self) -> PORTSEL2R {\n PORTSEL2R::_from({\n const MASK: u8 = 7;\n const OFFSET: u8 = 21;\n ((self.bits >> OFFSET) & MASK as u32) as u8\n })\n }\n #[doc = \"Bits 24:28 - Pint interrupt 3: Select the pin number within the GPIO port selected by the PORTSEL3 bit in this register.\"]\n #[inline]\n pub fn intpin3(&self) -> INTPIN3R {\n let bits = {\n const MASK: u8 = 31;\n const OFFSET: u8 = 24;\n ((self.bits >> OFFSET) & MASK as u32) as u8\n };\n INTPIN3R { bits }\n }\n #[doc = \"Bits 29:31 - Pin interrupt 3: Select the port for the pin number to be selected in the INTPIN3 bits of this register.\"]\n #[inline]\n pub fn portsel3(&self) -> PORTSEL3R {\n PORTSEL3R::_from({\n const MASK: u8 = 7;\n const OFFSET: u8 = 29;\n ((self.bits >> OFFSET) & MASK as u32) as u8\n })\n }\n}\nimpl W {\n #[doc = r\" Reset value of the register\"]\n #[inline]\n pub fn reset_value() -> W {\n W { bits: 0 }\n }\n #[doc = r\" Writes raw bits to the register\"]\n #[inline]\n pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {\n self.bits = bits;\n self\n }\n #[doc = \"Bits 0:4 - Pint interrupt 0: Select the pin number within the GPIO port selected by the PORTSEL0 bit in this register.\"]\n #[inline]\n pub fn intpin0(&mut self) -> _INTPIN0W {\n _INTPIN0W { w: self }\n }\n #[doc = \"Bits 5:7 - Pin interrupt 0: Select the port for the pin number to be selected in the INTPIN0 bits of this register.\"]\n #[inline]\n pub fn portsel0(&mut self) -> _PORTSEL0W {\n _PORTSEL0W { w: self }\n }\n #[doc = \"Bits 8:12 - Pint interrupt 1: Select the pin number within the GPIO port selected by the PORTSEL1 bit in this register.\"]\n #[inline]\n pub fn intpin1(&mut self) -> _INTPIN1W {\n _INTPIN1W { w: self }\n }\n #[doc = \"Bits 13:15 - Pin interrupt 1: Select the port for the pin number to be selected in the INTPIN1 bits of this register.\"]\n #[inline]\n pub fn portsel1(&mut self) -> _PORTSEL1W {\n _PORTSEL1W { w: self }\n }\n #[doc = \"Bits 16:20 - Pint interrupt 2: Select the pin number within the GPIO port selected by the PORTSEL2 bit in this register.\"]\n #[inline]\n pub fn intpin2(&mut self) -> _INTPIN2W {\n _INTPIN2W { w: self }\n }\n #[doc = \"Bits 21:23 - Pin interrupt 2: Select the port for the pin number to be selected in the INTPIN2 bits of this register.\"]\n #[inline]\n pub fn portsel2(&mut self) -> _PORTSEL2W {\n _PORTSEL2W { w: self }\n }\n #[doc = \"Bits 24:28 - Pint interrupt 3: Select the pin number within the GPIO port selected by the PORTSEL3 bit in this register.\"]\n #[inline]\n pub fn intpin3(&mut self) -> _INTPIN3W {\n _INTPIN3W { w: self }\n }\n #[doc = \"Bits 29:31 - Pin interrupt 3: Select the port for the pin number to be selected in the INTPIN3 bits of this register.\"]\n #[inline]\n 
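// Editorial sketch: each pin-interrupt slot of this register packs a 5-bit pin
// number (INTPINn) next to a 3-bit GPIO port (PORTSELn), one byte per slot. The
// helper below only illustrates that packing on the host; real writes go through
// the generated proxies, and the port/pin numbers are made-up examples.
fn pack_slot(port: u8, pin: u8) -> u32 {
    assert!(port < 8 && pin < 32);
    ((port as u32) << 5) | ((pin as u32) & 0x1f)
}

fn main() {
    // Pin interrupt 0 on GPIO port 3, pin 12 -> PORTSEL0 = 3, INTPIN0 = 12.
    let slot0 = pack_slot(3, 12);
    assert_eq!(slot0, (3 << 5) | 12);
    // Slot n occupies bits 8*n .. 8*n+8, so PORTSEL1 sits at bits 13:15.
    let pintsel0 = slot0 | (pack_slot(1, 4) << 8);
    assert_eq!((pintsel0 >> 13) & 0x7, 1);
}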
pub fn portsel3(&mut self) -> _PORTSEL3W {\n _PORTSEL3W { w: self }\n }\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1169,"cells":{"blob_id":{"kind":"string","value":"161a56edb3db6f76bd0d42056e768fd9b162ee5d"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"zfzackfrost/string_studio"},"path":{"kind":"string","value":"/src/generate/regex_gen.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":6496,"string":"6,496"},"score":{"kind":"number","value":2.609375,"string":"2.609375"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"use rand::distributions::{Distribution, Uniform};\nuse rand::prelude::*;\nuse regex_syntax::hir::{self, Hir, HirKind};\nuse regex_syntax::Parser;\nuse std::iter::FromIterator;\n\nuse encoding::all::UTF_8;\nuse encoding::{DecoderTrap, EncoderTrap, Encoding};\n\nconst MAX_REPEAT: u32 = 100;\n\nstruct RandomizeState<'a, R: Rng> {\n pub rng: &'a mut R,\n}\n\nfn randomize_alternation(\n rstate: &mut RandomizeState,\n mut exprs: Vec,\n) -> Result {\n exprs.shuffle(rstate.rng);\n if !exprs.is_empty() {\n randomize_for(rstate, exprs[0].kind().clone())\n } else {\n Err(())\n }\n}\n\nfn randomize_word_boundry(\n _rstate: &mut RandomizeState,\n _wb: hir::WordBoundary,\n) -> Result {\n Ok(String::from(\" \"))\n}\n\nfn randomize_anchor(\n _rstate: &mut RandomizeState,\n _anchor: hir::Anchor,\n) -> Result {\n Ok(String::from(\"\"))\n}\n\nfn randomize_group(\n rstate: &mut RandomizeState,\n group: hir::Group,\n) -> Result {\n randomize_for(rstate, group.hir.kind().clone())\n}\n\nfn randomize_literal(\n _rstate: &mut RandomizeState,\n literal: hir::Literal,\n) -> Result {\n match literal {\n hir::Literal::Unicode(c) => Ok(String::from_iter([c].iter())),\n hir::Literal::Byte(_) => Err(()),\n }\n}\n\nfn randomize_concat(rstate: &mut RandomizeState, exprs: Vec) -> Result {\n let mut s = String::new();\n for e in &exprs {\n s += &randomize_for(rstate, e.kind().clone())?;\n }\n Ok(s)\n}\n\nfn repeat_exactly(rstate: &mut RandomizeState, h: Hir, n: u32) -> Result {\n let s = (0..n)\n .map(|_| randomize_for(rstate, h.kind().clone()).unwrap())\n .collect::>()\n .join(\"\");\n Ok(s)\n}\n\nfn repeat_at_least(\n rstate: &mut RandomizeState,\n h: Hir,\n n: u32,\n) -> Result {\n let dist = Uniform::from(n..MAX_REPEAT);\n let n = dist.sample(rstate.rng);\n let s = (0..n)\n .map(|_| randomize_for(rstate, h.kind().clone()).unwrap())\n .collect::>()\n .join(\"\");\n Ok(s)\n}\n\nfn repeat_bounded(\n rstate: &mut RandomizeState,\n h: Hir,\n mn: u32,\n mx: u32,\n) -> Result {\n let mx = mx + 1;\n let dist = Uniform::from(mn..mx);\n let n = dist.sample(rstate.rng);\n let s = (0..n)\n .map(|_| randomize_for(rstate, h.kind().clone()).unwrap())\n .collect::>()\n .join(\"\");\n Ok(s)\n}\n\nfn randomize_unicode_class(\n rstate: &mut RandomizeState,\n cls: hir::ClassUnicode,\n) -> Result {\n let mut chars: Vec = Vec::new();\n\n for r in cls.iter() {\n let s = r.start();\n let e = r.end();\n if let (Ok(s), Ok(e)) = (\n UTF_8.encode(&String::from_iter([s].iter()), EncoderTrap::Strict),\n UTF_8.encode(&String::from_iter([e].iter()), EncoderTrap::Strict),\n ) {\n if s.len() > 0 && e.len() > 0 {\n let s = s[0];\n let e = e[0] + 1;\n\n for byte in s..e {\n if let Ok(s) = UTF_8.decode(&[byte], DecoderTrap::Strict) {\n let c = 
s.chars().nth(0).unwrap();\n chars.push(c);\n }\n }\n }\n }\n }\n\n Ok(String::from_iter(&[*chars.choose(rstate.rng).unwrap()]))\n}\n\nfn randomize_class(\n rstate: &mut RandomizeState,\n cls: hir::Class,\n) -> Result {\n match cls {\n hir::Class::Unicode(cls) => randomize_unicode_class(rstate, cls),\n _ => Err(()),\n }\n}\n\nfn randomize_repetition(\n rstate: &mut RandomizeState,\n rep: hir::Repetition,\n) -> Result {\n let hir = rep.hir;\n\n match rep.kind {\n hir::RepetitionKind::ZeroOrOne => repeat_bounded(rstate, hir.as_ref().clone(), 0, 1),\n hir::RepetitionKind::ZeroOrMore => {\n repeat_bounded(rstate, hir.as_ref().clone(), 0, MAX_REPEAT)\n }\n hir::RepetitionKind::OneOrMore => {\n repeat_bounded(rstate, hir.as_ref().clone(), 1, MAX_REPEAT)\n }\n hir::RepetitionKind::Range(range) => match range {\n hir::RepetitionRange::Exactly(n) => repeat_exactly(rstate, hir.as_ref().clone(), n),\n hir::RepetitionRange::AtLeast(n) => repeat_at_least(rstate, hir.as_ref().clone(), n),\n hir::RepetitionRange::Bounded(mn, mx) => {\n repeat_bounded(rstate, hir.as_ref().clone(), mn, mx)\n }\n },\n }\n}\n\nfn randomize_for(rstate: &mut RandomizeState, kind: HirKind) -> Result {\n match kind {\n HirKind::Alternation(exprs) => randomize_alternation(rstate, exprs),\n HirKind::Literal(lit) => randomize_literal(rstate, lit),\n HirKind::Concat(exprs) => randomize_concat(rstate, exprs),\n HirKind::Repetition(rep) => randomize_repetition(rstate, rep),\n HirKind::Group(grp) => randomize_group(rstate, grp),\n HirKind::Class(cls) => randomize_class(rstate, cls),\n HirKind::Anchor(a) => randomize_anchor(rstate, a),\n HirKind::WordBoundary(wb) => randomize_word_boundry(rstate, wb),\n _ => Err(()),\n }\n}\n\npub struct RegexGen {\n hir: Hir,\n}\n\nimpl RegexGen {\n pub fn new(pattern: &str) -> Option {\n if let Ok(hir) = Parser::new().parse(pattern) {\n Some(Self { hir })\n } else {\n None\n }\n }\n pub fn kind(&self) -> &HirKind {\n self.hir.kind()\n }\n pub fn randomize(&self, rng: &mut impl Rng) -> Result {\n let mut rstate = RandomizeState { rng: rng };\n randomize_for(&mut rstate, self.kind().clone())\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n use rand_xoshiro::Xoshiro256StarStar;\n #[test]\n fn hir_randomize_test() {\n let mut rng = Xoshiro256StarStar::seed_from_u64(0);\n let gen = RegexGen::new(\"([a-zA-Z]){1,3}\").unwrap();\n if let Ok(s) = gen.randomize(&mut rng) {\n println!(\"{}\", s);\n }\n }\n #[test]\n fn hir_parser_test() {\n let hir = Parser::new().parse(\"a|b\").unwrap();\n assert_eq!(\n hir,\n Hir::alternation(vec![\n Hir::literal(hir::Literal::Unicode('a')),\n Hir::literal(hir::Literal::Unicode('b')),\n ])\n );\n }\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1170,"cells":{"blob_id":{"kind":"string","value":"17931b343d778a5308fb58c83b860016ccc33223"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"garciparedes/google-hashcode-2021"},"path":{"kind":"string","value":"/src/main.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":6015,"string":"6,015"},"score":{"kind":"number","value":3.28125,"string":"3.28125"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"use std::io::prelude::*;\nuse std::io;\nuse std::collections::{HashMap, HashSet};\n\n\nfn main() -> io::Result<()> {\n let input = read_input()?;\n\n let mut solver = 
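// Editorial sketch: end-to-end use of the `RegexGen` API defined above, mirroring
// its own `hir_randomize_test`. It assumes the crates the module already depends on
// (`regex-syntax`, `rand`, `rand_xoshiro`); the pattern and seed are arbitrary
// examples, and the printed output is only indicative.
use rand::SeedableRng;
use rand_xoshiro::Xoshiro256StarStar;

fn main() {
    let mut rng = Xoshiro256StarStar::seed_from_u64(42);
    // Character classes and bounded repetition are supported; byte literals and
    // non-unicode classes return Err(()) (see randomize_literal / randomize_class).
    let generator = RegexGen::new("[a-z]{2,4}-[0-9]{3}").expect("pattern should parse");
    match generator.randomize(&mut rng) {
        Ok(s) => println!("generated: {}", s), // e.g. something like "qf-305"
        Err(()) => eprintln!("pattern uses an unsupported construct"),
    }
}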
Solver::from_str(&input);\n // println!(\"{:?}\", solver);\n let solution = solver.solve();\n\n write_output(solution);\n return Ok(());\n}\n\n#[derive(Debug)]\nstruct Solver {\n max_duration: usize,\n bonus_points: usize,\n graph: HashMap>,\n streets: HashMap,\n paths: HashSet,\n}\n\n\nimpl Solver {\n fn from_str(input: &str) -> Self {\n let lines = input\n .trim()\n .split('\\n')\n .collect::>();\n \n let header = lines[0].split_whitespace().map(|v| v.parse::().unwrap()).collect::>();\n let (d, _, s, v, f) = (header[0], header[1], header[2], header[3], header[4]);\n\n let mut streets = HashMap::new();\n for k in 1..1 + s {\n let street = Street::from_str(lines[k]);\n streets.insert(street.name.clone(), street);\n } \n\n let mut graph = HashMap::new();\n for street in streets.values() {\n graph.entry(street.to).or_insert_with(HashSet::new).insert(street.name.clone());\n }\n\n let mut paths = HashSet::new();\n for k in (1 + s)..(1 + s + v) {\n let path = Path::from_str(lines[k]);\n paths.insert(path);\n }\n\n return Self::new(d, f, streets, graph, paths);\n }\n\n fn new(\n max_duration: usize, \n bonus_points: usize, \n streets: HashMap,\n graph: HashMap>, \n paths: HashSet\n ) -> Self {\n Self { \n max_duration: max_duration,\n bonus_points: bonus_points,\n streets: streets,\n graph: graph,\n paths: paths,\n }\n }\n\n fn solve(&mut self) -> Solution {\n let mut solution = Solution::new();\n\n\n for path in &self.paths {\n\n let mut duration = 0;\n for street in &path.streets {\n duration += self.streets.get(street).unwrap().transit;\n self.streets.get_mut(street).unwrap().expected_visits.push(duration);\n self.streets.get_mut(street).unwrap().visits += 1;\n }\n\n }\n\n for (intersection_id, streets) in &self.graph {\n let mut streets: Vec<_> = streets\n .iter()\n .clone()\n .filter_map(|name| {\n let street = self.streets.get(name).unwrap();\n if street.visits == 0 {\n return None;\n }\n return Some(street);\n })\n .collect();\n\n if streets.is_empty() {\n continue;\n }\n\n streets.sort_unstable_by_key(|street| cmp::Reverse(street.transit));\n \n let mut incoming = Vec::new();\n let mut cycle = 1;\n let mut last = streets[0].transit;\n for street in streets {\n if last > street.transit {\n last = street.transit;\n cycle += 1;\n }\n incoming.push((street.name.clone(), cycle));\n }\n\n let intersection = Intersection::new(*intersection_id, incoming);\n solution.insert(intersection);\n }\n\n\n return solution;\n }\n}\nuse std::cmp;\n\n#[derive(Debug)]\nstruct Street {\n from: usize,\n to: usize,\n name: String,\n transit: usize,\n visits: usize,\n expected_visits: Vec,\n}\n\nimpl Street {\n fn from_str(raw: &str) -> Self {\n let values: Vec<_> = raw.trim().split_whitespace().collect();\n let from = values[0].parse::().unwrap();\n let to = values[1].parse::().unwrap();\n let name = String::from(values[2]);\n let transit = values[3].parse::().unwrap();\n\n return Self::new(from, to, name, transit);\n }\n\n fn new(from: usize, to: usize, name: String, transit: usize) -> Self {\n Self { from: from, to: to, name: name, transit: transit, visits: 0, expected_visits: Vec::new()}\n }\n}\n\n#[derive(Debug, PartialEq, Eq, Hash)]\nstruct Path {\n streets: Vec,\n}\n\nimpl Path {\n\n fn from_str(raw: &str) -> Self {\n let values: Vec = raw.trim().split_whitespace().skip(1).map(String::from).collect();\n return Self::new(values);\n }\n\n fn new(streets: Vec) -> Self {\n Self { streets: streets }\n }\n}\n\n#[derive(Debug)]\nstruct Solution {\n intersections: HashSet,\n}\n\n\nimpl Solution {\n fn new() -> 
Self {\n Self { intersections: HashSet::new() }\n }\n\n fn insert(&mut self, intersection: Intersection) {\n self.intersections.insert(intersection);\n }\n\n\n fn to_string(&self) -> String {\n let mut ans = String::new();\n ans.push_str(&self.intersections.len().to_string());\n for intersection in &self.intersections {\n ans.push('\\n');\n ans.push_str(&intersection.to_string());\n } \n return ans;\n }\n}\n\n\n\n#[derive(Debug, PartialEq, Eq, Hash)]\nstruct Intersection {\n id: usize,\n incoming: Vec<(String, usize)> \n}\n\nimpl Intersection {\n fn new(id: usize, incoming: Vec<(String, usize)>) -> Self {\n Self { id: id, incoming: incoming }\n }\n\n fn to_string(&self) -> String {\n let mut ans = String::new();\n ans.push_str(&format!(\"{}\\n\", self.id));\n ans.push_str(&self.incoming.len().to_string());\n for item in &self.incoming {\n ans.push('\\n');\n ans.push_str(&format!(\"{} {}\", item.0, item.1));\n }\n\n return ans;\n }\n}\n\nfn read_input() -> io::Result {\n let mut buffer = String::new();\n io::stdin().read_to_string(&mut buffer)?;\n return Ok(buffer);\n}\n\nfn write_output(solution: Solution) {\n println!(\"{}\", solution.to_string());\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1171,"cells":{"blob_id":{"kind":"string","value":"b97dde5b6f15364dbfc9169474aebd1212babb74"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"k124k3n/competitive-programming-answer"},"path":{"kind":"string","value":"/codesignal/largestNumber.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":167,"string":"167"},"score":{"kind":"number","value":3.1875,"string":"3.1875"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"fn largestNumber(n: i32) -> i32 {\n if n == 0 {\n return n;\n }\n let mut out = 9;\n for i in 1..n {\n out *= 10;\n out += 9;\n }\n out\n}"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1172,"cells":{"blob_id":{"kind":"string","value":"62aa2389e4b844e79b836993fb75f28eab00910d"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"mateuszptr/rust_dhcp"},"path":{"kind":"string","value":"/src/main.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":3138,"string":"3,138"},"score":{"kind":"number","value":2.515625,"string":"2.515625"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"#![feature(int_to_from_bytes)]\n\nextern crate byteorder;\nextern crate bytes;\n\nextern crate serde;\nextern crate serde_json;\n#[macro_use] extern crate serde_derive;\n\nextern crate actix;\n#[macro_use] extern crate actix_derive;\n\nextern crate libc;\nextern crate hwaddr;\n\nmod dhcp_frames;\nmod dhcp_options;\nmod config;\n\nmod server_actor;\nmod io_actor;\n\nuse std::thread;\nuse std::fs::File;\nuse std::io::prelude::*;\nuse config::*;\nuse std::net::{UdpSocket, SocketAddr, IpAddr, Ipv4Addr};\nuse actix::prelude::*;\nuse io_actor::OutputActor;\nuse server_actor::ServerActor;\nuse dhcp_frames::DHCPPacket;\nuse std::os::unix::io::AsRawFd;\nuse std::ffi::CString;\nuse libc::c_void;\n\n\n/// Biblioteka standardowa rusta owrapowuje niektóre wywołania funkcji setsockopt, ale 
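// Editorial sketch: the submission format emitted by the `to_string` impls above,
// i.e. intersection id, the count of scheduled incoming streets, then one
// "street-name duration" line per street. The street names and durations below are
// made-up sample data, not taken from a real input file.
fn main() {
    let intersection = Intersection::new(
        7,
        vec![
            ("rue-de-londres".to_string(), 2),
            ("rue-d-athenes".to_string(), 1),
        ],
    );
    // Prints:
    // 7
    // 2
    // rue-de-londres 2
    // rue-d-athenes 1
    println!("{}", intersection.to_string());
}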
nie zapewnia jej całej funkcjonalności.\n/// Ponieważ adres ip do broadcastu nie informuje nas o interfejsie, domyślny zostanie wybrany przez OS.\n/// Pozostaje nam ustawić ręcznie interfejs za pomocą opcji SO_BINDTODEVICE\nunsafe fn set_socket_device(socket: &UdpSocket, iface: &str) {\n let fd = socket.as_raw_fd();\n let lvl = libc::SOL_SOCKET;\n let name = libc::SO_BINDTODEVICE;\n\n let val = CString::new(iface).unwrap();\n let pointer = val.as_ptr() as *const c_void;\n let len = val.as_bytes_with_nul().len();\n\n libc::setsockopt(\n fd,\n lvl,\n name,\n pointer,\n len as libc::socklen_t\n );\n\n}\n\nfn main() {\n let system = actix::System::new(\"dhcp\");\n\n // otwieramy plik konfiguracyjny w formacie JSON, wczytujemy go do struktury Config\n let mut config_file = File::open(\"Config.json\").expect(\"Couldn't open config file\");\n let mut config_content = String::new();\n config_file.read_to_string(&mut config_content).expect(\"Couldn't read config file\");\n let config = get_config(config_content);\n\n //Tworzymy socket zbindowany na 0.0.0.0, na port 67 (standardowy port serwera DHCP), na interfejs podany w konfiguracji, z broadcastem.\n let socket = UdpSocket::bind(SocketAddr::new(IpAddr::from(Ipv4Addr::from([0,0,0,0])), 67)).expect(\"Couldn't bind a socket\");\n unsafe { set_socket_device(&socket, config.interface.as_str()); }\n socket.set_broadcast(true).expect(\"Couldn't set socket to bcast\");\n let input_socket = socket.try_clone().expect(\"Couldn't clone the socket\");\n\n // Aktor odpowiadający za wysyłanie wiadomości na socket\n let output_actor: Addr = OutputActor::new(socket).start();\n // Aktor obsługujący logikę serwera DHCP\n let server_actor: Addr = ServerActor::new(config, output_actor.clone()).start();\n\n // Tworzymy wątek odbierający w tle pakiety (recv_from) i wysyłający je do aktora serwera.\n let _input_thread_handle = thread::spawn(move || {\n loop {\n println!(\"Creating buffer\");\n let mut buf = vec![0u8; 1024];\n let (_, addr) = input_socket.recv_from(&mut buf).unwrap();\n println!(\"Received frame from {}\", addr);\n let packet = DHCPPacket::from_vec(buf).unwrap();\n server_actor.do_send(packet);\n }\n });\n\n //Start systemu aktorów\n system.run();\n\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1173,"cells":{"blob_id":{"kind":"string","value":"dbe93392a7856f3749ee596cdc81e6a796745de4"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"prisma/prisma-engines"},"path":{"kind":"string","value":"/query-engine/prisma-models/src/selection_result.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":5231,"string":"5,231"},"score":{"kind":"number","value":3.203125,"string":"3.203125"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"use crate::{DomainError, FieldSelection, PrismaValue, ScalarFieldRef, SelectedField};\nuse itertools::Itertools;\nuse std::convert::TryFrom;\n\n/// Represents a set of results.\n#[derive(Default, Clone, PartialEq, Eq, Hash)]\npub struct SelectionResult {\n pub pairs: Vec<(SelectedField, PrismaValue)>,\n}\n\nimpl std::fmt::Debug for SelectionResult {\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n f.debug_list()\n .entries(\n &self\n .pairs\n .iter()\n .map(|pair| (format!(\"{}\", pair.0), pair.1.clone()))\n 
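// Editorial sketch: minimal use of the `set_socket_device` helper defined above.
// SO_BINDTODEVICE is Linux-only; the interface name "eth0" and the high port are
// placeholders (the real server binds UDP port 67, which normally needs elevated
// privileges), and depending on kernel configuration the socket option itself may
// also require extra capabilities.
fn main() -> std::io::Result<()> {
    let socket = std::net::UdpSocket::bind("0.0.0.0:6767")?;
    unsafe { set_socket_device(&socket, "eth0") };
    socket.set_broadcast(true)?;
    Ok(())
}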
.collect_vec(),\n )\n .finish()\n }\n}\n\nimpl SelectionResult {\n pub fn new(pairs: Vec<(T, PrismaValue)>) -> Self\n where\n T: Into,\n {\n Self {\n pairs: pairs.into_iter().map(|(rt, value)| (rt.into(), value)).collect(),\n }\n }\n\n pub fn add(&mut self, pair: (T, PrismaValue))\n where\n T: Into,\n {\n self.pairs.push((pair.0.into(), pair.1));\n }\n\n pub fn get(&self, selection: &SelectedField) -> Option<&PrismaValue> {\n self.pairs.iter().find_map(|(result_selection, value)| {\n if selection == result_selection {\n Some(value)\n } else {\n None\n }\n })\n }\n\n pub fn values(&self) -> impl Iterator + '_ {\n self.pairs.iter().map(|p| p.1.clone())\n }\n\n pub fn len(&self) -> usize {\n self.pairs.len()\n }\n\n pub fn is_empty(&self) -> bool {\n self.len() == 0\n }\n\n pub fn db_names(&self) -> impl Iterator + '_ {\n self.pairs.iter().map(|(field, _)| field.db_name())\n }\n\n /// Consumes this `SelectionResult` and splits it into a set of `SelectionResult`s based on the passed\n /// `FieldSelection`s. Assumes that the transformation can be done.\n pub fn split_into(self, field_selections: &[FieldSelection]) -> Vec {\n field_selections\n .iter()\n .map(|field_selection| {\n let pairs: Vec<_> = field_selection\n .selections()\n .map(|selected_field| {\n self.get(selected_field)\n .map(|value| (selected_field.clone(), value.clone()))\n .expect(\"Error splitting `ReturnValues`: `FieldSelection` doesn't match.\")\n })\n .collect();\n\n SelectionResult::new(pairs)\n })\n .collect()\n }\n\n /// Checks if `self` only contains scalar field selections and if so, returns them all in a list.\n /// If any other selection is contained, returns `None`.\n pub fn as_scalar_fields(&self) -> Option> {\n let scalar_fields: Vec<_> = self\n .pairs\n .iter()\n .filter_map(|(selection, _)| match selection {\n SelectedField::Scalar(sf) => Some(sf.clone()),\n SelectedField::Composite(_) => None,\n })\n .collect();\n\n if scalar_fields.len() == self.pairs.len() {\n Some(scalar_fields)\n } else {\n None\n }\n }\n\n /// Coerces contained values to best fit their type.\n /// - Scalar fields coerce values based on the TypeIdentifier.\n /// - Composite fields must be objects and contained fields must also follow the type coherence.\n pub fn coerce_values(self) -> crate::Result {\n let pairs = self\n .pairs\n .into_iter()\n .map(|(selection, value)| {\n let value = selection.coerce_value(value)?;\n Ok((selection, value))\n })\n .collect::>>()?;\n\n Ok(Self { pairs })\n }\n}\n\nimpl TryFrom for PrismaValue {\n type Error = DomainError;\n\n fn try_from(return_values: SelectionResult) -> crate::Result {\n match return_values.pairs.into_iter().next() {\n Some(value) => Ok(value.1),\n None => Err(DomainError::ConversionFailure(\n \"ReturnValues\".into(),\n \"PrismaValue\".into(),\n )),\n }\n }\n}\n\nimpl IntoIterator for SelectionResult {\n type Item = (SelectedField, PrismaValue);\n type IntoIter = std::vec::IntoIter;\n\n fn into_iter(self) -> Self::IntoIter {\n self.pairs.into_iter()\n }\n}\n\nimpl From<(T, PrismaValue)> for SelectionResult\nwhere\n T: Into,\n{\n fn from((x, value): (T, PrismaValue)) -> Self {\n Self::new(vec![(x.into(), value)])\n }\n}\n\nimpl From> for SelectionResult\nwhere\n T: Into,\n{\n fn from(tuples: Vec<(T, PrismaValue)>) -> Self {\n Self::new(tuples.into_iter().map(|(x, value)| (x.into(), value)).collect())\n }\n}\n\nimpl From<&FieldSelection> for SelectionResult {\n fn from(fs: &FieldSelection) -> Self {\n Self {\n pairs: fs\n .selections()\n .map(|selection| (selection.clone(), 
PrismaValue::Null))\n .collect(),\n }\n }\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1174,"cells":{"blob_id":{"kind":"string","value":"e399c03576c227ce4347ada70b8fb4314f443680"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"wa7sa34cx/WhyDoYou-bot"},"path":{"kind":"string","value":"/src/utils/locale.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":4945,"string":"4,945"},"score":{"kind":"number","value":3.046875,"string":"3.046875"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":["LicenseRef-scancode-unknown-license-reference","MIT"],"string":"[\n \"LicenseRef-scancode-unknown-license-reference\",\n \"MIT\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"//! Localization helper\n\nuse crate::models::error::HandlerError;\nuse include_dir::{include_dir, Dir, File};\nuse lazy_static::lazy_static;\nuse log::{error, info, warn};\nuse regex::{Captures, Regex};\nuse std::collections::HashMap;\n\nlazy_static! {\n /// Shared instance of Locale\n pub static ref TEXTS: Locale = Locale::parse().unwrap();\n static ref ROW_REGEX: Regex = regex::Regex::new(\"\\\"([\\\\w]+)\\\" = \\\"([^\\\"]+)\\\";\").unwrap();\n}\n\nconst LOCALE_DIR: Dir = include_dir!(\"assets/locale\");\n\n#[derive(Debug)]\npub struct Locale {\n locales: Vec,\n}\n\nimpl Locale {\n fn parse() -> Result {\n let mut locales: Vec = Vec::new();\n for file in LOCALE_DIR.files() {\n if let Some(meta) = LocaleFileMeta::from(file) {\n locales.push(meta);\n }\n }\n let item = Self { locales };\n item._test_keys()?;\n Ok(item)\n }\n\n /// Get localized string\n ///\n /// Parameters:\n /// - key: localization key\n /// - lang: language code (ex.: en, ru)\n ///\n /// Return: localized string or key\n pub fn get(&self, key: &str, lang: &str) -> String {\n let result = &self\n .locales\n .iter()\n .find(|l| l.lang.to_lowercase() == lang.to_lowercase())\n .or_else(|| *{ &self.locales.iter().find(|l| l.is_base) })\n .and_then(|l| l.data.get(key).to_owned().and_then(|s| Some(s.as_str())))\n .unwrap_or(key);\n return result.to_string();\n }\n\n fn _test_keys(&self) -> Result<(), HandlerError> {\n let mut is_error = false;\n for locale in &self.locales {\n &self.locales.iter().for_each(|l| {\n if l.lang == locale.lang {\n return;\n };\n locale.data.iter().for_each(|a| {\n if !l.data.contains_key(a.0) {\n if l.is_base {\n is_error = true;\n error!(\n \"{} lang not contain '{}' key which is in {} lang\",\n l.title(),\n a.0,\n locale.title()\n );\n } else {\n warn!(\n \"{} lang not contain '{}' key which is in {} lang\",\n l.title(),\n a.0,\n locale.title()\n );\n }\n }\n })\n });\n }\n if is_error {\n Err(HandlerError::from_str(\"Locales has errors.\"))\n } else {\n Ok(())\n }\n }\n}\n\n#[derive(Debug)]\nstruct LocaleFileMeta {\n lang: String,\n is_base: bool,\n data: HashMap,\n}\n\nimpl LocaleFileMeta {\n fn from(file: &File) -> Option {\n if let Some(os_str_name) = file.path().file_name() {\n if let Some(raw_str) = os_str_name.to_str() {\n let components = raw_str.split(\".\").collect::>();\n if components.len() > 3 || components.len() < 2 {\n return None;\n }\n if components.last().unwrap().to_lowercase() != String::from(\"locale\") {\n return None;\n }\n if let Some(content) = file.contents_utf8() {\n let f_content = content\n .split(\"\\n\")\n .filter(|row| !row.starts_with(\"//\") || !row.is_empty())\n .collect::>()\n .join(\"\\n\");\n 
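// Editorial sketch: the line format ROW_REGEX extracts from the bundled *.locale
// files ("key" = "value"; one entry per line, with a *.base.locale file acting as
// the fallback language in Locale::get). The keys and translations below are sample
// data; only the regex literal itself is taken from the module above.
fn main() {
    let row_regex = regex::Regex::new("\"([\\w]+)\" = \"([^\"]+)\";").unwrap();
    let sample = r#"
"start_message" = "Hello!";
"help_message" = "Send me a photo and I will answer.";
"#;
    for cap in row_regex.captures_iter(sample) {
        // Lines that do not match the pattern simply yield no captures.
        println!("{} => {}", &cap[1], &cap[2]);
    }
}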
let mut data: HashMap = HashMap::new();\n for row in ROW_REGEX\n .captures_iter(&*f_content)\n .collect::>()\n {\n let key = row.get(1).unwrap().as_str().parse().unwrap();\n let value = row.get(2).unwrap().as_str().parse().unwrap();\n data.insert(key, value);\n }\n let item = LocaleFileMeta {\n lang: components.get(0).unwrap().parse().unwrap(),\n is_base: components.len() == 3\n && components.get(1).unwrap().to_lowercase() == String::from(\"base\"),\n data,\n };\n info!(\"{} language found & loaded.\", item.title());\n return Some(item);\n }\n }\n }\n None\n }\n\n fn title(&self) -> String {\n let mut result = String::from(&self.lang.clone());\n if self.is_base {\n result.push_str(\"*\");\n }\n result.to_string().to_uppercase()\n }\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1175,"cells":{"blob_id":{"kind":"string","value":"6dd8177bfc0f4b5bf78e749af2e13ba80cebe258"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"russelltg/srt-rs"},"path":{"kind":"string","value":"/srt-protocol/src/packet/msg_number.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":229,"string":"229"},"score":{"kind":"number","value":2.625,"string":"2.625"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"use super::modular_num::modular_num;\n\nmodular_num! {\n pub MsgNumber(u32, 26)\n}\n\nimpl MsgNumber {\n #[must_use]\n pub fn increment(&mut self) -> Self {\n let result = *self;\n *self += 1;\n result\n }\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1176,"cells":{"blob_id":{"kind":"string","value":"bbca1674e59b8a03bc3683d625ef4f58f01db247"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"ajm188/advent_of_code"},"path":{"kind":"string","value":"/2015/day01/main.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":1158,"string":"1,158"},"score":{"kind":"number","value":3.359375,"string":"3.359375"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"use std::env::args;\n\nstruct Santa {\n position: i32,\n first_time_in_basement: i32,\n num_movements: i32,\n}\n\nimpl Santa {\n fn has_been_in_basement(&self) -> bool {\n self.first_time_in_basement >= 0\n }\n\n fn from_santa(santa: Santa, movement: i32) -> Santa {\n let pos = santa.position + movement;\n let movements = santa.num_movements + 1;\n let basement = if santa.has_been_in_basement() || pos >= 0 {\n santa.first_time_in_basement\n } else {\n movements\n };\n Santa {\n position: pos,\n first_time_in_basement: basement,\n num_movements: movements,\n }\n }\n}\n\nfn main() {\n let instructions = match args().nth(1) {\n Some(v) => v,\n None => \"\".to_string(),\n };\n let santa = Santa {\n position: 0,\n first_time_in_basement: -1,\n num_movements: 0,\n };\n let last_santa: Santa = instructions\n .chars()\n .map(|c: char| if c == '(' { 1 } else { -1 })\n .fold(santa, |s, i| Santa::from_santa(s, i));\n println!(\"{} {}\", last_santa.position, 
last_santa.first_time_in_basement);\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1177,"cells":{"blob_id":{"kind":"string","value":"16e7f44ffefec6125de2b0621a53a7dd1e33864a"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"flanfly/rust-pottcpp"},"path":{"kind":"string","value":"/numortxt.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":480,"string":"480"},"score":{"kind":"number","value":3.953125,"string":"3.953125"},"int_score":{"kind":"number","value":4,"string":"4"},"detected_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"enum NumberOrText {\n\tNumber(i32),\n\tText(String)\n}\n\nfn print_number_or_text(nt: NumberOrText) {\n\tmatch nt {\n\t\tNumberOrText::Number(i) => println!(\"Number: {}\",i),\n\t\tNumberOrText::Text(t) => println!(\"Text: {}\",t)\n\t}\n}\n\nfn main() {\n let a: NumberOrText = NumberOrText::Number(42);\n let b: NumberOrText = NumberOrText::Text(\"Hello, World\".to_string());\n\n // Prints \"Number: 42\"\n print_number_or_text(a);\n\n // Prints \"Text: Hello, World\"\n print_number_or_text(b);\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1178,"cells":{"blob_id":{"kind":"string","value":"ef3cdeae12f65e6bfe91cbba96412998279f48d7"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"AurelienAubry/lc3-vm"},"path":{"kind":"string","value":"/src/instructions/not.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":2267,"string":"2,267"},"score":{"kind":"number","value":3.5,"string":"3.5"},"int_score":{"kind":"number","value":4,"string":"4"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"use crate::bus::Bus;\nuse crate::cpu::{register_from_u16, Register, Registers};\nuse crate::instructions::Instruction;\nuse anyhow::Result;\n\npub struct Not {\n dst_reg: Register,\n src_reg: Register,\n}\n\nimpl Not {\n pub fn new(instruction: u16) -> Result {\n let dst_reg = register_from_u16(instruction >> 9 & 0x7)?;\n let src_reg = register_from_u16(instruction >> 6 & 0x7)?;\n\n Ok(Self { dst_reg, src_reg })\n }\n}\n\nimpl Instruction for Not {\n fn run(&self, registers: &mut Registers, _bus: &mut Bus) -> Result<()> {\n registers.write_register(self.dst_reg, !registers.read_register(self.src_reg));\n\n registers.update_flags(self.dst_reg);\n\n Ok(())\n }\n\n fn to_str(&self) -> String {\n format!(\"NOT {:?}, {:?}\", self.dst_reg, self.src_reg,)\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n use crate::bus::Bus;\n use crate::cpu::Flag;\n use crate::instructions::decode;\n\n #[test]\n fn test_run() {\n let mut reg = Registers::new();\n let mut bus = Bus::new().unwrap();\n\n // ZRO FLAG\n reg.write_register(Register::R0, 0b1111_1111_1111_1111);\n let instruction = decode(0b1001_001_000_1_11111).unwrap();\n instruction.run(&mut reg, &mut bus).unwrap();\n assert_eq!(reg.read_register(Register::R1), 0);\n assert_eq!(reg.read_register(Register::COND), Flag::Zro as u16);\n\n // POS FLAG\n reg.write_register(Register::R0, 0b1000_1111_1111_1111);\n let instruction = decode(0b1001_001_000_1_11111).unwrap();\n instruction.run(&mut reg, &mut bus).unwrap();\n assert_eq!(reg.read_register(Register::R1), 0b0111_0000_0000_0000);\n 
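// Editorial sketch: a worked decomposition of the NOT encoding exercised by the
// tests above, using the same bit positions as `Not::new` (opcode in bits 12..=15,
// DR in bits 9..=11, SR in bits 6..=8, low six bits all ones).
fn main() {
    let instruction: u16 = 0b1001_001_000_1_11111;
    let opcode = instruction >> 12;     // 0b1001, the LC-3 NOT opcode
    let dst = (instruction >> 9) & 0x7; // 0b001, R1
    let src = (instruction >> 6) & 0x7; // 0b000, R0
    assert_eq!((opcode, dst, src), (0b1001, 1, 0)); // i.e. "NOT R1, R0"
}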
assert_eq!(reg.read_register(Register::COND), Flag::Pos as u16);\n\n // NEG FLAG\n reg.write_register(Register::R0, 0b0111_1010_1010_1010);\n let instruction = decode(0b1001_001_000_1_11111).unwrap();\n instruction.run(&mut reg, &mut bus).unwrap();\n assert_eq!(reg.read_register(Register::R1), 0b1000_0101_0101_0101);\n assert_eq!(reg.read_register(Register::COND), Flag::Neg as u16);\n }\n\n #[test]\n fn test_to_str() {\n let inst = decode(0b1001_001_000_1_11111).unwrap();\n assert_eq!(inst.to_str(), \"NOT R1, R0\");\n }\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1179,"cells":{"blob_id":{"kind":"string","value":"6ae9a645c26ac86e6b447a7bedc95240071b3be4"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"rjloura/proxy-rs"},"path":{"kind":"string","value":"/src/utils.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":1207,"string":"1,207"},"score":{"kind":"number","value":3.3125,"string":"3.3125"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"use chrono::prelude::*;\n\nconst DEFAULT_UNIT: f64 = 1024_f64;\nconst SUFFIX: &[&str] = &[\"k\", \"M\", \"G\", \"T\", \"P\", \"E\"];\n\n/// Takes the number of bytes and converts it to a human readable string\npub fn pretty_bytes(b: u64) -> String {\n let b = b as f64;\n\n if b < DEFAULT_UNIT {\n return format!(\"{:.0} B\", b);\n }\n\n let idx = (b.log10() / DEFAULT_UNIT.log10()) as usize;\n let b = b / DEFAULT_UNIT.powi(idx as i32);\n let suffix = SUFFIX[idx.wrapping_sub(1)];\n\n format!(\"{:.1} {}B\", b, suffix)\n}\n\npub fn log>(message: S) {\n let dt: DateTime = Local::now();\n println!(\n \"{} {}\",\n dt.format(\"%Y-%m-%d %H:%M:%S\").to_string(),\n message.as_ref()\n );\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n\n #[test]\n fn pretty_bytes_test() {\n assert_eq!(\"1 B\", pretty_bytes(1));\n assert_eq!(\"1.0 kB\", pretty_bytes(1024));\n assert_eq!(\"1.0 MB\", pretty_bytes(1024u64.pow(2)));\n assert_eq!(\"1.0 GB\", pretty_bytes(1024u64.pow(3)));\n assert_eq!(\"1.0 TB\", pretty_bytes(1024u64.pow(4)));\n assert_eq!(\"1.0 PB\", pretty_bytes(1024u64.pow(5)));\n assert_eq!(\"1.0 EB\", pretty_bytes(1024u64.pow(6)));\n }\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1180,"cells":{"blob_id":{"kind":"string","value":"d796f16109e37e30ecb4c6dd6bc3dbc71133b2d3"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"BurntSushi/rust-sorts"},"path":{"kind":"string","value":"/src/lib.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":9148,"string":"9,148"},"score":{"kind":"number","value":2.90625,"string":"2.90625"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":["Unlicense"],"string":"[\n \"Unlicense\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"#![crate_id = \"sorts#0.1.0\"]\n#![crate_type = \"lib\"]\n#![license = \"UNLICENSE\"]\n#![doc(html_root_url = \"http://burntsushi.net/rustdoc/rust-sorts\")]\n\n#![feature(phase)]\n#![feature(macro_rules)]\n\n//! 
A collection of sorting algorithms with tests and benchmarks.\n\n#[phase(syntax, link)] extern crate log;\nextern crate stdtest = \"test\";\nextern crate quickcheck;\nextern crate rand;\n\nuse rand::Rng; // why do I need this?\n\n#[cfg(test)]\nmod bench;\n\n#[cfg(test)]\nmod test;\n\npub static INSERTION_THRESHOLD: uint = 16;\n\n/// The `bogo` sort is the simplest but worst sorting algorithm. It shuffles\n/// the given input until it is sorted. Its worst case space complexity is\n/// `O(n)` but its time complexity is *unbounded*.\npub fn bogo<T: Ord>(xs: &mut [T]) {\n    fn is_sorted<T: Ord>(xs: &[T]) -> bool {\n        for win in xs.windows(2) {\n            if win[0] > win[1] {\n                return false\n            }\n        }\n        true\n    }\n    let rng = &mut rand::task_rng();\n    while !is_sorted(xs) {\n        rng.shuffle_mut(xs);\n    }\n}\n\n/// Classic in place insertion sort. Worst case time complexity is `O(n^2)`.\npub fn insertion<T: Ord>(xs: &mut [T]) {\n    let (mut i, len) = (1, xs.len());\n    while i < len {\n        let mut j = i;\n        while j > 0 && xs[j-1] > xs[j] {\n            xs.swap(j, j-1);\n            j = j - 1;\n        }\n        i = i + 1;\n    }\n}\n\n/// Classic in place bubble sort. Worst case time complexity is `O(n^2)`.\npub fn bubble<T: Ord>(xs: &mut [T]) {\n    let mut n = xs.len();\n    while n > 0 {\n        let mut newn = 0;\n        let mut i = 1;\n        while i < n {\n            if xs[i-1] > xs[i] {\n                xs.swap(i-1, i);\n                newn = i;\n            }\n            i = i + 1;\n        }\n        n = newn;\n    }\n}\n\n/// Classic in place selection sort. Worst case time complexity is `O(n^2)`.\n/// Note that this is an *unstable* implementation.\npub fn selection<T: Ord>(xs: &mut [T]) {\n    let (mut i, len) = (0, xs.len());\n    while i < len {\n        let (mut j, mut cur_min) = (i + 1, i);\n        while j < len {\n            if xs[j] < xs[cur_min] {\n                cur_min = j;\n            }\n            j = j + 1;\n        }\n        xs.swap(i, cur_min);\n        i = i + 1;\n    }\n}\n\npub mod quick {\n    use super::INSERTION_THRESHOLD;\n\n    /// Standard in-place quicksort that always uses the first element as\n    /// a pivot. Average time complexity is `O(nlogn)` and its space complexity\n    /// is `O(1)` (limited to vectors of size `N`, which is the maximum number\n    /// expressible with a `uint`).\n    pub fn dumb<T: Ord>(xs: &mut [T]) {\n        fn pivot<T>(_: &[T]) -> uint { 0 }\n        qsort(xs, pivot)\n    }\n\n\n    /// Standard in-place quicksort that uses the median of the first, middle\n    /// and last elements in each vector for the pivot.\n    /// Average time complexity is `O(nlogn)` and its space complexity\n    /// is `O(1)` (limited to vectors of size `N`, which is the maximum number\n    /// expressible with a `uint`).\n    ///\n    /// This seems to have the same performance characteristics as the `dumb`\n    /// quicksort, except when the input is almost sorted where intelligently\n    /// choosing a pivot helps by at least an order of magnitude. 
(This is\n    /// because an almost-sorted vector given to the `dumb` quicksort provokes\n    /// worse case `O(n^2)` performance, whereas picking a pivot intelligently\n    /// helps keep it closer to the average `O(nlogn)` performance.)\n    pub fn smart<T: Ord>(xs: &mut [T]) {\n        qsort(xs, smart_pivot)\n    }\n\n    pub fn insertion<T: Ord>(xs: &mut [T]) {\n        if xs.len() <= 1 {\n            return\n        }\n        let p = smart_pivot(xs);\n        let p = partition(xs, p);\n\n        if p <= INSERTION_THRESHOLD {\n            super::insertion(xs.mut_slice_to(p))\n        } else {\n            qsort(xs.mut_slice_to(p), smart_pivot);\n        }\n        if xs.len() - p+1 <= INSERTION_THRESHOLD {\n            super::insertion(xs.mut_slice_from(p+1))\n        } else {\n            qsort(xs.mut_slice_from(p+1), smart_pivot);\n        }\n    }\n\n    fn qsort<T: Ord>(xs: &mut [T], pivot: fn(&[T]) -> uint) {\n        if xs.len() <= 1 {\n            return\n        }\n        let p = pivot(xs);\n        let p = partition(xs, p);\n        qsort(xs.mut_slice_to(p), pivot);\n        qsort(xs.mut_slice_from(p+1), pivot);\n    }\n\n    fn partition<T: Ord>(xs: &mut [T], p: uint) -> uint {\n        if xs.len() <= 1 {\n            return p\n        }\n\n        let lasti = xs.len() - 1;\n        let (mut i, mut nextp) = (0, 0);\n        xs.swap(lasti, p);\n        while i < lasti {\n            if xs[i] <= xs[lasti] {\n                xs.swap(i, nextp);\n                nextp = nextp + 1;\n            }\n            i = i + 1;\n        }\n        xs.swap(nextp, lasti);\n        nextp\n    }\n\n    fn smart_pivot<T: Ord>(xs: &[T]) -> uint {\n        let (l, r) = (0, xs.len() - 1);\n        let m = l + ((r - l) / 2);\n        let (left, middle, right) = (&xs[l], &xs[m], &xs[r]);\n        if middle >= left && middle <= right {\n            m\n        } else if left >= middle && left <= right {\n            l\n        } else {\n            r\n        }\n    }\n}\n\npub mod heap {\n    pub fn up<T: Ord>(xs: &mut [T]) {\n        sort(xs, heapify_up);\n    }\n\n    pub fn down<T: Ord>(xs: &mut [T]) {\n        sort(xs, heapify_down);\n    }\n\n    fn sort<T: Ord>(xs: &mut [T], heapify: fn(&mut [T])) {\n        if xs.len() <= 1 {\n            return\n        }\n\n        heapify(xs);\n        let mut end = xs.len() - 1;\n        while end > 0 {\n            xs.swap(end, 0);\n            end = end - 1;\n            sift_down(xs, 0, end);\n        }\n    }\n\n    fn heapify_down<T: Ord>(xs: &mut [T]) {\n        let last = xs.len() - 1;\n        let mut start = 1 + ((last - 1) / 2);\n        while start > 0 {\n            start = start - 1;\n            sift_down(xs, start, last);\n        }\n    }\n\n    fn sift_down<T: Ord>(xs: &mut [T], start: uint, end: uint) {\n        let mut root = start;\n        while root * 2 + 1 <= end {\n            let child = root * 2 + 1;\n            let mut swap = root;\n            if xs[swap] < xs[child] {\n                swap = child\n            }\n            if child + 1 <= end && xs[swap] < xs[child+1] {\n                swap = child + 1\n            }\n\n            if swap == root {\n                return\n            }\n            xs.swap(root, swap);\n            root = swap;\n        }\n    }\n\n    fn heapify_up<T: Ord>(xs: &mut [T]) {\n        let mut end = 1;\n        while end < xs.len() {\n            sift_up(xs, 0, end);\n            end = end + 1;\n        }\n    }\n\n    fn sift_up<T: Ord>(xs: &mut [T], start: uint, end: uint) {\n        let mut child = end;\n        while child > start {\n            let parent = (child - 1) / 2;\n            if xs[parent] >= xs[child] {\n                return\n            }\n            xs.swap(parent, child);\n            child = parent;\n        }\n    }\n}\n\npub mod merge {\n    use std::cmp::min;\n    use std::slice::MutableCloneableVector;\n\n    use super::INSERTION_THRESHOLD;\n\n    /// A stable mergesort with worst case `O(nlogn)` performance. This\n    /// particular implementation has `O(n)` complexity. 
Unfortunately, the\n    /// constant factor is pretty high.\n    ///\n    /// (See Rust's standard library `sort` function for a better mergesort\n    /// which uses unsafe, I think.)\n    pub fn sort<T: Clone + Ord>(xs: &mut [T]) {\n        let (len, mut width) = (xs.len(), 1);\n        let mut buf = xs.to_owned();\n        while width < len {\n            let mut start = 0;\n            while start < len {\n                let mid = min(len, start + width);\n                let end = min(len, start + 2 * width);\n                merge(xs, buf, start, mid, end);\n                start = start + 2 * width;\n            }\n            width = width * 2;\n            xs.copy_from(buf);\n        }\n    }\n\n    pub fn insertion<T: Clone + Ord>(xs: &mut [T]) {\n        let (len, mut width) = (xs.len(), INSERTION_THRESHOLD);\n        let mut i = 0;\n        while i < len {\n            let upto = min(len, i + INSERTION_THRESHOLD);\n            super::insertion(xs.mut_slice(i, upto));\n            i = i + INSERTION_THRESHOLD;\n        }\n\n        let mut buf = xs.to_owned();\n        while width < len {\n            let mut start = 0;\n            while start < len {\n                let mid = min(len, start + width);\n                let end = min(len, start + 2 * width);\n                merge(xs, buf, start, mid, end);\n                start = start + 2 * width;\n            }\n            width = width * 2;\n            xs.copy_from(buf);\n        }\n    }\n\n    fn merge<T: Clone + Ord>\n            (xs: &mut [T], buf: &mut [T], l: uint, r: uint, e: uint) {\n        let (mut il, mut ir) = (l, r);\n        let mut i = l;\n        while i < e {\n            if il < r && (ir >= e || xs[il] <= xs[ir]) {\n                buf[i] = xs[il].clone();\n                il = il + 1;\n            } else {\n                buf[i] = xs[ir].clone();\n                ir = ir + 1;\n            }\n            i = i + 1;\n        }\n    }\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1181,"cells":{"blob_id":{"kind":"string","value":"69824be9d4a6657364d2f9835d88edf36306243b"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"dalance/nom-greedyerror"},"path":{"kind":"string","value":"/examples/nom7.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":1122,"string":"1,122"},"score":{"kind":"number","value":2.8125,"string":"2.8125"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":["LicenseRef-scancode-unknown-license-reference","Apache-2.0","MIT"],"string":"[\n \"LicenseRef-scancode-unknown-license-reference\",\n \"Apache-2.0\",\n \"MIT\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"use nom7::branch::alt;\nuse nom7::character::complete::{alpha1, digit1};\nuse nom7::error::{ErrorKind, ParseError, VerboseError};\nuse nom7::sequence::tuple;\nuse nom7::Err::Error;\nuse nom7::IResult;\nuse nom_greedyerror::{error_position, GreedyError, Position};\nuse nom_locate4::LocatedSpan;\n\ntype Span<'a> = LocatedSpan<&'a str>;\n\nfn parser<'a, E: ParseError<Span<'a>>>(\n    input: Span<'a>,\n) -> IResult<Span<'a>, (Span<'a>, Span<'a>, Span<'a>), E> {\n    alt((\n        tuple((alpha1, digit1, alpha1)),\n        tuple((digit1, alpha1, digit1)),\n    ))(input)\n}\n\nfn main() {\n    // VerboseError failed at\n    //   abc012:::\n    //   ^\n    let error = parser::<VerboseError<Span>>(Span::new(\"abc012:::\"));\n    dbg!(&error);\n    match error {\n        Err(Error(e)) => assert_eq!(e.errors.first().map(|x| x.0.position()), Some(0)),\n        _ => (),\n    };\n\n    // GreedyError failed at\n    //   abc012:::\n    //         ^\n    let error = parser::<GreedyError<Span, ErrorKind>>(Span::new(\"abc012:::\"));\n    dbg!(&error);\n    match error {\n        Err(Error(e)) => assert_eq!(error_position(&e), Some(6)),\n        _ => (),\n    
};\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1182,"cells":{"blob_id":{"kind":"string","value":"a96a37f9547b1bf69e973f22f148f22517062db9"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"Tyler-Zhang/words-with-coworkers"},"path":{"kind":"string","value":"/words-game/src/error.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":1025,"string":"1,025"},"score":{"kind":"number","value":3.1875,"string":"3.1875"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"use std::error;\nuse std::fmt;\n\n#[derive(Debug)]\npub enum Error {\n BadAction(String),\n NotEnoughTiles,\n InvalidWord(String),\n StartingTileNotCovered,\n WordDoesNotIntersect,\n NoLettersUsed,\n}\n\nimpl fmt::Display for Error {\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n match *self {\n Error::BadAction(ref err) => write!(f, \"Bad Action error: {}\", err),\n Error::NotEnoughTiles => write!(f, \"Not enough tiles\"),\n Error::InvalidWord(ref word) => write!(f, \"Word <{}> not in the dictionary\", word),\n Error::StartingTileNotCovered => write!(f, \"Starting tile needs to be covered\"),\n Error::WordDoesNotIntersect => write!(f, \"Word does not intersect with another word\"),\n Error::NoLettersUsed => write!(f, \"You must use at least one letter\"),\n }\n }\n}\n\nimpl error::Error for Error {\n fn cause(&self) -> Option<&dyn error::Error> {\n Some(self)\n }\n}\n\npub type Result = std::result::Result>;\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1183,"cells":{"blob_id":{"kind":"string","value":"f991c37a6fb8194d58418eae04de737454fc9698"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"tiredhaydn/project_euler"},"path":{"kind":"string","value":"/src/bin/problem009/main.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":343,"string":"343"},"score":{"kind":"number","value":2.953125,"string":"2.953125"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"fn main() {\n let mut answer = 0;\n for a in 3..=998 {\n for b in 4..=997 {\n let c = 1000 - a - b;\n if a * a + b * b == c * c {\n let abc = a * b * c;\n if abc > answer {\n answer = abc;\n }\n }\n }\n }\n println!(\"{}\", answer);\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1184,"cells":{"blob_id":{"kind":"string","value":"132a4d55730bbebc5b9c22282d60101b60f86f0c"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"liang610/flux"},"path":{"kind":"string","value":"/libflux/src/flux/semantic/convert.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":118059,"string":"118,059"},"score":{"kind":"number","value":2.828125,"string":"2.828125"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"use crate::ast;\nuse crate::semantic::fresh::Fresher;\nuse crate::semantic::nodes::*;\nuse crate::semantic::types::MonoType;\nuse std::result;\n\npub 
type SemanticError = String;\npub type Result = result::Result;\n\n/// convert_with converts an AST package node to its semantic representation using\n/// the provided fresher.\n///\n/// Note: most external callers of this function will want to use the analyze()\n/// function in the libstd crate instead, which is aware of everything in the Flux stdlib and prelude.\n///\n/// The function explicitly moves the ast::Package because it adds information to it.\n/// We follow here the principle that every compilation step should be isolated and should add meaning\n/// to the previous one. In other terms, once one converts an AST he should not use it anymore.\n/// If one wants to do so, he should explicitly pkg.clone() and incur consciously in the memory\n/// overhead involved.\npub fn convert_with(pkg: ast::Package, fresher: &mut Fresher) -> Result {\n convert_package(pkg, fresher)\n // TODO(affo): run checks on the semantic graph.\n}\n\nfn convert_package(pkg: ast::Package, fresher: &mut Fresher) -> Result {\n let files = pkg\n .files\n .into_iter()\n .map(|f| convert_file(f, fresher))\n .collect::>>()?;\n Ok(Package {\n loc: pkg.base.location,\n package: pkg.package,\n files,\n })\n}\n\npub fn convert_file(file: ast::File, fresher: &mut Fresher) -> Result {\n let package = convert_package_clause(file.package, fresher)?;\n let imports = file\n .imports\n .into_iter()\n .map(|i| convert_import_declaration(i, fresher))\n .collect::>>()?;\n let body = file\n .body\n .into_iter()\n .map(|s| convert_statement(s, fresher))\n .collect::>>()?;\n Ok(File {\n loc: file.base.location,\n package,\n imports,\n body,\n })\n}\n\nfn convert_package_clause(\n pkg: Option,\n fresher: &mut Fresher,\n) -> Result> {\n if pkg.is_none() {\n return Ok(None);\n }\n let pkg = pkg.unwrap();\n let name = convert_identifier(pkg.name, fresher)?;\n Ok(Some(PackageClause {\n loc: pkg.base.location,\n name,\n }))\n}\n\nfn convert_import_declaration(\n imp: ast::ImportDeclaration,\n fresher: &mut Fresher,\n) -> Result {\n let alias = match imp.alias {\n None => None,\n Some(id) => Some(convert_identifier(id, fresher)?),\n };\n let path = convert_string_literal(imp.path, fresher)?;\n Ok(ImportDeclaration {\n loc: imp.base.location,\n alias,\n path,\n })\n}\n\nfn convert_statement(stmt: ast::Statement, fresher: &mut Fresher) -> Result {\n match stmt {\n ast::Statement::Option(s) => Ok(Statement::Option(Box::new(convert_option_statement(\n *s, fresher,\n )?))),\n ast::Statement::Builtin(s) => {\n Ok(Statement::Builtin(convert_builtin_statement(*s, fresher)?))\n }\n ast::Statement::Test(s) => Ok(Statement::Test(Box::new(convert_test_statement(\n *s, fresher,\n )?))),\n ast::Statement::Expr(s) => Ok(Statement::Expr(convert_expression_statement(*s, fresher)?)),\n ast::Statement::Return(s) => Ok(Statement::Return(convert_return_statement(*s, fresher)?)),\n // TODO(affo): we should fix this to include MemberAssignement.\n // The error lies in AST: the Statement enum does not include that.\n // This is not a problem when parsing, because we parse it only in the option assignment case,\n // and we return an OptionStmt, which is a Statement.\n ast::Statement::Variable(s) => Ok(Statement::Variable(Box::new(\n convert_variable_assignment(*s, fresher)?,\n ))),\n ast::Statement::Bad(_) => {\n Err(\"BadStatement is not supported in semantic analysis\".to_string())\n }\n }\n}\n\nfn convert_assignment(assign: ast::Assignment, fresher: &mut Fresher) -> Result {\n match assign {\n ast::Assignment::Variable(a) => 
Ok(Assignment::Variable(convert_variable_assignment(\n *a, fresher,\n )?)),\n ast::Assignment::Member(a) => {\n Ok(Assignment::Member(convert_member_assignment(*a, fresher)?))\n }\n }\n}\n\nfn convert_option_statement(stmt: ast::OptionStmt, fresher: &mut Fresher) -> Result {\n Ok(OptionStmt {\n loc: stmt.base.location,\n assignment: convert_assignment(stmt.assignment, fresher)?,\n })\n}\n\nfn convert_builtin_statement(stmt: ast::BuiltinStmt, fresher: &mut Fresher) -> Result {\n Ok(BuiltinStmt {\n loc: stmt.base.location,\n id: convert_identifier(stmt.id, fresher)?,\n })\n}\n\nfn convert_test_statement(stmt: ast::TestStmt, fresher: &mut Fresher) -> Result {\n Ok(TestStmt {\n loc: stmt.base.location,\n assignment: convert_variable_assignment(stmt.assignment, fresher)?,\n })\n}\n\nfn convert_expression_statement(stmt: ast::ExprStmt, fresher: &mut Fresher) -> Result {\n Ok(ExprStmt {\n loc: stmt.base.location,\n expression: convert_expression(stmt.expression, fresher)?,\n })\n}\n\nfn convert_return_statement(stmt: ast::ReturnStmt, fresher: &mut Fresher) -> Result {\n Ok(ReturnStmt {\n loc: stmt.base.location,\n argument: convert_expression(stmt.argument, fresher)?,\n })\n}\n\nfn convert_variable_assignment(\n stmt: ast::VariableAssgn,\n fresher: &mut Fresher,\n) -> Result {\n Ok(VariableAssgn::new(\n convert_identifier(stmt.id, fresher)?,\n convert_expression(stmt.init, fresher)?,\n stmt.base.location,\n ))\n}\n\nfn convert_member_assignment(stmt: ast::MemberAssgn, fresher: &mut Fresher) -> Result {\n Ok(MemberAssgn {\n loc: stmt.base.location,\n member: convert_member_expression(stmt.member, fresher)?,\n init: convert_expression(stmt.init, fresher)?,\n })\n}\n\nfn convert_expression(expr: ast::Expression, fresher: &mut Fresher) -> Result {\n match expr {\n ast::Expression::Function(expr) => Ok(Expression::Function(Box::new(convert_function_expression(*expr, fresher)?))),\n ast::Expression::Call(expr) => Ok(Expression::Call(Box::new(convert_call_expression(*expr, fresher)?))),\n ast::Expression::Member(expr) => Ok(Expression::Member(Box::new(convert_member_expression(*expr, fresher)?))),\n ast::Expression::Index(expr) => Ok(Expression::Index(Box::new(convert_index_expression(*expr, fresher)?))),\n ast::Expression::PipeExpr(expr) => Ok(Expression::Call(Box::new(convert_pipe_expression(*expr, fresher)?))),\n ast::Expression::Binary(expr) => Ok(Expression::Binary(Box::new(convert_binary_expression(*expr, fresher)?))),\n ast::Expression::Unary(expr) => Ok(Expression::Unary(Box::new(convert_unary_expression(*expr, fresher)?))),\n ast::Expression::Logical(expr) => Ok(Expression::Logical(Box::new(convert_logical_expression(*expr, fresher)?))),\n ast::Expression::Conditional(expr) => Ok(Expression::Conditional(Box::new(convert_conditional_expression(*expr, fresher)?))),\n ast::Expression::Object(expr) => Ok(Expression::Object(Box::new(convert_object_expression(*expr, fresher)?))),\n ast::Expression::Array(expr) => Ok(Expression::Array(Box::new(convert_array_expression(*expr, fresher)?))),\n ast::Expression::Identifier(expr) => Ok(Expression::Identifier(convert_identifier_expression(expr, fresher)?)),\n ast::Expression::StringExpr(expr) => Ok(Expression::StringExpr(Box::new(convert_string_expression(*expr, fresher)?))),\n ast::Expression::Paren(expr) => convert_expression(expr.expression, fresher),\n ast::Expression::StringLit(lit) => Ok(Expression::StringLit(convert_string_literal(lit, fresher)?)),\n ast::Expression::Boolean(lit) => Ok(Expression::Boolean(convert_boolean_literal(lit, fresher)?)),\n 
ast::Expression::Float(lit) => Ok(Expression::Float(convert_float_literal(lit, fresher)?)),\n ast::Expression::Integer(lit) => Ok(Expression::Integer(convert_integer_literal(lit, fresher)?)),\n ast::Expression::Uint(lit) => Ok(Expression::Uint(convert_unsigned_integer_literal(lit, fresher)?)),\n ast::Expression::Regexp(lit) => Ok(Expression::Regexp(convert_regexp_literal(lit, fresher)?)),\n ast::Expression::Duration(lit) => Ok(Expression::Duration(convert_duration_literal(lit, fresher)?)),\n ast::Expression::DateTime(lit) => Ok(Expression::DateTime(convert_date_time_literal(lit, fresher)?)),\n ast::Expression::PipeLit(_) => Err(\"a pipe literal may only be used as a default value for an argument in a function definition\".to_string()),\n ast::Expression::Bad(_) => Err(\"BadExpression is not supported in semantic analysis\".to_string())\n }\n}\n\nfn convert_function_expression(\n expr: ast::FunctionExpr,\n fresher: &mut Fresher,\n) -> Result {\n let params = convert_function_params(expr.params, fresher)?;\n let body = convert_function_body(expr.body, fresher)?;\n Ok(FunctionExpr {\n loc: expr.base.location,\n typ: MonoType::Var(fresher.fresh()),\n params,\n body,\n })\n}\n\nfn convert_function_params(\n props: Vec,\n fresher: &mut Fresher,\n) -> Result> {\n // The iteration here is complex, cannot use iter().map()..., better to write it explicitly.\n let mut params: Vec = Vec::new();\n let mut piped = false;\n for prop in props {\n let id = match prop.key {\n ast::PropertyKey::Identifier(id) => Ok(id),\n _ => Err(\"function params must be identifiers\".to_string()),\n }?;\n let key = convert_identifier(id, fresher)?;\n let mut default: Option = None;\n let mut is_pipe = false;\n if let Some(expr) = prop.value {\n match expr {\n ast::Expression::PipeLit(_) => {\n if piped {\n return Err(\"only a single argument may be piped\".to_string());\n } else {\n piped = true;\n is_pipe = true;\n };\n }\n e => default = Some(convert_expression(e, fresher)?),\n }\n };\n params.push(FunctionParameter {\n loc: prop.base.location,\n is_pipe,\n key,\n default,\n });\n }\n Ok(params)\n}\n\nfn convert_function_body(body: ast::FunctionBody, fresher: &mut Fresher) -> Result {\n match body {\n ast::FunctionBody::Expr(expr) => {\n let argument = convert_expression(expr, fresher)?;\n Ok(Block::Return(ReturnStmt {\n loc: argument.loc().clone(),\n argument,\n }))\n }\n ast::FunctionBody::Block(block) => Ok(convert_block(block, fresher)?),\n }\n}\n\nfn convert_block(block: ast::Block, fresher: &mut Fresher) -> Result {\n let mut body = block.body.into_iter().rev();\n\n let block = if let Some(ast::Statement::Return(stmt)) = body.next() {\n let argument = convert_expression(stmt.argument, fresher)?;\n Block::Return(ReturnStmt {\n loc: stmt.base.location.clone(),\n argument,\n })\n } else {\n return Err(\"missing return statement in block\".to_string());\n };\n\n body.try_fold(block, |acc, s| match s {\n ast::Statement::Variable(dec) => Ok(Block::Variable(\n Box::new(convert_variable_assignment(*dec, fresher)?),\n Box::new(acc),\n )),\n ast::Statement::Expr(stmt) => Ok(Block::Expr(\n convert_expression_statement(*stmt, fresher)?,\n Box::new(acc),\n )),\n _ => Err(format!(\"invalid statement in function block {:#?}\", s)),\n })\n}\n\nfn convert_call_expression(expr: ast::CallExpr, fresher: &mut Fresher) -> Result {\n let callee = convert_expression(expr.callee, fresher)?;\n // TODO(affo): I'd prefer these checks to be in ast.Check().\n if expr.arguments.len() > 1 {\n return Err(\"arguments are more than one object 
expression\".to_string());\n }\n let mut args = expr\n .arguments\n .into_iter()\n .map(|a| match a {\n ast::Expression::Object(obj) => convert_object_expression(*obj, fresher),\n _ => Err(\"arguments not an object expression\".to_string()),\n })\n .collect::>>()?;\n let arguments = match args.len() {\n 0 => Ok(Vec::new()),\n 1 => Ok(args.pop().expect(\"there must be 1 element\").properties),\n _ => Err(\"arguments are more than one object expression\".to_string()),\n }?;\n Ok(CallExpr {\n loc: expr.base.location,\n typ: MonoType::Var(fresher.fresh()),\n callee,\n arguments,\n pipe: None,\n })\n}\n\nfn convert_member_expression(expr: ast::MemberExpr, fresher: &mut Fresher) -> Result {\n let object = convert_expression(expr.object, fresher)?;\n let property = match expr.property {\n ast::PropertyKey::Identifier(id) => id.name,\n ast::PropertyKey::StringLit(lit) => lit.value,\n };\n Ok(MemberExpr {\n loc: expr.base.location,\n typ: MonoType::Var(fresher.fresh()),\n object,\n property,\n })\n}\n\nfn convert_index_expression(expr: ast::IndexExpr, fresher: &mut Fresher) -> Result {\n let array = convert_expression(expr.array, fresher)?;\n let index = convert_expression(expr.index, fresher)?;\n Ok(IndexExpr {\n loc: expr.base.location,\n typ: MonoType::Var(fresher.fresh()),\n array,\n index,\n })\n}\n\nfn convert_pipe_expression(expr: ast::PipeExpr, fresher: &mut Fresher) -> Result {\n let mut call = convert_call_expression(expr.call, fresher)?;\n let pipe = convert_expression(expr.argument, fresher)?;\n call.pipe = Some(pipe);\n Ok(call)\n}\n\nfn convert_binary_expression(expr: ast::BinaryExpr, fresher: &mut Fresher) -> Result {\n let left = convert_expression(expr.left, fresher)?;\n let right = convert_expression(expr.right, fresher)?;\n Ok(BinaryExpr {\n loc: expr.base.location,\n typ: MonoType::Var(fresher.fresh()),\n operator: expr.operator,\n left,\n right,\n })\n}\n\nfn convert_unary_expression(expr: ast::UnaryExpr, fresher: &mut Fresher) -> Result {\n let argument = convert_expression(expr.argument, fresher)?;\n Ok(UnaryExpr {\n loc: expr.base.location,\n typ: MonoType::Var(fresher.fresh()),\n operator: expr.operator,\n argument,\n })\n}\n\nfn convert_logical_expression(\n expr: ast::LogicalExpr,\n fresher: &mut Fresher,\n) -> Result {\n let left = convert_expression(expr.left, fresher)?;\n let right = convert_expression(expr.right, fresher)?;\n Ok(LogicalExpr {\n loc: expr.base.location,\n typ: MonoType::Var(fresher.fresh()),\n operator: expr.operator,\n left,\n right,\n })\n}\n\nfn convert_conditional_expression(\n expr: ast::ConditionalExpr,\n fresher: &mut Fresher,\n) -> Result {\n let test = convert_expression(expr.test, fresher)?;\n let consequent = convert_expression(expr.consequent, fresher)?;\n let alternate = convert_expression(expr.alternate, fresher)?;\n Ok(ConditionalExpr {\n loc: expr.base.location,\n typ: MonoType::Var(fresher.fresh()),\n test,\n consequent,\n alternate,\n })\n}\n\nfn convert_object_expression(expr: ast::ObjectExpr, fresher: &mut Fresher) -> Result {\n let properties = expr\n .properties\n .into_iter()\n .map(|p| convert_property(p, fresher))\n .collect::>>()?;\n let with = match expr.with {\n Some(with) => Some(convert_identifier_expression(with.source, fresher)?),\n None => None,\n };\n Ok(ObjectExpr {\n loc: expr.base.location,\n typ: MonoType::Var(fresher.fresh()),\n with,\n properties,\n })\n}\n\nfn convert_property(prop: ast::Property, fresher: &mut Fresher) -> Result {\n let key = match prop.key {\n ast::PropertyKey::Identifier(id) => 
convert_identifier(id, fresher)?,\n ast::PropertyKey::StringLit(lit) => Identifier {\n loc: lit.base.location.clone(),\n name: convert_string_literal(lit, fresher)?.value,\n },\n };\n let value = match prop.value {\n Some(expr) => convert_expression(expr, fresher)?,\n None => Expression::Identifier(IdentifierExpr {\n loc: key.loc.clone(),\n typ: MonoType::Var(fresher.fresh()),\n name: key.name.clone(),\n }),\n };\n Ok(Property {\n loc: prop.base.location,\n key,\n value,\n })\n}\n\nfn convert_array_expression(expr: ast::ArrayExpr, fresher: &mut Fresher) -> Result {\n let elements = expr\n .elements\n .into_iter()\n .map(|e| convert_expression(e.expression, fresher))\n .collect::>>()?;\n Ok(ArrayExpr {\n loc: expr.base.location,\n typ: MonoType::Var(fresher.fresh()),\n elements,\n })\n}\n\nfn convert_identifier(id: ast::Identifier, _fresher: &mut Fresher) -> Result {\n Ok(Identifier {\n loc: id.base.location,\n name: id.name,\n })\n}\n\nfn convert_identifier_expression(\n id: ast::Identifier,\n fresher: &mut Fresher,\n) -> Result {\n Ok(IdentifierExpr {\n loc: id.base.location,\n typ: MonoType::Var(fresher.fresh()),\n name: id.name,\n })\n}\n\nfn convert_string_expression(expr: ast::StringExpr, fresher: &mut Fresher) -> Result {\n let parts = expr\n .parts\n .into_iter()\n .map(|p| convert_string_expression_part(p, fresher))\n .collect::>>()?;\n Ok(StringExpr {\n loc: expr.base.location,\n typ: MonoType::Var(fresher.fresh()),\n parts,\n })\n}\n\nfn convert_string_expression_part(\n expr: ast::StringExprPart,\n fresher: &mut Fresher,\n) -> Result {\n match expr {\n ast::StringExprPart::Text(txt) => Ok(StringExprPart::Text(TextPart {\n loc: txt.base.location,\n value: txt.value,\n })),\n ast::StringExprPart::Interpolated(itp) => {\n Ok(StringExprPart::Interpolated(InterpolatedPart {\n loc: itp.base.location,\n expression: convert_expression(itp.expression, fresher)?,\n }))\n }\n }\n}\n\nfn convert_string_literal(lit: ast::StringLit, fresher: &mut Fresher) -> Result {\n Ok(StringLit {\n loc: lit.base.location,\n typ: MonoType::Var(fresher.fresh()),\n value: lit.value,\n })\n}\n\nfn convert_boolean_literal(lit: ast::BooleanLit, fresher: &mut Fresher) -> Result {\n Ok(BooleanLit {\n loc: lit.base.location,\n typ: MonoType::Var(fresher.fresh()),\n value: lit.value,\n })\n}\n\nfn convert_float_literal(lit: ast::FloatLit, fresher: &mut Fresher) -> Result {\n Ok(FloatLit {\n loc: lit.base.location,\n typ: MonoType::Var(fresher.fresh()),\n value: lit.value,\n })\n}\n\nfn convert_integer_literal(lit: ast::IntegerLit, fresher: &mut Fresher) -> Result {\n Ok(IntegerLit {\n loc: lit.base.location,\n typ: MonoType::Var(fresher.fresh()),\n value: lit.value,\n })\n}\n\nfn convert_unsigned_integer_literal(lit: ast::UintLit, fresher: &mut Fresher) -> Result {\n Ok(UintLit {\n loc: lit.base.location,\n typ: MonoType::Var(fresher.fresh()),\n value: lit.value,\n })\n}\n\nfn convert_regexp_literal(lit: ast::RegexpLit, fresher: &mut Fresher) -> Result {\n Ok(RegexpLit {\n loc: lit.base.location,\n typ: MonoType::Var(fresher.fresh()),\n value: lit.value,\n })\n}\n\nfn convert_duration_literal(lit: ast::DurationLit, fresher: &mut Fresher) -> Result {\n Ok(DurationLit {\n loc: lit.base.location,\n typ: MonoType::Var(fresher.fresh()),\n value: convert_duration(&lit.values)?,\n })\n}\n\nfn convert_date_time_literal(lit: ast::DateTimeLit, fresher: &mut Fresher) -> Result {\n Ok(DateTimeLit {\n loc: lit.base.location,\n typ: MonoType::Var(fresher.fresh()),\n value: lit.value,\n })\n}\n\n// In these tests we test the 
results of semantic analysis on some ASTs.\n// NOTE: we do not care about locations.\n// We create a default base node and clone it in various AST nodes.\n#[cfg(test)]\nmod tests {\n use super::*;\n use crate::semantic::fresh;\n use crate::semantic::types::{MonoType, Tvar};\n use pretty_assertions::assert_eq;\n\n // type_info() is used for the expected semantic graph.\n // The id for the Tvar does not matter, because that is not compared.\n fn type_info() -> MonoType {\n MonoType::Var(Tvar(0))\n }\n\n fn test_convert(pkg: ast::Package) -> Result {\n convert_with(pkg, &mut fresh::Fresher::default())\n }\n\n #[test]\n fn test_convert_empty() {\n let b = ast::BaseNode::default();\n let pkg = ast::Package {\n base: b.clone(),\n path: \"path\".to_string(),\n package: \"main\".to_string(),\n files: Vec::new(),\n };\n let want = Package {\n loc: b.location.clone(),\n package: \"main\".to_string(),\n files: Vec::new(),\n };\n let got = test_convert(pkg).unwrap();\n assert_eq!(want, got);\n }\n\n #[test]\n fn test_convert_package() {\n let b = ast::BaseNode::default();\n let pkg = ast::Package {\n base: b.clone(),\n path: \"path\".to_string(),\n package: \"main\".to_string(),\n files: vec![ast::File {\n base: b.clone(),\n name: \"foo.flux\".to_string(),\n metadata: String::new(),\n package: Some(ast::PackageClause {\n base: b.clone(),\n name: ast::Identifier {\n base: b.clone(),\n name: \"foo\".to_string(),\n },\n }),\n imports: Vec::new(),\n body: Vec::new(),\n eof: None,\n }],\n };\n let want = Package {\n loc: b.location.clone(),\n package: \"main\".to_string(),\n files: vec![File {\n loc: b.location.clone(),\n package: Some(PackageClause {\n loc: b.location.clone(),\n name: Identifier {\n loc: b.location.clone(),\n name: \"foo\".to_string(),\n },\n }),\n imports: Vec::new(),\n body: Vec::new(),\n }],\n };\n let got = test_convert(pkg).unwrap();\n assert_eq!(want, got);\n }\n\n #[test]\n fn test_convert_imports() {\n let b = ast::BaseNode::default();\n let pkg = ast::Package {\n base: b.clone(),\n path: \"path\".to_string(),\n package: \"main\".to_string(),\n files: vec![ast::File {\n base: b.clone(),\n name: \"foo.flux\".to_string(),\n metadata: String::new(),\n package: Some(ast::PackageClause {\n base: b.clone(),\n name: ast::Identifier {\n base: b.clone(),\n name: \"foo\".to_string(),\n },\n }),\n imports: vec![\n ast::ImportDeclaration {\n base: b.clone(),\n path: ast::StringLit {\n base: b.clone(),\n value: \"path/foo\".to_string(),\n },\n alias: None,\n },\n ast::ImportDeclaration {\n base: b.clone(),\n path: ast::StringLit {\n base: b.clone(),\n value: \"path/bar\".to_string(),\n },\n alias: Some(ast::Identifier {\n base: b.clone(),\n name: \"b\".to_string(),\n }),\n },\n ],\n body: Vec::new(),\n eof: None,\n }],\n };\n let want = Package {\n loc: b.location.clone(),\n package: \"main\".to_string(),\n files: vec![File {\n loc: b.location.clone(),\n package: Some(PackageClause {\n loc: b.location.clone(),\n name: Identifier {\n loc: b.location.clone(),\n name: \"foo\".to_string(),\n },\n }),\n imports: vec![\n ImportDeclaration {\n loc: b.location.clone(),\n path: StringLit {\n loc: b.location.clone(),\n typ: type_info(),\n value: \"path/foo\".to_string(),\n },\n alias: None,\n },\n ImportDeclaration {\n loc: b.location.clone(),\n path: StringLit {\n loc: b.location.clone(),\n typ: type_info(),\n value: \"path/bar\".to_string(),\n },\n alias: Some(Identifier {\n loc: b.location.clone(),\n name: \"b\".to_string(),\n }),\n },\n ],\n body: Vec::new(),\n }],\n };\n let got = 
test_convert(pkg).unwrap();\n assert_eq!(want, got);\n }\n\n #[test]\n fn test_convert_var_assignment() {\n let b = ast::BaseNode::default();\n let pkg = ast::Package {\n base: b.clone(),\n path: \"path\".to_string(),\n package: \"main\".to_string(),\n files: vec![ast::File {\n base: b.clone(),\n name: \"foo.flux\".to_string(),\n metadata: String::new(),\n package: None,\n imports: Vec::new(),\n body: vec![\n ast::Statement::Variable(Box::new(ast::VariableAssgn {\n base: b.clone(),\n id: ast::Identifier {\n base: b.clone(),\n name: \"a\".to_string(),\n },\n init: ast::Expression::Boolean(ast::BooleanLit {\n base: b.clone(),\n value: true,\n }),\n })),\n ast::Statement::Expr(Box::new(ast::ExprStmt {\n base: b.clone(),\n expression: ast::Expression::Identifier(ast::Identifier {\n base: b.clone(),\n name: \"a\".to_string(),\n }),\n })),\n ],\n eof: None,\n }],\n };\n let want = Package {\n loc: b.location.clone(),\n package: \"main\".to_string(),\n files: vec![File {\n loc: b.location.clone(),\n package: None,\n imports: Vec::new(),\n body: vec![\n Statement::Variable(Box::new(VariableAssgn::new(\n Identifier {\n loc: b.location.clone(),\n name: \"a\".to_string(),\n },\n Expression::Boolean(BooleanLit {\n loc: b.location.clone(),\n typ: type_info(),\n value: true,\n }),\n b.location.clone(),\n ))),\n Statement::Expr(ExprStmt {\n loc: b.location.clone(),\n expression: Expression::Identifier(IdentifierExpr {\n loc: b.location.clone(),\n typ: type_info(),\n name: \"a\".to_string(),\n }),\n }),\n ],\n }],\n };\n let got = test_convert(pkg).unwrap();\n assert_eq!(want, got);\n }\n\n #[test]\n fn test_convert_object() {\n let b = ast::BaseNode::default();\n let pkg = ast::Package {\n base: b.clone(),\n path: \"path\".to_string(),\n package: \"main\".to_string(),\n files: vec![ast::File {\n base: b.clone(),\n name: \"foo.flux\".to_string(),\n metadata: String::new(),\n package: None,\n imports: Vec::new(),\n body: vec![ast::Statement::Expr(Box::new(ast::ExprStmt {\n base: b.clone(),\n expression: ast::Expression::Object(Box::new(ast::ObjectExpr {\n base: b.clone(),\n lbrace: None,\n with: None,\n properties: vec![ast::Property {\n base: b.clone(),\n key: ast::PropertyKey::Identifier(ast::Identifier {\n base: b.clone(),\n name: \"a\".to_string(),\n }),\n separator: None,\n value: Some(ast::Expression::Integer(ast::IntegerLit {\n base: b.clone(),\n value: 10,\n })),\n comma: None,\n }],\n rbrace: None,\n })),\n }))],\n eof: None,\n }],\n };\n let want = Package {\n loc: b.location.clone(),\n package: \"main\".to_string(),\n files: vec![File {\n loc: b.location.clone(),\n package: None,\n imports: Vec::new(),\n body: vec![Statement::Expr(ExprStmt {\n loc: b.location.clone(),\n expression: Expression::Object(Box::new(ObjectExpr {\n loc: b.location.clone(),\n typ: type_info(),\n with: None,\n properties: vec![Property {\n loc: b.location.clone(),\n key: Identifier {\n loc: b.location.clone(),\n name: \"a\".to_string(),\n },\n value: Expression::Integer(IntegerLit {\n loc: b.location.clone(),\n typ: type_info(),\n value: 10,\n }),\n }],\n })),\n })],\n }],\n };\n let got = test_convert(pkg).unwrap();\n assert_eq!(want, got);\n }\n\n #[test]\n fn test_convert_object_with_string_key() {\n let b = ast::BaseNode::default();\n let pkg = ast::Package {\n base: b.clone(),\n path: \"path\".to_string(),\n package: \"main\".to_string(),\n files: vec![ast::File {\n base: b.clone(),\n name: \"foo.flux\".to_string(),\n metadata: String::new(),\n package: None,\n imports: Vec::new(),\n body: 
vec![ast::Statement::Expr(Box::new(ast::ExprStmt {\n base: b.clone(),\n expression: ast::Expression::Object(Box::new(ast::ObjectExpr {\n base: b.clone(),\n lbrace: None,\n with: None,\n properties: vec![ast::Property {\n base: b.clone(),\n key: ast::PropertyKey::StringLit(ast::StringLit {\n base: b.clone(),\n value: \"a\".to_string(),\n }),\n separator: None,\n value: Some(ast::Expression::Integer(ast::IntegerLit {\n base: b.clone(),\n value: 10,\n })),\n comma: None,\n }],\n rbrace: None,\n })),\n }))],\n eof: None,\n }],\n };\n let want = Package {\n loc: b.location.clone(),\n package: \"main\".to_string(),\n files: vec![File {\n loc: b.location.clone(),\n package: None,\n imports: Vec::new(),\n body: vec![Statement::Expr(ExprStmt {\n loc: b.location.clone(),\n expression: Expression::Object(Box::new(ObjectExpr {\n loc: b.location.clone(),\n typ: type_info(),\n with: None,\n properties: vec![Property {\n loc: b.location.clone(),\n key: Identifier {\n loc: b.location.clone(),\n name: \"a\".to_string(),\n },\n value: Expression::Integer(IntegerLit {\n loc: b.location.clone(),\n typ: type_info(),\n value: 10,\n }),\n }],\n })),\n })],\n }],\n };\n let got = test_convert(pkg).unwrap();\n assert_eq!(want, got);\n }\n\n #[test]\n fn test_convert_object_with_mixed_keys() {\n let b = ast::BaseNode::default();\n let pkg = ast::Package {\n base: b.clone(),\n path: \"path\".to_string(),\n package: \"main\".to_string(),\n files: vec![ast::File {\n base: b.clone(),\n name: \"foo.flux\".to_string(),\n metadata: String::new(),\n package: None,\n imports: Vec::new(),\n body: vec![ast::Statement::Expr(Box::new(ast::ExprStmt {\n base: b.clone(),\n expression: ast::Expression::Object(Box::new(ast::ObjectExpr {\n base: b.clone(),\n lbrace: None,\n with: None,\n properties: vec![\n ast::Property {\n base: b.clone(),\n key: ast::PropertyKey::StringLit(ast::StringLit {\n base: b.clone(),\n value: \"a\".to_string(),\n }),\n separator: None,\n value: Some(ast::Expression::Integer(ast::IntegerLit {\n base: b.clone(),\n value: 10,\n })),\n comma: None,\n },\n ast::Property {\n base: b.clone(),\n key: ast::PropertyKey::Identifier(ast::Identifier {\n base: b.clone(),\n name: \"b\".to_string(),\n }),\n separator: None,\n value: Some(ast::Expression::Integer(ast::IntegerLit {\n base: b.clone(),\n value: 11,\n })),\n comma: None,\n },\n ],\n rbrace: None,\n })),\n }))],\n eof: None,\n }],\n };\n let want = Package {\n loc: b.location.clone(),\n package: \"main\".to_string(),\n files: vec![File {\n loc: b.location.clone(),\n package: None,\n imports: Vec::new(),\n body: vec![Statement::Expr(ExprStmt {\n loc: b.location.clone(),\n expression: Expression::Object(Box::new(ObjectExpr {\n loc: b.location.clone(),\n typ: type_info(),\n with: None,\n properties: vec![\n Property {\n loc: b.location.clone(),\n key: Identifier {\n loc: b.location.clone(),\n name: \"a\".to_string(),\n },\n value: Expression::Integer(IntegerLit {\n loc: b.location.clone(),\n typ: type_info(),\n value: 10,\n }),\n },\n Property {\n loc: b.location.clone(),\n key: Identifier {\n loc: b.location.clone(),\n name: \"b\".to_string(),\n },\n value: Expression::Integer(IntegerLit {\n loc: b.location.clone(),\n typ: type_info(),\n value: 11,\n }),\n },\n ],\n })),\n })],\n }],\n };\n let got = test_convert(pkg).unwrap();\n assert_eq!(want, got);\n }\n\n #[test]\n fn test_convert_object_with_implicit_keys() {\n let b = ast::BaseNode::default();\n let pkg = ast::Package {\n base: b.clone(),\n path: \"path\".to_string(),\n package: \"main\".to_string(),\n 
files: vec![ast::File {\n base: b.clone(),\n name: \"foo.flux\".to_string(),\n metadata: String::new(),\n package: None,\n imports: Vec::new(),\n body: vec![ast::Statement::Expr(Box::new(ast::ExprStmt {\n base: b.clone(),\n expression: ast::Expression::Object(Box::new(ast::ObjectExpr {\n base: b.clone(),\n lbrace: None,\n with: None,\n properties: vec![\n ast::Property {\n base: b.clone(),\n key: ast::PropertyKey::Identifier(ast::Identifier {\n base: b.clone(),\n name: \"a\".to_string(),\n }),\n separator: None,\n value: None,\n comma: None,\n },\n ast::Property {\n base: b.clone(),\n key: ast::PropertyKey::Identifier(ast::Identifier {\n base: b.clone(),\n name: \"b\".to_string(),\n }),\n separator: None,\n value: None,\n comma: None,\n },\n ],\n rbrace: None,\n })),\n }))],\n eof: None,\n }],\n };\n let want = Package {\n loc: b.location.clone(),\n package: \"main\".to_string(),\n files: vec![File {\n loc: b.location.clone(),\n package: None,\n imports: Vec::new(),\n body: vec![Statement::Expr(ExprStmt {\n loc: b.location.clone(),\n expression: Expression::Object(Box::new(ObjectExpr {\n loc: b.location.clone(),\n typ: type_info(),\n with: None,\n properties: vec![\n Property {\n loc: b.location.clone(),\n key: Identifier {\n loc: b.location.clone(),\n name: \"a\".to_string(),\n },\n value: Expression::Identifier(IdentifierExpr {\n loc: b.location.clone(),\n typ: type_info(),\n name: \"a\".to_string(),\n }),\n },\n Property {\n loc: b.location.clone(),\n key: Identifier {\n loc: b.location.clone(),\n name: \"b\".to_string(),\n },\n value: Expression::Identifier(IdentifierExpr {\n loc: b.location.clone(),\n typ: type_info(),\n name: \"b\".to_string(),\n }),\n },\n ],\n })),\n })],\n }],\n };\n let got = test_convert(pkg).unwrap();\n assert_eq!(want, got);\n }\n\n #[test]\n fn test_convert_options_declaration() {\n let b = ast::BaseNode::default();\n let pkg = ast::Package {\n base: b.clone(),\n path: \"path\".to_string(),\n package: \"main\".to_string(),\n files: vec![ast::File {\n base: b.clone(),\n name: \"foo.flux\".to_string(),\n metadata: String::new(),\n package: None,\n imports: Vec::new(),\n body: vec![ast::Statement::Option(Box::new(ast::OptionStmt {\n base: b.clone(),\n assignment: ast::Assignment::Variable(Box::new(ast::VariableAssgn {\n base: b.clone(),\n id: ast::Identifier {\n base: b.clone(),\n name: \"task\".to_string(),\n },\n init: ast::Expression::Object(Box::new(ast::ObjectExpr {\n base: b.clone(),\n lbrace: None,\n with: None,\n properties: vec![\n ast::Property {\n base: b.clone(),\n key: ast::PropertyKey::Identifier(ast::Identifier {\n base: b.clone(),\n name: \"name\".to_string(),\n }),\n separator: None,\n value: Some(ast::Expression::StringLit(ast::StringLit {\n base: b.clone(),\n value: \"foo\".to_string(),\n })),\n comma: None,\n },\n ast::Property {\n base: b.clone(),\n key: ast::PropertyKey::Identifier(ast::Identifier {\n base: b.clone(),\n name: \"every\".to_string(),\n }),\n separator: None,\n value: Some(ast::Expression::Duration(ast::DurationLit {\n base: b.clone(),\n values: vec![ast::Duration {\n magnitude: 1,\n unit: \"h\".to_string(),\n }],\n })),\n comma: None,\n },\n ast::Property {\n base: b.clone(),\n key: ast::PropertyKey::Identifier(ast::Identifier {\n base: b.clone(),\n name: \"delay\".to_string(),\n }),\n separator: None,\n value: Some(ast::Expression::Duration(ast::DurationLit {\n base: b.clone(),\n values: vec![ast::Duration {\n magnitude: 10,\n unit: \"m\".to_string(),\n }],\n })),\n comma: None,\n },\n ast::Property {\n base: b.clone(),\n 
key: ast::PropertyKey::Identifier(ast::Identifier {\n base: b.clone(),\n name: \"cron\".to_string(),\n }),\n separator: None,\n value: Some(ast::Expression::StringLit(ast::StringLit {\n base: b.clone(),\n value: \"0 2 * * *\".to_string(),\n })),\n comma: None,\n },\n ast::Property {\n base: b.clone(),\n key: ast::PropertyKey::Identifier(ast::Identifier {\n base: b.clone(),\n name: \"retry\".to_string(),\n }),\n separator: None,\n value: Some(ast::Expression::Integer(ast::IntegerLit {\n base: b.clone(),\n value: 5,\n })),\n comma: None,\n },\n ],\n rbrace: None,\n })),\n })),\n }))],\n eof: None,\n }],\n };\n let want = Package {\n loc: b.location.clone(),\n package: \"main\".to_string(),\n files: vec![File {\n loc: b.location.clone(),\n package: None,\n imports: Vec::new(),\n body: vec![Statement::Option(Box::new(OptionStmt {\n loc: b.location.clone(),\n assignment: Assignment::Variable(VariableAssgn::new(\n Identifier {\n loc: b.location.clone(),\n name: \"task\".to_string(),\n },\n Expression::Object(Box::new(ObjectExpr {\n loc: b.location.clone(),\n typ: type_info(),\n with: None,\n properties: vec![\n Property {\n loc: b.location.clone(),\n key: Identifier {\n loc: b.location.clone(),\n name: \"name\".to_string(),\n },\n value: Expression::StringLit(StringLit {\n loc: b.location.clone(),\n typ: type_info(),\n value: \"foo\".to_string(),\n }),\n },\n Property {\n loc: b.location.clone(),\n key: Identifier {\n loc: b.location.clone(),\n name: \"every\".to_string(),\n },\n value: Expression::Duration(DurationLit {\n loc: b.location.clone(),\n typ: type_info(),\n value: Duration {\n months: 5,\n nanoseconds: 5000,\n negative: false,\n },\n }),\n },\n Property {\n loc: b.location.clone(),\n key: Identifier {\n loc: b.location.clone(),\n name: \"delay\".to_string(),\n },\n value: Expression::Duration(DurationLit {\n loc: b.location.clone(),\n typ: type_info(),\n value: Duration {\n months: 1,\n nanoseconds: 50,\n negative: true,\n },\n }),\n },\n Property {\n loc: b.location.clone(),\n key: Identifier {\n loc: b.location.clone(),\n name: \"cron\".to_string(),\n },\n value: Expression::StringLit(StringLit {\n loc: b.location.clone(),\n typ: type_info(),\n value: \"0 2 * * *\".to_string(),\n }),\n },\n Property {\n loc: b.location.clone(),\n key: Identifier {\n loc: b.location.clone(),\n name: \"retry\".to_string(),\n },\n value: Expression::Integer(IntegerLit {\n loc: b.location.clone(),\n typ: type_info(),\n value: 5,\n }),\n },\n ],\n })),\n b.location.clone(),\n )),\n }))],\n }],\n };\n let got = test_convert(pkg).unwrap();\n assert_eq!(want, got);\n }\n\n #[test]\n fn test_convert_qualified_option_statement() {\n let b = ast::BaseNode::default();\n let pkg = ast::Package {\n base: b.clone(),\n path: \"path\".to_string(),\n package: \"main\".to_string(),\n files: vec![ast::File {\n base: b.clone(),\n name: \"foo.flux\".to_string(),\n metadata: String::new(),\n package: None,\n imports: Vec::new(),\n body: vec![ast::Statement::Option(Box::new(ast::OptionStmt {\n base: b.clone(),\n assignment: ast::Assignment::Member(Box::new(ast::MemberAssgn {\n base: b.clone(),\n member: ast::MemberExpr {\n base: b.clone(),\n object: ast::Expression::Identifier(ast::Identifier {\n base: b.clone(),\n name: \"alert\".to_string(),\n }),\n lbrack: None,\n property: ast::PropertyKey::Identifier(ast::Identifier {\n base: b.clone(),\n name: \"state\".to_string(),\n }),\n rbrack: None,\n },\n init: ast::Expression::StringLit(ast::StringLit {\n base: b.clone(),\n value: \"Warning\".to_string(),\n }),\n })),\n 
}))],\n eof: None,\n }],\n };\n let want = Package {\n loc: b.location.clone(),\n package: \"main\".to_string(),\n files: vec![File {\n loc: b.location.clone(),\n package: None,\n imports: Vec::new(),\n body: vec![Statement::Option(Box::new(OptionStmt {\n loc: b.location.clone(),\n assignment: Assignment::Member(MemberAssgn {\n loc: b.location.clone(),\n member: MemberExpr {\n loc: b.location.clone(),\n typ: type_info(),\n object: Expression::Identifier(IdentifierExpr {\n loc: b.location.clone(),\n typ: type_info(),\n name: \"alert\".to_string(),\n }),\n property: \"state\".to_string(),\n },\n init: Expression::StringLit(StringLit {\n loc: b.location.clone(),\n typ: type_info(),\n value: \"Warning\".to_string(),\n }),\n }),\n }))],\n }],\n };\n let got = test_convert(pkg).unwrap();\n assert_eq!(want, got);\n }\n\n #[test]\n fn test_convert_function() {\n let b = ast::BaseNode::default();\n let pkg = ast::Package {\n base: b.clone(),\n path: \"path\".to_string(),\n package: \"main\".to_string(),\n files: vec![ast::File {\n base: b.clone(),\n name: \"foo.flux\".to_string(),\n metadata: String::new(),\n package: None,\n imports: Vec::new(),\n body: vec![\n ast::Statement::Variable(Box::new(ast::VariableAssgn {\n base: b.clone(),\n id: ast::Identifier {\n base: b.clone(),\n name: \"f\".to_string(),\n },\n init: ast::Expression::Function(Box::new(ast::FunctionExpr {\n base: b.clone(),\n lparen: None,\n params: vec![\n ast::Property {\n base: b.clone(),\n key: ast::PropertyKey::Identifier(ast::Identifier {\n base: b.clone(),\n name: \"a\".to_string(),\n }),\n separator: None,\n value: None,\n comma: None,\n },\n ast::Property {\n base: b.clone(),\n key: ast::PropertyKey::Identifier(ast::Identifier {\n base: b.clone(),\n name: \"b\".to_string(),\n }),\n separator: None,\n value: None,\n comma: None,\n },\n ],\n rparen: None,\n arrow: None,\n body: ast::FunctionBody::Expr(ast::Expression::Binary(Box::new(\n ast::BinaryExpr {\n base: b.clone(),\n operator: ast::Operator::AdditionOperator,\n left: ast::Expression::Identifier(ast::Identifier {\n base: b.clone(),\n name: \"a\".to_string(),\n }),\n right: ast::Expression::Identifier(ast::Identifier {\n base: b.clone(),\n name: \"b\".to_string(),\n }),\n },\n ))),\n })),\n })),\n ast::Statement::Expr(Box::new(ast::ExprStmt {\n base: b.clone(),\n expression: ast::Expression::Call(Box::new(ast::CallExpr {\n base: b.clone(),\n callee: ast::Expression::Identifier(ast::Identifier {\n base: b.clone(),\n name: \"f\".to_string(),\n }),\n lparen: None,\n arguments: vec![ast::Expression::Object(Box::new(ast::ObjectExpr {\n base: b.clone(),\n lbrace: None,\n with: None,\n properties: vec![\n ast::Property {\n base: b.clone(),\n key: ast::PropertyKey::Identifier(ast::Identifier {\n base: b.clone(),\n name: \"a\".to_string(),\n }),\n separator: None,\n value: Some(ast::Expression::Integer(ast::IntegerLit {\n base: b.clone(),\n value: 2,\n })),\n comma: None,\n },\n ast::Property {\n base: b.clone(),\n key: ast::PropertyKey::Identifier(ast::Identifier {\n base: b.clone(),\n name: \"b\".to_string(),\n }),\n separator: None,\n value: Some(ast::Expression::Integer(ast::IntegerLit {\n base: b.clone(),\n value: 3,\n })),\n comma: None,\n },\n ],\n rbrace: None,\n }))],\n rparen: None,\n })),\n })),\n ],\n eof: None,\n }],\n };\n let want = Package {\n loc: b.location.clone(),\n package: \"main\".to_string(),\n files: vec![File {\n loc: b.location.clone(),\n package: None,\n imports: Vec::new(),\n body: vec![\n Statement::Variable(Box::new(VariableAssgn::new(\n Identifier 
{\n loc: b.location.clone(),\n name: \"f\".to_string(),\n },\n Expression::Function(Box::new(FunctionExpr {\n loc: b.location.clone(),\n typ: type_info(),\n params: vec![\n FunctionParameter {\n loc: b.location.clone(),\n is_pipe: false,\n key: Identifier {\n loc: b.location.clone(),\n name: \"a\".to_string(),\n },\n default: None,\n },\n FunctionParameter {\n loc: b.location.clone(),\n is_pipe: false,\n key: Identifier {\n loc: b.location.clone(),\n name: \"b\".to_string(),\n },\n default: None,\n },\n ],\n body: Block::Return(ReturnStmt {\n loc: b.location.clone(),\n argument: Expression::Binary(Box::new(BinaryExpr {\n loc: b.location.clone(),\n typ: type_info(),\n operator: ast::Operator::AdditionOperator,\n left: Expression::Identifier(IdentifierExpr {\n loc: b.location.clone(),\n typ: type_info(),\n name: \"a\".to_string(),\n }),\n right: Expression::Identifier(IdentifierExpr {\n loc: b.location.clone(),\n typ: type_info(),\n name: \"b\".to_string(),\n }),\n })),\n }),\n })),\n b.location.clone(),\n ))),\n Statement::Expr(ExprStmt {\n loc: b.location.clone(),\n expression: Expression::Call(Box::new(CallExpr {\n loc: b.location.clone(),\n typ: type_info(),\n pipe: None,\n callee: Expression::Identifier(IdentifierExpr {\n loc: b.location.clone(),\n typ: type_info(),\n name: \"f\".to_string(),\n }),\n arguments: vec![\n Property {\n loc: b.location.clone(),\n key: Identifier {\n loc: b.location.clone(),\n name: \"a\".to_string(),\n },\n value: Expression::Integer(IntegerLit {\n loc: b.location.clone(),\n typ: type_info(),\n value: 2,\n }),\n },\n Property {\n loc: b.location.clone(),\n key: Identifier {\n loc: b.location.clone(),\n name: \"b\".to_string(),\n },\n value: Expression::Integer(IntegerLit {\n loc: b.location.clone(),\n typ: type_info(),\n value: 3,\n }),\n },\n ],\n })),\n }),\n ],\n }],\n };\n let got = test_convert(pkg).unwrap();\n assert_eq!(want, got);\n }\n\n #[test]\n fn test_convert_function_with_defaults() {\n let b = ast::BaseNode::default();\n let pkg = ast::Package {\n base: b.clone(),\n path: \"path\".to_string(),\n package: \"main\".to_string(),\n files: vec![ast::File {\n base: b.clone(),\n name: \"foo.flux\".to_string(),\n metadata: String::new(),\n package: None,\n imports: Vec::new(),\n body: vec![\n ast::Statement::Variable(Box::new(ast::VariableAssgn {\n base: b.clone(),\n id: ast::Identifier {\n base: b.clone(),\n name: \"f\".to_string(),\n },\n init: ast::Expression::Function(Box::new(ast::FunctionExpr {\n base: b.clone(),\n lparen: None,\n params: vec![\n ast::Property {\n base: b.clone(),\n key: ast::PropertyKey::Identifier(ast::Identifier {\n base: b.clone(),\n name: \"a\".to_string(),\n }),\n separator: None,\n value: Some(ast::Expression::Integer(ast::IntegerLit {\n base: b.clone(),\n value: 0,\n })),\n comma: None,\n },\n ast::Property {\n base: b.clone(),\n key: ast::PropertyKey::Identifier(ast::Identifier {\n base: b.clone(),\n name: \"b\".to_string(),\n }),\n separator: None,\n value: Some(ast::Expression::Integer(ast::IntegerLit {\n base: b.clone(),\n value: 0,\n })),\n comma: None,\n },\n ast::Property {\n base: b.clone(),\n key: ast::PropertyKey::Identifier(ast::Identifier {\n base: b.clone(),\n name: \"c\".to_string(),\n }),\n separator: None,\n value: None,\n comma: None,\n },\n ],\n rparen: None,\n arrow: None,\n body: ast::FunctionBody::Expr(ast::Expression::Binary(Box::new(\n ast::BinaryExpr {\n base: b.clone(),\n operator: ast::Operator::AdditionOperator,\n left: ast::Expression::Binary(Box::new(ast::BinaryExpr {\n base: b.clone(),\n 
operator: ast::Operator::AdditionOperator,\n left: ast::Expression::Identifier(ast::Identifier {\n base: b.clone(),\n name: \"a\".to_string(),\n }),\n right: ast::Expression::Identifier(ast::Identifier {\n base: b.clone(),\n name: \"b\".to_string(),\n }),\n })),\n right: ast::Expression::Identifier(ast::Identifier {\n base: b.clone(),\n name: \"c\".to_string(),\n }),\n },\n ))),\n })),\n })),\n ast::Statement::Expr(Box::new(ast::ExprStmt {\n base: b.clone(),\n expression: ast::Expression::Call(Box::new(ast::CallExpr {\n base: b.clone(),\n callee: ast::Expression::Identifier(ast::Identifier {\n base: b.clone(),\n name: \"f\".to_string(),\n }),\n lparen: None,\n arguments: vec![ast::Expression::Object(Box::new(ast::ObjectExpr {\n base: b.clone(),\n lbrace: None,\n with: None,\n properties: vec![ast::Property {\n base: b.clone(),\n key: ast::PropertyKey::Identifier(ast::Identifier {\n base: b.clone(),\n name: \"c\".to_string(),\n }),\n separator: None,\n value: Some(ast::Expression::Integer(ast::IntegerLit {\n base: b.clone(),\n value: 42,\n })),\n comma: None,\n }],\n rbrace: None,\n }))],\n rparen: None,\n })),\n })),\n ],\n eof: None,\n }],\n };\n let want = Package {\n loc: b.location.clone(),\n package: \"main\".to_string(),\n files: vec![File {\n loc: b.location.clone(),\n package: None,\n imports: Vec::new(),\n body: vec![\n Statement::Variable(Box::new(VariableAssgn::new(\n Identifier {\n loc: b.location.clone(),\n name: \"f\".to_string(),\n },\n Expression::Function(Box::new(FunctionExpr {\n loc: b.location.clone(),\n typ: type_info(),\n params: vec![\n FunctionParameter {\n loc: b.location.clone(),\n is_pipe: false,\n key: Identifier {\n loc: b.location.clone(),\n name: \"a\".to_string(),\n },\n default: Some(Expression::Integer(IntegerLit {\n loc: b.location.clone(),\n typ: type_info(),\n value: 0,\n })),\n },\n FunctionParameter {\n loc: b.location.clone(),\n is_pipe: false,\n key: Identifier {\n loc: b.location.clone(),\n name: \"b\".to_string(),\n },\n default: Some(Expression::Integer(IntegerLit {\n loc: b.location.clone(),\n typ: type_info(),\n value: 0,\n })),\n },\n FunctionParameter {\n loc: b.location.clone(),\n is_pipe: false,\n key: Identifier {\n loc: b.location.clone(),\n name: \"c\".to_string(),\n },\n default: None,\n },\n ],\n body: Block::Return(ReturnStmt {\n loc: b.location.clone(),\n argument: Expression::Binary(Box::new(BinaryExpr {\n loc: b.location.clone(),\n typ: type_info(),\n operator: ast::Operator::AdditionOperator,\n left: Expression::Binary(Box::new(BinaryExpr {\n loc: b.location.clone(),\n typ: type_info(),\n operator: ast::Operator::AdditionOperator,\n left: Expression::Identifier(IdentifierExpr {\n loc: b.location.clone(),\n typ: type_info(),\n name: \"a\".to_string(),\n }),\n right: Expression::Identifier(IdentifierExpr {\n loc: b.location.clone(),\n typ: type_info(),\n name: \"b\".to_string(),\n }),\n })),\n right: Expression::Identifier(IdentifierExpr {\n loc: b.location.clone(),\n typ: type_info(),\n name: \"c\".to_string(),\n }),\n })),\n }),\n })),\n b.location.clone(),\n ))),\n Statement::Expr(ExprStmt {\n loc: b.location.clone(),\n expression: Expression::Call(Box::new(CallExpr {\n loc: b.location.clone(),\n typ: type_info(),\n pipe: None,\n callee: Expression::Identifier(IdentifierExpr {\n loc: b.location.clone(),\n typ: type_info(),\n name: \"f\".to_string(),\n }),\n arguments: vec![Property {\n loc: b.location.clone(),\n key: Identifier {\n loc: b.location.clone(),\n name: \"c\".to_string(),\n },\n value: Expression::Integer(IntegerLit 
{\n loc: b.location.clone(),\n typ: type_info(),\n value: 42,\n }),\n }],\n })),\n }),\n ],\n }],\n };\n let got = test_convert(pkg).unwrap();\n assert_eq!(want, got);\n }\n\n #[test]\n fn test_convert_function_multiple_pipes() {\n let b = ast::BaseNode::default();\n let pkg = ast::Package {\n base: b.clone(),\n path: \"path\".to_string(),\n package: \"main\".to_string(),\n files: vec![ast::File {\n base: b.clone(),\n name: \"foo.flux\".to_string(),\n metadata: String::new(),\n package: None,\n imports: Vec::new(),\n body: vec![ast::Statement::Variable(Box::new(ast::VariableAssgn {\n base: b.clone(),\n id: ast::Identifier {\n base: b.clone(),\n name: \"f\".to_string(),\n },\n init: ast::Expression::Function(Box::new(ast::FunctionExpr {\n base: b.clone(),\n lparen: None,\n params: vec![\n ast::Property {\n base: b.clone(),\n key: ast::PropertyKey::Identifier(ast::Identifier {\n base: b.clone(),\n name: \"a\".to_string(),\n }),\n separator: None,\n value: None,\n comma: None,\n },\n ast::Property {\n base: b.clone(),\n key: ast::PropertyKey::Identifier(ast::Identifier {\n base: b.clone(),\n name: \"piped1\".to_string(),\n }),\n separator: None,\n value: Some(ast::Expression::PipeLit(ast::PipeLit {\n base: b.clone(),\n })),\n comma: None,\n },\n ast::Property {\n base: b.clone(),\n key: ast::PropertyKey::Identifier(ast::Identifier {\n base: b.clone(),\n name: \"piped2\".to_string(),\n }),\n separator: None,\n value: Some(ast::Expression::PipeLit(ast::PipeLit {\n base: b.clone(),\n })),\n comma: None,\n },\n ],\n rparen: None,\n arrow: None,\n body: ast::FunctionBody::Expr(ast::Expression::Identifier(\n ast::Identifier {\n base: b.clone(),\n name: \"a\".to_string(),\n },\n )),\n })),\n }))],\n eof: None,\n }],\n };\n let got = test_convert(pkg).err().unwrap().to_string();\n assert_eq!(\"only a single argument may be piped\".to_string(), got);\n }\n\n #[test]\n fn test_convert_call_multiple_object_arguments() {\n let b = ast::BaseNode::default();\n let pkg = ast::Package {\n base: b.clone(),\n path: \"path\".to_string(),\n package: \"main\".to_string(),\n files: vec![ast::File {\n base: b.clone(),\n name: \"foo.flux\".to_string(),\n metadata: String::new(),\n package: None,\n imports: Vec::new(),\n body: vec![ast::Statement::Expr(Box::new(ast::ExprStmt {\n base: b.clone(),\n expression: ast::Expression::Call(Box::new(ast::CallExpr {\n base: b.clone(),\n callee: ast::Expression::Identifier(ast::Identifier {\n base: b.clone(),\n name: \"f\".to_string(),\n }),\n lparen: None,\n arguments: vec![\n ast::Expression::Object(Box::new(ast::ObjectExpr {\n base: b.clone(),\n lbrace: None,\n with: None,\n properties: vec![ast::Property {\n base: b.clone(),\n key: ast::PropertyKey::Identifier(ast::Identifier {\n base: b.clone(),\n name: \"a\".to_string(),\n }),\n separator: None,\n value: Some(ast::Expression::Integer(ast::IntegerLit {\n base: b.clone(),\n value: 0,\n })),\n comma: None,\n }],\n rbrace: None,\n })),\n ast::Expression::Object(Box::new(ast::ObjectExpr {\n base: b.clone(),\n lbrace: None,\n with: None,\n properties: vec![ast::Property {\n base: b.clone(),\n key: ast::PropertyKey::Identifier(ast::Identifier {\n base: b.clone(),\n name: \"b\".to_string(),\n }),\n separator: None,\n value: Some(ast::Expression::Integer(ast::IntegerLit {\n base: b.clone(),\n value: 1,\n })),\n comma: None,\n }],\n rbrace: None,\n })),\n ],\n rparen: None,\n })),\n }))],\n eof: None,\n }],\n };\n let got = test_convert(pkg).err().unwrap().to_string();\n assert_eq!(\n \"arguments are more than one object 
expression\".to_string(),\n got\n );\n }\n\n #[test]\n fn test_convert_pipe_expression() {\n let b = ast::BaseNode::default();\n let pkg = ast::Package {\n base: b.clone(),\n path: \"path\".to_string(),\n package: \"main\".to_string(),\n files: vec![ast::File {\n base: b.clone(),\n name: \"foo.flux\".to_string(),\n metadata: String::new(),\n package: None,\n imports: Vec::new(),\n body: vec![\n ast::Statement::Variable(Box::new(ast::VariableAssgn {\n base: b.clone(),\n id: ast::Identifier {\n base: b.clone(),\n name: \"f\".to_string(),\n },\n init: ast::Expression::Function(Box::new(ast::FunctionExpr {\n base: b.clone(),\n lparen: None,\n params: vec![\n ast::Property {\n base: b.clone(),\n key: ast::PropertyKey::Identifier(ast::Identifier {\n base: b.clone(),\n name: \"piped\".to_string(),\n }),\n separator: None,\n value: Some(ast::Expression::PipeLit(ast::PipeLit {\n base: b.clone(),\n })),\n comma: None,\n },\n ast::Property {\n base: b.clone(),\n key: ast::PropertyKey::Identifier(ast::Identifier {\n base: b.clone(),\n name: \"a\".to_string(),\n }),\n separator: None,\n value: None,\n comma: None,\n },\n ],\n rparen: None,\n arrow: None,\n body: ast::FunctionBody::Expr(ast::Expression::Binary(Box::new(\n ast::BinaryExpr {\n base: b.clone(),\n operator: ast::Operator::AdditionOperator,\n left: ast::Expression::Identifier(ast::Identifier {\n base: b.clone(),\n name: \"a\".to_string(),\n }),\n right: ast::Expression::Identifier(ast::Identifier {\n base: b.clone(),\n name: \"piped\".to_string(),\n }),\n },\n ))),\n })),\n })),\n ast::Statement::Expr(Box::new(ast::ExprStmt {\n base: b.clone(),\n expression: ast::Expression::PipeExpr(Box::new(ast::PipeExpr {\n base: b.clone(),\n argument: ast::Expression::Integer(ast::IntegerLit {\n base: b.clone(),\n value: 3,\n }),\n call: ast::CallExpr {\n base: b.clone(),\n callee: ast::Expression::Identifier(ast::Identifier {\n base: b.clone(),\n name: \"f\".to_string(),\n }),\n lparen: None,\n arguments: vec![ast::Expression::Object(Box::new(\n ast::ObjectExpr {\n base: b.clone(),\n lbrace: None,\n with: None,\n properties: vec![ast::Property {\n base: b.clone(),\n key: ast::PropertyKey::Identifier(ast::Identifier {\n base: b.clone(),\n name: \"a\".to_string(),\n }),\n separator: None,\n value: Some(ast::Expression::Integer(\n ast::IntegerLit {\n base: b.clone(),\n value: 2,\n },\n )),\n comma: None,\n }],\n rbrace: None,\n },\n ))],\n rparen: None,\n },\n })),\n })),\n ],\n eof: None,\n }],\n };\n let want = Package {\n loc: b.location.clone(),\n package: \"main\".to_string(),\n files: vec![File {\n loc: b.location.clone(),\n package: None,\n imports: Vec::new(),\n body: vec![\n Statement::Variable(Box::new(VariableAssgn::new(\n Identifier {\n loc: b.location.clone(),\n name: \"f\".to_string(),\n },\n Expression::Function(Box::new(FunctionExpr {\n loc: b.location.clone(),\n typ: type_info(),\n params: vec![\n FunctionParameter {\n loc: b.location.clone(),\n is_pipe: true,\n key: Identifier {\n loc: b.location.clone(),\n name: \"piped\".to_string(),\n },\n default: None,\n },\n FunctionParameter {\n loc: b.location.clone(),\n is_pipe: false,\n key: Identifier {\n loc: b.location.clone(),\n name: \"a\".to_string(),\n },\n default: None,\n },\n ],\n body: Block::Return(ReturnStmt {\n loc: b.location.clone(),\n argument: Expression::Binary(Box::new(BinaryExpr {\n loc: b.location.clone(),\n typ: type_info(),\n operator: ast::Operator::AdditionOperator,\n left: Expression::Identifier(IdentifierExpr {\n loc: b.location.clone(),\n typ: type_info(),\n name: 
\"a\".to_string(),\n }),\n right: Expression::Identifier(IdentifierExpr {\n loc: b.location.clone(),\n typ: type_info(),\n name: \"piped\".to_string(),\n }),\n })),\n }),\n })),\n b.location.clone(),\n ))),\n Statement::Expr(ExprStmt {\n loc: b.location.clone(),\n expression: Expression::Call(Box::new(CallExpr {\n loc: b.location.clone(),\n typ: type_info(),\n pipe: Some(Expression::Integer(IntegerLit {\n loc: b.location.clone(),\n typ: type_info(),\n value: 3,\n })),\n callee: Expression::Identifier(IdentifierExpr {\n loc: b.location.clone(),\n typ: type_info(),\n name: \"f\".to_string(),\n }),\n arguments: vec![Property {\n loc: b.location.clone(),\n key: Identifier {\n loc: b.location.clone(),\n name: \"a\".to_string(),\n },\n value: Expression::Integer(IntegerLit {\n loc: b.location.clone(),\n typ: type_info(),\n value: 2,\n }),\n }],\n })),\n }),\n ],\n }],\n };\n let got = test_convert(pkg).unwrap();\n assert_eq!(want, got);\n }\n\n #[test]\n fn test_function_expression_simple() {\n let b = ast::BaseNode::default();\n let f = FunctionExpr {\n loc: b.location.clone(),\n typ: type_info(),\n params: vec![\n FunctionParameter {\n loc: b.location.clone(),\n is_pipe: false,\n key: Identifier {\n loc: b.location.clone(),\n name: \"a\".to_string(),\n },\n default: None,\n },\n FunctionParameter {\n loc: b.location.clone(),\n is_pipe: false,\n key: Identifier {\n loc: b.location.clone(),\n name: \"b\".to_string(),\n },\n default: None,\n },\n ],\n body: Block::Return(ReturnStmt {\n loc: b.location.clone(),\n argument: Expression::Binary(Box::new(BinaryExpr {\n loc: b.location.clone(),\n typ: type_info(),\n operator: ast::Operator::AdditionOperator,\n left: Expression::Identifier(IdentifierExpr {\n loc: b.location.clone(),\n typ: type_info(),\n name: \"a\".to_string(),\n }),\n right: Expression::Identifier(IdentifierExpr {\n loc: b.location.clone(),\n typ: type_info(),\n name: \"b\".to_string(),\n }),\n })),\n }),\n };\n assert_eq!(Vec::<&FunctionParameter>::new(), f.defaults());\n assert_eq!(None, f.pipe());\n }\n\n #[test]\n fn test_function_expression_defaults_and_pipes() {\n let b = ast::BaseNode::default();\n let piped = FunctionParameter {\n loc: b.location.clone(),\n is_pipe: true,\n key: Identifier {\n loc: b.location.clone(),\n name: \"a\".to_string(),\n },\n default: Some(Expression::Integer(IntegerLit {\n loc: b.location.clone(),\n typ: type_info(),\n value: 0,\n })),\n };\n let default1 = FunctionParameter {\n loc: b.location.clone(),\n is_pipe: false,\n key: Identifier {\n loc: b.location.clone(),\n name: \"b\".to_string(),\n },\n default: Some(Expression::Integer(IntegerLit {\n loc: b.location.clone(),\n typ: type_info(),\n value: 1,\n })),\n };\n let default2 = FunctionParameter {\n loc: b.location.clone(),\n is_pipe: false,\n key: Identifier {\n loc: b.location.clone(),\n name: \"c\".to_string(),\n },\n default: Some(Expression::Integer(IntegerLit {\n loc: b.location.clone(),\n typ: type_info(),\n value: 2,\n })),\n };\n let no_default = FunctionParameter {\n loc: b.location.clone(),\n is_pipe: false,\n key: Identifier {\n loc: b.location.clone(),\n name: \"d\".to_string(),\n },\n default: None,\n };\n let defaults = vec![&piped, &default1, &default2];\n let f = FunctionExpr {\n loc: b.location.clone(),\n typ: type_info(),\n params: vec![\n piped.clone(),\n default1.clone(),\n default2.clone(),\n no_default.clone(),\n ],\n body: Block::Return(ReturnStmt {\n loc: b.location.clone(),\n argument: Expression::Binary(Box::new(BinaryExpr {\n loc: b.location.clone(),\n typ: 
type_info(),\n operator: ast::Operator::AdditionOperator,\n left: Expression::Identifier(IdentifierExpr {\n loc: b.location.clone(),\n typ: type_info(),\n name: \"a\".to_string(),\n }),\n right: Expression::Identifier(IdentifierExpr {\n loc: b.location.clone(),\n typ: type_info(),\n name: \"b\".to_string(),\n }),\n })),\n }),\n };\n assert_eq!(defaults, f.defaults());\n assert_eq!(Some(&piped), f.pipe());\n }\n\n #[test]\n fn test_convert_index_expression() {\n let b = ast::BaseNode::default();\n let pkg = ast::Package {\n base: b.clone(),\n path: \"path\".to_string(),\n package: \"main\".to_string(),\n files: vec![ast::File {\n base: b.clone(),\n name: \"foo.flux\".to_string(),\n metadata: String::new(),\n package: None,\n imports: Vec::new(),\n body: vec![ast::Statement::Expr(Box::new(ast::ExprStmt {\n base: b.clone(),\n expression: ast::Expression::Index(Box::new(ast::IndexExpr {\n base: b.clone(),\n array: ast::Expression::Identifier(ast::Identifier {\n base: b.clone(),\n name: \"a\".to_string(),\n }),\n lbrack: None,\n index: ast::Expression::Integer(ast::IntegerLit {\n base: b.clone(),\n value: 3,\n }),\n rbrack: None,\n })),\n }))],\n eof: None,\n }],\n };\n let want = Package {\n loc: b.location.clone(),\n package: \"main\".to_string(),\n files: vec![File {\n loc: b.location.clone(),\n package: None,\n imports: Vec::new(),\n body: vec![Statement::Expr(ExprStmt {\n loc: b.location.clone(),\n expression: Expression::Index(Box::new(IndexExpr {\n loc: b.location.clone(),\n typ: type_info(),\n array: Expression::Identifier(IdentifierExpr {\n loc: b.location.clone(),\n typ: type_info(),\n name: \"a\".to_string(),\n }),\n index: Expression::Integer(IntegerLit {\n loc: b.location.clone(),\n typ: type_info(),\n value: 3,\n }),\n })),\n })],\n }],\n };\n let got = test_convert(pkg).unwrap();\n assert_eq!(want, got);\n }\n\n #[test]\n fn test_convert_nested_index_expression() {\n let b = ast::BaseNode::default();\n let pkg = ast::Package {\n base: b.clone(),\n path: \"path\".to_string(),\n package: \"main\".to_string(),\n files: vec![ast::File {\n base: b.clone(),\n name: \"foo.flux\".to_string(),\n metadata: String::new(),\n package: None,\n imports: Vec::new(),\n body: vec![ast::Statement::Expr(Box::new(ast::ExprStmt {\n base: b.clone(),\n expression: ast::Expression::Index(Box::new(ast::IndexExpr {\n base: b.clone(),\n array: ast::Expression::Index(Box::new(ast::IndexExpr {\n base: b.clone(),\n array: ast::Expression::Identifier(ast::Identifier {\n base: b.clone(),\n name: \"a\".to_string(),\n }),\n lbrack: None,\n index: ast::Expression::Integer(ast::IntegerLit {\n base: b.clone(),\n value: 3,\n }),\n rbrack: None,\n })),\n lbrack: None,\n index: ast::Expression::Integer(ast::IntegerLit {\n base: b.clone(),\n value: 5,\n }),\n rbrack: None,\n })),\n }))],\n eof: None,\n }],\n };\n let want = Package {\n loc: b.location.clone(),\n package: \"main\".to_string(),\n files: vec![File {\n loc: b.location.clone(),\n package: None,\n imports: Vec::new(),\n body: vec![Statement::Expr(ExprStmt {\n loc: b.location.clone(),\n expression: Expression::Index(Box::new(IndexExpr {\n loc: b.location.clone(),\n typ: type_info(),\n array: Expression::Index(Box::new(IndexExpr {\n loc: b.location.clone(),\n typ: type_info(),\n array: Expression::Identifier(IdentifierExpr {\n loc: b.location.clone(),\n typ: type_info(),\n name: \"a\".to_string(),\n }),\n index: Expression::Integer(IntegerLit {\n loc: b.location.clone(),\n typ: type_info(),\n value: 3,\n }),\n })),\n index: Expression::Integer(IntegerLit {\n 
loc: b.location.clone(),\n typ: type_info(),\n value: 5,\n }),\n })),\n })],\n }],\n };\n let got = test_convert(pkg).unwrap();\n assert_eq!(want, got);\n }\n\n #[test]\n fn test_convert_access_idexed_object_returned_from_function_call() {\n let b = ast::BaseNode::default();\n let pkg = ast::Package {\n base: b.clone(),\n path: \"path\".to_string(),\n package: \"main\".to_string(),\n files: vec![ast::File {\n base: b.clone(),\n name: \"foo.flux\".to_string(),\n metadata: String::new(),\n package: None,\n imports: Vec::new(),\n body: vec![ast::Statement::Expr(Box::new(ast::ExprStmt {\n base: b.clone(),\n expression: ast::Expression::Index(Box::new(ast::IndexExpr {\n base: b.clone(),\n array: ast::Expression::Call(Box::new(ast::CallExpr {\n base: b.clone(),\n callee: ast::Expression::Identifier(ast::Identifier {\n base: b.clone(),\n name: \"f\".to_string(),\n }),\n lparen: None,\n arguments: vec![],\n rparen: None,\n })),\n lbrack: None,\n index: ast::Expression::Integer(ast::IntegerLit {\n base: b.clone(),\n value: 3,\n }),\n rbrack: None,\n })),\n }))],\n eof: None,\n }],\n };\n let want = Package {\n loc: b.location.clone(),\n package: \"main\".to_string(),\n files: vec![File {\n loc: b.location.clone(),\n package: None,\n imports: Vec::new(),\n body: vec![Statement::Expr(ExprStmt {\n loc: b.location.clone(),\n expression: Expression::Index(Box::new(IndexExpr {\n loc: b.location.clone(),\n typ: type_info(),\n array: Expression::Call(Box::new(CallExpr {\n loc: b.location.clone(),\n typ: type_info(),\n pipe: None,\n callee: Expression::Identifier(IdentifierExpr {\n loc: b.location.clone(),\n typ: type_info(),\n name: \"f\".to_string(),\n }),\n arguments: Vec::new(),\n })),\n index: Expression::Integer(IntegerLit {\n loc: b.location.clone(),\n typ: type_info(),\n value: 3,\n }),\n })),\n })],\n }],\n };\n let got = test_convert(pkg).unwrap();\n assert_eq!(want, got);\n }\n\n #[test]\n fn test_convert_nested_member_expression() {\n let b = ast::BaseNode::default();\n let pkg = ast::Package {\n base: b.clone(),\n path: \"path\".to_string(),\n package: \"main\".to_string(),\n files: vec![ast::File {\n base: b.clone(),\n name: \"foo.flux\".to_string(),\n metadata: String::new(),\n package: None,\n imports: Vec::new(),\n body: vec![ast::Statement::Expr(Box::new(ast::ExprStmt {\n base: b.clone(),\n expression: ast::Expression::Member(Box::new(ast::MemberExpr {\n base: b.clone(),\n object: ast::Expression::Member(Box::new(ast::MemberExpr {\n base: b.clone(),\n object: ast::Expression::Identifier(ast::Identifier {\n base: b.clone(),\n name: \"a\".to_string(),\n }),\n lbrack: None,\n property: ast::PropertyKey::Identifier(ast::Identifier {\n base: b.clone(),\n name: \"b\".to_string(),\n }),\n rbrack: None,\n })),\n lbrack: None,\n property: ast::PropertyKey::Identifier(ast::Identifier {\n base: b.clone(),\n name: \"c\".to_string(),\n }),\n rbrack: None,\n })),\n }))],\n eof: None,\n }],\n };\n let want = Package {\n loc: b.location.clone(),\n package: \"main\".to_string(),\n files: vec![File {\n loc: b.location.clone(),\n package: None,\n imports: Vec::new(),\n body: vec![Statement::Expr(ExprStmt {\n loc: b.location.clone(),\n expression: Expression::Member(Box::new(MemberExpr {\n loc: b.location.clone(),\n typ: type_info(),\n object: Expression::Member(Box::new(MemberExpr {\n loc: b.location.clone(),\n typ: type_info(),\n object: Expression::Identifier(IdentifierExpr {\n loc: b.location.clone(),\n typ: type_info(),\n name: \"a\".to_string(),\n }),\n property: \"b\".to_string(),\n })),\n property: 
\"c\".to_string(),\n })),\n })],\n }],\n };\n let got = test_convert(pkg).unwrap();\n assert_eq!(want, got);\n }\n\n #[test]\n fn test_convert_member_with_call_expression() {\n let b = ast::BaseNode::default();\n let pkg = ast::Package {\n base: b.clone(),\n path: \"path\".to_string(),\n package: \"main\".to_string(),\n files: vec![ast::File {\n base: b.clone(),\n name: \"foo.flux\".to_string(),\n metadata: String::new(),\n package: None,\n imports: Vec::new(),\n body: vec![ast::Statement::Expr(Box::new(ast::ExprStmt {\n base: b.clone(),\n expression: ast::Expression::Member(Box::new(ast::MemberExpr {\n base: b.clone(),\n object: ast::Expression::Call(Box::new(ast::CallExpr {\n base: b.clone(),\n callee: ast::Expression::Member(Box::new(ast::MemberExpr {\n base: b.clone(),\n object: ast::Expression::Identifier(ast::Identifier {\n base: b.clone(),\n name: \"a\".to_string(),\n }),\n lbrack: None,\n property: ast::PropertyKey::Identifier(ast::Identifier {\n base: b.clone(),\n name: \"b\".to_string(),\n }),\n rbrack: None,\n })),\n lparen: None,\n arguments: vec![],\n rparen: None,\n })),\n lbrack: None,\n property: ast::PropertyKey::Identifier(ast::Identifier {\n base: b.clone(),\n name: \"c\".to_string(),\n }),\n rbrack: None,\n })),\n }))],\n eof: None,\n }],\n };\n let want = Package {\n loc: b.location.clone(),\n package: \"main\".to_string(),\n files: vec![File {\n loc: b.location.clone(),\n package: None,\n imports: Vec::new(),\n body: vec![Statement::Expr(ExprStmt {\n loc: b.location.clone(),\n expression: Expression::Member(Box::new(MemberExpr {\n loc: b.location.clone(),\n typ: type_info(),\n object: Expression::Call(Box::new(CallExpr {\n loc: b.location.clone(),\n typ: type_info(),\n pipe: None,\n callee: Expression::Member(Box::new(MemberExpr {\n loc: b.location.clone(),\n typ: type_info(),\n object: Expression::Identifier(IdentifierExpr {\n loc: b.location.clone(),\n typ: type_info(),\n name: \"a\".to_string(),\n }),\n property: \"b\".to_string(),\n })),\n arguments: Vec::new(),\n })),\n property: \"c\".to_string(),\n })),\n })],\n }],\n };\n let got = test_convert(pkg).unwrap();\n assert_eq!(want, got);\n }\n #[test]\n fn test_convert_bad_stmt() {\n let b = ast::BaseNode::default();\n let pkg = ast::Package {\n base: b.clone(),\n path: \"path\".to_string(),\n package: \"main\".to_string(),\n files: vec![ast::File {\n base: b.clone(),\n name: \"foo.flux\".to_string(),\n metadata: String::new(),\n package: None,\n imports: Vec::new(),\n body: vec![ast::Statement::Bad(Box::new(ast::BadStmt {\n base: b.clone(),\n text: \"bad statement\".to_string(),\n }))],\n eof: None,\n }],\n };\n let want: Result =\n Err(\"BadStatement is not supported in semantic analysis\".to_string());\n let got = test_convert(pkg);\n assert_eq!(want, got);\n }\n #[test]\n fn test_convert_bad_expr() {\n let b = ast::BaseNode::default();\n let pkg = ast::Package {\n base: b.clone(),\n path: \"path\".to_string(),\n package: \"main\".to_string(),\n files: vec![ast::File {\n base: b.clone(),\n name: \"foo.flux\".to_string(),\n metadata: String::new(),\n package: None,\n imports: Vec::new(),\n body: vec![ast::Statement::Expr(Box::new(ast::ExprStmt {\n base: b.clone(),\n expression: ast::Expression::Bad(Box::new(ast::BadExpr {\n base: b.clone(),\n text: \"bad expression\".to_string(),\n expression: None,\n })),\n }))],\n eof: None,\n }],\n };\n let want: Result =\n Err(\"BadExpression is not supported in semantic analysis\".to_string());\n let got = test_convert(pkg);\n assert_eq!(want, got);\n 
}\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1185,"cells":{"blob_id":{"kind":"string","value":"12e6e4ad88859f08e5b01b898b0ec9d33a62cb58"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"Vechro/roman"},"path":{"kind":"string","value":"/src/lib.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":3082,"string":"3,082"},"score":{"kind":"number","value":3.578125,"string":"3.578125"},"int_score":{"kind":"number","value":4,"string":"4"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"mod test;\n\nconst fn roman_lut(numeral: &char) -> Option {\n match numeral {\n 'I' => Some(1),\n 'V' => Some(5),\n 'X' => Some(10),\n 'L' => Some(50),\n 'C' => Some(100),\n 'D' => Some(500),\n 'M' => Some(1000),\n _ => None,\n }\n}\n\nconst fn arabic_lut(digit: &usize) -> Option<&str> {\n match digit {\n 1 => Some(\"I\"),\n 4 => Some(\"IV\"),\n 5 => Some(\"V\"),\n 9 => Some(\"IX\"),\n 10 => Some(\"X\"),\n 40 => Some(\"XL\"),\n 50 => Some(\"L\"),\n 90 => Some(\"XC\"),\n 100 => Some(\"C\"),\n 400 => Some(\"CD\"),\n 500 => Some(\"D\"),\n 900 => Some(\"DM\"),\n 1000 => Some(\"M\"),\n _ => None,\n }\n}\n\nstatic DIGITS_DESC: [usize; 13] = [1000, 900, 500, 400, 100, 90, 50, 40, 10, 9, 5, 4, 1];\n\nstruct Tally {\n total: usize,\n max: usize,\n}\n\n// Impure function as it prints to stdout immediately.\npub fn convert_and_print_numerals(list_of_numerals: &[String]) {\n for number_str in list_of_numerals {\n let result = match number_str.chars().next() {\n Some(c) => match c {\n c if c.is_ascii_alphabetic() => roman_to_arabic(&number_str.to_ascii_uppercase()),\n c if c.is_ascii_digit() => arabic_to_roman(number_str),\n _ => None,\n },\n _ => unreachable!(),\n };\n\n match result {\n Some(s) => println!(\"{}\", s),\n None => println!(\"Invalid numerals!\"),\n };\n }\n}\n\nfn arabic_to_roman(arabic_numerals: &str) -> Option {\n let mut num = match arabic_numerals.parse::() {\n Ok(n) => n,\n Err(_) => return None,\n };\n\n let result = DIGITS_DESC\n .iter()\n .fold(String::new(), |mut state: String, digit| {\n let quot = num / *digit;\n num = num % *digit;\n\n let numeral = match arabic_lut(digit) {\n Some(s) => s,\n None => unreachable!(),\n };\n\n state.push_str(&numeral.repeat(quot));\n state\n });\n\n Some(result)\n}\n\nfn roman_to_arabic(roman_numerals: &str) -> Option {\n let result = roman_numerals.chars().rfold(\n Some(Tally { total: 0, max: 0 }),\n |tally: Option, c| {\n let current_value = match roman_lut(&c) {\n Some(val) => val,\n None => return None,\n };\n\n let (total, mut max) = match tally {\n Some(Tally { total, max }) => (total, max),\n None => return None,\n };\n\n max = current_value.max(max);\n\n if current_value >= max {\n Some(Tally {\n total: total + current_value,\n max,\n })\n } else {\n Some(Tally {\n total: total - current_value,\n max,\n })\n }\n },\n );\n\n match result {\n Some(Tally { total, .. 
}) => Some(total.to_string()),\n None => None,\n }\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1186,"cells":{"blob_id":{"kind":"string","value":"eeafdd246e46748861c9efec9a4ce1d0f6e81ccd"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"bouzuya/rust-atcoder"},"path":{"kind":"string","value":"/cargo-atcoder/contests/abc296/src/bin/b.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":312,"string":"312"},"score":{"kind":"number","value":2.640625,"string":"2.640625"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"use proconio::{input, marker::Chars};\n\nfn main() {\n input! {\n s: [Chars; 8],\n };\n\n for i in 0..8 {\n let n = 8 - i;\n for j in 0..8 {\n let a = (b'a' + j as u8) as char;\n if s[i][j] == '*' {\n println!(\"{}{}\", a, n);\n }\n }\n }\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1187,"cells":{"blob_id":{"kind":"string","value":"81318dce492702fd85da4204de7e836ba3beca28"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"kakoc/leetcode"},"path":{"kind":"string","value":"/src/except_self.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":779,"string":"779"},"score":{"kind":"number","value":3.21875,"string":"3.21875"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"pub fn product_except_self(nums: Vec<i32>) -> Vec<i32> {\n let mut res = vec![0; nums.len()];\n\n let mut p = 1;\n for (i, v) in nums.iter().rev().enumerate() {\n if i == 0 {\n res[nums.len() - 1 - i] = 1;\n p = *v;\n continue;\n }\n\n res[nums.len() - 1 - i] = p;\n p *= v;\n }\n\n let mut p = 1;\n for (i, v) in nums.iter().enumerate() {\n if i == 0 {\n p *= v;\n continue;\n }\n\n res[i] = p * res[i];\n p *= v;\n }\n\n res\n}\n\n#[test]\nfn test_except_self() {\n let i = vec![1, 2, 3, 4];\n let a = product_except_self(i);\n assert_eq!(a, vec![24, 12, 8, 6]);\n\n let i = vec![9, 0, -2];\n let a = product_except_self(i);\n assert_eq!(a, vec![0, -18, 0]);\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1188,"cells":{"blob_id":{"kind":"string","value":"507353e2300b9ffbfd7a5507ffdbdb51ce737fde"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"ebsnet/blockchain"},"path":{"kind":"string","value":"/lib/data/src/tx.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":2121,"string":"2,121"},"score":{"kind":"number","value":3.140625,"string":"3.140625"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"//! This module contains transaction specific data.\n\nuse hack::BigArray;\n\nuse bincode::serialize;\n\nuse failure::Error;\n\n/// Size of an Ed25519 signature in bytes.\npub const SIG_SIZE: usize = 64;\n\n/// Convenience type for a signature.\npub type Signature = [u8; SIG_SIZE];\n\n/// Convenience type for signed data inside a block.\npub type BlockData = SignedData<Data>;\n\n/// Wrapper for signed data. 
This struct contains the data and the signature.\n#[derive(Deserialize, Serialize, Clone)]\npub struct SignedData<T> {\n #[serde(with = \"BigArray\")]\n signature: Signature,\n data: T,\n}\n\nimpl<T> SignedData<T> {\n /// Generate a new object from supplied data and a signature.\n pub fn new(signature: Signature, data: T) -> Self {\n Self {\n signature: signature,\n data: data,\n }\n }\n\n /// Returns a reference to the wrapped data.\n pub fn data(&self) -> &T {\n &self.data\n }\n\n /// Returns a reference to the wrapped signature.\n pub fn signature(&self) -> &Signature {\n &self.signature\n }\n}\n\n/// Convenience type for a fingerprint.\npub type Fingerprint = Vec<u8>;\n\n/// The data that can be contained in a block.\n#[derive(Deserialize, Serialize, PartialEq, Clone)]\npub enum Data {\n /// Billing operation used to initialize a billing process and indicate that a user has been\n /// billed at a certain point in time.\n Billing(Fingerprint),\n /// Usage operation that records the power usage of a user.\n Usage(u64),\n}\n\n/// Types that implement this trait can be signed.\npub trait Signable {\n /// Converts the data to a list of bytes that can be signed.\n fn get_bytes(&self) -> Result<Vec<u8>, Error>;\n}\n\nimpl Signable for Data {\n fn get_bytes(&self) -> Result<Vec<u8>, Error> {\n let res = serialize(self)?;\n Ok(res)\n }\n}\n\nimpl<T> Default for SignedData<T>\nwhere\n T: Default,\n{\n fn default() -> Self {\n Self {\n signature: [0; SIG_SIZE],\n data: Default::default(),\n }\n }\n}\n\nimpl Default for Data {\n fn default() -> Self {\n Data::Billing(Default::default())\n }\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1189,"cells":{"blob_id":{"kind":"string","value":"c8a539fe0e2a23380ab156c99430c4099d68e83c"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"arashout/cover-letter-generator"},"path":{"kind":"string","value":"/src/blurb.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":1322,"string":"1,322"},"score":{"kind":"number","value":2.96875,"string":"2.96875"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"use crate::rules::{Rule, apply_rules};\nuse crate::types::TokenizedDescription;\n\n#[derive(Default)]\npub struct Blurb<'a> {\n pub precendence: u8,\n pub long_description: &'a str,\n pub short_description: &'a str,\n pub name: &'a str,\n rules: Vec<Box<dyn Rule>>,\n}\n\nimpl<'a> Blurb<'a> {\n pub fn new(name: &'a str) -> Self {\n Blurb {\n name: name,\n precendence: 10,\n long_description: &\"\",\n short_description: &\"\",\n rules: vec![],\n }\n }\n\n pub fn with_precedence(mut self, n: u8) -> Self {\n self.precendence = n;\n self\n }\n\n pub fn with_description(mut self, description: &'a str) -> Self {\n self.short_description = description;\n self.long_description = description;\n self\n }\n pub fn with_long_description(mut self, description: &'a str) -> Self {\n self.long_description = description;\n self\n }\n pub fn add_rule(mut self, boxed_rule: Box<dyn Rule>) -> Self {\n self.rules.push(boxed_rule);\n self\n }\n\n pub fn is_applicable(&self, tokenized_description: &TokenizedDescription) -> bool {\n if self.rules.len() == 0 {\n return false;\n }\n apply_rules(tokenized_description, &self.rules)\n }\n}\npub type BlurbVec<'a> = 
Vec<Blurb<'a>>;\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1190,"cells":{"blob_id":{"kind":"string","value":"4ce7f85b9bc8f8e8ddf7a5c2bb82447b83da7e83"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"richarddowner/Rust"},"path":{"kind":"string","value":"/rust-by-example/modules/modules.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":975,"string":"975"},"score":{"kind":"number","value":3.734375,"string":"3.734375"},"int_score":{"kind":"number","value":4,"string":"4"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"// Rust provides a powerful module system that can be used to hierarchically\n// split code in logical units (modules), and manage visibility (public/priv)\n// between them.\n\n// A module is a collection of items like: functions, structs, traits, impl blocks,\n// and even other modules.\n\nfn function() {\n\tprintln!(\"called `function()`\");\n}\n\n// A module named `my`\nmod my {\n\t// A module can contain items like functions\n\t#[allow(dead_code)]\n\tfn function() {\n\t\tprintln!(\"called `my::function()`\");\n\t}\n\n\t// Modules can be nested\n\tmod nested {\n\t\t#[allow(dead_code)]\n\t\tfn function() {\n\t\t\tprintln!(\"called `my::nested::function()`\");\n\t\t}\n\t}\n}\n\nfn main() {\n\tfunction();\n\n\t// items inside a module can be called using their full path\n\t// the `println` function lives in the stdio module\n\t// the `stdio` module lives in the `io` module\n\t// and the `io` module lives in the `std` crate\n\tstd::io::stdio::println(\"Hello World!\");\n\n\t// Error! `my::function` is private\n\t// my::function();\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1191,"cells":{"blob_id":{"kind":"string","value":"b8f9029ec2b77aa4922952530b4ef8e23da04d25"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"davideGiovannini/rust_sdl2_engine"},"path":{"kind":"string","value":"/leek/src/lib.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":3592,"string":"3,592"},"score":{"kind":"number","value":2.78125,"string":"2.78125"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"//! The following code creates an empty window:\n//! ```\n//! fn main() {\n//! Engine::new(\"Title\").start::<Game>();\n//! }\n//!\n//!\n//! struct Game;\n//!\n//! impl GameScene for Game {\n//! fn set_up(&mut self) {}\n//!\n//! fn logic(&mut self, context: &EngineContext, engine: &mut Engine, ui: &Ui) -> EngineAction {\n//! EngineAction::default()\n//! }\n//!\n//! fn render(&mut self, context: &EngineContext, engine: &mut Engine, ui: &Ui) {}\n//! }\n//!\n//! impl FromEngine for Game {\n//! fn init(engine: &mut Engine) -> Self {\n//! Game{}\n//! }\n//! }\n//!\n//! ```\n//! 
Update Game struct with your desired field :)\n//!\nextern crate gl;\npub extern crate sdl2;\n\npub extern crate alto;\nextern crate failure;\npub extern crate lewton;\nextern crate notify;\n\n#[macro_use]\npub extern crate imgui;\n\nuse sdl2::pixels::Color;\n\npub mod alto_utils;\nmod engine;\nmod fps_counter;\nmod game_controllers;\nmod imgui_backend;\nmod opengl;\nmod post_processing;\npub mod prelude;\nmod sdl2_utils;\n\nmod debug;\npub mod resources;\n\n#[macro_use]\nmod common_macros;\n\npub use post_processing::PostProcessEffect as PostProcessingEffect;\n\npub mod math;\npub use engine::game::{AnyGameScene, FromEngine, GameScene};\npub use game_controllers::{GameController, GameControllerManager};\n\npub use engine::action::EngineAction;\npub use engine::context::EngineContext;\npub use engine::Engine;\n\npub use sdl2_utils::log_system_info;\n\npub mod font;\n\nconst WINDOW_SIZE: (u32, u32) = (800, 600);\nconst CLEAR_COLOR: Color = Color {\n r: 0,\n g: 0,\n b: 0,\n a: 255,\n};\n\npub struct EngineBuilder<'window> {\n window_title: &'window str,\n window_size: (u32, u32),\n logical_size: Option<(u32, u32)>,\n fullscreen: bool,\n hide_cursor: bool,\n relative_cursor: bool,\n clear_color: Color,\n imgui_font_scale: f32,\n}\n\nimpl Engine {\n pub fn new(window_title: &str) -> EngineBuilder {\n EngineBuilder {\n window_title,\n window_size: WINDOW_SIZE,\n logical_size: None,\n clear_color: CLEAR_COLOR,\n fullscreen: false,\n hide_cursor: false,\n relative_cursor: false,\n imgui_font_scale: 1.5,\n }\n }\n}\n\nimpl<'window> EngineBuilder<'window> {\n /// Set the initial size of the window.\n pub fn with_window_size(&mut self, width: u32, height: u32) -> &mut Self {\n self.window_size = (width, height);\n self\n }\n\n /// Set the logical render size.\n pub fn with_logical_size(&mut self, width: u32, height: u32) -> &mut Self {\n self.logical_size = Some((width, height));\n self\n }\n\n pub fn with_clear_color(&mut self, color: Color) -> &mut Self {\n self.clear_color = color;\n self\n }\n\n pub fn with_fullscreen(&mut self, fullscreen: bool) -> &mut Self {\n self.fullscreen = fullscreen;\n self\n }\n\n pub fn with_imgui_font_scale(&mut self, font_scale: f32) -> &mut Self {\n self.imgui_font_scale = font_scale;\n self\n }\n\n pub fn with_hidden_cursor(&mut self, hide_cursor: bool) -> &mut Self {\n self.hide_cursor = hide_cursor;\n self\n }\n\n pub fn with_relative_cursor(&mut self, relative_cursor: bool) -> &mut Self {\n self.relative_cursor = relative_cursor;\n self\n }\n\n /// Start the engine.\n pub fn start(&mut self)\n where\n Scene: GameScene + FromEngine,\n {\n if let Err(error) = engine::run_engine::(self) {\n println!(\"{:?}\", error)\n }\n }\n}\n\n// RE-EXPORTS\n\npub mod keyboard {\n pub use sdl2::keyboard::Scancode;\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1192,"cells":{"blob_id":{"kind":"string","value":"8f22063d9137e61e624b12972c7fa844563d77e2"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"zargony/advent-of-code-2016"},"path":{"kind":"string","value":"/src/day20.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":1553,"string":"1,553"},"score":{"kind":"number","value":3.359375,"string":"3.359375"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"use std::num;\n\n\n/// Parse multiline-text of ranges into a 
vector of tuples\npub fn parse(s: &str) -> Result<Vec<(u32, u32)>, num::ParseIntError> {\n s.lines().map(|line| {\n let mut nums = line.split('-').map(|s| s.parse::<u32>().unwrap());\n Ok((nums.next().unwrap(), nums.next().unwrap()))\n }).collect()\n}\n\n/// Find lowest number not covered by a list of ranges\npub fn find_lowest(ranges: &[(u32, u32)]) -> u32 {\n let mut ranges = ranges.to_owned();\n ranges.sort_by_key(|n| n.0);\n let mut n = 0;\n for (from, to) in ranges {\n if from > n { break; }\n if to >= n { n = to + 1; }\n }\n n\n}\n\n/// Find amount of numbers not covered by a list of ranges\npub fn find_uncovered(ranges: &[(u32, u32)]) -> u32 {\n let mut ranges = ranges.to_owned();\n ranges.sort_by_key(|n| n.0);\n let mut upto = 0;\n let mut count = 0;\n for (from, to) in ranges {\n if from > upto { count += from - upto - 1; }\n if to > upto { upto = to; }\n }\n count\n}\n\nfn main() {\n let ranges = parse(include_str!(\"day20.txt\")).unwrap();\n println!(\"Lowest non-blocked IP: {}\", find_lowest(&ranges));\n println!(\"Number of allowed IPs: {}\", find_uncovered(&ranges));\n}\n\n\n#[cfg(test)]\nmod tests {\n use super::*;\n\n #[test]\n fn finding_lowest() {\n let ranges = parse(\"5-8\\n0-2\\n4-7\").unwrap();\n assert_eq!(find_lowest(&ranges), 3);\n }\n\n #[test]\n fn finding_uncovered() {\n let ranges = parse(\"5-8\\n0-2\\n4-7\").unwrap();\n assert_eq!(find_uncovered(&ranges), 1);\n }\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1193,"cells":{"blob_id":{"kind":"string","value":"ee0dd34daf298b6848c5a9ed8498979928af2d71"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"m1el/refterm-hash-break"},"path":{"kind":"string","value":"/main.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":12123,"string":"12,123"},"score":{"kind":"number","value":2.609375,"string":"2.609375"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"#![allow(clippy::needless_return)]\n#![feature(portable_simd)]\n\nuse core_simd::Simd;\nuse core::convert::TryInto;\nuse srng::SRng;\nuse simd_aes::SimdAes;\n\nconst DEFAULT_SEED: Simd<u8, 16> = Simd::from_array([\n 178, 201, 95, 240, 40, 41, 143, 216,\n 2, 209, 178, 114, 232, 4, 176, 188,\n]);\n\n#[allow(non_snake_case)]\nfn ComputeGlyphHash(data: &[u8]) -> Simd<u8, 16> {\n let zero = Simd::splat(0);\n let mut hash = Simd::<u64, 2>::from_array([data.len() as u64, 0]).to_ne_bytes();\n hash ^= DEFAULT_SEED;\n\n let mut chunks = data.chunks_exact(16);\n for chunk in chunks.by_ref() {\n let chunk: &[u8; 16] = chunk.try_into().unwrap();\n let value = Simd::from_array(*chunk);\n hash ^= value;\n hash = hash.aes_dec(zero);\n hash = hash.aes_dec(zero);\n hash = hash.aes_dec(zero);\n hash = hash.aes_dec(zero);\n }\n\n let remainder = chunks.remainder();\n let mut temp = [0_u8; 16];\n temp[..remainder.len()].copy_from_slice(remainder);\n let value = Simd::from_array(temp);\n\n hash ^= value;\n hash = hash.aes_dec(zero);\n hash = hash.aes_dec(zero);\n hash = hash.aes_dec(zero);\n hash = hash.aes_dec(zero);\n return hash;\n}\n\n#[allow(dead_code)]\nfn inv_aes_dec(mut data: Simd<u8, 16>, key: Simd<u8, 16>) -> Simd<u8, 16> {\n data ^= key;\n let zero = Simd::splat(0);\n data = data.aes_dec_last(zero).aes_enc(zero);\n return data.aes_enc_last(zero);\n}\n\nfn inv_aes_decx4(mut hash: Simd<u8, 16>) -> Simd<u8, 16> {\n let zero = Simd::splat(0);\n hash = hash.aes_dec_last(zero);\n hash = 
hash.aes_enc(zero);\n hash = hash.aes_enc(zero);\n hash = hash.aes_enc(zero);\n hash = hash.aes_enc(zero);\n hash = hash.aes_enc_last(zero);\n return hash;\n}\n\nfn single_prefix(count: usize, target_hash: Simd) -> Simd {\n // The first stage looks like this:\n // Hash ^ Seed = dec^4(Count ^ Seed ^ Chunk)\n // To get the chunk, we need to reverse these:\n // dec^-4(Hash ^ Seed) = Count ^ Seed ^ Chunk\n // Chunk = dec^4(Hash ^ Seed) ^ Count ^ Seed\n // To create a one-prefix initialization, we want:\n // Hash = Count\n // Count = Count + 16\n let mut hash = target_hash;\n hash = inv_aes_decx4(hash);\n\n let prefix_init = Simd::::from_array([count as u64 + 16, 0]).to_ne_bytes();\n hash ^= prefix_init;\n hash ^= DEFAULT_SEED;\n\n return hash;\n}\n\nfn preimage_prefix_hash(mut hash: Simd, data: &[u8]) -> Simd {\n let chunks = data.len() / 16;\n let tail = &data[chunks*16..];\n let mut tail_buf = [0_u8; 16];\n tail_buf[..tail.len()].copy_from_slice(tail);\n let value = Simd::from_array(tail_buf);\n\n hash = inv_aes_decx4(hash);\n hash ^= value;\n\n for chunk in data.chunks_exact(16).rev() {\n let chunk: &[u8; 16] = chunk.try_into().unwrap();\n let value = Simd::from_array(*chunk);\n hash = inv_aes_decx4(hash);\n hash ^= value;\n }\n\n return hash;\n}\n\nfn invert_block(mut hash: Simd, chunk: &[u8]) -> Simd {\n let chunk: &[u8; 16] = chunk.try_into().unwrap();\n let value = Simd::from_array(*chunk);\n hash = inv_aes_decx4(hash);\n return hash ^ value;\n}\n\nfn invert_last(suffix: &[u8], mut hash: Simd) -> Simd {\n let mut tail_buf = [0_u8; 16];\n tail_buf[..suffix.len()].copy_from_slice(suffix);\n let value = Simd::from_array(tail_buf);\n\n hash = inv_aes_decx4(hash);\n hash ^= value;\n hash = inv_aes_decx4(hash);\n return hash;\n}\n\nfn concat(prefix: Simd, target: &[u8]) -> Vec {\n let mut image = prefix.to_array().to_vec();\n image.extend_from_slice(target);\n image\n}\n\nfn prefix_collision_attack(message: &[u8]) {\n let mut target_hash = Simd::::from_array([message.len() as u64, 0]).to_ne_bytes();\n target_hash ^= DEFAULT_SEED;\n\n let prefix = single_prefix(message.len(), target_hash);\n println!(\"Demonstrating prefix attack\");\n println!(\"message: {:x?}\", message);\n println!(\"hash: {:x?}\", ComputeGlyphHash(b\"hello\"));\n println!(\"prefix: {:x?}\", prefix);\n let forgery = concat(prefix, message);\n println!(\"forgery: {:x?}\", forgery);\n println!(\"hash: {:x?}\", ComputeGlyphHash(&forgery));\n println!();\n}\n\nfn chosen_prefix(prefix: &[u8]) {\n let zero = Simd::splat(0);\n let mut message = prefix.to_vec();\n let remainder = 16 - (message.len() % 16);\n message.extend((0..remainder).map(|_| b'A'));\n message.extend((0..16).map(|_| 0));\n let hash = ComputeGlyphHash(&message);\n let pre_current = invert_last(&[], hash);\n let pre_target = invert_last(&[], zero);\n let last = message.len() - 16;\n let suffix = pre_current ^ pre_target;\n message[last..].copy_from_slice(&suffix.to_array());\n println!(\"Demonstrating chosen prefix attack\");\n println!(\"prefix: {:x?}\", prefix);\n println!(\"forgery: {:x?}\", message);\n println!(\"hash: {:x?}\", ComputeGlyphHash(&message));\n println!();\n}\n\nfn preimage_attack(suffix: &[u8]) {\n println!(\"Demonstrating preimage attack\");\n println!(\"suffix: {:x?}\", suffix);\n let target_hash = Simd::splat(0);\n println!(\"goal hash: {:x?}\", target_hash);\n let prefix_hash = preimage_prefix_hash(target_hash, suffix);\n let preimage_prefix = single_prefix(suffix.len(), prefix_hash);\n println!(\"prefix: {:x?}\", preimage_prefix);\n let 
message = concat(preimage_prefix, suffix);\n\n println!(\"message: {:x?}\", message);\n println!(\"hash: {:x?}\", ComputeGlyphHash(&message));\n}\n\nfn padding_attack() {\n println!(\"Demonstrating padding attack\");\n println!(r#\"message: \"\", hash: {:x?}\"#, ComputeGlyphHash(b\"\"));\n println!(r#\"message: \"\\x01\", hash: {:x?}\"#, ComputeGlyphHash(b\"\\x01\"));\n println!(r#\"message: \"A\", hash: {:x?}\"#, ComputeGlyphHash(b\"A\"));\n println!(r#\"message: \"B\\x00\", hash: {:x?}\"#, ComputeGlyphHash(b\"B\\x00\"));\n println!(r#\"message: \"BAAAAAAAAAAAAAAA\", hash: {:x?}\"#, ComputeGlyphHash(b\"BAAAAAAAAAAAAAAA\"));\n println!(r#\"message: \"CAAAAAAAAAAAAAAA\\x00\", hash: {:x?}\"#, ComputeGlyphHash(b\"CAAAAAAAAAAAAAAA\\x00\"));\n println!();\n}\n\nfn invert_attack(message: &[u8]) {\n println!(\"Demonstrating invert attack, invert a hash up to 15 bytes\");\n println!(\"Note: due to padding attack, there are actually more messages\");\n println!(\"plaintext: {:x?}\", message);\n let mut hash = ComputeGlyphHash(message);\n println!(\"hash: {:x?}\", hash);\n hash = inv_aes_decx4(hash);\n hash ^= DEFAULT_SEED;\n let mut buffer = hash.to_array();\n let len = buffer.iter().rposition(|&chr| chr != 0).map_or(0, |x| x + 1);\n if len == 16 {\n println!(\"the plaintext mus be shorter than 16 bytes, cannot invert\");\n return;\n }\n buffer[0] ^= len as u8;\n let recovered = &buffer[..len];\n println!(\"recovered: {:x?}\", recovered);\n println!(\"hash: {:x?}\", ComputeGlyphHash(recovered));\n println!();\n}\n\npub fn check_alphanum(bytes: Simd) -> bool {\n // check if the characters are outside of '0'..'z' range\n if (bytes - Simd::splat(b'0')).lanes_gt(Simd::splat(b'z' - b'0')).any() {\n return false;\n }\n // check if the characters are in of '9'+1..'A'-1 range\n if (bytes - Simd::splat(b'9' + 1)).lanes_lt(Simd::splat(b'A' - (b'9' + 1))).any() {\n return false;\n }\n // check if the characters are in of 'Z'+1..'a'-1 range\n if (bytes - Simd::splat(b'Z' + 1)).lanes_lt(Simd::splat(b'a' - (b'Z' + 1))).any() {\n return false;\n }\n return true;\n}\n\nuse core::sync::atomic::{AtomicBool, Ordering};\nstatic FOUND: AtomicBool = AtomicBool::new(false);\nfn find_ascii_zeros(suffix: &[u8], worker: u64) {\n const ATTACK_BYTES: usize = 6;\n let mut target_hash = Simd::::splat(0);\n let mut bsuffix = suffix;\n let suffix_len = 16 - ATTACK_BYTES;\n let mut whole_block = false;\n if suffix.len() >= suffix_len {\n target_hash = preimage_prefix_hash(target_hash, &suffix[suffix_len..]);\n bsuffix = &suffix[..suffix_len];\n whole_block = true;\n }\n let mut controlled = [0u8; 16];\n let total_len = ATTACK_BYTES + suffix.len();\n let controlled_bytes = total_len.min(16);\n let controlled = &mut controlled[..controlled_bytes];\n controlled[ATTACK_BYTES..].copy_from_slice(bsuffix);\n\n let seed = Simd::from_array([\n 17820195240, 4041143216,\n 22093178114, 2324176188,\n ]);\n let mut rng = SRng::new(seed * Simd::splat(worker + 1));\n let start = std::time::Instant::now();\n\n for ii in 0_u64.. 
{\n if FOUND.load(Ordering::Relaxed) {\n return;\n }\n\n let prefix = rng.random_alphanum();\n controlled[..6].copy_from_slice(&prefix[..6]);\n\n let prefix = {\n let prefix_hash = if whole_block {\n invert_block(target_hash, controlled)\n } else {\n preimage_prefix_hash(target_hash, controlled)\n };\n single_prefix(total_len, prefix_hash)\n };\n\n if check_alphanum(prefix) {\n FOUND.store(true, Ordering::Relaxed);\n let mut buffer = prefix.to_array().to_vec();\n buffer.extend_from_slice(&controlled[..6]);\n buffer.extend_from_slice(suffix);\n let elapsed = start.elapsed();\n let mhs = (ii as f64) / 1e6 / elapsed.as_secs_f64();\n eprintln!(\"found prefix in {}it {:?} {:3.3}MH/s/core\", ii, elapsed, mhs);\n eprintln!(\"hash: {:x?}\", ComputeGlyphHash(&buffer));\n println!(\"{}\", core::str::from_utf8(&buffer).unwrap());\n break;\n }\n }\n}\n\nconst MESSAGE: &[&[u8]] = &[\n b\" Hello Casey! I hope this message finds you well.\",\n b\" Please ignore those 22 random chars to the left for now.\",\n b\" The work you've done on refterm is admirable. There are\",\n b\" not enough performance conscious programmers around, and\",\n b\" we need a demonstration of what is achievable. However,\",\n b\" I would like to address the claim that the hash function\",\n b\" used in refterm is 'cryptographically secure'. There is\",\n b\" a very specific meaning attached to those words, namely:\",\n b\" 1) it is hard to create a message for a given hash value\",\n b\" 2) it is hard to produce two messages with the same hash\",\n b\" If you check, the following strings have the same hash:\",\n b\" xvD7FsaUdGy9UyjalZlFEU, 0XXPpB0wpVszsvSxgsn0su,\",\n b\" IGNwdjol0dxLflcnfW7vsI, jcTHx0zBJbW2tdiX157RSz.\",\n b\" In fact, every line in the message yields the exact same\",\n b\" hash value. That is 0x00000000000000000000000000000000.\",\n b\" I believe this was a clear enough demonstration that the\",\n b\" hash function `ComputeGlyphHash` isn't cryptographically\",\n b\" secure, and that an attacker can corrupt the glyph cache\",\n b\" by printing glyphs with the same hash. The main problem\",\n b\" with this hash function is that all operations consuming\",\n b\" bytes are invertible. Which means an attacker could run\",\n b\" the hash function in reverse, consuming the message from\",\n b\" behind, and calculate the message to get the given hash.\",\n b\" The hash is also weak to a padding attack. 
For example,\",\n br#\" two strings \"A\" and \"B\\x00\" yield the same hash, because\"#,\n b\" the padding is constant, so zero byte in the end doens't\",\n b\" matter, and the first byte is `xor`ed with input length.\",\n b\" If you'd like to, you can read this blog post explaining\",\n b\" these attacks in detail and how to avoid them using well\",\n b\" known methods: https://m1el.github.io/refterm-hash\",\n b\" Best regards, -- Igor\",\n];\n\nfn main() {\n padding_attack();\n invert_attack(b\"Qwerty123\");\n prefix_collision_attack(b\"hello\");\n chosen_prefix(b\"hello\");\n preimage_attack(b\"hello\");\n\n const THREADS: u64 = 16;\n for msg in MESSAGE {\n FOUND.store(false, Ordering::Relaxed);\n let threads = (0..THREADS)\n .map(|worker| std::thread::spawn(move || find_ascii_zeros(msg, worker)))\n .collect::>();\n for thread in threads {\n thread.join().unwrap();\n }\n };\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1194,"cells":{"blob_id":{"kind":"string","value":"cced4a2da2d263cc5d424bf18d58bfc715bb4de4"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"neosmart/securestore-rs"},"path":{"kind":"string","value":"/securestore/src/tests/secrets.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":4788,"string":"4,788"},"score":{"kind":"number","value":3.59375,"string":"3.59375"},"int_score":{"kind":"number","value":4,"string":"4"},"detected_licenses":{"kind":"list like","value":["Apache-2.0","MIT"],"string":"[\n \"Apache-2.0\",\n \"MIT\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"//! Highest-level tests for the secure store\n\nuse crate::errors::ErrorKind;\nuse crate::{KeySource, SecretsManager};\nuse tempfile::NamedTempFile;\n\n/// Verify that basic storage and retrieval of secrets functions correctly.\n#[test]\nfn basic_store_get() {\n // Create a new secrets manager with a known secret so we don't need to muck\n // around with keyfiles later.\n let secrets_path = NamedTempFile::new().unwrap().into_temp_path();\n let mut sman = SecretsManager::new(KeySource::Password(\"mysecret\")).unwrap();\n\n // Make sure that we can set values in different &str/String types\n sman.set(\"foo\", \"bar\");\n sman.set(\"foo\", \"bar\".to_string());\n sman.save_as(&secrets_path).unwrap();\n\n // Do we get the same value back on get?\n let getd: String = sman.get(\"foo\").unwrap();\n assert_eq!(\"bar\", getd);\n\n // Now open the store from the disk with the same settings and make sure the\n // data remains loadable.\n let sman2 = SecretsManager::load(&secrets_path, KeySource::Password(\"mysecret\")).unwrap();\n let getd: String = sman2.get(\"foo\").unwrap();\n assert_eq!(\"bar\", getd);\n}\n\n#[test]\nfn wrong_password() {\n let secrets_path = NamedTempFile::new().unwrap().into_temp_path();\n let mut sman = SecretsManager::new(KeySource::Password(\"mysecret\")).unwrap();\n\n // Set something\n sman.set(\"foo\", \"foo\");\n // And save the store to disk\n sman.save_as(&secrets_path).unwrap();\n\n // Now try loading the store with wrong password\n match SecretsManager::load(&secrets_path, KeySource::Password(\"notmysecret\")) {\n Ok(_) => panic!(\"Sentinel failed to detect wrong password on load\"),\n Err(e) => {\n assert_eq!(ErrorKind::DecryptionFailure, e.kind());\n }\n }\n}\n\n#[test]\nfn secret_not_found() {\n let sman = SecretsManager::new(KeySource::Csprng).unwrap();\n\n assert_eq!(Err(ErrorKind::SecretNotFound.into()), 
sman.get(\"foo\"));\n}\n\n#[test]\nfn csprng_export() {\n let secrets_path = NamedTempFile::new().unwrap().into_temp_path();\n let key_path = NamedTempFile::new().unwrap().into_temp_path();\n\n {\n let mut sman = SecretsManager::new(KeySource::Csprng).unwrap();\n sman.export_key(&key_path).unwrap();\n\n sman.set(\"foo\", \"bar\");\n sman.save_as(&secrets_path).unwrap();\n }\n\n let sman = SecretsManager::load(secrets_path, KeySource::File(key_path)).unwrap();\n assert_eq!(Ok(\"bar\".to_owned()), sman.get(\"foo\"));\n}\n\n#[test]\nfn password_export() {\n let secrets_path = NamedTempFile::new().unwrap().into_temp_path();\n let key_path = NamedTempFile::new().unwrap().into_temp_path();\n\n {\n let mut sman = SecretsManager::new(KeySource::Password(\"password123\")).unwrap();\n // Use legacy .export() alias .export_keyfile() to make sure it works\n sman.export_keyfile(&key_path).unwrap();\n\n sman.set(\"foo\", \"bar\");\n sman.save_as(&secrets_path).unwrap();\n }\n\n let sman = SecretsManager::load(secrets_path, KeySource::File(key_path)).unwrap();\n assert_eq!(Ok(\"bar\".to_owned()), sman.get(\"foo\"));\n}\n\n#[test]\nfn invalid_key_file() {\n let key_path = NamedTempFile::new().unwrap().into_temp_path();\n\n match SecretsManager::new(KeySource::File(key_path)) {\n Ok(_) => panic!(\"SecretsManager loaded with invalid key file!\"),\n Err(e) => assert_eq!(ErrorKind::InvalidKeyfile, e.kind()),\n }\n}\n\n#[test]\nfn binary_secret() {\n let mut sman = SecretsManager::new(KeySource::Csprng).unwrap();\n\n let (key, value) = (\"secret\", b\"Hello, world!\");\n sman.set(key, &value[..]);\n\n assert_eq!(&value[..], sman.get_as::>(key).unwrap().as_slice());\n}\n\n#[test]\n/// A release added generics to KeySource which were later removed because the\n/// default generic fallback doesn't work on current rust versions. This had\n/// let `KeySource::File(path: AsRef)` work, but broke `KeySource::Csprng`\n/// and `KeySource::Password` because the `P: AsRef` wasn't defined for\n/// those variants (unless it was explicitly provided, though not used).\n///\n/// `KeySource::File` was renamed to `KeySource::Path` and takes a `&Path` only,\n/// but a function masquerading as a variant called `KeySource::File()` was\n/// introduced that returns `impl GenericKeySource`, the trait which we now\n/// accept in the `new()` and `load()` functions. 
This function is hidden from\n/// the docs and is for backwards-compatibility only.\nfn legacy_generic_keysource() {\n // We just want to verify that this compiles, we don't test the result here.\n let _ = SecretsManager::load(\"secrets.json\", KeySource::File(\"secrets.key\"));\n}\n\n#[test]\nfn str_as_generic_keysource() {\n // We just want to verify that this compiles, we don't test the result here.\n let _ = SecretsManager::load(\"secrets.json\", \"secrets.key\");\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1195,"cells":{"blob_id":{"kind":"string","value":"68d081dfc8abf1dcb438287b74bb54f66deef276"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"jneem/rust-playground"},"path":{"kind":"string","value":"/src/skyline.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":9228,"string":"9,228"},"score":{"kind":"number","value":3.28125,"string":"3.28125"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"#[deriving(Clone, Show)]\nstruct Building {\n m: f64,\n b: f64,\n end: f64\n}\n\n// To prevent numerical instability, we don't allow large slopes.\nstatic MAX_SLOPE: f64 = 1e3;\n\nimpl Building {\n fn from_points(x1: f64, y1: f64, x2: f64, y2: f64) -> Building {\n // To avoid NaNs, we deal with vertical line segments separately.\n if x1 == x2 {\n return Building {\n m: 0.0,\n b: y1.max(y2),\n end: x1\n }\n }\n\n let m_orig = (y2 - y1) / (x2 - x1);\n let m = m_orig.max(-MAX_SLOPE).min(MAX_SLOPE);\n let b = (y1 - m*x1).max(y2 - m*x2);\n Building { m: m, b: b, end: x1.max(x2) }\n }\n\n fn intersection(&self, other: &Building) -> f64 {\n let x = (other.b - self.b) / (self.m - other.m);\n if x.is_nan() { Float::neg_infinity() } else { x }\n }\n\n fn conceals(&self, other: &Building, x: f64) -> bool {\n self.conceals_with_intersect(other, x, self.intersection(other))\n }\n\n fn conceals_with_intersect(&self,\n other: &Building,\n x: f64,\n intersect: f64) -> bool {\n if self.m == other.m {\n self.b >= other.b\n } else {\n (intersect <= x && self.m > other.m)\n || (intersect > x && self.m < other.m)\n }\n }\n\n fn empty(end: f64) -> Building {\n Building {\n m: 0.0,\n b: Float::neg_infinity(),\n end: end\n }\n }\n\n fn chop(&self, new_end: f64) -> Building {\n Building {\n m: self.m,\n b: self.b,\n end: new_end\n }\n }\n\n fn y(&self, x: f64) -> f64 {\n // We assume that the slope is not infinite. Then\n // the only way to get NaN out of m*x + b is if\n // b is infinite. But if b is infinite\n // then it should be negative infinity, and we just return it.\n if self.b.is_infinite() { self.b } else { self.m * x + self.b }\n }\n}\n\n// FIXME: the parameter of type Option is a work-around\n// for not having UFCS. 
See\n// https://mail.mozilla.org/pipermail/rust-dev/2014-May/009850.html\npub trait Direction {\n fn direction_multiplier(_: Option) -> f64;\n}\n\n#[deriving(Show)]\npub struct Up;\n\n#[deriving(Show)]\npub struct Down;\n\n#[deriving(Show)]\npub struct Left;\n\n#[deriving(Show)]\npub struct Right;\n\nimpl Direction for Up { fn direction_multiplier(_: Option) -> f64 { 1.0 } }\nimpl Direction for Down { fn direction_multiplier(_: Option) -> f64 { -1.0 } }\nimpl Direction for Left { fn direction_multiplier(_: Option) -> f64 { -1.0 } }\nimpl Direction for Right { fn direction_multiplier(_: Option) -> f64 { 1.0 } }\n\npub trait Flip {}\nimpl Flip for Down {}\nimpl Flip for Up {}\nimpl Flip for Left {}\nimpl Flip for Right {}\n\n#[deriving(Clone, Show)]\npub struct Skyline {\n buildings: Vec\n}\n\nimpl Skyline {\n pub fn empty() -> Box> {\n box Skyline {\n buildings: vec![Building::empty(Float::infinity())]\n }\n }\n\n pub fn single(x1: f64, y1: f64, x2: f64, y2: f64) -> Box> {\n let mult = Direction::direction_multiplier(None::);\n let b = Building::from_points(x1, y1 * mult, x2, y2 * mult);\n let start = Building::empty(x1.min(x2));\n let end = Building::empty(Float::infinity());\n\n box Skyline {\n buildings: vec![start, b, end]\n }\n }\n\n #[cfg(test)]\n fn from_buildings(bldgs: Vec) -> Box> {\n box Skyline {\n buildings: bldgs\n }\n }\n\n pub fn overlap>(&self, other: &Skyline) -> f64 {\n let mut dist: f64 = Float::neg_infinity();\n let mut start: f64 = Float::neg_infinity();\n let mut i = 0u;\n let mut j = 0u;\n let imax = self.buildings.len();\n let jmax = other.buildings.len();\n\n while i < imax && j < jmax {\n // Loop invariant: b1 and b2 start at or after `start`.\n let b1 = self.buildings[i];\n let b2 = other.buildings[j];\n\n let end: f64;\n if b1.end < b2.end {\n end = b1.end;\n i += 1;\n } else {\n end = b2.end;\n j += 1;\n }\n\n dist = dist.max(b1.y(start) + b2.y(start));\n dist = dist.max(b1.y(end) + b2.y(end));\n\n start = end;\n }\n\n dist\n }\n\n fn first_intersection(b: &Building,\n bldgs: &[Building],\n mut start: f64,\n idx: &mut uint) -> f64 {\n let idxmax = bldgs.len();\n while *idx < idxmax {\n let other = &bldgs[*idx];\n let intersect = b.intersection(other);\n if b.conceals_with_intersect(other, start, intersect) {\n if intersect > start && intersect < b.end.min(other.end) {\n // This building intersects with the other one.\n return intersect;\n } else if b.end < other.end {\n // This building ends before the other one.\n return b.end;\n } else {\n // The other building ends first (or they end together).\n *idx += 1;\n start = other.end;\n }\n } else {\n return start;\n }\n }\n return Float::infinity();\n }\n\n fn internal_merge(in1: &[Building],\n in2: &[Building],\n out: &mut Vec) {\n let mut start: f64 = Float::neg_infinity();\n let mut i = 0u;\n let mut j = 0u;\n let imax = in1.len();\n let jmax = in2.len();\n\n // Loop invariant: if j == jmax then i == imax-1.\n while i < imax && j < jmax {\n let b1 = &in1[i];\n let b2 = &in2[j];\n\n if b1.conceals(b2, start) {\n start = Skyline::::first_intersection(b1, in2, start, &mut j);\n out.push(b1.chop(start));\n\n // If i == imax-1 then b1.end == inf. 
If in addition,\n // start >= b1.end then we must have j == jmax-1\n // (i.e., we're done with with input skylines).\n if start >= b1.end {\n i += 1;\n }\n } else {\n start = Skyline::::first_intersection(b2, in1, start, &mut i);\n out.push(b2.chop(start));\n if start >= b2.end {\n j += 1;\n }\n }\n }\n }\n\n pub fn merge(&mut self, other: &Skyline) {\n let mut new_bldgs = Vec::new();\n Skyline::::internal_merge(self.buildings.as_slice(),\n other.buildings.as_slice(),\n &mut new_bldgs);\n\n self.buildings = new_bldgs;\n }\n\n pub fn slide(&mut self, x: f64) {\n for b in self.buildings.iter_mut() {\n b.end += x\n }\n }\n\n pub fn bump(&mut self, y: f64) {\n let y = y * Direction::direction_multiplier(None::);\n for b in self.buildings.iter_mut() {\n b.b += y\n }\n }\n}\n\n#[cfg(test)]\nmod test {\n use test_utils::ApproxEq;\n mod test_utils;\n\n impl<'a> ApproxEq for &'a Building {\n fn approx_eq<'b>(self, other: &'b Building) -> bool {\n self.m.approx_eq(other.m) &&\n self.b.approx_eq(other.b) &&\n self.end.approx_eq(other.end)\n }\n }\n\n impl<'a, T: Direction> ApproxEq for &'a Skyline {\n fn approx_eq<'b>(self, other: &'b Skyline) -> bool {\n if self.buildings.len() != other.buildings.len() {\n return false;\n }\n\n for i in range(0, self.buildings.len()) {\n if !self.buildings[i].approx_eq(&other.buildings[i]) {\n return false;\n }\n }\n return true;\n }\n }\n\n #[test]\n fn basic_skyline_merge() {\n let mut sky1 = Skyline::::single(-2.0, 0.0, -1.0, 0.0);\n let mut sky2 = Skyline::::single(1.0, 0.0, 2.0, 0.0);\n sky2.merge(&*sky1);\n\n let target = Skyline::from_buildings(\n vec!(Building::empty(-2.0),\n Building { m: 0.0, b: 0.0, end: -1.0 },\n Building::empty(1.0),\n Building { m: 0.0, b: 0.0, end: 2.0 },\n Building::empty(Float::infinity())));\n\n assert!(sky2.approx_eq(&*target));\n sky1.merge(&*sky2);\n assert!(sky1.approx_eq(&*target));\n }\n\n #[test]\n fn basic_skyline_overlap() {\n let sky1 = Skyline::::single(-1.0, 3.0, 1.0, 3.0);\n let sky2 = Skyline::::single(-1.0, 2.0, 1.0, 2.0);\n\n let d = sky1.overlap(&*sky2);\n assert!(d.approx_eq(1.0), \"d = {}, should be 1.0\", d);\n }\n\n // TODO: once compilefail tests are available, add some to make\n // sure we can't compare skylines with different directions.\n\n // TODO: test slide and bump\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1196,"cells":{"blob_id":{"kind":"string","value":"fcb01a63ca183f09223a866c6536afe442a45c51"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"yoav-steinberg/jsonpath"},"path":{"kind":"string","value":"/tests/filter.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":6059,"string":"6,059"},"score":{"kind":"number","value":2.671875,"string":"2.671875"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"#[macro_use]\nextern crate serde_json;\n\nuse common::{read_json, select_and_then_compare, setup};\n\nmod common;\n\n#[test]\nfn quote() {\n setup();\n\n select_and_then_compare(\n r#\"$['single\\'quote']\"#,\n json!({\"single'quote\":\"value\"}),\n json!([\"value\"]),\n );\n select_and_then_compare(\n r#\"$[\"double\\\"quote\"]\"#,\n json!({\"double\\\"quote\":\"value\"}),\n json!([\"value\"]),\n );\n}\n\n#[test]\nfn filter_next_all() {\n setup();\n\n for path in &[r#\"$.*\"#, r#\"$[*]\"#] {\n select_and_then_compare(\n 
path,\n json!([\"string\", 42, { \"key\": \"value\" }, [0, 1]]),\n json!([\"string\", 42, { \"key\": \"value\" }, [0, 1]]),\n );\n }\n}\n\n#[test]\nfn filter_all() {\n setup();\n\n for path in &[r#\"$..*\"#, r#\"$..[*]\"#] {\n select_and_then_compare(\n path,\n json!([\"string\", 42, { \"key\": \"value\" }, [0, 1]]),\n json!([ \"string\", 42, { \"key\" : \"value\" }, [ 0, 1 ], \"value\", 0, 1 ]),\n );\n }\n}\n\n#[test]\nfn filter_array_next_all() {\n setup();\n\n for path in &[r#\"$.*.*\"#, r#\"$[*].*\"#, r#\"$.*[*]\"#, r#\"$[*][*]\"#] {\n select_and_then_compare(\n path,\n json!([\"string\", 42, { \"key\": \"value\" }, [0, 1]]),\n json!([\"value\", 0, 1]),\n );\n }\n}\n\n#[test]\nfn filter_all_complex() {\n setup();\n\n for path in &[r#\"$..friends.*\"#, r#\"$[*].friends.*\"#] {\n select_and_then_compare(\n path,\n read_json(\"./benchmark/data_array.json\"),\n json!([\n { \"id\" : 0, \"name\" : \"Millicent Norman\" },\n { \"id\" : 1, \"name\" : \"Vincent Cannon\" },\n { \"id\" : 2, \"name\" : \"Gray Berry\" },\n { \"id\" : 0, \"name\" : \"Tillman Mckay\" },\n { \"id\" : 1, \"name\" : \"Rivera Berg\" },\n { \"id\" : 2, \"name\" : \"Rosetta Erickson\" }\n ]),\n );\n }\n}\n\n#[test]\nfn filter_parent_with_matched_child() {\n setup();\n\n select_and_then_compare(\n \"$.a[?(@.b.c == 1)]\",\n json!({\n \"a\": {\n \"b\": {\n \"c\": 1\n }\n }\n }),\n json!([\n {\n \"b\" : {\n \"c\" : 1\n }\n }\n ]),\n );\n}\n\n#[test]\nfn filter_parent_exist_child() {\n setup();\n\n select_and_then_compare(\n \"$.a[?(@.b.c)]\",\n json!({\n \"a\": {\n \"b\": {\n \"c\": 1\n }\n }\n }),\n json!([\n {\n \"b\" : {\n \"c\" : 1\n }\n }\n ]),\n );\n}\n\n#[test]\nfn filter_parent_paths() {\n setup();\n\n select_and_then_compare(\n \"$[?(@.key.subKey == 'subKey2')]\",\n json!([\n {\"key\": {\"seq\": 1, \"subKey\": \"subKey1\"}},\n {\"key\": {\"seq\": 2, \"subKey\": \"subKey2\"}},\n {\"key\": 42},\n {\"some\": \"value\"}\n ]),\n json!([{\"key\": {\"seq\": 2, \"subKey\": \"subKey2\"}}]),\n );\n}\n\n#[test]\nfn bugs33_exist_in_all() {\n setup();\n\n select_and_then_compare(\n \"$..[?(@.first.second)]\",\n json!({\n \"foo\": {\n \"first\": { \"second\": \"value\" }\n },\n \"foo2\": {\n \"first\": {}\n },\n \"foo3\": {\n }\n }),\n json!([\n {\n \"first\": {\n \"second\": \"value\"\n }\n }\n ]),\n );\n}\n\n#[test]\nfn bugs33_exist_left_in_all_with_and_condition() {\n setup();\n\n select_and_then_compare(\n \"$..[?(@.first && @.first.second)]\",\n json!({\n \"foo\": {\n \"first\": { \"second\": \"value\" }\n },\n \"foo2\": {\n \"first\": {}\n },\n \"foo3\": {\n }\n }),\n json!([\n {\n \"first\": {\n \"second\": \"value\"\n }\n }\n ]),\n );\n}\n\n#[test]\nfn bugs33_exist_right_in_all_with_and_condition() {\n setup();\n\n select_and_then_compare(\n \"$..[?(@.b.c.d && @.b)]\",\n json!({\n \"a\": {\n \"b\": {\n \"c\": {\n \"d\" : {\n \"e\" : 1\n }\n }\n }\n }\n }),\n json!([\n {\n \"b\" : {\n \"c\" : {\n \"d\" : {\n \"e\" : 1\n }\n }\n }\n }\n ]),\n );\n}\n\n#[test]\nfn bugs38_array_notation_in_filter() {\n setup();\n\n select_and_then_compare(\n \"$[?(@['key']==42)]\",\n json!([\n {\"key\": 0},\n {\"key\": 42},\n {\"key\": -1},\n {\"key\": 41},\n {\"key\": 43},\n {\"key\": 42.0001},\n {\"key\": 41.9999},\n {\"key\": 100},\n {\"some\": \"value\"}\n ]),\n json!([{\"key\": 42}]),\n );\n\n select_and_then_compare(\n \"$[?(@['key'].subKey == 'subKey2')]\",\n json!([\n {\"key\": {\"seq\": 1, \"subKey\": \"subKey1\"}},\n {\"key\": {\"seq\": 2, \"subKey\": \"subKey2\"}},\n {\"key\": 42},\n {\"some\": \"value\"}\n ]),\n json!([{\"key\": 
{\"seq\": 2, \"subKey\": \"subKey2\"}}]),\n );\n\n select_and_then_compare(\n \"$[?(@['key']['subKey'] == 'subKey2')]\",\n json!([\n {\"key\": {\"seq\": 1, \"subKey\": \"subKey1\"}},\n {\"key\": {\"seq\": 2, \"subKey\": \"subKey2\"}},\n {\"key\": 42},\n {\"some\": \"value\"}\n ]),\n json!([{\"key\": {\"seq\": 2, \"subKey\": \"subKey2\"}}]),\n );\n\n select_and_then_compare(\n \"$..key[?(@['subKey'] == 'subKey2')]\",\n json!([\n {\"key\": {\"seq\": 1, \"subKey\": \"subKey1\"}},\n {\"key\": {\"seq\": 2, \"subKey\": \"subKey2\"}},\n {\"key\": 42},\n {\"some\": \"value\"}\n ]),\n json!([{\"seq\": 2, \"subKey\": \"subKey2\"}]),\n );\n}"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1197,"cells":{"blob_id":{"kind":"string","value":"582843f9fd9e5b818bade37d0bf1ab284e8d4432"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"JiahaiHu/kv-server"},"path":{"kind":"string","value":"/src/store/mod.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":417,"string":"417"},"score":{"kind":"number","value":2.71875,"string":"2.71875"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"text":{"kind":"string","value":"use std::collections::HashMap;\n\npub mod engine;\n\npub type Key = String;\npub type Value = String;\n\npub trait Engine {\n fn get(&self, key: &Key) -> Result, ()>;\n fn put(&mut self, key: &Key, value: &Value) -> Result, ()>;\n fn delete(&mut self, key: &Key) -> Result, ()>;\n fn scan(&self, key_start: &Key, key_end: &Key) -> Result>, ()>;\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1198,"cells":{"blob_id":{"kind":"string","value":"f3729e4c24aac1e3e0c3c89a1e7c07b2a18a1a5c"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"Lakelezz/white_rabbit"},"path":{"kind":"string","value":"/src/lib.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":10550,"string":"10,550"},"score":{"kind":"number","value":3.53125,"string":"3.53125"},"int_score":{"kind":"number","value":4,"string":"4"},"detected_licenses":{"kind":"list like","value":["ISC"],"string":"[\n \"ISC\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"//! *“I'm late! I'm late! For a very important date!”*\n//! *by “The White Rabbit”* 『Alice's Adventures in Wonderland』\n//!\n//! `white_rabbit` schedules your tasks and can repeat them!\n//!\n//! One funny use case are chat bot commands: Imagine a *remind me*-command,\n//! the command gets executed and you simply create a one-time job to be\n//! scheduled for whatever time the user desires.\n//!\n//! We are using chrono's `DateTime`, enabling you to serialise and thus\n//! backup currently running tasks,\n//! in case you want to shutdown/restart your application,\n//! constructing a new scheduler is doable.\n//! However, please make sure your internal clock is synced.\n#![deny(rust_2018_idioms)]\nuse chrono::Duration as ChronoDuration;\nuse parking_lot::{Condvar, Mutex, RwLock};\nuse std::{cmp::Ordering, collections::BinaryHeap, sync::Arc, time::Duration as StdDuration};\nuse threadpool::ThreadPool;\n\npub use chrono::{DateTime, Duration, Utc};\n\n/// Compare if an `enum`-variant matches another variant.\nmacro_rules! 
cmp_variant {\n ($expression:expr, $($variant:tt)+) => {\n match $expression {\n $($variant)+ => true,\n _ => false\n }\n }\n}\n\n/// When a task is due, this will be passed to the task.\n/// Currently, there is not much use to this. However, this might be extended\n/// in the future.\npub struct Context {\n time: DateTime,\n}\n\n/// Every task will return this `enum`.\npub enum DateResult {\n /// The task is considered finished and can be fully removed.\n Done,\n /// The task will be scheduled for a new date on passed `DateTime`.\n Repeat(DateTime),\n}\n\n/// Every job gets a planned `Date` with the scheduler.\npub struct Date {\n pub context: Context,\n pub job: Box DateResult + Send + Sync + 'static>,\n}\n\nimpl Eq for Date {}\n\n/// Invert comparisions to create a min-heap.\nimpl Ord for Date {\n fn cmp(&self, other: &Date) -> Ordering {\n match self.context.time.cmp(&other.context.time) {\n Ordering::Less => Ordering::Greater,\n Ordering::Greater => Ordering::Less,\n Ordering::Equal => Ordering::Equal,\n }\n }\n}\n\n/// Invert comparisions to create a min-heap.\nimpl PartialOrd for Date {\n fn partial_cmp(&self, other: &Date) -> Option {\n Some(match self.context.time.cmp(&other.context.time) {\n Ordering::Less => Ordering::Greater,\n Ordering::Greater => Ordering::Less,\n Ordering::Equal => Ordering::Equal,\n })\n }\n}\n\nimpl PartialEq for Date {\n fn eq(&self, other: &Date) -> bool {\n self.context.time == other.context.time\n }\n}\n\n/// The [`Scheduler`]'s worker thread switches through different states\n/// while running, each state changes the behaviour.\n///\n/// [`Scheduler`]: struct.Scheduler.html\nenum SchedulerState {\n /// No dates being awaited, sleep until one gets added.\n PauseEmpty,\n /// Pause until next date is due.\n PauseTime(StdDuration),\n /// If the next date is already waiting to be executed,\n /// the thread continues running without sleeping.\n Run,\n /// Exits the thread.\n Exit,\n}\n\nimpl SchedulerState {\n fn is_running(&self) -> bool {\n cmp_variant!(*self, SchedulerState::Run)\n }\n\n fn new_pause_time(duration: ChronoDuration) -> Self {\n SchedulerState::PauseTime(\n duration\n .to_std()\n .unwrap_or_else(|_| StdDuration::from_millis(0)),\n )\n }\n}\n\n/// This scheduler exists on two levels: The handle, granting you the\n/// ability of adding new tasks, and the executor, dating and executing these\n/// tasks when specified time is met.\n///\n/// **Info**: This scheduler may not be precise due to anomalies such as\n/// preemption or platform differences.\npub struct Scheduler {\n /// The mean of communication with the running scheduler.\n condvar: Arc<(Mutex, Condvar)>,\n /// Every job has its date listed inside this.\n dates: Arc>>,\n}\n\nimpl Scheduler {\n /// Add a task to be executed when `time` is reached.\n pub fn add_task_datetime(&mut self, time: DateTime, to_execute: T)\n where\n T: FnMut(&mut Context) -> DateResult + Send + Sync + 'static,\n {\n let &(ref state_lock, ref notifier) = &*self.condvar;\n\n let task = Date {\n context: Context { time },\n job: Box::new(to_execute),\n };\n\n let mut locked_heap = self.dates.write();\n\n if locked_heap.is_empty() {\n let mut scheduler_state = state_lock.lock();\n let left = task.context.time.signed_duration_since(Utc::now());\n\n if !scheduler_state.is_running() {\n *scheduler_state = SchedulerState::new_pause_time(left);\n notifier.notify_one();\n }\n } else {\n let mut scheduler_state = state_lock.lock();\n\n if let SchedulerState::PauseTime(_) = *scheduler_state {\n let peeked = 
locked_heap.peek().expect(\"Expected heap to be filled.\");\n\n if task.context.time < peeked.context.time {\n let left = task.context.time.signed_duration_since(Utc::now());\n\n if !scheduler_state.is_running() {\n *scheduler_state = SchedulerState::PauseTime(\n left.to_std()\n .unwrap_or_else(|_| StdDuration::from_millis(0)),\n );\n notifier.notify_one();\n }\n }\n }\n }\n\n locked_heap.push(task);\n }\n\n pub fn add_task_duration(&mut self, how_long: ChronoDuration, to_execute: T)\n where\n T: FnMut(&mut Context) -> DateResult + Send + Sync + 'static,\n {\n let time = Utc::now() + how_long;\n self.add_task_datetime(time, to_execute);\n }\n}\n\nfn set_state_lock(state_lock: &Mutex, to_set: SchedulerState) {\n let mut state = state_lock.lock();\n *state = to_set;\n}\n\n#[inline]\nfn _push_and_notfiy(date: Date, heap: &mut BinaryHeap, notifier: &Condvar) {\n heap.push(date);\n notifier.notify_one();\n}\n\n/// This function pushes a `date` onto `data_pooled` and notifies the\n/// dispatching-thread in case they are sleeping.\n#[inline]\nfn push_and_notfiy(\n dispatcher_pair: &Arc<(Mutex, Condvar)>,\n data_pooled: &Arc>>,\n when: &DateTime,\n date: Date,\n) {\n let &(ref state_lock, ref notifier) = &**dispatcher_pair;\n\n let mut state = state_lock.lock();\n\n let mut heap_lock = data_pooled.write();\n\n if let Some(peek) = heap_lock.peek() {\n if peek.context.time < *when {\n let left = peek.context.time.signed_duration_since(Utc::now());\n\n *state = SchedulerState::new_pause_time(left);\n _push_and_notfiy(date, &mut heap_lock, &notifier);\n } else {\n let left = when.signed_duration_since(Utc::now());\n\n *state = SchedulerState::new_pause_time(left);\n _push_and_notfiy(date, &mut heap_lock, &notifier);\n }\n } else {\n let left = when.signed_duration_since(Utc::now());\n\n *state = SchedulerState::new_pause_time(left);\n _push_and_notfiy(date, &mut heap_lock, &notifier);\n }\n\n}\n\n#[must_use]\nenum Break {\n Yes,\n No,\n}\n\n#[inline]\nfn process_states(state_lock: &Mutex, notifier: &Condvar) -> Break {\n let mut scheduler_state = state_lock.lock();\n\n while let SchedulerState::PauseEmpty = *scheduler_state {\n notifier.wait(&mut scheduler_state);\n }\n\n while let SchedulerState::PauseTime(duration) = *scheduler_state {\n if notifier\n .wait_for(&mut scheduler_state, duration)\n .timed_out()\n {\n break;\n }\n }\n\n if let SchedulerState::Exit = *scheduler_state {\n return Break::Yes;\n }\n\n Break::No\n}\n\nfn dispatch_date(\n threadpool: &ThreadPool,\n dates: &Arc>>,\n pair_scheduler: &Arc<(Mutex, Condvar)>,\n) {\n let mut date = {\n let mut dates = dates.write();\n\n dates.pop().expect(\"Should not run on empty heap.\")\n };\n\n let date_dispatcher = dates.clone();\n let dispatcher_pair = pair_scheduler.clone();\n\n threadpool.execute(move || {\n if let DateResult::Repeat(when) = (date.job)(&mut date.context) {\n date.context.time = when;\n\n push_and_notfiy(&dispatcher_pair, &date_dispatcher, &when, date);\n }\n });\n}\n\nfn check_peeking_date(dates: &Arc>>, state_lock: &Mutex) {\n if let Some(next) = dates.read().peek() {\n let now = Utc::now();\n\n if next.context.time > now {\n let left = next.context.time.signed_duration_since(now);\n\n set_state_lock(&state_lock, SchedulerState::new_pause_time(left));\n } else {\n set_state_lock(&state_lock, SchedulerState::Run);\n }\n } else {\n set_state_lock(&state_lock, SchedulerState::PauseEmpty);\n }\n}\n\nimpl Scheduler {\n /// Creates a new [`Scheduler`] which will use `thread_count` number of\n /// threads when tasks are being 
dispatched/dated.\n ///\n /// [`Scheduler`]: struct.Scheduler.html\n pub fn new(thread_count: usize) -> Self {\n let pair = Arc::new((Mutex::new(SchedulerState::PauseEmpty), Condvar::new()));\n let pair_scheduler = pair.clone();\n let dates: Arc>> = Arc::new(RwLock::new(BinaryHeap::new()));\n\n let dates_scheduler = Arc::clone(&dates);\n std::thread::spawn(move || {\n let &(ref state_lock, ref notifier) = &*pair_scheduler;\n let threadpool = ThreadPool::new(thread_count);\n\n loop {\n if let Break::Yes = process_states(&state_lock, &notifier) {\n break;\n }\n\n dispatch_date(&threadpool, &dates_scheduler, &pair_scheduler);\n\n check_peeking_date(&dates_scheduler, &state_lock);\n }\n });\n\n Scheduler {\n condvar: pair,\n dates,\n }\n }\n}\n\n/// Once the scheduler is dropped, we also need to join and finish the thread.\nimpl<'a> Drop for Scheduler {\n fn drop(&mut self) {\n let &(ref state_lock, ref notifier) = &*self.condvar;\n\n let mut state = state_lock.lock();\n *state = SchedulerState::Exit;\n notifier.notify_one();\n }\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}},{"rowIdx":1199,"cells":{"blob_id":{"kind":"string","value":"faa3c8a3555c2ab38c745576140f3d8811832fd9"},"language":{"kind":"string","value":"Rust"},"repo_name":{"kind":"string","value":"neont21/pjos-rust-programming"},"path":{"kind":"string","value":"/chapter11/assert_format/src/lib.rs"},"src_encoding":{"kind":"string","value":"UTF-8"},"length_bytes":{"kind":"number","value":341,"string":"341"},"score":{"kind":"number","value":3.390625,"string":"3.390625"},"int_score":{"kind":"number","value":3,"string":"3"},"detected_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"license_type":{"kind":"string","value":"permissive"},"text":{"kind":"string","value":"pub fn greeting(name: &str) -> String {\n String::from(\"Hello?\")\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n\n #[test]\n fn greeting_contains_name() {\n let result = greeting(\"Peter\");\n assert!(result.contains(\"Peter\"),\n \"Greeting did not contain name, value was '{}'\",\n result\n );\n }\n}\n"},"download_success":{"kind":"bool","value":true,"string":"true"}}}],"truncated":false,"partial":false},"paginationData":{"pageIndex":11,"numItemsPerPage":100,"numTotalItems":1135379,"offset":1100,"length":100}},"jwt":"eyJhbGciOiJFZERTQSJ9.eyJyZWFkIjp0cnVlLCJwZXJtaXNzaW9ucyI6eyJyZXBvLmNvbnRlbnQucmVhZCI6dHJ1ZX0sImlhdCI6MTc1Njc2MTc4Miwic3ViIjoiL2RhdGFzZXRzL2hvbmdsaXU5OTAzL3N0YWNrX2VkdV9ydXN0IiwiZXhwIjoxNzU2NzY1MzgyLCJpc3MiOiJodHRwczovL2h1Z2dpbmdmYWNlLmNvIn0.aUyJCO90DQP87jKsN34V39Xzs_vwPLz6jzo8QYKbkvekRxL7dtCuQTuvXt3ZxyFFBc18QrA026uMKc0Fq7y5Aw","displayUrls":true},"discussionsStats":{"closed":0,"open":0,"total":0},"fullWidth":true,"hasGatedAccess":true,"hasFullAccess":true,"isEmbedded":false,"savedQueries":{"community":[],"user":[]}}">
Dataset columns (value ranges as reported by the dataset viewer):

  blob_id             stringlengths   40 .. 40
  language            stringclasses   1 value
  repo_name           stringlengths   5 .. 140
  path                stringlengths   5 .. 183
  src_encoding        stringclasses   6 values
  length_bytes        int64           12 .. 5.32M
  score               float64         2.52 .. 4.94
  int_score           int64           3 .. 5
  detected_licenses   listlengths     0 .. 47
  license_type        stringclasses   2 values
  text                stringlengths   12 .. 5.32M
  download_success    bool            1 class
repo_name: yatender-oktalk/fun-rust | path: /play_with_vectors/src/main.rs
language: Rust | src_encoding: UTF-8 | length_bytes: 1,025 | score: 3.921875 | int_score: 4
detected_licenses: [] | license_type: no_license | blob_id: 8d772e6588eaf7fbaa3012e3bfefce62f6255328
//! #[derive(Debug)] struct Human { name: String, age: Option<u16>, } impl Human { pub fn new(n: String, a: Option<u16>) -> Self { Self { name: n, age: a } } } fn main() { let mut v: Vec<i32> = Vec::new(); push_in_list(&mut v, 23); push_in_list(&mut v, 33); push_in_list(&mut v, 53); println!("{:?}", v); let mut v_human: Vec<Human> = Vec::new(); let human_1 = Human::new(String::from("Yatender"), Some(10)); let human_2 = Human::new(String::from("milky silky"), Some(2)); push_into_vec(&mut v_human, human_1); push_into_vec(&mut v_human, human_2); for i in &v_human { println!("{}", is_present(&i)); } println!("{:?}", v_human); } fn is_present(human: &Human) -> bool { match human.age { Some(10) => true, _x => false, } } fn push_in_list(v: &mut Vec<i32>, number: i32) -> &Vec<i32> { v.push(number); v } fn push_into_vec(v: &mut Vec<Human>, human: Human) -> &Vec<Human> { v.push(human); v }
download_success: true
repo_name: kklibo/panorama-explorer | path: /src/photo.rs
language: Rust | src_encoding: UTF-8 | length_bytes: 5,720 | score: 3.109375 | int_score: 3
detected_licenses: ["LicenseRef-scancode-unknown-license-reference", "Apache-2.0", "MIT"] | license_type: permissive | blob_id: 9918bc0a073d3851844300ad32d5ef2e83dd93a8
use std::rc::Rc; use std::fmt::{Display,Formatter}; use three_d::{Mat4,Texture,InnerSpace}; use serde::{Serialize, Deserialize, Serializer}; use serde::ser::SerializeStruct; pub use crate::entities::LoadedImageMesh; use crate::viewport_geometry::{WorldCoords, PixelCoords}; use crate::world_rectangle::{WorldRectangle,LocalCoords}; pub struct Photo { pub loaded_image_mesh: Rc<LoadedImageMesh>, ///this Photo's world space orientation: ///* scales 1 (unwarped) pixel to 1 WorldCoords unit ///* translates center from world origin in WorldCoords units ///* rotates around photo center orientation: WorldRectangle, } //todo: make this complete? impl Serialize for Photo { fn serialize<S>(&self, serializer: S) -> Result<<S as Serializer>::Ok, <S as Serializer>::Error> where S: Serializer { let mut state = serializer.serialize_struct("Photo", 2)?; state.serialize_field("translate", &self.orientation.translate)?; state.serialize_field("rotate", &self.orientation.rotate)?; state.end() } } impl Display for Photo { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { writeln!(f, "scale: {:?}", self.orientation.scale)?; writeln!(f, "translate: {:?}", self.orientation.translate)?; Ok(()) } } impl Photo { pub fn from_loaded_image_mesh(m: Rc<LoadedImageMesh>) -> Self { let orientation = WorldRectangle::new(m.texture_2d.width() as f32,m.texture_2d.height() as f32); Self { loaded_image_mesh: m, orientation, } } pub fn set_from_json_serde_string(&mut self, s: &str) -> Result<(), Box<dyn std::error::Error>> { #[derive(Deserialize)] struct SavedFields { translate: Mat4, rotate: Mat4, } let saved_fields: SavedFields = serde_json::from_str(s)?; self.orientation.translate = saved_fields.translate; self.orientation.rotate = saved_fields.rotate; Ok(()) } pub fn orientation(&self) -> &WorldRectangle { &self.orientation } pub fn set_translation(&mut self, center: WorldCoords) { self.orientation.set_translation(center) } pub fn set_rotation(&mut self, angle: f32) { self.orientation.set_rotation(angle) } pub fn rotate_around_point(&mut self, angle: f32, point: WorldCoords) { self.orientation.rotate_around_point(angle, point) } /// gets the WorldCoords location of pixel coords in this photo pub fn world_coords(&self, pixel_coords: PixelCoords) -> WorldCoords { Self::world_coords_impl(&self.orientation, pixel_coords) } fn world_coords_impl(world_rectangle: &WorldRectangle, pixel_coords: PixelCoords) -> WorldCoords { let local_coords = Self::local_coords(world_rectangle, pixel_coords); world_rectangle.world_coords(local_coords) } fn local_coords(world_rectangle: &WorldRectangle, pixel_coords: PixelCoords) -> LocalCoords { let width = world_rectangle.scale.x.magnitude() as f64; let height = world_rectangle.scale.y.magnitude() as f64; let local_x = if width == 0.0 { //if width is somehow 0, center on origin 0.0 } else { //scale to width = 1 let x = pixel_coords.x / width; //center on origin let x = x - 0.5; x }; let local_y = if height == 0.0 { //if height is somehow 0, center on origin 0.0 } else { //scale to width = 1 let y = pixel_coords.y / height; //center on origin let y = y - 0.5; //flip y-coords to positive = up let y = -y; y }; LocalCoords{x: local_x, y: local_y} } } #[cfg(test)] mod test { use super::*; #[test] fn world_coords_test() { //at origin, no rotation { let mut orientation = WorldRectangle::new(200.0, 100.0); orientation.set_rotation(0.0); orientation.set_translation(WorldCoords { x: 0.0, y: 0.0 }); //top left corner { let pixel_coords = PixelCoords { x: 0.0, y: 0.0 }; let world_coords = 
Photo::world_coords_impl(&orientation, pixel_coords); assert_eq!(world_coords, WorldCoords { x: -100.0, y: 50.0 }); } //bottom right corner { let pixel_coords = PixelCoords { x: 200.0, y: 100.0 }; let world_coords = Photo::world_coords_impl(&orientation, pixel_coords); assert_eq!(world_coords, WorldCoords { x: 100.0, y: -50.0 }); } } //rotated + translated { let mut orientation = WorldRectangle::new(200.0, 100.0); orientation.set_rotation(90.0); let x = 2000.0; let y = 1000.0; orientation.set_translation(WorldCoords {x,y}); //top left corner { let pixel_coords = PixelCoords { x: 0.0, y: 0.0 }; let world_coords = Photo::world_coords_impl(&orientation, pixel_coords); assert_eq!(world_coords, WorldCoords { x: x - 50.0, y: y - 100.0 }); } //bottom right corner { let pixel_coords = PixelCoords { x: 200.0, y: 100.0 }; let world_coords = Photo::world_coords_impl(&orientation, pixel_coords); assert_eq!(world_coords, WorldCoords { x: x + 50.0, y: y + 100.0 }); } } } }
download_success: true
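The `local_coords` helper above normalizes a pixel position onto a unit square centered on the photo and flips y so that positive y points up. A minimal std-only sketch of that arithmetic (not from the repository; plain `f64` tuples stand in for `PixelCoords`/`LocalCoords`, and the 200x100 photo size is an assumed example):

// Sketch of the pixel -> local mapping: scale to a unit square, center on the
// origin, and flip y. Degenerate sizes collapse to the origin, as above.
fn local_coords(width: f64, height: f64, px: f64, py: f64) -> (f64, f64) {
    let x = if width == 0.0 { 0.0 } else { px / width - 0.5 };
    let y = if height == 0.0 { 0.0 } else { -(py / height - 0.5) };
    (x, y)
}

fn main() {
    // Top-left pixel of an assumed 200x100 photo maps to (-0.5, 0.5).
    assert_eq!(local_coords(200.0, 100.0, 0.0, 0.0), (-0.5, 0.5));
    // Bottom-right pixel maps to (0.5, -0.5).
    assert_eq!(local_coords(200.0, 100.0, 200.0, 100.0), (0.5, -0.5));
    println!("local-coords mapping checks passed");
}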
repo_name: joephon/PoloDB | path: /src/polodb_core/meta_doc_helper.rs
language: Rust | src_encoding: UTF-8 | length_bytes: 3,245 | score: 2.6875 | int_score: 3
detected_licenses: ["MIT"] | license_type: permissive | blob_id: 2d434de272daa220cf3cf98bbd35ad2d1b81d34e
use polodb_bson::{Document, Value, mk_document}; use std::rc::Rc; use crate::DbResult; use crate::error::DbErr; /// root_btree schema /// { /// _id: String, /// name: String, /// root_pid: Int, /// flags: Int, /// } /// /// flags indicates: /// key_ty: 1byte /// ... /// pub(crate) struct MetaDocEntry { name: String, doc: Rc<Document>, } pub(crate) const KEY_TY_FLAG: u32 = 0b11111111; impl MetaDocEntry { pub fn new(id: u32, name: String, root_pid: u32) -> MetaDocEntry { let doc = mk_document! { "_id": id, "name": name.clone(), "root_pid": root_pid, "flags": 0, }; MetaDocEntry { name, doc: Rc::new(doc), } } pub(crate) fn from_doc(doc: Rc<Document>) -> MetaDocEntry { let name = doc.get(meta_doc_key::NAME).unwrap().unwrap_string(); MetaDocEntry { name: name.into(), doc, } } #[allow(dead_code)] pub(crate) fn name(&self) -> &str { self.name.as_str() } pub(crate) fn root_pid(&self) -> u32 { self.doc.get(meta_doc_key::ROOT_PID).unwrap().unwrap_int() as u32 } pub(crate) fn set_root_pid(&mut self, new_root_pid: u32) { let doc_mut = Rc::get_mut(&mut self.doc).unwrap(); doc_mut.insert(meta_doc_key::ROOT_PID.into(), Value::from(new_root_pid)); } pub(crate) fn flags(&self) -> u32 { self.doc.get(meta_doc_key::FLAGS).unwrap().unwrap_int() as u32 } pub(crate) fn set_flags(&mut self, flags: u32) { let doc_mut = Rc::get_mut(&mut self.doc).unwrap(); doc_mut.insert(meta_doc_key::FLAGS.into(), Value::from(flags)); } #[inline] fn key_ty(&self) -> u8 { (self.flags() & KEY_TY_FLAG) as u8 } pub(crate) fn check_pkey_ty(&self, primary_key: &Value, skipped: &mut bool) -> DbResult<()> { let expected = self.key_ty(); if expected == 0 { *skipped = true; return Ok(()) } let actual_ty = primary_key.ty_int(); if expected != actual_ty { return Err(DbErr::UnexpectedIdType(expected, actual_ty)) } Ok(()) } pub(crate) fn merge_pkey_ty_to_meta(&mut self, value_doc: &Document) { let pkey_ty = value_doc.pkey_id().unwrap().ty_int(); self.set_flags(self.flags() | ((pkey_ty as u32) & KEY_TY_FLAG)); } #[inline] pub(crate) fn doc_ref(&self) -> &Document { self.doc.as_ref() } pub(crate) fn set_indexes(&mut self, indexes: Document) { let doc_mut = Rc::get_mut(&mut self.doc).unwrap(); doc_mut.insert(meta_doc_key::INDEXES.into(), Value::from(indexes)); } } pub(crate) mod meta_doc_key { pub(crate) static ID: &str = "_id"; pub(crate) static ROOT_PID: &str = "root_pid"; pub(crate) static NAME: &str = "name"; pub(crate) static FLAGS: &str = "flags"; pub(crate) static INDEXES: &str = "indexes"; pub(crate) mod index { pub(crate) static NAME: &str = "name"; pub(crate) static V: &str = "v"; pub(crate) static UNIQUE: &str = "unique"; pub(crate) static ROOT_PID: &str = "root_pid"; } }
download_success: true
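The `flags` field above packs the primary-key type id into its low byte through `KEY_TY_FLAG`. A small std-only sketch of that masking (illustrative only; the type id and the extra high bit below are made-up values, not PoloDB's real ones):

// The key-type id lives in the low byte of `flags`; other bits are left alone.
const KEY_TY_FLAG: u32 = 0b1111_1111;

fn merge_pkey_ty(flags: u32, pkey_ty: u8) -> u32 {
    flags | (pkey_ty as u32 & KEY_TY_FLAG)
}

fn key_ty(flags: u32) -> u8 {
    (flags & KEY_TY_FLAG) as u8
}

fn main() {
    let flags = 0b1_0000_0000;              // an unrelated high bit already set
    let flags = merge_pkey_ty(flags, 0x07); // store a hypothetical type id 7
    assert_eq!(key_ty(flags), 0x07);        // the low byte round-trips
    assert_eq!(flags >> 8, 0b1);            // high bits are untouched
    println!("flags = {:#b}", flags);
}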
repo_name: davidpdrsn/oops-lang | path: /src/prep/mod.rs
language: Rust | src_encoding: UTF-8 | length_bytes: 9,760 | score: 2.703125 | int_score: 3
detected_licenses: [] | license_type: no_license | blob_id: a9723ebc52864227a7fde3e20647415f5ab8ffa0
use crate::ast::{visit_ast, Ast, Visitor}; use crate::{ ast::{self, Ident}, error::{Error, Result}, interpret::{ClassVTable, VTable}, Span, }; use std::{collections::HashMap, rc::Rc}; pub type Classes<'a> = VTable<'a, Rc<Class<'a>>>; pub fn find_classes_and_methods<'a>( ast: &'a Ast<'a>, built_in_classes: Classes<'a>, ) -> Result<'a, Classes<'a>> { let classes = find_classes(ast, built_in_classes)?; find_methods(ast, classes) } fn find_classes<'a>(ast: &'a Ast<'a>, built_in_classes: Classes<'a>) -> Result<'a, Classes<'a>> { let mut f = FindClasses { table: built_in_classes, }; visit_ast(&mut f, ast)?; f.setup_super_classes()?; Ok(f.table) } struct FindClasses<'a> { table: Classes<'a>, } impl<'a> Visitor<'a> for FindClasses<'a> { type Error = Error<'a>; fn visit_define_class(&mut self, node: &'a ast::DefineClass<'a>) -> Result<'a, ()> { let name = &node.name.class_name.0; let key = name.name; self.check_for_existing_class_with_same_name(key, node)?; let fields = self.make_fields(node); let super_class_name = &node.super_class.class_name.0; let class = Class::new(name, super_class_name, fields, node.span); self.table.insert(key, Rc::new(class)); Ok(()) } } impl<'a> FindClasses<'a> { fn check_for_existing_class_with_same_name( &self, key: &'a str, node: &'a ast::DefineClass<'a>, ) -> Result<'a, ()> { if let Some(other) = self.table.get(key) { Err(Error::ClassAlreadyDefined { class: &key, first_span: other.span, second_span: node.span, }) } else { Ok(()) } } fn make_fields(&self, node: &'a ast::DefineClass<'a>) -> VTable<'a, Field<'a>> { node.fields .iter() .map(|field| { let ident = &field.ident; let field = Field { name: ident }; (ident.name, field) }) .collect() } fn setup_super_classes(&mut self) -> Result<'a, ()> { let mut acc = HashMap::new(); for (class_name, class) in &self.table { let super_class_name = &class.super_class_name; // Object isn't supposed to have a super class if class_name == &"Object" { continue; } let super_class = self.table .get(&super_class_name.name) .ok_or_else(|| Error::ClassNotDefined { class: super_class_name.name, span: class.span, })?; let super_class = Rc::clone(&super_class); acc.insert(*class_name, (super_class, class.span)); } for (class_name, (super_class, span)) in acc { let mut class = self.table .get_mut(class_name) .ok_or_else(|| Error::ClassNotDefined { class: class_name, span, })?; Rc::get_mut(&mut class) .expect("Internal error: Rc borrowed mut more than once") .super_class = Some(super_class); } Ok(()) } } struct FindMethods<'a> { classes: Classes<'a>, } fn find_methods<'a>(ast: &'a Ast<'a>, classes: Classes<'a>) -> Result<'a, Classes<'a>> { let mut f = FindMethods { classes }; visit_ast(&mut f, ast)?; Ok(f.classes) } impl<'a> Visitor<'a> for FindMethods<'a> { type Error = Error<'a>; fn visit_define_method(&mut self, node: &'a ast::DefineMethod<'a>) -> Result<'a, ()> { let method_name = &node.method_name.ident; let key = method_name.name; let class_name = &node.class_name.0.name; { let class = self .classes .get(class_name) .ok_or_else(|| Error::ClassNotDefined { class: class_name, span: node.span, })?; self.check_for_existing_method_with_same_name(class, key, node)?; } let method = self.make_method(method_name, &node.block, node.span); let mut class = self .classes .get_mut(class_name) .ok_or_else(|| Error::ClassNotDefined { class: class_name, span: node.span, })?; let class = Rc::get_mut(&mut class) .expect("Internal error: FindMethods.classes borrowed mut more than once"); class.methods.insert(key, method); Ok(()) } } impl<'a> FindMethods<'a> 
{ fn check_for_existing_method_with_same_name( &self, class: &Class<'a>, key: &'a str, node: &'a ast::DefineMethod<'a>, ) -> Result<'a, ()> { if let Some(other) = class.methods.get(key) { return Err(Error::MethodAlreadyDefined { class: class.name.name, method: key, first_span: other.span, second_span: node.span, }); } else { Ok(()) } } fn make_method( &self, method_name: &'a Ident<'a>, block: &'a ast::Block<'a>, span: Span, ) -> Method<'a> { Method { name: method_name, parameters: &block.parameters, body: &block.body, span, } } } #[derive(Debug)] pub struct Class<'a> { pub name: &'a Ident<'a>, pub super_class_name: &'a Ident<'a>, pub super_class: Option<Rc<Class<'a>>>, pub fields: VTable<'a, Field<'a>>, pub methods: VTable<'a, Method<'a>>, pub span: Span, } impl<'a> Class<'a> { fn new( name: &'a Ident<'a>, super_class_name: &'a Ident<'a>, fields: VTable<'a, Field<'a>>, span: Span, ) -> Self { Self { name, fields, super_class_name, super_class: None, methods: VTable::new(), span, } } pub fn get_method_named( &self, method_name: &'a str, call_site: Span, ) -> Result<'a, &Method<'a>> { let method = self.methods.get(method_name); if let Some(method) = method { return Ok(method); } if let Some(super_class) = &self.super_class { // TODO: Change method name of returned error // Otherwise it'll always be "Object" return super_class.get_method_named(method_name, call_site); } Err(Error::UndefinedMethod { class: &self.name.name, method: method_name, span: call_site, }) } } #[derive(Debug, Eq, PartialEq, Hash)] pub struct Field<'a> { pub name: &'a Ident<'a>, } #[derive(Debug)] pub struct Method<'a> { pub name: &'a Ident<'a>, pub parameters: &'a Vec<ast::Parameter<'a>>, pub body: &'a Vec<ast::Stmt<'a>>, pub span: Span, } // TODO: Bring back // #[cfg(test)] // mod test { // #[allow(unused_imports)] // use super::*; // use crate::{lex::lex, parse::parse}; // #[test] // fn finds_classes_and_methods() { // let program = r#" // [User def: #foo do: || { return 123; }]; // [Class subclass name: #User fields: [#id]]; // "#; // let tokens = lex(&program).unwrap(); // let ast = parse(&tokens).unwrap(); // let classes = find_classes_and_methods(&ast).unwrap(); // let class = classes.get("User").unwrap(); // assert_eq!("User", class.name.name); // assert_eq!( // vec!["id"], // class // .fields // .values() // .map(|v| v.name.name) // .collect::<Vec<_>>() // ); // assert_eq!(vec![&"id"], class.fields.keys().collect::<Vec<_>>()); // assert_eq!( // vec!["foo"], // class // .methods // .values() // .map(|v| v.name.name) // .collect::<Vec<_>>() // ); // assert_eq!(vec![&"foo"], class.methods.keys().collect::<Vec<_>>()); // } // #[test] // fn errors_if_class_is_defined_twice() { // let program = r#" // [Class subclass name: #User fields: [#foo]]; // [Class subclass name: #User fields: [#bar]]; // "#; // let tokens = lex(&program).unwrap(); // let ast = parse(&tokens).unwrap(); // let result = find_classes_and_methods(&ast); // assert_error!(result, Error::ClassAlreadyDefined { .. }); // } // #[test] // fn errors_if_method_is_defined_twice() { // let program = r#" // [Class subclass name: #User fields: [#foo]]; // [User def: #foo do: || { return 1; }]; // [User def: #foo do: || { return 2; }]; // "#; // let tokens = lex(&program).unwrap(); // let ast = parse(&tokens).unwrap(); // let result = find_classes_and_methods(&ast); // assert_error!(result, Error::MethodAlreadyDefined { .. 
}); // } // #[test] // fn errors_if_you_define_methods_on_classes_that_dont_exist() { // let program = r#" // [User def: #foo do: || { return 1; }]; // "#; // let tokens = lex(&program).unwrap(); // let ast = parse(&tokens).unwrap(); // let result = find_classes_and_methods(&ast); // assert_error!(result, Error::ClassNotDefined { .. }); // } // }
download_success: true
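`Class::get_method_named` above resolves a method by first checking the class's own table and then recursing into `super_class`. A condensed std-only sketch of that lookup (hypothetical class and method names; method bodies are reduced to plain strings):

use std::collections::HashMap;
use std::rc::Rc;

// A class holds its own method table plus an optional super class.
struct Class {
    methods: HashMap<&'static str, &'static str>,
    super_class: Option<Rc<Class>>,
}

impl Class {
    // Look in this class first, then walk up the super-class chain.
    fn get_method_named(&self, name: &str) -> Option<&'static str> {
        self.methods
            .get(name)
            .copied()
            .or_else(|| self.super_class.as_ref()?.get_method_named(name))
    }
}

fn main() {
    let object = Rc::new(Class {
        methods: HashMap::from([("inspect", "Object#inspect")]),
        super_class: None,
    });
    let user = Class {
        methods: HashMap::from([("name", "User#name")]),
        super_class: Some(Rc::clone(&object)),
    };

    assert_eq!(user.get_method_named("name"), Some("User#name"));
    assert_eq!(user.get_method_named("inspect"), Some("Object#inspect")); // inherited
    assert_eq!(user.get_method_named("missing"), None);
}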
repo_name: Alligator/advent-of-code-2019 | path: /day-3/src/main.rs
language: Rust | src_encoding: UTF-8 | length_bytes: 4,923 | score: 3.390625 | int_score: 3
detected_licenses: [] | license_type: no_license | blob_id: 611cb8859515989e081216c117599e4bbeb2b45f
use std::fs; use std::collections::{HashSet, HashMap}; type Point = (i64, i64); fn get_points(path: &[&str]) -> (HashSet<Point>, HashMap<Point, i64>) { let mut current_point: Point = (0, 0); let mut points = HashSet::<Point>::new(); let mut steps = HashMap::new(); let mut total_steps = 0; points.insert(current_point); for item in path { let count = &item[1..].parse::<i64>().unwrap(); let mut x_diff = 0; let mut y_diff = 0; if item.starts_with("R") { x_diff = 1; y_diff = 0; } else if item.starts_with("L") { x_diff = -1; y_diff = 0; } else if item.starts_with("U") { x_diff = 0; y_diff = -1; } else if item.starts_with("D") { x_diff = 0; y_diff = 1; } for i in 0..=*count { let point = (current_point.0 + (i * x_diff), current_point.1 + (i * y_diff)); points.insert(point); if !steps.contains_key(&point) { steps.insert(point, total_steps); } total_steps += 1; } // take off the extra step we counted total_steps -= 1; current_point = (current_point.0 + (*count * x_diff), current_point.1 + (*count * y_diff)); } return (points, steps); } fn get_closest_distance(path1: &[&str], path2: &[&str]) -> i64 { let (points1, _steps1) = get_points(&path1); let (points2, _steps2) = get_points(&path2); return points1 .intersection(&points2) .copied() .filter(|p| !(p.0 == 0 && p.1 == 0)) .map(|p| (p.0.abs() + p.1.abs())) .min() .unwrap(); } fn get_closest_steps(path1: &[&str], path2: &[&str]) -> i64 { let (points1, steps1) = get_points(&path1); let (points2, steps2) = get_points(&path2); return points1 .intersection(&points2) .copied() .filter(|p| !(p.0 == 0 && p.1 == 0)) .map(|p| steps1.get(&p).unwrap() + steps2.get(&p).unwrap()) .min() .unwrap(); } fn main() { let src = fs::read_to_string("input.txt").unwrap(); let mut lines = src .split_whitespace() .map(|x| x.split(",").collect()); let path1: Vec<&str> = lines.next().unwrap(); let path2: Vec<&str> = lines.next().unwrap(); let point = get_closest_distance(&path1, &path2); println!("part1: {}", point); let steps = get_closest_steps(&path1, &path2); println!("part2: {}", steps); } #[cfg(test)] mod tests { use super::*; #[test] fn test_get_points() { let path = ["R8","U5","L5","D3"]; let (points, _steps) = get_points(&path); let mut expected_points = HashSet::new(); // R8 expected_points.insert((0, 0)); expected_points.insert((1, 0)); expected_points.insert((2, 0)); expected_points.insert((3, 0)); expected_points.insert((4, 0)); expected_points.insert((5, 0)); expected_points.insert((6, 0)); expected_points.insert((7, 0)); expected_points.insert((8, 0)); // U5 expected_points.insert((8, -1)); expected_points.insert((8, -2)); expected_points.insert((8, -3)); expected_points.insert((8, -4)); expected_points.insert((8, -5)); // L5 expected_points.insert((7, -5)); expected_points.insert((6, -5)); expected_points.insert((5, -5)); expected_points.insert((4, -5)); expected_points.insert((3, -5)); // D3 expected_points.insert((3, -4)); expected_points.insert((3, -3)); expected_points.insert((3, -2)); assert_eq!(points, expected_points); } #[test] fn test_get_closest_point_1() { let path1 = ["R8","U5","L5","D3"]; let path2 = ["U7","R6","D4","L4"]; assert_eq!(get_closest_distance(&path1, &path2), 6); } #[test] fn test_get_closest_point_2() { let path1 = ["R75","D30","R83","U83","L12","D49","R71","U7","L72"]; let path2 = ["U62","R66","U55","R34","D71","R55","D58","R83"]; assert_eq!(get_closest_distance(&path1, &path2), 159); } #[test] fn test_get_closest_point_3() { let path1 = ["R98","U47","R26","D63","R33","U87","L62","D20","R33","U53","R51"]; let path2 = 
["U98","R91","D20","R16","D67","R40","U7","R15","U6","R7"]; assert_eq!(get_closest_distance(&path1, &path2), 135); } #[test] fn test_get_closest_point_cross_at_zero() { let path1 = ["R10"]; let path2 = ["U5", "R5", "D5"]; assert_eq!(get_closest_distance(&path1, &path2), 5); } #[test] fn test_get_closest_steps() { let path1 = ["R8","U5","L5","D3"]; let path2 = ["U7","R6","D4","L4"]; assert_eq!(get_closest_steps(&path1, &path2), 30); } }
download_success: true
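Part 1 above reduces to intersecting the two sets of visited points, dropping the shared origin, and taking the minimum Manhattan distance. A compact std-only sketch of just that step (tiny hand-made point sets, not real parsed wire paths):

use std::collections::HashSet;

// Intersect the visited-point sets, skip the origin, take the smallest
// Manhattan distance.
fn closest_crossing(a: &HashSet<(i64, i64)>, b: &HashSet<(i64, i64)>) -> Option<i64> {
    a.intersection(b)
        .filter(|p| **p != (0, 0))      // the origin is not a real crossing
        .map(|p| p.0.abs() + p.1.abs()) // Manhattan distance
        .min()
}

fn main() {
    let wire1: HashSet<_> = [(0, 0), (3, 0), (3, 3), (6, 5)].into_iter().collect();
    let wire2: HashSet<_> = [(0, 0), (3, 3), (6, 5)].into_iter().collect();
    assert_eq!(closest_crossing(&wire1, &wire2), Some(6)); // (3, 3) beats (6, 5)
    println!("closest crossing distance: 6");
}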
repo_name: tatamiya/actix-web-tutorial | path: /application/src/main.rs
language: Rust | src_encoding: UTF-8 | length_bytes: 1,897 | score: 2.890625 | int_score: 3
detected_licenses: [] | license_type: no_license | blob_id: 067ed15e7ce5e7faef7377ca7365d5e219952858
use actix_web::{get, web, App, HttpServer, HttpResponse, Responder}; use std::sync::Mutex; async fn index() -> impl Responder { "Hello world!" } struct AppState { app_name: String, } #[get("/")] async fn state(data: web::Data<AppState>) -> String { let app_name = &data.app_name; format!("Hello {}!", app_name) } struct AppStateWithCounter { counter: Mutex<i32>, } async fn shared_mutable_state(data: web::Data<AppStateWithCounter>) -> String { let mut counter = data.counter.lock().unwrap(); *counter += 1; format!("Request number: {}", counter) } fn scoped_config(cfg: &mut web::ServiceConfig) { cfg.service( web::resource("/test") .route(web::get().to(|| HttpResponse::Ok().body("test"))) .route(web::head().to(|| HttpResponse::MethodNotAllowed())), ); } fn config(cfg: &mut web::ServiceConfig) { cfg.service( web::resource("/hoge") .route(web::get().to(|| HttpResponse::Ok().body("hoge"))) .route(web::head().to(|| HttpResponse::MethodNotAllowed())), ); } #[actix_web::main] async fn main() -> std::io::Result<()> { let counter = web::Data::new(AppStateWithCounter { counter: Mutex::new(0), }); HttpServer::new(move || { App::new() .data(AppState { app_name: String::from("Actix-web"), }) .service( web::scope("/app") .route("/index.html", web::get().to(index)), ) .service(state) .app_data(counter.clone()) .route("/shared_mutable_state.html", web::get().to(shared_mutable_state)) .configure(config) .service(web::scope("/api").configure(scoped_config)) .route("/", web::get().to(|| HttpResponse::Ok().body("/"))) }) .bind("127.0.0.1:8080")? .run() .await }
download_success: true
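The counter above is shared across workers because it is created once, outside the `HttpServer::new` factory closure, and cloned into every app instance via `web::Data` (an `Arc`-based wrapper). A std-only analogue of that sharing, with threads standing in for actix workers (a sketch, not actix-web API code):

use std::sync::{Arc, Mutex};
use std::thread;

fn main() {
    // Created once, like the `counter` built before `HttpServer::new` above.
    let counter = Arc::new(Mutex::new(0_i32));

    let handles: Vec<_> = (0..4)
        .map(|_| {
            let counter = Arc::clone(&counter); // one clone per "worker"
            thread::spawn(move || {
                for _ in 0..100 {
                    // The same critical section a handler would enter.
                    *counter.lock().unwrap() += 1;
                }
            })
        })
        .collect();

    for h in handles {
        h.join().unwrap();
    }

    assert_eq!(*counter.lock().unwrap(), 400);
    println!("Request number: {}", counter.lock().unwrap());
}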
repo_name: greendwin/rust_ray | path: /src/world/scene.rs
language: Rust | src_encoding: UTF-8 | length_bytes: 1,026 | score: 2.953125 | int_score: 3
detected_licenses: [] | license_type: no_license | blob_id: 6e2ddde408de01c3b7658bb7f51fd7d02461260d
use crate::math::*;

pub trait LightDecl {
    fn orig(&self) -> Vec3;
    fn radius(&self) -> f64;
    fn color_at(&self, pt: Vec3) -> Vec3;
}

pub trait Scene {
    type Mat: Material;
    type Obj: HitRay<Self::Mat>;
    type Light: LightDecl;

    fn objs(&self) -> &[Self::Obj];
    fn lights(&self) -> &[Self::Light];
}

impl<Scn> HitRay<Scn::Mat> for Scn
where
    Scn: Scene,
{
    fn hit(&self, ray: &Ray, t_min: f64, t_max: f64) -> Option<(Hit, <Self as Scene>::Mat)> {
        let mut closest_hit = None;
        let mut cur_t_max = t_max;

        for obj in self.objs() {
            if let Some((hit, mat)) = obj.hit(ray, t_min, cur_t_max) {
                cur_t_max = hit.t;
                closest_hit.replace((hit, mat));
            }
        }

        // for lgt in self.lights() {
        //     if let Some((hit, mat)) = lgt.hit(ray, t_min, cur_t_max) {
        //         cur_t_max = hit.t;
        //         closest_hit.replace((hit, mat));
        //     }
        // }

        closest_hit
    }
}
download_success: true
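The blanket `HitRay` impl above keeps only the nearest intersection by shrinking `t_max` to each accepted hit's `t`, so later objects must be strictly closer to replace it. A distilled std-only version of that loop (objects reduced to optional hit distances):

// Walk all objects, shrink the acceptable range to the nearest hit so far,
// and remember which object produced it.
fn closest_hit(hits: &[Option<f64>], t_min: f64, t_max: f64) -> Option<(usize, f64)> {
    let mut closest: Option<(usize, f64)> = None;
    let mut cur_t_max = t_max;

    for (idx, maybe_t) in hits.iter().enumerate() {
        if let Some(t) = *maybe_t {
            if t > t_min && t < cur_t_max {
                cur_t_max = t; // later objects must beat this distance
                closest = Some((idx, t));
            }
        }
    }
    closest
}

fn main() {
    // Object 2 is nearest; object 0 sits behind it and object 1 misses.
    let hits = [Some(7.5), None, Some(2.0)];
    assert_eq!(closest_hit(&hits, 0.001, f64::INFINITY), Some((2, 2.0)));
    println!("nearest hit: object 2 at t = 2.0");
}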
repo_name: KanchiShimono/rust-rocket-juniper-graphql-example | path: /src/db/repository.rs
language: Rust | src_encoding: UTF-8 | length_bytes: 3,012 | score: 2.96875 | int_score: 3
detected_licenses: [] | license_type: no_license | blob_id: 88b5dc70fbe177e0ddcaf20e7e09dbffb4d5971f
use crate::db::{ models::{Person, PersonWithPosts, Post}, Db, }; use crate::graphql::schema::{CreatePersonInput, CreatePostInput}; use chrono::Utc; use diesel::result::Error; use uuid::Uuid; pub trait PersonRepository { fn find_all(&self) -> Result<Vec<Person>, Error>; fn find_by_id(&self, id: Uuid) -> Result<Person, Error>; fn save(&self, input: CreatePersonInput) -> Result<Person, Error>; fn delete(&self, id: Uuid) -> Result<Person, Error>; } pub trait PostRepository { fn find_all(&self) -> Result<Vec<Post>, Error>; fn find_by_id(&self, id: Uuid) -> Result<Post, Error>; fn find_by_person_id(&self, person_id: Uuid) -> Result<Vec<Post>, Error>; fn save(&self, input: CreatePostInput) -> Result<Post, Error>; fn delete(&self, id: Uuid) -> Result<Post, Error>; } pub trait PersonWithPostsRepository { fn find_all(&self) -> Result<Vec<PersonWithPosts>, Error>; fn find_by_id(&self, id: Uuid) -> Result<PersonWithPosts, Error>; } pub struct PgPersonRepository { pub conn: Db, } impl PersonRepository for PgPersonRepository { fn find_all(&self) -> Result<Vec<Person>, Error> { Person::find_all(&self.conn) } fn find_by_id(&self, id: Uuid) -> Result<Person, Error> { Person::find_by_id(&self.conn, id) } fn save(&self, input: CreatePersonInput) -> Result<Person, Error> { let now = Utc::now().naive_utc(); let new_person = Person { id: Uuid::new_v4(), name: input.name, create_at: now, update_at: now, }; Person::save(&self.conn, new_person) } fn delete(&self, id: Uuid) -> Result<Person, Error> { Person::delete(&self.conn, id) } } pub struct PgPostRepository { pub conn: Db, } impl PostRepository for PgPostRepository { fn find_all(&self) -> Result<Vec<Post>, Error> { Post::find_all(&self.conn) } fn find_by_id(&self, id: Uuid) -> Result<Post, Error> { Post::find_by_id(&self.conn, id) } fn find_by_person_id(&self, person_id: Uuid) -> Result<Vec<Post>, Error> { Post::find_by_person_id(&self.conn, person_id) } fn save(&self, input: CreatePostInput) -> Result<Post, Error> { let now = Utc::now().naive_utc(); let new_post = Post { id: Uuid::new_v4(), person_id: input.person_id, text: input.text, create_at: now, update_at: now, }; Post::save(&self.conn, new_post) } fn delete(&self, id: Uuid) -> Result<Post, Error> { Post::delete(&self.conn, id) } } pub struct PgPersonWithPostsRepository { pub conn: Db, } impl PersonWithPostsRepository for PgPersonWithPostsRepository { fn find_all(&self) -> Result<Vec<PersonWithPosts>, Error> { PersonWithPosts::find_all(&self.conn) } fn find_by_id(&self, id: Uuid) -> Result<PersonWithPosts, Error> { PersonWithPosts::find_by_id(&self.conn, id) } }
download_success: true
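The traits above keep the GraphQL layer decoupled from Diesel: callers depend on `PersonRepository`, and `PgPersonRepository` is just one implementation. A minimal std-only sketch of the same pattern (an in-memory map stands in for Postgres, and the record type is trimmed to an id and a name):

use std::collections::HashMap;

// The trait fixes the interface; a storage-specific type implements it.
trait PersonRepository {
    fn find_by_id(&self, id: u32) -> Option<String>;
    fn save(&mut self, id: u32, name: &str);
}

struct InMemoryPersonRepository {
    rows: HashMap<u32, String>,
}

impl PersonRepository for InMemoryPersonRepository {
    fn find_by_id(&self, id: u32) -> Option<String> {
        self.rows.get(&id).cloned()
    }

    fn save(&mut self, id: u32, name: &str) {
        self.rows.insert(id, name.to_string());
    }
}

fn main() {
    let mut repo = InMemoryPersonRepository { rows: HashMap::new() };
    repo.save(1, "Ada");
    assert_eq!(repo.find_by_id(1), Some("Ada".to_string()));
    assert_eq!(repo.find_by_id(2), None);
}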
repo_name: lovoror/ruffle | path: /core/src/avm1/globals/color.rs
language: Rust | src_encoding: UTF-8 | length_bytes: 7,142 | score: 2.765625 | int_score: 3
detected_licenses: ["MIT", "LicenseRef-scancode-unknown-license-reference", "Apache-2.0"] | license_type: permissive | blob_id: 064df29d49d4b6c65284677b2f90fad30d23de45
//! Color object //! //! TODO: This should change when `ColorTransform` changes to match Flash's representation //! (See GitHub #193) use crate::avm1::property::Attribute::*; use crate::avm1::return_value::ReturnValue; use crate::avm1::{Avm1, Error, Object, ScriptObject, TObject, UpdateContext, Value}; use crate::display_object::{DisplayObject, TDisplayObject}; use enumset::EnumSet; use gc_arena::MutationContext; pub fn constructor<'gc>( avm: &mut Avm1<'gc>, context: &mut UpdateContext<'_, 'gc, '_>, mut this: Object<'gc>, args: &[Value<'gc>], ) -> Result<ReturnValue<'gc>, Error> { // The target display object that this color will modify. let target = args.get(0).cloned().unwrap_or(Value::Undefined); // Set undocumented `target` property this.set("target", target, avm, context)?; this.set_attributes( context.gc_context, Some("target"), DontDelete | ReadOnly | DontEnum, EnumSet::empty(), ); Ok(Value::Undefined.into()) } pub fn create_proto<'gc>( gc_context: MutationContext<'gc, '_>, proto: Object<'gc>, fn_proto: Object<'gc>, ) -> Object<'gc> { let mut object = ScriptObject::object(gc_context, Some(proto)); object.force_set_function( "getRGB", get_rgb, gc_context, DontDelete | ReadOnly | DontEnum, Some(fn_proto), ); object.force_set_function( "getTransform", get_transform, gc_context, DontDelete | ReadOnly | DontEnum, Some(fn_proto), ); object.force_set_function( "setRGB", set_rgb, gc_context, DontDelete | ReadOnly | DontEnum, Some(fn_proto), ); object.force_set_function( "setTransform", set_transform, gc_context, DontDelete | ReadOnly | DontEnum, Some(fn_proto), ); object.into() } /// Gets the target display object of this color transform. fn target<'gc>( avm: &mut Avm1<'gc>, context: &mut UpdateContext<'_, 'gc, '_>, this: Object<'gc>, ) -> Result<Option<DisplayObject<'gc>>, Error> { // The target path resolves based on the active tellTarget clip of the stack frame. // This means calls on the same `Color` object could set the color of different clips // depending on which timeline its called from! let target = this.get("target", avm, context)?.resolve(avm, context)?; let start_clip = avm.target_clip_or_root(context); avm.resolve_target_display_object(context, start_clip, target) } fn get_rgb<'gc>( avm: &mut Avm1<'gc>, context: &mut UpdateContext<'_, 'gc, '_>, this: Object<'gc>, _args: &[Value<'gc>], ) -> Result<ReturnValue<'gc>, Error> { if let Some(target) = target(avm, context, this)? { let color_transform = target.color_transform(); let r = ((color_transform.r_add * 255.0) as i32) << 16; let g = ((color_transform.g_add * 255.0) as i32) << 8; let b = (color_transform.b_add * 255.0) as i32; Ok((r | g | b).into()) } else { Ok(Value::Undefined.into()) } } fn get_transform<'gc>( avm: &mut Avm1<'gc>, context: &mut UpdateContext<'_, 'gc, '_>, this: Object<'gc>, _args: &[Value<'gc>], ) -> Result<ReturnValue<'gc>, Error> { if let Some(target) = target(avm, context, this)? 
{ let color_transform = target.color_transform(); let out = ScriptObject::object(context.gc_context, Some(avm.prototypes.object)); out.set("ra", (color_transform.r_mult * 100.0).into(), avm, context)?; out.set("ga", (color_transform.g_mult * 100.0).into(), avm, context)?; out.set("ba", (color_transform.b_mult * 100.0).into(), avm, context)?; out.set("aa", (color_transform.a_mult * 100.0).into(), avm, context)?; out.set("rb", (color_transform.r_add * 255.0).into(), avm, context)?; out.set("gb", (color_transform.g_add * 255.0).into(), avm, context)?; out.set("bb", (color_transform.b_add * 255.0).into(), avm, context)?; out.set("ab", (color_transform.a_add * 255.0).into(), avm, context)?; Ok(out.into()) } else { Ok(Value::Undefined.into()) } } fn set_rgb<'gc>( avm: &mut Avm1<'gc>, context: &mut UpdateContext<'_, 'gc, '_>, this: Object<'gc>, args: &[Value<'gc>], ) -> Result<ReturnValue<'gc>, Error> { if let Some(target) = target(avm, context, this)? { let mut color_transform = target.color_transform_mut(context.gc_context); let rgb = args .get(0) .unwrap_or(&Value::Undefined) .as_number(avm, context)? as i32; let r = (((rgb >> 16) & 0xff) as f32) / 255.0; let g = (((rgb >> 8) & 0xff) as f32) / 255.0; let b = ((rgb & 0xff) as f32) / 255.0; color_transform.r_mult = 0.0; color_transform.g_mult = 0.0; color_transform.b_mult = 0.0; color_transform.r_add = r; color_transform.g_add = g; color_transform.b_add = b; } Ok(Value::Undefined.into()) } fn set_transform<'gc>( avm: &mut Avm1<'gc>, context: &mut UpdateContext<'_, 'gc, '_>, this: Object<'gc>, args: &[Value<'gc>], ) -> Result<ReturnValue<'gc>, Error> { if let Some(target) = target(avm, context, this)? { let mut color_transform = target.color_transform_mut(context.gc_context); if let Ok(transform) = args.get(0).unwrap_or(&Value::Undefined).as_object() { color_transform.r_mult = transform .get("ra", avm, context)? .resolve(avm, context)? .as_number(avm, context)? as f32 / 100.0; color_transform.g_mult = transform .get("ga", avm, context)? .resolve(avm, context)? .as_number(avm, context)? as f32 / 100.0; color_transform.b_mult = transform .get("ba", avm, context)? .resolve(avm, context)? .as_number(avm, context)? as f32 / 100.0; color_transform.a_mult = transform .get("aa", avm, context)? .resolve(avm, context)? .as_number(avm, context)? as f32 / 100.0; color_transform.r_add = transform .get("rb", avm, context)? .resolve(avm, context)? .as_number(avm, context)? as f32 / 255.0; color_transform.g_add = transform .get("gb", avm, context)? .resolve(avm, context)? .as_number(avm, context)? as f32 / 255.0; color_transform.b_add = transform .get("bb", avm, context)? .resolve(avm, context)? .as_number(avm, context)? as f32 / 255.0; color_transform.a_add = transform .get("ab", avm, context)? .resolve(avm, context)? .as_number(avm, context)? as f32 / 255.0; } } Ok(Value::Undefined.into()) }
download_success: true
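`getRGB`/`setRGB` above treat the color as a packed `0xRRGGBB` integer: each 0-255 channel is shifted into its own byte, and reading the color reverses the shift. A tiny std-only sketch of that packing (the example color is arbitrary):

// Pack three 0-255 channels into 0xRRGGBB and unpack them again.
fn pack_rgb(r: u8, g: u8, b: u8) -> i32 {
    ((r as i32) << 16) | ((g as i32) << 8) | (b as i32)
}

fn unpack_rgb(rgb: i32) -> (u8, u8, u8) {
    (((rgb >> 16) & 0xff) as u8, ((rgb >> 8) & 0xff) as u8, (rgb & 0xff) as u8)
}

fn main() {
    let packed = pack_rgb(0x12, 0x34, 0x56);
    assert_eq!(packed, 0x123456);
    assert_eq!(unpack_rgb(packed), (0x12, 0x34, 0x56));
    println!("packed = {:#08x}", packed);
}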
repo_name: oli-obk/regex | path: /src/input.rs
language: Rust | src_encoding: UTF-8 | length_bytes: 3,213 | score: 3.0625 | int_score: 3
detected_licenses: ["Apache-2.0", "MIT", "LicenseRef-scancode-other-permissive", "LicenseRef-scancode-unknown-license-reference"] | license_type: permissive | blob_id: 1ff027f567eab8468cc0b753954b9ce6074aa5e1
// Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use std::ops; use char::Char; use prefix::Prefix; /// Represents a location in the input. #[derive(Clone, Copy, Debug)] pub struct InputAt { pos: usize, c: Char, len: usize, } impl InputAt { /// Returns true iff this position is at the beginning of the input. pub fn is_beginning(&self) -> bool { self.pos == 0 } /// Returns the character at this position. /// /// If this position is just before or after the input, then an absent /// character is returned. pub fn char(&self) -> Char { self.c } /// Returns the UTF-8 width of the character at this position. pub fn len(&self) -> usize { self.len } /// Returns the byte offset of this position. pub fn pos(&self) -> usize { self.pos } /// Returns the byte offset of the next position in the input. pub fn next_pos(&self) -> usize { self.pos + self.len } } /// An abstraction over input used in the matching engines. pub trait Input { /// Return an encoding of the position at byte offset `i`. fn at(&self, i: usize) -> InputAt; /// Return an encoding of the char position just prior to byte offset `i`. fn previous_at(&self, i: usize) -> InputAt; /// Scan the input for a matching prefix. fn prefix_at(&self, prefixes: &Prefix, at: InputAt) -> Option<InputAt>; } /// An input reader over characters. /// /// (This is the only implementation of `Input` at the moment.) #[derive(Debug)] pub struct CharInput<'t>(&'t str); impl<'t> CharInput<'t> { /// Return a new character input reader for the given string. pub fn new(s: &'t str) -> CharInput<'t> { CharInput(s) } } impl<'t> ops::Deref for CharInput<'t> { type Target = str; fn deref(&self) -> &str { self.0 } } impl<'t> Input for CharInput<'t> { // This `inline(always)` increases throughput by almost 25% on the `hard` // benchmarks over a normal `inline` annotation. // // I'm not sure why `#[inline]` isn't enough to convince LLVM, but it is // used *a lot* in the guts of the matching engines. #[inline(always)] fn at(&self, i: usize) -> InputAt { let c = self[i..].chars().next().into(); InputAt { pos: i, c: c, len: c.len_utf8(), } } fn previous_at(&self, i: usize) -> InputAt { let c: Char = self[..i].chars().rev().next().into(); let len = c.len_utf8(); InputAt { pos: i - len, c: c, len: len, } } fn prefix_at(&self, prefixes: &Prefix, at: InputAt) -> Option<InputAt> { prefixes.find(&self[at.pos()..]).map(|(s, _)| self.at(at.pos() + s)) } }
true
920e392cc3bce508f48a5c9d863fe40c23fa0cd6
Rust
mrsekut/rytl
/src/lexer/lexer.rs
UTF-8
3,770
3.40625
3
[]
no_license
use crate::lexer::{LexerError, Loc, Token}; // fn recognize_many(input: &[u8], mut pos: usize, mut f: impl FnMut(u8) -> bool) -> usize { // while pos < input.len() && f(input[pos]) { // println!("pos: {:?}", pos); // println!("len: {:?}", input.len()); // pos += 1; // } // pos // } pub fn lexer(input: &str) -> Result<Vec<Token>, LexerError> { let mut tokens = Vec::new(); let input = input.as_bytes(); let mut pos = 0; macro_rules! lex_a_token { ($token_method:ident, $pos:ident) => {{ tokens.push(Token::$token_method(Loc(pos, pos + 1))); pos = $pos + 1; }}; } while pos < input.len() { match input[pos] { b'0'...b'9' => { // TODO: clean use std::str::from_utf8; let start = pos; while pos < input.len() && b"1234567890".contains(&input[pos]) { pos += 1; } let n = from_utf8(&input[start..pos]).unwrap().parse().unwrap(); tokens.push(Token::number(n, Loc(start, pos))); // let end = recognize_many(input, pos, |b| b"0123456789".contains(&b)); // let n = from_utf8(&input[start..end]).unwrap().parse().unwrap(); // tokens.push(Token::number(n, Loc(start, end))); } b'a'...b'z' => { // TODO: clean use std::str::from_utf8; let start = pos; // let end = recognize_many(input, start, |b| b"abcdefghijklmnopqrstuvwxyz".contains(&b)); // let s = from_utf8(&input[start..end]).unwrap(); // tokens.push(Token::var(s, Loc(start, end))); while pos < input.len() && b"abcdefghijklmnopqrstuvwxyz".contains(&input[pos]) { pos += 1; } let s = from_utf8(&input[start..pos]).unwrap(); tokens.push(Token::var(s, Loc(start, pos))); } b'+' => lex_a_token!(plus, pos), b'-' => lex_a_token!(minus, pos), b'*' => lex_a_token!(asterisk, pos), b'/' => lex_a_token!(slash, pos), b'(' => lex_a_token!(lparen, pos), b')' => lex_a_token!(rparen, pos), b':' => { // TODO: clean let start = pos; // let end = recognize_many(input, start, |b| b"=".contains(&b)); // tokens.push(Token::bind(Loc(start, end))); // FIXME: while pos < input.len() && b":=".contains(&input[pos]) { pos += 1; } tokens.push(Token::bind(Loc(start, pos))); } b' ' | b'\n' | b'\t' => { pos = pos + 1; } b => return Err(LexerError::invalid_char(b as char, Loc(pos, pos + 1))), } } Ok(tokens) } #[test] fn test_lexer() { assert_eq!( lexer("12 + (3 - 123) * 3 / 4"), Ok(vec![ Token::number(12, Loc(0, 2)), Token::plus(Loc(3, 4)), Token::lparen(Loc(5, 6)), Token::number(3, Loc(6, 7)), Token::minus(Loc(8, 9)), Token::number(123, Loc(10, 13)), Token::rparen(Loc(13, 14)), Token::asterisk(Loc(15, 16)), Token::number(3, Loc(17, 18)), Token::slash(Loc(19, 20)), Token::number(4, Loc(21, 22)), ]) ) } #[test] fn test_bind_lexer() { assert_eq!( lexer("hoge := 42"), Ok(vec![ Token::var("hoge", Loc(0, 4)), Token::bind(Loc(5, 7)), Token::number(42, Loc(8, 10)), ]) ) }
true
d895644361ccf54127a89cc741d41f70e7bd15c7
Rust
18616378431/myCode
/rust/test8-39/src/main.rs
UTF-8
458
3.34375
3
[]
no_license
//字符串回顾 fn main() { let s = r"1234 5678 9876 4321"; let (mut x, mut y) = (0, 0); for (idx, val) in s.lines().enumerate() { let val = val.trim(); let left = val.get(idx..idx+1).unwrap().parse::<u32>().unwrap(); let right = val.get((3 - idx)..(3 - idx + 1)).unwrap().parse::<u32>().unwrap(); x += left; y += right; } assert_eq!(38, x + y); }
true
2e70401732620bdd078944c51688846ee5ce0c5c
Rust
mattmahn/rosetta-code
/tasks/cum-stdev/rust/src/main.rs
UTF-8
673
3.6875
4
[ "Unlicense" ]
permissive
pub struct CumulativeStandardDeviation { n: f64, sum: f64, sum_sq: f64 } impl CumulativeStandardDeviation { pub fn new() -> Self { CumulativeStandardDeviation { n: 0., sum: 0., sum_sq: 0. } } fn push(&mut self, x: f64) -> f64 { self.n += 1.; self.sum += x; self.sum_sq += x * x; (self.sum_sq / self.n - self.sum * self.sum / self.n / self.n).sqrt() } } fn main() { let nums = [2, 4, 4, 4, 5, 5, 7, 9]; let mut cum_stdev = CumulativeStandardDeviation::new(); for num in nums.iter() { println!("{}", cum_stdev.push(*num as f64)); } }
true
7f22d79324bc0b01611a15a97f1b7d6345cd459d
Rust
Farooq-azam-khan/rust-practice
/borrow2.rs
UTF-8
230
3.546875
4
[]
no_license
fn main() { let mut s1 = String::from("Hello"); let len = string_len(&mut s1); println!("{} has {} chars", s1, len); } fn string_len(word: &mut String) -> usize { word.push_str(", world"); word.len() }
true
31e5c200fdaa6adf66b9dde168b61241044651cd
Rust
silverweed/ecsde
/ecs_game/src/spatial.rs
UTF-8
13,139
2.546875
3
[]
no_license
use inle_alloc::temp::*; use inle_app::app::Engine_State; use inle_ecs::ecs_world::{Ecs_World, Entity, Evt_Entity_Destroyed}; use inle_events::evt_register::{with_cb_data, wrap_cb_data, Event_Callback_Data}; use inle_math::vector::Vec2f; use inle_physics::collider::C_Collider; use inle_physics::phys_world::{Collider_Handle, Physics_World}; use inle_physics::spatial::Spatial_Accelerator; use std::cmp::Ordering; use std::collections::HashMap; #[cfg(debug_assertions)] use {inle_debug::painter::Debug_Painter, std::collections::HashSet}; // @Speed: tune these numbers const CHUNK_WIDTH: f32 = 200.; const CHUNK_HEIGHT: f32 = 200.; #[derive(Default, Copy, Clone, Debug, PartialEq, Eq, Hash)] pub struct Chunk_Coords { pub x: i32, pub y: i32, } impl PartialOrd for Chunk_Coords { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { Some(self.cmp(other)) } } impl Ord for Chunk_Coords { fn cmp(&self, other: &Self) -> Ordering { match self.y.cmp(&other.y) { Ordering::Greater => Ordering::Greater, Ordering::Less => Ordering::Less, Ordering::Equal => self.x.cmp(&other.x), } } } impl Chunk_Coords { pub fn from_pos(pos: Vec2f) -> Self { Self { x: (pos.x / CHUNK_WIDTH).floor() as i32, y: (pos.y / CHUNK_HEIGHT).floor() as i32, } } pub fn to_world_pos(self) -> Vec2f { Vec2f { x: self.x as f32 * CHUNK_WIDTH, y: self.y as f32 * CHUNK_HEIGHT, } } } pub struct World_Chunks { chunks: HashMap<Chunk_Coords, World_Chunk>, to_destroy: Event_Callback_Data, } #[derive(Default, Debug)] pub struct World_Chunk { pub colliders: Vec<Collider_Handle>, } impl World_Chunks { pub fn new() -> Self { Self { chunks: HashMap::new(), to_destroy: wrap_cb_data(Vec::<Entity>::new()), } } pub fn init(&mut self, engine_state: &mut Engine_State) { engine_state .systems .evt_register .subscribe::<Evt_Entity_Destroyed>( Box::new(|entity, to_destroy| { with_cb_data(to_destroy.unwrap(), |to_destroy: &mut Vec<Entity>| { to_destroy.push(entity); }); }), Some(self.to_destroy.clone()), ); } pub fn update(&mut self, ecs_world: &Ecs_World, phys_world: &Physics_World) { trace!("world_chunks::update"); let mut to_remove = vec![]; with_cb_data(&mut self.to_destroy, |to_destroy: &mut Vec<Entity>| { for &entity in to_destroy.iter() { if let Some(collider) = ecs_world.get_component::<C_Collider>(entity) { for (cld, handle) in phys_world.get_all_colliders_with_handles(collider.phys_body_handle) { to_remove.push((handle, cld.position, cld.shape.extent())); } } } to_destroy.clear(); }); for (cld, pos, extent) in to_remove { self.remove_collider(cld, pos, extent); } } pub fn n_chunks(&self) -> usize { self.chunks.len() } pub fn add_collider(&mut self, cld_handle: Collider_Handle, pos: Vec2f, extent: Vec2f) { let mut chunks = vec![]; self.get_all_chunks_containing(pos, extent, &mut chunks); for coords in chunks { self.add_collider_coords(cld_handle, coords); } } fn add_collider_coords(&mut self, cld_handle: Collider_Handle, coords: Chunk_Coords) { let chunk = self .chunks .entry(coords) .or_insert_with(World_Chunk::default); debug_assert!( !chunk.colliders.contains(&cld_handle), "Duplicate collider {:?} in chunk {:?}!", cld_handle, coords ); chunk.colliders.push(cld_handle); } pub fn remove_collider(&mut self, cld_handle: Collider_Handle, pos: Vec2f, extent: Vec2f) { let mut chunks = vec![]; self.get_all_chunks_containing(pos, extent, &mut chunks); for coords in chunks { self.remove_collider_coords(cld_handle, coords); } } fn remove_collider_coords(&mut self, cld_handle: Collider_Handle, coords: Chunk_Coords) { let chunk = 
self.chunks.get_mut(&coords).unwrap_or_else(|| { fatal!( "Collider {:?} should be in chunk {:?}, but that chunk does not exist.", cld_handle, coords ) }); let idx = chunk.colliders.iter().position(|&c| c == cld_handle); if let Some(idx) = idx { chunk.colliders.remove(idx); if chunk.colliders.is_empty() { self.chunks.remove(&coords); } } else { lerr!( "Collider {:?} not found in expected chunk {:?}.", cld_handle, coords ); } } pub fn update_collider( &mut self, cld_handle: Collider_Handle, prev_pos: Vec2f, new_pos: Vec2f, extent: Vec2f, frame_alloc: &mut Temp_Allocator, ) { trace!("world_chunks::update_collider"); let mut prev_coords = excl_temp_array(frame_alloc); self.get_all_chunks_containing(prev_pos, extent, &mut prev_coords); let prev_coords = unsafe { prev_coords.into_read_only() }; let mut new_coords = excl_temp_array(frame_alloc); self.get_all_chunks_containing(new_pos, extent, &mut new_coords); let new_coords = unsafe { new_coords.into_read_only() }; let mut all_chunks = excl_temp_array(frame_alloc); // Pre-allocate enough memory to hold all the chunks; then `chunks_to_add` starts at index 0, // while `chunks_to_remove` starts at index `new_coords.len()`. // This works because we can have at most `new_coords.len()` chunks to add and `prev_coords.len()` // chunks to remove. unsafe { all_chunks.alloc_additional_uninit(new_coords.len() + prev_coords.len()); } let mut n_chunks_to_add = 0; let mut n_chunks_to_remove = 0; let chunks_to_add_offset = 0; let chunks_to_remove_offset = new_coords.len(); // Find chunks to add and to remove in O(n). // This algorithm assumes that both prev_coords and new_coords are sorted and deduped. let mut p_idx = 0; let mut n_idx = 0; while p_idx < prev_coords.len() && n_idx < new_coords.len() { let pc = prev_coords[p_idx]; let nc = new_coords[n_idx]; match pc.cmp(&nc) { Ordering::Less => { all_chunks[chunks_to_remove_offset + n_chunks_to_remove] = pc; n_chunks_to_remove += 1; p_idx += 1; } Ordering::Greater => { all_chunks[chunks_to_add_offset + n_chunks_to_add] = nc; n_chunks_to_add += 1; n_idx += 1; } Ordering::Equal => { p_idx += 1; n_idx += 1; } } } if p_idx < prev_coords.len() { let diff = prev_coords.len() - p_idx; for i in 0..diff { all_chunks[chunks_to_remove_offset + n_chunks_to_remove + i] = prev_coords[p_idx + i]; } n_chunks_to_remove += diff; } else if n_idx < new_coords.len() { let diff = new_coords.len() - n_idx; for i in 0..diff { all_chunks[chunks_to_add_offset + n_chunks_to_add + i] = new_coords[n_idx + i]; } n_chunks_to_add += diff; } #[cfg(debug_assertions)] { let to_remove = all_chunks .iter() .cloned() .skip(chunks_to_remove_offset) .take(n_chunks_to_remove) .collect::<HashSet<_>>(); let to_add = all_chunks .iter() .cloned() .skip(chunks_to_add_offset) .take(n_chunks_to_add) .collect::<HashSet<_>>(); debug_assert_eq!(to_remove.intersection(&to_add).count(), 0); } for coord in all_chunks .iter() .skip(chunks_to_add_offset) .take(n_chunks_to_add) { self.add_collider_coords(cld_handle, *coord); } for coord in all_chunks .iter() .skip(chunks_to_remove_offset) .take(n_chunks_to_remove) { self.remove_collider_coords(cld_handle, *coord); } } fn get_all_chunks_containing<T>(&self, pos: Vec2f, extent: Vec2f, coords: &mut T) where T: Extend<Chunk_Coords>, { trace!("get_all_chunks_containing"); #[cfg(debug_assertions)] let mut chk_coords = vec![]; // We need to @Cleanup the -extent*0.5 offset we need to apply and make it consistent throughout the game! 
let pos = pos - extent * 0.5; let coords_topleft = Chunk_Coords::from_pos(pos); coords.extend(Some(coords_topleft)); #[cfg(debug_assertions)] chk_coords.push(coords_topleft); let coords_botright = Chunk_Coords::from_pos(pos + extent); // Note: we cycle y-major so the result is automatically sorted (as for Chunk_Coords::cmp) for y in 0..=coords_botright.y - coords_topleft.y { for x in 0..=coords_botright.x - coords_topleft.x { if x == 0 && y == 0 { continue; } coords.extend(Some(Chunk_Coords::from_pos( pos + v2!(x as f32 * CHUNK_WIDTH, y as f32 * CHUNK_HEIGHT), ))); #[cfg(debug_assertions)] chk_coords.push(Chunk_Coords::from_pos( pos + v2!(x as f32 * CHUNK_WIDTH, y as f32 * CHUNK_HEIGHT), )); } } #[cfg(debug_assertions)] { // Result should be sorted and deduped // @WaitForStable //debug_assert!(coords.iter().is_sorted()); for i in 1..chk_coords.len() { debug_assert!(chk_coords[i] > chk_coords[i - 1]); } let mut deduped = chk_coords.clone(); deduped.dedup(); debug_assert!(chk_coords.len() == deduped.len()); } } } impl Spatial_Accelerator<Collider_Handle> for World_Chunks { fn get_neighbours<R>(&self, pos: Vec2f, extent: Vec2f, result: &mut R) where R: Extend<Collider_Handle>, { let mut chunks = vec![]; self.get_all_chunks_containing(pos, extent, &mut chunks); for coords in chunks { if let Some(chunk) = self.chunks.get(&coords) { result.extend(chunk.colliders.iter().copied()); } } } } #[cfg(debug_assertions)] impl World_Chunks { pub fn debug_draw(&self, painter: &mut Debug_Painter) { use inle_common::colors; use inle_common::paint_props::Paint_Properties; use inle_math::transform::Transform2D; if self.chunks.is_empty() { return; } let max_colliders = self .chunks .iter() .map(|(_, chk)| chk.colliders.len()) .max() .unwrap_or(0) as f32; for (coords, chunk) in &self.chunks { let world_pos = v2!(coords.to_world_pos().x, coords.to_world_pos().y); let col = colors::lerp_col( colors::rgba(0, 150, 0, 100), colors::rgba(150, 0, 0, 100), chunk.colliders.len() as f32 / max_colliders, ); painter.add_rect( v2!(CHUNK_WIDTH, CHUNK_HEIGHT), &Transform2D::from_pos(world_pos), Paint_Properties { color: col, border_color: colors::darken(col, 0.7), border_thick: (CHUNK_WIDTH / 50.).max(5.), ..Default::default() }, ); painter.add_text( &format!("{},{}: {}", coords.x, coords.y, chunk.colliders.len()), world_pos + v2!(10., 5.), (CHUNK_WIDTH as u16 / 10).max(20), colors::rgba(50, 220, 0, 250), ); } } } #[cfg(tests)] mod tests { use super::*; #[test] fn chunk_coords_ord() { assert!(Chunk_Coords { x: 0, y: 0 } < Chunk_Coords { x: 1, y: 0 }); assert!(Chunk_Coords { x: 1, y: 0 } < Chunk_Coords { x: 0, y: 1 }); assert!(Chunk_Coords { x: 1, y: 1 } < Chunk_Coords { x: 2, y: 1 }); assert!(Chunk_Coords { x: 2, y: 1 } < Chunk_Coords { x: 1, y: 2 }); } }
true
a9a5c4c9549a95c9da984863b8cf7e28092d624e
Rust
baitcenter/sled
/crates/sled/src/node.rs
UTF-8
7,492
2.859375
3
[ "Apache-2.0", "MIT" ]
permissive
use std::{fmt, mem::size_of}; use super::*; #[derive(Clone, PartialEq, Serialize, Deserialize)] pub(crate) struct Node { pub(crate) data: Data, pub(crate) next: Option<PageId>, pub(crate) lo: IVec, pub(crate) hi: IVec, pub(crate) merging_child: Option<PageId>, pub(crate) merging: bool, } impl fmt::Debug for Node { fn fmt( &self, f: &mut fmt::Formatter<'_>, ) -> std::result::Result<(), fmt::Error> { let data = self.data.fmt_keys(&self.lo); write!( f, "Node {{ \ lo: {:?} \ hi: {:?} \ next: {:?} \ merging_child: {:?} \ merging: {} \ data: {:?} }}", self.lo, self.hi, self.next, self.merging_child, self.merging, data ) } } impl Node { #[inline] pub(crate) fn size_in_bytes(&self) -> u64 { let self_sz = size_of::<Self>() as u64; let lo_sz = self.lo.size_in_bytes(); let hi_sz = self.hi.size_in_bytes(); let data_sz = self.data.size_in_bytes(); self_sz .saturating_add(lo_sz) .saturating_add(hi_sz) .saturating_add(data_sz) } pub(crate) fn apply(&mut self, frag: &Frag, merge_operator: Option<usize>) { use self::Frag::*; assert!( !self.merging, "somehow a frag was applied to a node after it was merged" ); match *frag { Set(ref k, ref v) => { // (when hi is empty, it means it's unbounded) if self.hi.is_empty() || prefix_cmp_encoded(k, &self.hi, &self.lo) == std::cmp::Ordering::Less { self.set_leaf(k.clone(), v.clone()); } else { panic!( "tried to consolidate set at key <= hi.\ Set({:?}, {:?}) to node {:?}", k, v, self ) } } Merge(ref k, ref v) => { // (when hi is empty, it means it's unbounded) if self.hi.is_empty() || prefix_cmp_encoded(k, &self.hi, &self.lo) == std::cmp::Ordering::Less { let merge_fn_ptr = merge_operator.expect("must have a merge operator set"); unsafe { let merge_fn: MergeOperator = std::mem::transmute(merge_fn_ptr); self.merge_leaf(k.clone(), v.clone(), merge_fn); } } else { panic!("tried to consolidate set at key <= hi") } } Del(ref k) => { // (when hi is empty, it means it's unbounded) if self.hi.is_empty() || prefix_cmp_encoded(k, &self.hi, &self.lo) == std::cmp::Ordering::Less { self.del_leaf(k); } else { panic!("tried to consolidate del at key <= hi") } } Base(_) => panic!("encountered base page in middle of chain"), ParentMergeIntention(pid) => { assert!( self.merging_child.is_none(), "trying to merge {:?} into node {:?} which \ is already merging another child", frag, self ); self.merging_child = Some(pid); } ParentMergeConfirm => { assert!(self.merging_child.is_some()); let merged_child = self.merging_child.take().expect( "we should have a specific \ child that was merged if this \ frag appears here", ); self.data.parent_merge_confirm(merged_child); } ChildMergeCap => { self.merging = true; } } } pub(crate) fn set_leaf(&mut self, key: IVec, val: IVec) { if let Data::Leaf(ref mut records) = self.data { let search = records.binary_search_by(|(k, _)| prefix_cmp(k, &key)); match search { Ok(idx) => records[idx] = (key, val), Err(idx) => records.insert(idx, (key, val)), } } else { panic!("tried to Set a value to an index"); } } pub(crate) fn merge_leaf( &mut self, key: IVec, val: IVec, merge_fn: MergeOperator, ) { if let Data::Leaf(ref mut records) = self.data { let search = records.binary_search_by(|(k, _)| prefix_cmp(k, &key)); let decoded_k = prefix_decode(&self.lo, &key); match search { Ok(idx) => { let new = merge_fn(&*decoded_k, Some(&records[idx].1), &val); if let Some(new) = new { records[idx] = (key, new.into()); } else { records.remove(idx); } } Err(idx) => { let new = merge_fn(&*decoded_k, None, &val); if let Some(new) = new { records.insert(idx, (key, new.into())); } 
} } } else { panic!("tried to Merge a value to an index"); } } pub(crate) fn parent_split(&mut self, at: &[u8], to: PageId) -> bool { if let Data::Index(ref mut ptrs) = self.data { let encoded_sep = prefix_encode(&self.lo, at); match ptrs.binary_search_by(|a| prefix_cmp(&a.0, &encoded_sep)) { Ok(_) => { debug!( "parent_split skipped because \ parent already contains child at split point \ due to deep race" ); return false; } Err(idx) => ptrs.insert(idx, (encoded_sep, to)), } } else { panic!("tried to attach a ParentSplit to a Leaf chain"); } true } pub(crate) fn del_leaf(&mut self, key: &IVec) { if let Data::Leaf(ref mut records) = self.data { let search = records .binary_search_by(|&(ref k, ref _v)| prefix_cmp(k, &*key)); if let Ok(idx) = search { records.remove(idx); } } else { panic!("tried to attach a Del to an Index chain"); } } pub(crate) fn split(&self) -> Node { let (split, right_data) = self.data.split(&self.lo); Node { data: right_data, next: self.next, lo: split, hi: self.hi.clone(), merging_child: None, merging: false, } } pub(crate) fn receive_merge(&self, rhs: &Node) -> Node { let mut merged = self.clone(); merged.hi = rhs.hi.clone(); merged.data.receive_merge( rhs.lo.as_ref(), merged.lo.as_ref(), &rhs.data, ); merged.next = rhs.next; merged } }
true
f90234ba82ae6210d218545be97354487e2a964a
Rust
lianhuiwang/tokamak
/src/parser2/error_handler.rs
UTF-8
3,425
3.046875
3
[ "Apache-2.0" ]
permissive
extern crate term; pub use self::Level::*; use std::cell::Cell; use std::fmt; use codemap::{MultiSpan}; #[derive(Copy, PartialEq, Clone, Debug)] pub enum Level { Bug, Fatal, // An error which while not immediately fatal, should stop the compiler // progressing beyond the current phase. PhaseFatal, Error, Warning, Note, Help, Cancelled, } impl fmt::Display for Level { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.to_str().fmt(f) } } impl Level { #[allow(dead_code)] fn color(self) -> term::color::Color { match self { Bug | Fatal | PhaseFatal | Error => term::color::BRIGHT_RED, Warning => term::color::YELLOW, Note => term::color::BRIGHT_GREEN, Help => term::color::BRIGHT_CYAN, Cancelled => unreachable!(), } } #[allow(dead_code)] fn to_str(self) -> &'static str { match self { Bug => "error: internal compiler error", Fatal | PhaseFatal | Error => "error", Warning => "warning", Note => "note", Help => "help", Cancelled => panic!("Shouldn't call on cancelled error"), } } } #[derive(Clone)] pub struct DiagnosticBuilder; impl DiagnosticBuilder { /// Emit the diagnostic. pub fn emit(&mut self) { unimplemented!() } /// Cancel the diagnostic (a structured diagnostic must either be emitted or /// cancelled or it will panic when dropped). /// BEWARE: if this DiagnosticBuilder is an error, then creating it will /// bump the error count on the Handler and cancelling it won't undo that. /// If you want to decrement the error count you should use `Handler::cancel`. pub fn cancel(&mut self) { unimplemented!() } #[allow(unused_variables)] pub fn note(&mut self, msg: &str) -> &mut DiagnosticBuilder { unimplemented!() } #[allow(unused_variables)] pub fn span_note<S: Into<MultiSpan>>(&mut self, sp: S, msg: &str) -> &mut DiagnosticBuilder { unimplemented!() } #[allow(unused_variables)] pub fn help(&mut self , msg: &str) -> &mut DiagnosticBuilder { unimplemented!() } #[allow(unused_variables)] pub fn span_help<S: Into<MultiSpan>>(&mut self, sp: S, msg: &str) -> &mut DiagnosticBuilder { unimplemented!() } } pub struct Handler { pub err_count: Cell<usize>, } impl Handler { #[allow(unused_variables)] pub fn cancel(&mut self, err: &mut DiagnosticBuilder) { unimplemented!() } #[allow(unused_variables)] pub fn span_err<S: Into<MultiSpan>>(&self, sp: S, msg: &str) { unimplemented!() } #[allow(unused_variables)] pub fn span_bug<S: Into<MultiSpan>>(&self, sp: S, msg: &str) -> ! { unimplemented!() } #[allow(unused_variables)] pub fn span_bug_no_panic<S: Into<MultiSpan>>(&self, sp: S, msg: &str) { unimplemented!() } #[allow(unused_variables)] pub fn struct_span_err<S: Into<MultiSpan>>(&self, sp: S, msg: &str) -> DiagnosticBuilder { unimplemented!() } #[allow(unused_variables)] pub fn struct_span_fatal<S: Into<MultiSpan>>(&self, sp: S, msg: &str) -> DiagnosticBuilder { println!("{}", msg); unimplemented!() } pub fn bug(&self, msg: &str) -> ! { unimplemented!() } }
true
93f7788948110b906462bcc5836954c931544ec6
Rust
thomashk0/chip8-rs
/packages/chip8/src/screen.rs
UTF-8
1,604
2.96875
3
[ "MIT" ]
permissive
type Point2i = (i32, i32); pub const CHIP8_FB_W: usize = 64; pub const CHIP8_FB_H: usize = 32; pub type Chip8Fb = [u32; CHIP8_FB_W * CHIP8_FB_H]; #[derive(Clone)] pub struct Screen { inverted_y: bool, fb: Chip8Fb, width: u32, height: u32, } impl Screen { pub fn new() -> Self { Screen { inverted_y: true, fb: [0; CHIP8_FB_W * CHIP8_FB_H], width: CHIP8_FB_W as u32, height: CHIP8_FB_H as u32, } } pub fn set_inverted_y(&mut self, b: bool) { self.inverted_y = b; } fn px_index(&self, coords: Point2i) -> usize { let h = self.height - 1; let y = if self.inverted_y { h - coords.1 as u32 } else { coords.1 as u32 }; let k = y * self.width + (coords.0 as u32); k as usize } pub fn data(&self) -> &[u32] { &self.fb } /// Get (width, height) pub fn dims(&self) -> (u32, u32) { (self.width, self.height) } pub fn width(&self) -> u32 { self.width } pub fn height(&self) -> u32 { self.height } pub fn set_pixel(&mut self, coords: Point2i, value: u32) { let k = (coords.1 as u32) * self.width + coords.0 as u32; self.fb[k as usize] = value; } pub fn xor_pixel(&mut self, coords: Point2i, value: u32) -> bool { let k = self.px_index(coords); let old_px = self.fb[k as usize]; self.fb[k] ^= value; old_px != 0 && value != 0 } pub fn clear(&mut self, color: u32) { self.fb.iter_mut().for_each(|x| *x = color); } }
true
08582e72c9372fcf9c746d54461d48e31f4f26ce
Rust
Rantanen/intercom
/intercom-common/src/idents.rs
UTF-8
1,586
2.53125
3
[ "MIT" ]
permissive
use crate::prelude::*; use crate::tyhandlers::ModelTypeSystem; use syn::{Ident, Path}; pub trait SomeIdent { fn get_some_ident(&self) -> Option<Ident>; } impl SomeIdent for Path { fn get_some_ident(&self) -> Option<Ident> { self.get_ident() .cloned() .or_else(|| self.segments.last().map(|l| l.ident.clone())) } } pub fn vtable(itf: &Ident, ts: ModelTypeSystem) -> Path { let vtable_ident = format_ident!("__{}{}VTable", itf, ts); parse_quote!(#vtable_ident) } pub fn com_to_rust_method_impl(itf: &Ident, method: &Ident, ts: ModelTypeSystem) -> Ident { Ident::new(&format!("__{}_{}_{:?}", itf, method, ts), method.span()) } pub fn with_ts(ident: &Ident, ts: ModelTypeSystem) -> Ident { Ident::new(&format!("{}_{:?}", ident, ts), Span::call_site()) } pub fn clsid_path(struct_path: &Path) -> Path { let mut clsid_path = struct_path.clone(); if let Some(mut last) = clsid_path.segments.last_mut() { last.ident = clsid(&last.ident); } clsid_path } pub fn clsid(struct_name: &Ident) -> Ident { new_ident(&format!("CLSID_{}", struct_name)) } pub fn iid(itf_name: &Ident, span: Span) -> Ident { Ident::new(&format!("IID_{}", itf_name), span) } pub fn method_impl<TMethod: std::fmt::Display>( struct_ident: &Ident, itf_ident: &Ident, method_name: TMethod, ts: ModelTypeSystem, ) -> Ident { new_ident(&format!( "__{}_{}_{}_{:?}", struct_ident, itf_ident, method_name, ts )) } fn new_ident(s: &str) -> Ident { Ident::new(s, Span::call_site()) }
true
2b1ec48934f11906d2a70c25108b5d203b2d566c
Rust
media-io/rs_mpegts
/src/mpegts/packet.rs
UTF-8
1,784
2.65625
3
[ "MIT" ]
permissive
use std::fmt; use mpegts::adaptation_field::AdaptationField; use mpegts::payload::Payload; use mpegts::program_association::*; use mpegts::program_map::*; #[derive(Debug, Clone)] pub struct Packet { pub transport_error_indicator: bool, pub transport_priority: bool, pub program_id: u16, pub transport_scrambling_control: u8, pub continuity_counter: u8, pub payload_presence: bool, pub adaptation_field: Option<AdaptationField>, pub payload: Option<Payload>, pub data: Vec<u8>, } impl fmt::Display for Packet { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if self.data.len() > 0 { write!(f, "Packet with PID: {:04} (data size = {}), payload {:?}", self.program_id, self.data.len(), self.payload) } else { // write!(f, "PID: {:04}", self.program_id) write!(f, "Packet: {:?}", self) } } } impl Packet { pub fn new() -> Packet { Packet { transport_error_indicator: false, transport_priority: false, program_id: 0, transport_scrambling_control: 0x00, continuity_counter: 0x00, payload_presence: false, adaptation_field: None, payload: None, data: vec![], } } pub fn new_pat(pat: ProgramAssociation) -> Packet { let mut p = Packet::new(); p.payload_presence = true; p.payload = Some( Payload{ pat: Some(pat), pmt: None, pes: None, }); p } pub fn new_pmt(id: u16, pmt: ProgramMap) -> Packet { let mut p = Packet::new(); p.program_id = id; p.payload_presence = true; p.payload = Some( Payload{ pat: None, pmt: Some(pmt), pes: None, }); p } pub fn new_null() -> Packet { let mut p = Packet::new(); p.program_id = 0x1FFF; p } }
true
243e5b5dc8a7c1e8c4264239519675eecbe1de69
Rust
l4l/whos-online
/src/whosb.rs
UTF-8
1,289
2.640625
3
[ "MIT" ]
permissive
use telebot::bot; use telebot::functions::*; use reqwest::get; use serde_json::from_str; use tokio_core::reactor::Core; use futures::stream::Stream; use status::Map; const NO_USERS: &'static str = "No users found"; const FETCH_ERROR: &'static str = "Nothing found"; fn fetch(host: &str) -> Option<Map> { get(host).and_then(|mut x| x.text()).ok().and_then(|resp| { from_str(&resp).ok() }) } fn print_all(map: Map) -> String { let s = map.into_iter() .map(|(k, v)| { let data_prnt = v.map(|d| format!("online [{}]", d.description)).unwrap_or( "offline".to_string(), ); format!("{} is {}", k, data_prnt) }) .collect::<String>(); match s.is_empty() { true => NO_USERS.to_string(), _ => s, } } pub fn launch(token: &str, host: &str) { let mut lp = Core::new().unwrap(); let host = host.to_string(); let bot = bot::RcBot::new(lp.handle(), &token).update_interval(200); let handle = bot.new_cmd("/ask").and_then(move |(bot, msg)| { let text = fetch(&host).map(print_all).unwrap_or( FETCH_ERROR.to_string(), ); bot.message(msg.chat.id, text).send() }); bot.register(handle); bot.run(&mut lp).unwrap(); }
true
9630129249a991ab73ddf3c7036ef4f26756f1f8
Rust
yujie21ic/datacannon-rs-core
/src/message_structure/kafka_queue.rs
UTF-8
342
2.59375
3
[ "Apache-2.0" ]
permissive
//! Structure and functions storing data for when kafka acts as a queue //! //! --- //! author: Andrew Evans //! --- /// Creates a kafka structure /// /// # Arguments /// * `queue` - Name of the queue /// * `default_exchange` - Default exchange for the queue /// * `ha_policy` - Optional high-availability policy for the queue pub struct KafkaQueue{ queue: String, default_exchange: String, ha_policy: Option<i8>, }
true
1e325050de63d568e99722f04c3a955399bd8841
Rust
lilydjwg/chinese-num
/src/lib.rs
UTF-8
4,224
3.375
3
[]
no_license
//! Convert a decimal number to its Chinese form. //! //! [![Build Status](https://travis-ci.org/lilydjwg/chinese-num.svg)](https://travis-ci.org/lilydjwg/chinese-num) //! [![Crates.io Version](https://img.shields.io/crates/v/chinese-num.svg)](https://crates.io/crates/chinese-num) //! [![GitHub stars](https://img.shields.io/github/stars/lilydjwg/chinese-num.svg?style=social&label=Star)](https://github.com/lilydjwg/chinese-num) //! //! //! # Examples //! //! ``` //! let s = chinese_num::to_chinese_num("121").unwrap(); //! assert_eq!(s, "一百二十一"); //! ``` //! //! ``` //! let s = chinese_num::to_chinese_num("1004000007000500").unwrap(); //! assert_eq!(s, "一千零四万亿零七百万零五百"); //! ``` //! //! ``` //! let s = chinese_num::to_chinese_num("123000520").unwrap(); //! assert_eq!(s, "一亿二千三百万零五百二十"); //! ``` //! //! ``` //! let s = chinese_num::to_chinese_num("1234070000123780000087006786520988800000").unwrap(); //! assert_eq!(s, "一千二百三十四万零七百亿零一十二万三千七百八十亿零八千七百亿六千七百八十六万五千二百零九亿八千八百八十万"); //! ``` //! //! If the given string is not a number, or begins with "0", `None` is returned: //! //! ``` //! let s = chinese_num::to_chinese_num("不是数字"); //! assert!(s.is_none()); //! ``` //! //! ``` //! let s = chinese_num::to_chinese_num("020"); //! assert!(s.is_none()); //! ``` //! //! The algorithm is taken from here: //! http://zhuanlan.zhihu.com/iobject/20370983. const DIGITS: [char; 10] = ['零', '一', '二', '三', '四', '五', '六', '七', '八', '九']; const TENS_NAME: [char; 4] = ['个', '十', '百', '千']; const UNIT_RANK: [char; 6] = ['个', '十', '百', '千', '万', '亿']; fn digit_pos_to_name(pos: usize) -> char { if pos == 0 { '个' } else if pos % 8 == 0 { '亿' } else if pos % 4 == 0 { '万' } else { TENS_NAME[pos % 4] } } struct ResultS (String, bool, char); #[inline] fn get_unit_rank(u: char) -> usize { UNIT_RANK.iter().position(|&x| x == u).unwrap() } fn append_digit(result: ResultS, tuple: (usize, char)) -> ResultS { let (digit, this_unit) = tuple; let ResultS(mut result, pending_zero, last_unit) = result; let this_str = DIGITS[digit]; if digit == 0 { if get_unit_rank(last_unit) > get_unit_rank(this_unit) { ResultS(result, true, last_unit) } else { result.push(this_unit); ResultS(result, false, this_unit) } } else { if pending_zero { result.push('零'); } result.push(this_str); result.push(this_unit); ResultS(result, false, this_unit) } } pub fn to_chinese_num<N: AsRef<str>>(n: N) -> Option<String> { let n = n.as_ref(); // special cases if n == "0" { return Some("零".to_owned()); } // non-digit found, nothing, leading zeros if !n.chars().all(|x| x.is_digit(10)) || n.len() == 0 || n.chars().nth(0).unwrap() == '0' { return None; } let v = n.as_bytes().iter().rev().enumerate().map( |(i, c)| ((c - '0' as u8) as usize, digit_pos_to_name(i))) .rev().fold(ResultS(String::new(), false, '个'), append_digit); let mut r = v.0; if r.chars().last().unwrap() == '个' { r.pop();; } if r.starts_with("一十") { r.remove(0); } Some(r) } /// A trait adding a `to_chinese_num` method to types, e.g.: /// /// ``` /// use chinese_num::ToChineseNum; /// /// assert_eq!(20.to_chinese_num(), Some(String::from("二十"))); /// ``` pub trait ToChineseNum { fn to_chinese_num(&self) -> Option<String>; } impl ToChineseNum for usize { fn to_chinese_num(&self) -> Option<String> { to_chinese_num(self.to_string()) } } #[test] fn empty_number() { let s = to_chinese_num(""); assert!(s.is_none()); } #[test] fn num_0() { let s = to_chinese_num("0").unwrap(); assert_eq!(s, "零"); } #[test] fn num_1() { let s = to_chinese_num("1").unwrap(); assert_eq!(s, "一"); } #[test] fn num_10() { let s = 
to_chinese_num("10").unwrap(); assert_eq!(s, "十"); } #[test] fn num_12() { let s = to_chinese_num("12").unwrap(); assert_eq!(s, "十二"); } #[test] fn num_20() { let s = to_chinese_num("20").unwrap(); assert_eq!(s, "二十"); }
true
eeb9102e847eb34d29fdf87b2edbff4521ebf08e
Rust
synecdoche/pelikan
/src/rust/core/server/src/process/mod.rs
UTF-8
1,943
2.921875
3
[ "Apache-2.0", "LicenseRef-scancode-unknown-license-reference" ]
permissive
// Copyright 2021 Twitter, Inc. // Licensed under the Apache License, Version 2.0 // http://www.apache.org/licenses/LICENSE-2.0 mod builder; mod worker_builder; pub use builder::ProcessBuilder; pub use worker_builder::WorkerBuilder; use common::signal::Signal; use queues::QueuePairs; use std::thread::JoinHandle; /// A structure which represents a running Pelikan cache process. /// /// Note: for long-running daemon, be sure to call `wait()` on this structure to /// block the process until the threads terminate. For use within tests, be sure /// to call `shutdown()` to terminate the threads and block until termination. pub struct Process { threads: Vec<JoinHandle<()>>, /// used to send signals to and from the admin thread signal_queue: QueuePairs<Signal, Signal>, } impl Process { /// Attempts to gracefully shutdown the `Process` by sending a shutdown to /// each thread and then waiting to join those threads. /// /// Will terminate ungracefully if it encounters an error in sending a /// shutdown to any of the threads. /// /// This function will block until all threads have terminated. pub fn shutdown(mut self) { // this sends a shutdown to the admin thread, which will broadcast the // signal to all sibling threads in the process if self.signal_queue.broadcast(Signal::Shutdown).is_err() { fatal!("error sending shutdown signal to thread"); } // try to wake the admin thread to process the signal if self.signal_queue.wake_all().is_err() { error!("error waking threads for shutdown"); } // wait and join all threads self.wait() } /// Will block until all threads terminate. This should be used to keep the /// process alive while the child threads run. pub fn wait(self) { for thread in self.threads { let _ = thread.join(); } } }
true
7e165052c93b69c07fdd2bf4aa9ae7fd1c954796
Rust
bytebuddha/bevy_tiled_world
/examples/utils/colliders.rs
UTF-8
4,980
2.765625
3
[]
no_license
use bevy::prelude::*; use bevy_rapier2d::prelude::*; pub fn draw_colliders( mut lines: ResMut<bevy_prototype_debug_lines::DebugLines>, query: Query<(&ColliderShape, &ColliderPosition)>, ) { for (shape, position) in query.iter() { match shape.0.as_typed_shape() { TypedShape::Cuboid(cuboid) => { let points = cuboid.to_polyline(); lines.line_colored( Vec3::new( position.translation.x + points[0][0], position.translation.y - points[1][1], 10.0, ), Vec3::new( position.translation.x + points[1][0], position.translation.y - points[1][1], 10.0, ), 0.0, Color::RED, ); lines.line_colored( Vec3::new( position.translation.x + points[1][0], position.translation.y - points[1][1], 10.0, ), Vec3::new( position.translation.x + points[2][0], position.translation.y - points[2][1], 10.0, ), 0.0, Color::RED, ); lines.line_colored( Vec3::new( position.translation.x + points[2][0], position.translation.y - points[2][1], 10.0, ), Vec3::new( position.translation.x + points[3][0], position.translation.y - points[3][1], 10.0, ), 0.0, Color::RED, ); lines.line_colored( Vec3::new( position.translation.x + points[3][0], position.translation.y - points[3][1], 10.0, ), Vec3::new( position.translation.x + points[0][0], position.translation.y - points[0][1], 10.0, ), 0.0, Color::RED, ); } TypedShape::Polyline(polyline) => { let segments: Vec<Segment> = polyline.segments().collect(); for segment in segments.iter() { lines.line_colored( Vec3::new( position.translation.x + segment.a[0], position.translation.y - segment.a[1], 10.0, ), Vec3::new( position.translation.x + segment.b[0], position.translation.y - segment.b[1], 10.0, ), 0.0, Color::RED, ); } lines.line_colored( Vec3::new( position.translation.x + segments[segments.len() - 1].a[0], position.translation.y - segments[segments.len() - 1].a[1], 10.0, ), Vec3::new( position.translation.x + segments[0].a[0], position.translation.y - segments[0].a[1], 10.0, ), 0.0, Color::RED, ); }, TypedShape::Ball(_) => { lines.line_colored( Vec3::new( position.translation.x - 6.0, position.translation.y - 6.0, 10.0, ), Vec3::new( position.translation.x + 6.0, position.translation.y + 6.0, 10.0, ), 0.0, Color::RED, ); lines.line_colored( Vec3::new( position.translation.x + 6.0, position.translation.y - 6.0, 10.0, ), Vec3::new( position.translation.x - 6.0, position.translation.y + 6.0, 10.0, ), 0.0, Color::RED, ); } _ => {} } } }
true
25e954ff01d7d287a2e4baab75193f236fb3e22e
Rust
whitfin/efflux
/src/lib.rs
UTF-8
1,737
2.734375
3
[ "MIT" ]
permissive
//! Efflux is a set of Rust interfaces for MapReduce and Hadoop Streaming. //! //! This crate provides easy interfaces for working with MapReduce, whether //! or not you're running on the Hadoop platform. Usage is as simple as a //! struct which implements either the `Mapper` or `Reducer` trait, as all //! other interaction is taken care of internally. //! //! Macros are provided for IO, to provide a compile-time guarantee of things //! such as counter/status updates, or writing to the Hadoop task logs. #![doc(html_root_url = "https://docs.rs/efflux/2.0.1")] #[macro_use] pub mod macros; pub mod context; pub mod io; pub mod mapper; pub mod reducer; use self::mapper::Mapper; use self::reducer::Reducer; use self::mapper::MapperLifecycle; use self::reducer::ReducerLifecycle; use self::io::run_lifecycle; /// Executes a `Mapper` against the current `stdin`. #[inline] pub fn run_mapper<M>(mapper: M) where M: Mapper + 'static, { run_lifecycle(MapperLifecycle::new(mapper)); } /// Executes a `Reducer` against the current `stdin`. #[inline] pub fn run_reducer<R>(reducer: R) where R: Reducer + 'static, { run_lifecycle(ReducerLifecycle::new(reducer)); } // prelude module pub mod prelude { //! A "prelude" for crates using the `efflux` crate. //! //! This prelude contains the required imports for almost all use cases, to //! avoid having to include modules and structures directly: //! //! ```rust //! use efflux::prelude::*; //! ``` //! //! The prelude may grow over time, but it is unlikely to shrink. pub use super::context::{Configuration, Context, Contextual}; pub use super::log; pub use super::mapper::Mapper; pub use super::reducer::Reducer; }
true
429d6d641cbe8615aef22571401e5314aa4970b6
Rust
Woyten/fileserver
/src/main.rs
UTF-8
1,701
2.6875
3
[ "MIT" ]
permissive
use iron::middleware::Handler; use iron::mime::Mime; use iron::prelude::*; use iron::status::Status; use staticfile::Static; use std::fs; use std::fs::ReadDir; use std::path::Path; use std::path::PathBuf; static FILE_FOLDER: &str = "www"; fn main() { let static_file_handler = Static::new(Path::new(FILE_FOLDER)); Iron::new(move |request: &mut Request| browse(request, &static_file_handler)) .http("0.0.0.0:3000") .unwrap(); } fn browse(request: &mut Request, static_file_handler: &Static) -> IronResult<Response> { let file_response = static_file_handler.handle(request); if file_response.is_ok() { return file_response; } let mut path = PathBuf::new(); path.push(FILE_FOLDER); for path_element in request.url.path() { path.push(path_element); } match fs::read_dir(&path) { Ok(paths) => list_paths(request, paths), Err(_) => Ok(Response::with((Status::NotFound, "Invalid path"))), } } fn list_paths(request: &Request, paths: ReadDir) -> IronResult<Response> { let mut response = String::new(); response.push_str(&format!("<div>Content of {}</div>", request.url)); for path in paths { let to_push = match path { Ok(file) => format!( r#"<a href="{0}">{0}</a>"#, file.path().file_name().unwrap().to_str().unwrap() ), Err(err) => format!("{}", err), }; response.push_str(&format!("<div>{}\n</div>", to_push)); } let mime: Mime = "text/html".parse().unwrap(); response = format!("<html><meta></meta><body>{}</body></html>", response); Ok(Response::with((Status::Ok, response, mime))) }
true
1b11341c511b87dd9b770d99be27b805aaba22e9
Rust
nikolabr/base64rs
/src/lib.rs
UTF-8
3,224
3.375
3
[]
no_license
pub mod base64 { static BASE64_TABLE: &str = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/="; fn get_ascii(ch: u8) -> u8 { match BASE64_TABLE.chars().position(|x| x == ch as char) { None => 0, Some(n) => n as u8 } } fn get_b64_char(val: u8) -> char { match BASE64_TABLE.chars().nth((val & 0x3F) as usize) { None => '0', Some(x) => x } } pub fn is_valid_b64(data: &String) -> bool { if (data.len() == 0) || (data.len() % 4 != 0) { return false } else { /* accepted bytes: digits, 'A'-'Z', 'a'-'z', '/', '+', '=' */ let char_is_b64 = |x| { (0x30..=0x39).contains(x) || (0x41..=0x5A).contains(x) || (0x61..=0x7A).contains(x) || *x == 0x2F || *x == 0x2B || *x == 0x3D }; return data.as_bytes().iter().all(char_is_b64) } } fn encode_chunk(chunk: &[u8]) -> [char; 4]{ let mut tmp : [char; 4] = ['0'; 4]; tmp[0] = get_b64_char(chunk[0] >> 2); tmp[1] = get_b64_char((chunk[0] << 4) | (chunk[1] >> 4)); tmp[2] = get_b64_char((chunk[1] << 2) | (chunk[2] >> 6)); tmp[3] = get_b64_char(chunk[2]); tmp } fn add_padding(chunk: &[u8]) -> [char; 4]{ let mut tmp : [char; 4] = ['0'; 4]; tmp[0] = get_b64_char(chunk[0] >> 2); match chunk.len() { 1 => { tmp[1] = get_b64_char((chunk[0] << 4) | 0x00); tmp[2] = '='; } 2 => { tmp[1] = get_b64_char((chunk[0] << 4) | (chunk[1] >> 4)); tmp[2] = get_b64_char(chunk[1] << 2); /* only two input bytes are available in this arm */ } _ => panic!("Invalid padding!") }; tmp[3] = '='; tmp } fn decode_chunk(chunk: &[u8]) -> Vec<u8>{ let mut tmp : Vec<u8> = Vec::new(); tmp.push((get_ascii(chunk[0]) << 2) | (get_ascii(chunk[1]) >> 4)); if chunk[2] != '=' as u8 { tmp.push((get_ascii(chunk[1]) << 4) | (get_ascii(chunk[2]) >> 2)); if chunk[3] != '=' as u8 { tmp.push((get_ascii(chunk[2]) << 6) | get_ascii(chunk[3])); } } tmp } pub fn encode(data: &Vec<u8>) -> String { let mut res = String::new(); let chunks = data.chunks_exact(3); let remainder = chunks.remainder(); // For loop will consume the iterator, so the remainder must be copied for chunk in chunks { res.extend(encode_chunk(chunk)); } if !remainder.is_empty() { res.extend(add_padding(remainder)); } res } pub fn decode(data: &String) -> Result<Vec<u8>, String> { match is_valid_b64(data){ true => { let mut res: Vec<u8> = Vec::new(); let bytes = data.as_bytes(); let chunks = bytes.chunks(4); for chunk in chunks { res.extend(decode_chunk(chunk)); } return Ok(res); }, false => { return Err("Input is not Base64!".to_string()); } }; } }
true
810107b60d4368049d58d73dd702c5b8dc9f4921
Rust
arothstein/sandbox-rust
/the-book/03/shadowing/src/main.rs
UTF-8
798
4.375
4
[]
no_license
fn main() { let x = 5; // We can shadow a variable by using the same variable’s name and repeating the use of the let keyword as follows: let x = x + 1; let x = x * 2; println!("The value of x is: {}", x); // Shadowing is different from marking a variable as mut, because we’ll get a compile-time error if we // accidentally try to reassign to this variable without using the let keyword. By using let, we can perform // a few transformations on a value but have the variable be immutable after those transformations have been completed. // The other difference between mut and shadowing is that because we’re effectively creating a new // variable when we use the let keyword again, we can change the type of the value but reuse the same name. }
true
14e00dc3fb6c55fcec1bb334ebc82ba020b5af9b
Rust
EFanZh/n-body
/src/basic_renderer.rs
UTF-8
1,785
2.8125
3
[ "MIT" ]
permissive
use crate::configuration::Color; use crate::renderer::Renderer; use cgmath::Vector2; use itertools::izip; use wasm_bindgen::JsValue; use web_sys::CanvasRenderingContext2d; pub struct BasicRenderer { canvas_context: CanvasRenderingContext2d, body_colors: Vec<String>, trail_widths: Vec<f64>, } impl BasicRenderer { pub fn new( canvas_context: CanvasRenderingContext2d, width: f64, height: f64, body_colors: Vec<Color>, trail_widths: Vec<f64>, ) -> BasicRenderer { canvas_context.set_global_composite_operation("screen").unwrap(); canvas_context.set_fill_style(&JsValue::from_str("black")); canvas_context.fill_rect(-width * 0.5, -height * 0.5, width, height); BasicRenderer { canvas_context, body_colors: body_colors.iter().map(|c| c.to_rgba()).collect(), trail_widths, } } } impl Renderer for BasicRenderer { fn render(&mut self, position_histories: &[Vec<Vector2<f64>>]) { for (position_history, color, trail_width) in izip!(position_histories, &self.body_colors, &self.trail_widths) { if position_history.len() > 1 { self.canvas_context.set_stroke_style(&JsValue::from_str(&color)); self.canvas_context.set_line_width(*trail_width); self.canvas_context.begin_path(); let (first_position, rest_positions) = position_history.split_first().unwrap(); self.canvas_context.move_to(first_position.x, first_position.y); for position in rest_positions { self.canvas_context.line_to(position.x, position.y); } self.canvas_context.stroke(); } } } }
true
e23266534b9a41311fe49104eec99a6650a9eaea
Rust
Vicfred/codeforces-rust
/insomnia_cure_148A.rs
UTF-8
1,055
2.828125
3
[ "BSD-3-Clause" ]
permissive
// https://codeforces.com/problemset/problem/148/A // implementation, simulation, simple math use std::io; fn main() { let mut k = String::new(); io::stdin() .read_line(&mut k) .unwrap(); let k: i64 = k.trim().parse().unwrap(); let mut l = String::new(); io::stdin() .read_line(&mut l) .unwrap(); let l: i64 = l.trim().parse().unwrap(); let mut m = String::new(); io::stdin() .read_line(&mut m) .unwrap(); let m: i64 = m.trim().parse().unwrap(); let mut n = String::new(); io::stdin() .read_line(&mut n) .unwrap(); let n: i64 = n.trim().parse().unwrap(); let mut d = String::new(); io::stdin() .read_line(&mut d) .unwrap(); let d: i64 = d.trim().parse().unwrap(); let mut dragons = 0; for idx in 1..d+1 { if idx % k == 0 || idx % l == 0 || idx % m == 0 || idx % n == 0 { dragons += 1; } } println!("{}", dragons); }
true
b31eb9662d953601504e039d375052a60afc0190
Rust
geom3trik/fft_resample
/examples/demo.rs
UTF-8
1,768
2.703125
3
[ "MIT" ]
permissive
use std::{error::Error, fs::File, io::BufReader, usize}; use fft_resample::fft_upsample; use hound::{WavReader, WavSpec, SampleFormat, WavWriter}; fn main() -> Result<(), hound::Error> { // Replace with path to test file let path = "C:/Users/Setup/Music/file_example_WAV_5MG.wav"; let mut reader = WavReader::open(path)?; let spec = reader.spec(); let mut data = Vec::with_capacity((spec.channels as usize) * (reader.duration() as usize)); match (spec.bits_per_sample, spec.sample_format) { (16, SampleFormat::Int) => { for sample in reader.samples::<i16>() { data.push((sample? as f32) / (0x7fffi32 as f32)); } } (24, SampleFormat::Int) => { for sample in reader.samples::<i32>() { let val = (sample? as f32) / (0x00ff_ffffi32 as f32); data.push(val); } } (32, SampleFormat::Int) => { for sample in reader.samples::<i32>() { data.push((sample? as f32) / (0x7fff_ffffi32 as f32)); } } (32, SampleFormat::Float) => { for sample in reader.samples::<f32>() { data.push(sample?); } } _ => return Err(hound::Error::Unsupported), } let upsample_length = (data.len() as f32 / 44100.0) * 48000.0; let resampled_buffer = fft_upsample(&data, upsample_length.round() as usize, spec.channels as usize); let mut writer = WavWriter::create("test3.wav", spec)?; for t in 0..resampled_buffer.len() { let sample = resampled_buffer[t]; let amplitude = i16::MAX as f32; writer.write_sample((sample * amplitude) as i16)?; } writer.finalize()?; Ok(()) }
true
4c05ac454e5ce39285e5543013d9bd27fae4b091
Rust
SeijiEmery/subliminal
/structural_parser/src/text_style.rs
UTF-8
641
3.5
4
[ "MIT" ]
permissive
use std::ops::BitOr; /* `Color` is assumed to be defined elsewhere in this crate */ pub struct TextStyle { font: Option<String>, color: Option<Color>, size: Option<f32>, italic: Option<bool>, bold: Option<bool>, } impl Default for TextStyle { fn default () -> TextStyle { TextStyle { font: None, color: None, size: None, italic: None, bold: None } } } impl BitOr for TextStyle { type Output = Self; /* Left-hand fields take priority; fall back to `rhs` where unset */ fn bitor (self, rhs: Self) -> Self { TextStyle { font: self.font.or(rhs.font), color: self.color.or(rhs.color), size: self.size.or(rhs.size), italic: self.italic.or(rhs.italic), bold: self.bold.or(rhs.bold), } } }
true
77058d7ec4c60610a551e6b77abc64b8a98b6227
Rust
mchesser/platformer
/src/map.rs
UTF-8
2,903
3.078125
3
[ "MIT" ]
permissive
use std::{fs::File, io::Read, path::Path}; use anyhow::Context; use macroquad::prelude::{Rect, Vec2}; use crate::tiles::{TileInfo, TileSet}; pub struct Map { pub width: usize, pub height: usize, tiles: Vec<u16>, tileset: TileSet, } impl Map { /// Loads a map from a file pub fn load_map(path: &Path, tileset: TileSet) -> anyhow::Result<Self> { static VERSION: u8 = 1; static MAGIC_ID: [u8; 3] = *b"MAP"; let mut file = File::open(path).with_context(|| format!("failed to open: {}", path.display()))?; let mut header = [0; 12]; // Load header into the buffer match file.read(&mut header) { Ok(n) if n == 12 => {} _ => anyhow::bail!("Could not read file header"), } // Check the magic id if &header[0..3] != &MAGIC_ID { anyhow::bail!("Invalid magic id"); } // Check the version number if header[3] != VERSION { anyhow::bail!("Invalid map version"); } // Get the width and height of the map let width = u32::from_le_bytes(header[4..8].try_into().unwrap()) as usize; let height = u32::from_le_bytes(header[8..12].try_into().unwrap()) as usize; // Read the tiles let length = width * height * 2; let mut tile_buffer = vec![0; length]; match file.read(&mut tile_buffer) { Ok(n) if n == length => {} Ok(n) => anyhow::bail!("Invalid number of tiles, expected: {length}, but found: {n}"), _ => anyhow::bail!("Could not load map tiles"), } let tiles = tile_buffer.chunks(2).map(|x| u16::from_le_bytes(x.try_into().unwrap())).collect(); Ok(Self { tiles, width, height, tileset }) } pub fn size(&self) -> Vec2 { let tile_size = self.tile_size() as f32; Vec2::new(self.width as f32 * tile_size, self.height as f32 * tile_size) } pub fn tile_size(&self) -> i32 { self.tileset.tile_size } pub fn tile_info_at(&self, x: usize, y: usize) -> TileInfo { self.tileset.id(self.get(x, y)) } fn get(&self, x: usize, y: usize) -> u16 { assert!(x < self.width); assert!(y < self.height); self.tiles[x + y * self.width] } pub fn draw(&self, camera: Vec2) { for tile_x in 0..self.width { for tile_y in 0..self.height { let x = (tile_x * self.tile_size() as usize) as f32; let y = (tile_y * self.tile_size() as usize) as f32; let dest_rect = Rect::new( x - camera.x, y - camera.y, self.tile_size() as f32, self.tile_size() as f32, ); self.tileset.draw(self.get(tile_x, tile_y), dest_rect); } } } }
true
84b82fbb5622e37e3ebdfbf0163a4f74cbfcf4b0
Rust
HappyCodingRust/async_executors
/src/tokio_ct.rs
UTF-8
5,475
2.84375
3
[ "Unlicense" ]
permissive
use { crate :: { SpawnHandle, LocalSpawnHandle, JoinHandle, join_handle::InnerJh } , std :: { sync::Arc, future::Future, sync::atomic::AtomicBool } , tokio :: { task::LocalSet, runtime::{ Runtime } } , futures_task :: { FutureObj, LocalFutureObj, Spawn, LocalSpawn, SpawnError } , }; /// An executor that uses a [`tokio::runtime::Runtime`] with the [current thread](tokio::runtime::Builder::new_current_thread) /// and a [`tokio::task::LocalSet`]. Can spawn `!Send` futures. /// /// ## Creation of the runtime /// /// You must use [`TokioCtBuilder`](crate::TokioCtBuilder) to create the executor. /// /// ``` /// // Make sure to set the `tokio_ct` feature on async_executors. /// // /// use /// { /// async_executors :: { TokioCt, TokioCtBuilder, LocalSpawnHandleExt } , /// tokio :: { runtime::Builder } , /// std :: { rc::Rc } , /// }; /// /// // You must use the builder. This guarantees that TokioCt is always backed by a single threaded runtime. /// // You can set other configurations by calling `tokio_builder()` on TokioCtBuilder, so you get /// // access to the `tokio::runtime::Builder`. /// // /// let exec = TokioCtBuilder::new().build().expect( "create tokio runtime" ); /// /// // block_on takes a &self, so if you need to `async move`, /// // just clone it for use inside the async block. /// // /// exec.block_on( async /// { /// let not_send = async { let rc = Rc::new(()); }; /// /// // We can spawn !Send futures here. /// // /// let join_handle = exec.spawn_handle_local( not_send ).expect( "spawn" ); /// /// join_handle.await; /// }); ///``` /// /// ## Unwind Safety. /// /// When a future spawned on this wrapper panics, the panic will be caught by tokio in the poll function. /// /// You must only spawn futures to this API that are unwind safe. Tokio will wrap spawned tasks in /// [`std::panic::AssertUnwindSafe`] and wrap the poll invocation with [`std::panic::catch_unwind`]. /// /// They reason that this is fine because they require `Send + 'static` on the task. As far /// as I can tell this is wrong. Unwind safety can be circumvented in several ways even with /// `Send + 'static` (eg. `parking_lot::Mutex` is `Send + 'static` but `!UnwindSafe`). /// /// You should make sure that if your future panics, no code that lives on after the panic, /// nor any destructors called during the unwind can observe data in an inconsistent state. /// /// Note: the future running from within `block_on` as opposed to `spawn` does not exhibit this behavior and will panic /// the current thread. /// /// Note that these are logic errors, not related to the class of problems that cannot happen /// in safe rust (memory safety, undefined behavior, unsoundness, data races, ...). See the relevant /// [catch_unwind RFC](https://github.com/rust-lang/rfcs/blob/master/text/1236-stabilize-catch-panic.md) /// and it's discussion threads for more info as well as the documentation of [std::panic::UnwindSafe] /// for more information. /// // #[ derive( Debug, Clone ) ] // #[ cfg_attr( nightly, doc(cfg( feature = "tokio_ct" )) ) ] // pub struct TokioCt { pub(crate) exec : Arc< Runtime > , pub(crate) local : Arc< LocalSet > , } impl TokioCt { /// This is the entry point for this executor. Once this call returns, no remaining tasks shall be polled anymore. /// However the tasks stay in the executor, so if you make a second call to `block_on` with a new task, the older /// tasks will start making progress again. 
/// /// For simplicity, it's advised to just create top level task that you run through `block_on` and make sure your /// program is done when it returns. /// /// See: [tokio::runtime::Runtime::block_on] /// /// ## Panics /// /// This function will panic if it is called from an async context, including but not limited to making a nested /// call. It will also panic if the provided future panics. // pub fn block_on< F: Future >( &self, f: F ) -> F::Output { self.exec.block_on( self.local.run_until( f ) ) } } impl Spawn for TokioCt { fn spawn_obj( &self, future: FutureObj<'static, ()> ) -> Result<(), SpawnError> { // We drop the JoinHandle, so the task becomes detached. // let _ = self.local.spawn_local( future ); Ok(()) } } impl LocalSpawn for TokioCt { fn spawn_local_obj( &self, future: LocalFutureObj<'static, ()> ) -> Result<(), SpawnError> { // We drop the JoinHandle, so the task becomes detached. // let _ = self.local.spawn_local( future ); Ok(()) } } impl<Out: 'static + Send> SpawnHandle<Out> for TokioCt { fn spawn_handle_obj( &self, future: FutureObj<'static, Out> ) -> Result<JoinHandle<Out>, SpawnError> { Ok( JoinHandle{ inner: InnerJh::Tokio { handle : self.exec.spawn( future ) , detached: AtomicBool::new( false ) , }}) } } impl<Out: 'static> LocalSpawnHandle<Out> for TokioCt { fn spawn_handle_local_obj( &self, future: LocalFutureObj<'static, Out> ) -> Result<JoinHandle<Out>, SpawnError> { Ok( JoinHandle{ inner: InnerJh::Tokio { handle : self.local.spawn_local( future ) , detached: AtomicBool::new( false ) , }}) } } #[ cfg(test) ] // mod tests { use super::*; // It's important that this is not Send, as we allow spawning !Send futures on it. // static_assertions::assert_not_impl_any!( TokioCt: Send, Sync ); }
true
7e852c5803efe0d60fab202ff6f01b44360f7835
Rust
rovangju/vacuum-robot-simulator
/src/geometry/vector.rs
UTF-8
1,738
3.796875
4
[]
no_license
use std::cmp; use std::fmt; use std::ops; use math::{Angle, Scalar}; #[derive(Debug, Clone, Copy)] pub struct Vector { pub x: Scalar, pub y: Scalar, } impl fmt::Display for Vector { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "({}, {})", self.x, self.y) } } impl ops::Add for Vector { type Output = Vector; fn add(self, other: Vector) -> Vector { Vector::new(self.x + other.x, self.y + other.y) } } impl ops::Sub for Vector { type Output = Vector; fn sub(self, other: Vector) -> Vector { Vector::new(self.x - other.x, self.y - other.y) } } impl ops::Mul<Scalar> for Vector { type Output = Vector; fn mul(self, s: Scalar) -> Vector { Vector::new(self.x * s, self.y * s) } } impl cmp::PartialEq for Vector { fn eq(&self, other: &Vector) -> bool { self.x == other.x && self.y == other.y } } impl Vector { pub fn new(x: Scalar, y: Scalar) -> Vector { Vector { x, y } } pub fn from_angle(angle: Angle) -> Vector { // 0° is in forward direction (along Y-axis) Vector { x: -angle.sin(), y: angle.cos(), } } pub fn length(&self) -> Scalar { (self.x.powi(2) + self.y.powi(2)).sqrt() } pub fn dot(&self, q: Vector) -> Scalar { self.x * q.x + q.y * self.y } pub fn cross(&self, q: Vector) -> Scalar { self.x * q.y - q.x * self.y } pub fn angle(&self) -> Scalar { -self.x.atan2(self.y) } pub fn rotate(&self, angle: Angle) -> Vector { let c = angle.cos(); let s = angle.sin(); Vector::new(c * self.x - s * self.y, s * self.x + c * self.y) } }
true
d36dd1e963fe5fee1c70aba9b5486e97a945cf83
Rust
Vanille-N/rask
/src/parse/mod.rs
UTF-8
2,833
2.671875
3
[ "MIT" ]
permissive
mod build; mod lex; mod split; mod util; pub use build::build; pub use lex::distribute_lex as lex; pub use split::split; use std::rc::Rc; pub use util::*; pub fn parse(src: &str) -> Vec<Result<Rc<Expr>, ParseErr>> { let symbols = split(src); if let Err(e) = symbols { return vec![Err(e)]; } let tokens = lex(&symbols.unwrap()); if let Err(e) = tokens { return vec![Err(e)]; } build(&tokens.unwrap()) } #[cfg(test)] #[cfg_attr(tarpaulin, skip)] mod integrate { const ASSETS: [&str; 9] = [ "sort", "set-construct", "word-count", "printer", "interprete", "unification", "timer", "sprintf", "matrix", ]; use super::*; use crate::source; #[test] fn read_sources() { for file in ASSETS.iter() { let prog = source(&("assets/".to_owned() + *file)).unwrap(); let symbols = split(&prog[..]); if let Err(e) = symbols { panic!("Could not split {} properly: {:?}", file, e); } let symbols = symbols.ok().unwrap(); let tokens = lex(&symbols); if let Err(e) = tokens { panic!("Could not tokenize {} properly: {:?}", file, e); } let tokens = tokens.ok().unwrap(); let exprs = build(&tokens); for expr in exprs.iter() { if let Err(e) = expr { match e { ParseErr::MismatchedOpenBrace(n) | ParseErr::MismatchedOpenParen(n) | ParseErr::MismatchedCloseBrace(n) | ParseErr::MismatchedCloseParen(n) => panic!( "Could not build {} properly: {:?}\nContext: {:?}", file, e, &tokens[n - 5..n + 5] ), e => panic!("Could not build {} properly: {:?}", file, e), } } } } } #[test] fn failures() { assert_eq!( *parse("(")[0].as_ref().err().unwrap(), ParseErr::MismatchedOpenParen(0) ); assert_eq!( *parse("#")[0].as_ref().err().unwrap(), ParseErr::LoneNumbersign ); assert_eq!(source("nofile"), None); assert_eq!( *parse("abc |# x")[0].as_ref().err().unwrap(), ParseErr::NoCommentStart ); assert_eq!( *parse("x #| abc")[0].as_ref().err().unwrap(), ParseErr::UnterminatedComment ); assert_eq!( *parse("\"abc")[0].as_ref().err().unwrap(), ParseErr::UnterminatedString(1) ); } }
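A hedged usage sketch for the `parse` entry point above: each element of the returned vector is either a built expression or a `ParseErr`. The shape of `Expr` is not shown here, so only the error side is inspected.

fn report(src: &str) {
    for res in parse(src) {
        match res {
            Ok(_expr) => println!("parsed one expression"),
            Err(e) => eprintln!("parse error: {:?}", e),
        }
    }
}

// e.g. report("(") prints the MismatchedOpenParen(0) error asserted in the tests above.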
true
2b1e6aba5adabab5c7179b13fd62d2395bc06ca0
Rust
rust-lang/rust-analyzer
/crates/ide-assists/src/handlers/generate_default_from_enum_variant.rs
UTF-8
3,765
2.859375
3
[ "Apache-2.0", "MIT" ]
permissive
use ide_db::{famous_defs::FamousDefs, RootDatabase}; use syntax::ast::{self, AstNode, HasName}; use crate::{AssistContext, AssistId, AssistKind, Assists}; // Assist: generate_default_from_enum_variant // // Adds a Default impl for an enum using a variant. // // ``` // enum Version { // Undefined, // Minor$0, // Major, // } // ``` // -> // ``` // enum Version { // Undefined, // Minor, // Major, // } // // impl Default for Version { // fn default() -> Self { // Self::Minor // } // } // ``` pub(crate) fn generate_default_from_enum_variant( acc: &mut Assists, ctx: &AssistContext<'_>, ) -> Option<()> { let variant = ctx.find_node_at_offset::<ast::Variant>()?; let variant_name = variant.name()?; let enum_name = variant.parent_enum().name()?; if !matches!(variant.kind(), ast::StructKind::Unit) { cov_mark::hit!(test_gen_default_on_non_unit_variant_not_implemented); return None; } if existing_default_impl(&ctx.sema, &variant).is_some() { cov_mark::hit!(test_gen_default_impl_already_exists); return None; } let target = variant.syntax().text_range(); acc.add( AssistId("generate_default_from_enum_variant", AssistKind::Generate), "Generate `Default` impl from this enum variant", target, |edit| { let start_offset = variant.parent_enum().syntax().text_range().end(); let buf = format!( r#" impl Default for {enum_name} {{ fn default() -> Self {{ Self::{variant_name} }} }}"#, ); edit.insert(start_offset, buf); }, ) } fn existing_default_impl( sema: &'_ hir::Semantics<'_, RootDatabase>, variant: &ast::Variant, ) -> Option<()> { let variant = sema.to_def(variant)?; let enum_ = variant.parent_enum(sema.db); let krate = enum_.module(sema.db).krate(); let default_trait = FamousDefs(sema, krate).core_default_Default()?; let enum_type = enum_.ty(sema.db); if enum_type.impls_trait(sema.db, default_trait, &[]) { Some(()) } else { None } } #[cfg(test)] mod tests { use crate::tests::{check_assist, check_assist_not_applicable}; use super::*; #[test] fn test_generate_default_from_variant() { check_assist( generate_default_from_enum_variant, r#" //- minicore: default enum Variant { Undefined, Minor$0, Major, } "#, r#" enum Variant { Undefined, Minor, Major, } impl Default for Variant { fn default() -> Self { Self::Minor } } "#, ); } #[test] fn test_generate_default_already_implemented() { cov_mark::check!(test_gen_default_impl_already_exists); check_assist_not_applicable( generate_default_from_enum_variant, r#" //- minicore: default enum Variant { Undefined, Minor$0, Major, } impl Default for Variant { fn default() -> Self { Self::Minor } } "#, ); } #[test] fn test_add_from_impl_no_element() { cov_mark::check!(test_gen_default_on_non_unit_variant_not_implemented); check_assist_not_applicable( generate_default_from_enum_variant, r#" //- minicore: default enum Variant { Undefined, Minor(u32)$0, Major, } "#, ); } #[test] fn test_generate_default_from_variant_with_one_variant() { check_assist( generate_default_from_enum_variant, r#" //- minicore: default enum Variant { Undefi$0ned } "#, r#" enum Variant { Undefined } impl Default for Variant { fn default() -> Self { Self::Undefined } } "#, ); } }
true
b06e4f1d03551cd08f93419ec93a7285cf13e6e8
Rust
xgillard/ddo
/ddo/examples/max2sat/errors.rs
UTF-8
1,981
2.6875
3
[ "MIT" ]
permissive
// Copyright 2020 Xavier Gillard // // Permission is hereby granted, free of charge, to any person obtaining a copy of // this software and associated documentation files (the "Software"), to deal in // the Software without restriction, including without limitation the rights to // use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of // the Software, and to permit persons to whom the Software is furnished to do so, // subject to the following conditions: // // The above copyright notice and this permission notice shall be included in all // copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS // FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR // COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER // IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN // CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. //! This module contains the definition of the errors that can be triggered when //! parsing an instance of the max2sat problem. use std::num::ParseIntError; /// This enumeration simply groups the kind of errors that might occur when parsing a /// instance file. There can be io errors (file unavailable ?), format error /// (e.g. the file is not an instance but contains the text of your next paper), /// or parse int errors (which are actually a variant of the format error since it tells /// you that the parser expected an integer number but got ... something else). #[derive(Debug, thiserror::Error)] pub enum Error { /// There was an io related error #[error("io error {0}")] Io(#[from] std::io::Error), /// The parser expected to read something that was an integer but got some garbage #[error("parse int {0}")] ParseInt(#[from] ParseIntError), }
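A short sketch of how the `#[from]` conversions above are intended to be used: the `?` operator turns io and integer-parsing failures into this `Error` automatically. `read_clause_count` is a hypothetical helper for illustration, not part of the example code.

use std::fs;

// Hypothetical helper: read the first whitespace-separated token of a file as a count.
fn read_clause_count(path: &str) -> Result<usize, Error> {
    let text = fs::read_to_string(path)?;              // std::io::Error -> Error::Io
    let first = text.split_whitespace().next().unwrap_or("0");
    let count = first.parse::<usize>()?;               // ParseIntError -> Error::ParseInt
    Ok(count)
}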
true
27cbbbb07c1dc63f818962407e4be6ed5ab15d49
Rust
CryZe/livesplit-lite-core
/src/segment.rs
UTF-8
309
3.09375
3
[ "MIT" ]
permissive
use std::fmt; #[derive(Clone)] pub struct Segment { pub name: String, } impl fmt::Display for Segment { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.name) } } impl Segment { pub fn new(name: String) -> Segment { Segment { name } } }
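A trivial usage sketch: the `Display` impl above lets a `Segment` render as its name.

fn segment_demo() {
    let seg = Segment::new("Level 1".to_string());
    assert_eq!(seg.to_string(), "Level 1"); // via the Display impl
}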
true
6121cd16bfb95c5752f517480a371dfc984682df
Rust
alexbispo/rust_the_book
/cap03/temperatures_converter/src/main.rs
UTF-8
1,664
3.75
4
[]
no_license
use std::io; fn main() { println!("Temperature converter!"); println!("==============================================================="); loop { println!("Quit: q <enter>"); println!("Fahrenheit to Celsius: f <temperature_number> <enter>"); println!("Celsius to Fahrenheit: c <temperature_number> <enter>"); let mut user_input = String::new(); io::stdin() .read_line(&mut user_input) .expect("Something was bad!"); println!(""); let user_input = user_input.trim(); if user_input == "q" { println!("Bye!"); break; } let inputs: Vec<&str> = user_input.trim().split(' ').collect(); if inputs.len() != 2 { println!("Sorry! Invalid option.\n"); continue; } let origin_temperature = inputs[0]; let temperature: f32 = match inputs[1].trim().parse() { Ok(num) => num, Err(_) => { println!("Sorry! Invalid option.\n"); continue; } }; if origin_temperature == "f" { let converted_temperature = (temperature - 32.0) * (5.0/9.0); println!("{} fahrenheit to celsius is {:.2}", temperature, converted_temperature); } else if origin_temperature == "c" { let converted_temperature = (temperature * (9.0/5.0)) + 32.0; println!("{} celsius to fahrenheit is {:.2}", temperature, converted_temperature); } else { println!("Sorry! Invalid option.\n"); continue; } println!(""); } }
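The two conversion formulas used in the loop above, pulled out as standalone functions with a worked example; this is an illustrative sketch, not part of the original program.

fn f_to_c(f: f32) -> f32 { (f - 32.0) * (5.0 / 9.0) }
fn c_to_f(c: f32) -> f32 { (c * (9.0 / 5.0)) + 32.0 }

// 212 °F corresponds to 100 °C and vice versa:
//   f_to_c(212.0) ≈ 100.0
//   c_to_f(100.0) ≈ 212.0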
true
70d343ea2276c78f5e6b20ef9090473d65964fd8
Rust
cbrewster/alcova
/alcova/src/live_view.rs
UTF-8
5,393
2.578125
3
[ "MIT" ]
permissive
use crate::{ live_socket::{ClientMessage, LiveSocketContext, SocketViewMessage}, LiveSocket, LiveTemplate, }; use actix::{Actor, ActorContext, Addr, Context, Handler, Message}; use actix_web::{HttpRequest, HttpResponse, Responder}; use jsonwebtoken::{encode, EncodingKey, Header}; use serde::{de::DeserializeOwned, Deserialize, Serialize}; #[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, Serialize, Deserialize)] #[serde(transparent)] pub struct LiveViewId(pub usize); pub type LiveViewContext<T> = Context<LiveViewActor<T>>; pub(crate) fn signing_secret() -> String { std::env::var("ALCOVA_SECRET_KEY").unwrap_or_else(|_| { warn!("No secret key set! Using unsecure default"); "secret".into() }) } #[derive(Debug, Clone, Serialize, Deserialize)] pub(crate) struct Claims<T> { exp: u64, pub(crate) data: T, } impl<T> Claims<T> { fn new(minutes: u64, data: T) -> Self { let exp = std::time::SystemTime::now() .duration_since(std::time::SystemTime::UNIX_EPOCH) .unwrap() .as_secs() + (minutes * 60); Self { exp, data } } } pub trait LiveView: Sized + Unpin + 'static { type Template: LiveTemplate + Unpin; type SessionData: Serialize + DeserializeOwned; fn name() -> &'static str; fn mount(socket_ctx: &LiveSocketContext, session: Self::SessionData) -> Self; fn started(&mut self, _ctx: &mut LiveViewContext<Self>) {} fn handle_event(&mut self, _event: &str, _value: &str, _ctx: &mut LiveViewContext<Self>) {} fn template(&self) -> Self::Template; fn to_string(&self, session: &Self::SessionData) -> String { let key = signing_secret(); // TODO: Not sure how we should handle tokens expiring. Maybe reload the page on the // client? let claims = Claims::new(60, session); let token = encode( &Header::default(), &claims, &EncodingKey::from_secret(key.as_bytes()), ) .unwrap(); self.template() .render_with_wrapper(Self::name(), token.as_str()) } fn to_response(self, session: Self::SessionData) -> LiveViewResponse<Self> { LiveViewResponse { live_view: self, session, } } } pub trait LiveMessage: Message<Result = ()> {} pub struct LiveViewResponse<T: LiveView> { live_view: T, session: T::SessionData, } impl<T> Responder for LiveViewResponse<T> where T: LiveView, { type Error = actix_web::Error; type Future = futures::future::Ready<Result<HttpResponse, actix_web::Error>>; fn respond_to(self, _req: &HttpRequest) -> Self::Future { let body = self.live_view.to_string(&self.session); // Create response and set content type futures::future::ready(Ok(HttpResponse::Ok().body(body))) } } #[derive(Message, Debug, Deserialize)] #[rtype(result = "()")] pub enum LiveViewMessage { ClientAction(LiveViewAction), Stop, } #[derive(Debug, Deserialize)] pub struct LiveViewAction { action: String, value: Option<String>, } pub struct LiveViewActor<T: LiveView> { id: LiveViewId, pub view: T, socket: Addr<LiveSocket>, old_template: Option<T::Template>, } impl<T: LiveView + Unpin + 'static> LiveViewActor<T> { pub fn new( id: LiveViewId, socket: Addr<LiveSocket>, context: &LiveSocketContext, session: T::SessionData, ) -> Self { LiveViewActor { id, view: T::mount(context, session), socket, old_template: None, } } pub fn send_changes(&mut self) { let template = self.view.template(); let message = ClientMessage::Changes(template.changes(self.old_template.as_ref().unwrap())); self.old_template = Some(template); self.socket.do_send(SocketViewMessage { message }); } } impl<T> Actor for LiveViewActor<T> where T: LiveView + Unpin + 'static, { type Context = Context<Self>; fn started(&mut self, ctx: &mut Self::Context) { self.view.started(ctx); let template = 
self.view.template(); let message = ClientMessage::Template { template: template.render(), id: self.id, }; self.old_template = Some(template); self.socket.do_send(SocketViewMessage { message }); } } impl<T> Handler<LiveViewMessage> for LiveViewActor<T> where T: LiveView + Unpin + 'static, { type Result = (); fn handle(&mut self, msg: LiveViewMessage, ctx: &mut Self::Context) -> Self::Result { match msg { LiveViewMessage::ClientAction(LiveViewAction { action, value }) => { let value = value.unwrap_or(String::new()); self.view.handle_event(&action, &value, ctx); self.send_changes(); } LiveViewMessage::Stop => ctx.stop(), } } } pub trait LiveHandler<M: LiveMessage> where Self: LiveView, { fn handle(&mut self, msg: M, ctx: &mut LiveViewContext<Self>); } impl<T, M> Handler<M> for LiveViewActor<T> where T: LiveView + Unpin + LiveHandler<M> + 'static, M: LiveMessage, { type Result = (); fn handle(&mut self, msg: M, ctx: &mut Self::Context) -> Self::Result { self.view.handle(msg, ctx); self.send_changes(); } }
true
ebedcc11fadf54c9cb0f39de30e39b06574346c1
Rust
ypoluektovich/tmux-interface-rs
/src/commands/windows_and_panes/select_pane.rs
UTF-8
4,684
2.953125
3
[ "MIT", "LicenseRef-scancode-unknown-license-reference" ]
permissive
use crate::commands::constants::*; use crate::{Error, TmuxCommand, TmuxOutput}; use std::borrow::Cow; /// Make pane `target-pane` the active pane in window `target-window` /// /// # Manual /// /// tmux ^3.1: /// ```text /// tmux select-pane [-DdeLlMmRUZ] [-T title] [-t target-pane] /// (alias: selectp) /// ``` /// /// tmux ^2.6: /// ```text /// tmux select-pane [-DdeLlMmRU] [-T title] [-t target-pane] /// (alias: selectp) /// ``` /// /// tmux ^2.1: /// ```text /// tmux select-pane [-DdegLlMmRU] [-P style] [-t target-pane] /// (alias: selectp) /// ``` /// /// tmux ^2.0: /// ```text /// tmux select-pane [-DdeLlRU] [-t target-pane] /// (alias: selectp) /// ``` /// /// tmux ^1.5: /// ```text /// tmux select-pane [-DLlRU] [-t target-pane] /// (alias: selectp) /// ``` /// /// tmux ^1.3: /// ```text /// tmux select-pane [-DLRU] [-t target-pane] /// (alias: selectp) /// ``` /// /// tmux ^1.0: /// ```text /// tmux select-pane [-t target-pane] /// (alias: selectp) /// ``` /// /// tmux ^0.8: /// ```text /// tmux select-pane [-p pane-index] [-t target-window] /// (alias: selectp) /// ``` #[derive(Debug, Clone)] pub struct SelectPane<'a>(pub TmuxCommand<'a>); impl<'a> Default for SelectPane<'a> { fn default() -> Self { Self(TmuxCommand { cmd: Some(Cow::Borrowed(SELECT_PANE)), ..Default::default() }) } } impl<'a> SelectPane<'a> { pub fn new() -> Self { Default::default() } /// `[-D]` - pane below #[cfg(feature = "tmux_1_3")] pub fn down(&mut self) -> &mut Self { self.0.push_flag(D_UPPERCASE_KEY); self } /// `[-d]` - disable input #[cfg(feature = "tmux_2_0")] pub fn disable(&mut self) -> &mut Self { self.0.push_flag(D_LOWERCASE_KEY); self } /// `[-e]` - enable input #[cfg(feature = "tmux_2_0")] pub fn enable(&mut self) -> &mut Self { self.0.push_flag(E_LOWERCASE_KEY); self } /// `[-g]` - show the current pane style #[cfg(feature = "tmux_2_1")] pub fn show_style(&mut self) -> &mut Self { self.0.push_flag(G_LOWERCASE_KEY); self } /// `[-L]` - pane left #[cfg(feature = "tmux_1_3")] pub fn left(&mut self) -> &mut Self { self.0.push_flag(L_UPPERCASE_KEY); self } /// `[-l]` - equivalent to last-pane command #[cfg(feature = "tmux_1_5")] pub fn last(&mut self) -> &mut Self { self.0.push_flag(L_LOWERCASE_KEY); self } /// `[-M]` - clear marked pane #[cfg(feature = "tmux_2_1")] pub fn set_marked(&mut self) -> &mut Self { self.0.push_flag(M_UPPERCASE_KEY); self } /// `[-m]` - set marked pane #[cfg(feature = "tmux_2_1")] pub fn clear_marked(&mut self) -> &mut Self { self.0.push_flag(M_LOWERCASE_KEY); self } /// `[-R]` - pane right #[cfg(feature = "tmux_1_3")] pub fn right(&mut self) -> &mut Self { self.0.push_flag(R_UPPERCASE_KEY); self } /// `[-U]` - pane above #[cfg(feature = "tmux_1_3")] pub fn up(&mut self) -> &mut Self { self.0.push_flag(U_UPPERCASE_KEY); self } /// `[-Z]` - keep the window zoomed if it was zoomed #[cfg(feature = "tmux_3_1")] pub fn keep_zoomed(&mut self) -> &mut Self { self.0.push_flag(Z_UPPERCASE_KEY); self } /// `[-P style]` - set the style for a single pane #[cfg(feature = "tmux_2_1")] pub fn style<S: Into<Cow<'a, str>>>(&mut self, style: S) -> &mut Self { self.0.push_option(P_UPPERCASE_KEY, style); self } /// `[-T title]` - title #[cfg(feature = "tmux_2_6")] pub fn title<S: Into<Cow<'a, str>>>(&mut self, title: S) -> &mut Self { self.0.push_option(T_UPPERCASE_KEY, title); self } /// `[-t target-pane]` - target-pane #[cfg(feature = "tmux_1_0")] pub fn target_pane<S: Into<Cow<'a, str>>>(&mut self, target_pane: S) -> &mut Self { self.0.push_option(T_LOWERCASE_KEY, target_pane); self } pub fn 
output(&self) -> Result<TmuxOutput, Error> { self.0.output() } } impl<'a> From<TmuxCommand<'a>> for SelectPane<'a> { fn from(item: TmuxCommand<'a>) -> Self { Self(TmuxCommand { bin: item.bin, cmd: Some(Cow::Borrowed(SELECT_PANE)), ..Default::default() }) } } impl<'a> From<&TmuxCommand<'a>> for SelectPane<'a> { fn from(item: &TmuxCommand<'a>) -> Self { Self(TmuxCommand { bin: item.bin.clone(), cmd: Some(Cow::Borrowed(SELECT_PANE)), ..Default::default() }) } }
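A hedged usage sketch for the builder above, assuming the crate is compiled with a tmux-version feature new enough to expose these flags (e.g. `tmux_2_6` or later) and that a tmux server is running.

use tmux_interface::SelectPane;

fn focus_pane_below() {
    // Builds and runs `tmux select-pane -D -t mysession:1.2`.
    let result = SelectPane::new()
        .down()                        // [-D] pane below
        .target_pane("mysession:1.2")  // [-t target-pane]
        .output();

    if let Err(e) = result {
        eprintln!("select-pane failed: {:?}", e);
    }
}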
true
1b436e87e29d2332e959e3d751d15137af83a859
Rust
Ryan1729/rote
/libs/move_mod/src/move_mod.rs
UTF-8
1,600
3.09375
3
[ "Apache-2.0", "MIT" ]
permissive
use macros::{fmt_display, ord}; #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] pub enum Move { Up, Down, Left, Right, ToLineStart, ToLineEnd, ToBufferStart, ToBufferEnd, ToPreviousLikelyEditLocation, ToNextLikelyEditLocation, } use Move::*; fmt_display!(for Move: r#move in "{}", match r#move { Up => "^", Down => "v", Left => "<", Right => ">", ToLineStart => "Line<", ToLineEnd => "Line>", ToBufferStart => "Buffer<", ToBufferEnd => "Buffer>", ToPreviousLikelyEditLocation => "Edit<", ToNextLikelyEditLocation => "Edit>", }); macro_rules! to_num { ($m: expr) => { match $m { Up => 0, Down => 1, Left => 2, Right => 3, ToLineStart => 4, ToLineEnd => 5, ToBufferStart => 6, ToBufferEnd => 7, ToPreviousLikelyEditLocation => 8, ToNextLikelyEditLocation => 9, } }; } ord!(for Move: r#move, other in to_num!(r#move).cmp(&to_num!(other))); impl std::ops::Not for Move { type Output = Move; fn not(self) -> Self::Output { match self { Up => Down, Down => Up, Left => Right, Right => Left, ToLineStart => ToLineEnd, ToLineEnd => ToLineStart, ToBufferStart => ToBufferEnd, ToBufferEnd => ToBufferStart, ToPreviousLikelyEditLocation => ToNextLikelyEditLocation, ToNextLikelyEditLocation => ToPreviousLikelyEditLocation, } } }
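A quick sketch of the `Not` impl above: negating a movement yields its opposite, and double negation round-trips.

fn opposite_demo() {
    assert_eq!(!Move::Up, Move::Down);
    assert_eq!(!Move::ToLineStart, Move::ToLineEnd);
    assert_eq!(!!Move::Left, Move::Left);
}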
true
c8d35e534af7e67f65945615931fceb055007d00
Rust
mishaszu/RUST-INTRO
/src/ex1/match_statement/src/basic_matching.rs
UTF-8
328
3.15625
3
[]
no_license
fn match_test() { let country_code = 1000; let country = match country_code { 44 => "UK", 46 => "Sweden", 1..=999 => "unknown", _ => "invalid", }; println!("the country for this code is: {}", country); } pub fn run() { println!("Running basic matching"); match_test(); }
true
40d6a584f86d61530d0458014433319562701614
Rust
rust-lang/crates.io
/src/email.rs
UTF-8
9,186
2.796875
3
[ "MIT", "Apache-2.0" ]
permissive
use std::path::PathBuf; use std::sync::Mutex; use crate::util::errors::{server_error, AppResult}; use crate::config; use crate::Env; use lettre::message::header::ContentType; use lettre::transport::file::FileTransport; use lettre::transport::smtp::authentication::{Credentials, Mechanism}; use lettre::transport::smtp::SmtpTransport; use lettre::{Message, Transport}; use rand::distributions::{Alphanumeric, DistString}; #[derive(Debug)] pub struct Emails { backend: EmailBackend, } impl Emails { /// Create a new instance detecting the backend from the environment. This will either connect /// to a SMTP server or store the emails on the local filesystem. pub fn from_environment(config: &config::Server) -> Self { let backend = match ( dotenvy::var("MAILGUN_SMTP_LOGIN"), dotenvy::var("MAILGUN_SMTP_PASSWORD"), dotenvy::var("MAILGUN_SMTP_SERVER"), ) { (Ok(login), Ok(password), Ok(server)) => EmailBackend::Smtp { server, login, password, }, _ => EmailBackend::FileSystem { path: "/tmp".into(), }, }; if config.base.env == Env::Production && !matches!(backend, EmailBackend::Smtp { .. }) { panic!("only the smtp backend is allowed in production"); } Self { backend } } /// Create a new test backend that stores all the outgoing emails in memory, allowing for tests /// to later assert the mails were sent. pub fn new_in_memory() -> Self { Self { backend: EmailBackend::Memory { mails: Mutex::new(Vec::new()), }, } } /// Attempts to send a confirmation email. pub fn send_user_confirm(&self, email: &str, user_name: &str, token: &str) -> AppResult<()> { // Create a URL with token string as path to send to user // If user clicks on path, look email/user up in database, // make sure tokens match let subject = "Please confirm your email address"; let body = format!( "Hello {}! Welcome to Crates.io. Please click the link below to verify your email address. Thank you!\n https://{}/confirm/{}", user_name, crate::config::domain_name(), token ); self.send(email, subject, &body) } /// Attempts to send an ownership invitation. pub fn send_owner_invite( &self, email: &str, user_name: &str, crate_name: &str, token: &str, ) -> AppResult<()> { let subject = "Crate ownership invitation"; let body = format!( "{user_name} has invited you to become an owner of the crate {crate_name}!\n Visit https://{domain}/accept-invite/{token} to accept this invitation, or go to https://{domain}/me/pending-invites to manage all of your crate ownership invitations.", domain = crate::config::domain_name() ); self.send(email, subject, &body) } /// Attempts to send an API token exposure notification email pub fn send_token_exposed_notification( &self, email: &str, url: &str, reporter: &str, source: &str, token_name: &str, ) -> AppResult<()> { let subject = "Exposed API token found"; let mut body = format!( "{reporter} has notified us that your crates.io API token {token_name}\n has been exposed publicly. We have revoked this token as a precaution.\n Please review your account at https://{domain} to confirm that no\n unexpected changes have been made to your settings or crates.\n \n Source type: {source}\n", domain = crate::config::domain_name() ); if url.is_empty() { body.push_str("\nWe were not informed of the URL where the token was found.\n"); } else { body.push_str(&format!("\nURL where the token was found: {url}\n")); } self.send(email, subject, &body) } /// This is supposed to be used only during tests, to retrieve the messages stored in the /// "memory" backend. It's not cfg'd away because our integration tests need to access this. 
pub fn mails_in_memory(&self) -> Option<Vec<StoredEmail>> { if let EmailBackend::Memory { mails } = &self.backend { Some(mails.lock().unwrap().clone()) } else { None } } fn send(&self, recipient: &str, subject: &str, body: &str) -> AppResult<()> { // The message ID is normally generated by the SMTP server, but if we let it generate the // ID there will be no way for the crates.io application to know the ID of the message it // just sent, as it's not included in the SMTP response. // // Our support staff needs to know the message ID to be able to find misdelivered emails. // Because of that we're generating a random message ID, hoping the SMTP server doesn't // replace it when it relays the message. let message_id = format!( "<{}@{}>", Alphanumeric.sample_string(&mut rand::thread_rng(), 32), crate::config::domain_name(), ); let email = Message::builder() .message_id(Some(message_id.clone())) .to(recipient.parse()?) .from(self.sender_address().parse()?) .subject(subject) .header(ContentType::TEXT_PLAIN) .body(body.to_string())?; match &self.backend { EmailBackend::Smtp { server, login, password, } => { SmtpTransport::relay(server) .and_then(|transport| { transport .credentials(Credentials::new(login.clone(), password.clone())) .authentication(vec![Mechanism::Plain]) .build() .send(&email) }) .map_err(|error| { error!(?error, "Failed to send email"); server_error("Failed to send the email") })?; info!(?message_id, ?subject, "Email sent"); } EmailBackend::FileSystem { path } => { let id = FileTransport::new(path).send(&email).map_err(|error| { error!(?error, "Failed to send email"); server_error("Email file could not be generated") })?; info!( path = ?path.join(format!("{id}.eml")), ?subject, "Email sent" ); } EmailBackend::Memory { mails } => { mails.lock().unwrap().push(StoredEmail { to: recipient.into(), subject: subject.into(), body: body.into(), }); } } Ok(()) } fn sender_address(&self) -> &str { match &self.backend { EmailBackend::Smtp { login, .. } => login, EmailBackend::FileSystem { .. } => "test@localhost", EmailBackend::Memory { .. } => "test@localhost", } } } enum EmailBackend { /// Backend used in production to send mails using SMTP. Smtp { server: String, login: String, password: String, }, /// Backend used locally during development, will store the emails in the provided directory. FileSystem { path: PathBuf }, /// Backend used during tests, will keep messages in memory to allow tests to retrieve them. Memory { mails: Mutex<Vec<StoredEmail>> }, } // Custom Debug implementation to avoid showing the SMTP password. impl std::fmt::Debug for EmailBackend { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { EmailBackend::Smtp { server, login, .. } => { // The password field is *intentionally* not included f.debug_struct("Smtp") .field("server", server) .field("login", login) .finish()?; } EmailBackend::FileSystem { path } => { f.debug_struct("FileSystem").field("path", path).finish()?; } EmailBackend::Memory { .. } => f.write_str("Memory")?, } Ok(()) } } #[derive(Debug, Clone)] pub struct StoredEmail { pub to: String, pub subject: String, pub body: String, } #[cfg(test)] mod tests { use super::*; #[test] fn sending_to_invalid_email_fails() { let emails = Emails::new_in_memory(); assert_err!(emails.send( "String.Format(\"{0}.{1}@live.com\", FirstName, LastName)", "test", "test", )); } #[test] fn sending_to_valid_email_succeeds() { let emails = Emails::new_in_memory(); assert_ok!(emails.send("[email protected]", "test", "test")); } }
true
0264df150e0ef52391e318759d05ad4079086336
Rust
michaelherger/librespot
/core/src/util.rs
UTF-8
589
3.09375
3
[ "LicenseRef-scancode-warranty-disclaimer", "MIT" ]
permissive
use std::mem; pub trait Seq { fn next(&self) -> Self; } macro_rules! impl_seq { ($($ty:ty)*) => { $( impl Seq for $ty { fn next(&self) -> Self { (*self).wrapping_add(1) } } )* } } impl_seq!(u8 u16 u32 u64 usize); #[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord, Default)] pub struct SeqGenerator<T: Seq>(T); impl<T: Seq> SeqGenerator<T> { pub fn new(value: T) -> Self { SeqGenerator(value) } pub fn get(&mut self) -> T { let value = self.0.next(); mem::replace(&mut self.0, value) } }
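A small usage sketch: `get` returns the current value and stores its successor, so consecutive calls count up and wrap at the integer boundary.

fn seq_demo() {
    let mut seq = SeqGenerator::<u8>::new(254);
    assert_eq!(seq.get(), 254);
    assert_eq!(seq.get(), 255);
    assert_eq!(seq.get(), 0); // wrapping_add(1) wrapped around
}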
true
3e59f3b73d6bfb037621cc2624509352efe28e92
Rust
aheart/hearth
/src/metrics/mod.rs
UTF-8
1,523
2.671875
3
[ "MIT", "LicenseRef-scancode-unknown-license-reference", "Apache-2.0" ]
permissive
pub mod aggregator; mod cpu; mod disk; pub mod hub; mod la; mod metric_buffer; mod network; mod ram; mod space; use std::time::SystemTime; #[derive(PartialEq, Debug)] pub enum Metrics { Cpu(cpu::CpuMetrics), Disk(disk::DiskMetrics), La(la::LaMetrics), Net(network::NetMetrics), Ram(ram::RamMetrics), Space(space::SpaceMetrics), } /// Interface for Metric Plugins that possess the knowledge of retrieving raw metric data and /// processing this raw data into structured Metric key value pairs. pub trait MetricPlugin: Send + 'static { /// Returns a command that should be run in order to retrieve raw data fn get_query(&self) -> &str; /// Transforms raw data into a HashMap of metrics fn process_data(&mut self, raw_data: &str, timestamp: &SystemTime) -> Metrics; /// Returns a HashMap with keys and empty values fn empty_metrics(&self) -> Metrics; } /// Creates all possible metric plugins and returns them as a HashMap fn metric_plugin_factory( disk: &str, filesystem: &str, network_interface: &str, ) -> Vec<Box<dyn MetricPlugin>> { let metric_plugins: Vec<Box<dyn MetricPlugin>> = vec![ Box::new(cpu::CpuMetricPlugin::new()), Box::new(ram::RamMetricPlugin::new()), Box::new(la::LoadAverageMetricPlugin::new()), Box::new(disk::DiskMetricPlugin::new(disk)), Box::new(network::NetworkMetricPlugin::new(network_interface)), Box::new(space::SpaceMetricPlugin::new(filesystem)), ]; metric_plugins }
true
81754a6d5d7adb8cc231319c4b1e3c1837370eda
Rust
lorenzoditucci/calyx
/calyx/src/passes/group_to_invoke.rs
UTF-8
4,197
2.828125
3
[ "MIT" ]
permissive
use std::rc::Rc; use itertools::Itertools; use crate::analysis::ReadWriteSet; use crate::ir::RRC; use crate::ir::{ self, traversal::{Action, Named, VisResult, Visitor}, }; /// Transform groups that are structurally invoking components into equivalent /// [ir::Invoke] statements. /// /// For a group to meet the requirements of this pass, it must /// 1. Only use unguarded assignments /// 2. Only assign to input ports of one component /// 3. Assign `1'd1` to the @go port of the component, and /// 4. Depend directly on the @done port of the component for its done /// condition. #[derive(Default)] pub struct GroupToInvoke; impl Named for GroupToInvoke { fn name() -> &'static str { "group2invoke" } fn description() -> &'static str { "covert groups that structurally invoke one component into invoke statements" } } /// Construct an [ir::Invoke] from an [ir::Group] that has been validated by this pass. fn construct_invoke( assigns: &[ir::Assignment], comp: RRC<ir::Cell>, ) -> ir::Control { let mut inputs = Vec::new(); let mut outputs = Vec::new(); let cell_is_parent = |port: &ir::Port| -> bool { if let ir::PortParent::Cell(cell_wref) = &port.parent { Rc::ptr_eq(&cell_wref.upgrade(), &comp) } else { false } }; for assign in assigns { // If the cell's port is being used as a source, add the dst to // outputs if cell_is_parent(&assign.src.borrow()) && assign.src != comp.borrow().get_with_attr("done") { let name = assign.src.borrow().name.clone(); outputs.push((name, Rc::clone(&assign.dst))); } // If the cell's port is being used as a dest, add the source to // inputs if cell_is_parent(&assign.dst.borrow()) && assign.dst != comp.borrow().get_with_attr("go") { let name = assign.dst.borrow().name.clone(); inputs.push((name, Rc::clone(&assign.src))); } } ir::Control::invoke(comp, inputs, outputs) } impl Visitor for GroupToInvoke { fn enable( &mut self, s: &mut ir::Enable, _comp: &mut ir::Component, _sigs: &ir::LibrarySignatures, ) -> VisResult { let group = s.group.borrow(); // There should be exactly one component being written to in the // group. let mut writes = ReadWriteSet::write_set(&group.assignments).collect_vec(); if writes.len() != 1 { return Ok(Action::Continue); } // Component must define a @go/@done interface let cell = writes.pop().unwrap(); let maybe_go_port = cell.borrow().find_with_attr("go"); let maybe_done_port = cell.borrow().find_with_attr("done"); if maybe_go_port.is_none() || maybe_done_port.is_none() { return Ok(Action::Continue); } let go_port = maybe_go_port.unwrap(); let mut go_multi_write = false; let done_port = maybe_done_port.unwrap(); let mut done_multi_write = false; for assign in &group.assignments { // All assignments should be unguaraded. if !assign.guard.is_true() { return Ok(Action::Continue); } // @go port should have exactly one write and the src should be 1. if assign.dst == go_port { if go_multi_write { return Ok(Action::Continue); } if !go_multi_write && assign.src.borrow().is_constant(1, 1) { go_multi_write = true; } } // @done port should have exactly one read and the dst should be // group's done signal. if assign.src == done_port { if done_multi_write { return Ok(Action::Continue); } if !done_multi_write && assign.dst == group.get("done") { done_multi_write = true; } } } Ok(Action::Change(construct_invoke(&group.assignments, cell))) } }
true
b93dae2a656f75b8afab53af8cbad710419bc1ae
Rust
Sophie-Williams/GameRoom-Bot
/src/command.rs
UTF-8
836
3.09375
3
[]
no_license
use std::string::String; use discord::model::{Message, ChannelId, User}; #[derive(Debug)] pub struct Command { user: User, channel_id: ChannelId, command: String, args: Vec<String>, } impl Command { pub fn parse(message: &Message) -> Command { let mut args: Vec<String> = message.content.split_whitespace().map(|s| String::from(s)).collect(); Command { user: message.author.clone(), channel_id: message.channel_id.clone(), command: args.remove(0), args: args, } } pub fn user(&self) -> &User { &self.user } pub fn channel_id(&self) -> &ChannelId { &self.channel_id } pub fn command(&self) -> &str { &*self.command } pub fn args(&self) -> &Vec<String> { &self.args } }
true
4177bdbb65c162c1eac67135c784f12bd836fda9
Rust
tinaun/playbot_ng_serenity
/src/context.rs
UTF-8
3,842
3.046875
3
[]
no_license
use serenity; use serenity::model::{ channel::Message, id::{ChannelId, UserId}, }; use threadpool::ThreadPool; use regex::Regex; use std::rc::Rc; type SendFn = fn(&ThreadPool, ChannelId, &str) -> serenity::Result<()>; #[derive(Clone)] pub struct Context<'a> { body: &'a str, is_directly_addressed: bool, send_fn: SendFn, source: UserId, source_nickname: &'a str, target: ChannelId, client: &'a Message, pool: &'a ThreadPool, current_nickname: Rc<String>, } impl<'a> Context<'a> { pub fn new(pool: &'a ThreadPool, message: &'a Message) -> Option<Self> { lazy_static! { static ref MENTION: Regex = Regex::new(r"<@[0-9]*>").unwrap(); } let mut body = &message.content[..]; let id = serenity::CACHE.read().user.id; let current_nickname = Rc::new(serenity::CACHE.read().user.name.to_owned()); let source_nickname = &message.author.name; let source = message.author.id; let target = message.channel_id; let is_directly_addressed = { if body.starts_with(current_nickname.as_str()) { let new_body = body[current_nickname.len()..].trim_left(); let has_separator = new_body.starts_with(":") || new_body.starts_with(","); if has_separator { body = new_body[1..].trim_left(); } has_separator } else { let mentioned = message.mentions_user_id(id); if mentioned { let mention = MENTION .captures(body) .and_then(|cap| cap.get(0)) .unwrap(); body = body[mention.end()..].trim_left(); } mentioned } }; let send_fn: SendFn = |_pool, channel_id, msg| { channel_id.say(msg).map(|_| ()) }; Some(Self { client: message, pool, body, send_fn, source, source_nickname, target, is_directly_addressed, current_nickname }) } pub fn body(&self) -> &'a str { self.body } /// Wether the message was aimed directetly at the bot, /// either via private message or by prefixing a channel message with /// the bot's name, followed by ',' or ':'. pub fn is_directly_addressed(&self) -> bool { self.is_directly_addressed } pub fn is_ctcp(&self) -> bool { false } pub fn reply<S: AsRef<str>>(&self, message: S) { let message = message.as_ref(); eprintln!("Replying: {:?}", message); for line in message.lines() { if line.len() > 2000 { let _ = (self.send_fn)(self.pool, self.target, "<<<message too long for irc>>>"); continue; } let _ = (self.send_fn)(self.pool, self.target, line); } } pub fn source(&self) -> UserId { self.source } pub fn source_nickname(&self) -> &'a str { self.source_nickname } pub fn current_nickname(&self) -> Rc<String> { self.current_nickname.clone() } pub fn inline_contexts<'b>(&'b self) -> impl Iterator<Item = Context<'a>> + 'b { lazy_static! { static ref INLINE_CMD: Regex = Regex::new(r"\{(.*?)}").unwrap(); } let body = if self.is_directly_addressed() { "" } else { self.body }; let contexts = INLINE_CMD .captures_iter(body) .flat_map(|caps| caps.get(1)) .map(move |body| Context { body: body.as_str(), .. self.clone() }); Box::new(contexts) } }
true
dee11345a693a0d28811763657a85ef4b000e0b2
Rust
Larusso/unity-version-manager
/uvm_core/src/unity/version/mod.rs
UTF-8
24,312
2.59375
3
[ "Apache-2.0" ]
permissive
use crate::unity::Installation; use log::{debug, info}; use regex::Regex; use semver; use serde::{self, Deserialize, Deserializer, Serialize, Serializer}; use std::cmp::Ordering; use std::convert::{AsMut, AsRef, From, TryFrom}; use std::fmt; use std::path::{Path, PathBuf}; use std::result; use std::str::FromStr; mod error; mod hash; pub use error::{Result, VersionError}; pub mod manifest; pub mod module; use crate::sys::unity::version as version_impl; pub use self::hash::all_versions; use self::hash::UnityHashError; pub use self::version_impl::read_version_from_path; #[derive(PartialEq, Eq, Ord, Hash, Debug, Clone, Copy, Deserialize)] pub enum VersionType { Alpha, Beta, Patch, Final, } impl PartialOrd for VersionType { fn partial_cmp(&self, other: &VersionType) -> Option<Ordering> { Some(self.cmp(other)) } } #[derive(Eq, Debug, Clone, Hash, PartialOrd)] pub struct Version { base: semver::Version, release_type: VersionType, revision: u64, hash: Option<String>, } impl Ord for Version { fn cmp(&self, other: &Version) -> Ordering { self.base .cmp(&other.base) .then(self.release_type.cmp(&other.release_type)) .then(self.revision.cmp(&other.revision)) } } impl Serialize for Version { fn serialize<S>(&self, serializer: S) -> result::Result<S::Ok, S::Error> where S: Serializer, { let s = self.to_string(); serializer.serialize_str(&s) } } impl<'de> Deserialize<'de> for Version { fn deserialize<D>(deserializer: D) -> result::Result<Self, D::Error> where D: Deserializer<'de>, { let s = String::deserialize(deserializer)?; Version::from_str(&s).map_err(serde::de::Error::custom) } } impl Version { pub fn new( major: u64, minor: u64, patch: u64, release_type: VersionType, revision: u64, ) -> Version { let base = semver::Version::new(major, minor, patch); Version { base, release_type, revision, hash: None, } } pub fn from_path<P: AsRef<Path>>(path: P) -> Result<Version> { version_impl::read_version_from_path(path) } pub fn a(major: u64, minor: u64, patch: u64, revision: u64) -> Version { let base = semver::Version::new(major, minor, patch); Version { base, release_type: VersionType::Alpha, revision, hash: None, } } pub fn b(major: u64, minor: u64, patch: u64, revision: u64) -> Version { let base = semver::Version::new(major, minor, patch); Version { base, release_type: VersionType::Beta, revision, hash: None, } } pub fn p(major: u64, minor: u64, patch: u64, revision: u64) -> Version { let base = semver::Version::new(major, minor, patch); Version { base, release_type: VersionType::Patch, revision, hash: None, } } pub fn f(major: u64, minor: u64, patch: u64, revision: u64) -> Version { let base = semver::Version::new(major, minor, patch); Version { base, release_type: VersionType::Final, revision, hash: None, } } pub fn release_type(&self) -> &VersionType { &self.release_type } pub fn version_hash(&self) -> Result<String> { self.hash .as_ref() .map(|h| h.to_owned()) .ok_or_else(|| VersionError::HashMissing { source: UnityHashError::Other, version: self.to_string(), }) .or_else(|_err| { hash::hash_for_version(self).map_err(|source| VersionError::HashMissing { source, version: self.to_string(), }) }) } pub fn major(&self) -> u64 { self.base.major } pub fn minor(&self) -> u64 { self.base.minor } pub fn patch(&self) -> u64 { self.base.patch } pub fn revision(&self) -> u64 { self.revision } #[cfg(unix)] pub fn find_version_in_file<P: AsRef<Path>>(path: P) -> Result<Version> { use std::process::{Command, Stdio}; let path = path.as_ref(); debug!("find unity version in Unity executable {}", path.display()); let 
child = Command::new("strings") .arg("--") .arg(path) .stdout(Stdio::piped()) .stderr(Stdio::piped()) .spawn()?; let output = child.wait_with_output()?; if !output.status.success() { return Err(VersionError::ExecutableContainsNoVersion( path.display().to_string(), )); } let version = Version::from_str(&String::from_utf8_lossy(&output.stdout))?; debug!("found version {}", &version); Ok(version) } pub fn base(&self) -> &semver::Version { &self.base } pub fn as_semver(&self) -> semver::Version { let mut v = self.base.clone(); if self.release_type != VersionType::Final { v.pre = semver::Prerelease::new(&format!("{}.{}", self.release_type, self.revision)) .unwrap(); } v } pub fn set_version_hash<S: AsRef<str>>(&mut self, hash: Option<S>) { self.hash = hash.map(|s| s.as_ref().to_owned()); } pub fn has_version_hash(&self) -> bool { self.hash.is_some() } } impl PartialEq for Version { fn eq(&self, other: &Self) -> bool { let eq = self.base == other.base && self.release_type == other.release_type && self.revision == other.revision; if self.hash.is_some() && other.hash.is_some() { return eq && self.hash == other.hash } eq } } impl From<(u64, u64, u64, u64)> for Version { fn from(tuple: (u64, u64, u64, u64)) -> Version { let (major, minor, patch, revision) = tuple; Version::f(major, minor, patch, revision) } } impl TryFrom<PathBuf> for Version { type Error = VersionError; fn try_from(path: PathBuf) -> Result<Self> { Version::from_path(path) } } impl TryFrom<&Path> for Version { type Error = VersionError; fn try_from(path: &Path) -> Result<Self> { Version::from_path(path) } } impl fmt::Display for VersionType { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { if f.alternate() { match *self { VersionType::Final => write!(f, "final"), VersionType::Patch => write!(f, "patch"), VersionType::Beta => write!(f, "beta"), VersionType::Alpha => write!(f, "alpha"), } } else { match *self { VersionType::Final => write!(f, "f"), VersionType::Patch => write!(f, "p"), VersionType::Beta => write!(f, "b"), VersionType::Alpha => write!(f, "a"), } } } } impl Default for VersionType { fn default() -> VersionType { VersionType::Final } } impl fmt::Display for Version { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!( f, "{}{}{}", self.base, self.release_type.to_string(), self.revision ) } } impl AsRef<Version> for Version { fn as_ref(&self) -> &Self { self } } impl AsMut<Version> for Version { fn as_mut(&mut self) -> &mut Self { self } } impl FromStr for Version { type Err = VersionError; fn from_str(s: &str) -> Result<Self> { let version_pattern = Regex::new(r"([0-9]{1,4})\.([0-9]{1,4})\.([0-9]{1,4})(f|p|b|a)([0-9]{1,4})( \(([a-z0-9]{12})\)|/([a-z0-9]{12}))?").unwrap(); match version_pattern.captures(s) { Some(caps) => { let major: u64 = caps.get(1).map_or("0", |m| m.as_str()).parse().unwrap(); let minor: u64 = caps.get(2).map_or("0", |m| m.as_str()).parse().unwrap(); let patch: u64 = caps.get(3).map_or("0", |m| m.as_str()).parse().unwrap(); let release_type = match caps.get(4).map_or("", |m| m.as_str()) { "f" => Some(VersionType::Final), "p" => Some(VersionType::Patch), "b" => Some(VersionType::Beta), "a" => Some(VersionType::Alpha), _ => None, }; let revision: u64 = caps.get(5).map_or("0", |m| m.as_str()).parse().unwrap(); let hash = caps.get(7).or(caps.get(8)).map(|m| m.as_str().to_owned()); let base = semver::Version::new(major, minor, patch); Ok(Version { base, revision, release_type: release_type.unwrap(), hash: hash, }) } None => Err(VersionError::ParsingFailed(s.to_string())), } } } 
impl FromStr for VersionType { type Err = VersionError; fn from_str(s: &str) -> Result<Self> { match s { "f" => Ok(VersionType::Final), "p" => Ok(VersionType::Patch), "b" => Ok(VersionType::Beta), "a" => Ok(VersionType::Alpha), "final" => Ok(VersionType::Final), "patch" => Ok(VersionType::Patch), "beta" => Ok(VersionType::Beta), "alpha" => Ok(VersionType::Alpha), _ => Err(VersionError::VersionTypeParsingFailed(s.to_string())), } } } impl From<Installation> for Version { fn from(item: Installation) -> Self { item.version_owned() } } pub fn fetch_matching_version<I: Iterator<Item = Version>>( versions: I, version_req: semver::VersionReq, release_type: VersionType, ) -> Result<Version> { versions .filter(|version| { let semver_version = if version.release_type() < &release_type { debug!( "version {} release type is smaller than specified type {:#}", version, release_type ); let mut semver_version = version.base().clone(); semver_version.pre = semver::Prerelease::new(&format!( "{}.{}", version.release_type, version.revision )) .unwrap(); semver_version } else { let b = version.base().clone(); debug!( "use base semver version {} of {} for comparison", b, version ); b }; let is_match = version_req.matches(&semver_version); if is_match { info!("version {} is a match", version); } else { info!("version {} is not a match", version); } is_match }) .max() .ok_or_else(|| VersionError::NoMatch(version_req.to_string())) } #[cfg(test)] mod tests { use super::*; macro_rules! invalid_version_input { ($($name:ident: $input:expr),*) => { $( #[test] fn $name() { let version_string = $input; let version = Version::from_str(version_string); assert!(version.is_err(), "invalid input returns None") } )* }; } macro_rules! valid_version_input { ($($name:ident: $input:expr),*) => { $( #[test] fn $name() { let version_string = $input; let version = Version::from_str(version_string); assert!(version.is_ok(), "valid input returns a version") } )* }; } invalid_version_input! { when_version_is_empty: "dsd", when_version_is_a_random_string: "sdfrersdfgsdf", when_version_is_a_short_version: "1.2", when_version_is_semver: "1.2.3", when_version_contains_unknown_release_type: "1.2.3g2" } valid_version_input! 
{ when_version_has_single_digits: "1.2.3f4", when_version_has_long_digits: "0.0.0f43", when_version_has_only_zero_digits: "0.0.0f0", when_version_has_optional_hash_project_settings_style: "2020.3.38f1 (8f5fde82e2dc)", when_version_has_optional_hash_unity_hub_style: "2020.3.38f1/8f5fde82e2dc" } #[test] fn parse_version_string_with_valid_input() { let version_string = "1.2.3f4"; let version = Version::from_str(version_string); assert!(version.is_ok(), "valid input returns a version") } #[test] fn splits_version_string_into_components() { let version_string = "1.2.3f4"; let version = Version::from_str(version_string).ok().unwrap(); assert!(version.base.major == 1, "parse correct major component"); assert!(version.base.minor == 2, "parse correct minor component"); assert!(version.base.patch == 3, "parse correct patch component"); assert_eq!(version.release_type, VersionType::Final); assert!(version.revision == 4, "parse correct revision component"); assert!(version.hash.is_none(), "parse correct optional hash") } #[test] fn splits_version_string_into_components_with_hash() { let version_string = "1.2.3f4 (abcdefghijkm)"; let version = Version::from_str(version_string).ok().unwrap(); assert!(version.base.major == 1, "parse correct major component"); assert!(version.base.minor == 2, "parse correct minor component"); assert!(version.base.patch == 3, "parse correct patch component"); assert_eq!(version.release_type, VersionType::Final); assert!(version.revision == 4, "parse correct revision component"); assert!(version.hash.unwrap() == "abcdefghijkm", "parse correct optional hash") } #[test] fn splits_version_string_into_components_with_hash_unity_hub_style() { let version_string = "1.2.3f4/abcdefghijkm"; let version = Version::from_str(version_string).ok().unwrap(); assert!(version.base.major == 1, "parse correct major component"); assert!(version.base.minor == 2, "parse correct minor component"); assert!(version.base.patch == 3, "parse correct patch component"); assert_eq!(version.release_type, VersionType::Final); assert!(version.revision == 4, "parse correct revision component"); assert!(version.hash.unwrap() == "abcdefghijkm", "parse correct optional hash") } #[test] fn orders_version_final_release_greater_than_patch() { let version_a = Version::from_str("1.2.3f4").ok().unwrap(); let version_b = Version::from_str("1.2.3p4").ok().unwrap(); assert_eq!(Ordering::Greater, version_a.cmp(&version_b)); } #[test] fn orders_version_patch_release_greater_than_beta() { let version_a = Version::from_str("1.2.3p4").ok().unwrap(); let version_b = Version::from_str("1.2.3b4").ok().unwrap(); assert_eq!(Ordering::Greater, version_a.cmp(&version_b)); } #[test] fn orders_version_final_release_greater_than_beta() { let version_a = Version::from_str("1.2.3f4").ok().unwrap(); let version_b = Version::from_str("1.2.3b4").ok().unwrap(); assert_eq!(Ordering::Greater, version_a.cmp(&version_b)); } #[test] fn orders_version_all_equak() { let version_a = Version::from_str("1.2.3f4").ok().unwrap(); let version_b = Version::from_str("1.2.3f4").ok().unwrap(); assert_eq!(Ordering::Equal, version_a.cmp(&version_b)); } #[test] fn orders_version_major_smaller() { let version_a = Version::from_str("1.2.3f4").ok().unwrap(); let version_b = Version::from_str("0.2.3f4").ok().unwrap(); assert_eq!(Ordering::Greater, version_a.cmp(&version_b)); } #[test] fn orders_version_minor_smaller() { let version_a = Version::from_str("1.2.3f4").ok().unwrap(); let version_b = Version::from_str("1.1.3f4").ok().unwrap(); 
assert_eq!(Ordering::Greater, version_a.cmp(&version_b)); } #[test] fn orders_version_patch_smaller() { let version_a = Version::from_str("1.2.3f4").ok().unwrap(); let version_b = Version::from_str("1.2.2f4").ok().unwrap(); assert_eq!(Ordering::Greater, version_a.cmp(&version_b)); } #[test] fn orders_version_revision_smaller() { let version_a = Version::from_str("1.2.3f4").ok().unwrap(); let version_b = Version::from_str("1.2.3f3").ok().unwrap(); assert_eq!(Ordering::Greater, version_a.cmp(&version_b)); } #[test] fn fetch_hash_for_known_version() { let version = Version::f(2017, 1, 0, 2); assert_eq!( version.version_hash().unwrap(), String::from("66e9e4bfc850") ); } #[test] fn compares_versions() { let version_a = Version::from_str("1.2.3f4").ok().unwrap(); let version_b = Version::from_str("1.2.3f4").ok().unwrap(); assert_eq!(version_a, version_b, "testing version equality"); let version_c = Version::from_str("1.2.3f4").ok().unwrap(); let version_d = Version::from_str("1.2.3f5").ok().unwrap(); assert_ne!(version_c, version_d, "testing version nonequality"); let version_c = Version::from_str("1.2.3f4").ok().unwrap(); let version_d = Version::from_str("1.2.3f4/1234567890ab").ok().unwrap(); assert_eq!(version_c, version_d, "testing version equality when one version has hash other not"); let version_c = Version::from_str("1.2.3f4/0987654321ab").ok().unwrap(); let version_d = Version::from_str("1.2.3f4/1234567890ab").ok().unwrap(); assert_ne!(version_c, version_d, "testing version equality when one version hash is different"); } #[cfg(unix)] #[test] fn reads_version_from_binary_file() { use std::io::Write; use tempfile::Builder; let mut test_file = Builder::new() .prefix("version_binary") .rand_bytes(5) .tempfile() .unwrap(); let version = "2018.2.1f2"; let version_hash = "dft74dsds844"; //Some known result patterns let test_value_1 = format!("Unity {}\n", version); let test_value_2 = format!("{}_{}\n", version, version_hash); let test_value_3 = format!("{} ({})\n", version, version_hash); let test_value_4 = format!("Mozilla/5.0 (MacIntel; ) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.94 Safari/537.36 Unity/{} (unity3d.com;\n", version); let test_value_5 = format!("Invalid serialized file version. File: \"%s\". Expected version: {}. 
Actual version: %s.\n", version); let test_value_6 = format!( "UnityPlayer/{} (UnityWebRequest/1.0, libcurl/7.52.0-DEV)\n", version ); let f = test_file.as_file_mut(); let random_bytes: Vec<u8> = (0..2048).map(|_| rand::random::<u8>()).collect(); f.write_all(&random_bytes).unwrap(); f.write_all(test_value_1.as_bytes()).unwrap(); f.write_all(&random_bytes).unwrap(); f.write_all(test_value_2.as_bytes()).unwrap(); f.write_all(&random_bytes).unwrap(); f.write_all(test_value_3.as_bytes()).unwrap(); f.write_all(&random_bytes).unwrap(); f.write_all(test_value_4.as_bytes()).unwrap(); f.write_all(&random_bytes).unwrap(); f.write_all(test_value_5.as_bytes()).unwrap(); f.write_all(&random_bytes).unwrap(); f.write_all(test_value_6.as_bytes()).unwrap(); f.write_all(&random_bytes).unwrap(); let v = Version::find_version_in_file(test_file.path()).unwrap(); assert_eq!(v, Version::f(2018, 2, 1, 2)); } #[cfg(unix)] #[test] fn fails_to_read_version_from_binary_file_if_verion_can_not_be_found() { use std::io::Write; use tempfile::Builder; let mut test_file = Builder::new() .prefix("version_binary") .rand_bytes(5) .tempfile() .unwrap(); let f = test_file.as_file_mut(); let random_bytes: Vec<u8> = (0..8000).map(|_| rand::random::<u8>()).collect(); f.write_all(&random_bytes).unwrap(); let v = Version::find_version_in_file(test_file.path()); assert!(v.is_err()); } #[test] fn fetch_hash_for_unknown_version_yields_none() { let version = Version::f(2080, 2, 0, 2); assert!(version.version_hash().is_err()); } proptest! { #[test] fn doesnt_crash(ref s in "\\PC*") { let _ = Version::from_str(s); } #[test] fn parses_all_valid_versions(ref s in r"[0-9]{1,4}\.[0-9]{1,4}\.[0-9]{1,4}[fpb][0-9]{1,4}") { Version::from_str(s).ok().unwrap(); } #[test] fn parses_version_back_to_original(major in 0u64..9999, minor in 0u64..9999, patch in 0u64..9999, revision in 0u64..9999 ) { let v1 = Version { base: semver::Version::new(major, minor, patch), revision, release_type: VersionType::Final, hash: None }; let v2 = Version::from_str(&format!("{:04}.{:04}.{:04}f{:04}", major, minor, patch, revision)).ok().unwrap(); prop_assert_eq!(v1, v2); } #[test] fn create_version_from_tuple(major in 0u64..9999, minor in 0u64..9999, patch in 0u64..9999, revision in 0u64..9999 ) { let v1 = Version { base: semver::Version::new(major, minor, patch), revision, release_type: VersionType::Final, hash: None }; let v2:Version = (major, minor, patch, revision).into(); prop_assert_eq!(v1, v2); } #[test] fn create_version_final_versions(major in 0u64..9999, minor in 0u64..9999, patch in 0u64..9999, revision in 0u64..9999 ) { let v1 = Version { base: semver::Version::new(major, minor, patch), revision, release_type: VersionType::Final, hash: None }; let v2:Version = Version::f(major, minor, patch, revision); prop_assert_eq!(v1, v2); } #[test] fn create_version_beta_versions(major in 0u64..9999, minor in 0u64..9999, patch in 0u64..9999, revision in 0u64..9999 ) { let v1 = Version { base: semver::Version::new(major, minor, patch), revision, release_type: VersionType::Beta, hash: None }; let v2:Version = Version::b(major, minor, patch, revision); prop_assert_eq!(v1, v2); } #[test] fn create_version_alpha_versions(major in 0u64..9999, minor in 0u64..9999, patch in 0u64..9999, revision in 0u64..9999 ) { let v1 = Version { base: semver::Version::new(major, minor, patch), revision, release_type: VersionType::Alpha, hash: None }; let v2:Version = Version::a(major, minor, patch, revision); prop_assert_eq!(v1, v2); } #[test] fn create_version_patch_versions(major in 
0u64..9999, minor in 0u64..9999, patch in 0u64..9999, revision in 0u64..9999 ) { let v1 = Version { base: semver::Version::new(major, minor, patch), revision, release_type: VersionType::Patch, hash: None }; let v2:Version = Version::p(major, minor, patch, revision); prop_assert_eq!(v1, v2); } } }
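A hedged usage sketch for the parser above, mirroring what the tests exercise: parse a Unity-style version string, inspect its components, and compare two versions.

use std::str::FromStr;

fn version_demo() {
    let v = Version::from_str("2019.4.1f2 (abcdef123456)").unwrap();
    assert_eq!(v.major(), 2019);
    assert_eq!(v.minor(), 4);
    assert_eq!(v.patch(), 1);
    assert_eq!(v.revision(), 2);
    assert_eq!(*v.release_type(), VersionType::Final);
    assert!(v.has_version_hash());

    // Ordering: base version first, then release type, then revision.
    assert!(Version::f(2020, 1, 0, 1) > v);
}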
true
3a3e6db3ec6b081684634f6365f9e1bffc59153d
Rust
thchittenden/rust-kernel
/src/interrupt/timer.rs
UTF-8
1,688
2.9375
3
[]
no_license
#![allow(dead_code)] // Constants. use util::asm; const TIMER_CHAN0: u16 = 0x0040; const TIMER_CHAN1: u16 = 0x0041; const TIMER_CHAN2: u16 = 0x0042; const TIMER_COMM: u16 = 0x0043; /// The timer frequency in hertz. const TIMER_FREQ: u32 = 1_193_182; /// The desired interrupt frequency in hertz. const INT_FREQ: u32 = 1_000; /// The timer divider. const TIMER_DIV: u32 = TIMER_FREQ / INT_FREQ; /// x86 timer commands. bitflags! { flags TimerCommand: u8 { const Binary = 0b0000_0000, const BCD = 0b0000_0001, const Mode0 = 0b0000_0000, // Interrupt on terminal count. const Mode1 = 0b0000_0010, // Hardware one shot. const Mode2 = 0b0000_0100, // Rate generator. const Mode3 = 0b0000_0110, // Square wave. const Mode4 = 0b0000_1000, // Software strobe. const Mode5 = 0b0000_1010, // Hardware strobe. const LoOnly = 0b0001_0000, const HiOnly = 0b0010_0000, const LoHi = 0b0011_0000, const Chan0 = 0b0000_0000, const Chan1 = 0b0100_0000, const Chan2 = 0b1000_0000, } } /// Initializes the timer and sets the default frequency. pub fn init_timer() { set_frequency(INT_FREQ); } /// Sets the frequency of the timer. /// /// # Panics /// /// Panics if the requested frequency cannot be set. pub fn set_frequency(req_freq: u32) { let div = TIMER_FREQ / req_freq; assert!(div <= u16::max_value() as u32); let div_lo = getbyte!(div, 0); let div_hi = getbyte!(div, 1); let command = (Binary | Mode3 | LoHi | Chan0).bits; asm::outb8(TIMER_COMM, command); asm::outb8(TIMER_CHAN0, div_lo); asm::outb8(TIMER_CHAN0, div_hi); }
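A quick check of the divider arithmetic above (`divider_for` is a hypothetical helper for illustration, not part of the module): with the 1,193,182 Hz input clock, a 1 kHz interrupt rate needs a divider of 1193 = 0x04A9, so channel 0 receives lo = 0xA9 and hi = 0x04.

fn divider_for(req_freq: u32) -> u32 {
    TIMER_FREQ / req_freq
}

// divider_for(1_000) == 1193 == 0x04A9
//   low byte  -> 0xA9
//   high byte -> 0x04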
true
991a1ed94869dde2f068b53d42c6ba8b34ee28c3
Rust
sirxyzzy/ilbm
/src/lib.rs
UTF-8
4,464
2.84375
3
[]
no_license
#[macro_use] extern crate log; pub mod iff; mod bytes; mod compression; mod read; use iff::ChunkId; use thiserror::Error; use std::path::Path; /// Global settings when reading image files pub struct ReadOptions { pub read_pixels: bool, pub page_scale: bool, } /// Main entry point pub fn read_from_file<P: AsRef<Path>>(file: P, options: ReadOptions) -> Result<IlbmImage> { read::read_file(file, options) } /// Custom errors for ilbm library #[derive(Error, Debug)] pub enum IlbmError { #[error("invalid header (expected {expected:?}, found {actual:?})")] InvalidHeader { expected: String, actual: String, }, #[error("invalid data: {0}")] InvalidData ( String ), #[error("File does not contain image data")] NoImage, #[error("No planes, possibly a color map with no image data")] NoPlanes, #[error("File does not contain image header (FORM.BMHD)")] NoHeader, #[error("Color map of map_size {map_size:?} has no entry for {index:?}")] NoMapEntry{ index: usize, map_size: usize}, #[error("Unexpected end of image data")] NoData, #[error("{0} not supported")] NotSupported(String), #[error("IO Error")] Io { #[from] source: std::io::Error }, } /// Standardize my result Errors pub type Result<T> = std::result::Result<T,IlbmError>; #[derive(Debug,Clone,Copy, PartialEq)] pub enum Masking { NoMask, HasMask, HasTransparentColor, Lasso } impl Default for Masking { fn default() -> Self { Masking::NoMask } } fn as_masking(v: u8) -> Masking { match v { 0 => Masking::NoMask, 1 => Masking::HasMask, 2 => Masking::HasTransparentColor, 3 => Masking::Lasso, x => { error!("Masking value of {} unsupported, mapping to None", x); Masking::NoMask } } } /// Display mode, aka ModeID is Amiga specific, and quite complex /// in terms of interpretation. However, our usage is pretty trivial // It comes from the CAMG chunk #[derive(Copy, Debug, Clone, Default)] pub struct DisplayMode (u32); impl DisplayMode { pub fn is_ham(&self) -> bool {self.0 & 0x800 != 0} pub fn is_halfbrite(&self) -> bool {self.0 & 0x80 != 0} pub fn new(mode: u32) -> DisplayMode { DisplayMode(mode) } pub fn ham() -> DisplayMode { DisplayMode(0x800) } } impl std::fmt::Display for DisplayMode { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> { if self.is_ham() { write!(f, "0x{:X} HAM", self.0) } else if self.is_halfbrite() { write!(f, "0x{:X} HALF", self.0) } else { write!(f, "0x{:X}", self.0) } } } #[derive(Copy, Debug, Clone, Default)] pub struct RgbValue (u8, u8, u8); /// This is an amalgam of information drawn from /// various chunks in the ILBM, mapped to more native /// types such as usize for u16, and enums for masking #[derive(Debug, Default)] pub struct IlbmImage { pub size: Size2D, pub map_size: usize, pub planes: usize, pub masking: Masking, pub compression: bool, pub display_mode: DisplayMode, pub dpi: Size2D, pub pixel_aspect: Size2D, pub transparent_color: usize, // Actually a color index pub page_size: Size2D, /// RGB data triples /// Left to right in row, then top to bottom /// so indexes look like y * width + x where /// y=0 is the top pub pixels: Vec<u8> } impl std::fmt::Display for IlbmImage { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> { let compressed = if self.compression { "Comp" } else { "" }; write!(f, "{} dpi:{} p:{} {} {:?} map:{} mode:{} aspect:{} trans:{} page:{}", self.size, self.dpi, self.planes, compressed, self.masking, self.map_size, self.display_mode, self.pixel_aspect, self.transparent_color, self.page_size) } } #[derive(Debug, Copy, Clone, 
Default)] pub struct Size2D (usize,usize); impl Size2D { pub fn width(&self) -> usize {self.0} pub fn height(&self) -> usize {self.1} } impl std::fmt::Display for Size2D { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> { write!(f, "{}x{}", self.width(), self.height()) } } #[derive(Debug, Clone)] struct ColorMap { colors: Vec<RgbValue> }
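A hedged usage sketch for the reader API above: the file name is made up, and only read_from_file, ReadOptions and IlbmImage are taken from the code; treat it as an illustration rather than the crate's documented example.

// Hypothetical usage of the ilbm crate above (file name is a placeholder).
use ilbm::{read_from_file, ReadOptions};

fn main() {
    let options = ReadOptions { read_pixels: true, page_scale: false };
    match read_from_file("example.iff", options) {
        Ok(image) => {
            // IlbmImage implements Display with size, planes, masking, etc.
            println!("{}", image);
            println!("{} RGB bytes decoded", image.pixels.len());
        }
        Err(err) => eprintln!("failed to read ILBM: {}", err),
    }
}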
true
98e4d999ddc132ac5e4863dcddad3bb61ae8a932
Rust
crlf0710/modern-web
/mweb/src/classical/lexer.rs
UTF-8
61,619
3.234375
3
[]
no_license
use crate::utils::U8SpanRef; use thiserror::Error; pub mod ascii_char { #[derive(Copy, Clone, PartialEq)] enum AsciiCharCategory { Alphabetic, Digit, Symbol, InlineWhitespace, LineFeedWhitespace, Invalid, } fn ascii_char_category(ch: u8) -> AsciiCharCategory { match ch { 0x09 | 0xC | b' ' => AsciiCharCategory::InlineWhitespace, 0xA | 0xD => AsciiCharCategory::LineFeedWhitespace, b'A'..=b'Z' | b'a'..=b'z' => AsciiCharCategory::Alphabetic, b'0'..=b'9' => AsciiCharCategory::Digit, 0x0..=0x8 | 0xB | 0xE..=0x1F | 0x7F..=0xFF => AsciiCharCategory::Invalid, _ => AsciiCharCategory::Symbol, } } #[allow(dead_code)] pub fn is_invalid_char(ch: u8) -> bool { ascii_char_category(ch) == AsciiCharCategory::Invalid } pub fn is_inline_whitespace_char(ch: u8) -> bool { let category = ascii_char_category(ch); category == AsciiCharCategory::InlineWhitespace } pub fn is_whitespace_char(ch: u8) -> bool { let category = ascii_char_category(ch); category == AsciiCharCategory::InlineWhitespace || category == AsciiCharCategory::LineFeedWhitespace } #[allow(dead_code)] pub fn is_alphanumeric_char(ch: u8) -> bool { let category = ascii_char_category(ch); category == AsciiCharCategory::Alphabetic || category == AsciiCharCategory::Digit } pub fn is_numeric_char(ch: u8) -> bool { let category = ascii_char_category(ch); category == AsciiCharCategory::Digit } pub fn is_id_start(ch: u8) -> bool { match ch { b'A'..=b'Z' | b'a'..=b'z' | b'_' => true, _ => false, } } pub fn is_id_continue(ch: u8) -> bool { match ch { b'A'..=b'Z' | b'a'..=b'z' | b'0'..=b'9' | b'_' => true, _ => false, } } pub fn is_punct_char(ch: u8) -> bool { let category = ascii_char_category(ch); category == AsciiCharCategory::Symbol } pub fn is_octal_digit(ch: u8) -> bool { match ch { b'0'..=b'7' => true, _ => false, } } pub fn is_hex_digit(ch: u8) -> bool { match ch { b'0'..=b'9' | b'A'..=b'F' | b'a'..=b'f' => true, _ => false, } } } pub mod ascii_str { use std::fmt::{self, Debug}; use thiserror::Error; #[repr(transparent)] #[derive(PartialEq)] pub struct AsciiStr(pub [u8]); impl AsciiStr { pub fn try_split_ending_substr(&self, bytes: &Self) -> (&Self, Option<&Self>) { if (self.0).ends_with(&bytes.0) { let pos = self.0.len() - bytes.0.len(); unsafe { std::mem::transmute((&(self.0)[..pos], Some(&(self.0)[pos..]))) } } else { (self, None) } } } #[derive(Error, Debug)] #[error("not 7-bit ascii string")] pub struct NotAsciiStrError; pub fn from_bytes(bytes: &[u8]) -> Result<&AsciiStr, NotAsciiStrError> { for &byte in bytes { if byte >= 0x80 { return Err(NotAsciiStrError); } } unsafe { Ok(std::mem::transmute(bytes)) } } impl<'x> Debug for &'x AsciiStr { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { let str = std::str::from_utf8(&self.0).map_err(|_| fmt::Error)?; write!(fmt, "{:?}", str).map_err(|_| fmt::Error)?; Ok(()) } } } use bitflags::bitflags; bitflags! 
{ pub struct LexModeSet : u8 { const NOTHING = 0; const COMMENT = 0x1; const LIMBO = 0x2; const MODULE_NAME = 0x4; const STRING_LITERAL = 0x8; const PASCAL_TEXT = 0x10; const TEX_TEXT = 0x20; const DEFINITION_TEXT = 0x40; const INLINE_PASCAL_TEXT = 0x80; } } impl LexModeSet { // workaround for https://github.com/bitflags/bitflags/issues/180 const fn const_or(self, other: LexModeSet) -> Self { LexModeSet::from_bits_truncate(self.bits() | other.bits()) } const fn contains_mode(&self, mode: LexMode) -> bool { (self.bits & mode.0) != 0 } } #[derive(Copy, Clone, PartialEq, Eq)] pub struct LexMode(u8); use std::fmt; impl fmt::Debug for LexMode { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let mode_text = match *self { LexMode::LIMBO => "Limbo", LexMode::TEX_TEXT => "TeXText", LexMode::MODULE_NAME => "ModuleName", LexMode::PASCAL_TEXT => "PascalText", LexMode::COMMENT => "Comment", LexMode::STRING_LITERAL => "StrLiteral", LexMode::DEFINITION_TEXT => "DefinitionText", LexMode::INLINE_PASCAL_TEXT => "InlinePascalText", _ => unreachable!(), }; write!(f, "{}", mode_text).map_err(|_| fmt::Error)?; Ok(()) } } impl LexMode { pub const LIMBO: LexMode = LexMode(LexModeSet::LIMBO.bits); pub const TEX_TEXT: LexMode = LexMode(LexModeSet::TEX_TEXT.bits); pub const MODULE_NAME: LexMode = LexMode(LexModeSet::MODULE_NAME.bits); pub const PASCAL_TEXT: LexMode = LexMode(LexModeSet::PASCAL_TEXT.bits); pub const COMMENT: LexMode = LexMode(LexModeSet::COMMENT.bits); pub const STRING_LITERAL: LexMode = LexMode(LexModeSet::STRING_LITERAL.bits); pub const DEFINITION_TEXT: LexMode = LexMode(LexModeSet::DEFINITION_TEXT.bits); pub const INLINE_PASCAL_TEXT: LexMode = LexMode(LexModeSet::INLINE_PASCAL_TEXT.bits); } pub mod control_code { use super::token::BoxedTokenList; use super::LexModeSet; #[derive(Copy, Clone)] pub enum SpecialHandling { None, GroupTitle, ModuleName, MacroDefinition, FormatDefinition, OctalConst, HexConst, ControlTextUpToAtGT, WarnAndIgnore, // occurred in xetex.web:9057 } #[derive(Copy, Clone, PartialEq, Debug)] pub enum ControlCodeKind { EscapedAt, DefineModule, DefineStarredModule, DefineMacro, DefineFormat, DefineProgram, ModuleName, OctalConst, HexConst, StringPoolChecksum, MetaCommentBegin, MetaCommentEnd, ProgramAdjacent, ForceIndex, ForceIndexMono, ForceIndexStyle9, ForceHBox, ForceVerbatim, ForceEOL, UnderlineFlag, NoUnderlineFlag, FormatThinSpace, FormatLineBreak, FormatSuggestLineBreak, FormatLineBreakLarge, FormatNoLineBreak, FormatInvisibleSemicolon, HiddenEndOfModuleName, Ignored, } #[derive(Debug, PartialEq)] pub struct ControlCode<'x> { pub kind: ControlCodeKind, pub param: Option<BoxedTokenList<'x>>, } pub struct ControlCodeInfoRecord { pub selector: &'static [u8], pub kind: ControlCodeKind, pub special_handling: SpecialHandling, pub terminating_modes: LexModeSet, pub appliable_modes: LexModeSet, } pub const CONTROL_CODE_DATA: &'static [ControlCodeInfoRecord] = &[ ControlCodeInfoRecord { selector: b"@", kind: ControlCodeKind::EscapedAt, special_handling: SpecialHandling::None, terminating_modes: LexModeSet::NOTHING, appliable_modes: LexModeSet::COMMENT .const_or(LexModeSet::LIMBO) .const_or(LexModeSet::MODULE_NAME) .const_or(LexModeSet::PASCAL_TEXT) .const_or(LexModeSet::STRING_LITERAL) .const_or(LexModeSet::TEX_TEXT) .const_or(LexModeSet::DEFINITION_TEXT) .const_or(LexModeSet::INLINE_PASCAL_TEXT), }, ControlCodeInfoRecord { selector: b" \t\r\n", kind: ControlCodeKind::DefineModule, special_handling: SpecialHandling::None, terminating_modes: LexModeSet::LIMBO 
.const_or(LexModeSet::PASCAL_TEXT) .const_or(LexModeSet::TEX_TEXT) .const_or(LexModeSet::DEFINITION_TEXT) .const_or(LexModeSet::INLINE_PASCAL_TEXT), appliable_modes: LexModeSet::LIMBO .const_or(LexModeSet::PASCAL_TEXT) .const_or(LexModeSet::TEX_TEXT) .const_or(LexModeSet::DEFINITION_TEXT) .const_or(LexModeSet::INLINE_PASCAL_TEXT), }, ControlCodeInfoRecord { selector: b"*", kind: ControlCodeKind::DefineStarredModule, special_handling: SpecialHandling::GroupTitle, terminating_modes: LexModeSet::LIMBO .const_or(LexModeSet::PASCAL_TEXT) .const_or(LexModeSet::TEX_TEXT) .const_or(LexModeSet::DEFINITION_TEXT), appliable_modes: LexModeSet::LIMBO .const_or(LexModeSet::PASCAL_TEXT) .const_or(LexModeSet::TEX_TEXT) .const_or(LexModeSet::DEFINITION_TEXT), }, ControlCodeInfoRecord { selector: b"dD", kind: ControlCodeKind::DefineMacro, special_handling: SpecialHandling::MacroDefinition, terminating_modes: LexModeSet::PASCAL_TEXT .const_or(LexModeSet::TEX_TEXT) .const_or(LexModeSet::DEFINITION_TEXT), appliable_modes: LexModeSet::PASCAL_TEXT .const_or(LexModeSet::TEX_TEXT) .const_or(LexModeSet::DEFINITION_TEXT), }, ControlCodeInfoRecord { selector: b"fF", kind: ControlCodeKind::DefineFormat, special_handling: SpecialHandling::FormatDefinition, terminating_modes: LexModeSet::PASCAL_TEXT .const_or(LexModeSet::TEX_TEXT) .const_or(LexModeSet::DEFINITION_TEXT), appliable_modes: LexModeSet::PASCAL_TEXT .const_or(LexModeSet::TEX_TEXT) .const_or(LexModeSet::DEFINITION_TEXT), }, ControlCodeInfoRecord { selector: b"pP", kind: ControlCodeKind::DefineProgram, special_handling: SpecialHandling::None, terminating_modes: LexModeSet::PASCAL_TEXT .const_or(LexModeSet::TEX_TEXT) .const_or(LexModeSet::DEFINITION_TEXT), appliable_modes: LexModeSet::PASCAL_TEXT .const_or(LexModeSet::TEX_TEXT) .const_or(LexModeSet::DEFINITION_TEXT), }, ControlCodeInfoRecord { selector: b"<", kind: ControlCodeKind::ModuleName, special_handling: SpecialHandling::ModuleName, terminating_modes: LexModeSet::TEX_TEXT.const_or(LexModeSet::DEFINITION_TEXT), appliable_modes: LexModeSet::PASCAL_TEXT .const_or(LexModeSet::DEFINITION_TEXT) .const_or(LexModeSet::TEX_TEXT), }, ControlCodeInfoRecord { selector: b"\'", kind: ControlCodeKind::OctalConst, special_handling: SpecialHandling::OctalConst, terminating_modes: LexModeSet::NOTHING, appliable_modes: LexModeSet::PASCAL_TEXT .const_or(LexModeSet::DEFINITION_TEXT) .const_or(LexModeSet::TEX_TEXT) .const_or(LexModeSet::INLINE_PASCAL_TEXT), }, ControlCodeInfoRecord { selector: b"\"", kind: ControlCodeKind::HexConst, special_handling: SpecialHandling::HexConst, terminating_modes: LexModeSet::NOTHING, appliable_modes: LexModeSet::PASCAL_TEXT .const_or(LexModeSet::DEFINITION_TEXT) .const_or(LexModeSet::TEX_TEXT) .const_or(LexModeSet::INLINE_PASCAL_TEXT) .const_or(LexModeSet::COMMENT /*xetex.web:8641*/), }, ControlCodeInfoRecord { selector: b"$", kind: ControlCodeKind::StringPoolChecksum, special_handling: SpecialHandling::None, terminating_modes: LexModeSet::NOTHING, appliable_modes: LexModeSet::PASCAL_TEXT .const_or(LexModeSet::DEFINITION_TEXT) .const_or(LexModeSet::INLINE_PASCAL_TEXT), }, ControlCodeInfoRecord { selector: b"{", kind: ControlCodeKind::MetaCommentBegin, special_handling: SpecialHandling::None, terminating_modes: LexModeSet::NOTHING, appliable_modes: LexModeSet::PASCAL_TEXT .const_or(LexModeSet::DEFINITION_TEXT) .const_or(LexModeSet::INLINE_PASCAL_TEXT), }, ControlCodeInfoRecord { selector: b"}", kind: ControlCodeKind::MetaCommentEnd, special_handling: SpecialHandling::None, terminating_modes: 
LexModeSet::NOTHING, appliable_modes: LexModeSet::PASCAL_TEXT .const_or(LexModeSet::DEFINITION_TEXT) .const_or(LexModeSet::INLINE_PASCAL_TEXT), }, ControlCodeInfoRecord { selector: b"&", kind: ControlCodeKind::ProgramAdjacent, special_handling: SpecialHandling::None, terminating_modes: LexModeSet::NOTHING, appliable_modes: LexModeSet::PASCAL_TEXT .const_or(LexModeSet::DEFINITION_TEXT) .const_or(LexModeSet::INLINE_PASCAL_TEXT), }, ControlCodeInfoRecord { selector: b"^", kind: ControlCodeKind::ForceIndex, special_handling: SpecialHandling::ControlTextUpToAtGT, terminating_modes: LexModeSet::NOTHING, appliable_modes: LexModeSet::PASCAL_TEXT .const_or(LexModeSet::DEFINITION_TEXT) .const_or(LexModeSet::TEX_TEXT) .const_or(LexModeSet::INLINE_PASCAL_TEXT), }, ControlCodeInfoRecord { selector: b".", kind: ControlCodeKind::ForceIndexMono, special_handling: SpecialHandling::ControlTextUpToAtGT, terminating_modes: LexModeSet::NOTHING, appliable_modes: LexModeSet::PASCAL_TEXT .const_or(LexModeSet::DEFINITION_TEXT) .const_or(LexModeSet::TEX_TEXT) .const_or(LexModeSet::INLINE_PASCAL_TEXT), }, ControlCodeInfoRecord { selector: b":", kind: ControlCodeKind::ForceIndexStyle9, special_handling: SpecialHandling::ControlTextUpToAtGT, terminating_modes: LexModeSet::NOTHING, appliable_modes: LexModeSet::PASCAL_TEXT .const_or(LexModeSet::DEFINITION_TEXT) .const_or(LexModeSet::TEX_TEXT) .const_or(LexModeSet::INLINE_PASCAL_TEXT), }, ControlCodeInfoRecord { selector: b"t", kind: ControlCodeKind::ForceHBox, special_handling: SpecialHandling::ControlTextUpToAtGT, terminating_modes: LexModeSet::NOTHING, appliable_modes: LexModeSet::PASCAL_TEXT .const_or(LexModeSet::DEFINITION_TEXT) .const_or(LexModeSet::INLINE_PASCAL_TEXT), }, ControlCodeInfoRecord { selector: b"=", kind: ControlCodeKind::ForceVerbatim, special_handling: SpecialHandling::ControlTextUpToAtGT, terminating_modes: LexModeSet::NOTHING, appliable_modes: LexModeSet::PASCAL_TEXT .const_or(LexModeSet::DEFINITION_TEXT) .const_or(LexModeSet::INLINE_PASCAL_TEXT), }, ControlCodeInfoRecord { selector: b"\\", kind: ControlCodeKind::ForceEOL, special_handling: SpecialHandling::None, terminating_modes: LexModeSet::NOTHING, appliable_modes: LexModeSet::PASCAL_TEXT .const_or(LexModeSet::DEFINITION_TEXT) .const_or(LexModeSet::INLINE_PASCAL_TEXT), }, ControlCodeInfoRecord { selector: b"!", kind: ControlCodeKind::UnderlineFlag, special_handling: SpecialHandling::None, terminating_modes: LexModeSet::NOTHING, appliable_modes: LexModeSet::PASCAL_TEXT .const_or(LexModeSet::DEFINITION_TEXT) .const_or(LexModeSet::TEX_TEXT) .const_or(LexModeSet::INLINE_PASCAL_TEXT), }, ControlCodeInfoRecord { selector: b"?", kind: ControlCodeKind::NoUnderlineFlag, special_handling: SpecialHandling::None, terminating_modes: LexModeSet::NOTHING, appliable_modes: LexModeSet::PASCAL_TEXT .const_or(LexModeSet::DEFINITION_TEXT) .const_or(LexModeSet::TEX_TEXT) .const_or(LexModeSet::INLINE_PASCAL_TEXT), }, ControlCodeInfoRecord { selector: b",", kind: ControlCodeKind::FormatThinSpace, special_handling: SpecialHandling::None, terminating_modes: LexModeSet::NOTHING, appliable_modes: LexModeSet::PASCAL_TEXT .const_or(LexModeSet::DEFINITION_TEXT) .const_or(LexModeSet::INLINE_PASCAL_TEXT), }, ControlCodeInfoRecord { selector: b"/", kind: ControlCodeKind::FormatLineBreak, special_handling: SpecialHandling::None, terminating_modes: LexModeSet::NOTHING, appliable_modes: LexModeSet::PASCAL_TEXT .const_or(LexModeSet::DEFINITION_TEXT) .const_or(LexModeSet::INLINE_PASCAL_TEXT), }, ControlCodeInfoRecord { selector: 
b"|", kind: ControlCodeKind::FormatSuggestLineBreak, special_handling: SpecialHandling::None, terminating_modes: LexModeSet::NOTHING, appliable_modes: LexModeSet::PASCAL_TEXT .const_or(LexModeSet::DEFINITION_TEXT) .const_or(LexModeSet::INLINE_PASCAL_TEXT), }, ControlCodeInfoRecord { selector: b"#", kind: ControlCodeKind::FormatLineBreakLarge, special_handling: SpecialHandling::None, terminating_modes: LexModeSet::NOTHING, appliable_modes: LexModeSet::PASCAL_TEXT .const_or(LexModeSet::DEFINITION_TEXT) .const_or(LexModeSet::INLINE_PASCAL_TEXT), }, ControlCodeInfoRecord { selector: b"+", kind: ControlCodeKind::FormatNoLineBreak, special_handling: SpecialHandling::None, terminating_modes: LexModeSet::NOTHING, appliable_modes: LexModeSet::PASCAL_TEXT .const_or(LexModeSet::DEFINITION_TEXT) .const_or(LexModeSet::INLINE_PASCAL_TEXT), }, ControlCodeInfoRecord { selector: b";", kind: ControlCodeKind::FormatInvisibleSemicolon, special_handling: SpecialHandling::None, terminating_modes: LexModeSet::NOTHING, appliable_modes: LexModeSet::PASCAL_TEXT .const_or(LexModeSet::DEFINITION_TEXT) .const_or(LexModeSet::INLINE_PASCAL_TEXT), }, ControlCodeInfoRecord { selector: b"z", kind: ControlCodeKind::Ignored, special_handling: SpecialHandling::WarnAndIgnore, terminating_modes: LexModeSet::NOTHING, appliable_modes: LexModeSet::PASCAL_TEXT, }, ]; pub fn get_control_code_info_record_for_selector( selector: u8, ) -> Option<&'static ControlCodeInfoRecord> { use once_cell::sync::Lazy; static CONTROL_CODE_TABLE: Lazy<[Option<&'static ControlCodeInfoRecord>; 256]> = Lazy::new(|| { let mut table = [None; 256]; for item in CONTROL_CODE_DATA.iter() { for &ch in item.selector.iter() { assert!(table[ch as usize].is_none()); table[ch as usize] = Some(item); } } table }); CONTROL_CODE_TABLE[selector as usize] } } pub mod operator { #[derive(Clone, PartialEq, Debug)] pub enum Operator { Plus, Subtract, Dereference, Equal, NotEqual, GreaterThan, LessThan, GreaterEq, LessEq, Multiply, Divide, Assign, } } pub mod punctuation { use super::operator::Operator; #[derive(Clone, PartialEq, Debug)] pub enum Punctuation { Op(Operator), LParen, RParen, LBracket, RBracket, RangeUntil, WithType, ArgumentSeparator, EndOfStatement, DotOrEndOfProgram, DefineAs, Dollar, Backslash, /*xetex.web:24446*/ } pub struct PunctuationInfo { pub literal: &'static [u8], pub kind: Punctuation, } pub const PUNCTUATION_TABLE: &[PunctuationInfo] = &[ PunctuationInfo { literal: b"..", kind: Punctuation::RangeUntil, }, PunctuationInfo { literal: b":=", kind: Punctuation::Op(Operator::Assign), }, PunctuationInfo { literal: b"<>", kind: Punctuation::Op(Operator::NotEqual), }, PunctuationInfo { literal: b"==", kind: Punctuation::DefineAs, }, PunctuationInfo { literal: b">=", kind: Punctuation::Op(Operator::GreaterEq), }, PunctuationInfo { literal: b"<=", kind: Punctuation::Op(Operator::LessEq), }, PunctuationInfo { literal: b">", kind: Punctuation::Op(Operator::GreaterThan), }, PunctuationInfo { literal: b"<", kind: Punctuation::Op(Operator::LessThan), }, PunctuationInfo { literal: b":", kind: Punctuation::WithType, }, PunctuationInfo { literal: b"^", kind: Punctuation::Op(Operator::Dereference), }, PunctuationInfo { literal: b"(", kind: Punctuation::LParen, }, PunctuationInfo { literal: b")", kind: Punctuation::RParen, }, PunctuationInfo { literal: b"[", kind: Punctuation::LBracket, }, PunctuationInfo { literal: b"]", kind: Punctuation::RBracket, }, PunctuationInfo { literal: b",", kind: Punctuation::ArgumentSeparator, }, PunctuationInfo { literal: b";", kind: 
Punctuation::EndOfStatement, }, PunctuationInfo { literal: b".", kind: Punctuation::DotOrEndOfProgram, }, PunctuationInfo { literal: b"$", kind: Punctuation::Dollar, }, PunctuationInfo { literal: b"=", kind: Punctuation::Op(Operator::Equal), }, PunctuationInfo { literal: b"+", kind: Punctuation::Op(Operator::Plus), }, PunctuationInfo { literal: b"-", kind: Punctuation::Op(Operator::Subtract), }, PunctuationInfo { literal: b"*", kind: Punctuation::Op(Operator::Multiply), }, PunctuationInfo { literal: b"/", kind: Punctuation::Op(Operator::Divide), }, PunctuationInfo { literal: b"\\", kind: Punctuation::Backslash, }, ]; } pub mod literal { use super::ascii_str::AsciiStr; use super::token::BoxedTokenList; #[derive(Debug, PartialEq)] pub enum Literal<'x> { IntegerU32(u32), RealF64(f64), StringLiteral(&'x AsciiStr), PreprocessedStringLiteral(BoxedTokenList<'x>), } } #[derive(Error, Debug)] pub enum LexError { #[error("Unexpected EOF reached before proper finish")] UnexpectedEOF, #[error("Not 7-bit ascii byte occurred")] Not7BitAscii(#[from] ascii_str::NotAsciiStrError), #[error("Invalid control code")] InvalidControlCodeChar { control_code: u8, pos: usize }, #[error("Unrecognized symbol starting with '{0}'")] UnrecognizedPunctuation(char), #[error("Control code used where it's not usable")] ControlCodeInNonApplicableMode, #[error("Control code character '{0}' used where it's not usable")] ControlCodeCharInNonApplicableMode(char), #[error("Integer literal overflow: {0} with radix {1}")] IntegerLiteralOverflow(String, u32), #[error("Float literal lex error: {0}")] FloatLiteralLexError(String), #[error("Numeric literal not properly finished")] NumericLiteralNotProperlyFinished, #[error("Control text not properly finished with @>")] ControlTextNotProperlyFinished, #[error("Group title not properly finished with .")] GroupTitleNotProperlyFinished, #[error("Inline program fragment not properly finished")] InlineProgFragmentNotProperlyFinished, #[error("Comment not properly finished with }}")] CommentNotProperlyFinished, #[error("Comment nesting too deep")] CommentNestingTooDeep, #[error("String literal not properly finished with \'")] StringLiteralNotProperlyFinished, #[error("String literal not properly finished with \"")] PreprocessedStringLiteralNotProperlyFinished, #[error("Any lex error!")] AnyLexError, } pub enum LexControlFlowNewItem { Module, Definition, ProgramText, } pub enum LexControlFlow<'x> { Continue(U8SpanRef<'x>), Finish(U8SpanRef<'x>), StartNew(LexControlFlowNewItem, LexMode, U8SpanRef<'x>), ModuleNameInlineProgAbort(U8SpanRef<'x>), } pub mod token { use super::ascii_char::{is_hex_digit, is_inline_whitespace_char, is_octal_digit}; use super::ascii_str::{self, AsciiStr}; use super::control_code::ControlCode; use super::literal::Literal; use super::punctuation::Punctuation; use super::{LexControlFlow, LexControlFlowNewItem, LexError, LexMode}; use crate::utils::U8SpanRef; #[derive(Debug, PartialEq)] pub enum Token<'x> { CtrlCode(ControlCode<'x>), WS, MacroParamMark, IdentOrKw(&'x AsciiStr), Punct(Punctuation), Literal(Literal<'x>), Comment(BoxedTokenList<'x>), InlineProgramFragment(BoxedTokenList<'x>), TextFragment(&'x AsciiStr), ModuleNameInlineProgAbort, } pub type TokenList<'x> = Vec<Token<'x>>; pub type BoxedTokenList<'x> = Box<Vec<Token<'x>>>; fn continue_or_finish(l: U8SpanRef<'_>) -> LexControlFlow<'_> { if l.is_empty() { LexControlFlow::Finish(l) } else { LexControlFlow::Continue(l) } } fn switch_mode<'x>(control_code: &ControlCode<'x>, l: U8SpanRef<'x>) -> LexControlFlow<'x> { 
use super::control_code::ControlCodeKind; match control_code.kind { ControlCodeKind::DefineModule | ControlCodeKind::DefineStarredModule => { LexControlFlow::StartNew(LexControlFlowNewItem::Module, LexMode::TEX_TEXT, l) } ControlCodeKind::DefineMacro | ControlCodeKind::DefineFormat => { LexControlFlow::StartNew( LexControlFlowNewItem::Definition, LexMode::DEFINITION_TEXT, l, ) } ControlCodeKind::DefineProgram | ControlCodeKind::ModuleName => { LexControlFlow::StartNew( LexControlFlowNewItem::ProgramText, LexMode::PASCAL_TEXT, l, ) } _ => unreachable! {}, } } pub const CONTROL_CODE_PREFIX: u8 = b'@'; pub const INLINE_PROGRAM_FRAGMENT: u8 = b'|'; pub const ESCAPE_CHARACTER: u8 = b'\\'; pub const START_OF_COMMENT: u8 = b'{'; pub const END_OF_COMMENT: u8 = b'}'; pub const LINE_FEED: u8 = b'\n'; pub const CARRIAGE_RETURN: u8 = b'\r'; pub const SIMPLE_ESCAPED_ATAIL: &'static [u8] = b"@@"; pub const END_OF_CONTROL_TEXT: &'static [u8] = b"@>"; pub const START_OF_MACRO_DEFINITION: &'static [u8] = b"@d"; pub const START_OF_FORMAT_DEFINITION: &'static [u8] = b"@f"; pub const MODULE_NAME_INLINE_PROGFRAG_ABORT: &'static [u8] = b"...@>"; pub fn lex_u32_literal_with_radix(l: &[u8], radix: usize) -> Result<Literal, LexError> { use std::str::from_utf8; let str = from_utf8(l).unwrap(); if let Ok(v) = u32::from_str_radix(str, radix as u32) { Ok(Literal::IntegerU32(v)) } else { Err(LexError::IntegerLiteralOverflow( str.to_owned(), radix as u32, )) } } pub fn lex_f64_literal(l: &[u8]) -> Result<Literal, LexError> { use std::str::{from_utf8, FromStr}; let str = from_utf8(l).unwrap(); if let Ok(v) = f64::from_str(str) { Ok(Literal::RealF64(v)) } else { Err(LexError::FloatLiteralLexError(str.to_owned())) } } pub fn lex_numeric_literal(l: U8SpanRef<'_>) -> Result<(Literal, U8SpanRef<'_>), LexError> { use super::ascii_char::is_numeric_char; let count_int = l .bytes() .iter() .copied() .take_while(|&ch| is_numeric_char(ch)) .count(); let has_dot = count_int > 0 && l.bytes()[count_int..].starts_with(b"."); let count_fraction = if has_dot { l.bytes()[count_int + 1..] 
.iter() .copied() .take_while(|&ch| is_numeric_char(ch)) .count() } else { 0 }; if has_dot && count_fraction > 0 { let (numeric, rest) = l.split_at(count_int + 1 + count_fraction); let literal = lex_f64_literal(numeric)?; Ok((literal, rest)) } else if count_int > 0 { let (numeric, rest) = l.split_at(count_int); let literal = lex_u32_literal_with_radix(numeric, 10)?; Ok((literal, rest)) } else { Err(LexError::NumericLiteralNotProperlyFinished) } } fn lex_maybe_whitespace<'x>(l: U8SpanRef<'x>) -> (&'x [u8], U8SpanRef<'x>) { use super::ascii_char::is_whitespace_char; let pos = l .bytes() .iter() .copied() .take_while(|&ch| is_whitespace_char(ch)) .count(); l.split_at(pos) } fn lex_identifier<'x>(l: U8SpanRef<'x>) -> (Option<&'x [u8]>, U8SpanRef<'x>) { use super::ascii_char::{is_id_continue, is_id_start}; let pos = l .bytes() .iter() .copied() .enumerate() .take_while(|&(n, ch)| { if n == 0 { is_id_start(ch) } else { is_id_continue(ch) } }) .count(); if pos == 0 { (None, l) } else { let (head, rest) = l.split_at(pos); (Some(head), rest) } } fn lex_punct<'x>(l: U8SpanRef<'x>) -> (Option<Punctuation>, U8SpanRef<'x>) { use super::punctuation::PUNCTUATION_TABLE; for table_item in PUNCTUATION_TABLE { if l.starts_with(table_item.literal) { let literal_len = table_item.literal.len(); let (_, rest) = l.split_at(literal_len); return (Some(table_item.kind.clone()), rest); } } (None, l) } fn lex_control_code_rest<'x>( l: U8SpanRef<'x>, mode: LexMode, ) -> Result<(ControlCode<'x>, U8SpanRef<'x>, bool), LexError> { use super::control_code::get_control_code_info_record_for_selector; use super::control_code::SpecialHandling; let selector = l.front_cloned().ok_or_else(|| LexError::UnexpectedEOF)?; let control_code_info = get_control_code_info_record_for_selector(selector).ok_or_else(|| { LexError::InvalidControlCodeChar { control_code: selector, pos: l.pos(), } })?; if !control_code_info.appliable_modes.contains_mode(mode) { return Err(LexError::ControlCodeCharInNonApplicableMode( selector as char, )); } let is_terminator = control_code_info.terminating_modes.contains_mode(mode); let rest = l.range(1..); let (control_code, rest) = match control_code_info.special_handling { SpecialHandling::None => { let control_code = ControlCode { kind: control_code_info.kind, param: None, }; (control_code, rest) } SpecialHandling::GroupTitle => { let group_title_start = rest .bytes() .iter() .take_while(|&&ch| is_inline_whitespace_char(ch)) .count(); let group_title_end = memchr::memchr2(b'.', b'\n', rest.bytes()).unwrap_or(rest.len()); let control_text_end; if !rest.range(group_title_end..).starts_with(b".") { eprintln!( "WARN: module group title not finished with dot character, continuing." ); control_text_end = group_title_end; //return Err(LexError::GroupTitleNotProperlyFinished); } else { control_text_end = group_title_end + 1; } let group_title_text = ascii_str::from_bytes(&rest.bytes()[group_title_start..group_title_end])?; let control_code = ControlCode { kind: control_code_info.kind, param: Some(Box::new(vec![Token::TextFragment(group_title_text)])), }; (control_code, rest.range(control_text_end..)) } SpecialHandling::ModuleName => { let mode = LexMode::MODULE_NAME; let mut data = rest; let mut tokens = vec![]; 'module_name_loop: loop { use super::control_code::ControlCodeKind; let (token, control_flow) = lex_token(data, mode)?; match control_flow { LexControlFlow::Continue(rest_data) => { data = rest_data; match token { Token::CtrlCode(ControlCode { kind: ControlCodeKind::HiddenEndOfModuleName, .. 
}) => { break 'module_name_loop; } _ => { tokens.push(token); } } } LexControlFlow::Finish(..) => { return Err(LexError::UnexpectedEOF); } LexControlFlow::StartNew(..) => { return Err(LexError::ControlCodeInNonApplicableMode); } LexControlFlow::ModuleNameInlineProgAbort(..) => { return Err(LexError::ControlCodeInNonApplicableMode); } } } let control_code = ControlCode { kind: control_code_info.kind, param: Some(Box::new(tokens)), }; (control_code, data) } SpecialHandling::FormatDefinition | SpecialHandling::MacroDefinition => { let mode = LexMode::DEFINITION_TEXT; let mut data = rest; let mut tokens = vec![]; 'definition_loop: loop { if data.starts_with(START_OF_MACRO_DEFINITION) || data.starts_with(START_OF_FORMAT_DEFINITION) { break 'definition_loop; } let (token, control_flow) = lex_token(data, mode)?; match control_flow { LexControlFlow::Continue(rest_data) => { data = rest_data; tokens.push(token); } LexControlFlow::Finish(rest_data) => { data = rest_data; tokens.push(token); break 'definition_loop; } LexControlFlow::StartNew(..) => { break 'definition_loop; } LexControlFlow::ModuleNameInlineProgAbort(..) => { return Err(LexError::ControlCodeInNonApplicableMode); } } } let control_code = ControlCode { kind: control_code_info.kind, param: Some(Box::new(tokens)), }; (control_code, data) } SpecialHandling::OctalConst => { let octal_digit_count = rest .bytes() .iter() .copied() .take_while(|&ch| is_octal_digit(ch)) .count(); let (octal_digits, rest) = rest.split_at(octal_digit_count); let literal = lex_u32_literal_with_radix(octal_digits, 8)?; let control_code = ControlCode { kind: control_code_info.kind, param: Some(Box::new(vec![Token::Literal(literal)])), }; (control_code, rest) } SpecialHandling::HexConst => { let hex_digit_count = rest .bytes() .iter() .copied() .take_while(|&ch| is_hex_digit(ch)) .count(); let (hex_digits, rest) = rest.split_at(hex_digit_count); let literal = lex_u32_literal_with_radix(hex_digits, 16)?; let control_code = ControlCode { kind: control_code_info.kind, param: Some(Box::new(vec![Token::Literal(literal)])), }; (control_code, rest) } SpecialHandling::ControlTextUpToAtGT => { let control_text_len = memchr::memchr3( CONTROL_CODE_PREFIX, LINE_FEED, CARRIAGE_RETURN, rest.bytes(), ) .unwrap_or(rest.len()); if !rest .range(control_text_len..) 
.starts_with(END_OF_CONTROL_TEXT) { return Err(LexError::ControlTextNotProperlyFinished); } let control_code = ControlCode { kind: control_code_info.kind, param: Some(Box::new(vec![Token::TextFragment(ascii_str::from_bytes( &rest.bytes()[..control_text_len], )?)])), }; ( control_code, rest.range(control_text_len + END_OF_CONTROL_TEXT.len()..), ) } SpecialHandling::WarnAndIgnore => { use super::control_code::ControlCodeKind; eprintln!( "WARN: %{} occurred in the web file, ignoring.", selector as char ); let control_code = ControlCode { kind: ControlCodeKind::Ignored, param: None, }; (control_code, rest) } }; Ok((control_code, rest, is_terminator)) } pub fn lex_comment_rest<'x>( l: U8SpanRef<'x>, ) -> Result<(Token<'x>, LexControlFlow<'x>), LexError> { let mode = LexMode::COMMENT; let mut l = l; let mut tokens = vec![]; let mut level = 1usize; 'comment_loop: loop { if l.starts_with(b"\\") { let (head, rest) = l.split_at(2); if l.len() >= 2 { let escaped_fragment = Token::TextFragment(ascii_str::from_bytes(head)?); tokens.push(escaped_fragment); l = rest; } else { return Err(LexError::CommentNotProperlyFinished); } } else if l.starts_with(b"{") { let (head, rest) = l.split_at(1); let fragment = Token::TextFragment(ascii_str::from_bytes(head)?); tokens.push(fragment); level = level .checked_add(1) .ok_or(LexError::CommentNestingTooDeep)?; l = rest; } else if l.starts_with(b"}") { let (head, rest) = l.split_at(1); level -= 1; if level != 0 { let fragment = Token::TextFragment(ascii_str::from_bytes(head)?); tokens.push(fragment); } l = rest; if level == 0 { break 'comment_loop; } } else { let (token, control_flow) = lex_token(l, mode)?; match control_flow { LexControlFlow::Continue(rest_data) => { l = rest_data; tokens.push(token); } LexControlFlow::Finish(..) => { return Err(LexError::UnexpectedEOF); } LexControlFlow::StartNew(..) => { return Err(LexError::ControlCodeInNonApplicableMode); } LexControlFlow::ModuleNameInlineProgAbort(..) 
=> { return Err(LexError::ControlCodeInNonApplicableMode); } } } } let token = Token::Comment(Box::new(tokens)); Ok((token, continue_or_finish(l))) } fn lex_string_literal_rest<'x>( l: U8SpanRef<'x>, ) -> Result<(Token<'x>, LexControlFlow<'x>), LexError> { // fixme: properly parse string literal let text_end = memchr::memchr2(b'\'', b'\n', l.bytes()).unwrap_or(l.len()); if !l.range(text_end..).starts_with(b"\'") { return Err(LexError::StringLiteralNotProperlyFinished); } let literal_end = text_end + 1; let literal_text = ascii_str::from_bytes(&l.bytes()[..text_end])?; let token = Token::Literal(Literal::StringLiteral(literal_text)); Ok((token, continue_or_finish(l.range(literal_end..)))) } fn lex_preprocessed_string_literal_rest<'x>( l: U8SpanRef<'x>, ) -> Result<(Token<'x>, LexControlFlow<'x>), LexError> { // fixme: properly parse string literal let text_end = memchr::memchr2(b'\"', b'\n', l.bytes()).unwrap_or(l.len()); if !l.range(text_end..).starts_with(b"\"") { return Err(LexError::PreprocessedStringLiteralNotProperlyFinished); } let literal_end = text_end + 1; let mut tokens = vec![]; tokens.push(Token::TextFragment(ascii_str::from_bytes( &l.bytes()[..text_end], )?)); let token = Token::Literal(Literal::PreprocessedStringLiteral(Box::new(tokens))); Ok((token, continue_or_finish(l.range(literal_end..)))) } fn lex_inline_prog_rest<'x>( l: U8SpanRef<'x>, parent_mode: LexMode, ) -> Result<(Token<'x>, LexControlFlow<'x>), LexError> { let mode = LexMode::INLINE_PASCAL_TEXT; let mut data = l; let mut tokens = vec![]; 'inline_prog_loop: loop { if data.starts_with(b"|") { data = data.range(1..); break 'inline_prog_loop; } else { let (token, control_flow) = lex_token(data, mode)?; match control_flow { LexControlFlow::Continue(rest_data) => { data = rest_data; tokens.push(token); } LexControlFlow::ModuleNameInlineProgAbort(rest_data) if parent_mode == LexMode::MODULE_NAME => { data = rest_data; tokens.push(token); break 'inline_prog_loop; } _ => { return Err(LexError::InlineProgFragmentNotProperlyFinished); } } } } let token = Token::InlineProgramFragment(Box::new(tokens)); Ok((token, continue_or_finish(data))) } pub fn lex_token<'x>( l: U8SpanRef<'x>, mode: LexMode, ) -> Result<(Token<'x>, LexControlFlow<'x>), LexError> { let (l_is_empty, first_ch) = match l.front_cloned() { Some(ch) => (false, ch), None => (true, 0), }; match mode { LexMode::LIMBO | LexMode::TEX_TEXT if l_is_empty => { let empty = ascii_str::from_bytes(l.bytes())?; return Ok((Token::TextFragment(empty), LexControlFlow::Finish(l))); } LexMode::DEFINITION_TEXT | LexMode::PASCAL_TEXT if l_is_empty => { return Ok((Token::WS, LexControlFlow::Finish(l))); } _ if l_is_empty => { return Err(LexError::UnexpectedEOF); } LexMode::LIMBO | LexMode::TEX_TEXT | LexMode::PASCAL_TEXT | LexMode::INLINE_PASCAL_TEXT | LexMode::DEFINITION_TEXT | LexMode::COMMENT if first_ch == CONTROL_CODE_PREFIX => { let rest = l.range(1..); let (control_code, rest, is_terminator) = lex_control_code_rest(rest, mode)?; if !is_terminator { return Ok((Token::CtrlCode(control_code), continue_or_finish(rest))); } else { let new_mode = switch_mode(&control_code, rest); return Ok((Token::CtrlCode(control_code), new_mode)); } } LexMode::MODULE_NAME if first_ch == CONTROL_CODE_PREFIX => { use super::control_code::ControlCodeKind; if l.starts_with(SIMPLE_ESCAPED_ATAIL) { let control_code = ControlCode { kind: ControlCodeKind::EscapedAt, param: None, }; return Ok(( Token::CtrlCode(control_code), continue_or_finish(l.range(2..)), )); } else if 
l.starts_with(END_OF_CONTROL_TEXT) { let control_code = ControlCode { kind: ControlCodeKind::HiddenEndOfModuleName, param: None, }; return Ok(( Token::CtrlCode(control_code), continue_or_finish(l.range(2..)), )); } else { return Err(LexError::ControlCodeInNonApplicableMode); } } LexMode::LIMBO | LexMode::TEX_TEXT | LexMode::MODULE_NAME | LexMode::COMMENT if first_ch == INLINE_PROGRAM_FRAGMENT => { let rest = l.range(1..); return lex_inline_prog_rest(rest, mode); } LexMode::LIMBO | LexMode::TEX_TEXT | LexMode::MODULE_NAME | LexMode::COMMENT => { use memchr::{memchr, memchr2}; debug_assert_ne!(first_ch, CONTROL_CODE_PREFIX); debug_assert_ne!(first_ch, INLINE_PROGRAM_FRAGMENT); let text_len = if mode == LexMode::LIMBO { memchr(CONTROL_CODE_PREFIX, l.bytes()) } else if mode != LexMode::COMMENT { memchr2(CONTROL_CODE_PREFIX, INLINE_PROGRAM_FRAGMENT, l.bytes()) } else { let count = l .bytes() .iter() .take_while(|&&ch| { ch != CONTROL_CODE_PREFIX && ch != INLINE_PROGRAM_FRAGMENT && ch != ESCAPE_CHARACTER && ch != START_OF_COMMENT && ch != END_OF_COMMENT }) .count(); Some(count) } .unwrap_or_else(|| l.len()); let (text, rest) = l.split_at(text_len); let text = ascii_str::from_bytes(text)?; return Ok((Token::TextFragment(text), continue_or_finish(rest))); } LexMode::PASCAL_TEXT | LexMode::DEFINITION_TEXT | LexMode::INLINE_PASCAL_TEXT => { use super::ascii_char; debug_assert!(first_ch != CONTROL_CODE_PREFIX); if ascii_char::is_whitespace_char(first_ch) { let (_, rest) = lex_maybe_whitespace(l); return Ok((Token::WS, continue_or_finish(rest))); } else if ascii_char::is_id_start(first_ch) { let (id, rest) = lex_identifier(l); let id = id.expect(""); return Ok(( Token::IdentOrKw(ascii_str::from_bytes(id)?), continue_or_finish(rest), )); } else if first_ch == b'{' { let rest = l.range(1..); return lex_comment_rest(rest); } else if first_ch == b'\'' { let rest = l.range(1..); return lex_string_literal_rest(rest); } else if first_ch == b'\"' { let rest = l.range(1..); return lex_preprocessed_string_literal_rest(rest); } else if first_ch == b'#' { let rest = l.range(1..); return Ok((Token::MacroParamMark, continue_or_finish(rest))); } else if mode == LexMode::INLINE_PASCAL_TEXT && first_ch == b'.' 
&& l.starts_with(MODULE_NAME_INLINE_PROGFRAG_ABORT) { return Ok(( Token::ModuleNameInlineProgAbort, LexControlFlow::ModuleNameInlineProgAbort(l), )); } else if ascii_char::is_punct_char(first_ch) { let (punct, rest) = lex_punct(l); let punct = punct.ok_or_else(|| LexError::UnrecognizedPunctuation(first_ch as char))?; return Ok((Token::Punct(punct), continue_or_finish(rest))); } else if ascii_char::is_numeric_char(first_ch) { let (numeric, rest) = lex_numeric_literal(l)?; return Ok((Token::Literal(numeric), continue_or_finish(rest))); } else { unimplemented!("{:?}", first_ch); } } _ => unimplemented!(), } } } pub struct LexerRawBuf<'x> { mode: LexMode, data: U8SpanRef<'x>, } #[derive(Default)] pub struct LexerLimboBuf<'x> { pub(crate) limbo_tokens: token::TokenList<'x>, } pub struct LexerModuleBuf<'x> { pub(crate) module_type: token::Token<'x>, pub(crate) text_in_tex: token::TokenList<'x>, pub(crate) definitions: token::TokenList<'x>, pub(crate) code_in_pascal: token::TokenList<'x>, } #[derive(Clone, Copy)] enum LexerInternalState { LimboDirty, LimboFilledModuleDirty, LimboFilledEOF, LimboTakenModuleDirty, ModuleFilledNextModuleDirty, ModuleFilledEOF, EOF, } pub struct WEBLexer<'x> { raw_buf: LexerRawBuf<'x>, state: LexerInternalState, limbo_buf: Option<LexerLimboBuf<'x>>, module_buf: Option<LexerModuleBuf<'x>>, next_module_buf: Option<LexerModuleBuf<'x>>, } impl<'x> WEBLexer<'x> { pub fn new(data: &'x [u8]) -> Self { let raw_buf = LexerRawBuf { mode: LexMode::LIMBO, data: U8SpanRef::new(data), }; let limbo_buf = Some(Default::default()); let state = LexerInternalState::LimboDirty; let module_buf = None; let next_module_buf = None; WEBLexer { raw_buf, state, limbo_buf, module_buf, next_module_buf, } } fn refill(&mut self) -> Result<(), LexError> { let mut output_module; match self.state { LexerInternalState::LimboDirty => { output_module = None; } LexerInternalState::LimboTakenModuleDirty | LexerInternalState::LimboFilledModuleDirty => { output_module = Some(self.module_buf.as_mut().unwrap()); } LexerInternalState::ModuleFilledNextModuleDirty => { output_module = Some(self.next_module_buf.as_mut().unwrap()); } LexerInternalState::LimboFilledEOF | LexerInternalState::ModuleFilledEOF | LexerInternalState::EOF => { return Ok(()); } } let mut pending_token = None; 'outer: loop { let output_tokenlist; if let Some(module) = &mut output_module { output_tokenlist = match self.raw_buf.mode { LexMode::TEX_TEXT => &mut module.text_in_tex, LexMode::DEFINITION_TEXT => &mut module.definitions, LexMode::PASCAL_TEXT => &mut module.code_in_pascal, _ => unreachable!(), }; } else { assert!(self.raw_buf.mode == LexMode::LIMBO); output_tokenlist = &mut self.limbo_buf.as_mut().unwrap().limbo_tokens; } if let Some(token) = pending_token.take() { output_tokenlist.push(token); } 'inner: loop { let (token, control_flow) = token::lex_token(self.raw_buf.data, self.raw_buf.mode)?; match control_flow { LexControlFlow::Continue(rest_data) => { output_tokenlist.push(token); self.raw_buf.data = rest_data; continue 'inner; } LexControlFlow::Finish(rest_data) => { output_tokenlist.push(token); self.raw_buf.data = rest_data; self.state = match self.state { LexerInternalState::LimboDirty => LexerInternalState::LimboFilledEOF, LexerInternalState::LimboTakenModuleDirty => { LexerInternalState::ModuleFilledEOF } LexerInternalState::LimboFilledModuleDirty | LexerInternalState::ModuleFilledNextModuleDirty | LexerInternalState::LimboFilledEOF | LexerInternalState::ModuleFilledEOF | LexerInternalState::EOF => unreachable!(), }; break 
'outer; } LexControlFlow::StartNew( LexControlFlowNewItem::Module, new_mode, rest_data, ) => { self.raw_buf.mode = new_mode; self.raw_buf.data = rest_data; let new_module = LexerModuleBuf { module_type: token, text_in_tex: Default::default(), definitions: Default::default(), code_in_pascal: Default::default(), }; self.state = match self.state { LexerInternalState::LimboDirty => { assert!(self.module_buf.is_none()); self.module_buf = Some(new_module); LexerInternalState::LimboFilledModuleDirty } LexerInternalState::LimboTakenModuleDirty => { assert!(self.next_module_buf.is_none()); self.next_module_buf = Some(new_module); LexerInternalState::ModuleFilledNextModuleDirty } LexerInternalState::LimboFilledModuleDirty | LexerInternalState::ModuleFilledNextModuleDirty | LexerInternalState::LimboFilledEOF | LexerInternalState::ModuleFilledEOF | LexerInternalState::EOF => unreachable!(), }; break 'outer; } LexControlFlow::StartNew( LexControlFlowNewItem::Definition, new_mode, rest_data, ) | LexControlFlow::StartNew( LexControlFlowNewItem::ProgramText, new_mode, rest_data, ) => { assert!(pending_token.is_none()); pending_token = Some(token); self.raw_buf.mode = new_mode; self.raw_buf.data = rest_data; continue 'outer; } LexControlFlow::ModuleNameInlineProgAbort(..) => { unreachable!(); } } } } Ok(()) } pub fn lex_limbo(&mut self) -> Result<Option<LexerLimboBuf<'x>>, LexError> { self.refill()?; let result; self.state = match self.state { LexerInternalState::LimboDirty | LexerInternalState::LimboTakenModuleDirty => unreachable!(), LexerInternalState::LimboFilledModuleDirty => { result = self.limbo_buf.take(); LexerInternalState::LimboTakenModuleDirty } LexerInternalState::LimboFilledEOF => { result = self.limbo_buf.take(); LexerInternalState::EOF } LexerInternalState::ModuleFilledNextModuleDirty | LexerInternalState::ModuleFilledEOF | LexerInternalState::EOF => { result = None; self.state } }; Ok(result) } pub fn lex_module(&mut self) -> Result<Option<LexerModuleBuf<'x>>, LexError> { self.refill()?; let result; self.state = match self.state { LexerInternalState::LimboDirty | LexerInternalState::LimboTakenModuleDirty => unreachable!(), LexerInternalState::LimboFilledModuleDirty | LexerInternalState::LimboFilledEOF => { // must be called in the wrong order. unreachable!(); } LexerInternalState::ModuleFilledNextModuleDirty => { use std::mem::swap; result = self.module_buf.take(); swap(&mut self.module_buf, &mut self.next_module_buf); LexerInternalState::LimboTakenModuleDirty } LexerInternalState::ModuleFilledEOF => { result = self.module_buf.take(); LexerInternalState::EOF } LexerInternalState::EOF => { result = None; self.state } }; Ok(result) } }
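A hedged driver sketch for the lexer above: lex_limbo is called once for the limbo material, then lex_module is called until it yields None. The module path and the tiny WEB fragment are assumptions, and the fragment may not be fully well-formed, so errors are simply propagated.

// Hedged driver sketch (not part of the original file); the module path
// `mweb::classical::lexer` and the input fragment are assumptions.
use mweb::classical::lexer::WEBLexer;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let source = b"Limbo text.\n@* A module. Some TeX text.\n@d answer == 42\n@p program demo;\n";
    let mut lexer = WEBLexer::new(source);

    // Limbo tokens (everything before the first module marker).
    let limbo = lexer.lex_limbo()?;
    println!("limbo present: {}", limbo.is_some());

    // Modules, in order, until EOF.
    let mut modules = 0usize;
    while lexer.lex_module()?.is_some() {
        modules += 1;
    }
    println!("lexed {} module(s)", modules);
    Ok(())
}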
true
24ec11896677f646b439567f03f5daa166f81c60
Rust
unovor/frame
/asn1_der-0.6.3/src/types/boolean.rs
UTF-8
702
2.671875
3
[ "BSD-2-Clause", "MIT" ]
permissive
use ::{ Asn1DerError, types::{ FromDerObject, IntoDerObject }, der::{ DerObject, DerTag } };

impl FromDerObject for bool {
    fn from_der_object(der_object: DerObject) -> Result<Self, Asn1DerError> {
        if der_object.tag != DerTag::Boolean { return Err(Asn1DerError::InvalidTag) }
        match der_object.value.data.as_slice() {
            &[0x00u8] => Ok(false),
            &[0xffu8] => Ok(true),
            _ => return Err(Asn1DerError::InvalidEncoding)
        }
    }
}

impl IntoDerObject for bool {
    fn into_der_object(self) -> DerObject {
        DerObject::new(DerTag::Boolean, match self {
            true => vec![0xffu8],
            false => vec![0x00u8]
        }.into())
    }
    fn serialized_len(&self) -> usize {
        DerObject::compute_serialized_len(1)
    }
}
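A round-trip sketch for the impls above: true encodes as a BOOLEAN with content byte 0xFF, false as 0x00, and any other content byte is rejected. The module paths mirror the file's own use declaration and are otherwise an assumption about the published crate.

// Hedged round-trip sketch (not part of the original file).
use asn1_der::der::DerObject;
use asn1_der::types::{FromDerObject, IntoDerObject};

fn main() {
    for value in [true, false] {
        let obj: DerObject = value.into_der_object();
        let back = bool::from_der_object(obj).expect("valid BOOLEAN encoding");
        assert_eq!(back, value);
    }
}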
true
88d58e93555a8aad6ff888974be5d5196671451d
Rust
esavier/keynesis
/src/passport/block/content.rs
UTF-8
7,809
2.828125
3
[ "MIT", "Apache-2.0", "LicenseRef-scancode-unknown-license-reference" ]
permissive
use crate::passport::block::{EntryError, EntrySlice, EntryType, Hash, Hasher}; use std::{ convert::TryInto as _, fmt::{self, Formatter}, iter::FusedIterator, ops::Deref, }; use thiserror::Error; #[derive(Ord, PartialOrd, Eq, PartialEq, Clone, Hash)] pub struct Content(Box<[u8]>); #[derive(Ord, PartialOrd, Eq, PartialEq)] pub(crate) struct ContentMut<'a>(&'a mut Vec<u8>); #[derive(Ord, PartialOrd, Eq, PartialEq, Copy, Clone, Hash)] pub struct ContentSlice<'a>(&'a [u8]); #[derive(Debug, Error)] pub enum ContentError { #[error("Content's max size has been reached, cannot add the entry")] MaxSizeReached, #[error("The content has {extra} bytes we do not know what they are for, it could the buffer was truncated")] InvalidLength { extra: usize }, #[error("Invalid entry")] Entry( #[from] #[source] EntryError, ), } pub struct ContentSliceIter<'a>(&'a [u8]); impl Content { pub const MAX_SIZE: usize = u16::MAX as usize; pub fn as_slice(&self) -> ContentSlice<'_> { ContentSlice(&self.0) } pub fn iter(&self) -> ContentSliceIter<'_> { self.as_slice().iter() } pub fn hash(&self) -> Hash { self.as_slice().hash() } } impl<'a> ContentMut<'a> { pub(crate) fn new(bytes: &'a mut Vec<u8>) -> Self { Self(bytes) } #[cfg(test)] fn into_content(self) -> Content { Content(self.0.to_owned().into_boxed_slice()) } pub(crate) fn push(&mut self, entry: EntrySlice<'_>) -> Result<(), ContentError> { let current_size = self.0.len(); let needed_size = current_size + entry.as_ref().len(); if needed_size > Content::MAX_SIZE { return Err(ContentError::MaxSizeReached); } self.0.extend_from_slice(entry.as_ref()); Ok(()) } } impl<'a> ContentSlice<'a> { pub fn iter(&self) -> ContentSliceIter<'a> { ContentSliceIter(self.0) } pub fn to_content(&self) -> Content { Content(self.0.to_vec().into_boxed_slice()) } pub fn from_slice_unchecked(slice: &'a [u8]) -> Self { Self(slice) } pub fn try_from_slice(slice: &'a [u8]) -> Result<Self, ContentError> { if slice.len() > Content::MAX_SIZE { return Err(ContentError::MaxSizeReached); } let content = Self(slice); let mut slice = content.0; while slice.len() >= 2 { let entry_type = EntryType::try_from_u16(u16::from_be_bytes(slice[..2].try_into().unwrap()))?; let size = entry_type.size(&slice[2..]); let _ = EntrySlice::try_from_slice(&slice[..size])?; slice = &slice[size..]; } if slice.is_empty() { Ok(content) } else { Err(ContentError::InvalidLength { extra: slice.len() }) } } pub fn hash(&self) -> Hash { Hasher::hash(self.0) } } impl<'a> IntoIterator for ContentSlice<'a> { type IntoIter = ContentSliceIter<'a>; type Item = EntrySlice<'a>; fn into_iter(self) -> Self::IntoIter { self.iter() } } impl<'a> Iterator for ContentSliceIter<'a> { type Item = EntrySlice<'a>; fn next(&mut self) -> Option<Self::Item> { if self.0.is_empty() { None } else { let entry_type = EntryType::try_from_u16(u16::from_be_bytes(self.0[..2].try_into().unwrap())) .unwrap(); let size = entry_type.size(&self.0[2..]); let entry = EntrySlice::from_slice_unchecked(&self.0[..size]); self.0 = &self.0[size..]; Some(entry) } } fn size_hint(&self) -> (usize, Option<usize>) { if self.0.is_empty() { (0, Some(0)) } else { (1, None) } } } impl<'a> FusedIterator for ContentSliceIter<'a> {} impl<'a> fmt::Debug for ContentSlice<'a> { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { f.debug_list().entries(self.iter()).finish() } } impl fmt::Debug for Content { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { f.debug_list().entries(self.iter()).finish() } } impl<'a> AsRef<[u8]> for ContentSlice<'a> { fn as_ref(&self) -> &[u8] { 
&self.0 } } impl<'a> Deref for ContentMut<'a> { type Target = [u8]; fn deref(&self) -> &Self::Target { &self.0 } } impl<'a> Deref for ContentSlice<'a> { type Target = [u8]; fn deref(&self) -> &Self::Target { &self.0 } } impl Deref for Content { type Target = [u8]; fn deref(&self) -> &Self::Target { &self.0 } } #[cfg(test)] mod tests { use super::*; use crate::{ key::{curve25519, ed25519::PublicKey}, passport::block::{Entry, EntryMut}, Seed, }; use quickcheck::{Arbitrary, Gen}; impl Arbitrary for Content { fn arbitrary(g: &mut Gen) -> Self { let max = usize::arbitrary(g) % 12; let mut bytes = Vec::with_capacity(1024); let mut content = ContentMut::new(&mut bytes); for _ in 0..max { let entry = Entry::arbitrary(g); match content.push(entry.as_slice()) { Ok(()) => (), Err(ContentError::MaxSizeReached) => break, Err(error) => { // another error occurred, it should not happen but // better ready than sorry unreachable!(&error) } } } content.into_content() } } /// test to make sure we detect the limit of the Content /// when using `push` #[test] fn too_long_fail() { let content = [0; Content::MAX_SIZE + 1]; match ContentSlice::try_from_slice(&content) { Err(ContentError::MaxSizeReached) => (), Err(error) => panic!("Didn't expect this error: {:?}", error), Ok(_) => panic!("Content should have failed with too long error"), } } #[test] fn test_shared_entry_only() { let mut rng = quickcheck::Gen::new(1024); let max = 1; let mut bytes = Vec::with_capacity(1024); let mut content = ContentMut::new(&mut bytes); for _ in 0..max { let mut entry_bytes = Vec::with_capacity(1024); let key = curve25519::SecretKey::arbitrary(&mut rng); let mut builder = EntryMut::new_set_shared_key(&mut entry_bytes, &key.public_key()); let passphrase = Option::<Seed>::arbitrary(&mut rng); let mut entry_rng = Seed::arbitrary(&mut rng).into_rand_chacha(); let count = u8::arbitrary(&mut rng) % 12 + 1; for _ in 0..count { builder .share_with( &mut entry_rng, &key, &PublicKey::arbitrary(&mut rng), &passphrase, ) .expect("valid share to this key"); } let entry = builder.finalize().expect("valid key sharing entry"); match content.push(entry) { Ok(()) => (), Err(ContentError::MaxSizeReached) => break, Err(error) => { // another error occurred, it should not happen but // better ready than sorry unreachable!(&error) } } } let _ = content.into_content(); } #[quickcheck] fn decode_slice(content: Content) -> bool { ContentSlice::try_from_slice(&content.0).unwrap(); true } }
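A small sketch of the validation path in try_from_slice, mirroring the too_long_fail test above: an empty slice is valid (the entry-scanning loop never runs), while anything longer than Content::MAX_SIZE is rejected up front. The crate/module paths are assumed from the file header.

// Hedged sketch (not part of the original file) of ContentSlice validation.
use keynesis::passport::block::{Content, ContentError, ContentSlice};

fn main() {
    // Empty content: no entries to scan, validation succeeds.
    let empty = ContentSlice::try_from_slice(&[]).expect("empty content is valid");
    assert_eq!(empty.iter().count(), 0);

    // Anything longer than Content::MAX_SIZE (u16::MAX bytes) is rejected early.
    let too_long = vec![0u8; Content::MAX_SIZE + 1];
    assert!(matches!(
        ContentSlice::try_from_slice(&too_long),
        Err(ContentError::MaxSizeReached)
    ));
}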
true
fe2b27935168a910385475d8c99f7a74ebb18257
Rust
kyle-rader/advent_of_code
/src/rust/aoc/src/auth.rs
UTF-8
2,264
2.875
3
[]
no_license
use std::fs::{self, File};
use std::io::Write;
use std::path::PathBuf;

use crate::aoc_client::AocClient;
use crate::cookies::aoc_session_token_first;
use anyhow::anyhow;
use directories::ProjectDirs;

pub fn login(token: Option<String>) -> anyhow::Result<()> {
    // Get
    let token = match token {
        Some(token) => {
            println!("📝 Using token provided on CLI");
            token
        }
        None => {
            println!("🍪 Using token from FireFox cookies");
            aoc_session_token_first()?
        }
    };

    // Test
    let client = AocClient::new(&token);
    let user_name = client.user_name()?;
    println!("✅ Token works!");

    // Save
    let cache_file = save_token(&token)?;
    println!("💾 Token saved at {}", &cache_file.display());
    println!("🚀 Welcome, {user_name}! Happy solving 🎉");

    Ok(())
}

pub fn logout() -> anyhow::Result<()> {
    let cache_file = cache_file()?;
    if cache_file.exists() {
        fs::remove_file(cache_file)?;
        println!("🗑️ token cache removed");
    } else {
        println!("🔎 no token cache found");
    }
    Ok(())
}

pub fn get_token() -> anyhow::Result<String> {
    let cache_file = cache_file()?;
    if !cache_file.exists() {
        println!("⚠️ Attempting to auto login");
        login(None)?
    }
    match fs::read_to_string(&cache_file) {
        Ok(token) => Ok(token),
        Err(err) => Err(anyhow!(
            "❌ {err}\nUnable to read token. (Make sure you have run the `login` command)"
        )),
    }
}

fn save_token(token: &String) -> anyhow::Result<PathBuf> {
    let cache_file = cache_file()?;
    let mut file = File::create(&cache_file)?;
    file.write_all(token.as_bytes())?;
    Ok(cache_file)
}

fn cache_file() -> anyhow::Result<PathBuf> {
    let Some(project_dir) = ProjectDirs::from("com", "advent_of_code", "aoc_cli") else {
        return Err(anyhow!("Could not get project directory"))
    };
    let cache_dir = project_dir.cache_dir();
    if !cache_dir.exists() {
        fs::create_dir_all(cache_dir)?;
    }
    Ok(cache_dir.join("aoc.cache"))
}

// todo: function to retrieve cached token
// todo: logout
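A hedged end-to-end sketch of how these helpers fit together: login verifies and caches a token, get_token reads the cache (auto-logging-in if it is missing), and logout removes it. The crate path aoc::auth is an assumption based on the file layout, and the flow only works with a valid Advent of Code session cookie available to the cookie reader.

// Hedged usage sketch (not part of the original file).
use aoc::auth::{get_token, login, logout};

fn main() -> anyhow::Result<()> {
    // No token on the CLI: login() falls back to the FireFox cookie store,
    // verifies the token against the site and caches it on disk.
    login(None)?;

    // Subsequent calls read the cached token (and auto-login if it is gone).
    let token = get_token()?;
    println!("cached token is {} characters long", token.trim().len());

    // Remove the cache again.
    logout()
}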
true
e84f63686921b6e2c38786095c792e852a256097
Rust
nparthas/project_euler
/src/q1_50/q16.rs
UTF-8
458
2.953125
3
[]
no_license
extern crate num;
extern crate num_bigint;

use self::num_bigint::{BigInt, ToBigInt};
use std::ops::Mul;

pub fn q16() -> i64 {
    let num_str: String = {
        let mut num: BigInt = 2.to_bigint().unwrap();
        for _ in 1..1000 {
            num = num.mul(2);
        }
        num.to_str_radix(10)
    };

    let mut dig_sum: i64 = 0;
    for c in num_str.chars() {
        dig_sum += c.to_digit(10).unwrap() as i64;
    }
    return dig_sum;
}
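A sanity-check sketch grounded in the Project Euler 16 statement: the same digit-sum loop applied to the worked example 2^15 = 32768, whose digits sum to 26, using only the standard library.

// Standalone sanity check (not part of the original file) for the digit-sum loop above.
fn digit_sum(n: &str) -> i64 {
    n.chars().map(|c| c.to_digit(10).unwrap() as i64).sum()
}

fn main() {
    let pow15 = (1u64 << 15).to_string(); // "32768"
    assert_eq!(digit_sum(&pow15), 26);    // 3 + 2 + 7 + 6 + 8
}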
true
da72f242775804c301adbff5e55ce4d923f6a425
Rust
adjivas/vt100-rust
/tests/csi.rs
UTF-8
13,668
2.703125
3
[ "MIT" ]
permissive
extern crate vt100; mod support; use support::TestHelpers; #[test] fn absolute_movement() { let mut screen = vt100::Screen::new(24, 80); assert_eq!(screen.cursor_position(), (0, 0)); screen.assert_process(b"\x1b[10;10H"); assert_eq!(screen.cursor_position(), (9, 9)); screen.assert_process(b"\x1b[d"); assert_eq!(screen.cursor_position(), (0, 9)); screen.assert_process(b"\x1b[15d"); assert_eq!(screen.cursor_position(), (14, 9)); screen.assert_process(b"\x1b[H"); assert_eq!(screen.cursor_position(), (0, 0)); screen.assert_process(b"\x1b[8H"); assert_eq!(screen.cursor_position(), (7, 0)); screen.assert_process(b"\x1b[15G"); assert_eq!(screen.cursor_position(), (7, 14)); screen.assert_process(b"\x1b[G"); assert_eq!(screen.cursor_position(), (7, 0)); screen.assert_process(b"\x1b[0;0H"); assert_eq!(screen.cursor_position(), (0, 0)); screen.assert_process(b"\x1b[1;1H"); assert_eq!(screen.cursor_position(), (0, 0)); screen.assert_process(b"\x1b[500;500H"); assert_eq!(screen.cursor_position(), (23, 79)); } #[test] fn relative_movement() { let mut screen = vt100::Screen::new(24, 80); assert_eq!(screen.cursor_position(), (0, 0)); screen.assert_process(b"\x1b[C"); assert_eq!(screen.cursor_position(), (0, 1)); screen.assert_process(b"\x1b[C"); assert_eq!(screen.cursor_position(), (0, 2)); screen.assert_process(b"\x1b[20C"); assert_eq!(screen.cursor_position(), (0, 22)); screen.assert_process(b"\x1b[D"); assert_eq!(screen.cursor_position(), (0, 21)); screen.assert_process(b"\x1b[D"); assert_eq!(screen.cursor_position(), (0, 20)); screen.assert_process(b"\x1b[9D"); assert_eq!(screen.cursor_position(), (0, 11)); screen.assert_process(b"\x1b[500C"); assert_eq!(screen.cursor_position(), (0, 79)); screen.assert_process(b"\x1b[500D"); assert_eq!(screen.cursor_position(), (0, 0)); screen.assert_process(b"\x1b[B"); assert_eq!(screen.cursor_position(), (1, 0)); screen.assert_process(b"\x1b[B"); assert_eq!(screen.cursor_position(), (2, 0)); screen.assert_process(b"\x1b[20B"); assert_eq!(screen.cursor_position(), (22, 0)); screen.assert_process(b"\x1b[A"); assert_eq!(screen.cursor_position(), (21, 0)); screen.assert_process(b"\x1b[A"); assert_eq!(screen.cursor_position(), (20, 0)); screen.assert_process(b"\x1b[9A"); assert_eq!(screen.cursor_position(), (11, 0)); screen.assert_process(b"\x1b[500B"); assert_eq!(screen.cursor_position(), (23, 0)); screen.assert_process(b"\x1b[500A"); assert_eq!(screen.cursor_position(), (0, 0)); } #[test] fn ed() { let mut screen = vt100::Screen::new(24, 80); assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n"); screen.assert_process(b"foo\x1b[5;5Hbar\x1b[10;10Hbaz\x1b[20;20Hquux"); assert_eq!(screen.window_contents(0, 0, 23, 79), "foo\n\n\n\n bar\n\n\n\n\n baz\n\n\n\n\n\n\n\n\n\n quux\n\n\n\n\n"); screen.assert_process(b"\x1b[10;12H\x1b[0J"); assert_eq!(screen.window_contents(0, 0, 23, 79), "foo\n\n\n\n bar\n\n\n\n\n ba\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n"); screen.assert_process(b"\x1b[5;7H\x1b[1J"); assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n r\n\n\n\n\n ba\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n"); screen.assert_process(b"\x1b[7;7H\x1b[2J"); assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n"); screen.assert_process(b"\x1b[2J\x1b[H"); assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n"); screen.assert_process(b"foo\x1b[5;5Hbar\x1b[10;10Hbaz\x1b[20;20Hquux"); assert_eq!(screen.window_contents(0, 0, 23, 79), "foo\n\n\n\n bar\n\n\n\n\n 
baz\n\n\n\n\n\n\n\n\n\n quux\n\n\n\n\n"); screen.assert_process(b"\x1b[10;12H\x1b[J"); assert_eq!(screen.window_contents(0, 0, 23, 79), "foo\n\n\n\n bar\n\n\n\n\n ba\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n"); screen.assert_process(b"\x1b[2J\x1b[H"); assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n"); screen.assert_process(b"foo\x1b[5;5Hbar\x1b[10;10Hbaz\x1b[20;20Hquux"); assert_eq!(screen.window_contents(0, 0, 23, 79), "foo\n\n\n\n bar\n\n\n\n\n baz\n\n\n\n\n\n\n\n\n\n quux\n\n\n\n\n"); screen.assert_process(b"\x1b[10;12H\x1b[?0J"); assert_eq!(screen.window_contents(0, 0, 23, 79), "foo\n\n\n\n bar\n\n\n\n\n ba\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n"); screen.assert_process(b"\x1b[5;7H\x1b[?1J"); assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n r\n\n\n\n\n ba\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n"); screen.assert_process(b"\x1b[7;7H\x1b[?2J"); assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n"); screen.assert_process(b"\x1b[2J\x1b[H"); assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n"); screen.assert_process(b"foo\x1b[5;5Hbar\x1b[10;10Hbaz\x1b[20;20Hquux"); assert_eq!(screen.window_contents(0, 0, 23, 79), "foo\n\n\n\n bar\n\n\n\n\n baz\n\n\n\n\n\n\n\n\n\n quux\n\n\n\n\n"); screen.assert_process(b"\x1b[10;12H\x1b[?J"); assert_eq!(screen.window_contents(0, 0, 23, 79), "foo\n\n\n\n bar\n\n\n\n\n ba\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n"); } #[test] fn el() { let mut screen = vt100::Screen::new(24, 80); assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n"); screen.assert_process(b"foo\x1b[5;5Hbarbar\x1b[10;10Hbazbaz\x1b[20;20Hquux"); assert_eq!(screen.window_contents(0, 0, 23, 79), "foo\n\n\n\n barbar\n\n\n\n\n bazbaz\n\n\n\n\n\n\n\n\n\n quux\n\n\n\n\n"); screen.assert_process(b"\x1b[5;8H\x1b[0K"); assert_eq!(screen.window_contents(0, 0, 23, 79), "foo\n\n\n\n bar\n\n\n\n\n bazbaz\n\n\n\n\n\n\n\n\n\n quux\n\n\n\n\n"); screen.assert_process(b"\x1b[10;13H\x1b[1K"); assert_eq!(screen.window_contents(0, 0, 23, 79), "foo\n\n\n\n bar\n\n\n\n\n baz\n\n\n\n\n\n\n\n\n\n quux\n\n\n\n\n"); screen.assert_process(b"\x1b[20;22H\x1b[2K"); assert_eq!(screen.window_contents(0, 0, 23, 79), "foo\n\n\n\n bar\n\n\n\n\n baz\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n"); screen.assert_process(b"\x1b[1;2H\x1b[K"); assert_eq!(screen.window_contents(0, 0, 23, 79), "f\n\n\n\n bar\n\n\n\n\n baz\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n"); screen.assert_process(b"\x1b[2J\x1b[H"); assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n"); screen.assert_process(b"foo\x1b[5;5Hbarbar\x1b[10;10Hbazbaz\x1b[20;20Hquux"); assert_eq!(screen.window_contents(0, 0, 23, 79), "foo\n\n\n\n barbar\n\n\n\n\n bazbaz\n\n\n\n\n\n\n\n\n\n quux\n\n\n\n\n"); screen.assert_process(b"\x1b[5;8H\x1b[?0K"); assert_eq!(screen.window_contents(0, 0, 23, 79), "foo\n\n\n\n bar\n\n\n\n\n bazbaz\n\n\n\n\n\n\n\n\n\n quux\n\n\n\n\n"); screen.assert_process(b"\x1b[10;13H\x1b[?1K"); assert_eq!(screen.window_contents(0, 0, 23, 79), "foo\n\n\n\n bar\n\n\n\n\n baz\n\n\n\n\n\n\n\n\n\n quux\n\n\n\n\n"); screen.assert_process(b"\x1b[20;22H\x1b[?2K"); assert_eq!(screen.window_contents(0, 0, 23, 79), "foo\n\n\n\n bar\n\n\n\n\n baz\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n"); screen.assert_process(b"\x1b[1;2H\x1b[?K"); assert_eq!(screen.window_contents(0, 0, 23, 79), "f\n\n\n\n bar\n\n\n\n\n baz\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n"); } #[test] fn ich_dch_ech() { let mut screen = vt100::Screen::new(24, 
80); assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n"); screen.assert_process(b"\x1b[10;10Hfoobar"); assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n foobar\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n"); screen.assert_process(b"\x1b[10;12H\x1b[3@"); assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n fo obar\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n"); assert_eq!(screen.cursor_position(), (9, 11)); screen.assert_process(b"\x1b[4P"); assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n fobar\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n"); assert_eq!(screen.cursor_position(), (9, 11)); screen.assert_process(b"\x1b[100@"); assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n fo\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n"); assert_eq!(screen.cursor_position(), (9, 11)); screen.assert_process(b"obar"); assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n foobar\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n"); assert_eq!(screen.cursor_position(), (9, 15)); screen.assert_process(b"\x1b[10;12H\x1b[100P"); assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n fo\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n"); assert_eq!(screen.cursor_position(), (9, 11)); screen.assert_process(b"obar"); assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n foobar\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n"); assert_eq!(screen.cursor_position(), (9, 15)); screen.assert_process(b"\x1b[10;13H\x1b[X"); assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n foo ar\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n"); assert_eq!(screen.cursor_position(), (9, 12)); screen.assert_process(b"\x1b[10;11H\x1b[4X"); assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n f r\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n"); assert_eq!(screen.cursor_position(), (9, 10)); screen.assert_process(b"\x1b[10;11H\x1b[400X"); assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n f\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n"); assert_eq!(screen.cursor_position(), (9, 10)); } #[test] fn il_dl() { let mut screen = vt100::Screen::new(24, 80); assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n"); screen.assert_process(b"\x1b[10;10Hfoobar\x1b[3D"); assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n foobar\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n"); assert_eq!(screen.cursor_position(), (9, 12)); screen.assert_process(b"\x1b[L"); assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n\n foobar\n\n\n\n\n\n\n\n\n\n\n\n\n\n"); assert_eq!(screen.cursor_position(), (9, 12)); screen.assert_process(b"\x1b[3L"); assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n\n\n\n\n foobar\n\n\n\n\n\n\n\n\n\n\n"); assert_eq!(screen.cursor_position(), (9, 12)); screen.assert_process(b"\x1b[500L"); assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n"); assert_eq!(screen.cursor_position(), (9, 12)); screen.assert_process(b"\x1b[10;10Hfoobar\x1b[3D\x1b[6A"); assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n foobar\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n"); assert_eq!(screen.cursor_position(), (3, 12)); screen.assert_process(b"\x1b[M"); assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n foobar\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n"); assert_eq!(screen.cursor_position(), (3, 12)); screen.assert_process(b"\x1b[3M"); assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n foobar\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n"); assert_eq!(screen.cursor_position(), (3, 
12)); screen.assert_process(b"\x1b[500M"); assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n"); assert_eq!(screen.cursor_position(), (3, 12)); } #[test] fn scroll() { let mut screen = vt100::Screen::new(24, 80); assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n"); screen.assert_process(b"1\r\n2\r\n3\r\n4\r\n5\r\n6\r\n7\r\n8\r\n9\r\n10\r\n11\r\n12\r\n13\r\n14\r\n15\r\n16\r\n17\r\n18\r\n19\r\n20\r\n21\r\n22\r\n23\r\n24"); assert_eq!(screen.window_contents(0, 0, 23, 79), "1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n11\n12\n13\n14\n15\n16\n17\n18\n19\n20\n21\n22\n23\n24\n"); screen.assert_process(b"\x1b[15;15H"); assert_eq!(screen.cursor_position(), (14, 14)); screen.assert_process(b"\x1b[S"); assert_eq!(screen.window_contents(0, 0, 23, 79), "2\n3\n4\n5\n6\n7\n8\n9\n10\n11\n12\n13\n14\n15\n16\n17\n18\n19\n20\n21\n22\n23\n24\n\n"); assert_eq!(screen.cursor_position(), (14, 14)); screen.assert_process(b"\x1b[3S"); assert_eq!(screen.window_contents(0, 0, 23, 79), "5\n6\n7\n8\n9\n10\n11\n12\n13\n14\n15\n16\n17\n18\n19\n20\n21\n22\n23\n24\n\n\n\n\n"); assert_eq!(screen.cursor_position(), (14, 14)); screen.assert_process(b"\x1b[T"); assert_eq!(screen.window_contents(0, 0, 23, 79), "\n5\n6\n7\n8\n9\n10\n11\n12\n13\n14\n15\n16\n17\n18\n19\n20\n21\n22\n23\n24\n\n\n\n"); assert_eq!(screen.cursor_position(), (14, 14)); screen.assert_process(b"\x1b[5T"); assert_eq!(screen.window_contents(0, 0, 23, 79), "\n\n\n\n\n\n5\n6\n7\n8\n9\n10\n11\n12\n13\n14\n15\n16\n17\n18\n19\n20\n21\n22\n"); assert_eq!(screen.cursor_position(), (14, 14)); }
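A minimal sketch (hypothetical helper, not part of the vt100 crate) of the CUP sequence the tests above exercise: CSI cursor-position parameters are 1-based, which is why b"\x1b[10;10H" lands at the 0-indexed position (9, 9) and why out-of-range parameters such as \x1b[500;500H get clamped to the bottom-right cell of the 24x80 grid.

fn cup(row_1_based: u16, col_1_based: u16) -> Vec<u8> {
    // CSI Pl ; Pc H -- Cursor Position, both parameters 1-based
    format!("\x1b[{};{}H", row_1_based, col_1_based).into_bytes()
}

fn main() {
    assert_eq!(cup(10, 10), b"\x1b[10;10H".to_vec());
    assert_eq!(cup(1, 1), b"\x1b[1;1H".to_vec());
}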
true
9aa78b2edba994bb6f3f7e5fdf7b37e563bdb3d7
Rust
dermesser/leveldb-rs
/src/asyncdb.rs
UTF-8
9,918
3.046875
3
[ "MIT" ]
permissive
use std::collections::hash_map::HashMap; use std::path::Path; use crate::{Options, Result, Status, StatusCode, WriteBatch, DB}; use tokio::sync::mpsc; use tokio::sync::oneshot; use tokio::task::{spawn_blocking, JoinHandle}; const CHANNEL_BUFFER_SIZE: usize = 32; #[derive(Clone, Copy)] pub struct SnapshotRef(usize); /// A request sent to the database thread. enum Request { Close, Put { key: Vec<u8>, val: Vec<u8> }, Delete { key: Vec<u8> }, Write { batch: WriteBatch, sync: bool }, Flush, GetAt { snapshot: SnapshotRef, key: Vec<u8> }, Get { key: Vec<u8> }, GetSnapshot, DropSnapshot { snapshot: SnapshotRef }, CompactRange { from: Vec<u8>, to: Vec<u8> }, } /// A response received from the database thread. enum Response { OK, Error(Status), Value(Option<Vec<u8>>), Snapshot(SnapshotRef), } /// Contains both a request and a back-channel for the reply. struct Message { req: Request, resp_channel: oneshot::Sender<Response>, } /// `AsyncDB` makes it easy to use LevelDB in a tokio runtime. /// The methods follow very closely the main API (see `DB` type). Iteration is not yet implemented. /// /// TODO: Make it work in other runtimes as well. This is a matter of adapting the blocking thread /// mechanism as well as the channel types. pub struct AsyncDB { jh: JoinHandle<()>, send: mpsc::Sender<Message>, } impl AsyncDB { /// Create a new or open an existing database. pub fn new<P: AsRef<Path>>(name: P, opts: Options) -> Result<AsyncDB> { let db = DB::open(name, opts)?; let (send, recv) = mpsc::channel(CHANNEL_BUFFER_SIZE); let jh = spawn_blocking(move || AsyncDB::run_server(db, recv)); Ok(AsyncDB { jh, send }) } pub async fn close(&self) -> Result<()> { let r = self.process_request(Request::Close).await?; match r { Response::OK => Ok(()), Response::Error(s) => Err(s), _ => Err(Status { code: StatusCode::AsyncError, err: "Wrong response type in AsyncDB.".to_string(), }), } } pub async fn put(&self, key: Vec<u8>, val: Vec<u8>) -> Result<()> { let r = self.process_request(Request::Put { key, val }).await?; match r { Response::OK => Ok(()), Response::Error(s) => Err(s), _ => Err(Status { code: StatusCode::AsyncError, err: "Wrong response type in AsyncDB.".to_string(), }), } } pub async fn delete(&self, key: Vec<u8>) -> Result<()> { let r = self.process_request(Request::Delete { key }).await?; match r { Response::OK => Ok(()), Response::Error(s) => Err(s), _ => Err(Status { code: StatusCode::AsyncError, err: "Wrong response type in AsyncDB.".to_string(), }), } } pub async fn write(&self, batch: WriteBatch, sync: bool) -> Result<()> { let r = self.process_request(Request::Write { batch, sync }).await?; match r { Response::OK => Ok(()), Response::Error(s) => Err(s), _ => Err(Status { code: StatusCode::AsyncError, err: "Wrong response type in AsyncDB.".to_string(), }), } } pub async fn flush(&self) -> Result<()> { let r = self.process_request(Request::Flush).await?; match r { Response::OK => Ok(()), Response::Error(s) => Err(s), _ => Err(Status { code: StatusCode::AsyncError, err: "Wrong response type in AsyncDB.".to_string(), }), } } pub async fn get(&self, key: Vec<u8>) -> Result<Option<Vec<u8>>> { let r = self.process_request(Request::Get { key }).await?; match r { Response::Value(v) => Ok(v), Response::Error(s) => Err(s), _ => Err(Status { code: StatusCode::AsyncError, err: "Wrong response type in AsyncDB.".to_string(), }), } } pub async fn get_at(&self, snapshot: SnapshotRef, key: Vec<u8>) -> Result<Option<Vec<u8>>> { let r = self .process_request(Request::GetAt { snapshot, key }) .await?; match r { 
Response::Value(v) => Ok(v), Response::Error(s) => Err(s), _ => Err(Status { code: StatusCode::AsyncError, err: "Wrong response type in AsyncDB.".to_string(), }), } } pub async fn get_snapshot(&self) -> Result<SnapshotRef> { let r = self.process_request(Request::GetSnapshot).await?; match r { Response::Snapshot(sr) => Ok(sr), _ => Err(Status { code: StatusCode::AsyncError, err: "Wrong response type in AsyncDB.".to_string(), }), } } /// As snapshots returned by `AsyncDB::get_snapshot()` are sort-of "weak references" to an /// actual snapshot, they need to be dropped explicitly. pub async fn drop_snapshot(&self, snapshot: SnapshotRef) -> Result<()> { let r = self .process_request(Request::DropSnapshot { snapshot }) .await?; match r { Response::OK => Ok(()), _ => Err(Status { code: StatusCode::AsyncError, err: "Wrong response type in AsyncDB.".to_string(), }), } } pub async fn compact_range(&self, from: Vec<u8>, to: Vec<u8>) -> Result<()> { let r = self .process_request(Request::CompactRange { from, to }) .await?; match r { Response::OK => Ok(()), Response::Error(s) => Err(s), _ => Err(Status { code: StatusCode::AsyncError, err: "Wrong response type in AsyncDB.".to_string(), }), } } async fn process_request(&self, req: Request) -> Result<Response> { let (tx, rx) = oneshot::channel(); let m = Message { req, resp_channel: tx, }; if let Err(e) = self.send.send(m).await { return Err(Status { code: StatusCode::AsyncError, err: e.to_string(), }); } let resp = rx.await; match resp { Err(e) => Err(Status { code: StatusCode::AsyncError, err: e.to_string(), }), Ok(r) => Ok(r), } } fn run_server(mut db: DB, mut recv: mpsc::Receiver<Message>) { let mut snapshots = HashMap::new(); let mut snapshot_counter: usize = 0; while let Some(message) = recv.blocking_recv() { match message.req { Request::Close => { message.resp_channel.send(Response::OK).ok(); recv.close(); return; } Request::Put { key, val } => { let ok = db.put(&key, &val); send_response(message.resp_channel, ok); } Request::Delete { key } => { let ok = db.delete(&key); send_response(message.resp_channel, ok); } Request::Write { batch, sync } => { let ok = db.write(batch, sync); send_response(message.resp_channel, ok); } Request::Flush => { let ok = db.flush(); send_response(message.resp_channel, ok); } Request::GetAt { snapshot, key } => { let snapshot_id = snapshot.0; if let Some(snapshot) = snapshots.get(&snapshot_id) { let ok = db.get_at(&snapshot, &key); match ok { Err(e) => { message.resp_channel.send(Response::Error(e)).ok(); } Ok(v) => { message.resp_channel.send(Response::Value(v)).ok(); } }; } else { message .resp_channel .send(Response::Error(Status { code: StatusCode::AsyncError, err: "Unknown snapshot reference: this is a bug".to_string(), })) .ok(); } } Request::Get { key } => { let r = db.get(&key); message.resp_channel.send(Response::Value(r)).ok(); } Request::GetSnapshot => { snapshots.insert(snapshot_counter, db.get_snapshot()); let sref = SnapshotRef(snapshot_counter); snapshot_counter += 1; message.resp_channel.send(Response::Snapshot(sref)).ok(); } Request::DropSnapshot { snapshot } => { snapshots.remove(&snapshot.0); send_response(message.resp_channel, Ok(())); } Request::CompactRange { from, to } => { let ok = db.compact_range(&from, &to); send_response(message.resp_channel, ok); } } } } } fn send_response(ch: oneshot::Sender<Response>, result: Result<()>) { if let Err(e) = result { ch.send(Response::Error(e)).ok(); } else { ch.send(Response::OK).ok(); } }
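The doc comment above positions AsyncDB as a thin tokio-friendly wrapper around DB. A minimal usage sketch under that API (hedged: it assumes Options implements Default, that a tokio runtime drives the future, and the path "demo-db" is illustrative only):

async fn demo() -> Result<()> {
    // Open (or create) a database; the blocking server thread is spawned here.
    let db = AsyncDB::new("demo-db", Options::default())?;
    db.put(b"key".to_vec(), b"value".to_vec()).await?;
    assert_eq!(db.get(b"key".to_vec()).await?, Some(b"value".to_vec()));

    // Snapshots are weak references into the server thread's map and must be
    // dropped explicitly, as the doc comment on drop_snapshot notes.
    let snap = db.get_snapshot().await?;
    db.delete(b"key".to_vec()).await?;
    // The snapshot is expected to still see the pre-delete value.
    assert_eq!(db.get_at(snap, b"key".to_vec()).await?, Some(b"value".to_vec()));
    db.drop_snapshot(snap).await?;

    db.close().await
}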
true
35e0b54b4037574ec6d98644f5f8c7d0568c401c
Rust
Joxx0r/RustRevEngine
/src/misc/input.rs
UTF-8
2,512
3.078125
3
[]
no_license
use crate::math::vec::Vec2; use glfw::{Key, Action, MouseButton}; use crate::misc::camera::Camera; use crate::misc::camera::Camera_Movement::*; pub struct input_state { pub left_mouse_button_down: bool, pub mouse_position: Vec2, pub forward_key_down: bool, pub back_key_down: bool, pub left_key_down: bool, pub right_key_down: bool, pub esc_button_down: bool } impl input_state { pub fn default() -> input_state { return input_state{ left_mouse_button_down: false, mouse_position: Vec2::default(), forward_key_down: false, back_key_down: false, left_key_down: false, right_key_down: false, esc_button_down: false } } } pub fn calculate_input(window: &mut glfw::Window) -> input_state { let mut state: input_state = input_state::default(); state.forward_key_down = window.get_key(Key::W) == Action::Press; state.back_key_down = window.get_key(Key::S) == Action::Press; state.left_key_down = window.get_key(Key::A) == Action::Press; state.right_key_down = window.get_key(Key::D) == Action::Press; state.esc_button_down = window.get_key(Key::Escape) == Action::Press; state.left_mouse_button_down = window.get_mouse_button(MouseButton::Button1) == Action::Press; state.mouse_position = Vec2::new_tuple_f64(window.get_cursor_pos()); state } pub unsafe fn process_input(window: &mut glfw::Window, delta_time: f32, camera: &mut Camera, state:input_state) { if state.esc_button_down { window.set_should_close(true) } if state.forward_key_down { camera.ProcessKeyboard(FORWARD, delta_time); } if state.back_key_down { camera.ProcessKeyboard(BACKWARD, delta_time); } if state.left_key_down { camera.ProcessKeyboard(LEFT, delta_time); } if state.right_key_down { camera.ProcessKeyboard( RIGHT, delta_time); } static mut prev_frame_mouse_button_down: bool = false; static mut old_mouse_position:Vec2 = Vec2::default(); if(state.left_mouse_button_down) { if(prev_frame_mouse_button_down) { let delta = Vec2::new_tuple_f32((state.mouse_position.x - old_mouse_position.x, -1.0 * (state.mouse_position.y - old_mouse_position.y))); camera.ProcessMouseMovement(delta.x, delta.y, false); } old_mouse_position = state.mouse_position; } prev_frame_mouse_button_down = state.left_mouse_button_down; }
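The drag handling above keeps its previous-frame mouse state in function-local static mut variables. A rough, hypothetical alternative (not part of this crate) is to keep that state in a small caller-owned struct, which computes the same delta without unsafe mutable statics:

#[derive(Default)]
struct MouseDragTracker {
    prev_down: bool,
    prev_pos: (f64, f64),
}

impl MouseDragTracker {
    // Returns the (dx, dy) drag delta while the button stays held, with dy flipped so
    // that moving the mouse up yields a positive delta, mirroring the code above.
    fn update(&mut self, down: bool, pos: (f64, f64)) -> Option<(f64, f64)> {
        let delta = if down && self.prev_down {
            Some((pos.0 - self.prev_pos.0, -(pos.1 - self.prev_pos.1)))
        } else {
            None
        };
        if down {
            self.prev_pos = pos;
        }
        self.prev_down = down;
        delta
    }
}

// Per frame the caller would do something like:
// if let Some((dx, dy)) = tracker.update(state.left_mouse_button_down, cursor) {
//     camera.ProcessMouseMovement(dx as f32, dy as f32, false);
// }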
true
dca7e0facdb300a9c8f85b455b55f5bd534dd19a
Rust
hubris-lang/hubris
/src/hubris_rt/src/lib.rs
UTF-8
493
3.125
3
[ "MIT" ]
permissive
use std::rc::Rc; use std::mem::transmute; struct ObjValue { ptr: *mut usize, } pub struct Obj(Rc<ObjValue>); impl Obj { pub fn from<T>(t: T) -> Obj { unsafe { let boxed_val = Box::new(t); let val = ObjValue { ptr: transmute(Box::into_raw(boxed_val)), }; Obj(Rc::new(val)) } } pub fn unbox<T>(&self) -> &T { let ptr: *mut usize = self.0.ptr; unsafe { transmute(ptr) } } }
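A small usage sketch for the Obj wrapper above (illustrative only). Two caveats worth spelling out: unbox::<T>() is only sound when T matches the type originally passed to from, and nothing reclaims the leaked allocation, since ObjValue has no Drop impl.

fn main() {
    let n = Obj::from(42u64);
    assert_eq!(*n.unbox::<u64>(), 42);

    let s = Obj::from(String::from("hubris"));
    assert_eq!(s.unbox::<String>().as_str(), "hubris");
    // s.unbox::<u64>() would also compile, but it would reinterpret the String's
    // memory as a u64 -- the cast is completely unchecked.
}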
true
00c5e51dfc68f6afdf5222d95fb4ccfe2d728d48
Rust
bollo35/cryptopals
/src/bin/p040.rs
UTF-8
3,181
3.09375
3
[]
no_license
extern crate ooga; use ooga::rsa::Rsa; extern crate openssl; use openssl::bn::{BigNum, BigNumContext}; use std::ops::{Add, Div, Mul, Sub}; fn main() { let rsa0 = Rsa::new(); let rsa1 = Rsa::new(); let rsa2 = Rsa::new(); let msg = "Never gonna give you up! Never gonna let you down!".to_string(); let ct0 = BigNum::from_slice(&rsa0.enc_str(msg.clone()).unwrap()).unwrap(); let ct1 = BigNum::from_slice(&rsa1.enc_str(msg.clone()).unwrap()).unwrap(); let ct2 = BigNum::from_slice(&rsa2.enc_str(msg.clone()).unwrap()).unwrap(); let og = bncbrt(BigNum::from_slice(&ct0.to_vec()).unwrap()); println!("og: {:?}", String::from_utf8(og.to_vec())); println!("C0: {:?}", ct0); println!("C1: {:?}", ct1); println!("C2: {:?}", ct2); println!(); println!(); println!(); let (e0, n0) = rsa0.get_pubkey(); let (e1, n1) = rsa1.get_pubkey(); let (e2, n2) = rsa2.get_pubkey(); println!("n0 == n1: {}", n0 == n1); println!("n2 == n1: {}", n2 == n1); println!("n0 == n2: {}", n0 == n2); println!("e0: {}", e0); println!("e1: {}", e1); println!("e2: {}", e2); // N0 = n1 * n2 let N0 = n1.mul(&n2); // N1 = n0 * n2 let N1 = n0.mul(&n2); // N2 = n0 * n1 let N2 = n0.mul(&n1); let mut bnctx = BigNumContext::new().unwrap(); // a0 = invmod(N0, n0) let mut a0 = BigNum::new().unwrap(); a0.mod_inverse(&N0, &n0, &mut bnctx).unwrap(); // a1 = invmod(N1, n1) let mut a1 = BigNum::new().unwrap(); a1.mod_inverse(&N1, &n1, &mut bnctx).unwrap(); // a2 = invmod(N2, n2) let mut a2 = BigNum::new().unwrap(); a2.mod_inverse(&N2, &n2, &mut bnctx).unwrap(); // p0 = c0 * N0 * a0 let p0 = ct0.mul(&N0).mul(&a0); // p1 = c1 * N1 * a1 let p1 = ct1.mul(&N1).mul(&a1); // p2 = c2 * N2 * a2 let p2 = ct2.mul(&N2).mul(&a2); // In the instructions, they say that you don't need to take the result // modulo N_012 but that doesn't make sense. // The interesting thing is that if you have a message that's smaller // than N, and you know e = 3, you could just take the cubed root // without the chinese remainder theorem. I don't quite get the point // of this exercise. Ah, I guess the only thing I can think of is if // you have a message that gets broken into chunks, then you could // do this still? I don't know. let mut m_e = BigNum::new().unwrap(); m_e.mod_add(&p0.add(&p1), &p2, &n0.mul(&n1).mul(&n2), &mut bnctx).unwrap(); println!("m^e: {}", m_e); let m = bncbrt(BigNum::from_slice(&m_e.to_vec()).unwrap()); let m_3 = m.mul(&m).mul(&m); if m != m_e { println!("m_e - m'_3 = {}", m_e.sub(&m_3)); } let msg = String::from_utf8(m.to_vec()); println!("Recovered message: {:?}", msg); } fn bncbrt(n: BigNum) -> BigNum { let one = BigNum::from_u32(1).unwrap(); let two = BigNum::from_u32(2).unwrap(); // let's do a binary search... let mut high = BigNum::from_slice(&n.to_vec()).unwrap(); let mut low = BigNum::from_u32(0).unwrap(); let mut guess = high.add(&low).div(&two); let mut cube = guess.mul(&guess).mul(&guess); while cube != n && high != low { if cube > n { high = guess.sub(&one); } else { low = guess.add(&one); } guess = high.add(&low).div(&two); cube = guess.mul(&guess).mul(&guess); } guess }
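The comment block above reasons about the CRT combination step. A small self-contained sanity check of that combination, using plain machine integers in place of BigNum (hypothetical values; inv_mod is a throwaway extended-Euclid helper): for pairwise-coprime n0, n1, n2 and residues c_i = m^3 mod n_i, the sum c0*N0*a0 + c1*N1*a1 + c2*N2*a2 taken mod n0*n1*n2 equals m^3 exactly whenever m^3 is smaller than the product, so an integer cube root then recovers m.

fn inv_mod(a: i128, m: i128) -> i128 {
    // Extended Euclid: returns a^{-1} mod m, assuming gcd(a, m) == 1.
    let (mut old_r, mut r) = (a % m, m);
    let (mut old_s, mut s) = (1i128, 0i128);
    while r != 0 {
        let q = old_r / r;
        let next_r = old_r - q * r;
        old_r = r;
        r = next_r;
        let next_s = old_s - q * s;
        old_s = s;
        s = next_s;
    }
    ((old_s % m) + m) % m
}

fn main() {
    let (n0, n1, n2) = (1003i128, 1007i128, 1009i128); // pairwise coprime moduli
    let m = 101i128; // "plaintext", chosen so that m^3 < n0 * n1 * n2
    let m3 = m * m * m;
    let (c0, c1, c2) = (m3 % n0, m3 % n1, m3 % n2); // the three e = 3 "ciphertexts"

    let (nn0, nn1, nn2) = (n1 * n2, n0 * n2, n0 * n1); // N_i = product of the other moduli
    let n = n0 * n1 * n2;
    let combined = (c0 * nn0 % n * inv_mod(nn0, n0) % n
        + c1 * nn1 % n * inv_mod(nn1, n1) % n
        + c2 * nn2 % n * inv_mod(nn2, n2) % n)
        % n;
    assert_eq!(combined, m3); // m^3 < n, so a cube root of `combined` recovers m
}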
true
9bd1c7e7f17365d5f76e9cea6cf3c85305130196
Rust
ushkarev/rusty-kms
/src/key_store/key/tags.rs
UTF-8
942
3.125
3
[ "MIT" ]
permissive
use crate::key_store::errors::*; use crate::key_store::tag::Tag; use crate::key_store::key::{Key, State}; impl Key { pub fn add_tag(&mut self, tag: Tag) -> Result<(), AddTagError> { if let State::PendingDeletion(_) = self.state { return Err(AddTagError::InvalidState); } if let Some(existing_tag) = self.tags.iter_mut().find(|t| t.key() == tag.key()) { *existing_tag = tag; } else { self.tags.push(tag); } Ok(()) } pub fn remove_tag(&mut self, tag_key: &str) -> Result<Tag, RemoveTagError> { if let State::PendingDeletion(_) = self.state { return Err(RemoveTagError::InvalidState); } // TODO: should removing a non-existent tag fail? self.tags.iter() .position(|t| t.key() == tag_key) .map(|index| self.tags.remove(index)) .ok_or(RemoveTagError::NotFound) } }
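add_tag above is an upsert keyed on the tag's key. A stripped-down, hypothetical illustration of that pattern with plain (String, String) pairs (the real code additionally rejects the operation for keys pending deletion):

fn upsert_tag(tags: &mut Vec<(String, String)>, tag: (String, String)) {
    if let Some(existing) = tags.iter_mut().find(|t| t.0 == tag.0) {
        *existing = tag; // same key: replace the existing entry in place
    } else {
        tags.push(tag); // new key: append
    }
}

fn main() {
    let mut tags = Vec::new();
    upsert_tag(&mut tags, ("env".to_string(), "dev".to_string()));
    upsert_tag(&mut tags, ("env".to_string(), "prod".to_string()));
    assert_eq!(tags, vec![("env".to_string(), "prod".to_string())]);
}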
true
9294846647301ee2e68c1e8190d55b981ccf53fd
Rust
leptonyu/salak.rs
/src/lib.rs
UTF-8
10,594
3.671875
4
[ "MIT" ]
permissive
//! Salak is a multi layered configuration loader and zero-boilerplate configuration parser, with many predefined sources. //! //! 1. [About](#about) //! 2. [Quick Start](#quick-start) //! 3. [Features](#features) //! * [Predefined Sources](#predefined-sources) //! * [Key Convention](#key-convention) //! * [Value Placeholder Parsing](#value-placeholder-parsing) //! * [Attributes For Derive](#attributes-for-derive) //! * [Reload Configuration](#reload-configuration) //! * [Resource Factory](#resource-factory) //! //! ## About //! `salak` is a multi layered configuration loader with many predefined sources. Also it //! is a zero-boilerplate configuration parser which provides an auto-derive procedure macro //! to derive [`FromEnvironment`] so that we can parse configuration structs without any additional codes. //! //! ## Quick Start //! A simple example of `salak`: //! //! ``` //! use salak::*; //! //! #[derive(Debug, FromEnvironment)] //! #[salak(prefix = "config")] //! struct Config { //! #[salak(default = false)] //! verbose: bool, //! optional: Option<String>, //! #[salak(name = "val")] //! value: i64, //! } //! let env = Salak::builder() //! .set("config.val", "2021") //! .build() //! .unwrap(); //! let config = env.get::<Config>().unwrap(); //! assert_eq!(2021, config.value); //! assert_eq!(None, config.optional); //! assert_eq!(false, config.verbose); //! ``` //! //! ## Features //! //! #### Predefined Sources //! Predefined sources has the following order, [`Salak`] will find by sequence of these orders, //! if the property with specified key is found at the current source, than return immediately. Otherwise, //! it will search the next source. //! //! 1. Random source provides a group of keys can return random values. //! * `random.u8` //! * `random.u16` //! * `random.u32` //! * `random.u64` //! * `random.u128` //! * `random.usize` //! * `random.i8` //! * `random.i16` //! * `random.i32` //! * `random.i64` //! * `random.i128` //! * `random.isize` //! 2. Custom arguments source. [`SalakBuilder::set()`] can set a single kv, //! and [`SalakBuilder::set_args()`] can set a group of kvs. //! 3. System environment source. Implemented by [`source::system_environment`]. //! 4. Profile specified file source, eg. `app-dev.toml`, supports reloading. //! 5. No profile file source, eg. `app.toml`, supports reloading. //! 6. Custom sources, which can register by [`Salak::register()`]. //! //! #### Key Convention //! Key is used for search configuration from [`Environment`], normally it is represented by string. //! Key is a group of SubKey separated by dot(`.`), and SubKey is a name or a name followed by index. //! 1. SubKey Format (`[a-z][_a-z0-9]+(\[[0-9]+\])*`) //! * `a` //! * `a0` //! * `a_b` //! * `a[0]` //! * `a[0][0]` //! 2. Key Format (`SubKey(\.SubKey)*`) //! * `a` //! * `a.b` //! * `a.val[0]` //! * `a_b[0]` //! //! #### Value Placeholder Parsing //! 1. Placeholder Format //! * `${key}` => Get value of `key`. //! * `${key:default}` => Get value of `key`, if not exists return `default`. //! 2. Escape Format //! * `\$\{key\}` => Return `${key}`. //! * `$`, `\`, `{`, `}` must use escape format. //! //! #### Attributes For Derive //! `salak` supports some attributes for automatically derive [`FromEnvironment`]. //! All attributes have format `#[salak(..)]`, eg. `#[salak(default = "default value")]`. //! 1. Struct Header Attribute. //! * `#[salak(prefix = "salak.application")]`, has this attr will auto implement [`PrefixedFromEnvironment`]. //! 2. Struct Field Attribute. //! 
* `#[salak(default = "value")]`, this attr can specify default value. //! * `#[salak(name = "key")]`, this attr can specify property key, default convension is use field name. //! * `#[salak(desc = "Field Description")]`, this attr can be describe this property. //! //! #### Reload Configuration //! `salak` supports reload configurations. Since in rust mutable //! and alias can't be used together, here we introduce a wrapper //! [`wrapper::IORef`] for updating values when reloading. //! //! #### Resource Factory //! [`Resource`] defines a standard way to create instance. [`Factory`] provides functions to initialize resource //! and cache resource. Please refer to [salak_factory](https://docs.rs/salak_factory) for resource usage. //! Feature 'app' should be open for this feature. //! #![cfg_attr(docsrs, feature(doc_cfg))] #![warn( anonymous_parameters, missing_copy_implementations, missing_debug_implementations, missing_docs, nonstandard_style, rust_2018_idioms, single_use_lifetimes, trivial_casts, trivial_numeric_casts, unreachable_pub, unused_extern_crates, unused_qualifications, variant_size_differences )] use parking_lot::Mutex; #[cfg(feature = "derive")] use crate::derive::KeyDesc; #[cfg(feature = "derive")] mod derive; #[cfg(feature = "derive")] #[cfg_attr(docsrs, doc(cfg(feature = "derive")))] pub use crate::derive::{ AutoDeriveFromEnvironment, DescFromEnvironment, PrefixedFromEnvironment, SalakDescContext, }; use raw_ioref::IORefT; /// Auto derive [`FromEnvironment`] for struct. #[cfg(feature = "derive")] #[cfg_attr(docsrs, doc(cfg(feature = "derive")))] pub use salak_derive::FromEnvironment; /// Auto derive [`Service`] for struct. #[cfg(all(feature = "derive", feature = "app"))] #[cfg_attr(docsrs, doc(cfg(all(feature = "derive", feature = "app"))))] pub use salak_derive::Service; use source_raw::PropertyRegistryInternal; #[cfg(feature = "args")] #[cfg_attr(docsrs, doc(cfg(feature = "args")))] mod args; #[cfg(feature = "args")] #[cfg_attr(docsrs, doc(cfg(feature = "args")))] pub use crate::args::AppInfo; mod err; mod raw; use crate::raw::SubKey; pub use crate::raw::{IsProperty, Property}; mod raw_ioref; mod raw_vec; use crate::env::PREFIX; pub use crate::env::{Salak, SalakBuilder}; mod env; mod raw_enum; pub use crate::err::PropertyError; pub use crate::raw_enum::EnumProperty; mod source_map; #[cfg(feature = "rand")] #[cfg_attr(docsrs, doc(cfg(feature = "rand")))] mod source_rand; mod source_raw; #[cfg(feature = "toml")] #[cfg_attr(docsrs, doc(cfg(feature = "toml")))] mod source_toml; #[cfg(feature = "yaml")] #[cfg_attr(docsrs, doc(cfg(feature = "yaml")))] mod source_yaml; use crate::source::Key; use crate::source::SubKeys; #[cfg(feature = "app")] #[cfg_attr(docsrs, doc(cfg(feature = "app")))] mod app; #[cfg(feature = "app")] #[cfg_attr(docsrs, doc(cfg(feature = "app")))] pub use crate::app::*; #[cfg(test)] #[macro_use(quickcheck)] extern crate quickcheck_macros; /// Salak wrapper for configuration parsing. /// /// Wrapper can determine extra behavior for parsing. /// Such as check empty of vec or update when reloading. pub mod wrapper { pub use crate::raw_ioref::IORef; pub use crate::raw_vec::NonEmptyVec; } /// Salak sources. /// /// This mod exports all pub sources. 
pub mod source { #[cfg(feature = "args")] #[cfg_attr(docsrs, doc(cfg(feature = "args")))] pub(crate) use crate::args::from_args; pub use crate::raw::Key; pub use crate::raw::SubKeys; pub use crate::source_map::system_environment; pub use crate::source_map::HashMapSource; } pub(crate) type Res<T> = Result<T, PropertyError>; pub(crate) type Void = Res<()>; /// A property source defines how to load properties. /// `salak` has some predefined sources, user can /// provide custom source by implementing this trait. /// /// Sources provided by `salak`. /// /// * hashmap source /// * std::env source /// * toml source /// * yaml source pub trait PropertySource: Send + Sync { /// [`PropertySource`] name. fn name(&self) -> &str; /// Get property by key. fn get_property(&self, key: &Key<'_>) -> Option<Property<'_>>; /// Get all subkeys with given key. /// /// Subkeys are keys without dot('.'). /// This method is unstable, and will be simplified by hidding /// Key and SubKeys. fn get_sub_keys<'a>(&'a self, key: &Key<'_>, sub_keys: &mut SubKeys<'a>); /// Check whether the [`PropertySource`] is empty. /// Empty source will be ignored when registering to `salak`. fn is_empty(&self) -> bool; /// Reload source, if nothing changes, then return none. #[inline] fn reload_source(&self) -> Res<Option<Box<dyn PropertySource>>> { Ok(None) } } /// Environment defines interface for getting values, and reloading /// configurations. /// /// The implementor of this trait is [`Salak`]. pub trait Environment { /// Get value by key. /// * `key` - Configuration key. /// /// Require means is if the value `T` is not found, /// then error will be returned. But if you try to get /// `Option<T>`, then not found will return `None`. fn require<T: FromEnvironment>(&self, key: &str) -> Res<T>; /// Reload configuration. If reloading is completed, /// all values wrapped by [`wrapper::IORef`] will be updated. /// /// Currently, this feature is unstable, the returned bool /// value means reloading is completed without error. fn reload(&self) -> Res<bool>; #[cfg(feature = "derive")] #[cfg_attr(docsrs, doc(cfg(feature = "derive")))] #[inline] /// Get value with predefined key. /// /// [`PrefixedFromEnvironment`] can be auto derives by /// [`salak_derive::FromEnvironment`] macro. It provides /// a standard key for getting value `T`. fn get<T: PrefixedFromEnvironment>(&self) -> Res<T> { self.require::<T>(T::prefix()) } } /// Context for implementing [`FromEnvironment`]. #[allow(missing_debug_implementations)] pub struct SalakContext<'a> { registry: &'a PropertyRegistryInternal<'a>, iorefs: &'a Mutex<Vec<Box<dyn IORefT + Send>>>, key: &'a mut Key<'a>, } /// Parsing value from environment by [`SalakContext`]. pub trait FromEnvironment: Sized { /// Generate object from [`SalakContext`]. /// * `val` - Property value can be parsed from. /// * `env` - Context. /// /// ```no_run /// use salak::*; /// pub struct Config { /// key: String /// } /// impl FromEnvironment for Config { /// fn from_env( /// val: Option<Property<'_>>, /// env: &mut SalakContext<'_>, /// ) -> Result<Self, PropertyError> { /// Ok(Self{ /// key: env.require_def("key", None)?, /// }) /// } /// } /// /// ``` fn from_env(val: Option<Property<'_>>, env: &mut SalakContext<'_>) -> Res<Self>; }
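The crate docs above spell out the value placeholder syntax (${key} and ${key:default}). A short usage sketch of that behavior through the builder API shown in the Quick Start (hedged: it assumes placeholders are resolved when values are fetched via require, as the Value Placeholder Parsing section describes; all keys and values are illustrative):

use salak::*;

fn main() -> Result<(), PropertyError> {
    let env = Salak::builder()
        .set("app.name", "salak")
        .set("app.greeting", "hello ${app.name}")
        .set("app.fallback", "${app.missing:default-value}")
        .build()?;

    // ${app.name} resolves against the registered sources.
    assert_eq!(env.require::<String>("app.greeting")?, "hello salak");
    // ${key:default} falls back to the default when the key is absent.
    assert_eq!(env.require::<String>("app.fallback")?, "default-value");
    Ok(())
}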
true
803d47198803d026358b00492feed23c3c720724
Rust
trevershick/uni
/src/main.rs
UTF-8
1,356
3.03125
3
[]
no_license
#[macro_use] extern crate simple_error; #[macro_use] extern crate clap; use std::error::Error; use std::process::exit; use std::result::Result; use clap::App; use hex; fn utf8_to_utf16(unicode_bytes: Vec<u8>) -> Result<String, Box<dyn Error>> { if unicode_bytes.len() != 2 { bail!("only handle arrays of 2"); } let mut wide = unicode_bytes[0] as u16; wide <<= 8; wide += unicode_bytes[1] as u16; return match String::from_utf16(&[wide]) { Ok(v) => Ok(v), Err(x) => Err(Box::new(x)), }; } fn main() { let matches = App::new("uni") .version("1.0.0") .about("Convert unicode hex to unicode character") .args_from_usage("<hex_vals>... 'A sequence of utf16 hex values, i.e. 30CE B0AB'") .get_matches(); let hex_values = values_t!(matches, "hex_vals", String).unwrap(); let mut bad: Vec<String> = Vec::new(); for hex_value in hex_values { let decoded = hex::decode(&hex_value); match decoded { Ok(unicode_bytes) => match utf8_to_utf16(unicode_bytes) { Ok(v) => print!("{}", v), Err(_) => bad.push(hex_value), }, Err(_) => bad.push(hex_value), }; } for b in &bad { eprintln!("bad code {}", b); } if bad.len() > 0 { exit(1) } }
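Despite its name, utf8_to_utf16 above decodes one big-endian UTF-16 code unit (exactly two bytes) into a String. A small, self-contained check of that behavior (hypothetical test module; the values stay in the Basic Multilingual Plane, where one code unit is one character):

#[cfg(test)]
mod tests {
    use super::utf8_to_utf16;

    #[test]
    fn decodes_big_endian_code_units() {
        // "0041" -> [0x00, 0x41] -> U+0041 -> "A"
        assert_eq!(utf8_to_utf16(vec![0x00, 0x41]).unwrap(), "A");
        // "30CE" (from the usage string above) -> U+30CE, KATAKANA LETTER NO
        assert_eq!(utf8_to_utf16(vec![0x30, 0xCE]).unwrap(), "\u{30CE}");
        // Anything other than exactly two bytes is rejected.
        assert!(utf8_to_utf16(vec![0x41]).is_err());
    }
}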
true
9f50f066cda213a8677e45e00a4d365757896afe
Rust
embed-rs/stm32f7x6
/src/ethernet_mmc/mmccr/mod.rs
UTF-8
19,766
2.921875
3
[]
no_license
#[doc = r" Value read from the register"] pub struct R { bits: u32, } #[doc = r" Value to write to the register"] pub struct W { bits: u32, } impl super::MMCCR { #[doc = r" Modifies the contents of the register"] #[inline] pub fn modify<F>(&self, f: F) where for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W, { let bits = self.register.get(); let r = R { bits: bits }; let mut w = W { bits: bits }; f(&r, &mut w); self.register.set(w.bits); } #[doc = r" Reads the contents of the register"] #[inline] pub fn read(&self) -> R { R { bits: self.register.get(), } } #[doc = r" Writes to the register"] #[inline] pub fn write<F>(&self, f: F) where F: FnOnce(&mut W) -> &mut W, { let mut w = W::reset_value(); f(&mut w); self.register.set(w.bits); } #[doc = r" Writes the reset value to the register"] #[inline] pub fn reset(&self) { self.write(|w| w) } } #[doc = "Possible values of the field `CR`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum CRR { #[doc = "Reset all counters. Cleared automatically"] RESET, #[doc = r" Reserved"] _Reserved(bool), } impl CRR { #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { match *self { CRR::RESET => true, CRR::_Reserved(bits) => bits, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: bool) -> CRR { match value { true => CRR::RESET, i => CRR::_Reserved(i), } } #[doc = "Checks if the value of the field is `RESET`"] #[inline] pub fn is_reset(&self) -> bool { *self == CRR::RESET } } #[doc = "Possible values of the field `CSR`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum CSRR { #[doc = "Counters roll over to zero after reaching the maximum value"] DISABLED, #[doc = "Counters do not roll over to zero after reaching the maximum value"] ENABLED, } impl CSRR { #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { match *self { CSRR::DISABLED => false, CSRR::ENABLED => true, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: bool) -> CSRR { match value { false => CSRR::DISABLED, true => CSRR::ENABLED, } } #[doc = "Checks if the value of the field is `DISABLED`"] #[inline] pub fn is_disabled(&self) -> bool { *self == CSRR::DISABLED } #[doc = "Checks if the value of the field is `ENABLED`"] #[inline] pub fn is_enabled(&self) -> bool { *self == CSRR::ENABLED } } #[doc = "Possible values of the field `ROR`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum RORR { #[doc = "MMC counters do not reset on read"] DISABLED, #[doc = "MMC counters reset to zero after read"] ENABLED, } impl RORR { #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { match *self { RORR::DISABLED => false, RORR::ENABLED => true, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: bool) -> RORR { match value { false => RORR::DISABLED, true => RORR::ENABLED, } } #[doc = "Checks if the value of the field is 
`DISABLED`"] #[inline] pub fn is_disabled(&self) -> bool { *self == RORR::DISABLED } #[doc = "Checks if the value of the field is `ENABLED`"] #[inline] pub fn is_enabled(&self) -> bool { *self == RORR::ENABLED } } #[doc = "Possible values of the field `MCF`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum MCFR { #[doc = "All MMC counters update normally"] UNFROZEN, #[doc = "All MMC counters frozen to their current value"] FROZEN, } impl MCFR { #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { match *self { MCFR::UNFROZEN => false, MCFR::FROZEN => true, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: bool) -> MCFR { match value { false => MCFR::UNFROZEN, true => MCFR::FROZEN, } } #[doc = "Checks if the value of the field is `UNFROZEN`"] #[inline] pub fn is_unfrozen(&self) -> bool { *self == MCFR::UNFROZEN } #[doc = "Checks if the value of the field is `FROZEN`"] #[inline] pub fn is_frozen(&self) -> bool { *self == MCFR::FROZEN } } #[doc = "Possible values of the field `MCP`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum MCPR { #[doc = "MMC counters will be preset to almost full or almost half. Cleared automatically"] PRESET, #[doc = r" Reserved"] _Reserved(bool), } impl MCPR { #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { match *self { MCPR::PRESET => true, MCPR::_Reserved(bits) => bits, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: bool) -> MCPR { match value { true => MCPR::PRESET, i => MCPR::_Reserved(i), } } #[doc = "Checks if the value of the field is `PRESET`"] #[inline] pub fn is_preset(&self) -> bool { *self == MCPR::PRESET } } #[doc = "Possible values of the field `MCFHP`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum MCFHPR { #[doc = "When MCP is set, MMC counters are preset to almost-half value 0x7FFF_FFF0"] ALMOSTHALF, #[doc = "When MCP is set, MMC counters are preset to almost-full value 0xFFFF_FFF0"] ALMOSTFULL, } impl MCFHPR { #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { match *self { MCFHPR::ALMOSTHALF => false, MCFHPR::ALMOSTFULL => true, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: bool) -> MCFHPR { match value { false => MCFHPR::ALMOSTHALF, true => MCFHPR::ALMOSTFULL, } } #[doc = "Checks if the value of the field is `ALMOSTHALF`"] #[inline] pub fn is_almost_half(&self) -> bool { *self == MCFHPR::ALMOSTHALF } #[doc = "Checks if the value of the field is `ALMOSTFULL`"] #[inline] pub fn is_almost_full(&self) -> bool { *self == MCFHPR::ALMOSTFULL } } #[doc = "Values that can be written to the field `CR`"] pub enum CRW { #[doc = "Reset all counters. 
Cleared automatically"] RESET, } impl CRW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> bool { match *self { CRW::RESET => true, } } } #[doc = r" Proxy"] pub struct _CRW<'a> { w: &'a mut W, } impl<'a> _CRW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: CRW) -> &'a mut W { { self.bit(variant._bits()) } } #[doc = "Reset all counters. Cleared automatically"] #[inline] pub fn reset(self) -> &'a mut W { self.variant(CRW::RESET) } #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 0; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = "Values that can be written to the field `CSR`"] pub enum CSRW { #[doc = "Counters roll over to zero after reaching the maximum value"] DISABLED, #[doc = "Counters do not roll over to zero after reaching the maximum value"] ENABLED, } impl CSRW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> bool { match *self { CSRW::DISABLED => false, CSRW::ENABLED => true, } } } #[doc = r" Proxy"] pub struct _CSRW<'a> { w: &'a mut W, } impl<'a> _CSRW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: CSRW) -> &'a mut W { { self.bit(variant._bits()) } } #[doc = "Counters roll over to zero after reaching the maximum value"] #[inline] pub fn disabled(self) -> &'a mut W { self.variant(CSRW::DISABLED) } #[doc = "Counters do not roll over to zero after reaching the maximum value"] #[inline] pub fn enabled(self) -> &'a mut W { self.variant(CSRW::ENABLED) } #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 1; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = "Values that can be written to the field `ROR`"] pub enum RORW { #[doc = "MMC counters do not reset on read"] DISABLED, #[doc = "MMC counters reset to zero after read"] ENABLED, } impl RORW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> bool { match *self { RORW::DISABLED => false, RORW::ENABLED => true, } } } #[doc = r" Proxy"] pub struct _RORW<'a> { w: &'a mut W, } impl<'a> _RORW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: RORW) -> &'a mut W { { self.bit(variant._bits()) } } #[doc = "MMC counters do not reset on read"] #[inline] pub fn disabled(self) -> &'a mut W { self.variant(RORW::DISABLED) } #[doc = "MMC counters reset to zero after read"] #[inline] pub fn enabled(self) -> &'a mut W { self.variant(RORW::ENABLED) } #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 2; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = "Values that can be written to the field `MCF`"] 
pub enum MCFW { #[doc = "All MMC counters update normally"] UNFROZEN, #[doc = "All MMC counters frozen to their current value"] FROZEN, } impl MCFW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> bool { match *self { MCFW::UNFROZEN => false, MCFW::FROZEN => true, } } } #[doc = r" Proxy"] pub struct _MCFW<'a> { w: &'a mut W, } impl<'a> _MCFW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: MCFW) -> &'a mut W { { self.bit(variant._bits()) } } #[doc = "All MMC counters update normally"] #[inline] pub fn unfrozen(self) -> &'a mut W { self.variant(MCFW::UNFROZEN) } #[doc = "All MMC counters frozen to their current value"] #[inline] pub fn frozen(self) -> &'a mut W { self.variant(MCFW::FROZEN) } #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 3; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = "Values that can be written to the field `MCP`"] pub enum MCPW { #[doc = "MMC counters will be preset to almost full or almost half. Cleared automatically"] PRESET, } impl MCPW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> bool { match *self { MCPW::PRESET => true, } } } #[doc = r" Proxy"] pub struct _MCPW<'a> { w: &'a mut W, } impl<'a> _MCPW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: MCPW) -> &'a mut W { { self.bit(variant._bits()) } } #[doc = "MMC counters will be preset to almost full or almost half. Cleared automatically"] #[inline] pub fn preset(self) -> &'a mut W { self.variant(MCPW::PRESET) } #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 4; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = "Values that can be written to the field `MCFHP`"] pub enum MCFHPW { #[doc = "When MCP is set, MMC counters are preset to almost-half value 0x7FFF_FFF0"] ALMOSTHALF, #[doc = "When MCP is set, MMC counters are preset to almost-full value 0xFFFF_FFF0"] ALMOSTFULL, } impl MCFHPW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> bool { match *self { MCFHPW::ALMOSTHALF => false, MCFHPW::ALMOSTFULL => true, } } } #[doc = r" Proxy"] pub struct _MCFHPW<'a> { w: &'a mut W, } impl<'a> _MCFHPW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: MCFHPW) -> &'a mut W { { self.bit(variant._bits()) } } #[doc = "When MCP is set, MMC counters are preset to almost-half value 0x7FFF_FFF0"] #[inline] pub fn almost_half(self) -> &'a mut W { self.variant(MCFHPW::ALMOSTHALF) } #[doc = "When MCP is set, MMC counters are preset to almost-full value 0xFFFF_FFF0"] #[inline] pub fn almost_full(self) -> &'a mut W { self.variant(MCFHPW::ALMOSTFULL) } #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: 
bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 5; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } impl R { #[doc = r" Value of the register as raw bits"] #[inline] pub fn bits(&self) -> u32 { self.bits } #[doc = "Bit 0 - CR"] #[inline] pub fn cr(&self) -> CRR { CRR::_from({ const MASK: bool = true; const OFFSET: u8 = 0; ((self.bits >> OFFSET) & MASK as u32) != 0 }) } #[doc = "Bit 1 - CSR"] #[inline] pub fn csr(&self) -> CSRR { CSRR::_from({ const MASK: bool = true; const OFFSET: u8 = 1; ((self.bits >> OFFSET) & MASK as u32) != 0 }) } #[doc = "Bit 2 - ROR"] #[inline] pub fn ror(&self) -> RORR { RORR::_from({ const MASK: bool = true; const OFFSET: u8 = 2; ((self.bits >> OFFSET) & MASK as u32) != 0 }) } #[doc = "Bit 3 - MCF"] #[inline] pub fn mcf(&self) -> MCFR { MCFR::_from({ const MASK: bool = true; const OFFSET: u8 = 3; ((self.bits >> OFFSET) & MASK as u32) != 0 }) } #[doc = "Bit 4 - MCP"] #[inline] pub fn mcp(&self) -> MCPR { MCPR::_from({ const MASK: bool = true; const OFFSET: u8 = 4; ((self.bits >> OFFSET) & MASK as u32) != 0 }) } #[doc = "Bit 5 - MCFHP"] #[inline] pub fn mcfhp(&self) -> MCFHPR { MCFHPR::_from({ const MASK: bool = true; const OFFSET: u8 = 5; ((self.bits >> OFFSET) & MASK as u32) != 0 }) } } impl W { #[doc = r" Reset value of the register"] #[inline] pub fn reset_value() -> W { W { bits: 0 } } #[doc = r" Writes raw bits to the register"] #[inline] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.bits = bits; self } #[doc = "Bit 0 - CR"] #[inline] pub fn cr(&mut self) -> _CRW { _CRW { w: self } } #[doc = "Bit 1 - CSR"] #[inline] pub fn csr(&mut self) -> _CSRW { _CSRW { w: self } } #[doc = "Bit 2 - ROR"] #[inline] pub fn ror(&mut self) -> _RORW { _RORW { w: self } } #[doc = "Bit 3 - MCF"] #[inline] pub fn mcf(&mut self) -> _MCFW { _MCFW { w: self } } #[doc = "Bit 4 - MCP"] #[inline] pub fn mcp(&mut self) -> _MCPW { _MCPW { w: self } } #[doc = "Bit 5 - MCFHP"] #[inline] pub fn mcfhp(&mut self) -> _MCFHPW { _MCFHPW { w: self } } }
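A brief usage sketch (hypothetical) for the svd2rust-style read/modify/write API generated above. It assumes the caller has already obtained the Ethernet MMC register block from the device's Peripherals singleton and passes a reference to this MMCCR register in:

// Freeze the MMC counters and enable reset-on-read, then read one field back.
fn configure_mmc_counters(mmccr: &super::MMCCR) {
    // modify() performs a read-modify-write and only touches the listed fields.
    mmccr.modify(|_, w| w.ror().enabled().mcf().frozen());

    // Field reads go through the typed R proxy and its enum-valued accessors.
    if mmccr.read().mcf().is_frozen() {
        // Clearing MCF resumes normal counting.
        mmccr.modify(|_, w| w.mcf().unfrozen());
    }
}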
true
e293abe837acee93c3c204016fac4b18bdcef1cf
Rust
astro/rust-lpc43xx
/src/scu/pintsel0/mod.rs
UTF-8
30,148
2.53125
3
[ "Apache-2.0" ]
permissive
#[doc = r" Value read from the register"] pub struct R { bits: u32, } #[doc = r" Value to write to the register"] pub struct W { bits: u32, } impl super::PINTSEL0 { #[doc = r" Modifies the contents of the register"] #[inline] pub fn modify<F>(&self, f: F) where for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W, { let bits = self.register.get(); let r = R { bits: bits }; let mut w = W { bits: bits }; f(&r, &mut w); self.register.set(w.bits); } #[doc = r" Reads the contents of the register"] #[inline] pub fn read(&self) -> R { R { bits: self.register.get(), } } #[doc = r" Writes to the register"] #[inline] pub fn write<F>(&self, f: F) where F: FnOnce(&mut W) -> &mut W, { let mut w = W::reset_value(); f(&mut w); self.register.set(w.bits); } #[doc = r" Writes the reset value to the register"] #[inline] pub fn reset(&self) { self.write(|w| w) } } #[doc = r" Value of the field"] pub struct INTPIN0R { bits: u8, } impl INTPIN0R { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { self.bits } } #[doc = "Possible values of the field `PORTSEL0`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum PORTSEL0R { #[doc = "GPIO Port 0"] GPIO_PORT_0, #[doc = "GPIO Port 1"] GPIO_PORT_1, #[doc = "GPIO Port 2"] GPIO_PORT_2, #[doc = "GPIO Port 3"] GPIO_PORT_3, #[doc = "GPIO Port 4"] GPIO_PORT_4, #[doc = "GPIO Port 5"] GPIO_PORT_5, #[doc = "GPIO Port 6"] GPIO_PORT_6, #[doc = "GPIO Port 7"] GPIO_PORT_7, } impl PORTSEL0R { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { match *self { PORTSEL0R::GPIO_PORT_0 => 0, PORTSEL0R::GPIO_PORT_1 => 1, PORTSEL0R::GPIO_PORT_2 => 2, PORTSEL0R::GPIO_PORT_3 => 3, PORTSEL0R::GPIO_PORT_4 => 4, PORTSEL0R::GPIO_PORT_5 => 5, PORTSEL0R::GPIO_PORT_6 => 6, PORTSEL0R::GPIO_PORT_7 => 7, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: u8) -> PORTSEL0R { match value { 0 => PORTSEL0R::GPIO_PORT_0, 1 => PORTSEL0R::GPIO_PORT_1, 2 => PORTSEL0R::GPIO_PORT_2, 3 => PORTSEL0R::GPIO_PORT_3, 4 => PORTSEL0R::GPIO_PORT_4, 5 => PORTSEL0R::GPIO_PORT_5, 6 => PORTSEL0R::GPIO_PORT_6, 7 => PORTSEL0R::GPIO_PORT_7, _ => unreachable!(), } } #[doc = "Checks if the value of the field is `GPIO_PORT_0`"] #[inline] pub fn is_gpio_port_0(&self) -> bool { *self == PORTSEL0R::GPIO_PORT_0 } #[doc = "Checks if the value of the field is `GPIO_PORT_1`"] #[inline] pub fn is_gpio_port_1(&self) -> bool { *self == PORTSEL0R::GPIO_PORT_1 } #[doc = "Checks if the value of the field is `GPIO_PORT_2`"] #[inline] pub fn is_gpio_port_2(&self) -> bool { *self == PORTSEL0R::GPIO_PORT_2 } #[doc = "Checks if the value of the field is `GPIO_PORT_3`"] #[inline] pub fn is_gpio_port_3(&self) -> bool { *self == PORTSEL0R::GPIO_PORT_3 } #[doc = "Checks if the value of the field is `GPIO_PORT_4`"] #[inline] pub fn is_gpio_port_4(&self) -> bool { *self == PORTSEL0R::GPIO_PORT_4 } #[doc = "Checks if the value of the field is `GPIO_PORT_5`"] #[inline] pub fn is_gpio_port_5(&self) -> bool { *self == PORTSEL0R::GPIO_PORT_5 } #[doc = "Checks if the value of the field is `GPIO_PORT_6`"] #[inline] pub fn is_gpio_port_6(&self) -> bool { *self == PORTSEL0R::GPIO_PORT_6 } #[doc = "Checks if the value of the field is `GPIO_PORT_7`"] #[inline] pub fn is_gpio_port_7(&self) -> bool { *self == PORTSEL0R::GPIO_PORT_7 } } #[doc = r" Value of the field"] pub struct INTPIN1R { bits: u8, } impl INTPIN1R { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { self.bits } } #[doc = "Possible values of the field `PORTSEL1`"] #[derive(Clone, Copy, Debug, 
PartialEq)] pub enum PORTSEL1R { #[doc = "GPIO Port 0"] GPIO_PORT_0, #[doc = "GPIO Port 1"] GPIO_PORT_1, #[doc = "GPIO Port 2"] GPIO_PORT_2, #[doc = "GPIO Port 3"] GPIO_PORT_3, #[doc = "GPIO Port 4"] GPIO_PORT_4, #[doc = "GPIO Port 5"] GPIO_PORT_5, #[doc = "GPIO Port 6"] GPIO_PORT_6, #[doc = "GPIO Port 7"] GPIO_PORT_7, } impl PORTSEL1R { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { match *self { PORTSEL1R::GPIO_PORT_0 => 0, PORTSEL1R::GPIO_PORT_1 => 1, PORTSEL1R::GPIO_PORT_2 => 2, PORTSEL1R::GPIO_PORT_3 => 3, PORTSEL1R::GPIO_PORT_4 => 4, PORTSEL1R::GPIO_PORT_5 => 5, PORTSEL1R::GPIO_PORT_6 => 6, PORTSEL1R::GPIO_PORT_7 => 7, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: u8) -> PORTSEL1R { match value { 0 => PORTSEL1R::GPIO_PORT_0, 1 => PORTSEL1R::GPIO_PORT_1, 2 => PORTSEL1R::GPIO_PORT_2, 3 => PORTSEL1R::GPIO_PORT_3, 4 => PORTSEL1R::GPIO_PORT_4, 5 => PORTSEL1R::GPIO_PORT_5, 6 => PORTSEL1R::GPIO_PORT_6, 7 => PORTSEL1R::GPIO_PORT_7, _ => unreachable!(), } } #[doc = "Checks if the value of the field is `GPIO_PORT_0`"] #[inline] pub fn is_gpio_port_0(&self) -> bool { *self == PORTSEL1R::GPIO_PORT_0 } #[doc = "Checks if the value of the field is `GPIO_PORT_1`"] #[inline] pub fn is_gpio_port_1(&self) -> bool { *self == PORTSEL1R::GPIO_PORT_1 } #[doc = "Checks if the value of the field is `GPIO_PORT_2`"] #[inline] pub fn is_gpio_port_2(&self) -> bool { *self == PORTSEL1R::GPIO_PORT_2 } #[doc = "Checks if the value of the field is `GPIO_PORT_3`"] #[inline] pub fn is_gpio_port_3(&self) -> bool { *self == PORTSEL1R::GPIO_PORT_3 } #[doc = "Checks if the value of the field is `GPIO_PORT_4`"] #[inline] pub fn is_gpio_port_4(&self) -> bool { *self == PORTSEL1R::GPIO_PORT_4 } #[doc = "Checks if the value of the field is `GPIO_PORT_5`"] #[inline] pub fn is_gpio_port_5(&self) -> bool { *self == PORTSEL1R::GPIO_PORT_5 } #[doc = "Checks if the value of the field is `GPIO_PORT_6`"] #[inline] pub fn is_gpio_port_6(&self) -> bool { *self == PORTSEL1R::GPIO_PORT_6 } #[doc = "Checks if the value of the field is `GPIO_PORT_7`"] #[inline] pub fn is_gpio_port_7(&self) -> bool { *self == PORTSEL1R::GPIO_PORT_7 } } #[doc = r" Value of the field"] pub struct INTPIN2R { bits: u8, } impl INTPIN2R { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { self.bits } } #[doc = "Possible values of the field `PORTSEL2`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum PORTSEL2R { #[doc = "GPIO Port 0"] GPIO_PORT_0, #[doc = "GPIO Port 1"] GPIO_PORT_1, #[doc = "GPIO Port 2"] GPIO_PORT_2, #[doc = "GPIO Port 3"] GPIO_PORT_3, #[doc = "GPIO Port 4"] GPIO_PORT_4, #[doc = "GPIO Port 5"] GPIO_PORT_5, #[doc = "GPIO Port 6"] GPIO_PORT_6, #[doc = "GPIO Port 7"] GPIO_PORT_7, } impl PORTSEL2R { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { match *self { PORTSEL2R::GPIO_PORT_0 => 0, PORTSEL2R::GPIO_PORT_1 => 1, PORTSEL2R::GPIO_PORT_2 => 2, PORTSEL2R::GPIO_PORT_3 => 3, PORTSEL2R::GPIO_PORT_4 => 4, PORTSEL2R::GPIO_PORT_5 => 5, PORTSEL2R::GPIO_PORT_6 => 6, PORTSEL2R::GPIO_PORT_7 => 7, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: u8) -> PORTSEL2R { match value { 0 => PORTSEL2R::GPIO_PORT_0, 1 => PORTSEL2R::GPIO_PORT_1, 2 => PORTSEL2R::GPIO_PORT_2, 3 => PORTSEL2R::GPIO_PORT_3, 4 => PORTSEL2R::GPIO_PORT_4, 5 => PORTSEL2R::GPIO_PORT_5, 6 => PORTSEL2R::GPIO_PORT_6, 7 => PORTSEL2R::GPIO_PORT_7, _ => unreachable!(), } } #[doc = "Checks if the value of the field is `GPIO_PORT_0`"] #[inline] pub fn 
is_gpio_port_0(&self) -> bool { *self == PORTSEL2R::GPIO_PORT_0 } #[doc = "Checks if the value of the field is `GPIO_PORT_1`"] #[inline] pub fn is_gpio_port_1(&self) -> bool { *self == PORTSEL2R::GPIO_PORT_1 } #[doc = "Checks if the value of the field is `GPIO_PORT_2`"] #[inline] pub fn is_gpio_port_2(&self) -> bool { *self == PORTSEL2R::GPIO_PORT_2 } #[doc = "Checks if the value of the field is `GPIO_PORT_3`"] #[inline] pub fn is_gpio_port_3(&self) -> bool { *self == PORTSEL2R::GPIO_PORT_3 } #[doc = "Checks if the value of the field is `GPIO_PORT_4`"] #[inline] pub fn is_gpio_port_4(&self) -> bool { *self == PORTSEL2R::GPIO_PORT_4 } #[doc = "Checks if the value of the field is `GPIO_PORT_5`"] #[inline] pub fn is_gpio_port_5(&self) -> bool { *self == PORTSEL2R::GPIO_PORT_5 } #[doc = "Checks if the value of the field is `GPIO_PORT_6`"] #[inline] pub fn is_gpio_port_6(&self) -> bool { *self == PORTSEL2R::GPIO_PORT_6 } #[doc = "Checks if the value of the field is `GPIO_PORT_7`"] #[inline] pub fn is_gpio_port_7(&self) -> bool { *self == PORTSEL2R::GPIO_PORT_7 } } #[doc = r" Value of the field"] pub struct INTPIN3R { bits: u8, } impl INTPIN3R { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { self.bits } } #[doc = "Possible values of the field `PORTSEL3`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum PORTSEL3R { #[doc = "GPIO Port 0"] GPIO_PORT_0, #[doc = "GPIO Port 1"] GPIO_PORT_1, #[doc = "GPIO Port 2"] GPIO_PORT_2, #[doc = "GPIO Port 3"] GPIO_PORT_3, #[doc = "GPIO Port 4"] GPIO_PORT_4, #[doc = "GPIO Port 5"] GPIO_PORT_5, #[doc = "GPIO Port 6"] GPIO_PORT_6, #[doc = "GPIO Port 7"] GPIO_PORT_7, } impl PORTSEL3R { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { match *self { PORTSEL3R::GPIO_PORT_0 => 0, PORTSEL3R::GPIO_PORT_1 => 1, PORTSEL3R::GPIO_PORT_2 => 2, PORTSEL3R::GPIO_PORT_3 => 3, PORTSEL3R::GPIO_PORT_4 => 4, PORTSEL3R::GPIO_PORT_5 => 5, PORTSEL3R::GPIO_PORT_6 => 6, PORTSEL3R::GPIO_PORT_7 => 7, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: u8) -> PORTSEL3R { match value { 0 => PORTSEL3R::GPIO_PORT_0, 1 => PORTSEL3R::GPIO_PORT_1, 2 => PORTSEL3R::GPIO_PORT_2, 3 => PORTSEL3R::GPIO_PORT_3, 4 => PORTSEL3R::GPIO_PORT_4, 5 => PORTSEL3R::GPIO_PORT_5, 6 => PORTSEL3R::GPIO_PORT_6, 7 => PORTSEL3R::GPIO_PORT_7, _ => unreachable!(), } } #[doc = "Checks if the value of the field is `GPIO_PORT_0`"] #[inline] pub fn is_gpio_port_0(&self) -> bool { *self == PORTSEL3R::GPIO_PORT_0 } #[doc = "Checks if the value of the field is `GPIO_PORT_1`"] #[inline] pub fn is_gpio_port_1(&self) -> bool { *self == PORTSEL3R::GPIO_PORT_1 } #[doc = "Checks if the value of the field is `GPIO_PORT_2`"] #[inline] pub fn is_gpio_port_2(&self) -> bool { *self == PORTSEL3R::GPIO_PORT_2 } #[doc = "Checks if the value of the field is `GPIO_PORT_3`"] #[inline] pub fn is_gpio_port_3(&self) -> bool { *self == PORTSEL3R::GPIO_PORT_3 } #[doc = "Checks if the value of the field is `GPIO_PORT_4`"] #[inline] pub fn is_gpio_port_4(&self) -> bool { *self == PORTSEL3R::GPIO_PORT_4 } #[doc = "Checks if the value of the field is `GPIO_PORT_5`"] #[inline] pub fn is_gpio_port_5(&self) -> bool { *self == PORTSEL3R::GPIO_PORT_5 } #[doc = "Checks if the value of the field is `GPIO_PORT_6`"] #[inline] pub fn is_gpio_port_6(&self) -> bool { *self == PORTSEL3R::GPIO_PORT_6 } #[doc = "Checks if the value of the field is `GPIO_PORT_7`"] #[inline] pub fn is_gpio_port_7(&self) -> bool { *self == PORTSEL3R::GPIO_PORT_7 } } #[doc = r" Proxy"] pub struct 
_INTPIN0W<'a> { w: &'a mut W, } impl<'a> _INTPIN0W<'a> { #[doc = r" Writes raw bits to the field"] #[inline] pub unsafe fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 31; const OFFSET: u8 = 0; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = "Values that can be written to the field `PORTSEL0`"] pub enum PORTSEL0W { #[doc = "GPIO Port 0"] GPIO_PORT_0, #[doc = "GPIO Port 1"] GPIO_PORT_1, #[doc = "GPIO Port 2"] GPIO_PORT_2, #[doc = "GPIO Port 3"] GPIO_PORT_3, #[doc = "GPIO Port 4"] GPIO_PORT_4, #[doc = "GPIO Port 5"] GPIO_PORT_5, #[doc = "GPIO Port 6"] GPIO_PORT_6, #[doc = "GPIO Port 7"] GPIO_PORT_7, } impl PORTSEL0W { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> u8 { match *self { PORTSEL0W::GPIO_PORT_0 => 0, PORTSEL0W::GPIO_PORT_1 => 1, PORTSEL0W::GPIO_PORT_2 => 2, PORTSEL0W::GPIO_PORT_3 => 3, PORTSEL0W::GPIO_PORT_4 => 4, PORTSEL0W::GPIO_PORT_5 => 5, PORTSEL0W::GPIO_PORT_6 => 6, PORTSEL0W::GPIO_PORT_7 => 7, } } } #[doc = r" Proxy"] pub struct _PORTSEL0W<'a> { w: &'a mut W, } impl<'a> _PORTSEL0W<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: PORTSEL0W) -> &'a mut W { { self.bits(variant._bits()) } } #[doc = "GPIO Port 0"] #[inline] pub fn gpio_port_0(self) -> &'a mut W { self.variant(PORTSEL0W::GPIO_PORT_0) } #[doc = "GPIO Port 1"] #[inline] pub fn gpio_port_1(self) -> &'a mut W { self.variant(PORTSEL0W::GPIO_PORT_1) } #[doc = "GPIO Port 2"] #[inline] pub fn gpio_port_2(self) -> &'a mut W { self.variant(PORTSEL0W::GPIO_PORT_2) } #[doc = "GPIO Port 3"] #[inline] pub fn gpio_port_3(self) -> &'a mut W { self.variant(PORTSEL0W::GPIO_PORT_3) } #[doc = "GPIO Port 4"] #[inline] pub fn gpio_port_4(self) -> &'a mut W { self.variant(PORTSEL0W::GPIO_PORT_4) } #[doc = "GPIO Port 5"] #[inline] pub fn gpio_port_5(self) -> &'a mut W { self.variant(PORTSEL0W::GPIO_PORT_5) } #[doc = "GPIO Port 6"] #[inline] pub fn gpio_port_6(self) -> &'a mut W { self.variant(PORTSEL0W::GPIO_PORT_6) } #[doc = "GPIO Port 7"] #[inline] pub fn gpio_port_7(self) -> &'a mut W { self.variant(PORTSEL0W::GPIO_PORT_7) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 7; const OFFSET: u8 = 5; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _INTPIN1W<'a> { w: &'a mut W, } impl<'a> _INTPIN1W<'a> { #[doc = r" Writes raw bits to the field"] #[inline] pub unsafe fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 31; const OFFSET: u8 = 8; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = "Values that can be written to the field `PORTSEL1`"] pub enum PORTSEL1W { #[doc = "GPIO Port 0"] GPIO_PORT_0, #[doc = "GPIO Port 1"] GPIO_PORT_1, #[doc = "GPIO Port 2"] GPIO_PORT_2, #[doc = "GPIO Port 3"] GPIO_PORT_3, #[doc = "GPIO Port 4"] GPIO_PORT_4, #[doc = "GPIO Port 5"] GPIO_PORT_5, #[doc = "GPIO Port 6"] GPIO_PORT_6, #[doc = "GPIO Port 7"] GPIO_PORT_7, } impl PORTSEL1W { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> u8 { match *self { PORTSEL1W::GPIO_PORT_0 => 0, PORTSEL1W::GPIO_PORT_1 => 1, PORTSEL1W::GPIO_PORT_2 => 2, PORTSEL1W::GPIO_PORT_3 => 3, PORTSEL1W::GPIO_PORT_4 => 4, PORTSEL1W::GPIO_PORT_5 => 5, PORTSEL1W::GPIO_PORT_6 => 6, PORTSEL1W::GPIO_PORT_7 => 7, } } } #[doc = r" Proxy"] pub struct _PORTSEL1W<'a> { w: &'a mut W, } impl<'a> _PORTSEL1W<'a> { #[doc = r" 
Writes `variant` to the field"] #[inline] pub fn variant(self, variant: PORTSEL1W) -> &'a mut W { { self.bits(variant._bits()) } } #[doc = "GPIO Port 0"] #[inline] pub fn gpio_port_0(self) -> &'a mut W { self.variant(PORTSEL1W::GPIO_PORT_0) } #[doc = "GPIO Port 1"] #[inline] pub fn gpio_port_1(self) -> &'a mut W { self.variant(PORTSEL1W::GPIO_PORT_1) } #[doc = "GPIO Port 2"] #[inline] pub fn gpio_port_2(self) -> &'a mut W { self.variant(PORTSEL1W::GPIO_PORT_2) } #[doc = "GPIO Port 3"] #[inline] pub fn gpio_port_3(self) -> &'a mut W { self.variant(PORTSEL1W::GPIO_PORT_3) } #[doc = "GPIO Port 4"] #[inline] pub fn gpio_port_4(self) -> &'a mut W { self.variant(PORTSEL1W::GPIO_PORT_4) } #[doc = "GPIO Port 5"] #[inline] pub fn gpio_port_5(self) -> &'a mut W { self.variant(PORTSEL1W::GPIO_PORT_5) } #[doc = "GPIO Port 6"] #[inline] pub fn gpio_port_6(self) -> &'a mut W { self.variant(PORTSEL1W::GPIO_PORT_6) } #[doc = "GPIO Port 7"] #[inline] pub fn gpio_port_7(self) -> &'a mut W { self.variant(PORTSEL1W::GPIO_PORT_7) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 7; const OFFSET: u8 = 13; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _INTPIN2W<'a> { w: &'a mut W, } impl<'a> _INTPIN2W<'a> { #[doc = r" Writes raw bits to the field"] #[inline] pub unsafe fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 31; const OFFSET: u8 = 16; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = "Values that can be written to the field `PORTSEL2`"] pub enum PORTSEL2W { #[doc = "GPIO Port 0"] GPIO_PORT_0, #[doc = "GPIO Port 1"] GPIO_PORT_1, #[doc = "GPIO Port 2"] GPIO_PORT_2, #[doc = "GPIO Port 3"] GPIO_PORT_3, #[doc = "GPIO Port 4"] GPIO_PORT_4, #[doc = "GPIO Port 5"] GPIO_PORT_5, #[doc = "GPIO Port 6"] GPIO_PORT_6, #[doc = "GPIO Port 7"] GPIO_PORT_7, } impl PORTSEL2W { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> u8 { match *self { PORTSEL2W::GPIO_PORT_0 => 0, PORTSEL2W::GPIO_PORT_1 => 1, PORTSEL2W::GPIO_PORT_2 => 2, PORTSEL2W::GPIO_PORT_3 => 3, PORTSEL2W::GPIO_PORT_4 => 4, PORTSEL2W::GPIO_PORT_5 => 5, PORTSEL2W::GPIO_PORT_6 => 6, PORTSEL2W::GPIO_PORT_7 => 7, } } } #[doc = r" Proxy"] pub struct _PORTSEL2W<'a> { w: &'a mut W, } impl<'a> _PORTSEL2W<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: PORTSEL2W) -> &'a mut W { { self.bits(variant._bits()) } } #[doc = "GPIO Port 0"] #[inline] pub fn gpio_port_0(self) -> &'a mut W { self.variant(PORTSEL2W::GPIO_PORT_0) } #[doc = "GPIO Port 1"] #[inline] pub fn gpio_port_1(self) -> &'a mut W { self.variant(PORTSEL2W::GPIO_PORT_1) } #[doc = "GPIO Port 2"] #[inline] pub fn gpio_port_2(self) -> &'a mut W { self.variant(PORTSEL2W::GPIO_PORT_2) } #[doc = "GPIO Port 3"] #[inline] pub fn gpio_port_3(self) -> &'a mut W { self.variant(PORTSEL2W::GPIO_PORT_3) } #[doc = "GPIO Port 4"] #[inline] pub fn gpio_port_4(self) -> &'a mut W { self.variant(PORTSEL2W::GPIO_PORT_4) } #[doc = "GPIO Port 5"] #[inline] pub fn gpio_port_5(self) -> &'a mut W { self.variant(PORTSEL2W::GPIO_PORT_5) } #[doc = "GPIO Port 6"] #[inline] pub fn gpio_port_6(self) -> &'a mut W { self.variant(PORTSEL2W::GPIO_PORT_6) } #[doc = "GPIO Port 7"] #[inline] pub fn gpio_port_7(self) -> &'a mut W { self.variant(PORTSEL2W::GPIO_PORT_7) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bits(self, value: u8) -> &'a mut W { const 
MASK: u8 = 7; const OFFSET: u8 = 21; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _INTPIN3W<'a> { w: &'a mut W, } impl<'a> _INTPIN3W<'a> { #[doc = r" Writes raw bits to the field"] #[inline] pub unsafe fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 31; const OFFSET: u8 = 24; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = "Values that can be written to the field `PORTSEL3`"] pub enum PORTSEL3W { #[doc = "GPIO Port 0"] GPIO_PORT_0, #[doc = "GPIO Port 1"] GPIO_PORT_1, #[doc = "GPIO Port 2"] GPIO_PORT_2, #[doc = "GPIO Port 3"] GPIO_PORT_3, #[doc = "GPIO Port 4"] GPIO_PORT_4, #[doc = "GPIO Port 5"] GPIO_PORT_5, #[doc = "GPIO Port 6"] GPIO_PORT_6, #[doc = "GPIO Port 7"] GPIO_PORT_7, } impl PORTSEL3W { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> u8 { match *self { PORTSEL3W::GPIO_PORT_0 => 0, PORTSEL3W::GPIO_PORT_1 => 1, PORTSEL3W::GPIO_PORT_2 => 2, PORTSEL3W::GPIO_PORT_3 => 3, PORTSEL3W::GPIO_PORT_4 => 4, PORTSEL3W::GPIO_PORT_5 => 5, PORTSEL3W::GPIO_PORT_6 => 6, PORTSEL3W::GPIO_PORT_7 => 7, } } } #[doc = r" Proxy"] pub struct _PORTSEL3W<'a> { w: &'a mut W, } impl<'a> _PORTSEL3W<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: PORTSEL3W) -> &'a mut W { { self.bits(variant._bits()) } } #[doc = "GPIO Port 0"] #[inline] pub fn gpio_port_0(self) -> &'a mut W { self.variant(PORTSEL3W::GPIO_PORT_0) } #[doc = "GPIO Port 1"] #[inline] pub fn gpio_port_1(self) -> &'a mut W { self.variant(PORTSEL3W::GPIO_PORT_1) } #[doc = "GPIO Port 2"] #[inline] pub fn gpio_port_2(self) -> &'a mut W { self.variant(PORTSEL3W::GPIO_PORT_2) } #[doc = "GPIO Port 3"] #[inline] pub fn gpio_port_3(self) -> &'a mut W { self.variant(PORTSEL3W::GPIO_PORT_3) } #[doc = "GPIO Port 4"] #[inline] pub fn gpio_port_4(self) -> &'a mut W { self.variant(PORTSEL3W::GPIO_PORT_4) } #[doc = "GPIO Port 5"] #[inline] pub fn gpio_port_5(self) -> &'a mut W { self.variant(PORTSEL3W::GPIO_PORT_5) } #[doc = "GPIO Port 6"] #[inline] pub fn gpio_port_6(self) -> &'a mut W { self.variant(PORTSEL3W::GPIO_PORT_6) } #[doc = "GPIO Port 7"] #[inline] pub fn gpio_port_7(self) -> &'a mut W { self.variant(PORTSEL3W::GPIO_PORT_7) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 7; const OFFSET: u8 = 29; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } impl R { #[doc = r" Value of the register as raw bits"] #[inline] pub fn bits(&self) -> u32 { self.bits } #[doc = "Bits 0:4 - Pint interrupt 0: Select the pin number within the GPIO port selected by the PORTSEL0 bit in this register."] #[inline] pub fn intpin0(&self) -> INTPIN0R { let bits = { const MASK: u8 = 31; const OFFSET: u8 = 0; ((self.bits >> OFFSET) & MASK as u32) as u8 }; INTPIN0R { bits } } #[doc = "Bits 5:7 - Pin interrupt 0: Select the port for the pin number to be selected in the INTPIN0 bits of this register."] #[inline] pub fn portsel0(&self) -> PORTSEL0R { PORTSEL0R::_from({ const MASK: u8 = 7; const OFFSET: u8 = 5; ((self.bits >> OFFSET) & MASK as u32) as u8 }) } #[doc = "Bits 8:12 - Pint interrupt 1: Select the pin number within the GPIO port selected by the PORTSEL1 bit in this register."] #[inline] pub fn intpin1(&self) -> INTPIN1R { let bits = { const MASK: u8 = 31; const OFFSET: u8 = 8; ((self.bits >> OFFSET) & MASK as u32) as u8 }; INTPIN1R { bits } 
} #[doc = "Bits 13:15 - Pin interrupt 1: Select the port for the pin number to be selected in the INTPIN1 bits of this register."] #[inline] pub fn portsel1(&self) -> PORTSEL1R { PORTSEL1R::_from({ const MASK: u8 = 7; const OFFSET: u8 = 13; ((self.bits >> OFFSET) & MASK as u32) as u8 }) } #[doc = "Bits 16:20 - Pint interrupt 2: Select the pin number within the GPIO port selected by the PORTSEL2 bit in this register."] #[inline] pub fn intpin2(&self) -> INTPIN2R { let bits = { const MASK: u8 = 31; const OFFSET: u8 = 16; ((self.bits >> OFFSET) & MASK as u32) as u8 }; INTPIN2R { bits } } #[doc = "Bits 21:23 - Pin interrupt 2: Select the port for the pin number to be selected in the INTPIN2 bits of this register."] #[inline] pub fn portsel2(&self) -> PORTSEL2R { PORTSEL2R::_from({ const MASK: u8 = 7; const OFFSET: u8 = 21; ((self.bits >> OFFSET) & MASK as u32) as u8 }) } #[doc = "Bits 24:28 - Pint interrupt 3: Select the pin number within the GPIO port selected by the PORTSEL3 bit in this register."] #[inline] pub fn intpin3(&self) -> INTPIN3R { let bits = { const MASK: u8 = 31; const OFFSET: u8 = 24; ((self.bits >> OFFSET) & MASK as u32) as u8 }; INTPIN3R { bits } } #[doc = "Bits 29:31 - Pin interrupt 3: Select the port for the pin number to be selected in the INTPIN3 bits of this register."] #[inline] pub fn portsel3(&self) -> PORTSEL3R { PORTSEL3R::_from({ const MASK: u8 = 7; const OFFSET: u8 = 29; ((self.bits >> OFFSET) & MASK as u32) as u8 }) } } impl W { #[doc = r" Reset value of the register"] #[inline] pub fn reset_value() -> W { W { bits: 0 } } #[doc = r" Writes raw bits to the register"] #[inline] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.bits = bits; self } #[doc = "Bits 0:4 - Pint interrupt 0: Select the pin number within the GPIO port selected by the PORTSEL0 bit in this register."] #[inline] pub fn intpin0(&mut self) -> _INTPIN0W { _INTPIN0W { w: self } } #[doc = "Bits 5:7 - Pin interrupt 0: Select the port for the pin number to be selected in the INTPIN0 bits of this register."] #[inline] pub fn portsel0(&mut self) -> _PORTSEL0W { _PORTSEL0W { w: self } } #[doc = "Bits 8:12 - Pint interrupt 1: Select the pin number within the GPIO port selected by the PORTSEL1 bit in this register."] #[inline] pub fn intpin1(&mut self) -> _INTPIN1W { _INTPIN1W { w: self } } #[doc = "Bits 13:15 - Pin interrupt 1: Select the port for the pin number to be selected in the INTPIN1 bits of this register."] #[inline] pub fn portsel1(&mut self) -> _PORTSEL1W { _PORTSEL1W { w: self } } #[doc = "Bits 16:20 - Pint interrupt 2: Select the pin number within the GPIO port selected by the PORTSEL2 bit in this register."] #[inline] pub fn intpin2(&mut self) -> _INTPIN2W { _INTPIN2W { w: self } } #[doc = "Bits 21:23 - Pin interrupt 2: Select the port for the pin number to be selected in the INTPIN2 bits of this register."] #[inline] pub fn portsel2(&mut self) -> _PORTSEL2W { _PORTSEL2W { w: self } } #[doc = "Bits 24:28 - Pint interrupt 3: Select the pin number within the GPIO port selected by the PORTSEL3 bit in this register."] #[inline] pub fn intpin3(&mut self) -> _INTPIN3W { _INTPIN3W { w: self } } #[doc = "Bits 29:31 - Pin interrupt 3: Select the port for the pin number to be selected in the INTPIN3 bits of this register."] #[inline] pub fn portsel3(&mut self) -> _PORTSEL3W { _PORTSEL3W { w: self } } }
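A minimal usage sketch of the generated closure API above (illustrative only; `pintsel0` is an assumed borrow of the PINTSEL0 register from its owning peripheral block, which is not shown in this file): route pin interrupt 0 to pin 5 of GPIO port 1, then read the selection back.

// Sketch, not part of the generated file: `pintsel0: &PINTSEL0` is a hypothetical handle.
// Select GPIO port 1 (PORTSEL0) and pin 5 (INTPIN0) for pin interrupt 0, leaving the
// slots for pin interrupts 1..=3 untouched; `bits` on INTPIN0 is unsafe as generated.
pintsel0.modify(|_, w| unsafe { w.portsel0().gpio_port_1().intpin0().bits(5) });
let r = pintsel0.read();
assert!(r.portsel0().is_gpio_port_1());
assert_eq!(r.intpin0().bits(), 5);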
true
161a56edb3db6f76bd0d42056e768fd9b162ee5d
Rust
zfzackfrost/string_studio
/src/generate/regex_gen.rs
UTF-8
6,496
2.609375
3
[ "MIT" ]
permissive
use rand::distributions::{Distribution, Uniform}; use rand::prelude::*; use regex_syntax::hir::{self, Hir, HirKind}; use regex_syntax::Parser; use std::iter::FromIterator; use encoding::all::UTF_8; use encoding::{DecoderTrap, EncoderTrap, Encoding}; const MAX_REPEAT: u32 = 100; struct RandomizeState<'a, R: Rng> { pub rng: &'a mut R, } fn randomize_alternation<R: Rng>( rstate: &mut RandomizeState<R>, mut exprs: Vec<Hir>, ) -> Result<String, ()> { exprs.shuffle(rstate.rng); if !exprs.is_empty() { randomize_for(rstate, exprs[0].kind().clone()) } else { Err(()) } } fn randomize_word_boundry<R: Rng>( _rstate: &mut RandomizeState<R>, _wb: hir::WordBoundary, ) -> Result<String, ()> { Ok(String::from(" ")) } fn randomize_anchor<R: Rng>( _rstate: &mut RandomizeState<R>, _anchor: hir::Anchor, ) -> Result<String, ()> { Ok(String::from("")) } fn randomize_group<R: Rng>( rstate: &mut RandomizeState<R>, group: hir::Group, ) -> Result<String, ()> { randomize_for(rstate, group.hir.kind().clone()) } fn randomize_literal<R: Rng>( _rstate: &mut RandomizeState<R>, literal: hir::Literal, ) -> Result<String, ()> { match literal { hir::Literal::Unicode(c) => Ok(String::from_iter([c].iter())), hir::Literal::Byte(_) => Err(()), } } fn randomize_concat<R: Rng>(rstate: &mut RandomizeState<R>, exprs: Vec<Hir>) -> Result<String, ()> { let mut s = String::new(); for e in &exprs { s += &randomize_for(rstate, e.kind().clone())?; } Ok(s) } fn repeat_exactly<R: Rng>(rstate: &mut RandomizeState<R>, h: Hir, n: u32) -> Result<String, ()> { let s = (0..n) .map(|_| randomize_for(rstate, h.kind().clone()).unwrap()) .collect::<Vec<String>>() .join(""); Ok(s) } fn repeat_at_least<R: Rng + RngCore>( rstate: &mut RandomizeState<R>, h: Hir, n: u32, ) -> Result<String, ()> { let dist = Uniform::from(n..MAX_REPEAT); let n = dist.sample(rstate.rng); let s = (0..n) .map(|_| randomize_for(rstate, h.kind().clone()).unwrap()) .collect::<Vec<String>>() .join(""); Ok(s) } fn repeat_bounded<R: Rng + RngCore>( rstate: &mut RandomizeState<R>, h: Hir, mn: u32, mx: u32, ) -> Result<String, ()> { let mx = mx + 1; let dist = Uniform::from(mn..mx); let n = dist.sample(rstate.rng); let s = (0..n) .map(|_| randomize_for(rstate, h.kind().clone()).unwrap()) .collect::<Vec<String>>() .join(""); Ok(s) } fn randomize_unicode_class<R: Rng + RngCore>( rstate: &mut RandomizeState<R>, cls: hir::ClassUnicode, ) -> Result<String, ()> { let mut chars: Vec<char> = Vec::new(); for r in cls.iter() { let s = r.start(); let e = r.end(); if let (Ok(s), Ok(e)) = ( UTF_8.encode(&String::from_iter([s].iter()), EncoderTrap::Strict), UTF_8.encode(&String::from_iter([e].iter()), EncoderTrap::Strict), ) { if s.len() > 0 && e.len() > 0 { let s = s[0]; let e = e[0] + 1; for byte in s..e { if let Ok(s) = UTF_8.decode(&[byte], DecoderTrap::Strict) { let c = s.chars().nth(0).unwrap(); chars.push(c); } } } } } Ok(String::from_iter(&[*chars.choose(rstate.rng).unwrap()])) } fn randomize_class<R: Rng + RngCore>( rstate: &mut RandomizeState<R>, cls: hir::Class, ) -> Result<String, ()> { match cls { hir::Class::Unicode(cls) => randomize_unicode_class(rstate, cls), _ => Err(()), } } fn randomize_repetition<R: Rng>( rstate: &mut RandomizeState<R>, rep: hir::Repetition, ) -> Result<String, ()> { let hir = rep.hir; match rep.kind { hir::RepetitionKind::ZeroOrOne => repeat_bounded(rstate, hir.as_ref().clone(), 0, 1), hir::RepetitionKind::ZeroOrMore => { repeat_bounded(rstate, hir.as_ref().clone(), 0, MAX_REPEAT) } hir::RepetitionKind::OneOrMore => { repeat_bounded(rstate, 
hir.as_ref().clone(), 1, MAX_REPEAT) } hir::RepetitionKind::Range(range) => match range { hir::RepetitionRange::Exactly(n) => repeat_exactly(rstate, hir.as_ref().clone(), n), hir::RepetitionRange::AtLeast(n) => repeat_at_least(rstate, hir.as_ref().clone(), n), hir::RepetitionRange::Bounded(mn, mx) => { repeat_bounded(rstate, hir.as_ref().clone(), mn, mx) } }, } } fn randomize_for<R: Rng>(rstate: &mut RandomizeState<R>, kind: HirKind) -> Result<String, ()> { match kind { HirKind::Alternation(exprs) => randomize_alternation(rstate, exprs), HirKind::Literal(lit) => randomize_literal(rstate, lit), HirKind::Concat(exprs) => randomize_concat(rstate, exprs), HirKind::Repetition(rep) => randomize_repetition(rstate, rep), HirKind::Group(grp) => randomize_group(rstate, grp), HirKind::Class(cls) => randomize_class(rstate, cls), HirKind::Anchor(a) => randomize_anchor(rstate, a), HirKind::WordBoundary(wb) => randomize_word_boundry(rstate, wb), _ => Err(()), } } pub struct RegexGen { hir: Hir, } impl RegexGen { pub fn new(pattern: &str) -> Option<Self> { if let Ok(hir) = Parser::new().parse(pattern) { Some(Self { hir }) } else { None } } pub fn kind(&self) -> &HirKind { self.hir.kind() } pub fn randomize(&self, rng: &mut impl Rng) -> Result<String, ()> { let mut rstate = RandomizeState { rng: rng }; randomize_for(&mut rstate, self.kind().clone()) } } #[cfg(test)] mod tests { use super::*; use rand_xoshiro::Xoshiro256StarStar; #[test] fn hir_randomize_test() { let mut rng = Xoshiro256StarStar::seed_from_u64(0); let gen = RegexGen::new("([a-zA-Z]){1,3}").unwrap(); if let Ok(s) = gen.randomize(&mut rng) { println!("{}", s); } } #[test] fn hir_parser_test() { let hir = Parser::new().parse("a|b").unwrap(); assert_eq!( hir, Hir::alternation(vec![ Hir::literal(hir::Literal::Unicode('a')), Hir::literal(hir::Literal::Unicode('b')), ]) ); } }
true
17931b343d778a5308fb58c83b860016ccc33223
Rust
garciparedes/google-hashcode-2021
/src/main.rs
UTF-8
6,015
3.28125
3
[]
no_license
use std::io::prelude::*; use std::io; use std::collections::{HashMap, HashSet}; fn main() -> io::Result<()> { let input = read_input()?; let mut solver = Solver::from_str(&input); // println!("{:?}", solver); let solution = solver.solve(); write_output(solution); return Ok(()); } #[derive(Debug)] struct Solver { max_duration: usize, bonus_points: usize, graph: HashMap<usize, HashSet<String>>, streets: HashMap<String, Street>, paths: HashSet<Path>, } impl Solver { fn from_str(input: &str) -> Self { let lines = input .trim() .split('\n') .collect::<Vec<_>>(); let header = lines[0].split_whitespace().map(|v| v.parse::<usize>().unwrap()).collect::<Vec<_>>(); let (d, _, s, v, f) = (header[0], header[1], header[2], header[3], header[4]); let mut streets = HashMap::new(); for k in 1..1 + s { let street = Street::from_str(lines[k]); streets.insert(street.name.clone(), street); } let mut graph = HashMap::new(); for street in streets.values() { graph.entry(street.to).or_insert_with(HashSet::new).insert(street.name.clone()); } let mut paths = HashSet::new(); for k in (1 + s)..(1 + s + v) { let path = Path::from_str(lines[k]); paths.insert(path); } return Self::new(d, f, streets, graph, paths); } fn new( max_duration: usize, bonus_points: usize, streets: HashMap<String, Street>, graph: HashMap<usize, HashSet<String>>, paths: HashSet<Path> ) -> Self { Self { max_duration: max_duration, bonus_points: bonus_points, streets: streets, graph: graph, paths: paths, } } fn solve(&mut self) -> Solution { let mut solution = Solution::new(); for path in &self.paths { let mut duration = 0; for street in &path.streets { duration += self.streets.get(street).unwrap().transit; self.streets.get_mut(street).unwrap().expected_visits.push(duration); self.streets.get_mut(street).unwrap().visits += 1; } } for (intersection_id, streets) in &self.graph { let mut streets: Vec<_> = streets .iter() .clone() .filter_map(|name| { let street = self.streets.get(name).unwrap(); if street.visits == 0 { return None; } return Some(street); }) .collect(); if streets.is_empty() { continue; } streets.sort_unstable_by_key(|street| cmp::Reverse(street.transit)); let mut incoming = Vec::new(); let mut cycle = 1; let mut last = streets[0].transit; for street in streets { if last > street.transit { last = street.transit; cycle += 1; } incoming.push((street.name.clone(), cycle)); } let intersection = Intersection::new(*intersection_id, incoming); solution.insert(intersection); } return solution; } } use std::cmp; #[derive(Debug)] struct Street { from: usize, to: usize, name: String, transit: usize, visits: usize, expected_visits: Vec<usize>, } impl Street { fn from_str(raw: &str) -> Self { let values: Vec<_> = raw.trim().split_whitespace().collect(); let from = values[0].parse::<usize>().unwrap(); let to = values[1].parse::<usize>().unwrap(); let name = String::from(values[2]); let transit = values[3].parse::<usize>().unwrap(); return Self::new(from, to, name, transit); } fn new(from: usize, to: usize, name: String, transit: usize) -> Self { Self { from: from, to: to, name: name, transit: transit, visits: 0, expected_visits: Vec::new()} } } #[derive(Debug, PartialEq, Eq, Hash)] struct Path { streets: Vec<String>, } impl Path { fn from_str(raw: &str) -> Self { let values: Vec<String> = raw.trim().split_whitespace().skip(1).map(String::from).collect(); return Self::new(values); } fn new(streets: Vec<String>) -> Self { Self { streets: streets } } } #[derive(Debug)] struct Solution { intersections: HashSet<Intersection>, } impl Solution { fn new() 
-> Self { Self { intersections: HashSet::new() } } fn insert(&mut self, intersection: Intersection) { self.intersections.insert(intersection); } fn to_string(&self) -> String { let mut ans = String::new(); ans.push_str(&self.intersections.len().to_string()); for intersection in &self.intersections { ans.push('\n'); ans.push_str(&intersection.to_string()); } return ans; } } #[derive(Debug, PartialEq, Eq, Hash)] struct Intersection { id: usize, incoming: Vec<(String, usize)> } impl Intersection { fn new(id: usize, incoming: Vec<(String, usize)>) -> Self { Self { id: id, incoming: incoming } } fn to_string(&self) -> String { let mut ans = String::new(); ans.push_str(&format!("{}\n", self.id)); ans.push_str(&self.incoming.len().to_string()); for item in &self.incoming { ans.push('\n'); ans.push_str(&format!("{} {}", item.0, item.1)); } return ans; } } fn read_input() -> io::Result<String> { let mut buffer = String::new(); io::stdin().read_to_string(&mut buffer)?; return Ok(buffer); } fn write_output(solution: Solution) { println!("{}", solution.to_string()); }
true
b97dde5b6f15364dbfc9169474aebd1212babb74
Rust
k124k3n/competitive-programming-answer
/codesignal/largestNumber.rs
UTF-8
167
3.1875
3
[ "MIT" ]
permissive
fn largestNumber(n: i32) -> i32 {
    if n == 0 {
        return n;
    }
    let mut out = 9;
    for _ in 1..n {
        out *= 10;
        out += 9;
    }
    out
}
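The loop above appends a trailing 9 per iteration, producing 9, 99, 999, and so on; equivalently, the largest n-digit number is 10^n - 1. A closed-form sketch (the helper name is made up, it assumes n >= 0, and it shares the original's i32 overflow limits for large n):

// Illustrative alternative, not from the source file.
fn largest_number_closed_form(n: i32) -> i32 {
    if n == 0 { 0 } else { 10_i32.pow(n as u32) - 1 }
}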
true
62aa2389e4b844e79b836993fb75f28eab00910d
Rust
mateuszptr/rust_dhcp
/src/main.rs
UTF-8
3,138
2.515625
3
[]
no_license
#![feature(int_to_from_bytes)]
extern crate byteorder;
extern crate bytes;
extern crate serde;
extern crate serde_json;
#[macro_use]
extern crate serde_derive;
extern crate actix;
#[macro_use]
extern crate actix_derive;
extern crate libc;
extern crate hwaddr;

mod dhcp_frames;
mod dhcp_options;
mod config;
mod server_actor;
mod io_actor;

use std::thread;
use std::fs::File;
use std::io::prelude::*;
use config::*;
use std::net::{UdpSocket, SocketAddr, IpAddr, Ipv4Addr};
use actix::prelude::*;
use io_actor::OutputActor;
use server_actor::ServerActor;
use dhcp_frames::DHCPPacket;
use std::os::unix::io::AsRawFd;
use std::ffi::CString;
use libc::c_void;

/// Rust's standard library wraps some of the setsockopt calls, but it does not expose
/// all of their functionality.
/// Because the broadcast IP address says nothing about which interface to use, the OS
/// would pick a default one on its own.
/// So we set the interface manually via the SO_BINDTODEVICE option.
unsafe fn set_socket_device(socket: &UdpSocket, iface: &str) {
    let fd = socket.as_raw_fd();
    let lvl = libc::SOL_SOCKET;
    let name = libc::SO_BINDTODEVICE;
    let val = CString::new(iface).unwrap();
    let pointer = val.as_ptr() as *const c_void;
    let len = val.as_bytes_with_nul().len();
    libc::setsockopt(fd, lvl, name, pointer, len as libc::socklen_t);
}

fn main() {
    let system = actix::System::new("dhcp");

    // Open the JSON configuration file and load it into the Config struct.
    let mut config_file = File::open("Config.json").expect("Couldn't open config file");
    let mut config_content = String::new();
    config_file.read_to_string(&mut config_content).expect("Couldn't read config file");
    let config = get_config(config_content);

    // Create a socket bound to 0.0.0.0 on port 67 (the standard DHCP server port),
    // attached to the interface given in the configuration, with broadcast enabled.
    let socket = UdpSocket::bind(SocketAddr::new(IpAddr::from(Ipv4Addr::from([0, 0, 0, 0])), 67))
        .expect("Couldn't bind a socket");
    unsafe {
        set_socket_device(&socket, config.interface.as_str());
    }
    socket.set_broadcast(true).expect("Couldn't set socket to bcast");
    let input_socket = socket.try_clone().expect("Couldn't clone the socket");

    // Actor responsible for sending messages out on the socket.
    let output_actor: Addr<Syn, _> = OutputActor::new(socket).start();
    // Actor implementing the DHCP server logic.
    let server_actor: Addr<Syn, _> = ServerActor::new(config, output_actor.clone()).start();

    // Spawn a thread that receives packets in the background (recv_from) and forwards
    // them to the server actor.
    let _input_thread_handle = thread::spawn(move || {
        loop {
            println!("Creating buffer");
            let mut buf = vec![0u8; 1024];
            let (_, addr) = input_socket.recv_from(&mut buf).unwrap();
            println!("Received frame from {}", addr);
            let packet = DHCPPacket::from_vec(buf).unwrap();
            server_actor.do_send(packet);
        }
    });

    // Start the actor system.
    system.run();
}
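One note on the helper above: the raw `libc::setsockopt` return value is discarded, so a failed SO_BINDTODEVICE (for example, a missing interface or insufficient privileges) goes unnoticed. A variant that surfaces the OS error (a sketch reusing the same imports as the file; the function name is hypothetical):

// Sketch only: same SO_BINDTODEVICE call, but errors are reported instead of ignored.
unsafe fn set_socket_device_checked(socket: &UdpSocket, iface: &str) -> std::io::Result<()> {
    let val = CString::new(iface).unwrap();
    let ret = libc::setsockopt(
        socket.as_raw_fd(),
        libc::SOL_SOCKET,
        libc::SO_BINDTODEVICE,
        val.as_ptr() as *const c_void,
        val.as_bytes_with_nul().len() as libc::socklen_t,
    );
    if ret == 0 {
        Ok(())
    } else {
        Err(std::io::Error::last_os_error())
    }
}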
true
dbe93392a7856f3749ee596cdc81e6a796745de4
Rust
prisma/prisma-engines
/query-engine/prisma-models/src/selection_result.rs
UTF-8
5,231
3.203125
3
[ "Apache-2.0" ]
permissive
use crate::{DomainError, FieldSelection, PrismaValue, ScalarFieldRef, SelectedField}; use itertools::Itertools; use std::convert::TryFrom; /// Represents a set of results. #[derive(Default, Clone, PartialEq, Eq, Hash)] pub struct SelectionResult { pub pairs: Vec<(SelectedField, PrismaValue)>, } impl std::fmt::Debug for SelectionResult { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_list() .entries( &self .pairs .iter() .map(|pair| (format!("{}", pair.0), pair.1.clone())) .collect_vec(), ) .finish() } } impl SelectionResult { pub fn new<T>(pairs: Vec<(T, PrismaValue)>) -> Self where T: Into<SelectedField>, { Self { pairs: pairs.into_iter().map(|(rt, value)| (rt.into(), value)).collect(), } } pub fn add<T>(&mut self, pair: (T, PrismaValue)) where T: Into<SelectedField>, { self.pairs.push((pair.0.into(), pair.1)); } pub fn get(&self, selection: &SelectedField) -> Option<&PrismaValue> { self.pairs.iter().find_map(|(result_selection, value)| { if selection == result_selection { Some(value) } else { None } }) } pub fn values(&self) -> impl Iterator<Item = PrismaValue> + '_ { self.pairs.iter().map(|p| p.1.clone()) } pub fn len(&self) -> usize { self.pairs.len() } pub fn is_empty(&self) -> bool { self.len() == 0 } pub fn db_names(&self) -> impl Iterator<Item = &str> + '_ { self.pairs.iter().map(|(field, _)| field.db_name()) } /// Consumes this `SelectionResult` and splits it into a set of `SelectionResult`s based on the passed /// `FieldSelection`s. Assumes that the transformation can be done. pub fn split_into(self, field_selections: &[FieldSelection]) -> Vec<SelectionResult> { field_selections .iter() .map(|field_selection| { let pairs: Vec<_> = field_selection .selections() .map(|selected_field| { self.get(selected_field) .map(|value| (selected_field.clone(), value.clone())) .expect("Error splitting `ReturnValues`: `FieldSelection` doesn't match.") }) .collect(); SelectionResult::new(pairs) }) .collect() } /// Checks if `self` only contains scalar field selections and if so, returns them all in a list. /// If any other selection is contained, returns `None`. pub fn as_scalar_fields(&self) -> Option<Vec<ScalarFieldRef>> { let scalar_fields: Vec<_> = self .pairs .iter() .filter_map(|(selection, _)| match selection { SelectedField::Scalar(sf) => Some(sf.clone()), SelectedField::Composite(_) => None, }) .collect(); if scalar_fields.len() == self.pairs.len() { Some(scalar_fields) } else { None } } /// Coerces contained values to best fit their type. /// - Scalar fields coerce values based on the TypeIdentifier. /// - Composite fields must be objects and contained fields must also follow the type coherence. 
pub fn coerce_values(self) -> crate::Result<Self> { let pairs = self .pairs .into_iter() .map(|(selection, value)| { let value = selection.coerce_value(value)?; Ok((selection, value)) }) .collect::<crate::Result<Vec<_>>>()?; Ok(Self { pairs }) } } impl TryFrom<SelectionResult> for PrismaValue { type Error = DomainError; fn try_from(return_values: SelectionResult) -> crate::Result<Self> { match return_values.pairs.into_iter().next() { Some(value) => Ok(value.1), None => Err(DomainError::ConversionFailure( "ReturnValues".into(), "PrismaValue".into(), )), } } } impl IntoIterator for SelectionResult { type Item = (SelectedField, PrismaValue); type IntoIter = std::vec::IntoIter<Self::Item>; fn into_iter(self) -> Self::IntoIter { self.pairs.into_iter() } } impl<T> From<(T, PrismaValue)> for SelectionResult where T: Into<SelectedField>, { fn from((x, value): (T, PrismaValue)) -> Self { Self::new(vec![(x.into(), value)]) } } impl<T> From<Vec<(T, PrismaValue)>> for SelectionResult where T: Into<SelectedField>, { fn from(tuples: Vec<(T, PrismaValue)>) -> Self { Self::new(tuples.into_iter().map(|(x, value)| (x.into(), value)).collect()) } } impl From<&FieldSelection> for SelectionResult { fn from(fs: &FieldSelection) -> Self { Self { pairs: fs .selections() .map(|selection| (selection.clone(), PrismaValue::Null)) .collect(), } } }
true
e399c03576c227ce4347ada70b8fb4314f443680
Rust
wa7sa34cx/WhyDoYou-bot
/src/utils/locale.rs
UTF-8
4,945
3.046875
3
[ "LicenseRef-scancode-unknown-license-reference", "MIT" ]
permissive
//! Localization helper use crate::models::error::HandlerError; use include_dir::{include_dir, Dir, File}; use lazy_static::lazy_static; use log::{error, info, warn}; use regex::{Captures, Regex}; use std::collections::HashMap; lazy_static! { /// Shared instance of Locale pub static ref TEXTS: Locale = Locale::parse().unwrap(); static ref ROW_REGEX: Regex = regex::Regex::new("\"([\\w]+)\" = \"([^\"]+)\";").unwrap(); } const LOCALE_DIR: Dir = include_dir!("assets/locale"); #[derive(Debug)] pub struct Locale { locales: Vec<LocaleFileMeta>, } impl Locale { fn parse() -> Result<Self, HandlerError> { let mut locales: Vec<LocaleFileMeta> = Vec::new(); for file in LOCALE_DIR.files() { if let Some(meta) = LocaleFileMeta::from(file) { locales.push(meta); } } let item = Self { locales }; item._test_keys()?; Ok(item) } /// Get localized string /// /// Parameters: /// - key: localization key /// - lang: language code (ex.: en, ru) /// /// Return: localized string or key pub fn get(&self, key: &str, lang: &str) -> String { let result = &self .locales .iter() .find(|l| l.lang.to_lowercase() == lang.to_lowercase()) .or_else(|| *{ &self.locales.iter().find(|l| l.is_base) }) .and_then(|l| l.data.get(key).to_owned().and_then(|s| Some(s.as_str()))) .unwrap_or(key); return result.to_string(); } fn _test_keys(&self) -> Result<(), HandlerError> { let mut is_error = false; for locale in &self.locales { &self.locales.iter().for_each(|l| { if l.lang == locale.lang { return; }; locale.data.iter().for_each(|a| { if !l.data.contains_key(a.0) { if l.is_base { is_error = true; error!( "{} lang not contain '{}' key which is in {} lang", l.title(), a.0, locale.title() ); } else { warn!( "{} lang not contain '{}' key which is in {} lang", l.title(), a.0, locale.title() ); } } }) }); } if is_error { Err(HandlerError::from_str("Locales has errors.")) } else { Ok(()) } } } #[derive(Debug)] struct LocaleFileMeta { lang: String, is_base: bool, data: HashMap<String, String>, } impl LocaleFileMeta { fn from(file: &File) -> Option<Self> { if let Some(os_str_name) = file.path().file_name() { if let Some(raw_str) = os_str_name.to_str() { let components = raw_str.split(".").collect::<Vec<&str>>(); if components.len() > 3 || components.len() < 2 { return None; } if components.last().unwrap().to_lowercase() != String::from("locale") { return None; } if let Some(content) = file.contents_utf8() { let f_content = content .split("\n") .filter(|row| !row.starts_with("//") || !row.is_empty()) .collect::<Vec<&str>>() .join("\n"); let mut data: HashMap<String, String> = HashMap::new(); for row in ROW_REGEX .captures_iter(&*f_content) .collect::<Vec<Captures>>() { let key = row.get(1).unwrap().as_str().parse().unwrap(); let value = row.get(2).unwrap().as_str().parse().unwrap(); data.insert(key, value); } let item = LocaleFileMeta { lang: components.get(0).unwrap().parse().unwrap(), is_base: components.len() == 3 && components.get(1).unwrap().to_lowercase() == String::from("base"), data, }; info!("{} language found & loaded.", item.title()); return Some(item); } } } None } fn title(&self) -> String { let mut result = String::from(&self.lang.clone()); if self.is_base { result.push_str("*"); } result.to_string().to_uppercase() } }
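For reference, `LocaleFileMeta::from` above accepts files named `<lang>.locale` or `<lang>.base.locale` under assets/locale, and `ROW_REGEX` matches `.strings`-style rows. A hypothetical `en.base.locale` (keys invented for illustration) would look like:

// comment lines are expected to start with //
"greeting" = "Hello!";
"farewell" = "See you soon";

A lookup such as `TEXTS.get("greeting", "de")` then falls back to the base locale when the requested language is absent, and to the key itself when the key is missing. (Note that the comment/empty-line filter in `from` combines its two conditions with `||` where `&&` appears to have been intended, so in practice it is `ROW_REGEX` that does the filtering.)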
true
6dd8177bfc0f4b5bf78e749af2e13ba80cebe258
Rust
russelltg/srt-rs
/srt-protocol/src/packet/msg_number.rs
UTF-8
229
2.625
3
[ "Apache-2.0" ]
permissive
use super::modular_num::modular_num;

modular_num! {
    pub MsgNumber(u32, 26)
}

impl MsgNumber {
    #[must_use]
    pub fn increment(&mut self) -> Self {
        let result = *self;
        *self += 1;
        result
    }
}
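Note that `increment` has post-increment semantics: it returns the current message number and only then advances the counter, wrapping according to the `modular_num!` declaration above (presumably a 26-bit serial number stored in a u32).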
true
bbca1674e59b8a03bc3683d625ef4f58f01db247
Rust
ajm188/advent_of_code
/2015/day01/main.rs
UTF-8
1,158
3.359375
3
[ "MIT" ]
permissive
use std::env::args;

struct Santa {
    position: i32,
    first_time_in_basement: i32,
    num_movements: i32,
}

impl Santa {
    fn has_been_in_basement(&self) -> bool {
        self.first_time_in_basement >= 0
    }

    fn from_santa(santa: Santa, movement: i32) -> Santa {
        let pos = santa.position + movement;
        let movements = santa.num_movements + 1;
        let basement = if santa.has_been_in_basement() || pos >= 0 {
            santa.first_time_in_basement
        } else {
            movements
        };
        Santa {
            position: pos,
            first_time_in_basement: basement,
            num_movements: movements,
        }
    }
}

fn main() {
    let instructions = match args().nth(1) {
        Some(v) => v,
        None => "".to_string(),
    };
    let santa = Santa {
        position: 0,
        first_time_in_basement: -1,
        num_movements: 0,
    };
    let last_santa: Santa = instructions
        .chars()
        .map(|c: char| if c == '(' { 1 } else { -1 })
        .fold(santa, |s, i| Santa::from_santa(s, i));
    println!("{} {}", last_santa.position, last_santa.first_time_in_basement);
}
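As a worked example, for the argument "()())" the running position is 1, 0, 1, 0, -1, so the program prints "-1 5": Santa ends one floor below ground and first enters the basement on the fifth instruction.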
true
16e7f44ffefec6125de2b0621a53a7dd1e33864a
Rust
flanfly/rust-pottcpp
/numortxt.rs
UTF-8
480
3.953125
4
[ "MIT" ]
permissive
enum NumberOrText {
    Number(i32),
    Text(String),
}

fn print_number_or_text(nt: NumberOrText) {
    match nt {
        NumberOrText::Number(i) => println!("Number: {}", i),
        NumberOrText::Text(t) => println!("Text: {}", t),
    }
}

fn main() {
    let a: NumberOrText = NumberOrText::Number(42);
    let b: NumberOrText = NumberOrText::Text("Hello, World".to_string());
    // Prints "Number: 42"
    print_number_or_text(a);
    // Prints "Text: Hello, World"
    print_number_or_text(b);
}
true
ef3cdeae12f65e6bfe91cbba96412998279f48d7
Rust
AurelienAubry/lc3-vm
/src/instructions/not.rs
UTF-8
2,267
3.5
4
[]
no_license
use crate::bus::Bus;
use crate::cpu::{register_from_u16, Register, Registers};
use crate::instructions::Instruction;
use anyhow::Result;

pub struct Not {
    dst_reg: Register,
    src_reg: Register,
}

impl Not {
    pub fn new(instruction: u16) -> Result<Self> {
        let dst_reg = register_from_u16(instruction >> 9 & 0x7)?;
        let src_reg = register_from_u16(instruction >> 6 & 0x7)?;
        Ok(Self { dst_reg, src_reg })
    }
}

impl Instruction for Not {
    fn run(&self, registers: &mut Registers, _bus: &mut Bus) -> Result<()> {
        registers.write_register(self.dst_reg, !registers.read_register(self.src_reg));
        registers.update_flags(self.dst_reg);
        Ok(())
    }

    fn to_str(&self) -> String {
        format!("NOT {:?}, {:?}", self.dst_reg, self.src_reg,)
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::bus::Bus;
    use crate::cpu::Flag;
    use crate::instructions::decode;

    #[test]
    fn test_run() {
        let mut reg = Registers::new();
        let mut bus = Bus::new().unwrap();

        // ZRO FLAG
        reg.write_register(Register::R0, 0b1111_1111_1111_1111);
        let instruction = decode(0b1001_001_000_1_11111).unwrap();
        instruction.run(&mut reg, &mut bus).unwrap();
        assert_eq!(reg.read_register(Register::R1), 0);
        assert_eq!(reg.read_register(Register::COND), Flag::Zro as u16);

        // POS FLAG
        reg.write_register(Register::R0, 0b1000_1111_1111_1111);
        let instruction = decode(0b1001_001_000_1_11111).unwrap();
        instruction.run(&mut reg, &mut bus).unwrap();
        assert_eq!(reg.read_register(Register::R1), 0b0111_0000_0000_0000);
        assert_eq!(reg.read_register(Register::COND), Flag::Pos as u16);

        // NEG FLAG
        reg.write_register(Register::R0, 0b0111_1010_1010_1010);
        let instruction = decode(0b1001_001_000_1_11111).unwrap();
        instruction.run(&mut reg, &mut bus).unwrap();
        assert_eq!(reg.read_register(Register::R1), 0b1000_0101_0101_0101);
        assert_eq!(reg.read_register(Register::COND), Flag::Neg as u16);
    }

    #[test]
    fn test_to_str() {
        let inst = decode(0b1001_001_000_1_11111).unwrap();
        assert_eq!(inst.to_str(), "NOT R1, R0");
    }
}
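For reference, the LC-3 NOT instruction packs opcode 1001 into bits 15:12, the destination register into bits 11:9, the source register into bits 8:6, and sets bits 5:0 to one; hence the `instruction >> 9 & 0x7` and `instruction >> 6 & 0x7` extractions in the constructor, and the test word `0b1001_001_000_1_11111` decoding to `NOT R1, R0`.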
true
6ae9a645c26ac86e6b447a7bedc95240071b3be4
Rust
rjloura/proxy-rs
/src/utils.rs
UTF-8
1,207
3.3125
3
[ "MIT" ]
permissive
use chrono::prelude::*;

const DEFAULT_UNIT: f64 = 1024_f64;
const SUFFIX: &[&str] = &["k", "M", "G", "T", "P", "E"];

/// Takes the number of bytes and converts it to a human readable string
pub fn pretty_bytes(b: u64) -> String {
    let b = b as f64;
    if b < DEFAULT_UNIT {
        return format!("{:.0} B", b);
    }
    let idx = (b.log10() / DEFAULT_UNIT.log10()) as usize;
    let b = b / DEFAULT_UNIT.powi(idx as i32);
    let suffix = SUFFIX[idx.wrapping_sub(1)];
    format!("{:.1} {}B", b, suffix)
}

pub fn log<S: AsRef<str>>(message: S) {
    let dt: DateTime<Local> = Local::now();
    println!(
        "{} {}",
        dt.format("%Y-%m-%d %H:%M:%S").to_string(),
        message.as_ref()
    );
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn pretty_bytes_test() {
        assert_eq!("1 B", pretty_bytes(1));
        assert_eq!("1.0 kB", pretty_bytes(1024));
        assert_eq!("1.0 MB", pretty_bytes(1024u64.pow(2)));
        assert_eq!("1.0 GB", pretty_bytes(1024u64.pow(3)));
        assert_eq!("1.0 TB", pretty_bytes(1024u64.pow(4)));
        assert_eq!("1.0 PB", pretty_bytes(1024u64.pow(5)));
        assert_eq!("1.0 EB", pretty_bytes(1024u64.pow(6)));
    }
}
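As a quick check of the scaling logic: for b = 1,536, idx is floor(log10(1536) / log10(1024)) = 1, so the value becomes 1536 / 1024 = 1.5 and the function returns "1.5 kB".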
true
d796f16109e37e30ecb4c6dd6bc3dbc71133b2d3
Rust
BurntSushi/rust-sorts
/src/lib.rs
UTF-8
9,148
2.90625
3
[ "Unlicense" ]
permissive
#![crate_id = "sorts#0.1.0"] #![crate_type = "lib"] #![license = "UNLICENSE"] #![doc(html_root_url = "http://burntsushi.net/rustdoc/rust-sorts")] #![feature(phase)] #![feature(macro_rules)] //! A collection of sorting algorithms with tests and benchmarks. #[phase(syntax, link)] extern crate log; extern crate stdtest = "test"; extern crate quickcheck; extern crate rand; use rand::Rng; // why do I need this? #[cfg(test)] mod bench; #[cfg(test)] mod test; pub static INSERTION_THRESHOLD: uint = 16; /// The `bogo` sort is the simplest but worst sorting algorithm. It shuffles /// the given input until it is sorted. Its worst case space complexity is /// `O(n)` but its time complexity is *unbounded*. pub fn bogo<T: TotalOrd>(xs: &mut [T]) { fn is_sorted<T: TotalOrd>(xs: &[T]) -> bool { for win in xs.windows(2) { if win[0] > win[1] { return false } } true } let rng = &mut rand::task_rng(); while !is_sorted(xs) { rng.shuffle_mut(xs); } } /// Classic in place insertion sort. Worst case time complexity is `O(n^2)`. pub fn insertion<T: TotalOrd>(xs: &mut [T]) { let (mut i, len) = (1, xs.len()); while i < len { let mut j = i; while j > 0 && xs[j-1] > xs[j] { xs.swap(j, j-1); j = j - 1; } i = i + 1; } } /// Classic in place bubble sort. Worst case time complexity is `O(n^2)`. pub fn bubble<T: TotalOrd>(xs: &mut [T]) { let mut n = xs.len(); while n > 0 { let mut newn = 0; let mut i = 1; while i < n { if xs[i-1] > xs[i] { xs.swap(i-1, i); newn = i; } i = i + 1; } n = newn; } } /// Classic in place selection sort. Worst case time complexity is `O(n^2)`. /// Note that this is an *unstable* implementation. pub fn selection<T: TotalOrd>(xs: &mut [T]) { let (mut i, len) = (0, xs.len()); while i < len { let (mut j, mut cur_min) = (i + 1, i); while j < len { if xs[j] < xs[cur_min] { cur_min = j; } j = j + 1; } xs.swap(i, cur_min); i = i + 1; } } pub mod quick { use super::INSERTION_THRESHOLD; /// Standard in-place quicksort that always uses the first element as /// a pivot. Average time complexity is `O(nlogn)` and its space complexity /// is `O(1)` (limited to vectors of size `N`, which is the maximum number /// expressible with a `uint`). pub fn dumb<T: TotalOrd>(xs: &mut [T]) { fn pivot<T: TotalOrd>(_: &[T]) -> uint { 0 } qsort(xs, pivot) } /// Standard in-place quicksort that uses the median of the first, middle /// and last elements in each vector for the pivot. /// Average time complexity is `O(nlogn)` and its space complexity /// is `O(1)` (limited to vectors of size `N`, which is the maximum number /// expressible with a `uint`). /// /// This seems to have the same performance characteristics as the `dumb` /// quicksort, except when the input is almost sorted where intelligently /// choosing a pivot helps by at least an order of magnitude. (This is /// because an almost-sorted vector given to the `dumb` quicksort provokes /// worse case `O(n^2)` performance, whereas picking a pivot intelligently /// helps keep it closer to the average `O(nlogn)` performance.) 
pub fn smart<T: TotalOrd>(xs: &mut [T]) { qsort(xs, smart_pivot) } pub fn insertion<T: TotalOrd>(xs: &mut [T]) { if xs.len() <= 1 { return } let p = smart_pivot(xs); let p = partition(xs, p); if p <= INSERTION_THRESHOLD { super::insertion(xs.mut_slice_to(p)) } else { qsort(xs.mut_slice_to(p), smart_pivot); } if xs.len() - p+1 <= INSERTION_THRESHOLD { super::insertion(xs.mut_slice_from(p+1)) } else { qsort(xs.mut_slice_from(p+1), smart_pivot); } } fn qsort<T: TotalOrd>(xs: &mut [T], pivot: fn(&[T]) -> uint) { if xs.len() <= 1 { return } let p = pivot(xs); let p = partition(xs, p); qsort(xs.mut_slice_to(p), pivot); qsort(xs.mut_slice_from(p+1), pivot); } fn partition<T: TotalOrd>(xs: &mut [T], p: uint) -> uint { if xs.len() <= 1 { return p } let lasti = xs.len() - 1; let (mut i, mut nextp) = (0, 0); xs.swap(lasti, p); while i < lasti { if xs[i] <= xs[lasti] { xs.swap(i, nextp); nextp = nextp + 1; } i = i + 1; } xs.swap(nextp, lasti); nextp } fn smart_pivot<T: TotalOrd>(xs: &[T]) -> uint { let (l, r) = (0, xs.len() - 1); let m = l + ((r - l) / 2); let (left, middle, right) = (&xs[l], &xs[m], &xs[r]); if middle >= left && middle <= right { m } else if left >= middle && left <= right { l } else { r } } } pub mod heap { pub fn up<T: TotalOrd>(xs: &mut [T]) { sort(xs, heapify_up); } pub fn down<T: TotalOrd>(xs: &mut [T]) { sort(xs, heapify_down); } fn sort<T: TotalOrd>(xs: &mut [T], heapify: fn(&mut [T])) { if xs.len() <= 1 { return } heapify(xs); let mut end = xs.len() - 1; while end > 0 { xs.swap(end, 0); end = end - 1; sift_down(xs, 0, end); } } fn heapify_down<T: TotalOrd>(xs: &mut [T]) { let last = xs.len() - 1; let mut start = 1 + ((last - 1) / 2); while start > 0 { start = start - 1; sift_down(xs, start, last); } } fn sift_down<T: TotalOrd>(xs: &mut [T], start: uint, end: uint) { let mut root = start; while root * 2 + 1 <= end { let child = root * 2 + 1; let mut swap = root; if xs[swap] < xs[child] { swap = child } if child + 1 <= end && xs[swap] < xs[child+1] { swap = child + 1 } if swap == root { return } xs.swap(root, swap); root = swap; } } fn heapify_up<T: TotalOrd>(xs: &mut [T]) { let mut end = 1; while end < xs.len() { sift_up(xs, 0, end); end = end + 1; } } fn sift_up<T: TotalOrd>(xs: &mut [T], start: uint, end: uint) { let mut child = end; while child > start { let parent = (child - 1) / 2; if xs[parent] >= xs[child] { return } xs.swap(parent, child); child = parent; } } } pub mod merge { use std::cmp::min; use std::slice::MutableCloneableVector; use super::INSERTION_THRESHOLD; /// A stable mergesort with worst case `O(nlogn)` performance. This /// particular implementation has `O(n)` complexity. Unfortunately, the /// constant factor is pretty high. /// /// (See Rust's standard library `sort` function for a better mergesort /// which uses unsafe, I think.) 
pub fn sort<T: TotalOrd + Clone>(xs: &mut [T]) { let (len, mut width) = (xs.len(), 1); let mut buf = xs.to_owned(); while width < len { let mut start = 0; while start < len { let mid = min(len, start + width); let end = min(len, start + 2 * width); merge(xs, buf, start, mid, end); start = start + 2 * width; } width = width * 2; xs.copy_from(buf); } } pub fn insertion<T: TotalOrd + Clone>(xs: &mut [T]) { let (len, mut width) = (xs.len(), INSERTION_THRESHOLD); let mut i = 0; while i < len { let upto = min(len, i + INSERTION_THRESHOLD); super::insertion(xs.mut_slice(i, upto)); i = i + INSERTION_THRESHOLD; } let mut buf = xs.to_owned(); while width < len { let mut start = 0; while start < len { let mid = min(len, start + width); let end = min(len, start + 2 * width); merge(xs, buf, start, mid, end); start = start + 2 * width; } width = width * 2; xs.copy_from(buf); } } fn merge<T: TotalOrd + Clone> (xs: &mut [T], buf: &mut [T], l: uint, r: uint, e: uint) { let (mut il, mut ir) = (l, r); let mut i = l; while i < e { if il < r && (ir >= e || xs[il] <= xs[ir]) { buf[i] = xs[il].clone(); il = il + 1; } else { buf[i] = xs[ir].clone(); ir = ir + 1; } i = i + 1; } } }
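The crate above targets pre-1.0 Rust (`uint`, `TotalOrd`, `crate_id`), so it no longer compiles as written. Purely as an illustration of the pivot strategy described in the `quick::smart` comments, the median-of-three selection looks like this in current Rust (a sketch, not part of the crate):

// Median of the first, middle and last elements; callers ensure xs.len() >= 2.
fn smart_pivot<T: Ord>(xs: &[T]) -> usize {
    let (l, r) = (0, xs.len() - 1);
    let m = l + (r - l) / 2;
    let (left, middle, right) = (&xs[l], &xs[m], &xs[r]);
    if middle >= left && middle <= right {
        m
    } else if left >= middle && left <= right {
        l
    } else {
        r
    }
}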
true
69824be9d4a6657364d2f9835d88edf36306243b
Rust
dalance/nom-greedyerror
/examples/nom7.rs
UTF-8
1,122
2.8125
3
[ "LicenseRef-scancode-unknown-license-reference", "Apache-2.0", "MIT" ]
permissive
use nom7::branch::alt;
use nom7::character::complete::{alpha1, digit1};
use nom7::error::{ErrorKind, ParseError, VerboseError};
use nom7::sequence::tuple;
use nom7::Err::Error;
use nom7::IResult;
use nom_greedyerror::{error_position, GreedyError, Position};
use nom_locate4::LocatedSpan;

type Span<'a> = LocatedSpan<&'a str>;

fn parser<'a, E: ParseError<Span<'a>>>(
    input: Span<'a>,
) -> IResult<Span<'a>, (Span<'a>, Span<'a>, Span<'a>), E> {
    alt((
        tuple((alpha1, digit1, alpha1)),
        tuple((digit1, alpha1, digit1)),
    ))(input)
}

fn main() {
    // VerboseError failed at
    //   abc012:::
    //   ^
    let error = parser::<VerboseError<Span>>(Span::new("abc012:::"));
    dbg!(&error);
    match error {
        Err(Error(e)) => assert_eq!(e.errors.first().map(|x| x.0.position()), Some(0)),
        _ => (),
    };

    // GreedyError failed at
    //   abc012:::
    //         ^
    let error = parser::<GreedyError<Span, ErrorKind>>(Span::new("abc012:::"));
    dbg!(&error);
    match error {
        Err(Error(e)) => assert_eq!(error_position(&e), Some(6)),
        _ => (),
    };
}
true
a96a37f9547b1bf69e973f22f148f22517062db9
Rust
Tyler-Zhang/words-with-coworkers
/words-game/src/error.rs
UTF-8
1,025
3.1875
3
[ "Apache-2.0" ]
permissive
use std::error;
use std::fmt;

#[derive(Debug)]
pub enum Error {
    BadAction(String),
    NotEnoughTiles,
    InvalidWord(String),
    StartingTileNotCovered,
    WordDoesNotIntersect,
    NoLettersUsed,
}

impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            Error::BadAction(ref err) => write!(f, "Bad Action error: {}", err),
            Error::NotEnoughTiles => write!(f, "Not enough tiles"),
            Error::InvalidWord(ref word) => write!(f, "Word <{}> not in the dictionary", word),
            Error::StartingTileNotCovered => write!(f, "Starting tile needs to be covered"),
            Error::WordDoesNotIntersect => write!(f, "Word does not intersect with another word"),
            Error::NoLettersUsed => write!(f, "You must use at least one letter"),
        }
    }
}

impl error::Error for Error {
    fn cause(&self) -> Option<&dyn error::Error> {
        Some(self)
    }
}

pub type Result<T> = std::result::Result<T, Box<Error>>;
true
f991c37a6fb8194d58418eae04de737454fc9698
Rust
tiredhaydn/project_euler
/src/bin/problem009/main.rs
UTF-8
343
2.953125
3
[]
no_license
fn main() {
    let mut answer = 0;
    for a in 3..=998 {
        for b in 4..=997 {
            let c = 1000 - a - b;
            if a * a + b * b == c * c {
                let abc = a * b * c;
                if abc > answer {
                    answer = abc;
                }
            }
        }
    }
    println!("{}", answer);
}
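For the record, the search settles on a = 200, b = 375, c = 425 (200^2 + 375^2 = 180,625 = 425^2, and the sides sum to 1,000), so the printed product is 31,875,000.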
true
132a4d55730bbebc5b9c22282d60101b60f86f0c
Rust
liang610/flux
/libflux/src/flux/semantic/convert.rs
UTF-8
118,059
2.828125
3
[ "MIT" ]
permissive
use crate::ast; use crate::semantic::fresh::Fresher; use crate::semantic::nodes::*; use crate::semantic::types::MonoType; use std::result; pub type SemanticError = String; pub type Result<T> = result::Result<T, SemanticError>; /// convert_with converts an AST package node to its semantic representation using /// the provided fresher. /// /// Note: most external callers of this function will want to use the analyze() /// function in the libstd crate instead, which is aware of everything in the Flux stdlib and prelude. /// /// The function explicitly moves the ast::Package because it adds information to it. /// We follow here the principle that every compilation step should be isolated and should add meaning /// to the previous one. In other terms, once one converts an AST he should not use it anymore. /// If one wants to do so, he should explicitly pkg.clone() and incur consciously in the memory /// overhead involved. pub fn convert_with(pkg: ast::Package, fresher: &mut Fresher) -> Result<Package> { convert_package(pkg, fresher) // TODO(affo): run checks on the semantic graph. } fn convert_package(pkg: ast::Package, fresher: &mut Fresher) -> Result<Package> { let files = pkg .files .into_iter() .map(|f| convert_file(f, fresher)) .collect::<Result<Vec<File>>>()?; Ok(Package { loc: pkg.base.location, package: pkg.package, files, }) } pub fn convert_file(file: ast::File, fresher: &mut Fresher) -> Result<File> { let package = convert_package_clause(file.package, fresher)?; let imports = file .imports .into_iter() .map(|i| convert_import_declaration(i, fresher)) .collect::<Result<Vec<ImportDeclaration>>>()?; let body = file .body .into_iter() .map(|s| convert_statement(s, fresher)) .collect::<Result<Vec<Statement>>>()?; Ok(File { loc: file.base.location, package, imports, body, }) } fn convert_package_clause( pkg: Option<ast::PackageClause>, fresher: &mut Fresher, ) -> Result<Option<PackageClause>> { if pkg.is_none() { return Ok(None); } let pkg = pkg.unwrap(); let name = convert_identifier(pkg.name, fresher)?; Ok(Some(PackageClause { loc: pkg.base.location, name, })) } fn convert_import_declaration( imp: ast::ImportDeclaration, fresher: &mut Fresher, ) -> Result<ImportDeclaration> { let alias = match imp.alias { None => None, Some(id) => Some(convert_identifier(id, fresher)?), }; let path = convert_string_literal(imp.path, fresher)?; Ok(ImportDeclaration { loc: imp.base.location, alias, path, }) } fn convert_statement(stmt: ast::Statement, fresher: &mut Fresher) -> Result<Statement> { match stmt { ast::Statement::Option(s) => Ok(Statement::Option(Box::new(convert_option_statement( *s, fresher, )?))), ast::Statement::Builtin(s) => { Ok(Statement::Builtin(convert_builtin_statement(*s, fresher)?)) } ast::Statement::Test(s) => Ok(Statement::Test(Box::new(convert_test_statement( *s, fresher, )?))), ast::Statement::Expr(s) => Ok(Statement::Expr(convert_expression_statement(*s, fresher)?)), ast::Statement::Return(s) => Ok(Statement::Return(convert_return_statement(*s, fresher)?)), // TODO(affo): we should fix this to include MemberAssignement. // The error lies in AST: the Statement enum does not include that. // This is not a problem when parsing, because we parse it only in the option assignment case, // and we return an OptionStmt, which is a Statement. 
ast::Statement::Variable(s) => Ok(Statement::Variable(Box::new( convert_variable_assignment(*s, fresher)?, ))), ast::Statement::Bad(_) => { Err("BadStatement is not supported in semantic analysis".to_string()) } } } fn convert_assignment(assign: ast::Assignment, fresher: &mut Fresher) -> Result<Assignment> { match assign { ast::Assignment::Variable(a) => Ok(Assignment::Variable(convert_variable_assignment( *a, fresher, )?)), ast::Assignment::Member(a) => { Ok(Assignment::Member(convert_member_assignment(*a, fresher)?)) } } } fn convert_option_statement(stmt: ast::OptionStmt, fresher: &mut Fresher) -> Result<OptionStmt> { Ok(OptionStmt { loc: stmt.base.location, assignment: convert_assignment(stmt.assignment, fresher)?, }) } fn convert_builtin_statement(stmt: ast::BuiltinStmt, fresher: &mut Fresher) -> Result<BuiltinStmt> { Ok(BuiltinStmt { loc: stmt.base.location, id: convert_identifier(stmt.id, fresher)?, }) } fn convert_test_statement(stmt: ast::TestStmt, fresher: &mut Fresher) -> Result<TestStmt> { Ok(TestStmt { loc: stmt.base.location, assignment: convert_variable_assignment(stmt.assignment, fresher)?, }) } fn convert_expression_statement(stmt: ast::ExprStmt, fresher: &mut Fresher) -> Result<ExprStmt> { Ok(ExprStmt { loc: stmt.base.location, expression: convert_expression(stmt.expression, fresher)?, }) } fn convert_return_statement(stmt: ast::ReturnStmt, fresher: &mut Fresher) -> Result<ReturnStmt> { Ok(ReturnStmt { loc: stmt.base.location, argument: convert_expression(stmt.argument, fresher)?, }) } fn convert_variable_assignment( stmt: ast::VariableAssgn, fresher: &mut Fresher, ) -> Result<VariableAssgn> { Ok(VariableAssgn::new( convert_identifier(stmt.id, fresher)?, convert_expression(stmt.init, fresher)?, stmt.base.location, )) } fn convert_member_assignment(stmt: ast::MemberAssgn, fresher: &mut Fresher) -> Result<MemberAssgn> { Ok(MemberAssgn { loc: stmt.base.location, member: convert_member_expression(stmt.member, fresher)?, init: convert_expression(stmt.init, fresher)?, }) } fn convert_expression(expr: ast::Expression, fresher: &mut Fresher) -> Result<Expression> { match expr { ast::Expression::Function(expr) => Ok(Expression::Function(Box::new(convert_function_expression(*expr, fresher)?))), ast::Expression::Call(expr) => Ok(Expression::Call(Box::new(convert_call_expression(*expr, fresher)?))), ast::Expression::Member(expr) => Ok(Expression::Member(Box::new(convert_member_expression(*expr, fresher)?))), ast::Expression::Index(expr) => Ok(Expression::Index(Box::new(convert_index_expression(*expr, fresher)?))), ast::Expression::PipeExpr(expr) => Ok(Expression::Call(Box::new(convert_pipe_expression(*expr, fresher)?))), ast::Expression::Binary(expr) => Ok(Expression::Binary(Box::new(convert_binary_expression(*expr, fresher)?))), ast::Expression::Unary(expr) => Ok(Expression::Unary(Box::new(convert_unary_expression(*expr, fresher)?))), ast::Expression::Logical(expr) => Ok(Expression::Logical(Box::new(convert_logical_expression(*expr, fresher)?))), ast::Expression::Conditional(expr) => Ok(Expression::Conditional(Box::new(convert_conditional_expression(*expr, fresher)?))), ast::Expression::Object(expr) => Ok(Expression::Object(Box::new(convert_object_expression(*expr, fresher)?))), ast::Expression::Array(expr) => Ok(Expression::Array(Box::new(convert_array_expression(*expr, fresher)?))), ast::Expression::Identifier(expr) => Ok(Expression::Identifier(convert_identifier_expression(expr, fresher)?)), ast::Expression::StringExpr(expr) => 
Ok(Expression::StringExpr(Box::new(convert_string_expression(*expr, fresher)?))), ast::Expression::Paren(expr) => convert_expression(expr.expression, fresher), ast::Expression::StringLit(lit) => Ok(Expression::StringLit(convert_string_literal(lit, fresher)?)), ast::Expression::Boolean(lit) => Ok(Expression::Boolean(convert_boolean_literal(lit, fresher)?)), ast::Expression::Float(lit) => Ok(Expression::Float(convert_float_literal(lit, fresher)?)), ast::Expression::Integer(lit) => Ok(Expression::Integer(convert_integer_literal(lit, fresher)?)), ast::Expression::Uint(lit) => Ok(Expression::Uint(convert_unsigned_integer_literal(lit, fresher)?)), ast::Expression::Regexp(lit) => Ok(Expression::Regexp(convert_regexp_literal(lit, fresher)?)), ast::Expression::Duration(lit) => Ok(Expression::Duration(convert_duration_literal(lit, fresher)?)), ast::Expression::DateTime(lit) => Ok(Expression::DateTime(convert_date_time_literal(lit, fresher)?)), ast::Expression::PipeLit(_) => Err("a pipe literal may only be used as a default value for an argument in a function definition".to_string()), ast::Expression::Bad(_) => Err("BadExpression is not supported in semantic analysis".to_string()) } } fn convert_function_expression( expr: ast::FunctionExpr, fresher: &mut Fresher, ) -> Result<FunctionExpr> { let params = convert_function_params(expr.params, fresher)?; let body = convert_function_body(expr.body, fresher)?; Ok(FunctionExpr { loc: expr.base.location, typ: MonoType::Var(fresher.fresh()), params, body, }) } fn convert_function_params( props: Vec<ast::Property>, fresher: &mut Fresher, ) -> Result<Vec<FunctionParameter>> { // The iteration here is complex, cannot use iter().map()..., better to write it explicitly. let mut params: Vec<FunctionParameter> = Vec::new(); let mut piped = false; for prop in props { let id = match prop.key { ast::PropertyKey::Identifier(id) => Ok(id), _ => Err("function params must be identifiers".to_string()), }?; let key = convert_identifier(id, fresher)?; let mut default: Option<Expression> = None; let mut is_pipe = false; if let Some(expr) = prop.value { match expr { ast::Expression::PipeLit(_) => { if piped { return Err("only a single argument may be piped".to_string()); } else { piped = true; is_pipe = true; }; } e => default = Some(convert_expression(e, fresher)?), } }; params.push(FunctionParameter { loc: prop.base.location, is_pipe, key, default, }); } Ok(params) } fn convert_function_body(body: ast::FunctionBody, fresher: &mut Fresher) -> Result<Block> { match body { ast::FunctionBody::Expr(expr) => { let argument = convert_expression(expr, fresher)?; Ok(Block::Return(ReturnStmt { loc: argument.loc().clone(), argument, })) } ast::FunctionBody::Block(block) => Ok(convert_block(block, fresher)?), } } fn convert_block(block: ast::Block, fresher: &mut Fresher) -> Result<Block> { let mut body = block.body.into_iter().rev(); let block = if let Some(ast::Statement::Return(stmt)) = body.next() { let argument = convert_expression(stmt.argument, fresher)?; Block::Return(ReturnStmt { loc: stmt.base.location.clone(), argument, }) } else { return Err("missing return statement in block".to_string()); }; body.try_fold(block, |acc, s| match s { ast::Statement::Variable(dec) => Ok(Block::Variable( Box::new(convert_variable_assignment(*dec, fresher)?), Box::new(acc), )), ast::Statement::Expr(stmt) => Ok(Block::Expr( convert_expression_statement(*stmt, fresher)?, Box::new(acc), )), _ => Err(format!("invalid statement in function block {:#?}", s)), }) } fn convert_call_expression(expr: 
ast::CallExpr, fresher: &mut Fresher) -> Result<CallExpr> { let callee = convert_expression(expr.callee, fresher)?; // TODO(affo): I'd prefer these checks to be in ast.Check(). if expr.arguments.len() > 1 { return Err("arguments are more than one object expression".to_string()); } let mut args = expr .arguments .into_iter() .map(|a| match a { ast::Expression::Object(obj) => convert_object_expression(*obj, fresher), _ => Err("arguments not an object expression".to_string()), }) .collect::<Result<Vec<ObjectExpr>>>()?; let arguments = match args.len() { 0 => Ok(Vec::new()), 1 => Ok(args.pop().expect("there must be 1 element").properties), _ => Err("arguments are more than one object expression".to_string()), }?; Ok(CallExpr { loc: expr.base.location, typ: MonoType::Var(fresher.fresh()), callee, arguments, pipe: None, }) } fn convert_member_expression(expr: ast::MemberExpr, fresher: &mut Fresher) -> Result<MemberExpr> { let object = convert_expression(expr.object, fresher)?; let property = match expr.property { ast::PropertyKey::Identifier(id) => id.name, ast::PropertyKey::StringLit(lit) => lit.value, }; Ok(MemberExpr { loc: expr.base.location, typ: MonoType::Var(fresher.fresh()), object, property, }) } fn convert_index_expression(expr: ast::IndexExpr, fresher: &mut Fresher) -> Result<IndexExpr> { let array = convert_expression(expr.array, fresher)?; let index = convert_expression(expr.index, fresher)?; Ok(IndexExpr { loc: expr.base.location, typ: MonoType::Var(fresher.fresh()), array, index, }) } fn convert_pipe_expression(expr: ast::PipeExpr, fresher: &mut Fresher) -> Result<CallExpr> { let mut call = convert_call_expression(expr.call, fresher)?; let pipe = convert_expression(expr.argument, fresher)?; call.pipe = Some(pipe); Ok(call) } fn convert_binary_expression(expr: ast::BinaryExpr, fresher: &mut Fresher) -> Result<BinaryExpr> { let left = convert_expression(expr.left, fresher)?; let right = convert_expression(expr.right, fresher)?; Ok(BinaryExpr { loc: expr.base.location, typ: MonoType::Var(fresher.fresh()), operator: expr.operator, left, right, }) } fn convert_unary_expression(expr: ast::UnaryExpr, fresher: &mut Fresher) -> Result<UnaryExpr> { let argument = convert_expression(expr.argument, fresher)?; Ok(UnaryExpr { loc: expr.base.location, typ: MonoType::Var(fresher.fresh()), operator: expr.operator, argument, }) } fn convert_logical_expression( expr: ast::LogicalExpr, fresher: &mut Fresher, ) -> Result<LogicalExpr> { let left = convert_expression(expr.left, fresher)?; let right = convert_expression(expr.right, fresher)?; Ok(LogicalExpr { loc: expr.base.location, typ: MonoType::Var(fresher.fresh()), operator: expr.operator, left, right, }) } fn convert_conditional_expression( expr: ast::ConditionalExpr, fresher: &mut Fresher, ) -> Result<ConditionalExpr> { let test = convert_expression(expr.test, fresher)?; let consequent = convert_expression(expr.consequent, fresher)?; let alternate = convert_expression(expr.alternate, fresher)?; Ok(ConditionalExpr { loc: expr.base.location, typ: MonoType::Var(fresher.fresh()), test, consequent, alternate, }) } fn convert_object_expression(expr: ast::ObjectExpr, fresher: &mut Fresher) -> Result<ObjectExpr> { let properties = expr .properties .into_iter() .map(|p| convert_property(p, fresher)) .collect::<Result<Vec<Property>>>()?; let with = match expr.with { Some(with) => Some(convert_identifier_expression(with.source, fresher)?), None => None, }; Ok(ObjectExpr { loc: expr.base.location, typ: MonoType::Var(fresher.fresh()), with, properties, }) } fn 
convert_property(prop: ast::Property, fresher: &mut Fresher) -> Result<Property> { let key = match prop.key { ast::PropertyKey::Identifier(id) => convert_identifier(id, fresher)?, ast::PropertyKey::StringLit(lit) => Identifier { loc: lit.base.location.clone(), name: convert_string_literal(lit, fresher)?.value, }, }; let value = match prop.value { Some(expr) => convert_expression(expr, fresher)?, None => Expression::Identifier(IdentifierExpr { loc: key.loc.clone(), typ: MonoType::Var(fresher.fresh()), name: key.name.clone(), }), }; Ok(Property { loc: prop.base.location, key, value, }) } fn convert_array_expression(expr: ast::ArrayExpr, fresher: &mut Fresher) -> Result<ArrayExpr> { let elements = expr .elements .into_iter() .map(|e| convert_expression(e.expression, fresher)) .collect::<Result<Vec<Expression>>>()?; Ok(ArrayExpr { loc: expr.base.location, typ: MonoType::Var(fresher.fresh()), elements, }) } fn convert_identifier(id: ast::Identifier, _fresher: &mut Fresher) -> Result<Identifier> { Ok(Identifier { loc: id.base.location, name: id.name, }) } fn convert_identifier_expression( id: ast::Identifier, fresher: &mut Fresher, ) -> Result<IdentifierExpr> { Ok(IdentifierExpr { loc: id.base.location, typ: MonoType::Var(fresher.fresh()), name: id.name, }) } fn convert_string_expression(expr: ast::StringExpr, fresher: &mut Fresher) -> Result<StringExpr> { let parts = expr .parts .into_iter() .map(|p| convert_string_expression_part(p, fresher)) .collect::<Result<Vec<StringExprPart>>>()?; Ok(StringExpr { loc: expr.base.location, typ: MonoType::Var(fresher.fresh()), parts, }) } fn convert_string_expression_part( expr: ast::StringExprPart, fresher: &mut Fresher, ) -> Result<StringExprPart> { match expr { ast::StringExprPart::Text(txt) => Ok(StringExprPart::Text(TextPart { loc: txt.base.location, value: txt.value, })), ast::StringExprPart::Interpolated(itp) => { Ok(StringExprPart::Interpolated(InterpolatedPart { loc: itp.base.location, expression: convert_expression(itp.expression, fresher)?, })) } } } fn convert_string_literal(lit: ast::StringLit, fresher: &mut Fresher) -> Result<StringLit> { Ok(StringLit { loc: lit.base.location, typ: MonoType::Var(fresher.fresh()), value: lit.value, }) } fn convert_boolean_literal(lit: ast::BooleanLit, fresher: &mut Fresher) -> Result<BooleanLit> { Ok(BooleanLit { loc: lit.base.location, typ: MonoType::Var(fresher.fresh()), value: lit.value, }) } fn convert_float_literal(lit: ast::FloatLit, fresher: &mut Fresher) -> Result<FloatLit> { Ok(FloatLit { loc: lit.base.location, typ: MonoType::Var(fresher.fresh()), value: lit.value, }) } fn convert_integer_literal(lit: ast::IntegerLit, fresher: &mut Fresher) -> Result<IntegerLit> { Ok(IntegerLit { loc: lit.base.location, typ: MonoType::Var(fresher.fresh()), value: lit.value, }) } fn convert_unsigned_integer_literal(lit: ast::UintLit, fresher: &mut Fresher) -> Result<UintLit> { Ok(UintLit { loc: lit.base.location, typ: MonoType::Var(fresher.fresh()), value: lit.value, }) } fn convert_regexp_literal(lit: ast::RegexpLit, fresher: &mut Fresher) -> Result<RegexpLit> { Ok(RegexpLit { loc: lit.base.location, typ: MonoType::Var(fresher.fresh()), value: lit.value, }) } fn convert_duration_literal(lit: ast::DurationLit, fresher: &mut Fresher) -> Result<DurationLit> { Ok(DurationLit { loc: lit.base.location, typ: MonoType::Var(fresher.fresh()), value: convert_duration(&lit.values)?, }) } fn convert_date_time_literal(lit: ast::DateTimeLit, fresher: &mut Fresher) -> Result<DateTimeLit> { Ok(DateTimeLit { loc: lit.base.location, typ: 
MonoType::Var(fresher.fresh()), value: lit.value, }) } // In these tests we test the results of semantic analysis on some ASTs. // NOTE: we do not care about locations. // We create a default base node and clone it in various AST nodes. #[cfg(test)] mod tests { use super::*; use crate::semantic::fresh; use crate::semantic::types::{MonoType, Tvar}; use pretty_assertions::assert_eq; // type_info() is used for the expected semantic graph. // The id for the Tvar does not matter, because that is not compared. fn type_info() -> MonoType { MonoType::Var(Tvar(0)) } fn test_convert(pkg: ast::Package) -> Result<Package> { convert_with(pkg, &mut fresh::Fresher::default()) } #[test] fn test_convert_empty() { let b = ast::BaseNode::default(); let pkg = ast::Package { base: b.clone(), path: "path".to_string(), package: "main".to_string(), files: Vec::new(), }; let want = Package { loc: b.location.clone(), package: "main".to_string(), files: Vec::new(), }; let got = test_convert(pkg).unwrap(); assert_eq!(want, got); } #[test] fn test_convert_package() { let b = ast::BaseNode::default(); let pkg = ast::Package { base: b.clone(), path: "path".to_string(), package: "main".to_string(), files: vec![ast::File { base: b.clone(), name: "foo.flux".to_string(), metadata: String::new(), package: Some(ast::PackageClause { base: b.clone(), name: ast::Identifier { base: b.clone(), name: "foo".to_string(), }, }), imports: Vec::new(), body: Vec::new(), eof: None, }], }; let want = Package { loc: b.location.clone(), package: "main".to_string(), files: vec![File { loc: b.location.clone(), package: Some(PackageClause { loc: b.location.clone(), name: Identifier { loc: b.location.clone(), name: "foo".to_string(), }, }), imports: Vec::new(), body: Vec::new(), }], }; let got = test_convert(pkg).unwrap(); assert_eq!(want, got); } #[test] fn test_convert_imports() { let b = ast::BaseNode::default(); let pkg = ast::Package { base: b.clone(), path: "path".to_string(), package: "main".to_string(), files: vec![ast::File { base: b.clone(), name: "foo.flux".to_string(), metadata: String::new(), package: Some(ast::PackageClause { base: b.clone(), name: ast::Identifier { base: b.clone(), name: "foo".to_string(), }, }), imports: vec![ ast::ImportDeclaration { base: b.clone(), path: ast::StringLit { base: b.clone(), value: "path/foo".to_string(), }, alias: None, }, ast::ImportDeclaration { base: b.clone(), path: ast::StringLit { base: b.clone(), value: "path/bar".to_string(), }, alias: Some(ast::Identifier { base: b.clone(), name: "b".to_string(), }), }, ], body: Vec::new(), eof: None, }], }; let want = Package { loc: b.location.clone(), package: "main".to_string(), files: vec![File { loc: b.location.clone(), package: Some(PackageClause { loc: b.location.clone(), name: Identifier { loc: b.location.clone(), name: "foo".to_string(), }, }), imports: vec![ ImportDeclaration { loc: b.location.clone(), path: StringLit { loc: b.location.clone(), typ: type_info(), value: "path/foo".to_string(), }, alias: None, }, ImportDeclaration { loc: b.location.clone(), path: StringLit { loc: b.location.clone(), typ: type_info(), value: "path/bar".to_string(), }, alias: Some(Identifier { loc: b.location.clone(), name: "b".to_string(), }), }, ], body: Vec::new(), }], }; let got = test_convert(pkg).unwrap(); assert_eq!(want, got); } #[test] fn test_convert_var_assignment() { let b = ast::BaseNode::default(); let pkg = ast::Package { base: b.clone(), path: "path".to_string(), package: "main".to_string(), files: vec![ast::File { base: b.clone(), name: 
"foo.flux".to_string(), metadata: String::new(), package: None, imports: Vec::new(), body: vec![ ast::Statement::Variable(Box::new(ast::VariableAssgn { base: b.clone(), id: ast::Identifier { base: b.clone(), name: "a".to_string(), }, init: ast::Expression::Boolean(ast::BooleanLit { base: b.clone(), value: true, }), })), ast::Statement::Expr(Box::new(ast::ExprStmt { base: b.clone(), expression: ast::Expression::Identifier(ast::Identifier { base: b.clone(), name: "a".to_string(), }), })), ], eof: None, }], }; let want = Package { loc: b.location.clone(), package: "main".to_string(), files: vec![File { loc: b.location.clone(), package: None, imports: Vec::new(), body: vec![ Statement::Variable(Box::new(VariableAssgn::new( Identifier { loc: b.location.clone(), name: "a".to_string(), }, Expression::Boolean(BooleanLit { loc: b.location.clone(), typ: type_info(), value: true, }), b.location.clone(), ))), Statement::Expr(ExprStmt { loc: b.location.clone(), expression: Expression::Identifier(IdentifierExpr { loc: b.location.clone(), typ: type_info(), name: "a".to_string(), }), }), ], }], }; let got = test_convert(pkg).unwrap(); assert_eq!(want, got); } #[test] fn test_convert_object() { let b = ast::BaseNode::default(); let pkg = ast::Package { base: b.clone(), path: "path".to_string(), package: "main".to_string(), files: vec![ast::File { base: b.clone(), name: "foo.flux".to_string(), metadata: String::new(), package: None, imports: Vec::new(), body: vec![ast::Statement::Expr(Box::new(ast::ExprStmt { base: b.clone(), expression: ast::Expression::Object(Box::new(ast::ObjectExpr { base: b.clone(), lbrace: None, with: None, properties: vec![ast::Property { base: b.clone(), key: ast::PropertyKey::Identifier(ast::Identifier { base: b.clone(), name: "a".to_string(), }), separator: None, value: Some(ast::Expression::Integer(ast::IntegerLit { base: b.clone(), value: 10, })), comma: None, }], rbrace: None, })), }))], eof: None, }], }; let want = Package { loc: b.location.clone(), package: "main".to_string(), files: vec![File { loc: b.location.clone(), package: None, imports: Vec::new(), body: vec![Statement::Expr(ExprStmt { loc: b.location.clone(), expression: Expression::Object(Box::new(ObjectExpr { loc: b.location.clone(), typ: type_info(), with: None, properties: vec![Property { loc: b.location.clone(), key: Identifier { loc: b.location.clone(), name: "a".to_string(), }, value: Expression::Integer(IntegerLit { loc: b.location.clone(), typ: type_info(), value: 10, }), }], })), })], }], }; let got = test_convert(pkg).unwrap(); assert_eq!(want, got); } #[test] fn test_convert_object_with_string_key() { let b = ast::BaseNode::default(); let pkg = ast::Package { base: b.clone(), path: "path".to_string(), package: "main".to_string(), files: vec![ast::File { base: b.clone(), name: "foo.flux".to_string(), metadata: String::new(), package: None, imports: Vec::new(), body: vec![ast::Statement::Expr(Box::new(ast::ExprStmt { base: b.clone(), expression: ast::Expression::Object(Box::new(ast::ObjectExpr { base: b.clone(), lbrace: None, with: None, properties: vec![ast::Property { base: b.clone(), key: ast::PropertyKey::StringLit(ast::StringLit { base: b.clone(), value: "a".to_string(), }), separator: None, value: Some(ast::Expression::Integer(ast::IntegerLit { base: b.clone(), value: 10, })), comma: None, }], rbrace: None, })), }))], eof: None, }], }; let want = Package { loc: b.location.clone(), package: "main".to_string(), files: vec![File { loc: b.location.clone(), package: None, imports: Vec::new(), body: 
vec![Statement::Expr(ExprStmt { loc: b.location.clone(), expression: Expression::Object(Box::new(ObjectExpr { loc: b.location.clone(), typ: type_info(), with: None, properties: vec![Property { loc: b.location.clone(), key: Identifier { loc: b.location.clone(), name: "a".to_string(), }, value: Expression::Integer(IntegerLit { loc: b.location.clone(), typ: type_info(), value: 10, }), }], })), })], }], }; let got = test_convert(pkg).unwrap(); assert_eq!(want, got); } #[test] fn test_convert_object_with_mixed_keys() { let b = ast::BaseNode::default(); let pkg = ast::Package { base: b.clone(), path: "path".to_string(), package: "main".to_string(), files: vec![ast::File { base: b.clone(), name: "foo.flux".to_string(), metadata: String::new(), package: None, imports: Vec::new(), body: vec![ast::Statement::Expr(Box::new(ast::ExprStmt { base: b.clone(), expression: ast::Expression::Object(Box::new(ast::ObjectExpr { base: b.clone(), lbrace: None, with: None, properties: vec![ ast::Property { base: b.clone(), key: ast::PropertyKey::StringLit(ast::StringLit { base: b.clone(), value: "a".to_string(), }), separator: None, value: Some(ast::Expression::Integer(ast::IntegerLit { base: b.clone(), value: 10, })), comma: None, }, ast::Property { base: b.clone(), key: ast::PropertyKey::Identifier(ast::Identifier { base: b.clone(), name: "b".to_string(), }), separator: None, value: Some(ast::Expression::Integer(ast::IntegerLit { base: b.clone(), value: 11, })), comma: None, }, ], rbrace: None, })), }))], eof: None, }], }; let want = Package { loc: b.location.clone(), package: "main".to_string(), files: vec![File { loc: b.location.clone(), package: None, imports: Vec::new(), body: vec![Statement::Expr(ExprStmt { loc: b.location.clone(), expression: Expression::Object(Box::new(ObjectExpr { loc: b.location.clone(), typ: type_info(), with: None, properties: vec![ Property { loc: b.location.clone(), key: Identifier { loc: b.location.clone(), name: "a".to_string(), }, value: Expression::Integer(IntegerLit { loc: b.location.clone(), typ: type_info(), value: 10, }), }, Property { loc: b.location.clone(), key: Identifier { loc: b.location.clone(), name: "b".to_string(), }, value: Expression::Integer(IntegerLit { loc: b.location.clone(), typ: type_info(), value: 11, }), }, ], })), })], }], }; let got = test_convert(pkg).unwrap(); assert_eq!(want, got); } #[test] fn test_convert_object_with_implicit_keys() { let b = ast::BaseNode::default(); let pkg = ast::Package { base: b.clone(), path: "path".to_string(), package: "main".to_string(), files: vec![ast::File { base: b.clone(), name: "foo.flux".to_string(), metadata: String::new(), package: None, imports: Vec::new(), body: vec![ast::Statement::Expr(Box::new(ast::ExprStmt { base: b.clone(), expression: ast::Expression::Object(Box::new(ast::ObjectExpr { base: b.clone(), lbrace: None, with: None, properties: vec![ ast::Property { base: b.clone(), key: ast::PropertyKey::Identifier(ast::Identifier { base: b.clone(), name: "a".to_string(), }), separator: None, value: None, comma: None, }, ast::Property { base: b.clone(), key: ast::PropertyKey::Identifier(ast::Identifier { base: b.clone(), name: "b".to_string(), }), separator: None, value: None, comma: None, }, ], rbrace: None, })), }))], eof: None, }], }; let want = Package { loc: b.location.clone(), package: "main".to_string(), files: vec![File { loc: b.location.clone(), package: None, imports: Vec::new(), body: vec![Statement::Expr(ExprStmt { loc: b.location.clone(), expression: Expression::Object(Box::new(ObjectExpr { loc: 
b.location.clone(), typ: type_info(), with: None, properties: vec![ Property { loc: b.location.clone(), key: Identifier { loc: b.location.clone(), name: "a".to_string(), }, value: Expression::Identifier(IdentifierExpr { loc: b.location.clone(), typ: type_info(), name: "a".to_string(), }), }, Property { loc: b.location.clone(), key: Identifier { loc: b.location.clone(), name: "b".to_string(), }, value: Expression::Identifier(IdentifierExpr { loc: b.location.clone(), typ: type_info(), name: "b".to_string(), }), }, ], })), })], }], }; let got = test_convert(pkg).unwrap(); assert_eq!(want, got); } #[test] fn test_convert_options_declaration() { let b = ast::BaseNode::default(); let pkg = ast::Package { base: b.clone(), path: "path".to_string(), package: "main".to_string(), files: vec![ast::File { base: b.clone(), name: "foo.flux".to_string(), metadata: String::new(), package: None, imports: Vec::new(), body: vec![ast::Statement::Option(Box::new(ast::OptionStmt { base: b.clone(), assignment: ast::Assignment::Variable(Box::new(ast::VariableAssgn { base: b.clone(), id: ast::Identifier { base: b.clone(), name: "task".to_string(), }, init: ast::Expression::Object(Box::new(ast::ObjectExpr { base: b.clone(), lbrace: None, with: None, properties: vec![ ast::Property { base: b.clone(), key: ast::PropertyKey::Identifier(ast::Identifier { base: b.clone(), name: "name".to_string(), }), separator: None, value: Some(ast::Expression::StringLit(ast::StringLit { base: b.clone(), value: "foo".to_string(), })), comma: None, }, ast::Property { base: b.clone(), key: ast::PropertyKey::Identifier(ast::Identifier { base: b.clone(), name: "every".to_string(), }), separator: None, value: Some(ast::Expression::Duration(ast::DurationLit { base: b.clone(), values: vec![ast::Duration { magnitude: 1, unit: "h".to_string(), }], })), comma: None, }, ast::Property { base: b.clone(), key: ast::PropertyKey::Identifier(ast::Identifier { base: b.clone(), name: "delay".to_string(), }), separator: None, value: Some(ast::Expression::Duration(ast::DurationLit { base: b.clone(), values: vec![ast::Duration { magnitude: 10, unit: "m".to_string(), }], })), comma: None, }, ast::Property { base: b.clone(), key: ast::PropertyKey::Identifier(ast::Identifier { base: b.clone(), name: "cron".to_string(), }), separator: None, value: Some(ast::Expression::StringLit(ast::StringLit { base: b.clone(), value: "0 2 * * *".to_string(), })), comma: None, }, ast::Property { base: b.clone(), key: ast::PropertyKey::Identifier(ast::Identifier { base: b.clone(), name: "retry".to_string(), }), separator: None, value: Some(ast::Expression::Integer(ast::IntegerLit { base: b.clone(), value: 5, })), comma: None, }, ], rbrace: None, })), })), }))], eof: None, }], }; let want = Package { loc: b.location.clone(), package: "main".to_string(), files: vec![File { loc: b.location.clone(), package: None, imports: Vec::new(), body: vec![Statement::Option(Box::new(OptionStmt { loc: b.location.clone(), assignment: Assignment::Variable(VariableAssgn::new( Identifier { loc: b.location.clone(), name: "task".to_string(), }, Expression::Object(Box::new(ObjectExpr { loc: b.location.clone(), typ: type_info(), with: None, properties: vec![ Property { loc: b.location.clone(), key: Identifier { loc: b.location.clone(), name: "name".to_string(), }, value: Expression::StringLit(StringLit { loc: b.location.clone(), typ: type_info(), value: "foo".to_string(), }), }, Property { loc: b.location.clone(), key: Identifier { loc: b.location.clone(), name: "every".to_string(), }, value: 
Expression::Duration(DurationLit { loc: b.location.clone(), typ: type_info(), value: Duration { months: 5, nanoseconds: 5000, negative: false, }, }), }, Property { loc: b.location.clone(), key: Identifier { loc: b.location.clone(), name: "delay".to_string(), }, value: Expression::Duration(DurationLit { loc: b.location.clone(), typ: type_info(), value: Duration { months: 1, nanoseconds: 50, negative: true, }, }), }, Property { loc: b.location.clone(), key: Identifier { loc: b.location.clone(), name: "cron".to_string(), }, value: Expression::StringLit(StringLit { loc: b.location.clone(), typ: type_info(), value: "0 2 * * *".to_string(), }), }, Property { loc: b.location.clone(), key: Identifier { loc: b.location.clone(), name: "retry".to_string(), }, value: Expression::Integer(IntegerLit { loc: b.location.clone(), typ: type_info(), value: 5, }), }, ], })), b.location.clone(), )), }))], }], }; let got = test_convert(pkg).unwrap(); assert_eq!(want, got); } #[test] fn test_convert_qualified_option_statement() { let b = ast::BaseNode::default(); let pkg = ast::Package { base: b.clone(), path: "path".to_string(), package: "main".to_string(), files: vec![ast::File { base: b.clone(), name: "foo.flux".to_string(), metadata: String::new(), package: None, imports: Vec::new(), body: vec![ast::Statement::Option(Box::new(ast::OptionStmt { base: b.clone(), assignment: ast::Assignment::Member(Box::new(ast::MemberAssgn { base: b.clone(), member: ast::MemberExpr { base: b.clone(), object: ast::Expression::Identifier(ast::Identifier { base: b.clone(), name: "alert".to_string(), }), lbrack: None, property: ast::PropertyKey::Identifier(ast::Identifier { base: b.clone(), name: "state".to_string(), }), rbrack: None, }, init: ast::Expression::StringLit(ast::StringLit { base: b.clone(), value: "Warning".to_string(), }), })), }))], eof: None, }], }; let want = Package { loc: b.location.clone(), package: "main".to_string(), files: vec![File { loc: b.location.clone(), package: None, imports: Vec::new(), body: vec![Statement::Option(Box::new(OptionStmt { loc: b.location.clone(), assignment: Assignment::Member(MemberAssgn { loc: b.location.clone(), member: MemberExpr { loc: b.location.clone(), typ: type_info(), object: Expression::Identifier(IdentifierExpr { loc: b.location.clone(), typ: type_info(), name: "alert".to_string(), }), property: "state".to_string(), }, init: Expression::StringLit(StringLit { loc: b.location.clone(), typ: type_info(), value: "Warning".to_string(), }), }), }))], }], }; let got = test_convert(pkg).unwrap(); assert_eq!(want, got); } #[test] fn test_convert_function() { let b = ast::BaseNode::default(); let pkg = ast::Package { base: b.clone(), path: "path".to_string(), package: "main".to_string(), files: vec![ast::File { base: b.clone(), name: "foo.flux".to_string(), metadata: String::new(), package: None, imports: Vec::new(), body: vec![ ast::Statement::Variable(Box::new(ast::VariableAssgn { base: b.clone(), id: ast::Identifier { base: b.clone(), name: "f".to_string(), }, init: ast::Expression::Function(Box::new(ast::FunctionExpr { base: b.clone(), lparen: None, params: vec![ ast::Property { base: b.clone(), key: ast::PropertyKey::Identifier(ast::Identifier { base: b.clone(), name: "a".to_string(), }), separator: None, value: None, comma: None, }, ast::Property { base: b.clone(), key: ast::PropertyKey::Identifier(ast::Identifier { base: b.clone(), name: "b".to_string(), }), separator: None, value: None, comma: None, }, ], rparen: None, arrow: None, body: 
ast::FunctionBody::Expr(ast::Expression::Binary(Box::new( ast::BinaryExpr { base: b.clone(), operator: ast::Operator::AdditionOperator, left: ast::Expression::Identifier(ast::Identifier { base: b.clone(), name: "a".to_string(), }), right: ast::Expression::Identifier(ast::Identifier { base: b.clone(), name: "b".to_string(), }), }, ))), })), })), ast::Statement::Expr(Box::new(ast::ExprStmt { base: b.clone(), expression: ast::Expression::Call(Box::new(ast::CallExpr { base: b.clone(), callee: ast::Expression::Identifier(ast::Identifier { base: b.clone(), name: "f".to_string(), }), lparen: None, arguments: vec![ast::Expression::Object(Box::new(ast::ObjectExpr { base: b.clone(), lbrace: None, with: None, properties: vec![ ast::Property { base: b.clone(), key: ast::PropertyKey::Identifier(ast::Identifier { base: b.clone(), name: "a".to_string(), }), separator: None, value: Some(ast::Expression::Integer(ast::IntegerLit { base: b.clone(), value: 2, })), comma: None, }, ast::Property { base: b.clone(), key: ast::PropertyKey::Identifier(ast::Identifier { base: b.clone(), name: "b".to_string(), }), separator: None, value: Some(ast::Expression::Integer(ast::IntegerLit { base: b.clone(), value: 3, })), comma: None, }, ], rbrace: None, }))], rparen: None, })), })), ], eof: None, }], }; let want = Package { loc: b.location.clone(), package: "main".to_string(), files: vec![File { loc: b.location.clone(), package: None, imports: Vec::new(), body: vec![ Statement::Variable(Box::new(VariableAssgn::new( Identifier { loc: b.location.clone(), name: "f".to_string(), }, Expression::Function(Box::new(FunctionExpr { loc: b.location.clone(), typ: type_info(), params: vec![ FunctionParameter { loc: b.location.clone(), is_pipe: false, key: Identifier { loc: b.location.clone(), name: "a".to_string(), }, default: None, }, FunctionParameter { loc: b.location.clone(), is_pipe: false, key: Identifier { loc: b.location.clone(), name: "b".to_string(), }, default: None, }, ], body: Block::Return(ReturnStmt { loc: b.location.clone(), argument: Expression::Binary(Box::new(BinaryExpr { loc: b.location.clone(), typ: type_info(), operator: ast::Operator::AdditionOperator, left: Expression::Identifier(IdentifierExpr { loc: b.location.clone(), typ: type_info(), name: "a".to_string(), }), right: Expression::Identifier(IdentifierExpr { loc: b.location.clone(), typ: type_info(), name: "b".to_string(), }), })), }), })), b.location.clone(), ))), Statement::Expr(ExprStmt { loc: b.location.clone(), expression: Expression::Call(Box::new(CallExpr { loc: b.location.clone(), typ: type_info(), pipe: None, callee: Expression::Identifier(IdentifierExpr { loc: b.location.clone(), typ: type_info(), name: "f".to_string(), }), arguments: vec![ Property { loc: b.location.clone(), key: Identifier { loc: b.location.clone(), name: "a".to_string(), }, value: Expression::Integer(IntegerLit { loc: b.location.clone(), typ: type_info(), value: 2, }), }, Property { loc: b.location.clone(), key: Identifier { loc: b.location.clone(), name: "b".to_string(), }, value: Expression::Integer(IntegerLit { loc: b.location.clone(), typ: type_info(), value: 3, }), }, ], })), }), ], }], }; let got = test_convert(pkg).unwrap(); assert_eq!(want, got); } #[test] fn test_convert_function_with_defaults() { let b = ast::BaseNode::default(); let pkg = ast::Package { base: b.clone(), path: "path".to_string(), package: "main".to_string(), files: vec![ast::File { base: b.clone(), name: "foo.flux".to_string(), metadata: String::new(), package: None, imports: Vec::new(), body: vec![ 
ast::Statement::Variable(Box::new(ast::VariableAssgn { base: b.clone(), id: ast::Identifier { base: b.clone(), name: "f".to_string(), }, init: ast::Expression::Function(Box::new(ast::FunctionExpr { base: b.clone(), lparen: None, params: vec![ ast::Property { base: b.clone(), key: ast::PropertyKey::Identifier(ast::Identifier { base: b.clone(), name: "a".to_string(), }), separator: None, value: Some(ast::Expression::Integer(ast::IntegerLit { base: b.clone(), value: 0, })), comma: None, }, ast::Property { base: b.clone(), key: ast::PropertyKey::Identifier(ast::Identifier { base: b.clone(), name: "b".to_string(), }), separator: None, value: Some(ast::Expression::Integer(ast::IntegerLit { base: b.clone(), value: 0, })), comma: None, }, ast::Property { base: b.clone(), key: ast::PropertyKey::Identifier(ast::Identifier { base: b.clone(), name: "c".to_string(), }), separator: None, value: None, comma: None, }, ], rparen: None, arrow: None, body: ast::FunctionBody::Expr(ast::Expression::Binary(Box::new( ast::BinaryExpr { base: b.clone(), operator: ast::Operator::AdditionOperator, left: ast::Expression::Binary(Box::new(ast::BinaryExpr { base: b.clone(), operator: ast::Operator::AdditionOperator, left: ast::Expression::Identifier(ast::Identifier { base: b.clone(), name: "a".to_string(), }), right: ast::Expression::Identifier(ast::Identifier { base: b.clone(), name: "b".to_string(), }), })), right: ast::Expression::Identifier(ast::Identifier { base: b.clone(), name: "c".to_string(), }), }, ))), })), })), ast::Statement::Expr(Box::new(ast::ExprStmt { base: b.clone(), expression: ast::Expression::Call(Box::new(ast::CallExpr { base: b.clone(), callee: ast::Expression::Identifier(ast::Identifier { base: b.clone(), name: "f".to_string(), }), lparen: None, arguments: vec![ast::Expression::Object(Box::new(ast::ObjectExpr { base: b.clone(), lbrace: None, with: None, properties: vec![ast::Property { base: b.clone(), key: ast::PropertyKey::Identifier(ast::Identifier { base: b.clone(), name: "c".to_string(), }), separator: None, value: Some(ast::Expression::Integer(ast::IntegerLit { base: b.clone(), value: 42, })), comma: None, }], rbrace: None, }))], rparen: None, })), })), ], eof: None, }], }; let want = Package { loc: b.location.clone(), package: "main".to_string(), files: vec![File { loc: b.location.clone(), package: None, imports: Vec::new(), body: vec![ Statement::Variable(Box::new(VariableAssgn::new( Identifier { loc: b.location.clone(), name: "f".to_string(), }, Expression::Function(Box::new(FunctionExpr { loc: b.location.clone(), typ: type_info(), params: vec![ FunctionParameter { loc: b.location.clone(), is_pipe: false, key: Identifier { loc: b.location.clone(), name: "a".to_string(), }, default: Some(Expression::Integer(IntegerLit { loc: b.location.clone(), typ: type_info(), value: 0, })), }, FunctionParameter { loc: b.location.clone(), is_pipe: false, key: Identifier { loc: b.location.clone(), name: "b".to_string(), }, default: Some(Expression::Integer(IntegerLit { loc: b.location.clone(), typ: type_info(), value: 0, })), }, FunctionParameter { loc: b.location.clone(), is_pipe: false, key: Identifier { loc: b.location.clone(), name: "c".to_string(), }, default: None, }, ], body: Block::Return(ReturnStmt { loc: b.location.clone(), argument: Expression::Binary(Box::new(BinaryExpr { loc: b.location.clone(), typ: type_info(), operator: ast::Operator::AdditionOperator, left: Expression::Binary(Box::new(BinaryExpr { loc: b.location.clone(), typ: type_info(), operator: ast::Operator::AdditionOperator, left: 
Expression::Identifier(IdentifierExpr { loc: b.location.clone(), typ: type_info(), name: "a".to_string(), }), right: Expression::Identifier(IdentifierExpr { loc: b.location.clone(), typ: type_info(), name: "b".to_string(), }), })), right: Expression::Identifier(IdentifierExpr { loc: b.location.clone(), typ: type_info(), name: "c".to_string(), }), })), }), })), b.location.clone(), ))), Statement::Expr(ExprStmt { loc: b.location.clone(), expression: Expression::Call(Box::new(CallExpr { loc: b.location.clone(), typ: type_info(), pipe: None, callee: Expression::Identifier(IdentifierExpr { loc: b.location.clone(), typ: type_info(), name: "f".to_string(), }), arguments: vec![Property { loc: b.location.clone(), key: Identifier { loc: b.location.clone(), name: "c".to_string(), }, value: Expression::Integer(IntegerLit { loc: b.location.clone(), typ: type_info(), value: 42, }), }], })), }), ], }], }; let got = test_convert(pkg).unwrap(); assert_eq!(want, got); } #[test] fn test_convert_function_multiple_pipes() { let b = ast::BaseNode::default(); let pkg = ast::Package { base: b.clone(), path: "path".to_string(), package: "main".to_string(), files: vec![ast::File { base: b.clone(), name: "foo.flux".to_string(), metadata: String::new(), package: None, imports: Vec::new(), body: vec![ast::Statement::Variable(Box::new(ast::VariableAssgn { base: b.clone(), id: ast::Identifier { base: b.clone(), name: "f".to_string(), }, init: ast::Expression::Function(Box::new(ast::FunctionExpr { base: b.clone(), lparen: None, params: vec![ ast::Property { base: b.clone(), key: ast::PropertyKey::Identifier(ast::Identifier { base: b.clone(), name: "a".to_string(), }), separator: None, value: None, comma: None, }, ast::Property { base: b.clone(), key: ast::PropertyKey::Identifier(ast::Identifier { base: b.clone(), name: "piped1".to_string(), }), separator: None, value: Some(ast::Expression::PipeLit(ast::PipeLit { base: b.clone(), })), comma: None, }, ast::Property { base: b.clone(), key: ast::PropertyKey::Identifier(ast::Identifier { base: b.clone(), name: "piped2".to_string(), }), separator: None, value: Some(ast::Expression::PipeLit(ast::PipeLit { base: b.clone(), })), comma: None, }, ], rparen: None, arrow: None, body: ast::FunctionBody::Expr(ast::Expression::Identifier( ast::Identifier { base: b.clone(), name: "a".to_string(), }, )), })), }))], eof: None, }], }; let got = test_convert(pkg).err().unwrap().to_string(); assert_eq!("only a single argument may be piped".to_string(), got); } #[test] fn test_convert_call_multiple_object_arguments() { let b = ast::BaseNode::default(); let pkg = ast::Package { base: b.clone(), path: "path".to_string(), package: "main".to_string(), files: vec![ast::File { base: b.clone(), name: "foo.flux".to_string(), metadata: String::new(), package: None, imports: Vec::new(), body: vec![ast::Statement::Expr(Box::new(ast::ExprStmt { base: b.clone(), expression: ast::Expression::Call(Box::new(ast::CallExpr { base: b.clone(), callee: ast::Expression::Identifier(ast::Identifier { base: b.clone(), name: "f".to_string(), }), lparen: None, arguments: vec![ ast::Expression::Object(Box::new(ast::ObjectExpr { base: b.clone(), lbrace: None, with: None, properties: vec![ast::Property { base: b.clone(), key: ast::PropertyKey::Identifier(ast::Identifier { base: b.clone(), name: "a".to_string(), }), separator: None, value: Some(ast::Expression::Integer(ast::IntegerLit { base: b.clone(), value: 0, })), comma: None, }], rbrace: None, })), ast::Expression::Object(Box::new(ast::ObjectExpr { base: b.clone(), 
lbrace: None, with: None, properties: vec![ast::Property { base: b.clone(), key: ast::PropertyKey::Identifier(ast::Identifier { base: b.clone(), name: "b".to_string(), }), separator: None, value: Some(ast::Expression::Integer(ast::IntegerLit { base: b.clone(), value: 1, })), comma: None, }], rbrace: None, })), ], rparen: None, })), }))], eof: None, }], }; let got = test_convert(pkg).err().unwrap().to_string(); assert_eq!( "arguments are more than one object expression".to_string(), got ); } #[test] fn test_convert_pipe_expression() { let b = ast::BaseNode::default(); let pkg = ast::Package { base: b.clone(), path: "path".to_string(), package: "main".to_string(), files: vec![ast::File { base: b.clone(), name: "foo.flux".to_string(), metadata: String::new(), package: None, imports: Vec::new(), body: vec![ ast::Statement::Variable(Box::new(ast::VariableAssgn { base: b.clone(), id: ast::Identifier { base: b.clone(), name: "f".to_string(), }, init: ast::Expression::Function(Box::new(ast::FunctionExpr { base: b.clone(), lparen: None, params: vec![ ast::Property { base: b.clone(), key: ast::PropertyKey::Identifier(ast::Identifier { base: b.clone(), name: "piped".to_string(), }), separator: None, value: Some(ast::Expression::PipeLit(ast::PipeLit { base: b.clone(), })), comma: None, }, ast::Property { base: b.clone(), key: ast::PropertyKey::Identifier(ast::Identifier { base: b.clone(), name: "a".to_string(), }), separator: None, value: None, comma: None, }, ], rparen: None, arrow: None, body: ast::FunctionBody::Expr(ast::Expression::Binary(Box::new( ast::BinaryExpr { base: b.clone(), operator: ast::Operator::AdditionOperator, left: ast::Expression::Identifier(ast::Identifier { base: b.clone(), name: "a".to_string(), }), right: ast::Expression::Identifier(ast::Identifier { base: b.clone(), name: "piped".to_string(), }), }, ))), })), })), ast::Statement::Expr(Box::new(ast::ExprStmt { base: b.clone(), expression: ast::Expression::PipeExpr(Box::new(ast::PipeExpr { base: b.clone(), argument: ast::Expression::Integer(ast::IntegerLit { base: b.clone(), value: 3, }), call: ast::CallExpr { base: b.clone(), callee: ast::Expression::Identifier(ast::Identifier { base: b.clone(), name: "f".to_string(), }), lparen: None, arguments: vec![ast::Expression::Object(Box::new( ast::ObjectExpr { base: b.clone(), lbrace: None, with: None, properties: vec![ast::Property { base: b.clone(), key: ast::PropertyKey::Identifier(ast::Identifier { base: b.clone(), name: "a".to_string(), }), separator: None, value: Some(ast::Expression::Integer( ast::IntegerLit { base: b.clone(), value: 2, }, )), comma: None, }], rbrace: None, }, ))], rparen: None, }, })), })), ], eof: None, }], }; let want = Package { loc: b.location.clone(), package: "main".to_string(), files: vec![File { loc: b.location.clone(), package: None, imports: Vec::new(), body: vec![ Statement::Variable(Box::new(VariableAssgn::new( Identifier { loc: b.location.clone(), name: "f".to_string(), }, Expression::Function(Box::new(FunctionExpr { loc: b.location.clone(), typ: type_info(), params: vec![ FunctionParameter { loc: b.location.clone(), is_pipe: true, key: Identifier { loc: b.location.clone(), name: "piped".to_string(), }, default: None, }, FunctionParameter { loc: b.location.clone(), is_pipe: false, key: Identifier { loc: b.location.clone(), name: "a".to_string(), }, default: None, }, ], body: Block::Return(ReturnStmt { loc: b.location.clone(), argument: Expression::Binary(Box::new(BinaryExpr { loc: b.location.clone(), typ: type_info(), operator: 
ast::Operator::AdditionOperator, left: Expression::Identifier(IdentifierExpr { loc: b.location.clone(), typ: type_info(), name: "a".to_string(), }), right: Expression::Identifier(IdentifierExpr { loc: b.location.clone(), typ: type_info(), name: "piped".to_string(), }), })), }), })), b.location.clone(), ))), Statement::Expr(ExprStmt { loc: b.location.clone(), expression: Expression::Call(Box::new(CallExpr { loc: b.location.clone(), typ: type_info(), pipe: Some(Expression::Integer(IntegerLit { loc: b.location.clone(), typ: type_info(), value: 3, })), callee: Expression::Identifier(IdentifierExpr { loc: b.location.clone(), typ: type_info(), name: "f".to_string(), }), arguments: vec![Property { loc: b.location.clone(), key: Identifier { loc: b.location.clone(), name: "a".to_string(), }, value: Expression::Integer(IntegerLit { loc: b.location.clone(), typ: type_info(), value: 2, }), }], })), }), ], }], }; let got = test_convert(pkg).unwrap(); assert_eq!(want, got); } #[test] fn test_function_expression_simple() { let b = ast::BaseNode::default(); let f = FunctionExpr { loc: b.location.clone(), typ: type_info(), params: vec![ FunctionParameter { loc: b.location.clone(), is_pipe: false, key: Identifier { loc: b.location.clone(), name: "a".to_string(), }, default: None, }, FunctionParameter { loc: b.location.clone(), is_pipe: false, key: Identifier { loc: b.location.clone(), name: "b".to_string(), }, default: None, }, ], body: Block::Return(ReturnStmt { loc: b.location.clone(), argument: Expression::Binary(Box::new(BinaryExpr { loc: b.location.clone(), typ: type_info(), operator: ast::Operator::AdditionOperator, left: Expression::Identifier(IdentifierExpr { loc: b.location.clone(), typ: type_info(), name: "a".to_string(), }), right: Expression::Identifier(IdentifierExpr { loc: b.location.clone(), typ: type_info(), name: "b".to_string(), }), })), }), }; assert_eq!(Vec::<&FunctionParameter>::new(), f.defaults()); assert_eq!(None, f.pipe()); } #[test] fn test_function_expression_defaults_and_pipes() { let b = ast::BaseNode::default(); let piped = FunctionParameter { loc: b.location.clone(), is_pipe: true, key: Identifier { loc: b.location.clone(), name: "a".to_string(), }, default: Some(Expression::Integer(IntegerLit { loc: b.location.clone(), typ: type_info(), value: 0, })), }; let default1 = FunctionParameter { loc: b.location.clone(), is_pipe: false, key: Identifier { loc: b.location.clone(), name: "b".to_string(), }, default: Some(Expression::Integer(IntegerLit { loc: b.location.clone(), typ: type_info(), value: 1, })), }; let default2 = FunctionParameter { loc: b.location.clone(), is_pipe: false, key: Identifier { loc: b.location.clone(), name: "c".to_string(), }, default: Some(Expression::Integer(IntegerLit { loc: b.location.clone(), typ: type_info(), value: 2, })), }; let no_default = FunctionParameter { loc: b.location.clone(), is_pipe: false, key: Identifier { loc: b.location.clone(), name: "d".to_string(), }, default: None, }; let defaults = vec![&piped, &default1, &default2]; let f = FunctionExpr { loc: b.location.clone(), typ: type_info(), params: vec![ piped.clone(), default1.clone(), default2.clone(), no_default.clone(), ], body: Block::Return(ReturnStmt { loc: b.location.clone(), argument: Expression::Binary(Box::new(BinaryExpr { loc: b.location.clone(), typ: type_info(), operator: ast::Operator::AdditionOperator, left: Expression::Identifier(IdentifierExpr { loc: b.location.clone(), typ: type_info(), name: "a".to_string(), }), right: Expression::Identifier(IdentifierExpr { loc: 
b.location.clone(), typ: type_info(), name: "b".to_string(), }), })), }), }; assert_eq!(defaults, f.defaults()); assert_eq!(Some(&piped), f.pipe()); } #[test] fn test_convert_index_expression() { let b = ast::BaseNode::default(); let pkg = ast::Package { base: b.clone(), path: "path".to_string(), package: "main".to_string(), files: vec![ast::File { base: b.clone(), name: "foo.flux".to_string(), metadata: String::new(), package: None, imports: Vec::new(), body: vec![ast::Statement::Expr(Box::new(ast::ExprStmt { base: b.clone(), expression: ast::Expression::Index(Box::new(ast::IndexExpr { base: b.clone(), array: ast::Expression::Identifier(ast::Identifier { base: b.clone(), name: "a".to_string(), }), lbrack: None, index: ast::Expression::Integer(ast::IntegerLit { base: b.clone(), value: 3, }), rbrack: None, })), }))], eof: None, }], }; let want = Package { loc: b.location.clone(), package: "main".to_string(), files: vec![File { loc: b.location.clone(), package: None, imports: Vec::new(), body: vec![Statement::Expr(ExprStmt { loc: b.location.clone(), expression: Expression::Index(Box::new(IndexExpr { loc: b.location.clone(), typ: type_info(), array: Expression::Identifier(IdentifierExpr { loc: b.location.clone(), typ: type_info(), name: "a".to_string(), }), index: Expression::Integer(IntegerLit { loc: b.location.clone(), typ: type_info(), value: 3, }), })), })], }], }; let got = test_convert(pkg).unwrap(); assert_eq!(want, got); } #[test] fn test_convert_nested_index_expression() { let b = ast::BaseNode::default(); let pkg = ast::Package { base: b.clone(), path: "path".to_string(), package: "main".to_string(), files: vec![ast::File { base: b.clone(), name: "foo.flux".to_string(), metadata: String::new(), package: None, imports: Vec::new(), body: vec![ast::Statement::Expr(Box::new(ast::ExprStmt { base: b.clone(), expression: ast::Expression::Index(Box::new(ast::IndexExpr { base: b.clone(), array: ast::Expression::Index(Box::new(ast::IndexExpr { base: b.clone(), array: ast::Expression::Identifier(ast::Identifier { base: b.clone(), name: "a".to_string(), }), lbrack: None, index: ast::Expression::Integer(ast::IntegerLit { base: b.clone(), value: 3, }), rbrack: None, })), lbrack: None, index: ast::Expression::Integer(ast::IntegerLit { base: b.clone(), value: 5, }), rbrack: None, })), }))], eof: None, }], }; let want = Package { loc: b.location.clone(), package: "main".to_string(), files: vec![File { loc: b.location.clone(), package: None, imports: Vec::new(), body: vec![Statement::Expr(ExprStmt { loc: b.location.clone(), expression: Expression::Index(Box::new(IndexExpr { loc: b.location.clone(), typ: type_info(), array: Expression::Index(Box::new(IndexExpr { loc: b.location.clone(), typ: type_info(), array: Expression::Identifier(IdentifierExpr { loc: b.location.clone(), typ: type_info(), name: "a".to_string(), }), index: Expression::Integer(IntegerLit { loc: b.location.clone(), typ: type_info(), value: 3, }), })), index: Expression::Integer(IntegerLit { loc: b.location.clone(), typ: type_info(), value: 5, }), })), })], }], }; let got = test_convert(pkg).unwrap(); assert_eq!(want, got); } #[test] fn test_convert_access_idexed_object_returned_from_function_call() { let b = ast::BaseNode::default(); let pkg = ast::Package { base: b.clone(), path: "path".to_string(), package: "main".to_string(), files: vec![ast::File { base: b.clone(), name: "foo.flux".to_string(), metadata: String::new(), package: None, imports: Vec::new(), body: vec![ast::Statement::Expr(Box::new(ast::ExprStmt { base: b.clone(), 
expression: ast::Expression::Index(Box::new(ast::IndexExpr { base: b.clone(), array: ast::Expression::Call(Box::new(ast::CallExpr { base: b.clone(), callee: ast::Expression::Identifier(ast::Identifier { base: b.clone(), name: "f".to_string(), }), lparen: None, arguments: vec![], rparen: None, })), lbrack: None, index: ast::Expression::Integer(ast::IntegerLit { base: b.clone(), value: 3, }), rbrack: None, })), }))], eof: None, }], }; let want = Package { loc: b.location.clone(), package: "main".to_string(), files: vec![File { loc: b.location.clone(), package: None, imports: Vec::new(), body: vec![Statement::Expr(ExprStmt { loc: b.location.clone(), expression: Expression::Index(Box::new(IndexExpr { loc: b.location.clone(), typ: type_info(), array: Expression::Call(Box::new(CallExpr { loc: b.location.clone(), typ: type_info(), pipe: None, callee: Expression::Identifier(IdentifierExpr { loc: b.location.clone(), typ: type_info(), name: "f".to_string(), }), arguments: Vec::new(), })), index: Expression::Integer(IntegerLit { loc: b.location.clone(), typ: type_info(), value: 3, }), })), })], }], }; let got = test_convert(pkg).unwrap(); assert_eq!(want, got); } #[test] fn test_convert_nested_member_expression() { let b = ast::BaseNode::default(); let pkg = ast::Package { base: b.clone(), path: "path".to_string(), package: "main".to_string(), files: vec![ast::File { base: b.clone(), name: "foo.flux".to_string(), metadata: String::new(), package: None, imports: Vec::new(), body: vec![ast::Statement::Expr(Box::new(ast::ExprStmt { base: b.clone(), expression: ast::Expression::Member(Box::new(ast::MemberExpr { base: b.clone(), object: ast::Expression::Member(Box::new(ast::MemberExpr { base: b.clone(), object: ast::Expression::Identifier(ast::Identifier { base: b.clone(), name: "a".to_string(), }), lbrack: None, property: ast::PropertyKey::Identifier(ast::Identifier { base: b.clone(), name: "b".to_string(), }), rbrack: None, })), lbrack: None, property: ast::PropertyKey::Identifier(ast::Identifier { base: b.clone(), name: "c".to_string(), }), rbrack: None, })), }))], eof: None, }], }; let want = Package { loc: b.location.clone(), package: "main".to_string(), files: vec![File { loc: b.location.clone(), package: None, imports: Vec::new(), body: vec![Statement::Expr(ExprStmt { loc: b.location.clone(), expression: Expression::Member(Box::new(MemberExpr { loc: b.location.clone(), typ: type_info(), object: Expression::Member(Box::new(MemberExpr { loc: b.location.clone(), typ: type_info(), object: Expression::Identifier(IdentifierExpr { loc: b.location.clone(), typ: type_info(), name: "a".to_string(), }), property: "b".to_string(), })), property: "c".to_string(), })), })], }], }; let got = test_convert(pkg).unwrap(); assert_eq!(want, got); } #[test] fn test_convert_member_with_call_expression() { let b = ast::BaseNode::default(); let pkg = ast::Package { base: b.clone(), path: "path".to_string(), package: "main".to_string(), files: vec![ast::File { base: b.clone(), name: "foo.flux".to_string(), metadata: String::new(), package: None, imports: Vec::new(), body: vec![ast::Statement::Expr(Box::new(ast::ExprStmt { base: b.clone(), expression: ast::Expression::Member(Box::new(ast::MemberExpr { base: b.clone(), object: ast::Expression::Call(Box::new(ast::CallExpr { base: b.clone(), callee: ast::Expression::Member(Box::new(ast::MemberExpr { base: b.clone(), object: ast::Expression::Identifier(ast::Identifier { base: b.clone(), name: "a".to_string(), }), lbrack: None, property: 
ast::PropertyKey::Identifier(ast::Identifier { base: b.clone(), name: "b".to_string(), }), rbrack: None, })), lparen: None, arguments: vec![], rparen: None, })), lbrack: None, property: ast::PropertyKey::Identifier(ast::Identifier { base: b.clone(), name: "c".to_string(), }), rbrack: None, })), }))], eof: None, }], }; let want = Package { loc: b.location.clone(), package: "main".to_string(), files: vec![File { loc: b.location.clone(), package: None, imports: Vec::new(), body: vec![Statement::Expr(ExprStmt { loc: b.location.clone(), expression: Expression::Member(Box::new(MemberExpr { loc: b.location.clone(), typ: type_info(), object: Expression::Call(Box::new(CallExpr { loc: b.location.clone(), typ: type_info(), pipe: None, callee: Expression::Member(Box::new(MemberExpr { loc: b.location.clone(), typ: type_info(), object: Expression::Identifier(IdentifierExpr { loc: b.location.clone(), typ: type_info(), name: "a".to_string(), }), property: "b".to_string(), })), arguments: Vec::new(), })), property: "c".to_string(), })), })], }], }; let got = test_convert(pkg).unwrap(); assert_eq!(want, got); } #[test] fn test_convert_bad_stmt() { let b = ast::BaseNode::default(); let pkg = ast::Package { base: b.clone(), path: "path".to_string(), package: "main".to_string(), files: vec![ast::File { base: b.clone(), name: "foo.flux".to_string(), metadata: String::new(), package: None, imports: Vec::new(), body: vec![ast::Statement::Bad(Box::new(ast::BadStmt { base: b.clone(), text: "bad statement".to_string(), }))], eof: None, }], }; let want: Result<Package> = Err("BadStatement is not supported in semantic analysis".to_string()); let got = test_convert(pkg); assert_eq!(want, got); } #[test] fn test_convert_bad_expr() { let b = ast::BaseNode::default(); let pkg = ast::Package { base: b.clone(), path: "path".to_string(), package: "main".to_string(), files: vec![ast::File { base: b.clone(), name: "foo.flux".to_string(), metadata: String::new(), package: None, imports: Vec::new(), body: vec![ast::Statement::Expr(Box::new(ast::ExprStmt { base: b.clone(), expression: ast::Expression::Bad(Box::new(ast::BadExpr { base: b.clone(), text: "bad expression".to_string(), expression: None, })), }))], eof: None, }], }; let want: Result<Package> = Err("BadExpression is not supported in semantic analysis".to_string()); let got = test_convert(pkg); assert_eq!(want, got); } }
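Every convert_* function above follows the same shape: it consumes its AST node by value, converts children recursively, attaches a fresh type variable from the Fresher to each expression node, and rejects Bad* nodes with an error string. The following is a minimal standalone sketch of that pattern, using toy Ast/Sem/Fresher types invented for illustration; none of these names come from the Flux crate.

// Toy illustration of the convert-with-fresher pattern used in convert.rs.
// All types here are invented for the sketch; they are not the Flux types.
type SemanticError = String;
type Result<T> = std::result::Result<T, SemanticError>;

enum Ast {
    Int(i64),
    Add(Box<Ast>, Box<Ast>),
    Bad,
}

enum Sem {
    Int { typ: u64, value: i64 },
    Add { typ: u64, left: Box<Sem>, right: Box<Sem> },
}

// Stand-in for a fresher: hands out unique type variables.
struct Fresher(u64);

impl Fresher {
    fn fresh(&mut self) -> u64 {
        self.0 += 1;
        self.0
    }
}

// Like convert_expression: move the AST node, convert children first,
// then tag the resulting semantic node with a fresh type variable.
fn convert(ast: Ast, f: &mut Fresher) -> Result<Sem> {
    match ast {
        Ast::Int(value) => Ok(Sem::Int { typ: f.fresh(), value }),
        Ast::Add(l, r) => {
            let left = Box::new(convert(*l, f)?);
            let right = Box::new(convert(*r, f)?);
            Ok(Sem::Add { typ: f.fresh(), left, right })
        }
        // Bad nodes are rejected, mirroring the BadStatement/BadExpression errors above.
        Ast::Bad => Err("Bad node is not supported in semantic analysis".to_string()),
    }
}

#[test]
fn converts_and_numbers_nodes() {
    let ast = Ast::Add(Box::new(Ast::Int(1)), Box::new(Ast::Int(2)));
    assert!(convert(ast, &mut Fresher(0)).is_ok());
    assert!(convert(Ast::Bad, &mut Fresher(0)).is_err());
}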
true
12e6e4ad88859f08e5b01b898b0ec9d33a62cb58
Rust
Vechro/roman
/src/lib.rs
UTF-8
3,082
3.578125
4
[]
no_license
mod test;

const fn roman_lut(numeral: &char) -> Option<usize> {
    match numeral {
        'I' => Some(1),
        'V' => Some(5),
        'X' => Some(10),
        'L' => Some(50),
        'C' => Some(100),
        'D' => Some(500),
        'M' => Some(1000),
        _ => None,
    }
}

const fn arabic_lut(digit: &usize) -> Option<&str> {
    match digit {
        1 => Some("I"),
        4 => Some("IV"),
        5 => Some("V"),
        9 => Some("IX"),
        10 => Some("X"),
        40 => Some("XL"),
        50 => Some("L"),
        90 => Some("XC"),
        100 => Some("C"),
        400 => Some("CD"),
        500 => Some("D"),
        900 => Some("CM"),
        1000 => Some("M"),
        _ => None,
    }
}

static DIGITS_DESC: [usize; 13] = [1000, 900, 500, 400, 100, 90, 50, 40, 10, 9, 5, 4, 1];

struct Tally {
    total: usize,
    max: usize,
}

// Impure function as it prints to stdout immediately.
pub fn convert_and_print_numerals(list_of_numerals: &[String]) {
    for number_str in list_of_numerals {
        let result = match number_str.chars().next() {
            Some(c) => match c {
                c if c.is_ascii_alphabetic() => roman_to_arabic(&number_str.to_ascii_uppercase()),
                c if c.is_ascii_digit() => arabic_to_roman(number_str),
                _ => None,
            },
            // An empty string is simply invalid input rather than an unreachable case.
            None => None,
        };
        match result {
            Some(s) => println!("{}", s),
            None => println!("Invalid numerals!"),
        };
    }
}

fn arabic_to_roman(arabic_numerals: &str) -> Option<String> {
    let mut num = match arabic_numerals.parse::<usize>() {
        Ok(n) => n,
        Err(_) => return None,
    };
    let result = DIGITS_DESC
        .iter()
        .fold(String::new(), |mut state: String, digit| {
            let quot = num / *digit;
            num %= *digit;
            let numeral = match arabic_lut(digit) {
                Some(s) => s,
                None => unreachable!(),
            };
            state.push_str(&numeral.repeat(quot));
            state
        });
    Some(result)
}

fn roman_to_arabic(roman_numerals: &str) -> Option<String> {
    let result = roman_numerals.chars().rfold(
        Some(Tally { total: 0, max: 0 }),
        |tally: Option<Tally>, c| {
            let current_value = match roman_lut(&c) {
                Some(val) => val,
                None => return None,
            };
            let (total, mut max) = match tally {
                Some(Tally { total, max }) => (total, max),
                None => return None,
            };
            max = current_value.max(max);
            if current_value >= max {
                Some(Tally {
                    total: total + current_value,
                    max,
                })
            } else {
                Some(Tally {
                    total: total - current_value,
                    max,
                })
            }
        },
    );
    match result {
        Some(Tally { total, .. }) => Some(total.to_string()),
        None => None,
    }
}
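A minimal usage sketch for the converter above, not taken from the repository: it assumes the library crate is named `roman` and simply feeds a few sample strings to `convert_and_print_numerals`.

// Hypothetical main.rs using the library above; the crate name `roman` is an assumption.
fn main() {
    let inputs: Vec<String> = vec!["XIV".into(), "2023".into(), "hello".into()];
    // Prints "14", "MMXXIII" and "Invalid numerals!" in turn.
    roman::convert_and_print_numerals(&inputs);
}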
true
eeafdd246e46748861c9efec9a4ce1d0f6e81ccd
Rust
bouzuya/rust-atcoder
/cargo-atcoder/contests/abc296/src/bin/b.rs
UTF-8
312
2.640625
3
[]
no_license
use proconio::{input, marker::Chars};

fn main() {
    input! {
        s: [Chars; 8],
    };
    for i in 0..8 {
        let n = 8 - i;
        for j in 0..8 {
            let a = (b'a' + j as u8) as char;
            if s[i][j] == '*' {
                println!("{}{}", a, n);
            }
        }
    }
}
true
81318dce492702fd85da4204de7e836ba3beca28
Rust
kakoc/leetcode
/src/except_self.rs
UTF-8
779
3.21875
3
[]
no_license
/// Product of Array Except Self: for each index, the product of all other
/// elements, computed without division in two passes.
pub fn product_except_self(nums: Vec<i32>) -> Vec<i32> {
    let mut res = vec![0; nums.len()];

    // First pass (right to left): res[i] holds the product of everything to the right of i.
    let mut p = 1;
    for (i, v) in nums.iter().rev().enumerate() {
        if i == 0 {
            res[nums.len() - 1 - i] = 1;
            p = *v;
            continue;
        }
        res[nums.len() - 1 - i] = p;
        p *= v;
    }

    // Second pass (left to right): multiply in the product of everything to the left of i.
    let mut p = 1;
    for (i, v) in nums.iter().enumerate() {
        if i == 0 {
            p *= v;
            continue;
        }
        res[i] = p * res[i];
        p *= v;
    }
    res
}

#[test]
fn test_except_self() {
    let i = vec![1, 2, 3, 4];
    let a = product_except_self(i);
    assert_eq!(a, vec![24, 12, 8, 6]);

    let i = vec![9, 0, -2];
    let a = product_except_self(i);
    assert_eq!(a, vec![0, -18, 0]);
}
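The two loops above are a compact form of the standard prefix-/suffix-product construction. As a clarifying sketch only (not part of the original solution), the same idea written with explicit prefix and suffix arrays:

// Illustrative alternative, equivalent in result to the solution above.
pub fn product_except_self_explicit(nums: &[i32]) -> Vec<i32> {
    let n = nums.len();
    let mut prefix = vec![1; n + 1]; // prefix[i] = product of nums[..i]
    let mut suffix = vec![1; n + 1]; // suffix[i] = product of nums[i..]
    for i in 0..n {
        prefix[i + 1] = prefix[i] * nums[i];
        suffix[n - 1 - i] = suffix[n - i] * nums[n - 1 - i];
    }
    (0..n).map(|i| prefix[i] * suffix[i + 1]).collect()
}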
true
507353e2300b9ffbfd7a5507ffdbdb51ce737fde
Rust
ebsnet/blockchain
/lib/data/src/tx.rs
UTF-8
2,121
3.140625
3
[]
no_license
//! This module contains transaction specific data.

use hack::BigArray;

use bincode::serialize;
use failure::Error;

/// Size of an Ed25519 signature in bytes.
pub const SIG_SIZE: usize = 64;
/// Convenience type for a signature.
pub type Signature = [u8; SIG_SIZE];
/// Convenience type for signed data inside a block.
pub type BlockData = SignedData<Data>;

/// Wrapper for signed data. This struct contains the data and the signature.
#[derive(Deserialize, Serialize, Clone)]
pub struct SignedData<T> {
    #[serde(with = "BigArray")]
    signature: Signature,
    data: T,
}

impl<T> SignedData<T> {
    /// Generates a new object from the supplied data and signature.
    pub fn new(signature: Signature, data: T) -> Self {
        Self { signature, data }
    }

    /// Returns a reference to the wrapped data.
    pub fn data(&self) -> &T {
        &self.data
    }

    /// Returns a reference to the wrapped signature.
    pub fn signature(&self) -> &Signature {
        &self.signature
    }
}

/// Convenience type for a fingerprint.
pub type Fingerprint = Vec<u8>;

/// The data that can be contained in a block.
#[derive(Deserialize, Serialize, PartialEq, Clone)]
pub enum Data {
    /// Billing operation used to initialize a billing process and indicate that a user has been
    /// billed at a certain point in time.
    Billing(Fingerprint),
    /// Usage operation that records the power usage of a user.
    Usage(u64),
}

/// Types that implement this trait can be signed.
pub trait Signable {
    /// Converts the data to a list of bytes that can be signed.
    fn get_bytes(&self) -> Result<Vec<u8>, Error>;
}

impl Signable for Data {
    fn get_bytes(&self) -> Result<Vec<u8>, Error> {
        let res = serialize(self)?;
        Ok(res)
    }
}

impl<T> Default for SignedData<T>
where
    T: Default,
{
    fn default() -> Self {
        Self {
            signature: [0; SIG_SIZE],
            data: Default::default(),
        }
    }
}

impl Default for Data {
    fn default() -> Self {
        Data::Billing(Default::default())
    }
}
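A small illustrative sketch of how this module's types fit together, assuming it lives in the same module so `Data`, `Signable`, `SignedData`, and `Error` are in scope; the zeroed signature is a stand-in for a real Ed25519 signature produced elsewhere in the project.

// Illustrative only: this helper is not part of the original module, and the
// all-zero signature merely shows the shape of the API.
fn example_block_data() -> Result<(), Error> {
    let payload = Data::Usage(42);
    // The bytes a real signer would actually sign:
    let _to_sign = payload.get_bytes()?;
    let signature: Signature = [0u8; SIG_SIZE];
    let entry: BlockData = SignedData::new(signature, payload);
    assert_eq!(entry.signature().len(), SIG_SIZE);
    Ok(())
}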
true
c8a539fe0e2a23380ab156c99430c4099d68e83c
Rust
arashout/cover-letter-generator
/src/blurb.rs
UTF-8
1,322
2.96875
3
[]
no_license
use crate::rules::{apply_rules, Rule};
use crate::types::TokenizedDescription;

#[derive(Default)]
pub struct Blurb<'a> {
    pub precendence: u8,
    pub long_description: &'a str,
    pub short_description: &'a str,
    pub name: &'a str,
    rules: Vec<Box<Rule>>,
}

impl<'a> Blurb<'a> {
    pub fn new(name: &'a str) -> Self {
        Blurb {
            name,
            precendence: 10,
            long_description: "",
            short_description: "",
            rules: vec![],
        }
    }

    pub fn with_precedence(mut self, n: u8) -> Self {
        self.precendence = n;
        self
    }

    pub fn with_description(mut self, description: &'a str) -> Self {
        self.short_description = description;
        self.long_description = description;
        self
    }

    pub fn with_long_description(mut self, description: &'a str) -> Self {
        self.long_description = description;
        self
    }

    pub fn add_rule(mut self, boxed_rule: Box<Rule>) -> Self {
        self.rules.push(boxed_rule);
        self
    }

    pub fn is_applicable(&self, tokenized_description: &TokenizedDescription) -> bool {
        if self.rules.is_empty() {
            return false;
        }
        apply_rules(tokenized_description, &self.rules)
    }
}

pub type BlurbVec<'a> = Vec<Blurb<'a>>;
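A hypothetical builder-style usage of `Blurb`, purely for illustration; the blurb texts are invented and no rules are attached because the concrete `Rule` implementations live in `crate::rules`, which is not part of this file.

// Hypothetical usage only; without rules, is_applicable() will return false.
fn example_blurbs<'a>() -> BlurbVec<'a> {
    vec![
        Blurb::new("rust")
            .with_precedence(3)
            .with_description("I have shipped production services written in Rust."),
        Blurb::new("docker")
            .with_description("Comfortable packaging applications with Docker.")
            .with_long_description(
                "Comfortable packaging applications with Docker and docker-compose.",
            ),
    ]
}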
true
4ce7f85b9bc8f8e8ddf7a5c2bb82447b83da7e83
Rust
richarddowner/Rust
/rust-by-example/modules/modules.rs
UTF-8
975
3.734375
4
[]
no_license
// Rust provides a powerful module system that can be used to hierarchically
// split code in logical units (modules), and manage visibility (public/priv)
// between them.

// A module is a collection of items like: functions, structs, traits, impl blocks,
// and even other modules.

fn function() {
    println!("called `function()`");
}

// A module named `my`
mod my {
    // A module can contain items like functions
    #[allow(dead_code)]
    fn function() {
        println!("called `my::function()`");
    }

    // Modules can be nested
    mod nested {
        #[allow(dead_code)]
        fn function() {
            println!("called `my::nested::function()`");
        }
    }
}

fn main() {
    function();

    // Items can also be referred to by their full path;
    // the `println!` macro lives at the root of the `std` crate.
    std::println!("Hello World!");

    // Error! `my::function` is private
    // my::function();
}
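The commented-out call at the end fails because `my::function` is private. As a small sketch that is not part of the original example, here is the same module tree with `pub` added so both nested calls compile:

// Sketch only: a parallel module tree with `pub` visibility added.
mod my_pub {
    pub fn function() {
        println!("called `my_pub::function()`");
    }

    pub mod nested {
        pub fn function() {
            println!("called `my_pub::nested::function()`");
        }
    }
}

#[allow(dead_code)]
fn demo_visibility() {
    my_pub::function();
    my_pub::nested::function();
}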
true
b8f9029ec2b77aa4922952530b4ef8e23da04d25
Rust
davideGiovannini/rust_sdl2_engine
/leek/src/lib.rs
UTF-8
3,592
2.78125
3
[]
no_license
//! The following code creates an empty window: //! ``` //! fn main() { //! Engine::new("Title").start::<Game>(); //! } //! //! //! struct Game; //! //! impl GameScene for Game { //! fn set_up(&mut self) {} //! //! fn logic(&mut self, context: &EngineContext, engine: &mut Engine, ui: &Ui) -> EngineAction { //! EngineAction::default() //! } //! //! fn render(&mut self, context: &EngineContext, engine: &mut Engine, ui: &Ui) {} //! } //! //! impl FromEngine for Game { //! fn init(engine: &mut Engine) -> Self { //! Game{} //! } //! } //! //! ``` //! Update Game struct with your desired field :) //! extern crate gl; pub extern crate sdl2; pub extern crate alto; extern crate failure; pub extern crate lewton; extern crate notify; #[macro_use] pub extern crate imgui; use sdl2::pixels::Color; pub mod alto_utils; mod engine; mod fps_counter; mod game_controllers; mod imgui_backend; mod opengl; mod post_processing; pub mod prelude; mod sdl2_utils; mod debug; pub mod resources; #[macro_use] mod common_macros; pub use post_processing::PostProcessEffect as PostProcessingEffect; pub mod math; pub use engine::game::{AnyGameScene, FromEngine, GameScene}; pub use game_controllers::{GameController, GameControllerManager}; pub use engine::action::EngineAction; pub use engine::context::EngineContext; pub use engine::Engine; pub use sdl2_utils::log_system_info; pub mod font; const WINDOW_SIZE: (u32, u32) = (800, 600); const CLEAR_COLOR: Color = Color { r: 0, g: 0, b: 0, a: 255, }; pub struct EngineBuilder<'window> { window_title: &'window str, window_size: (u32, u32), logical_size: Option<(u32, u32)>, fullscreen: bool, hide_cursor: bool, relative_cursor: bool, clear_color: Color, imgui_font_scale: f32, } impl Engine { pub fn new(window_title: &str) -> EngineBuilder { EngineBuilder { window_title, window_size: WINDOW_SIZE, logical_size: None, clear_color: CLEAR_COLOR, fullscreen: false, hide_cursor: false, relative_cursor: false, imgui_font_scale: 1.5, } } } impl<'window> EngineBuilder<'window> { /// Set the initial size of the window. pub fn with_window_size(&mut self, width: u32, height: u32) -> &mut Self { self.window_size = (width, height); self } /// Set the logical render size. pub fn with_logical_size(&mut self, width: u32, height: u32) -> &mut Self { self.logical_size = Some((width, height)); self } pub fn with_clear_color(&mut self, color: Color) -> &mut Self { self.clear_color = color; self } pub fn with_fullscreen(&mut self, fullscreen: bool) -> &mut Self { self.fullscreen = fullscreen; self } pub fn with_imgui_font_scale(&mut self, font_scale: f32) -> &mut Self { self.imgui_font_scale = font_scale; self } pub fn with_hidden_cursor(&mut self, hide_cursor: bool) -> &mut Self { self.hide_cursor = hide_cursor; self } pub fn with_relative_cursor(&mut self, relative_cursor: bool) -> &mut Self { self.relative_cursor = relative_cursor; self } /// Start the engine. pub fn start<Scene: 'static>(&mut self) where Scene: GameScene + FromEngine, { if let Err(error) = engine::run_engine::<Scene>(self) { println!("{:?}", error) } } } // RE-EXPORTS pub mod keyboard { pub use sdl2::keyboard::Scancode; }
true
8f22063d9137e61e624b12972c7fa844563d77e2
Rust
zargony/advent-of-code-2016
/src/day20.rs
UTF-8
1,553
3.359375
3
[]
no_license
use std::num;

/// Parse multiline-text of ranges into a vector of tuples
pub fn parse(s: &str) -> Result<Vec<(u32, u32)>, num::ParseIntError> {
    s.lines().map(|line| {
        let mut nums = line.split('-').map(|s| s.parse::<u32>().unwrap());
        Ok((nums.next().unwrap(), nums.next().unwrap()))
    }).collect()
}

/// Find lowest number not covered by a list of ranges
pub fn find_lowest(ranges: &[(u32, u32)]) -> u32 {
    let mut ranges = ranges.to_owned();
    ranges.sort_by_key(|n| n.0);
    let mut n = 0;
    for (from, to) in ranges {
        if from > n {
            break;
        }
        if to >= n {
            n = to + 1;
        }
    }
    n
}

/// Find amount of numbers not covered by a list of ranges
pub fn find_uncovered(ranges: &[(u32, u32)]) -> u32 {
    let mut ranges = ranges.to_owned();
    ranges.sort_by_key(|n| n.0);
    let mut upto = 0;
    let mut count = 0;
    for (from, to) in ranges {
        if from > upto {
            count += from - upto - 1;
        }
        if to > upto {
            upto = to;
        }
    }
    count
}

fn main() {
    let ranges = parse(include_str!("day20.txt")).unwrap();
    println!("Lowest non-blocked IP: {}", find_lowest(&ranges));
    println!("Number of allowed IPs: {}", find_uncovered(&ranges));
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn finding_lowest() {
        let ranges = parse("5-8\n0-2\n4-7").unwrap();
        assert_eq!(find_lowest(&ranges), 3);
    }

    #[test]
    fn finding_uncovered() {
        let ranges = parse("5-8\n0-2\n4-7").unwrap();
        assert_eq!(find_uncovered(&ranges), 1);
    }
}
true
ee0dd34daf298b6848c5a9ed8498979928af2d71
Rust
m1el/refterm-hash-break
/main.rs
UTF-8
12,123
2.609375
3
[]
no_license
#![allow(clippy::needless_return)] #![feature(portable_simd)] use core_simd::Simd; use core::convert::TryInto; use srng::SRng; use simd_aes::SimdAes; const DEFAULT_SEED: Simd<u8, 16> = Simd::from_array([ 178, 201, 95, 240, 40, 41, 143, 216, 2, 209, 178, 114, 232, 4, 176, 188, ]); #[allow(non_snake_case)] fn ComputeGlyphHash(data: &[u8]) -> Simd<u8, 16> { let zero = Simd::splat(0); let mut hash = Simd::<u64, 2>::from_array([data.len() as u64, 0]).to_ne_bytes(); hash ^= DEFAULT_SEED; let mut chunks = data.chunks_exact(16); for chunk in chunks.by_ref() { let chunk: &[u8; 16] = chunk.try_into().unwrap(); let value = Simd::from_array(*chunk); hash ^= value; hash = hash.aes_dec(zero); hash = hash.aes_dec(zero); hash = hash.aes_dec(zero); hash = hash.aes_dec(zero); } let remainder = chunks.remainder(); let mut temp = [0_u8; 16]; temp[..remainder.len()].copy_from_slice(remainder); let value = Simd::from_array(temp); hash ^= value; hash = hash.aes_dec(zero); hash = hash.aes_dec(zero); hash = hash.aes_dec(zero); hash = hash.aes_dec(zero); return hash; } #[allow(dead_code)] fn inv_aes_dec(mut data: Simd<u8, 16>, key: Simd<u8, 16>) -> Simd<u8, 16> { data ^= key; let zero = Simd::splat(0); data = data.aes_dec_last(zero).aes_enc(zero); return data.aes_enc_last(zero); } fn inv_aes_decx4(mut hash: Simd<u8, 16>) -> Simd<u8, 16> { let zero = Simd::splat(0); hash = hash.aes_dec_last(zero); hash = hash.aes_enc(zero); hash = hash.aes_enc(zero); hash = hash.aes_enc(zero); hash = hash.aes_enc(zero); hash = hash.aes_enc_last(zero); return hash; } fn single_prefix(count: usize, target_hash: Simd<u8, 16>) -> Simd<u8, 16> { // The first stage looks like this: // Hash ^ Seed = dec^4(Count ^ Seed ^ Chunk) // To get the chunk, we need to reverse these: // dec^-4(Hash ^ Seed) = Count ^ Seed ^ Chunk // Chunk = dec^4(Hash ^ Seed) ^ Count ^ Seed // To create a one-prefix initialization, we want: // Hash = Count // Count = Count + 16 let mut hash = target_hash; hash = inv_aes_decx4(hash); let prefix_init = Simd::<u64, 2>::from_array([count as u64 + 16, 0]).to_ne_bytes(); hash ^= prefix_init; hash ^= DEFAULT_SEED; return hash; } fn preimage_prefix_hash(mut hash: Simd<u8, 16>, data: &[u8]) -> Simd<u8, 16> { let chunks = data.len() / 16; let tail = &data[chunks*16..]; let mut tail_buf = [0_u8; 16]; tail_buf[..tail.len()].copy_from_slice(tail); let value = Simd::from_array(tail_buf); hash = inv_aes_decx4(hash); hash ^= value; for chunk in data.chunks_exact(16).rev() { let chunk: &[u8; 16] = chunk.try_into().unwrap(); let value = Simd::from_array(*chunk); hash = inv_aes_decx4(hash); hash ^= value; } return hash; } fn invert_block(mut hash: Simd<u8, 16>, chunk: &[u8]) -> Simd<u8, 16> { let chunk: &[u8; 16] = chunk.try_into().unwrap(); let value = Simd::from_array(*chunk); hash = inv_aes_decx4(hash); return hash ^ value; } fn invert_last(suffix: &[u8], mut hash: Simd<u8, 16>) -> Simd<u8, 16> { let mut tail_buf = [0_u8; 16]; tail_buf[..suffix.len()].copy_from_slice(suffix); let value = Simd::from_array(tail_buf); hash = inv_aes_decx4(hash); hash ^= value; hash = inv_aes_decx4(hash); return hash; } fn concat(prefix: Simd<u8, 16>, target: &[u8]) -> Vec<u8> { let mut image = prefix.to_array().to_vec(); image.extend_from_slice(target); image } fn prefix_collision_attack(message: &[u8]) { let mut target_hash = Simd::<u64, 2>::from_array([message.len() as u64, 0]).to_ne_bytes(); target_hash ^= DEFAULT_SEED; let prefix = single_prefix(message.len(), target_hash); println!("Demonstrating prefix attack"); println!("message: {:x?}", 
message); println!("hash: {:x?}", ComputeGlyphHash(b"hello")); println!("prefix: {:x?}", prefix); let forgery = concat(prefix, message); println!("forgery: {:x?}", forgery); println!("hash: {:x?}", ComputeGlyphHash(&forgery)); println!(); } fn chosen_prefix(prefix: &[u8]) { let zero = Simd::splat(0); let mut message = prefix.to_vec(); let remainder = 16 - (message.len() % 16); message.extend((0..remainder).map(|_| b'A')); message.extend((0..16).map(|_| 0)); let hash = ComputeGlyphHash(&message); let pre_current = invert_last(&[], hash); let pre_target = invert_last(&[], zero); let last = message.len() - 16; let suffix = pre_current ^ pre_target; message[last..].copy_from_slice(&suffix.to_array()); println!("Demonstrating chosen prefix attack"); println!("prefix: {:x?}", prefix); println!("forgery: {:x?}", message); println!("hash: {:x?}", ComputeGlyphHash(&message)); println!(); } fn preimage_attack(suffix: &[u8]) { println!("Demonstrating preimage attack"); println!("suffix: {:x?}", suffix); let target_hash = Simd::splat(0); println!("goal hash: {:x?}", target_hash); let prefix_hash = preimage_prefix_hash(target_hash, suffix); let preimage_prefix = single_prefix(suffix.len(), prefix_hash); println!("prefix: {:x?}", preimage_prefix); let message = concat(preimage_prefix, suffix); println!("message: {:x?}", message); println!("hash: {:x?}", ComputeGlyphHash(&message)); } fn padding_attack() { println!("Demonstrating padding attack"); println!(r#"message: "", hash: {:x?}"#, ComputeGlyphHash(b"")); println!(r#"message: "\x01", hash: {:x?}"#, ComputeGlyphHash(b"\x01")); println!(r#"message: "A", hash: {:x?}"#, ComputeGlyphHash(b"A")); println!(r#"message: "B\x00", hash: {:x?}"#, ComputeGlyphHash(b"B\x00")); println!(r#"message: "BAAAAAAAAAAAAAAA", hash: {:x?}"#, ComputeGlyphHash(b"BAAAAAAAAAAAAAAA")); println!(r#"message: "CAAAAAAAAAAAAAAA\x00", hash: {:x?}"#, ComputeGlyphHash(b"CAAAAAAAAAAAAAAA\x00")); println!(); } fn invert_attack(message: &[u8]) { println!("Demonstrating invert attack, invert a hash up to 15 bytes"); println!("Note: due to padding attack, there are actually more messages"); println!("plaintext: {:x?}", message); let mut hash = ComputeGlyphHash(message); println!("hash: {:x?}", hash); hash = inv_aes_decx4(hash); hash ^= DEFAULT_SEED; let mut buffer = hash.to_array(); let len = buffer.iter().rposition(|&chr| chr != 0).map_or(0, |x| x + 1); if len == 16 { println!("the plaintext mus be shorter than 16 bytes, cannot invert"); return; } buffer[0] ^= len as u8; let recovered = &buffer[..len]; println!("recovered: {:x?}", recovered); println!("hash: {:x?}", ComputeGlyphHash(recovered)); println!(); } pub fn check_alphanum(bytes: Simd<u8, 16>) -> bool { // check if the characters are outside of '0'..'z' range if (bytes - Simd::splat(b'0')).lanes_gt(Simd::splat(b'z' - b'0')).any() { return false; } // check if the characters are in of '9'+1..'A'-1 range if (bytes - Simd::splat(b'9' + 1)).lanes_lt(Simd::splat(b'A' - (b'9' + 1))).any() { return false; } // check if the characters are in of 'Z'+1..'a'-1 range if (bytes - Simd::splat(b'Z' + 1)).lanes_lt(Simd::splat(b'a' - (b'Z' + 1))).any() { return false; } return true; } use core::sync::atomic::{AtomicBool, Ordering}; static FOUND: AtomicBool = AtomicBool::new(false); fn find_ascii_zeros(suffix: &[u8], worker: u64) { const ATTACK_BYTES: usize = 6; let mut target_hash = Simd::<u8, 16>::splat(0); let mut bsuffix = suffix; let suffix_len = 16 - ATTACK_BYTES; let mut whole_block = false; if suffix.len() >= suffix_len { target_hash = 
preimage_prefix_hash(target_hash, &suffix[suffix_len..]); bsuffix = &suffix[..suffix_len]; whole_block = true; } let mut controlled = [0u8; 16]; let total_len = ATTACK_BYTES + suffix.len(); let controlled_bytes = total_len.min(16); let controlled = &mut controlled[..controlled_bytes]; controlled[ATTACK_BYTES..].copy_from_slice(bsuffix); let seed = Simd::from_array([ 17820195240, 4041143216, 22093178114, 2324176188, ]); let mut rng = SRng::new(seed * Simd::splat(worker + 1)); let start = std::time::Instant::now(); for ii in 0_u64.. { if FOUND.load(Ordering::Relaxed) { return; } let prefix = rng.random_alphanum(); controlled[..6].copy_from_slice(&prefix[..6]); let prefix = { let prefix_hash = if whole_block { invert_block(target_hash, controlled) } else { preimage_prefix_hash(target_hash, controlled) }; single_prefix(total_len, prefix_hash) }; if check_alphanum(prefix) { FOUND.store(true, Ordering::Relaxed); let mut buffer = prefix.to_array().to_vec(); buffer.extend_from_slice(&controlled[..6]); buffer.extend_from_slice(suffix); let elapsed = start.elapsed(); let mhs = (ii as f64) / 1e6 / elapsed.as_secs_f64(); eprintln!("found prefix in {}it {:?} {:3.3}MH/s/core", ii, elapsed, mhs); eprintln!("hash: {:x?}", ComputeGlyphHash(&buffer)); println!("{}", core::str::from_utf8(&buffer).unwrap()); break; } } } const MESSAGE: &[&[u8]] = &[ b" Hello Casey! I hope this message finds you well.", b" Please ignore those 22 random chars to the left for now.", b" The work you've done on refterm is admirable. There are", b" not enough performance conscious programmers around, and", b" we need a demonstration of what is achievable. However,", b" I would like to address the claim that the hash function", b" used in refterm is 'cryptographically secure'. There is", b" a very specific meaning attached to those words, namely:", b" 1) it is hard to create a message for a given hash value", b" 2) it is hard to produce two messages with the same hash", b" If you check, the following strings have the same hash:", b" xvD7FsaUdGy9UyjalZlFEU, 0XXPpB0wpVszsvSxgsn0su,", b" IGNwdjol0dxLflcnfW7vsI, jcTHx0zBJbW2tdiX157RSz.", b" In fact, every line in the message yields the exact same", b" hash value. That is 0x00000000000000000000000000000000.", b" I believe this was a clear enough demonstration that the", b" hash function `ComputeGlyphHash` isn't cryptographically", b" secure, and that an attacker can corrupt the glyph cache", b" by printing glyphs with the same hash. The main problem", b" with this hash function is that all operations consuming", b" bytes are invertible. Which means an attacker could run", b" the hash function in reverse, consuming the message from", b" behind, and calculate the message to get the given hash.", b" The hash is also weak to a padding attack. 
For example,", br#" two strings "A" and "B\x00" yield the same hash, because"#, b" the padding is constant, so zero byte in the end doens't", b" matter, and the first byte is `xor`ed with input length.", b" If you'd like to, you can read this blog post explaining", b" these attacks in detail and how to avoid them using well", b" known methods: https://m1el.github.io/refterm-hash", b" Best regards, -- Igor", ]; fn main() { padding_attack(); invert_attack(b"Qwerty123"); prefix_collision_attack(b"hello"); chosen_prefix(b"hello"); preimage_attack(b"hello"); const THREADS: u64 = 16; for msg in MESSAGE { FOUND.store(false, Ordering::Relaxed); let threads = (0..THREADS) .map(|worker| std::thread::spawn(move || find_ascii_zeros(msg, worker))) .collect::<Vec<_>>(); for thread in threads { thread.join().unwrap(); } }; }
true
cced4a2da2d263cc5d424bf18d58bfc715bb4de4
Rust
neosmart/securestore-rs
/securestore/src/tests/secrets.rs
UTF-8
4,788
3.59375
4
[ "Apache-2.0", "MIT" ]
permissive
//! Highest-level tests for the secure store use crate::errors::ErrorKind; use crate::{KeySource, SecretsManager}; use tempfile::NamedTempFile; /// Verify that basic storage and retrieval of secrets functions correctly. #[test] fn basic_store_get() { // Create a new secrets manager with a known secret so we don't need to muck // around with keyfiles later. let secrets_path = NamedTempFile::new().unwrap().into_temp_path(); let mut sman = SecretsManager::new(KeySource::Password("mysecret")).unwrap(); // Make sure that we can set values in different &str/String types sman.set("foo", "bar"); sman.set("foo", "bar".to_string()); sman.save_as(&secrets_path).unwrap(); // Do we get the same value back on get? let getd: String = sman.get("foo").unwrap(); assert_eq!("bar", getd); // Now open the store from the disk with the same settings and make sure the // data remains loadable. let sman2 = SecretsManager::load(&secrets_path, KeySource::Password("mysecret")).unwrap(); let getd: String = sman2.get("foo").unwrap(); assert_eq!("bar", getd); } #[test] fn wrong_password() { let secrets_path = NamedTempFile::new().unwrap().into_temp_path(); let mut sman = SecretsManager::new(KeySource::Password("mysecret")).unwrap(); // Set something sman.set("foo", "foo"); // And save the store to disk sman.save_as(&secrets_path).unwrap(); // Now try loading the store with wrong password match SecretsManager::load(&secrets_path, KeySource::Password("notmysecret")) { Ok(_) => panic!("Sentinel failed to detect wrong password on load"), Err(e) => { assert_eq!(ErrorKind::DecryptionFailure, e.kind()); } } } #[test] fn secret_not_found() { let sman = SecretsManager::new(KeySource::Csprng).unwrap(); assert_eq!(Err(ErrorKind::SecretNotFound.into()), sman.get("foo")); } #[test] fn csprng_export() { let secrets_path = NamedTempFile::new().unwrap().into_temp_path(); let key_path = NamedTempFile::new().unwrap().into_temp_path(); { let mut sman = SecretsManager::new(KeySource::Csprng).unwrap(); sman.export_key(&key_path).unwrap(); sman.set("foo", "bar"); sman.save_as(&secrets_path).unwrap(); } let sman = SecretsManager::load(secrets_path, KeySource::File(key_path)).unwrap(); assert_eq!(Ok("bar".to_owned()), sman.get("foo")); } #[test] fn password_export() { let secrets_path = NamedTempFile::new().unwrap().into_temp_path(); let key_path = NamedTempFile::new().unwrap().into_temp_path(); { let mut sman = SecretsManager::new(KeySource::Password("password123")).unwrap(); // Use legacy .export() alias .export_keyfile() to make sure it works sman.export_keyfile(&key_path).unwrap(); sman.set("foo", "bar"); sman.save_as(&secrets_path).unwrap(); } let sman = SecretsManager::load(secrets_path, KeySource::File(key_path)).unwrap(); assert_eq!(Ok("bar".to_owned()), sman.get("foo")); } #[test] fn invalid_key_file() { let key_path = NamedTempFile::new().unwrap().into_temp_path(); match SecretsManager::new(KeySource::File(key_path)) { Ok(_) => panic!("SecretsManager loaded with invalid key file!"), Err(e) => assert_eq!(ErrorKind::InvalidKeyfile, e.kind()), } } #[test] fn binary_secret() { let mut sman = SecretsManager::new(KeySource::Csprng).unwrap(); let (key, value) = ("secret", b"Hello, world!"); sman.set(key, &value[..]); assert_eq!(&value[..], sman.get_as::<Vec<u8>>(key).unwrap().as_slice()); } #[test] /// A release added generics to KeySource which were later removed because the /// default generic fallback doesn't work on current rust versions. 
This had /// let `KeySource::File(path: AsRef<Path>)` work, but broke `KeySource::Csprng` /// and `KeySource::Password` because the `P: AsRef<Path>` wasn't defined for /// those variants (unless it was explicitly provided, though not used). /// /// `KeySource::File` was renamed to `KeySource::Path` and takes a `&Path` only, /// but a function masquerading as a variant called `KeySource::File()` was /// introduced that returns `impl GenericKeySource`, the trait which we now /// accept in the `new()` and `load()` functions. This function is hidden from /// the docs and is for backwards-compatibility only. fn legacy_generic_keysource() { // We just want to verify that this compiles, we don't test the result here. let _ = SecretsManager::load("secrets.json", KeySource::File("secrets.key")); } #[test] fn str_as_generic_keysource() { // We just want to verify that this compiles, we don't test the result here. let _ = SecretsManager::load("secrets.json", "secrets.key"); }
true
68d081dfc8abf1dcb438287b74bb54f66deef276
Rust
jneem/rust-playground
/src/skyline.rs
UTF-8
9,228
3.28125
3
[]
no_license
#[deriving(Clone, Show)] struct Building { m: f64, b: f64, end: f64 } // To prevent numerical instability, we don't allow large slopes. static MAX_SLOPE: f64 = 1e3; impl Building { fn from_points(x1: f64, y1: f64, x2: f64, y2: f64) -> Building { // To avoid NaNs, we deal with vertical line segments separately. if x1 == x2 { return Building { m: 0.0, b: y1.max(y2), end: x1 } } let m_orig = (y2 - y1) / (x2 - x1); let m = m_orig.max(-MAX_SLOPE).min(MAX_SLOPE); let b = (y1 - m*x1).max(y2 - m*x2); Building { m: m, b: b, end: x1.max(x2) } } fn intersection(&self, other: &Building) -> f64 { let x = (other.b - self.b) / (self.m - other.m); if x.is_nan() { Float::neg_infinity() } else { x } } fn conceals(&self, other: &Building, x: f64) -> bool { self.conceals_with_intersect(other, x, self.intersection(other)) } fn conceals_with_intersect(&self, other: &Building, x: f64, intersect: f64) -> bool { if self.m == other.m { self.b >= other.b } else { (intersect <= x && self.m > other.m) || (intersect > x && self.m < other.m) } } fn empty(end: f64) -> Building { Building { m: 0.0, b: Float::neg_infinity(), end: end } } fn chop(&self, new_end: f64) -> Building { Building { m: self.m, b: self.b, end: new_end } } fn y(&self, x: f64) -> f64 { // We assume that the slope is not infinite. Then // the only way to get NaN out of m*x + b is if // b is infinite. But if b is infinite // then it should be negative infinity, and we just return it. if self.b.is_infinite() { self.b } else { self.m * x + self.b } } } // FIXME: the parameter of type Option<Self> is a work-around // for not having UFCS. See // https://mail.mozilla.org/pipermail/rust-dev/2014-May/009850.html pub trait Direction { fn direction_multiplier(_: Option<Self>) -> f64; } #[deriving(Show)] pub struct Up; #[deriving(Show)] pub struct Down; #[deriving(Show)] pub struct Left; #[deriving(Show)] pub struct Right; impl Direction for Up { fn direction_multiplier(_: Option<Up>) -> f64 { 1.0 } } impl Direction for Down { fn direction_multiplier(_: Option<Down>) -> f64 { -1.0 } } impl Direction for Left { fn direction_multiplier(_: Option<Left>) -> f64 { -1.0 } } impl Direction for Right { fn direction_multiplier(_: Option<Right>) -> f64 { 1.0 } } pub trait Flip<T> {} impl Flip<Up> for Down {} impl Flip<Down> for Up {} impl Flip<Right> for Left {} impl Flip<Left> for Right {} #[deriving(Clone, Show)] pub struct Skyline<T: Direction> { buildings: Vec<Building> } impl<T: Direction> Skyline<T> { pub fn empty() -> Box<Skyline<T>> { box Skyline { buildings: vec![Building::empty(Float::infinity())] } } pub fn single(x1: f64, y1: f64, x2: f64, y2: f64) -> Box<Skyline<T>> { let mult = Direction::direction_multiplier(None::<T>); let b = Building::from_points(x1, y1 * mult, x2, y2 * mult); let start = Building::empty(x1.min(x2)); let end = Building::empty(Float::infinity()); box Skyline { buildings: vec![start, b, end] } } #[cfg(test)] fn from_buildings(bldgs: Vec<Building>) -> Box<Skyline<T>> { box Skyline { buildings: bldgs } } pub fn overlap<S: Flip<T>>(&self, other: &Skyline<S>) -> f64 { let mut dist: f64 = Float::neg_infinity(); let mut start: f64 = Float::neg_infinity(); let mut i = 0u; let mut j = 0u; let imax = self.buildings.len(); let jmax = other.buildings.len(); while i < imax && j < jmax { // Loop invariant: b1 and b2 start at or after `start`. 
let b1 = self.buildings[i]; let b2 = other.buildings[j]; let end: f64; if b1.end < b2.end { end = b1.end; i += 1; } else { end = b2.end; j += 1; } dist = dist.max(b1.y(start) + b2.y(start)); dist = dist.max(b1.y(end) + b2.y(end)); start = end; } dist } fn first_intersection(b: &Building, bldgs: &[Building], mut start: f64, idx: &mut uint) -> f64 { let idxmax = bldgs.len(); while *idx < idxmax { let other = &bldgs[*idx]; let intersect = b.intersection(other); if b.conceals_with_intersect(other, start, intersect) { if intersect > start && intersect < b.end.min(other.end) { // This building intersects with the other one. return intersect; } else if b.end < other.end { // This building ends before the other one. return b.end; } else { // The other building ends first (or they end together). *idx += 1; start = other.end; } } else { return start; } } return Float::infinity(); } fn internal_merge(in1: &[Building], in2: &[Building], out: &mut Vec<Building>) { let mut start: f64 = Float::neg_infinity(); let mut i = 0u; let mut j = 0u; let imax = in1.len(); let jmax = in2.len(); // Loop invariant: if j == jmax then i == imax-1. while i < imax && j < jmax { let b1 = &in1[i]; let b2 = &in2[j]; if b1.conceals(b2, start) { start = Skyline::<T>::first_intersection(b1, in2, start, &mut j); out.push(b1.chop(start)); // If i == imax-1 then b1.end == inf. If in addition, // start >= b1.end then we must have j == jmax-1 // (i.e., we're done with with input skylines). if start >= b1.end { i += 1; } } else { start = Skyline::<T>::first_intersection(b2, in1, start, &mut i); out.push(b2.chop(start)); if start >= b2.end { j += 1; } } } } pub fn merge(&mut self, other: &Skyline<T>) { let mut new_bldgs = Vec::new(); Skyline::<T>::internal_merge(self.buildings.as_slice(), other.buildings.as_slice(), &mut new_bldgs); self.buildings = new_bldgs; } pub fn slide(&mut self, x: f64) { for b in self.buildings.iter_mut() { b.end += x } } pub fn bump(&mut self, y: f64) { let y = y * Direction::direction_multiplier(None::<T>); for b in self.buildings.iter_mut() { b.b += y } } } #[cfg(test)] mod test { use test_utils::ApproxEq; mod test_utils; impl<'a> ApproxEq for &'a Building { fn approx_eq<'b>(self, other: &'b Building) -> bool { self.m.approx_eq(other.m) && self.b.approx_eq(other.b) && self.end.approx_eq(other.end) } } impl<'a, T: Direction> ApproxEq for &'a Skyline<T> { fn approx_eq<'b>(self, other: &'b Skyline<T>) -> bool { if self.buildings.len() != other.buildings.len() { return false; } for i in range(0, self.buildings.len()) { if !self.buildings[i].approx_eq(&other.buildings[i]) { return false; } } return true; } } #[test] fn basic_skyline_merge() { let mut sky1 = Skyline::<Up>::single(-2.0, 0.0, -1.0, 0.0); let mut sky2 = Skyline::<Up>::single(1.0, 0.0, 2.0, 0.0); sky2.merge(&*sky1); let target = Skyline::from_buildings( vec!(Building::empty(-2.0), Building { m: 0.0, b: 0.0, end: -1.0 }, Building::empty(1.0), Building { m: 0.0, b: 0.0, end: 2.0 }, Building::empty(Float::infinity()))); assert!(sky2.approx_eq(&*target)); sky1.merge(&*sky2); assert!(sky1.approx_eq(&*target)); } #[test] fn basic_skyline_overlap() { let sky1 = Skyline::<Up>::single(-1.0, 3.0, 1.0, 3.0); let sky2 = Skyline::<Down>::single(-1.0, 2.0, 1.0, 2.0); let d = sky1.overlap(&*sky2); assert!(d.approx_eq(1.0), "d = {}, should be 1.0", d); } // TODO: once compilefail tests are available, add some to make // sure we can't compare skylines with different directions. // TODO: test slide and bump }
true
fcb01a63ca183f09223a866c6536afe442a45c51
Rust
yoav-steinberg/jsonpath
/tests/filter.rs
UTF-8
6,059
2.671875
3
[ "MIT" ]
permissive
#[macro_use] extern crate serde_json; use common::{read_json, select_and_then_compare, setup}; mod common; #[test] fn quote() { setup(); select_and_then_compare( r#"$['single\'quote']"#, json!({"single'quote":"value"}), json!(["value"]), ); select_and_then_compare( r#"$["double\"quote"]"#, json!({"double\"quote":"value"}), json!(["value"]), ); } #[test] fn filter_next_all() { setup(); for path in &[r#"$.*"#, r#"$[*]"#] { select_and_then_compare( path, json!(["string", 42, { "key": "value" }, [0, 1]]), json!(["string", 42, { "key": "value" }, [0, 1]]), ); } } #[test] fn filter_all() { setup(); for path in &[r#"$..*"#, r#"$..[*]"#] { select_and_then_compare( path, json!(["string", 42, { "key": "value" }, [0, 1]]), json!([ "string", 42, { "key" : "value" }, [ 0, 1 ], "value", 0, 1 ]), ); } } #[test] fn filter_array_next_all() { setup(); for path in &[r#"$.*.*"#, r#"$[*].*"#, r#"$.*[*]"#, r#"$[*][*]"#] { select_and_then_compare( path, json!(["string", 42, { "key": "value" }, [0, 1]]), json!(["value", 0, 1]), ); } } #[test] fn filter_all_complex() { setup(); for path in &[r#"$..friends.*"#, r#"$[*].friends.*"#] { select_and_then_compare( path, read_json("./benchmark/data_array.json"), json!([ { "id" : 0, "name" : "Millicent Norman" }, { "id" : 1, "name" : "Vincent Cannon" }, { "id" : 2, "name" : "Gray Berry" }, { "id" : 0, "name" : "Tillman Mckay" }, { "id" : 1, "name" : "Rivera Berg" }, { "id" : 2, "name" : "Rosetta Erickson" } ]), ); } } #[test] fn filter_parent_with_matched_child() { setup(); select_and_then_compare( "$.a[?(@.b.c == 1)]", json!({ "a": { "b": { "c": 1 } } }), json!([ { "b" : { "c" : 1 } } ]), ); } #[test] fn filter_parent_exist_child() { setup(); select_and_then_compare( "$.a[?(@.b.c)]", json!({ "a": { "b": { "c": 1 } } }), json!([ { "b" : { "c" : 1 } } ]), ); } #[test] fn filter_parent_paths() { setup(); select_and_then_compare( "$[?(@.key.subKey == 'subKey2')]", json!([ {"key": {"seq": 1, "subKey": "subKey1"}}, {"key": {"seq": 2, "subKey": "subKey2"}}, {"key": 42}, {"some": "value"} ]), json!([{"key": {"seq": 2, "subKey": "subKey2"}}]), ); } #[test] fn bugs33_exist_in_all() { setup(); select_and_then_compare( "$..[?(@.first.second)]", json!({ "foo": { "first": { "second": "value" } }, "foo2": { "first": {} }, "foo3": { } }), json!([ { "first": { "second": "value" } } ]), ); } #[test] fn bugs33_exist_left_in_all_with_and_condition() { setup(); select_and_then_compare( "$..[?(@.first && @.first.second)]", json!({ "foo": { "first": { "second": "value" } }, "foo2": { "first": {} }, "foo3": { } }), json!([ { "first": { "second": "value" } } ]), ); } #[test] fn bugs33_exist_right_in_all_with_and_condition() { setup(); select_and_then_compare( "$..[?(@.b.c.d && @.b)]", json!({ "a": { "b": { "c": { "d" : { "e" : 1 } } } } }), json!([ { "b" : { "c" : { "d" : { "e" : 1 } } } } ]), ); } #[test] fn bugs38_array_notation_in_filter() { setup(); select_and_then_compare( "$[?(@['key']==42)]", json!([ {"key": 0}, {"key": 42}, {"key": -1}, {"key": 41}, {"key": 43}, {"key": 42.0001}, {"key": 41.9999}, {"key": 100}, {"some": "value"} ]), json!([{"key": 42}]), ); select_and_then_compare( "$[?(@['key'].subKey == 'subKey2')]", json!([ {"key": {"seq": 1, "subKey": "subKey1"}}, {"key": {"seq": 2, "subKey": "subKey2"}}, {"key": 42}, {"some": "value"} ]), json!([{"key": {"seq": 2, "subKey": "subKey2"}}]), ); select_and_then_compare( "$[?(@['key']['subKey'] == 'subKey2')]", json!([ {"key": {"seq": 1, "subKey": "subKey1"}}, {"key": {"seq": 2, "subKey": "subKey2"}}, {"key": 42}, {"some": "value"} ]), 
json!([{"key": {"seq": 2, "subKey": "subKey2"}}]), ); select_and_then_compare( "$..key[?(@['subKey'] == 'subKey2')]", json!([ {"key": {"seq": 1, "subKey": "subKey1"}}, {"key": {"seq": 2, "subKey": "subKey2"}}, {"key": 42}, {"some": "value"} ]), json!([{"seq": 2, "subKey": "subKey2"}]), ); }
true
582843f9fd9e5b818bade37d0bf1ab284e8d4432
Rust
JiahaiHu/kv-server
/src/store/mod.rs
UTF-8
417
2.71875
3
[]
no_license
use std::collections::HashMap;

pub mod engine;

pub type Key = String;
pub type Value = String;

pub trait Engine {
    fn get(&self, key: &Key) -> Result<Option<Value>, ()>;
    fn put(&mut self, key: &Key, value: &Value) -> Result<Option<Value>, ()>;
    fn delete(&mut self, key: &Key) -> Result<Option<Value>, ()>;
    fn scan(&self, key_start: &Key, key_end: &Key) -> Result<Option<HashMap<Key, Value>>, ()>;
}
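The real storage backend lives in the `engine` submodule, which is not included in this record. As a rough sketch of the intended contract only, an in-memory implementation might look like the following; the half-open scan range and returning the previous value from `put`/`delete` are assumptions, not guarantees from the project.

// Illustrative sketch only; not the project's actual engine.
pub struct MemEngine {
    map: HashMap<Key, Value>,
}

impl MemEngine {
    pub fn new() -> Self {
        MemEngine { map: HashMap::new() }
    }
}

impl Engine for MemEngine {
    fn get(&self, key: &Key) -> Result<Option<Value>, ()> {
        Ok(self.map.get(key).cloned())
    }

    fn put(&mut self, key: &Key, value: &Value) -> Result<Option<Value>, ()> {
        // Assumed semantics: return the previously stored value, if any.
        Ok(self.map.insert(key.clone(), value.clone()))
    }

    fn delete(&mut self, key: &Key) -> Result<Option<Value>, ()> {
        Ok(self.map.remove(key))
    }

    fn scan(&self, key_start: &Key, key_end: &Key) -> Result<Option<HashMap<Key, Value>>, ()> {
        // Assumed semantics: keys in the half-open range [key_start, key_end).
        let hits: HashMap<Key, Value> = self
            .map
            .iter()
            .filter(|&(k, _)| k >= key_start && k < key_end)
            .map(|(k, v)| (k.clone(), v.clone()))
            .collect();
        Ok(if hits.is_empty() { None } else { Some(hits) })
    }
}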
true
f3729e4c24aac1e3e0c3c89a1e7c07b2a18a1a5c
Rust
Lakelezz/white_rabbit
/src/lib.rs
UTF-8
10,550
3.53125
4
[ "ISC" ]
permissive
//! *“I'm late! I'm late! For a very important date!”* //! *by “The White Rabbit”* 『Alice's Adventures in Wonderland』 //! //! `white_rabbit` schedules your tasks and can repeat them! //! //! One funny use case are chat bot commands: Imagine a *remind me*-command, //! the command gets executed and you simply create a one-time job to be //! scheduled for whatever time the user desires. //! //! We are using chrono's `DateTime<Utc>`, enabling you to serialise and thus //! backup currently running tasks, //! in case you want to shutdown/restart your application, //! constructing a new scheduler is doable. //! However, please make sure your internal clock is synced. #![deny(rust_2018_idioms)] use chrono::Duration as ChronoDuration; use parking_lot::{Condvar, Mutex, RwLock}; use std::{cmp::Ordering, collections::BinaryHeap, sync::Arc, time::Duration as StdDuration}; use threadpool::ThreadPool; pub use chrono::{DateTime, Duration, Utc}; /// Compare if an `enum`-variant matches another variant. macro_rules! cmp_variant { ($expression:expr, $($variant:tt)+) => { match $expression { $($variant)+ => true, _ => false } } } /// When a task is due, this will be passed to the task. /// Currently, there is not much use to this. However, this might be extended /// in the future. pub struct Context { time: DateTime<Utc>, } /// Every task will return this `enum`. pub enum DateResult { /// The task is considered finished and can be fully removed. Done, /// The task will be scheduled for a new date on passed `DateTime<Utc>`. Repeat(DateTime<Utc>), } /// Every job gets a planned `Date` with the scheduler. pub struct Date { pub context: Context, pub job: Box<dyn FnMut(&mut Context) -> DateResult + Send + Sync + 'static>, } impl Eq for Date {} /// Invert comparisions to create a min-heap. impl Ord for Date { fn cmp(&self, other: &Date) -> Ordering { match self.context.time.cmp(&other.context.time) { Ordering::Less => Ordering::Greater, Ordering::Greater => Ordering::Less, Ordering::Equal => Ordering::Equal, } } } /// Invert comparisions to create a min-heap. impl PartialOrd for Date { fn partial_cmp(&self, other: &Date) -> Option<Ordering> { Some(match self.context.time.cmp(&other.context.time) { Ordering::Less => Ordering::Greater, Ordering::Greater => Ordering::Less, Ordering::Equal => Ordering::Equal, }) } } impl PartialEq for Date { fn eq(&self, other: &Date) -> bool { self.context.time == other.context.time } } /// The [`Scheduler`]'s worker thread switches through different states /// while running, each state changes the behaviour. /// /// [`Scheduler`]: struct.Scheduler.html enum SchedulerState { /// No dates being awaited, sleep until one gets added. PauseEmpty, /// Pause until next date is due. PauseTime(StdDuration), /// If the next date is already waiting to be executed, /// the thread continues running without sleeping. Run, /// Exits the thread. Exit, } impl SchedulerState { fn is_running(&self) -> bool { cmp_variant!(*self, SchedulerState::Run) } fn new_pause_time(duration: ChronoDuration) -> Self { SchedulerState::PauseTime( duration .to_std() .unwrap_or_else(|_| StdDuration::from_millis(0)), ) } } /// This scheduler exists on two levels: The handle, granting you the /// ability of adding new tasks, and the executor, dating and executing these /// tasks when specified time is met. /// /// **Info**: This scheduler may not be precise due to anomalies such as /// preemption or platform differences. pub struct Scheduler { /// The mean of communication with the running scheduler. 
condvar: Arc<(Mutex<SchedulerState>, Condvar)>, /// Every job has its date listed inside this. dates: Arc<RwLock<BinaryHeap<Date>>>, } impl Scheduler { /// Add a task to be executed when `time` is reached. pub fn add_task_datetime<T>(&mut self, time: DateTime<Utc>, to_execute: T) where T: FnMut(&mut Context) -> DateResult + Send + Sync + 'static, { let &(ref state_lock, ref notifier) = &*self.condvar; let task = Date { context: Context { time }, job: Box::new(to_execute), }; let mut locked_heap = self.dates.write(); if locked_heap.is_empty() { let mut scheduler_state = state_lock.lock(); let left = task.context.time.signed_duration_since(Utc::now()); if !scheduler_state.is_running() { *scheduler_state = SchedulerState::new_pause_time(left); notifier.notify_one(); } } else { let mut scheduler_state = state_lock.lock(); if let SchedulerState::PauseTime(_) = *scheduler_state { let peeked = locked_heap.peek().expect("Expected heap to be filled."); if task.context.time < peeked.context.time { let left = task.context.time.signed_duration_since(Utc::now()); if !scheduler_state.is_running() { *scheduler_state = SchedulerState::PauseTime( left.to_std() .unwrap_or_else(|_| StdDuration::from_millis(0)), ); notifier.notify_one(); } } } } locked_heap.push(task); } pub fn add_task_duration<T>(&mut self, how_long: ChronoDuration, to_execute: T) where T: FnMut(&mut Context) -> DateResult + Send + Sync + 'static, { let time = Utc::now() + how_long; self.add_task_datetime(time, to_execute); } } fn set_state_lock(state_lock: &Mutex<SchedulerState>, to_set: SchedulerState) { let mut state = state_lock.lock(); *state = to_set; } #[inline] fn _push_and_notfiy(date: Date, heap: &mut BinaryHeap<Date>, notifier: &Condvar) { heap.push(date); notifier.notify_one(); } /// This function pushes a `date` onto `data_pooled` and notifies the /// dispatching-thread in case they are sleeping. 
#[inline] fn push_and_notfiy( dispatcher_pair: &Arc<(Mutex<SchedulerState>, Condvar)>, data_pooled: &Arc<RwLock<BinaryHeap<Date>>>, when: &DateTime<Utc>, date: Date, ) { let &(ref state_lock, ref notifier) = &**dispatcher_pair; let mut state = state_lock.lock(); let mut heap_lock = data_pooled.write(); if let Some(peek) = heap_lock.peek() { if peek.context.time < *when { let left = peek.context.time.signed_duration_since(Utc::now()); *state = SchedulerState::new_pause_time(left); _push_and_notfiy(date, &mut heap_lock, &notifier); } else { let left = when.signed_duration_since(Utc::now()); *state = SchedulerState::new_pause_time(left); _push_and_notfiy(date, &mut heap_lock, &notifier); } } else { let left = when.signed_duration_since(Utc::now()); *state = SchedulerState::new_pause_time(left); _push_and_notfiy(date, &mut heap_lock, &notifier); } } #[must_use] enum Break { Yes, No, } #[inline] fn process_states(state_lock: &Mutex<SchedulerState>, notifier: &Condvar) -> Break { let mut scheduler_state = state_lock.lock(); while let SchedulerState::PauseEmpty = *scheduler_state { notifier.wait(&mut scheduler_state); } while let SchedulerState::PauseTime(duration) = *scheduler_state { if notifier .wait_for(&mut scheduler_state, duration) .timed_out() { break; } } if let SchedulerState::Exit = *scheduler_state { return Break::Yes; } Break::No } fn dispatch_date( threadpool: &ThreadPool, dates: &Arc<RwLock<BinaryHeap<Date>>>, pair_scheduler: &Arc<(Mutex<SchedulerState>, Condvar)>, ) { let mut date = { let mut dates = dates.write(); dates.pop().expect("Should not run on empty heap.") }; let date_dispatcher = dates.clone(); let dispatcher_pair = pair_scheduler.clone(); threadpool.execute(move || { if let DateResult::Repeat(when) = (date.job)(&mut date.context) { date.context.time = when; push_and_notfiy(&dispatcher_pair, &date_dispatcher, &when, date); } }); } fn check_peeking_date(dates: &Arc<RwLock<BinaryHeap<Date>>>, state_lock: &Mutex<SchedulerState>) { if let Some(next) = dates.read().peek() { let now = Utc::now(); if next.context.time > now { let left = next.context.time.signed_duration_since(now); set_state_lock(&state_lock, SchedulerState::new_pause_time(left)); } else { set_state_lock(&state_lock, SchedulerState::Run); } } else { set_state_lock(&state_lock, SchedulerState::PauseEmpty); } } impl Scheduler { /// Creates a new [`Scheduler`] which will use `thread_count` number of /// threads when tasks are being dispatched/dated. /// /// [`Scheduler`]: struct.Scheduler.html pub fn new(thread_count: usize) -> Self { let pair = Arc::new((Mutex::new(SchedulerState::PauseEmpty), Condvar::new())); let pair_scheduler = pair.clone(); let dates: Arc<RwLock<BinaryHeap<Date>>> = Arc::new(RwLock::new(BinaryHeap::new())); let dates_scheduler = Arc::clone(&dates); std::thread::spawn(move || { let &(ref state_lock, ref notifier) = &*pair_scheduler; let threadpool = ThreadPool::new(thread_count); loop { if let Break::Yes = process_states(&state_lock, &notifier) { break; } dispatch_date(&threadpool, &dates_scheduler, &pair_scheduler); check_peeking_date(&dates_scheduler, &state_lock); } }); Scheduler { condvar: pair, dates, } } } /// Once the scheduler is dropped, we also need to join and finish the thread. impl<'a> Drop for Scheduler { fn drop(&mut self) { let &(ref state_lock, ref notifier) = &*self.condvar; let mut state = state_lock.lock(); *state = SchedulerState::Exit; notifier.notify_one(); } }
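A minimal usage sketch of the scheduler, not taken from the crate's documentation: the crate name `white_rabbit` and the chosen delays are assumptions, while `Scheduler::new`, `add_task_duration`, `DateResult`, and the re-exported `Duration`/`Utc` come from the file above.

// Illustrative sketch: schedule a one-shot and a repeating task.
use white_rabbit::{DateResult, Duration, Scheduler, Utc};

fn main() {
    // Four worker threads for dispatching due tasks.
    let mut scheduler = Scheduler::new(4);

    // One-shot task: runs once, two seconds from now.
    scheduler.add_task_duration(Duration::seconds(2), |_ctx| {
        println!("remind me: stand up and stretch");
        DateResult::Done
    });

    // Repeating task: reschedules itself every five seconds.
    scheduler.add_task_duration(Duration::seconds(5), |_ctx| {
        println!("tick");
        DateResult::Repeat(Utc::now() + Duration::seconds(5))
    });

    // Keep the main thread alive long enough to observe a few runs.
    std::thread::sleep(std::time::Duration::from_secs(12));
}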
true
faa3c8a3555c2ab38c745576140f3d8811832fd9
Rust
neont21/pjos-rust-programming
/chapter11/assert_format/src/lib.rs
UTF-8
341
3.390625
3
[ "Apache-2.0" ]
permissive
pub fn greeting(name: &str) -> String {
    format!("Hello, {}!", name)
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn greeting_contains_name() {
        let result = greeting("Peter");
        assert!(
            result.contains("Peter"),
            "Greeting did not contain name, value was '{}'",
            result
        );
    }
}
true