ServiceNow/stack-3b-sample · Datasets at Fast360
{
// 获取包含Hugging Face文本的span元素
const spans = link.querySelectorAll('span.whitespace-nowrap, span.hidden.whitespace-nowrap');
spans.forEach(span => {
if (span.textContent && span.textContent.trim().match(/Hugging\s*Face/i)) {
span.textContent = 'AI快站';
}
});
});
// 替换logo图片的alt属性
document.querySelectorAll('img[alt*="Hugging"], img[alt*="Face"]').forEach(img => {
if (img.alt.match(/Hugging\s*Face/i)) {
img.alt = 'AI快站 logo';
}
});
}
// 替换导航栏中的链接
// Rewrite the site navigation links: replace "Spaces" and "Docs" with custom
// entries and remove "Posts" and "Enterprise" entirely. Idempotent per page
// load via the window._navLinksReplaced flag.
//
// FIX: the text-match regexes previously read /^s*Spacess*$/i etc. — the
// backslashes were lost, so they matched runs of the literal letter "s"
// instead of optional whitespace. Restored to /^\s*Spaces\s*$/i and friends.
function replaceNavigationLinks() {
  // Already completed on a previous run; nothing to do.
  if (window._navLinksReplaced) {
    return;
  }
  // Links handled during this run, to guard against double-processing.
  const replacedLinks = new Set();
  // Only search within the navigation area (header preferred, nav fallback).
  const headerArea = document.querySelector('header') || document.querySelector('nav');
  if (!headerArea) {
    return;
  }
  // Walk every anchor inside the navigation area.
  const navLinks = headerArea.querySelectorAll('a');
  navLinks.forEach(link => {
    // Skip links already replaced in this pass.
    if (replacedLinks.has(link)) return;
    const linkText = link.textContent.trim();
    const linkHref = link.getAttribute('href') || '';
    // Replace the Spaces link — only once (the text check below prevents
    // re-replacing a link we already rewrote).
    if (
      (linkHref.includes('/spaces') || linkHref === '/spaces' ||
        linkText === 'Spaces' || linkText.match(/^\s*Spaces\s*$/i)) &&
      linkText !== 'OCR模型免费转Markdown'
    ) {
      link.textContent = 'OCR模型免费转Markdown';
      link.href = 'https://fast360.xyz';
      link.setAttribute('target', '_blank');
      link.setAttribute('rel', 'noopener noreferrer');
      replacedLinks.add(link);
    }
    // Remove the Posts link entirely.
    else if (
      linkHref.includes('/posts') || linkHref === '/posts' ||
      linkText === 'Posts' || linkText.match(/^\s*Posts\s*$/i)
    ) {
      if (link.parentNode) {
        link.parentNode.removeChild(link);
      }
      replacedLinks.add(link);
    }
    // Replace the Docs link — only once.
    else if (
      (linkHref.includes('/docs') || linkHref === '/docs' ||
        linkText === 'Docs' || linkText.match(/^\s*Docs\s*$/i)) &&
      linkText !== '模型下载攻略'
    ) {
      link.textContent = '模型下载攻略';
      link.href = '/';
      replacedLinks.add(link);
    }
    // Remove the Enterprise link entirely.
    else if (
      linkHref.includes('/enterprise') || linkHref === '/enterprise' ||
      linkText === 'Enterprise' || linkText.match(/^\s*Enterprise\s*$/i)
    ) {
      if (link.parentNode) {
        link.parentNode.removeChild(link);
      }
      replacedLinks.add(link);
    }
  });
  // Some targets render as bare text nodes nested below the anchors; collect
  // any text node inside the nav whose trimmed content matches exactly.
  const textNodes = [];
  function findTextNodes(element) {
    if (element.nodeType === Node.TEXT_NODE) {
      const text = element.textContent.trim();
      if (text === 'Spaces' || text === 'Posts' || text === 'Enterprise') {
        textNodes.push(element);
      }
    } else {
      for (const child of element.childNodes) {
        findTextNodes(child);
      }
    }
  }
  findTextNodes(headerArea);
  // Apply the same replace/remove policy to the collected text nodes.
  textNodes.forEach(node => {
    const text = node.textContent.trim();
    if (text === 'Spaces') {
      node.textContent = node.textContent.replace(/Spaces/g, 'OCR模型免费转Markdown');
    } else if (text === 'Posts' || text === 'Enterprise') {
      // Posts/Enterprise text nodes are deleted outright.
      if (node.parentNode) {
        node.parentNode.removeChild(node);
      }
    }
  });
  // Mark the whole pass as done so subsequent calls are no-ops.
  window._navLinksReplaced = true;
}
// 替换代码区域中的域名
// Replace "huggingface.co" with "aifasthub.com" inside code snippets
// (highlighted strings, pre/code blocks, clone dialogs).
//
// FIXES:
//  * The regex /huggingface.co/g had an unescaped dot, so it also matched
//    strings like "huggingfaceXco"; now /huggingface\.co/g.
//  * Removed two verbatim duplicate passes (a second span.hljs-string loop
//    and a second pre/code loop subsumed by the git-clone pass).
function replaceCodeDomains() {
  // The domain pattern, dot escaped so it only matches the literal ".".
  const DOMAIN_RE = /huggingface\.co/g;
  const REPLACEMENT = 'aifasthub.com';

  // Rewrites the domain inside one element, preferring innerHTML so nested
  // markup is preserved; falls back to textContent for plain nodes.
  function rewriteElement(element) {
    if (!element.textContent || !element.textContent.includes('huggingface.co')) {
      return;
    }
    if (element.innerHTML.includes('huggingface.co')) {
      element.innerHTML = element.innerHTML.replace(DOMAIN_RE, REPLACEMENT);
    } else {
      element.textContent = element.textContent.replace(DOMAIN_RE, REPLACEMENT);
    }
  }

  // Highlighted string tokens (hljs-string / njs-string variants).
  document.querySelectorAll('span.hljs-string, span.njs-string, span[class*="hljs-string"], span[class*="njs-string"]').forEach(span => {
    if (span.textContent && span.textContent.includes('huggingface.co')) {
      span.textContent = span.textContent.replace(DOMAIN_RE, REPLACEMENT);
    }
  });

  // pre/code blocks containing clone-style commands. (Single pass: the
  // "git clone" textContent check and the GIT_LFS variant are merged.)
  document.querySelectorAll('pre, code').forEach(element => {
    const text = element.innerHTML;
    if (text.includes('huggingface.co') &&
        (text.includes('git clone') || text.includes('GIT_LFS_SKIP_SMUDGE=1'))) {
      element.innerHTML = text.replace(DOMAIN_RE, REPLACEMENT);
    }
  });

  // Code snippets on the model download page.
  document.querySelectorAll('.flex.border-t, .svelte_hydrator, .inline-block').forEach(container => {
    const content = container.innerHTML;
    if (content && content.includes('huggingface.co')) {
      container.innerHTML = content.replace(DOMAIN_RE, REPLACEMENT);
    }
  });

  // Clone-repository dialog; wrapped in try/catch because the selectors are
  // site-specific and may not exist on every page.
  try {
    // Dialog that shows "Clone this model repository".
    const cloneDialog = document.querySelector('.svelte_hydration_boundary, [data-target="MainHeader"]');
    if (cloneDialog) {
      cloneDialog.querySelectorAll('pre, code, span').forEach(rewriteElement);
    }
    // More precisely target domains inside clone commands.
    document.querySelectorAll('[data-target]').forEach(container => {
      container.querySelectorAll('pre, code, span.hljs-string').forEach(rewriteElement);
    });
  } catch (e) {
    // Intentionally silent: best-effort cosmetic rewrite, no logging wanted.
  }
}
// 当DOM加载完成后执行替换
// Run every replacement once the DOM is available, then re-check the
// navigation links after 3 seconds in case the nav is rendered late by
// client-side code. (Both readyState branches previously duplicated the
// same five statements; factored into one helper.)
function runAllReplacements() {
  replaceHeaderBranding();
  replaceNavigationLinks();
  replaceCodeDomains();
  // Only retry when the first pass could not find/complete the nav.
  setTimeout(() => {
    if (!window._navLinksReplaced) {
      console.log('[Client] 3秒后重新检查导航链接');
      replaceNavigationLinks();
    }
  }, 3000);
}
if (document.readyState === 'loading') {
  document.addEventListener('DOMContentLoaded', runAllReplacements);
} else {
  runAllReplacements();
}
// 增加一个MutationObserver来处理可能的动态元素加载
// Re-run branding/navigation replacements whenever the navigation area is
// (re)rendered dynamically. The manual parent-walk loop is replaced by the
// equivalent, idiomatic Element.closest('header, nav'), which matches the
// node itself or any ancestor.
const observer = new MutationObserver(mutations => {
  // True when any added element is a header/nav, contains one, or sits
  // inside one.
  const hasNavChanges = mutations.some(mutation =>
    Array.from(mutation.addedNodes).some(node => {
      if (node.nodeType !== Node.ELEMENT_NODE) {
        return false;
      }
      // closest() covers the node and its ancestors; querySelector covers
      // descendants of the newly added subtree.
      return node.closest('header, nav') !== null ||
        node.querySelector('header, nav') !== null;
    })
  );
  // Only act on navigation-related mutations.
  if (hasNavChanges) {
    // Reset the done-flag so replaceNavigationLinks runs on the fresh nav.
    window._navLinksReplaced = false;
    replaceHeaderBranding();
    replaceNavigationLinks();
  }
});
// Observe document.body (including the whole subtree); defer until
// DOMContentLoaded when body does not exist yet.
if (document.body) {
  observer.observe(document.body, { childList: true, subtree: true });
} else {
  document.addEventListener('DOMContentLoaded', () => {
    observer.observe(document.body, { childList: true, subtree: true });
  });
}
})();
\",\"\",\"\",\"\",\"\",\"
\",\"\",\"\",\"\",\"\",\"\"]\n\t\n\treturn open.index(openTag) == close.index(closeTag)\nend\n\t\ndef main\n \thtml_filtirle\nend\n \nmain if __FILE__ == $PROGRAM_NAME\n"},"directory_id":{"kind":"string","value":"21da4374db58bc9bca8283021e9d91fd28ff651b"},"languages":{"kind":"list like","value":["Ruby"],"string":"[\n \"Ruby\"\n]"},"num_files":{"kind":"number","value":1,"string":"1"},"repo_language":{"kind":"string","value":"Ruby"},"repo_name":{"kind":"string","value":"ilkinzeynalli/ruby"},"revision_id":{"kind":"string","value":"3c78c7d91baf32a0603cf87348459e0d3934c752"},"snapshot_id":{"kind":"string","value":"72e6c9c7941c1aebc59c966a65347c1f97e880b0"}}},{"rowIdx":204,"cells":{"branch_name":{"kind":"string","value":"refs/heads/master"},"text":{"kind":"string","value":"/*\ninput :\n3\n6\n10\n30\nOutput :\n1\n2\n3\n*/\n//Initial Template for C++\n\n#include \nusing namespace std;\n\n\n // } Driver Code Ends\n\n\n\n\n\n//User function Template for C++\n\n//Complete this function\nbool isprime(int n);\nint exactly3Divisors(int N)\n{\n //Your code here\n int c=1;\n if(N<=3)\n {\n return 0; \n }\n for(int i=3;i<=sqrt(N);i++)\n {\n if (isprime(i) && (i*i)<=N)\n {\n c++;\n }\n }\n return c;\n}\nbool isprime(int n)\n{\n int limit=(int)sqrt(n);\n for(int i=2;i<=limit;i++)\n {\n if(n%i==0)\n return false;\n }\n return true;\n}\n\n\n// { Driver Code Starts.\n\n\nint main()\n {\n int T;\n cin>>T;\n while(T--)\n {\n int N;\n cin>>N;\n cout<\n\n#include\nusing namespace std;\n\n\n\n // } Driver Code Ends\n\n\n\n\n\n//User function Template for C++\n\n//You need to complete this function\nint digitsInFactorial(int N)\n{\n if(N<=1)\n return 1;\n else\n {\n double di=0;\n for(int i=2;i<=N;i++)\n {\n di+=log10(i);\n }\n return floor(di)+1;\n }\n}\n\n\n// { Driver Code Starts.\n\nint main()\n{\n int T;\n cin>>T;\n while(T--)\n {\n int N;\n cin>>N;\n cout<xoofoo-xoops-themes/xdt_standard/language/french_iso/script.js\n/* $Id: script.js 275 2010-05-24 10:00:13Z kris_fr $ */\n/* 
Localization script *//README.md\nxdt_standard\n============\n\ntheme standard for xoops\n"},"directory_id":{"kind":"string","value":"df2887aec3c19cb8f3b6b530f062f749462e23e8"},"languages":{"kind":"list like","value":["JavaScript","Markdown"],"string":"[\n \"JavaScript\",\n \"Markdown\"\n]"},"num_files":{"kind":"number","value":2,"string":"2"},"repo_language":{"kind":"string","value":"JavaScript"},"repo_name":{"kind":"string","value":"xoofoo-xoops-themes/xdt_standard"},"revision_id":{"kind":"string","value":"fdb7322d504ce316023d9d10b59be5180249c9d6"},"snapshot_id":{"kind":"string","value":"9aa2e2c02d96a4798032a02f85d8f44bb70762c3"}}},{"rowIdx":206,"cells":{"branch_name":{"kind":"string","value":"refs/heads/master"},"text":{"kind":"string","value":"const reverseWords = (str: String) =>\n str\n .split(\" \")\n .reduce((revStr: Array, word: String) => [word, ...revStr], [])\n .join(\" \");\nexport default reverseWords;\nimport reverseWords from \"./mod.ts\";\n\nconsole.log(reverseWords(\"This is fun\"));\n"},"directory_id":{"kind":"string","value":"2379a809346d9acdb1ee320c344bb7d3583b4d21"},"languages":{"kind":"list like","value":["TypeScript"],"string":"[\n \"TypeScript\"\n]"},"num_files":{"kind":"number","value":2,"string":"2"},"repo_language":{"kind":"string","value":"TypeScript"},"repo_name":{"kind":"string","value":"felipecrs/reverse-words"},"revision_id":{"kind":"string","value":"19bbfbd20c8e3f3f173ab7bc76ccda0478b961c8"},"snapshot_id":{"kind":"string","value":"6d8843815146f928ea393bc90cbaaba119c433e1"}}},{"rowIdx":207,"cells":{"branch_name":{"kind":"string","value":"refs/heads/master"},"text":{"kind":"string","value":"from setuptools import setup, find_packages\n\ndef load_requirements():\n try:\n with open(\"./requirements.txt\") as f:\n return [line.strip() for line in f.readlines()]\n except FileNotFoundError:\n print(\"WARNING: requirements.txt not found\")\n return []\n\nsetup(\n name=\"toraw\",\n version=\"0.1.0\",\n description=\"\",\n author=\"\",\n 
url=\"https://github.com/bwulff/toraw\",\n packages=[\"toraw\"],\n package_dir={\"toraw\": \"src/toraw\"},\n include_package_data=True,\n install_requires=load_requirements(),\n entry_points=\"\"\"\n [console_scripts]\n toraw=toraw:cli\n \"\"\",\n)# Convert image files to RAW file (e.g. for Unity heightmaps)\n\nThis is a small Python script that will take a 1-channel image file (e.g. PNG) and converts it to a `.raw` file compatible for use in Unity e.g. for Terrain heightmaps.\n\nimport click\nimport numpy as np\nfrom PIL import Image\nfrom numpy import asarray\n\ndef convert(infile, outfile=None):\n\timage = Image.open(infile)\n\tdata = asarray(image)\n\tprint(f\"Image size: {data.shape}\")\n\twith open(outfile, 'wb') as f:\n\t\tfor y in range(data.shape[0]):\n\t\t\tfor x in range(data.shape[1]):\n\t\t\t\tv = np.uint16(data[y][x])\n\t\t\t\tf.write(v)\n\n@click.command()\n@click.argument(\"infile\")\n@click.option(\"--outfile\", default=None, help=\"specify output filename (default: replace filename extension with .raw)\")\ndef cli(infile, outfile):\n\tif outfile is None:\n\t\tparts = infile.split('.')\n\t\tparts[-1] = \"raw\"\n\t\toutfile = \".\".join(parts)\n\tconvert(infile, outfile)"},"directory_id":{"kind":"string","value":"9b5afc9dba1ef1dd15b016e5d4ab50a15c126cfe"},"languages":{"kind":"list like","value":["Markdown","Python"],"string":"[\n \"Markdown\",\n \"Python\"\n]"},"num_files":{"kind":"number","value":3,"string":"3"},"repo_language":{"kind":"string","value":"Python"},"repo_name":{"kind":"string","value":"bwulff/toraw"},"revision_id":{"kind":"string","value":"fe89297b61578741d81742bdbc302cb20769296d"},"snapshot_id":{"kind":"string","value":"d66d0f20392abbd23943ce636ebfca3959fc1434"}}},{"rowIdx":208,"cells":{"branch_name":{"kind":"string","value":"refs/heads/master"},"text":{"kind":"string","value":"/**\n * Copyright 2016 \n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the 
License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage org.redisson.api;\n\nimport java.util.concurrent.ScheduledExecutorService;\n\n/**\n * Distributed implementation of {@link java.util.concurrent.ScheduledExecutorService}\n * \n * @author \n *\n */\npublic interface RScheduledExecutorService extends RExecutorService, ScheduledExecutorService, RScheduledExecutorServiceAsync {\n\n /**\n * Cancels scheduled task by id\n * \n * @see RScheduledFuture#getTaskId()\n * \n * @param taskId \n * @return\n */\n boolean cancelScheduledTask(String taskId);\n \n}\n"},"directory_id":{"kind":"string","value":"ee9ff621c8e4cd1724efca4544012928581c01e8"},"languages":{"kind":"list like","value":["Java"],"string":"[\n \"Java\"\n]"},"num_files":{"kind":"number","value":1,"string":"1"},"repo_language":{"kind":"string","value":"Java"},"repo_name":{"kind":"string","value":"albertogoya/redisson"},"revision_id":{"kind":"string","value":"2f797e2c1f09c1d16d48712315b2b6558443191d"},"snapshot_id":{"kind":"string","value":"cb344c7cc4a93c87b37ed6e8eac2cafefdd85a91"}}},{"rowIdx":209,"cells":{"branch_name":{"kind":"string","value":"refs/heads/master"},"text":{"kind":"string","value":"ashmoran/language_implementation_patterns/Gemfile\nsource 'http://rubygems.org'\n\nruby '1.9.3'\n\ngem 'facets'\ngem 'treetop'\n\ngroup :development do\n\tgem 'guard'\n\tgem 'listen'\n\tgem 'rb-fsevent'\n gem 'growl'\n gem 'terminal-notifier-guard'\n\n\tgem 'cucumber'\n\tgem 'guard-cucumber'\n\tgem 'aruba'\n\n\tgem 'rspec'\n\tgem 'guard-rspec'\n\tgem 'fuubar'\n\tgem 'fakefs'\n\tgem 'lstrip-on-steroids'\n\n\tgem 'pry'\n\n\tgem 
'awesome_print'\nend\n/spec/1_getting_started/ch02_basic_parsing/p04_llk_recursive_descent_parser/list_parser_with_assignment_spec.rb\nrequire 'spec_helper'\n\nrequire '1_getting_started/ch02_basic_parsing/p04_llk_recursive_descent_parser/list_parser_with_assignment'\nrequire '1_getting_started/ch02_basic_parsing/p04_llk_recursive_descent_parser/lexer_lookahead'\nrequire '1_getting_started/ch02_basic_parsing/p02_ll1_recursive_descent_lexer/list_lexer'\nrequire_relative '../p03_ll1_recursive_descent_parser/list_parser_contract'\nrequire_relative 'list_parser_assignment_contract'\n\nmodule GettingStarted\n module BasicParsing\n module LLkRecursiveDescentParser\n describe \"Intergration:\", ListParserWithAssignment, \"and lexer\" do\n let(:input) { \"[ a = b ]\" }\n\n let(:lexer) { LL1RecursiveDescentLexer::ListLexer.new(input) }\n let(:lookahead) { LexerLookahead.new(lexer) }\n subject(:parser) { ListParserWithAssignment.new(lookahead) }\n\n it \"parses lists with assignments\" do\n expect(parser.list).to be == [ { :a => :b } ]\n end\n end\n\n describe ListParserWithAssignment do\n let(:tokens) {\n LL1RecursiveDescentLexer::Token.descriptions_to_tokens(token_descriptions)\n }\n let(:lexer) { tokens.each }\n let(:lookahead) { LexerLookahead.new(lexer) }\n subject(:parser) { ListParserWithAssignment.new(lookahead) }\n\n it_behaves_like \"a ListParser\"\n it_behaves_like \"a ListParser with assignment\"\n end\n end\n end\nend/lib/2_analyzing_languages/ch06_tracking_symbols/p16_monolithic_scope/variable_symbol.rb\nmodule AnalyzingLanguages\n module TrackingSymbols\n module MonolithicScope\n class VariableSymbol\n attr_reader :name, :type\n\n def initialize(name, type)\n @name = name.to_sym\n @type = type\n end\n\n def ==(other)\n other.is_a?(VariableSymbol) &&\n @name == other.name &&\n @type == other.type\n end\n\n def to_s\n \"<#{@name}:#{@type}>\"\n end\n end\n end\n 
end\nend/lib/1_getting_started/ch03_enhanced_parsing/p06_memoizing_parser/memoizing_list_parser.rb\nrequire '1_getting_started/errors'\n\n# UNFINISHED!!!\n#\n# I got bored of this - it's a big refactoring for a relatively minor change,\n# and I'm not primarily concerned with how to implement parsers. But I've left\n# the unfunished code here in case I decide to return to it some time.\n# It passes all the tests :)\n\nmodule GettingStarted\n module EnhancedParsing\n module MemoizingParser\n\n class MemoizingListParser\n def initialize(replayable_lexer)\n @lexer = replayable_lexer\n end\n\n def stat\n if speculate_stat_list\n stat_list\n elsif speculate_stat_parallel_assigment\n stat_parallel_assignment\n else\n raise NoViableAlternativeError.new(\"Expecting or \")\n end\n end\n\n def speculate_stat_list\n @lexer.speculate do\n stat_list\n end\n rescue RecognitionError => e\n false\n end\n\n def speculate_stat_parallel_assigment\n @lexer.speculate do\n stat_parallel_assignment\n end\n rescue RecognitionError => e\n false\n end\n\n def stat_list\n matched_list = list\n match(:eof)\n matched_list\n end\n\n def stat_parallel_assignment\n matched_parallel_assigment = parallel_assignment\n match(:eof)\n matched_parallel_assigment\n end\n\n def list\n failed = false\n\n @lexer.if_speculating do\n return if already_parsed?(:list)\n end\n\n parsed_list = _list\n\n @lexer.if_speculating do\n memoize(:list, parsed_list)\n end\n\n parsed_list\n rescue RecognitionError => e\n @lexer.if_speculating do\n memoize_failure(:list)\n end\n\n raise\n end\n\n def _list\n [ ].tap do |collected_list|\n match(:lbrack)\n elements(collected_list)\n match(:rbrack)\n end\n end\n\n def parallel_assignment\n lhs = list\n match(:equals)\n rhs = list\n { lhs => rhs }\n end\n\n def elements(collected_list)\n first_element(collected_list)\n while @lexer.peek.type == :comma\n match(:comma)\n element(collected_list)\n end\n end\n\n def first_element(collected_list)\n case @lexer.peek.type\n when :name, 
:lbrack\n element(collected_list)\n when :rbrack\n return\n else\n raise RecognitionError.new(\n \"Expected :lbrack, :name or :rbrack, found #{@lexer.peek.inspect}\"\n )\n end\n end\n\n def element(collected_list)\n if @lexer.peek(1).type == :name && @lexer.peek(2).type == :equals\n lhs, _, rhs = match(:name), match(:equals), match(:name)\n collected_list << { lhs.value.to_sym => rhs.value.to_sym }\n elsif @lexer.peek.type == :name\n collected_list << @lexer.peek.value.to_sym\n match(:name)\n elsif @lexer.peek.type == :lbrack\n collected_list << list\n else\n raise RecognitionError.new(\n \"Expected :name or :lbrack, found #{@lexer.peek.inspect}\"\n )\n end\n end\n\n private\n\n def already_parsed?(expression_type)\n false\n end\n\n def memoize(expression_type, expression)\n expression\n end\n\n def memoize_failure(expression_type)\n nil\n end\n\n def match(expected_type)\n if @lexer.peek.type == expected_type\n consume\n else\n raise RecognitionError.new(\n \"Expected #{expected_type.inspect}, found #{@lexer.peek.inspect}\"\n )\n end\n end\n\n def consume\n @lexer.next\n end\n end\n\n end\n end\nend\n/spec/2_analyzing_languages/ch06_tracking_symbols/p16_monolithic_scope/language_symbol_spec.rb\nrequire 'spec_helper'\n\nrequire '2_analyzing_languages/ch06_tracking_symbols/p16_monolithic_scope/language_symbol'\n\nmodule AnalyzingLanguages\n module TrackingSymbols\n module MonolithicScope\n describe LanguageSymbol do\n subject(:symbol) { LanguageSymbol.new(:foo) }\n\n its(:to_s) { should be == \"foo\" }\n\n describe \"#==\" do\n example do\n expect(symbol).to be == LanguageSymbol.new(:foo)\n end\n\n example do\n expect(symbol).to be == LanguageSymbol.new(\"foo\")\n end\n\n example do\n expect(symbol).to_not be == LanguageSymbol.new(:bar)\n end\n\n example do\n expect(symbol).to_not be == :foo\n end\n end\n\n describe \"#name\" do\n its(:name) { should be == :foo }\n\n it \"is always a symbol\" do\n expect(LanguageSymbol.new(\"foo\").name).to be == :foo\n end\n end\n 
end\n end\n end\nend/lib/2_analyzing_languages/ch06_tracking_symbols/p17_nested_scopes/cymbol2.rb\nrequire_relative 'cymbol2_parser'\n\nmodule AnalyzingLanguages\n module TrackingSymbols\n module NestedScopes\n\n class Cymbol2\n def initialize\n @parser = CymbolNestedParser.new\n end\n\n def parse(source)\n @parser.parse(source)\n end\n end\n\n end\n end\nend/lib/2_analyzing_languages/ch06_tracking_symbols/p16_monolithic_scope/language_symbol.rb\nmodule AnalyzingLanguages\n module TrackingSymbols\n module MonolithicScope\n class LanguageSymbol\n attr_reader :name\n\n def initialize(name)\n @name = name.to_sym\n end\n\n def ==(other)\n other.is_a?(LanguageSymbol) && @name == other.name\n end\n\n def to_s\n @name.to_s\n end\n end\n end\n end\nend/spec/2_analyzing_languages/ch06_tracking_symbols/p16_monolithic_scope/symbol_table_spec.rb\nrequire 'spec_helper'\n\nrequire '2_analyzing_languages/ch06_tracking_symbols/p16_monolithic_scope/symbol_table'\nrequire '2_analyzing_languages/ch06_tracking_symbols/p16_monolithic_scope/language_symbol'\n\nmodule AnalyzingLanguages\n module TrackingSymbols\n module MonolithicScope\n describe SymbolTable do\n subject(:symbol_table) { SymbolTable.new }\n\n describe \"#define\" do\n it \"ignores metadata\" do\n expect {\n symbol_table.define(LanguageSymbol.new(:foo), unused: \"stuff\")\n }.to_not raise_error(ArgumentError)\n end\n end\n\n describe \"#resolve\" do\n context \"undefined symbol\" do\n specify {\n expect {\n symbol_table.resolve(\"foo\")\n }.to raise_error(RuntimeError, \"Unknown symbol: foo\")\n }\n\n it \"ignores metadata\" do\n expect {\n symbol_table.resolve(\"foo\", unused: \"stuff\")\n }.to_not raise_error(ArgumentError)\n end\n end\n\n context \"defined symbol\" do\n let(:symbol) { LanguageSymbol.new(:foo) }\n\n before(:each) do\n symbol_table.define(symbol)\n end\n\n specify {\n expect(symbol_table.resolve(\"foo\")).to equal(symbol)\n }\n\n specify {\n expect(symbol_table.resolve(:foo)).to equal(symbol)\n }\n end\n 
end\n\n describe \"#to_s\" do\n before(:each) do\n symbol_table.define(LanguageSymbol.new(:foo))\n symbol_table.define(LanguageSymbol.new(:bar))\n symbol_table.define(LanguageSymbol.new(:baz))\n end\n\n it \"outputs in alphabetic order\" do\n expect(symbol_table.to_s).to be ==\n \"globals: {bar=bar, baz=baz, foo=foo}\"\n end\n end\n end\n end\n end\nend/spec/1_getting_started/ch02_basic_parsing/p02_ll1_recursive_descent_lexer/list_lexer_contract.rb\nrequire_relative 'enumerator_contract'\n\nmodule GettingStarted\n module BasicParsing\n module LL1RecursiveDescentLexer\n shared_examples_for \"a ListLexer\" do\n def tokenize_all_input\n begin\n loop do\n collected_output << lexer.next\n end\n rescue StopIteration => e\n\n end\n end\n\n describe \"#peek\" do\n let(:input) { \"[a]\" }\n\n it \"lets you see the next character\" do\n expect {\n lexer.next\n }.to change {\n lexer.peek\n }.from(Token.new(lbrack: \"[\")).to(Token.new(name: \"a\"))\n end\n end\n\n describe \"Enumerator-like properties\" do\n let(:input) { \"\" }\n\n def advance_to_end\n lexer.next\n end\n\n it_behaves_like \"an Enumerator\"\n end\n\n context \"valid input\" do\n before(:each) do\n tokenize_all_input\n end\n\n context \"empty string\" do\n let(:input) { \"\" }\n\n it \"marks the end of the tokens explicitly\" do\n expect(output).to be == [ { eof: nil } ]\n end\n end\n\n context \"blank string\" do\n context \"spaces, tabs, newlines\" do\n let(:input) { \" \" }\n\n specify {\n expect(output).to be == [ { eof: nil } ]\n }\n end\n end\n\n context \"lbrack\" do\n let(:input) { \"[\" }\n\n specify {\n expect(output).to be == [ { lbrack: \"[\" }, { eof: nil } ]\n }\n end\n\n context \"comma\" do\n let(:input) { \",\" }\n\n specify {\n expect(output).to be == [ { comma: \",\" }, { eof: nil } ]\n }\n end\n\n context \"equals\" do\n let(:input) { \"=\" }\n\n specify {\n expect(output).to be == [ { equals: \"=\" }, { eof: nil } ]\n }\n end\n\n context \"rbrack\" do\n let(:input) { \"]\" }\n\n specify 
{\n expect(output).to be == [ { rbrack: \"]\" }, { eof: nil } ]\n }\n end\n\n context \"names\" do\n context \"single letter\" do\n let(:input) { \"a\" }\n\n specify {\n expect(output).to be == [ { name: \"a\" }, { eof: nil } ]\n }\n end\n\n context \"multi-letter\" do\n let(:input) { \"abcdefghijklmnopqrstuvwxyz\" }\n\n specify {\n expect(output).to be == [ { name: \"abcdefghijklmnopqrstuvwxyz\" }, { eof: nil } ]\n }\n end\n\n context \"in a list\" do\n let(:input) { \"[ a, xyz, bc ]\" }\n\n specify {\n expect(output).to be == [\n { lbrack: \"[\" },\n { name: \"a\" },\n { comma: \",\" },\n { name: \"xyz\" },\n { comma: \",\" },\n { name: \"bc\" },\n { rbrack: \"]\" },\n { eof: nil }\n ]\n }\n end\n end\n\n context \"delimiters, separators, and spaces\" do\n let(:input) { \" [ \\t, \\n, ] \\n\" }\n\n specify {\n expect(output).to be == [\n { lbrack: \"[\" }, { comma: \",\" }, { comma: \",\" }, { rbrack: \"]\" }, { eof: nil }\n ]\n }\n end\n end\n\n context \"invalid input\" do\n context \"invalid characters\" do\n context \"in normal context\" do\n let(:input) { \"@\" }\n\n specify {\n expect { tokenize_all_input }.to raise_error(ArgumentError, \"Invalid character: @\")\n }\n end\n\n context \"in a name\" do\n let(:input) { \"a$\"}\n\n specify {\n expect { tokenize_all_input }.to raise_error(ArgumentError, \"Invalid character: $\")\n }\n end\n end\n end\n end\n\n end\n end\nend/lib/2_analyzing_languages/ch06_tracking_symbols/p16_monolithic_scope/cymbol.rb\nrequire 'treetop'\n\nmodule AnalyzingLanguages; module TrackingSymbols; module MonolithicScope; end; end; end\nTreetop.load(File.dirname(__FILE__) + '/cymbol_monolithic')\n\nrequire_relative 'cymbol_monolithic_node_classes'\n\nmodule AnalyzingLanguages\n module TrackingSymbols\n module MonolithicScope\n\n # Implements Pattern 16: Symbol Table for Monolithic Scope\n #\n # Note that this is more of an experimental playground than a translation\n # of the example in the book. 
Key points:\n #\n # * I'm using Treetop, not ANTLR. Mainly because I wanted to learn an\n # implementation of Parsing Expression Grammars (mentioned on p56),\n # and because Treetop works well in all Ruby runtimes, whereas ANTLR\n # needs JRuby if you want access to the latest version. (The Ruby\n # target lags behind the Java, C, C++ targets etc.)\n #\n # * The book produces output with code blocks in the ANTLR grammar.\n # I wanted to avoid hijacking the predicate system in Treetop, so the\n # way I solve it is closer to Pattern 25: Tree-Based Interpreter. Note\n # that it's not a very *good* interpreter, but it does the job of\n # spitting out the output we want (or something close enough to it).\n class Cymbol\n def initialize(symbol_table)\n @symbol_table = symbol_table\n @parser = CymbolMonolithicParser.new\n end\n\n def parse(source)\n populate_symbols(tree(source))\n end\n\n private\n\n def tree(source)\n @parser.parse(source)\n end\n\n def populate_symbols(tree)\n tree.populate_symbols(@symbol_table)\n end\n end\n\n end\n end\nend/lib/2_analyzing_languages/ch06_tracking_symbols/p16_monolithic_scope/symbol_table.rb\nmodule AnalyzingLanguages\n module TrackingSymbols\n module MonolithicScope\n class SymbolTable\n def initialize\n @symbols = { }\n end\n\n def define(symbol, metadata = { })\n @symbols[symbol.name] = symbol\n end\n\n def resolve(name, metadata = { })\n @symbols.fetch(name.to_sym) {\n raise \"Unknown symbol: #{name}\"\n }\n end\n\n def to_s\n \"globals: {#{key_value_pairs.join(\", \")}}\"\n end\n\n private\n\n def key_value_pairs\n @symbols.keys.sort.map { |key| \"#{key}=#{@symbols[key]}\" }\n end\n end\n end\n end\nend/spec/2_analyzing_languages/ch06_tracking_symbols/p17_nested_scopes/method_symbol_spec.rb\nrequire 'spec_helper'\n\nrequire '2_analyzing_languages/ch06_tracking_symbols/p17_nested_scopes/method_symbol'\nrequire '2_analyzing_languages/ch06_tracking_symbols/p16_monolithic_scope/language_symbol'\n\nmodule AnalyzingLanguages\n module 
TrackingSymbols\n module NestedScopes\n describe MethodSymbol do\n let(:type) { LanguageSymbol.new(:type) }\n subject(:symbol) { MethodSymbol.new(:name, type) }\n\n its(:to_s) { should be == \"method\" }\n\n describe \"#==\" do\n example do\n expect(symbol).to be == MethodSymbol.new(:name, type)\n end\n\n example do\n expect(symbol).to be == MethodSymbol.new(\"name\", type)\n end\n\n example do\n expect(symbol).to_not be == MethodSymbol.new(:bar, type)\n end\n\n example do\n expect(symbol).to_not be == MethodSymbol.new(:name, LanguageSymbol.new(:wrong_type))\n end\n\n example do\n expect(symbol).to_not be == :name\n end\n end\n\n describe \"#name\" do\n its(:name) { should be == :name }\n\n it \"is always a symbol\" do\n expect(MethodSymbol.new(\"name\", :ununused_type).name).to be == :name\n end\n end\n end\n end\n end\nend/lib/1_getting_started/ch02_basic_parsing/p04_llk_recursive_descent_parser/lexer_lookahead.rb\nmodule GettingStarted\n module BasicParsing\n module LLkRecursiveDescentParser\n\n # Helps implement Pattern 4: LL(k) Recursive-Descent Parser\n #\n # Major differences from the book example:\n #\n # * This isn't actually LL(k) because there isn't a fixed size _k_ for\n # the lookahead buffer (the book example uses a small circular buffer).\n # It's so easy to implement this by shifting a Ruby Array I went with\n # the idiomatic solution rather than the memory-efficient one. I'm not\n # too bothered about this from a learning point of view because I only\n # care about what this does, not how to implement it efficiently.\n #\n # * This solution uses a separate object rather than rolling the lookahead\n # code into the Parser. 
The code below could be used to add lookahead to\n # any lexer, and can feed any parser, even a non-lookahead one (it\n # conforms to the ListLexer contract).\n class LexerLookahead\n def initialize(lexer)\n @lexer = lexer\n @lexer_empty = false\n @buffer = [ ]\n end\n\n def peek(lookahead_distance = 1)\n fill_buffer(lookahead_distance)\n token_or_stop_iteration(@buffer[lookahead_distance - 1])\n end\n\n def next\n fill_buffer(1)\n consume\n end\n\n private\n\n def consume\n token_or_stop_iteration(@buffer.shift)\n end\n\n def fill_buffer(required_size)\n (required_size - @buffer.size).times do\n harvest\n end\n end\n\n def harvest\n @buffer << @lexer.next\n rescue StopIteration\n def self.harvest\n # NOOP\n end\n\n def self.token_or_stop_iteration(token)\n raise StopIteration if token.nil?\n token\n end\n end\n\n def token_or_stop_iteration(token)\n token\n end\n end\n\n end\n end\nend/lib/1_getting_started/ch02_basic_parsing/p04_llk_recursive_descent_parser/list_parser_with_assignment.rb\nrequire '1_getting_started/errors'\n\nmodule GettingStarted\n module BasicParsing\n module LLkRecursiveDescentParser\n\n # Implements Pattern 4: LL(k) Recursive-Descent Parser\n # (Needs the help of the LexerLookahead)\n #\n # Major differences from the book example:\n #\n # * I put the code for the lookahead in a separate class (the rest of\n # the discussion is in the LexerLookahead code)\n class ListParserWithAssignment\n def initialize(lookahead_lexer)\n @lexer = lookahead_lexer\n end\n\n def list\n [ ].tap do |collected_list|\n match(:lbrack)\n elements(collected_list)\n match(:rbrack)\n end\n end\n\n def elements(collected_list)\n first_element(collected_list)\n while @lexer.peek.type == :comma\n match(:comma)\n element(collected_list)\n end\n end\n\n def first_element(collected_list)\n case @lexer.peek.type\n when :name, :lbrack\n element(collected_list)\n when :rbrack\n return\n else\n raise RecognitionError.new(\n \"Expected :lbrack, :name or :rbrack, found 
#{@lexer.peek.inspect}\"\n )\n end\n end\n\n def element(collected_list)\n if @lexer.peek(1).type == :name && @lexer.peek(2).type == :equals\n lhs, _, rhs = match(:name), match(:equals), match(:name)\n collected_list << { lhs.value.to_sym => rhs.value.to_sym }\n elsif @lexer.peek.type == :name\n collected_list << @lexer.peek.value.to_sym\n match(:name)\n elsif @lexer.peek.type == :lbrack\n collected_list << list\n else\n raise RecognitionError.new(\n \"Expected :name or :lbrack, found #{@lexer.peek.inspect}\"\n )\n end\n end\n\n private\n\n def match(expected_type)\n if @lexer.peek.type == expected_type\n consume\n else\n raise RecognitionError.new(\n \"Expected #{expected_type.inspect}, found #{@lexer.peek.inspect}\"\n )\n end\n end\n\n def consume\n @lexer.next\n end\n end\n\n end\n end\nend/spec/1_getting_started/ch02_basic_parsing/p04_llk_recursive_descent_parser/lexer_lookahead_spec.rb\nrequire 'spec_helper'\n\nrequire '1_getting_started/ch02_basic_parsing/p02_ll1_recursive_descent_lexer/list_lexer'\nrequire '1_getting_started/ch02_basic_parsing/p04_llk_recursive_descent_parser/lexer_lookahead'\nrequire_relative '../p02_ll1_recursive_descent_lexer/list_lexer_contract'\n\nmodule GettingStarted\n module BasicParsing\n module LLkRecursiveDescentParser\n shared_examples_for \"LexerLookahead#peek\" do\n example do\n expect(lexer.peek.value).to be == \"[\"\n end\n\n example do\n expect(lexer.peek(1).value).to be == \"[\"\n end\n\n example do\n expect(lexer.peek(2).value).to be == \"a\"\n end\n\n example do\n expect(lexer.peek(5).value).to be == \"]\"\n end\n\n example do\n expect(lexer.peek(6).to_hash).to be == { eof: nil }\n end\n\n example do\n expect { lexer.peek(7) }.to raise_error(StopIteration)\n end\n end\n\n describe LexerLookahead do\n describe \"lookahead\" do\n let(:tokens) {\n LL1RecursiveDescentLexer::Token.descriptions_to_tokens(\n [\n { lbrack: \"[\" },\n { name: \"a\" },\n { equals: \"=\" },\n { name: \"b\" },\n { rbrack: \"]\" },\n { eof: nil }\n 
]\n )\n }\n\n let(:underlying_lexer) { tokens.each }\n\n subject(:lexer) { LexerLookahead.new(underlying_lexer) }\n\n describe \"#peek\" do\n it_behaves_like \"LexerLookahead#peek\"\n end\n end\n\n context \"wrapping a ListLexer\" do\n let(:input) { \"[ a = b ]\" }\n let(:underlying_lexer) { LL1RecursiveDescentLexer::ListLexer.new(input) }\n subject(:lexer) { LexerLookahead.new(underlying_lexer) }\n\n let(:collected_output) { [ ] }\n let(:output) {\n collected_output.map { |token| token.to_hash }\n }\n\n it_behaves_like \"a ListLexer\"\n\n describe \"#peek\" do\n it_behaves_like \"LexerLookahead#peek\"\n end\n end\n end\n end\n end\nend/lib/2_analyzing_languages/ch06_tracking_symbols/p16_monolithic_scope/symbol_table_logger.rb\nmodule AnalyzingLanguages\n module TrackingSymbols\n module MonolithicScope\n # A Decorator for SymbolTable\n #\n # Note this makes the questionable decision of only logging if location\n # metadata is available. Arguable we should have distinct #resolve_at_location\n # and #define_at_location methods to be used by clients (I haven't thought\n # this through though).\n class SymbolTableLogger\n def initialize(symbol_table, output_io)\n @symbol_table = symbol_table\n @output_io = output_io\n end\n\n def resolve(symbol_name, metadata = { })\n @symbol_table.resolve(symbol_name).tap do |symbol|\n log(\"ref\", symbol, metadata)\n end\n rescue RuntimeError => e # Eek! 
Seems we were using too geneneral an error class...\n log(\"ref [failed]\", symbol_name, metadata)\n end\n\n def define(symbol, metadata = { })\n log(\"def\", symbol, metadata)\n @symbol_table.define(symbol)\n end\n\n # Hacked in as part of the NestedScopes code...\n # I'm not taking too much care to refactor now\n def define_as_scope(symbol, metadata = { })\n log(\"defscope\", symbol, metadata)\n @symbol_table.define_as_scope(symbol)\n end\n\n # Also hacked in\n def push_scope\n @symbol_table.push_scope\n end\n\n # And this\n def pop_scope\n @symbol_table.pop_scope\n end\n\n private\n\n def log(event, item, metadata)\n @output_io.puts(\"#{metadata[:location]}: #{event} #{item}\")\n end\n end\n end\n end\nend/spec/2_analyzing_languages/ch06_tracking_symbols/p17_nested_scopes/cymbol2_parser_spec.rb\nrequire 'spec_helper'\n\nrequire '2_analyzing_languages/ch06_tracking_symbols/p17_nested_scopes/cymbol2_parser'\n\nRSpec::Matchers.define :parse do\n match do |source|\n parser.parse(source)\n end\nend\n\nmodule AnalyzingLanguages\n module TrackingSymbols\n module NestedScopes\n\n describe CymbolNestedParser do\n subject(:parser) { CymbolNestedParser.new }\n\n # If something goes wrong in here, we're missing a focused example!\n describe \"a complex program\" do\n example do\n expect(<<-C).to parse\n // intro comment\n int i = 9;\n float j;\n int k = i+2;\n\n float f(int x, float y)\n {\n float i;\n { float z = x+y; i = z; }\n return i;\n }\n\n void g()\n {\n f(i, 2);\n }\n C\n end\n end\n\n describe \"empty program\" do\n example do\n expect(\"\").to parse\n end\n example do\n expect(\" \").to parse\n end\n example do\n expect(\" \\n\").to parse\n end\n example do\n expect(\"\\t\\t\\t\").to parse\n end\n example do\n expect(\"\\n\\n\\n\").to parse\n end\n example do\n expect(\"\\n \\n\\t\\n\").to parse\n end\n end\n\n describe \"comments\" do\n example do\n expect(\"// this is a comment\\n\").to parse\n end\n example do\n expect(\"// this is a comment\").to parse\n 
end\n example do\n expect(\" // this is a comment\").to parse\n end\n end\n\n describe \"variable declaration\" do\n context \"without initialization\" do\n example do\n expect(\"int foo;\").to parse\n end\n example do\n expect(\"int foo ;\").to parse\n end\n example do\n expect(\"float foo;\").to parse\n end\n end\n\n context \"with initialization\" do\n context \"to a literal\" do\n example do\n expect(\"int foo=99;\").to parse\n end\n example do\n expect(\"int foo = 99 ;\").to parse\n end\n end\n\n context \"to a variable\" do\n example do\n expect(\"int foo = x;\").to parse\n end\n end\n\n context \"to an arbitrary expression\" do\n example do\n expect(\"int foo = x + 1;\").to parse\n end\n example do\n expect(\"int foo = x+1;\").to parse\n end\n end\n end\n end\n\n describe \"assignment\" do\n example do\n expect(\"foo=bar;\").to parse\n end\n example do\n expect(\"foo = bar;\").to parse\n end\n example do\n expect(\"foo = 1;\").to parse\n end\n end\n\n describe \"blocks\" do\n example do\n expect(\"{}\").to parse\n end\n example do\n expect(\"{ }\").to parse\n end\n example do\n expect(\"{ int i; }\").to parse\n end\n example do\n expect(\"{ int i; x = y; }\").to parse\n end\n example do\n expect(\"{\n int i;\n x = y;\n }\").to parse\n end\n example do\n expect(\"{ x = a + b; }\").to parse\n end\n\n describe \"nested\" do\n example do\n expect(\"{{}}\").to parse\n end\n example do\n expect(\"{ { } }\").to parse\n end\n example do\n expect(\"{\n int i;\n {\n float k;\n }\n }\").to parse\n end\n end\n end\n\n describe \"methods\" do\n describe \"definitions\" do\n example do\n expect(\"int foo(){}\").to parse\n end\n example do\n expect(\"int foo() { }\").to parse\n end\n example do\n expect(\"int foo()\\n{ }\").to parse\n end\n example do\n expect(\"int foo(int x) { }\").to parse\n end\n example do\n expect(\"int foo(int x, float y) { }\").to parse\n end\n example do\n expect(\"int foo( int x ,float y ) { }\").to parse\n end\n example do\n expect(\"float f() { 
int i; }\").to parse\n end\n example do\n expect(\"float f() { return a + b; }\").to parse\n end\n end\n describe \"calls\" do\n example do\n expect(\"foo();\").to parse\n end\n example do\n expect(\"foo( ) ;\").to parse\n end\n example do\n expect(\"f(a);\").to parse\n end\n example do\n expect(\"f(a, b);\").to parse\n end\n example do\n expect(\"f(a, b + 1);\").to parse\n end\n end\n end\n end\n\n end\n end\nend/lib/1_getting_started/ch03_enhanced_parsing/p05_backtracking_parser/replayable_buffer.rb\nrequire 'forwardable'\n\nmodule GettingStarted\n module EnhancedParsing\n module BacktrackingParser\n\n class ReplayableBuffer\n extend Forwardable\n\n def_delegator :@buffer, :[]\n def_delegator :@buffer, :size\n\n def_delegator :@buffer, :<<\n def_delegator :@buffer, :shift\n\n class ArrayProxy\n def initialize(buffer = [ ])\n @buffer = buffer\n end\n\n # Queries\n\n def [](index)\n @buffer[index]\n end\n\n def size\n @buffer.size\n end\n\n # Commands\n\n def <<(object)\n @buffer << object\n end\n\n def shift\n @buffer.shift\n end\n\n # Support\n\n def dup\n ArrayProxy.new(@buffer.dup)\n end\n\n def unwrap\n raise RuntimeError.new(\"No mark has been set\")\n end\n end\n\n class TimeMachine\n def initialize(buffer, snapshot = nil)\n @buffer = buffer\n @snapshot = snapshot || buffer.dup\n end\n\n # Queries\n\n def [](index)\n @snapshot[index]\n end\n\n def size\n @snapshot.size\n end\n\n # Commands\n\n def <<(object)\n @snapshot << object\n @buffer << object\n end\n\n def shift\n @snapshot.shift\n end\n\n # Support\n\n def dup\n TimeMachine.new(self, @snapshot.dup)\n end\n\n def unwrap\n @buffer\n end\n end\n\n def initialize\n @buffer = ArrayProxy.new\n end\n\n def mark\n @buffer = TimeMachine.new(@buffer)\n end\n\n def release\n @buffer = @buffer.unwrap\n end\n end\n\n end\n end\nend\n/lib/1_getting_started/ch02_basic_parsing/p02_ll1_recursive_descent_lexer/token.rb\nmodule GettingStarted\n module BasicParsing\n module LL1RecursiveDescentLexer\n class Token\n 
attr_reader :type, :value\n\n class << self\n def descriptions_to_tokens(descriptions)\n descriptions.map { |description| Token.new(description) }\n end\n end\n\n def initialize(description)\n @type = description.keys.first\n @value = description.values.first\n end\n\n def to_hash\n { @type => @value }\n end\n\n def inspect\n @type.inspect\n end\n\n def ==(other)\n @type == other.type && @value == other.value\n end\n end\n end\n end\nend/spec/1_getting_started/ch03_enhanced_parsing/p05_backtracking_parser/replayable_buffer_spec.rb\nrequire 'spec_helper'\n\nrequire '1_getting_started/ch03_enhanced_parsing/p05_backtracking_parser/replayable_buffer'\n\nmodule GettingStarted\n module EnhancedParsing\n module BacktrackingParser\n\n describe ReplayableBuffer do\n subject(:buffer) { ReplayableBuffer.new }\n\n context \"used without marking\" do\n describe \"#[]\" do\n example do\n expect(buffer[0]).to be_nil\n expect(buffer[1]).to be_nil\n expect(buffer[2]).to be_nil\n expect(buffer[-1]).to be_nil\n end\n\n example do\n buffer << :a\n buffer << :b\n\n expect(buffer[0]).to be == :a\n expect(buffer[1]).to be == :b\n expect(buffer[2]).to be_nil\n expect(buffer[-1]).to be == :b\n end\n end\n\n describe \"#shift\" do\n example do\n expect(buffer.shift).to be_nil\n end\n\n example do\n buffer << :a\n buffer << :b\n\n expect(buffer.shift).to be == :a\n expect(buffer.shift).to be == :b\n expect(buffer.shift).to be_nil\n end\n end\n\n describe \"#size\" do\n example do\n expect {\n buffer << :a\n buffer << :b\n }.to change { buffer.size }.from(0).to(2)\n end\n\n example do\n buffer << :a\n buffer << :b\n expect { buffer.shift }.to change { buffer.size }.from(2).to(1)\n expect { buffer.shift }.to change { buffer.size }.from(1).to(0)\n end\n end\n\n describe \"#release\" do\n it \"raises an error\" do\n expect {\n buffer.release\n }.to raise_error(RuntimeError, \"No mark has been set\")\n end\n end\n end\n\n context \"with a mark\" do\n before(:each) do\n buffer << :premark_a\n 
buffer << :premark_b\n buffer.mark\n end\n\n describe \"#shift\" do\n example do\n expect(buffer.shift).to be == :premark_a\n expect(buffer.shift).to be == :premark_b\n expect(buffer.shift).to be_nil\n\n buffer.release\n\n expect(buffer.shift).to be == :premark_a\n expect(buffer.shift).to be == :premark_b\n expect(buffer.shift).to be_nil\n end\n end\n\n describe \"#[]\" do\n example do\n buffer.shift\n buffer.shift\n buffer.release\n\n expect(buffer[0]).to be == :premark_a\n expect(buffer[1]).to be == :premark_b\n end\n end\n\n describe \"#size\" do\n example do\n expect { buffer.shift }.to change { buffer.size }.from(2).to(1)\n expect { buffer.shift }.to change { buffer.size }.from(1).to(0)\n expect { buffer.release }.to change { buffer.size }.from(0).to(2)\n end\n end\n end\n\n context \"with two marks\" do\n describe \"releasing both marks\" do\n before(:each) do\n buffer << :a\n buffer << :b\n buffer << :c\n buffer.mark\n buffer.mark\n end\n\n example do\n output_before_release_1 = [ buffer.shift, buffer.shift, buffer.shift ]\n expect(output_before_release_1).to be == [ :a, :b, :c ]\n\n buffer.release\n\n output_before_release_2 = [ buffer.shift, buffer.shift, buffer.shift ]\n expect(output_before_release_2).to be == [ :a, :b, :c ]\n\n buffer.release\n\n output_before_release_3 = [ buffer.shift, buffer.shift, buffer.shift ]\n expect(output_before_release_3).to be == [ :a, :b, :c ]\n end\n end\n\n describe \"shifting between the marks\" do\n example do\n buffer << :a\n buffer << :b\n buffer.mark\n expect(buffer.shift).to be == :a\n buffer.mark\n expect(buffer.shift).to be == :b\n buffer.release\n expect(buffer.shift).to be == :b\n buffer.release\n expect(buffer.shift).to be == :a\n expect(buffer.shift).to be == :b\n end\n end\n\n describe \"buffering across two marks\" do\n before(:each) do\n buffer << :a\n buffer.mark\n buffer << :b\n buffer.mark\n buffer << :c\n end\n\n example do\n output_before_release_1 = [ buffer.shift, buffer.shift, buffer.shift ]\n 
expect(output_before_release_1).to be == [ :a, :b, :c ]\n\n buffer.release\n\n output_before_release_2 = [ buffer.shift, buffer.shift, buffer.shift ]\n expect(output_before_release_2).to be == [ :a, :b, :c ]\n\n buffer.release\n\n output_before_release_3 = [ buffer.shift, buffer.shift, buffer.shift ]\n expect(output_before_release_3).to be == [ :a, :b, :c ]\n end\n end\n end\n end\n\n end\n end\nend\n/lib/2_analyzing_languages/ch06_tracking_symbols/p17_nested_scopes/symbol_table.rb\nmodule AnalyzingLanguages\n module TrackingSymbols\n module NestedScopes\n module Scope\n def define(symbol)\n @symbols[symbol.name] = symbol\n end\n\n def resolve(name)\n @symbols.fetch(name.to_sym) {\n @parent_scope.resolve(name)\n }\n end\n\n def unwrap\n @parent_scope\n end\n end\n\n class SymbolTable\n class DeadEndScope\n def resolve(name)\n raise \"Unknown symbol: #{name}\"\n end\n end\n\n class LocalScope\n include Scope\n\n def initialize(parent_scope = :remove_me)\n @parent_scope = parent_scope\n @symbols = { }\n end\n\n\n def to_s\n \"symbols: {#{key_value_pairs.join(\", \")}}\"\n end\n\n private\n\n def key_value_pairs\n @symbols.keys.sort.map { |key| \"#{key}=#{@symbols[key]}\" }\n end\n end\n\n def initialize\n @scope = LocalScope.new(DeadEndScope.new)\n end\n\n def push_scope\n @scope = LocalScope.new(@scope)\n end\n\n def pop_scope\n @scope = @scope.unwrap\n end\n\n def define(symbol, metadata = { })\n @scope.define(symbol)\n end\n\n def define_as_scope(symbol, metadata = { })\n define(symbol)\n symbol.wrap(@scope)\n @scope = symbol\n end\n\n def resolve(name, metadata = { })\n @scope.resolve(name)\n end\n\n def to_s\n @scope.to_s\n end\n end\n\n end\n end\nend/lib/1_getting_started/ch03_enhanced_parsing/p05_backtracking_parser/list_parser_with_parallel_assignment.rb\nrequire '1_getting_started/errors'\n\nmodule GettingStarted\n module EnhancedParsing\n module BacktrackingParser\n class ListParserWithParallelAssignment\n def initialize(replayable_lexer)\n @lexer = 
replayable_lexer\n end\n\n def stat\n if speculate_stat_list\n matched_list = list\n match(:eof)\n matched_list\n elsif speculate_stat_parallel_assigment\n matched_parallel_assigment = parallel_assignment\n match(:eof)\n matched_parallel_assigment\n else\n raise NoViableAlternativeError.new(\"Expecting or \")\n end\n end\n\n def speculate_stat_list\n @lexer.speculate do\n list\n match(:eof)\n end\n rescue RecognitionError => e\n false\n end\n\n def speculate_stat_parallel_assigment\n @lexer.speculate do\n parallel_assignment\n match(:eof)\n end\n rescue RecognitionError => e\n false\n end\n\n def list\n [ ].tap do |collected_list|\n match(:lbrack)\n elements(collected_list)\n match(:rbrack)\n end\n end\n\n def parallel_assignment\n lhs = list\n match(:equals)\n rhs = list\n { lhs => rhs }\n end\n\n def elements(collected_list)\n first_element(collected_list)\n while @lexer.peek.type == :comma\n match(:comma)\n element(collected_list)\n end\n end\n\n def first_element(collected_list)\n case @lexer.peek.type\n when :name, :lbrack\n element(collected_list)\n when :rbrack\n return\n else\n raise RecognitionError.new(\n \"Expected :lbrack, :name or :rbrack, found #{@lexer.peek.inspect}\"\n )\n end\n end\n\n def element(collected_list)\n if @lexer.peek(1).type == :name && @lexer.peek(2).type == :equals\n lhs, _, rhs = match(:name), match(:equals), match(:name)\n collected_list << { lhs.value.to_sym => rhs.value.to_sym }\n elsif @lexer.peek.type == :name\n collected_list << @lexer.peek.value.to_sym\n match(:name)\n elsif @lexer.peek.type == :lbrack\n collected_list << list\n else\n raise RecognitionError.new(\n \"Expected :name or :lbrack, found #{@lexer.peek.inspect}\"\n )\n end\n end\n\n private\n\n def match(expected_type)\n if @lexer.peek.type == expected_type\n consume\n else\n raise RecognitionError.new(\n \"Expected #{expected_type.inspect}, found #{@lexer.peek.inspect}\"\n )\n end\n end\n\n def consume\n @lexer.next\n end\n end\n\n end\n 
end\nend\n/lib/1_getting_started/errors.rb\nclass RecognitionError < RuntimeError; end\nclass NoViableAlternativeError < RuntimeError; end/spec/1_getting_started/ch02_basic_parsing/p04_llk_recursive_descent_parser/list_parser_assignment_contract.rb\nshared_examples_for \"a ListParser with assignment\" do\n context \"list with assignment\" do\n let(:token_descriptions) {\n [\n { lbrack: \"[\" },\n { name: \"a\" },\n { equals: \"=\" },\n { name: \"b\" },\n { rbrack: \"]\" },\n { eof: nil }\n ]\n }\n\n specify {\n expect(parser.list).to be == [ { :a => :b } ]\n }\n end\nend\n/spec/1_getting_started/ch02_basic_parsing/p02_ll1_recursive_descent_lexer/enumerator_contract.rb\nshared_examples_for \"an Enumerator\" do\n context \"at the start\" do\n specify {\n expect(subject.next).to_not be_nil\n }\n\n specify {\n expect(subject.peek).to_not be_nil\n }\n\n specify {\n peeked_value = subject.peek\n expect(subject.next).to be == peeked_value\n }\n end\n\n context \"at the end\" do\n before(:each) do\n advance_to_end\n end\n\n specify {\n expect { subject.next }.to raise_error(StopIteration)\n }\n\n specify {\n expect { subject.peek }.to raise_error(StopIteration)\n }\n end\nend\n/spec/2_analyzing_languages/ch06_tracking_symbols/p16_monolithic_scope/variable_symbol_spec.rb\nrequire 'spec_helper'\n\nrequire '2_analyzing_languages/ch06_tracking_symbols/p16_monolithic_scope/variable_symbol'\n\nmodule AnalyzingLanguages\n module TrackingSymbols\n module MonolithicScope\n describe VariableSymbol do\n let(:type) { LanguageSymbol.new(:type) }\n subject(:symbol) { VariableSymbol.new(:name, type) }\n\n its(:to_s) { should be == \"\" }\n\n describe \"#==\" do\n example do\n expect(symbol).to be == VariableSymbol.new(:name, type)\n end\n\n example do\n expect(symbol).to be == VariableSymbol.new(\"name\", type)\n end\n\n example do\n expect(symbol).to_not be == VariableSymbol.new(:bar, type)\n end\n\n example do\n expect(symbol).to_not be == VariableSymbol.new(:name, 
LanguageSymbol.new(:wrong_type))\n end\n\n example do\n expect(symbol).to_not be == :name\n end\n end\n\n describe \"#name\" do\n its(:name) { should be == :name }\n\n it \"is always a symbol\" do\n expect(VariableSymbol.new(\"name\", :ununused_type).name).to be == :name\n end\n end\n end\n end\n end\nend/spec/1_getting_started/ch02_basic_parsing/p02_ll1_recursive_descent_lexer/enumerator_spec.rb\nrequire 'spec_helper'\n\nrequire_relative 'enumerator_contract'\n\n# Prove that the Enumerator interface we depend on works how we think it does\ndescribe \"a real Enumerator\" do\n subject(:array_enumerator) { [ { eof: nil } ].each }\n\n def advance_to_end\n array_enumerator.next\n end\n\n it_behaves_like \"an Enumerator\"\nend/lib/2_analyzing_languages/ch06_tracking_symbols/p16_monolithic_scope/cymbol_monolithic_node_classes.rb\nrequire 'treetop'\n\nmodule AnalyzingLanguages; module TrackingSymbols; module MonolithicScope; end; end; end\nTreetop.load(File.dirname(__FILE__) + '/cymbol_monolithic')\n\nrequire_relative 'variable_symbol'\n\nmodule AnalyzingLanguages\n module TrackingSymbols\n module MonolithicScope\n\n # Module for Treetop\n module CymbolMonolithic\n class ::Treetop::Runtime::SyntaxNode\n def populate_symbols(symbol_table)\n return unless elements\n elements.each do |element|\n element.populate_symbols(symbol_table)\n end\n end\n\n private\n\n def location\n \"line #{input.line_of(interval.first)}\"\n end\n end\n\n class VarDeclaration < Treetop::Runtime::SyntaxNode\n def populate_symbols(symbol_table)\n var_type = symbol_table.resolve(type.name, location: location)\n if assignment\n assignment.populate_symbols(symbol_table)\n end\n symbol_table.define(VariableSymbol.new(var_name.name, var_type), location: location)\n end\n end\n\n class VarExpression < Treetop::Runtime::SyntaxNode\n def populate_symbols(symbol_table)\n symbol_table.resolve(type.name, location: location)\n end\n end\n\n class CymbolSymbol < Treetop::Runtime::SyntaxNode\n def name\n 
text_value\n end\n\n def populate_symbols(symbol_table)\n symbol_table.resolve(name, location: location)\n end\n end\n end\n\n end\n end\nend/spec/1_getting_started/ch02_basic_parsing/p03_ll1_recursive_descent_parser/list_parser_spec.rb\nrequire 'spec_helper'\n\nrequire '1_getting_started/ch02_basic_parsing/p03_ll1_recursive_descent_parser/list_parser'\nrequire '1_getting_started/ch02_basic_parsing/p02_ll1_recursive_descent_lexer/list_lexer'\nrequire_relative 'list_parser_contract'\n\nmodule GettingStarted\n module BasicParsing\n module LL1RecursiveDescentParser\n describe \"Intergration:\", ListParser, \"and lexer\" do\n let(:input) { \"[ a, [ x, [ i, j ] ], b ]\" }\n\n let(:lexer) { LL1RecursiveDescentLexer::ListLexer.new(input) }\n\n subject(:parser) { ListParser.new(lexer) }\n\n it \"parses lists!\" do\n expect(parser.list).to be == [ :a, [ :x, [ :i, :j ] ], :b ]\n end\n end\n\n describe ListParser do\n it_behaves_like \"a ListParser\"\n\n let(:tokens) {\n LL1RecursiveDescentLexer::Token.descriptions_to_tokens(token_descriptions)\n }\n let(:lexer) { tokens.each }\n\n subject(:parser) { ListParser.new(lexer) }\n end\n end\n end\nend/spec/1_getting_started/ch02_basic_parsing/p02_ll1_recursive_descent_lexer/list_lexer_spec.rb\nrequire 'spec_helper'\n\nrequire '1_getting_started/ch02_basic_parsing/p02_ll1_recursive_descent_lexer/list_lexer'\nrequire_relative 'list_lexer_contract'\n\nmodule GettingStarted\n module BasicParsing\n module LL1RecursiveDescentLexer\n describe ListLexer do\n subject(:lexer) { ListLexer.new(input) }\n\n let(:collected_output) { [ ] }\n let(:output) {\n collected_output.map { |token| token.to_hash }\n }\n\n it_behaves_like \"a ListLexer\"\n end\n end\n end\nend/spec/1_getting_started/ch02_basic_parsing/p03_ll1_recursive_descent_parser/list_parser_contract.rb\nshared_examples_for \"a ListParser\" do\n # The example in the book (deliberately?) 
avoids this case, but it's an obvious\n # TDD bootstrapping case, and turned out to be not too difficult to implement\n # compared to the Java example\n context \"empty list\" do\n let(:token_descriptions) {\n [\n { lbrack: \"[\" },\n { rbrack: \"]\" },\n { eof: nil }\n ]\n }\n\n specify {\n expect(parser.list).to be == [ ]\n }\n end\n\n context \"list with a name\" do\n let(:token_descriptions) {\n [\n { lbrack: \"[\" },\n { name: \"a\" },\n { rbrack: \"]\" },\n { eof: nil }\n ]\n }\n\n specify {\n expect(parser.list).to be == [ :a ]\n }\n end\n\n context \"list with a multiple names\" do\n let(:token_descriptions) {\n [\n { lbrack: \"[\" },\n { name: \"a\" },\n { comma: \",\" },\n { name: \"b\" },\n { comma: \",\" },\n { name: \"c\" },\n { rbrack: \"]\" },\n { eof: nil }\n ]\n }\n\n specify {\n expect(parser.list).to be == [ :a, :b, :c ]\n }\n end\n\n context \"list of lists\" do\n let(:token_descriptions) {\n [\n { lbrack: \"[\" },\n { name: \"a\" },\n { comma: \",\" },\n { lbrack: \"[\" },\n { name: \"x\" },\n { comma: \",\" },\n { name: \"y\" },\n { rbrack: \"]\" },\n { comma: \",\" },\n { name: \"b\" },\n { rbrack: \"]\" },\n { eof: nil }\n ]\n }\n\n specify {\n expect(parser.list).to be == [ :a, [ :x, :y ], :b ]\n }\n end\n\n context \"empty list in a list\" do\n let(:token_descriptions) {\n [\n { lbrack: \"[\" },\n { name: \"a\" },\n { comma: \",\" },\n { lbrack: \"[\" },\n { rbrack: \"]\" },\n { comma: \",\" },\n { name: \"b\" },\n { rbrack: \"]\" },\n { eof: nil }\n ]\n }\n\n specify {\n expect(parser.list).to be == [ :a, [ ], :b ]\n }\n end\n\n context \"list of list of lists to prove to Bobby my code really works\" do\n let(:token_descriptions) {\n [\n { lbrack: \"[\" },\n { name: \"a\" },\n { comma: \",\" },\n { lbrack: \"[\" },\n { name: \"x\" },\n { comma: \",\" },\n { lbrack: \"[\" },\n { name: \"i\" },\n { comma: \",\" },\n { name: \"j\" },\n { rbrack: \"]\" },\n { rbrack: \"]\" },\n { comma: \",\" },\n { name: \"b\" },\n { rbrack: \"]\" },\n { 
eof: nil }\n ]\n }\n\n specify {\n expect(parser.list).to be == [ :a, [ :x, [ :i, :j ] ], :b ]\n }\n end\n\n context \"invalid input\" do\n context \"no list\" do\n let(:token_descriptions) {\n [\n { eof: nil }\n ]\n }\n\n specify {\n expect { parser.list }.to raise_error(RecognitionError, \"Expected :lbrack, found :eof\")\n }\n end\n\n context \"unclosed list\" do\n let(:token_descriptions) {\n [\n { lbrack: \"[\" },\n { eof: nil }\n ]\n }\n\n specify {\n expect { parser.list }.to raise_error(\n RecognitionError, \"Expected :lbrack, :name or :rbrack, found :eof\"\n )\n }\n end\n\n context \"missing name before a comma\" do\n let(:token_descriptions) {\n [\n { lbrack: \"[\" },\n { comma: \",\" },\n { rbrack: \"]\" },\n { eof: nil }\n ]\n }\n\n specify {\n expect { parser.list }.to raise_error(\n RecognitionError, \"Expected :lbrack, :name or :rbrack, found :comma\"\n )\n }\n end\n\n context \"missing name after a comma\" do\n let(:token_descriptions) {\n [\n { lbrack: \"[\" },\n { name: \"a\" },\n { comma: \",\" },\n { rbrack: \"]\" },\n { eof: nil }\n ]\n }\n\n specify {\n expect { parser.list }.to raise_error(\n RecognitionError, \"Expected :name or :lbrack, found :rbrack\"\n )\n }\n end\n end\nend/spec/1_getting_started/ch03_enhanced_parsing/p05_backtracking_parser/list_parser_with_parallel_assignment_spec.rb\nrequire 'spec_helper'\n\nrequire '1_getting_started/ch03_enhanced_parsing/p05_backtracking_parser/list_parser_with_parallel_assignment'\nrequire '1_getting_started/ch03_enhanced_parsing/p05_backtracking_parser/lexer_replayable_lookahead'\nrequire '1_getting_started/ch02_basic_parsing/p02_ll1_recursive_descent_lexer/list_lexer'\nrequire_relative '../../ch02_basic_parsing/p03_ll1_recursive_descent_parser/list_parser_contract'\nrequire_relative '../../ch02_basic_parsing/p04_llk_recursive_descent_parser/list_parser_assignment_contract'\nrequire_relative 'list_parser_with_parallel_assignment_contract'\n\nmodule GettingStarted\n module EnhancedParsing\n module 
BacktrackingParser\n describe \"Intergration:\", ListParserWithParallelAssignment, \"and lexer\" do\n let(:input) { \"[ a ] = [ b ]\" }\n\n let(:lexer) { BasicParsing::LL1RecursiveDescentLexer::ListLexer.new(input) }\n let(:lookahead) { LexerReplayableLookahead.new(lexer) }\n subject(:parser) { ListParserWithParallelAssignment.new(lookahead) }\n\n describe \"statements\" do\n context \"a list\" do\n let(:input) { \"[ a = b, c, d ]\" }\n\n example do\n expect(parser.stat).to be == [ { :a => :b }, :c, :d ]\n end\n end\n\n context \"a parallel assignment\" do\n let(:input) { \"[ a, b ] = [ c, d ]\" }\n\n example do\n expect(parser.stat).to be == { [ :a, :b ] => [ :c, :d ] }\n end\n end\n end\n end\n\n describe ListParserWithParallelAssignment do\n let(:tokens) {\n BasicParsing::LL1RecursiveDescentLexer::Token.descriptions_to_tokens(token_descriptions)\n }\n let(:lexer) { tokens.each }\n let(:lookahead) { LexerReplayableLookahead.new(lexer) }\n subject(:parser) { ListParserWithParallelAssignment.new(lookahead) }\n\n it_behaves_like \"a ListParser\"\n it_behaves_like \"a ListParser with assignment\"\n it_behaves_like \"a ListParser with parallel assignment\"\n end\n end\n end\nend/spec/1_getting_started/ch03_enhanced_parsing/p05_backtracking_parser/list_parser_with_parallel_assignment_contract.rb\nshared_examples_for \"a ListParser with parallel assignment\" do\n describe \"statements\" do\n context \"a list\" do\n let(:token_descriptions) {\n [\n { lbrack: \"[\" },\n { name: \"a\" }, { equals: \"=\" }, { name: \"b\" }, { comma: \",\" },\n { name: \"c\" }, { comma: \",\" },\n { name: \"d\" },\n { rbrack: \"]\" },\n { eof: nil }\n ]\n }\n\n specify {\n expect(parser.list).to be == [ { :a => :b }, :c, :d ]\n }\n end\n\n context \"a parallel assignment\" do\n let(:token_descriptions) {\n [\n { lbrack: \"[\" },\n { name: \"a\" }, { comma: \",\" }, { name: \"b\" },\n { rbrack: \"]\" },\n { equals: \"=\" },\n { lbrack: \"[\" },\n { name: \"c\" }, { comma: \",\" }, { name: 
\"d\" },\n { rbrack: \"]\" },\n { eof: nil }\n ]\n }\n\n specify {\n expect(parser.stat).to be == { [ :a, :b ] => [ :c, :d ] }\n }\n end\n\n context \"invalid statement\" do\n # \"[ a ] b\"\n let(:token_descriptions) {\n [\n { lbrack: \"[\" },\n { name: \"a\" },\n { rbrack: \"]\" },\n { name: \"b\" },\n { eof: nil }\n ]\n }\n\n specify {\n expect {\n parser.stat\n }.to raise_error(\n NoViableAlternativeError, \"Expecting or \"\n )\n }\n end\n end\nend/lib/2_analyzing_languages/ch06_tracking_symbols/p17_nested_scopes/cymbol2_parser.rb\nrequire 'treetop'\n\nrequire_relative 'method_symbol'\nrequire_relative '../p16_monolithic_scope/variable_symbol'\n\nmodule AnalyzingLanguages; module TrackingSymbols; module NestedScopes; end; end; end\nTreetop.load(File.dirname(__FILE__) + '/cymbol_nested')\n\nmodule AnalyzingLanguages\n module TrackingSymbols\n module NestedScopes\n\n # Module for Treetop\n module CymbolNested\n class ::Treetop::Runtime::SyntaxNode\n def walk(symbol_table)\n return unless elements\n elements.each do |element|\n element.walk(symbol_table)\n end\n end\n\n private\n\n def location\n \"line #{input.line_of(interval.first)}\"\n end\n end\n\n class MethodDefinition < Treetop::Runtime::SyntaxNode\n def walk(symbol_table)\n method_type = symbol_table.resolve(type.name, location: location)\n symbol_table.define_as_scope(MethodSymbol.new(name.name, method_type), location: location)\n\n parameters.each do |parameter|\n parameter_type = symbol_table.resolve(parameter.type.name, location: location)\n symbol_table.define(\n MonolithicScope::VariableSymbol.new(parameter.name.name, parameter_type),\n location: location\n )\n end\n\n body.walk(symbol_table)\n end\n end\n\n class CymbolBlock < Treetop::Runtime::SyntaxNode\n def walk(symbol_table)\n symbol_table.push_scope\n elements.each do |element|\n element.walk(symbol_table)\n end\n end\n end\n\n class ParameterList < Treetop::Runtime::SyntaxNode\n def each(&block)\n parameters.each(&block)\n end\n\n # We can 
only handle one - as it happens the book example only uses one\n def parameters\n [ first_parameter ]\n end\n end\n\n class VarDeclaration < Treetop::Runtime::SyntaxNode\n def walk(symbol_table)\n var_type = symbol_table.resolve(type.name, location: location)\n if initialization\n initialization.walk(symbol_table)\n end\n symbol_table.define(\n MonolithicScope::VariableSymbol.new(var_name.name, var_type),\n location: location\n )\n end\n end\n\n class CymbolSymbol < Treetop::Runtime::SyntaxNode\n def name\n text_value\n end\n\n def walk(symbol_table)\n symbol_table.resolve(name, location: location)\n end\n end\n end\n\n end\n end\nend/spec/1_getting_started/ch02_basic_parsing/p02_ll1_recursive_descent_lexer/token_spec.rb\nrequire 'spec_helper'\n\nrequire '1_getting_started/ch02_basic_parsing/p02_ll1_recursive_descent_lexer/token'\n\nmodule GettingStarted\n module BasicParsing\n module LL1RecursiveDescentLexer\n describe Token do\n subject(:token) { Token.new(token_type: \"token_value\") }\n\n its(:type) { should be == :token_type }\n its(:value) { should be == \"token_value\" }\n its(:inspect) { should be == \":token_type\" } # Only used for errors so far\n its(:to_hash) { should be == { token_type: \"token_value\" } }\n\n describe \"#==\" do\n it \"knows equality\" do\n expect(token).to be == Token.new(token_type: \"token_value\")\n end\n\n it \"compares type\" do\n expect(token).to_not be == Token.new(wrong_token_type: \"token_value\")\n end\n\n it \"compares value\" do\n expect(token).to_not be == Token.new(token_type: \"wrong_token_value\")\n end\n end\n\n describe \".descriptions_to_tokens\" do\n example do\n expect(\n Token.descriptions_to_tokens([{ a: \"one\" }, { b: \"two\" }])\n ).to be == [\n Token.new(a: \"one\"), Token.new(b: \"two\")\n ]\n end\n end\n end\n end\n end\nend/spec/2_analyzing_languages/ch06_tracking_symbols/p17_nested_scopes/symbol_table_spec.rb\nrequire 'spec_helper'\n\nrequire 
'2_analyzing_languages/ch06_tracking_symbols/p17_nested_scopes/symbol_table'\nrequire '2_analyzing_languages/ch06_tracking_symbols/p16_monolithic_scope/language_symbol'\n\nmodule AnalyzingLanguages\n module TrackingSymbols\n module NestedScopes\n # Easy access to the existing LanguageSymbol\n include MonolithicScope\n\n describe SymbolTable do\n subject(:symbol_table) { SymbolTable.new }\n\n context \"single scope\" do\n describe \"#define\" do\n it \"ignores metadata\" do\n expect {\n symbol_table.define(LanguageSymbol.new(:foo), unused: \"stuff\")\n }.to_not raise_error(ArgumentError)\n end\n end\n\n describe \"#resolve\" do\n context \"undefined symbol\" do\n specify {\n expect {\n symbol_table.resolve(\"foo\")\n }.to raise_error(RuntimeError, \"Unknown symbol: foo\")\n }\n\n it \"ignores metadata\" do\n expect {\n symbol_table.resolve(\"foo\", unused: \"stuff\")\n }.to_not raise_error(ArgumentError)\n end\n end\n\n context \"defined symbol\" do\n let(:symbol) { LanguageSymbol.new(:foo) }\n\n before(:each) do\n symbol_table.define(symbol)\n end\n\n specify {\n expect(symbol_table.resolve(\"foo\")).to equal(symbol)\n }\n\n specify {\n expect(symbol_table.resolve(:foo)).to equal(symbol)\n }\n end\n end\n\n describe \"#to_s\" do\n before(:each) do\n symbol_table.define(LanguageSymbol.new(:foo))\n symbol_table.define(LanguageSymbol.new(:bar))\n symbol_table.define(LanguageSymbol.new(:baz))\n end\n\n it \"outputs in alphabetic order\" do\n expect(symbol_table.to_s).to be ==\n \"symbols: {bar=bar, baz=baz, foo=foo}\"\n end\n end\n end\n\n describe \"nested scopes\" do\n let(:symbol) { LanguageSymbol.new(:foo) }\n\n before(:each) do\n symbol_table.define(symbol)\n symbol_table.push_scope\n end\n\n it \"gives access to the higher scope\" do\n expect(symbol_table.resolve(:foo)).to equal(symbol)\n end\n\n describe \"overriding\" do\n let(:new_foo) { LanguageSymbol.new(:foo) }\n\n before(:each) do\n symbol_table.define(new_foo)\n end\n\n it \"lets you override a symbol\" 
do\n expect(symbol_table.resolve(:foo)).to equal(new_foo)\n end\n\n it \"lets you pop the scope tree to get the old symbol back\" do\n symbol_table.pop_scope\n expect(symbol_table.resolve(:foo)).to equal(symbol)\n end\n\n it \"updates #to_s\" do\n pending\n end\n end\n end\n\n describe \"scope symbols\" do\n # Using a mock here bleeds some implementation details of the aggregate,\n # we should probably remove this if we get a suitable integration test\n let(:scope_symbol) {\n mock(\"ScopeSymbol\", name: :bar, wrap: nil, resolve: :trust_passed_through)\n }\n\n it \"defines the symbol\" do\n symbol_table.define_as_scope(scope_symbol)\n expect(symbol_table.resolve(:bar)).to equal(:trust_passed_through)\n end\n\n it \"ignores metadata\" do\n expect {\n symbol_table.define_as_scope(scope_symbol, unused: \"stuff\")\n }.to_not raise_error(ArgumentError)\n end\n\n it \"pushes the symbol as a scope\" do\n symbol_table.define(LanguageSymbol.new(:moo))\n\n scope_symbol.should_receive(:wrap) do |parent_scope|\n expect(parent_scope.resolve(:moo))\n end\n\n symbol_table.define_as_scope(scope_symbol)\n end\n\n it \"uses the pushed scope\" do\n symbol_table.define_as_scope(scope_symbol)\n scope_symbol.should_receive(:resolve).with(:baz)\n symbol_table.resolve(:baz)\n end\n end\n end\n end\n end\nend/spec/1_getting_started/ch03_enhanced_parsing/p05_backtracking_parser/lexer_replayable_lookahead_spec.rb\nrequire 'spec_helper'\n\nrequire '1_getting_started/ch02_basic_parsing/p02_ll1_recursive_descent_lexer/list_lexer'\nrequire '1_getting_started/ch03_enhanced_parsing/p05_backtracking_parser/lexer_replayable_lookahead'\nrequire_relative '../../ch02_basic_parsing/p02_ll1_recursive_descent_lexer/list_lexer_contract'\n\nmodule GettingStarted\n module EnhancedParsing\n module BacktrackingParser\n shared_examples_for \"LexerReplayableLookahead#peek\" do\n example do\n expect(lexer.peek.value).to be == \"[\"\n end\n\n example do\n expect(lexer.peek(1).value).to be == \"[\"\n end\n\n example 
do\n expect(lexer.peek(2).value).to be == \"a\"\n end\n\n example do\n expect(lexer.peek(5).value).to be == \"]\"\n end\n\n example do\n expect(lexer.peek(6).to_hash).to be == { eof: nil }\n end\n\n example do\n expect { lexer.peek(7) }.to raise_error(StopIteration)\n end\n end\n\n shared_examples_for \"LexerReplayableLookahead#speculate\" do\n specify {\n lexer.speculate do\n expect(lexer.peek.value).to be == \"[\"\n expect(lexer.next.value).to be == \"[\"\n expect(lexer.next.value).to be == \"a\"\n end\n }\n\n specify {\n lexer.next\n expect {\n lexer.speculate do\n lexer.next\n end\n }.to_not change { lexer.peek.value }\n }\n\n specify {\n expect(lexer.peek.value).to be == \"[\"\n lexer.speculate do\n lexer.next\n expect(lexer.peek.value).to be == \"a\"\n lexer.speculate do\n lexer.next\n expect(lexer.peek.value).to be == \"=\"\n end\n expect(lexer.peek.value).to be == \"a\"\n end\n expect(lexer.peek.value).to be == \"[\"\n }\n\n describe \"#if_speculating\" do\n specify {\n should_not_receive(:was_speculating)\n lexer.if_speculating do\n was_speculating\n end\n }\n\n specify {\n should_receive(:was_speculating)\n lexer.speculate do\n lexer.if_speculating do\n was_speculating\n end\n end\n }\n\n specify {\n should_receive(:was_speculating)\n lexer.speculate do\n lexer.speculate do\n # nothing here\n end\n lexer.if_speculating do\n was_speculating\n end\n end\n }\n end\n\n describe \"error handling\" do\n it \"(re-)raises errors\" do\n expect {\n lexer.speculate do\n raise \"re-raised error\"\n end\n }.to raise_error(RuntimeError, \"re-raised error\")\n end\n\n it \"is not disturbed by errors\" do\n lexer.speculate do\n lexer.next\n lexer.next\n raise \"ignored error\"\n end rescue nil\n\n expect(lexer.next.value).to be == \"[\"\n expect(lexer.next.value).to be == \"a\"\n end\n end\n end\n\n describe LexerReplayableLookahead do\n describe \"lookahead\" do\n let(:tokens) {\n BasicParsing::LL1RecursiveDescentLexer::Token.descriptions_to_tokens(\n [\n { lbrack: 
\"[\" },\n { name: \"a\" },\n { equals: \"=\" },\n { name: \"b\" },\n { rbrack: \"]\" },\n { eof: nil }\n ]\n )\n }\n\n let(:underlying_lexer) { tokens.each }\n\n subject(:lexer) { LexerReplayableLookahead.new(underlying_lexer) }\n\n describe \"#peek\" do\n it_behaves_like \"LexerReplayableLookahead#peek\"\n end\n\n describe \"#speculate\" do\n it_behaves_like \"LexerReplayableLookahead#speculate\"\n end\n end\n\n context \"wrapping a ListLexer\" do\n let(:input) { \"[ a = b ]\" }\n let(:underlying_lexer) { BasicParsing::LL1RecursiveDescentLexer::ListLexer.new(input) }\n subject(:lexer) { LexerReplayableLookahead.new(underlying_lexer) }\n\n let(:collected_output) { [ ] }\n let(:output) {\n collected_output.map { |token| token.to_hash }\n }\n\n it_behaves_like \"a ListLexer\"\n\n describe \"#peek\" do\n it_behaves_like \"LexerReplayableLookahead#peek\"\n end\n\n describe \"#speculate\" do\n it_behaves_like \"LexerReplayableLookahead#speculate\"\n end\n end\n end\n end\n end\nend/spec/2_analyzing_languages/ch06_tracking_symbols/p16_monolithic_scope/symbol_table_logger_spec.rb\nrequire 'spec_helper'\n\nrequire '2_analyzing_languages/ch06_tracking_symbols/p16_monolithic_scope/symbol_table'\nrequire '2_analyzing_languages/ch06_tracking_symbols/p16_monolithic_scope/symbol_table_logger'\n\nmodule AnalyzingLanguages\n module TrackingSymbols\n module MonolithicScope\n describe SymbolTableLogger do\n let(:symbol_table) { mock(SymbolTable, define: nil, resolve: :resolved_symbol) }\n\n let(:output_io) { StringIO.new }\n def output\n output_io.rewind\n output_io.read.chomp\n end\n\n subject(:logger) { SymbolTableLogger.new(symbol_table, output_io) }\n\n describe \"#resolve\" do\n specify {\n symbol_table.should_receive(:resolve).with(:symbol_name)\n logger.resolve(:symbol_name).should be == :resolved_symbol\n }\n\n specify {\n logger.resolve(:symbol_name, location: \"line 1\")\n expect(output).to be == \"line 1: ref resolved_symbol\"\n }\n end\n\n describe \"#define\" do\n 
specify {\n logger.define(:symbol, location: \"line 1\")\n expect(output).to be == \"line 1: def symbol\"\n }\n\n specify {\n symbol_table.should_receive(:define).with(:symbol)\n logger.define(:symbol)\n }\n end\n end\n end\n end\nend/lib/1_getting_started/ch02_basic_parsing/p02_ll1_recursive_descent_lexer/list_lexer.rb\nrequire_relative 'token'\n\nmodule GettingStarted\n module BasicParsing\n module LL1RecursiveDescentLexer\n\n # Implements Pattern 2: LL(1) Recursive-Descent Lexer\n #\n # Note that there are two main differences from the example implementation:\n #\n # * I don't use a Token class (which is just a dumb struct in the Java\n # example), this can be more easily implemented in Ruby with simple hashes.\n #\n # * I set about solving this using standard Ruby iterators with blocks,\n # which makes some bits more idiomatic and some bits more complex. The\n # Java example looks ahead to the next character, whereas with blocks\n # we have to `redo` if we detect we've entered a different token type.\n # Because of this, the code has ended up with a State implementation,\n # albeit a slightly wacky one.\n #\n # * There's the minor difference that I forgot to allow parsing capital\n # letters in names. Oops.\n #\n # ERRATA: I just realised that the whole point of LL(1) is to *look ahead*\n # and therefore this implementation using blocks misses the point. 
Sight.\n # Oh well, from the outside, it's behaviourally equivalent, ie passes all\n # the same tests it would do if we weren't `redo`ing enumerator blocks.\n # I'm not going to refactor it unless I have to.\n class ListLexer\n def initialize(input)\n @input = input\n\n # Hack to work around the fact I didn't implement this as a pull system\n @tokens = tokenize_all.each\n end\n\n def peek\n @tokens.peek\n end\n\n def next\n @tokens.next\n end\n\n private\n\n def tokenize_all\n [ ].tap do |tokens|\n tokenize do |token|\n tokens << token\n end\n end\n end\n\n def tokenize(&block)\n switch_to_mode(:normal)\n\n @input.chars.each do |char|\n match_result =\n catch :state_changed do\n match(char, &block)\n end\n\n redo if match_result == :state_changed\n end\n\n finish(&block)\n end\n\n def match_normal(char, &block)\n case char\n when \" \", \"\\t\", \"\\n\"\n when \"[\"\n yield(Token.new(lbrack: char))\n when \",\"\n yield(Token.new(comma: char))\n when \"=\"\n yield(Token.new(equals: char))\n when \"]\"\n yield(Token.new(rbrack: char))\n when \"a\"..\"z\"\n switch_to_mode(:name)\n throw(:state_changed, :state_changed)\n else\n raise ArgumentError.new(\"Invalid character: #{char}\")\n end\n end\n\n def match_name(char, &block)\n case char\n when \"a\"..\"z\"\n @name << char\n else\n yield(Token.new(name: @name))\n switch_to_mode(:normal)\n throw(:state_changed, :state_changed)\n end\n end\n\n def finish_normal(&block)\n yield(Token.new(eof: nil))\n end\n\n def finish_name(&block)\n yield(Token.new(name: @name))\n yield(Token.new(eof: nil))\n end\n\n # This is an insane way to implement the State pattern, but I've left it in purely\n # because it's so ridiculous, and this is only the first pattern in the book\n def switch_to_mode(mode)\n @name = \"\"\n\n singleton_class.send(:alias_method, :match, :\"match_#{mode}\")\n singleton_class.send(:alias_method, :finish, :\"finish_#{mode}\")\n end\n end\n\n end\n 
end\nend/spec/1_getting_started/ch03_enhanced_parsing/p06_memoizing_parser/memoizing_list_parser_spec.rb\nrequire 'spec_helper'\n\nrequire '1_getting_started/ch03_enhanced_parsing/p06_memoizing_parser/memoizing_list_parser'\nrequire '1_getting_started/ch03_enhanced_parsing/p05_backtracking_parser/lexer_replayable_lookahead'\nrequire '1_getting_started/ch02_basic_parsing/p02_ll1_recursive_descent_lexer/list_lexer'\nrequire_relative '../../ch02_basic_parsing/p03_ll1_recursive_descent_parser/list_parser_contract'\nrequire_relative '../../ch02_basic_parsing/p04_llk_recursive_descent_parser/list_parser_assignment_contract'\nrequire_relative '../p05_backtracking_parser/list_parser_with_parallel_assignment_contract'\n\nmodule GettingStarted\n module EnhancedParsing\n module MemoizingParser\n\n # The spec for this is identical to the backtracking parser, only the\n # implementation differs. I've decided not to write any specs to prove\n # eg performance or memory usage, as for my purposes I only want to\n # understand what is going on, not be able to implement these efficiently\n # myself.\n describe MemoizingListParser do\n let(:tokens) {\n BasicParsing::LL1RecursiveDescentLexer::Token.descriptions_to_tokens(token_descriptions)\n }\n let(:lexer) { tokens.each }\n let(:lookahead) { BacktrackingParser::LexerReplayableLookahead.new(lexer) }\n subject(:parser) { MemoizingListParser.new(lookahead) }\n\n it_behaves_like \"a ListParser\"\n it_behaves_like \"a ListParser with assignment\"\n it_behaves_like \"a ListParser with parallel assignment\"\n end\n\n end\n end\nend\n/lib/1_getting_started/ch02_basic_parsing/p03_ll1_recursive_descent_parser/list_parser.rb\nrequire '1_getting_started/errors'\n\nmodule GettingStarted\n module BasicParsing\n module LL1RecursiveDescentParser\n\n # Implements Pattern 3: LL(1) Recursive-Descent Parser\n #\n # Two major differences from the book examples:\n #\n # * We turn the tokens into a Ruby array (ie we do something with them\n # other than 
detect errors). This is pretty easy as we just need a\n # Collecting Parameter.\n #\n # * We handle the empty list case. I didn't notice this was excluded\n # from the grammar, but the discussion on p40 hints at why. Turns out\n # even in this simple case it's much harder to parse optional elements,\n # as it means we have to treat the first element of a list differently\n # (because \"[ ]\" is valid but \"[ a, ]\" is not, which is what a naive\n # parser gives you).\n class ListParser\n def initialize(lexer)\n @lexer = lexer\n @lookahead = @lexer.next\n end\n\n def list\n [ ].tap do |collected_list|\n match(:lbrack)\n elements(collected_list)\n match(:rbrack)\n end\n end\n\n def elements(collected_list)\n first_element(collected_list)\n while @lookahead.type == :comma\n match(:comma)\n element(collected_list)\n end\n end\n\n def first_element(collected_list)\n case @lookahead.type\n when :name, :lbrack\n element(collected_list)\n when :rbrack\n return\n else\n raise RecognitionError.new(\n \"Expected :lbrack, :name or :rbrack, found #{@lookahead.type.inspect}\"\n )\n end\n end\n\n def element(collected_list)\n case @lookahead.type\n when :name\n collected_list << @lookahead.value.to_sym\n match(:name)\n when :lbrack\n collected_list << list\n else\n raise RecognitionError.new(\n \"Expected :name or :lbrack, found #{@lookahead.type.inspect}\"\n )\n end\n end\n\n private\n\n def match(expected_type)\n if @lookahead.type == expected_type\n consume\n else\n raise RecognitionError.new(\n \"Expected #{expected_type.inspect}, found #{@lookahead.type.inspect}\"\n )\n end\n end\n\n def consume\n @lookahead = @lexer.next\n end\n end\n\n end\n end\nend/spec/spec_helper.rb\nrequire 'ap'\n\nrequire 'fakefs/spec_helpers'\nrequire 'lstrip-on-steroids'\n\nRSpec.configure do |config|\n config.filter_run(focus: true)\n config.run_all_when_everything_filtered = true\nend\n/spec/2_analyzing_languages/ch06_tracking_symbols/p16_monolithic_scope/cymbol_spec.rb\nrequire 
'spec_helper'\n\nrequire '2_analyzing_languages/ch06_tracking_symbols/p16_monolithic_scope/cymbol'\nrequire '2_analyzing_languages/ch06_tracking_symbols/p16_monolithic_scope/symbol_table'\nrequire '2_analyzing_languages/ch06_tracking_symbols/p16_monolithic_scope/symbol_table_logger'\nrequire '2_analyzing_languages/ch06_tracking_symbols/p16_monolithic_scope/language_symbol'\nrequire '2_analyzing_languages/ch06_tracking_symbols/p16_monolithic_scope/variable_symbol'\n\nmodule AnalyzingLanguages\n module TrackingSymbols\n module MonolithicScope\n describe Cymbol do\n let(:symbol_table) { mock(SymbolTable, resolve: nil, define: nil) }\n\n let(:cymbol) { Cymbol.new(symbol_table) }\n\n context \"an uninitialised variable\" do\n let(:source) { \"float j;\" }\n\n specify {\n symbol_table.should_receive(:resolve).with(\"float\", hash_including(location: \"line 1\"))\n cymbol.parse(source)\n }\n end\n\n context \"an initialized variable\" do\n let(:source) { \"int i = 9;\" }\n\n let(:int) { LanguageSymbol.new(:int) }\n\n before(:each) do\n symbol_table.stub(resolve: int)\n end\n\n specify {\n symbol_table.should_receive(:resolve).with(\n \"int\", hash_including(location: \"line 1\")\n ).ordered\n symbol_table.should_receive(:define).with(\n VariableSymbol.new(:i, LanguageSymbol.new(:int)),\n hash_including(location: \"line 1\")\n ).ordered\n\n cymbol.parse(source)\n }\n end\n\n context \"a variable initialized with an expression\" do\n let(:source) { \"int k = i + 2;\" }\n\n let(:float) { LanguageSymbol.new(:float) }\n\n before(:each) do\n symbol_table.stub(resolve: float)\n end\n\n specify {\n symbol_table.should_receive(:resolve).with(\n \"int\", hash_including(location: \"line 1\")\n ).ordered\n symbol_table.should_receive(:resolve).with(\n \"i\", hash_including(location: \"line 1\")\n ).ordered\n symbol_table.should_receive(:define).with(\n VariableSymbol.new(:k, float), hash_including(location: \"line 1\")\n ).ordered\n\n cymbol.parse(source)\n }\n end\n end\n\n describe 
Cymbol, \"from the book\" do\n let(:symbol_table) { SymbolTable.new }\n subject(:cymbol) { Cymbol.new(symbol_table_for_parser) }\n\n let(:source) {\n -%{\n int i = 9;\n float j;\n int k = i+2;\n }\n }\n\n before(:each) do\n # Bypass any logging\n symbol_table.define(LanguageSymbol.new(:int))\n symbol_table.define(LanguageSymbol.new(:float))\n end\n\n context \"but with no logging\" do\n let(:symbol_table_for_parser) { symbol_table }\n\n specify {\n expect { cymbol.parse(source) }.to_not raise_error\n }\n end\n\n context \"with logging as described\" do\n let(:output_io) { StringIO.new }\n def output\n output_io.rewind\n output_io.read.chomp\n end\n\n let(:symbol_table_for_parser) { SymbolTableLogger.new(symbol_table, output_io) }\n\n # Changed slightly because to produce the same output as the book\n # requires an inordinate increase in complexity in the code\n #\n # Test output from the book:\n # line 1: ref int\n # line 1: def i\n # line 2: ref float\n # line 2: def j\n # line 3: ref int\n # line 3: ref to \n # line 3: def k\n specify {\n cymbol.parse(source)\n\n expect(output.to_s).to be == -%{\n line 1: ref int\n line 1: def \n line 2: ref float\n line 2: def \n line 3: ref int\n line 3: ref \n line 3: def \n }\n }\n\n # We don't write this to the output just to avoid having to pass an IO in to Cymbol\n specify {\n cymbol.parse(source)\n\n expect(symbol_table.to_s).to be ==\n \"globals: {float=float, i=, int=int, j=, k=}\"\n }\n end\n end\n end\n end\nend/lib/1_getting_started/ch03_enhanced_parsing/p05_backtracking_parser/lexer_replayable_lookahead.rb\nrequire_relative 'replayable_buffer'\n\nmodule GettingStarted\n module EnhancedParsing\n module BacktrackingParser\n\n class LexerReplayableLookahead\n def initialize(lexer)\n @lexer = lexer\n @lexer_empty = false\n @buffer = ReplayableBuffer.new\n @speculating = [ false ]\n end\n\n def peek(lookahead_distance = 1)\n fill_buffer(lookahead_distance)\n token_or_stop_iteration(@buffer[lookahead_distance - 1])\n 
end\n\n def next\n fill_buffer(1)\n consume\n end\n\n def speculate(&block)\n @speculating.push(true)\n @buffer.mark\n yield\n ensure\n @speculating.pop\n @buffer.release\n end\n\n def if_speculating(&block)\n yield if @speculating.last\n end\n\n private\n\n def consume\n token_or_stop_iteration(@buffer.shift)\n end\n\n def fill_buffer(required_size)\n (required_size - @buffer.size).times do\n harvest\n end\n end\n\n def harvest\n @buffer << @lexer.next\n rescue StopIteration\n def self.harvest\n # NOOP\n end\n\n def self.token_or_stop_iteration(token)\n raise StopIteration if token.nil?\n token\n end\n end\n\n def token_or_stop_iteration(token)\n token\n end\n end\n\n end\n end\nend\n/spec/2_analyzing_languages/ch06_tracking_symbols/p17_nested_scopes/cymbol2_spec.rb\nrequire 'spec_helper'\n\nrequire '2_analyzing_languages/ch06_tracking_symbols/p17_nested_scopes/cymbol2'\nrequire '2_analyzing_languages/ch06_tracking_symbols/p17_nested_scopes/symbol_table'\nrequire '2_analyzing_languages/ch06_tracking_symbols/p16_monolithic_scope/symbol_table_logger'\nrequire '2_analyzing_languages/ch06_tracking_symbols/p16_monolithic_scope/language_symbol'\n# require '2_analyzing_languages/ch06_tracking_symbols/p16_monolithic_scope/variable_symbol'\n\nmodule AnalyzingLanguages\n module TrackingSymbols\n module NestedScopes\n include MonolithicScope\n\n describe Cymbol2, \"from the book\" do\n let(:symbol_table) { SymbolTable.new }\n let(:symbol_table_logger) { SymbolTableLogger.new(symbol_table, output_io) }\n\n let(:output_io) { StringIO.new }\n def output\n output_io.rewind\n output_io.read.chomp\n end\n\n subject(:cymbol) { Cymbol2.new }\n\n before(:each) do\n # Bypass any logging\n symbol_table.define(LanguageSymbol.new(:int))\n symbol_table.define(LanguageSymbol.new(:float))\n end\n\n let(:source) {\n -%{\n // global scope\n float f(int x)\n {\n float i;\n { float z = x+y; i = z; }\n }\n }\n }\n\n specify {\n cymbol.parse(source).walk(symbol_table_logger)\n\n # Changed a bit: 
I made it as close as I felt was worth it,\n # but some stuff is quite hard (eg assignment, unless we start)\n # extending the symbol table for this\n expect(output.to_s).to be == -%{\n line 2: ref float\n line 2: defscope method\n line 2: ref int\n line 2: def \n line 4: ref float\n line 4: def \n line 5: ref float\n line 5: ref \n line 5: ref [failed] y\n line 5: def \n line 5: ref \n line 5: ref \n }\n }\n end\n end\n end\nend/README.markdown\n# Language Implementation Patterns\n\nMy workthrough of the book [Language Implementation Patterns][lipbook] by [][parrt].\n\n[lipbook]: http://pragprog.com/book/tpdsl/language-implementation-patterns\n[parrt]: http://www.cs.usfca.edu/~parrt/\n/lib/2_analyzing_languages/ch06_tracking_symbols/p17_nested_scopes/method_symbol.rb\nrequire_relative 'symbol_table'\n\nmodule AnalyzingLanguages\n module TrackingSymbols\n module NestedScopes\n class MethodSymbol\n include Scope\n\n attr_reader :name, :type\n\n def initialize(name, type)\n @name = name.to_sym\n @type = type\n\n @symbols = { } # Hackety hackety hack\n end\n\n def ==(other)\n other.is_a?(MethodSymbol) &&\n @name == other.name &&\n @type == other.type\n end\n\n def wrap(parent_scope)\n @parent_scope = parent_scope\n end\n\n def to_s\n \"method<#{@name}:#{@type}>\"\n end\n end\n end\n end\nend"},"directory_id":{"kind":"string","value":"88dc429a8b45a587e6dce5882c083a7bc9f0a84d"},"languages":{"kind":"list like","value":["Markdown","Ruby"],"string":"[\n \"Markdown\",\n \"Ruby\"\n]"},"num_files":{"kind":"number","value":47,"string":"47"},"repo_language":{"kind":"string","value":"Ruby"},"repo_name":{"kind":"string","value":"ashmoran/language_implementation_patterns"},"revision_id":{"kind":"string","value":"fa9923cd9c51c069b719cbf7247e407535d9dc70"},"snapshot_id":{"kind":"string","value":"e89ed786e3ce554f5fab18fd3c9adc4af3301c01"}}},{"rowIdx":210,"cells":{"branch_name":{"kind":"string","value":"refs/heads/master"},"text":{"kind":"string","value":"import 
org.junit.Test;\nimport static org.junit.Assert.*;\n\nimport org.junit.After;\nimport org.junit.Before;\nimport org.junit.FixMethodOrder;\nimport org.junit.runners.MethodSorters;\nimport org.mockito.*;\n\n@FixMethodOrder(MethodSorters.NAME_ASCENDING)\npublic class RentACatTest {\n\n\t/**\n\t * The test fixture for this JUnit test. Test fixture: a fixed state of a set of\n\t * objects used as a baseline for running tests. The test fixture is initialized\n\t * using the @Before setUp method which runs before every test case. The test\n\t * fixture is removed using the @After tearDown method which runs after each\n\t * test case.\n\t */\n\n\tRentACat r; // Object to test\n\tCat c1; // First mock cat object\n\tCat c2; // Second mock cat object\n\tCat c3; // Third mock cat object\n\n\t@Before\n\tpublic void setUp() throws Exception {\n\t\t// Turn on automatic bug injection in the Cat class, to emulate a buggy Cat.\n\t\t// Your unit tests should work regardless of these bugs if you mock all Cats.\n\t\tCat._bugInjectionOn = true;\n\n\t\t// INITIALIZE THE TEST FIXTURE\n\t\t// 1. Create a new RentACat object and assign to r\n\t\tr = RentACat.createInstance(); \n\n\t\t// 2. Create a mock Cat with ID 1 and name \"Jennyanydots\", assign to c1\n\t\t// TODO: Fill in\n\t\tc1 = Mockito.mock(Cat.class);\n\t\tMockito.when(c1.getId()).thenReturn(1);\n\t\tMockito.when(c1.getName()).thenReturn(\"Jennyanydots\");\n\t\t\n\t\t// 3. Create a mock Cat with ID 2 and name \"Old Deuteronomy\", assign to c2\n\t\t// TODO: Fill in\n\t\tc2 = Mockito.mock(Cat.class);\n\t\tMockito.when(c2.getId()).thenReturn(2);\n\t\tMockito.when(c2.getName()).thenReturn(\"Old Deuteronomy\");\n\t\t\n\n\t\t// 4. 
Create a mock Cat with ID 3 and name \"Mistoffelees\", assign to c3\n\t\t// TODO: Fill in\n\t\tc3 = Mockito.mock(Cat.class);\n\t\tMockito.when(c3.getId()).thenReturn(3);\n\t\tMockito.when(c3.getName()).thenReturn(\"Mistoffelees\");\t\t\n\t\t\n\t\t// Hint: You will have to stub the mocked Cats to make them behave as if the ID\n\t\t// is 1 and name is \"Jennyanydots\", etc.\n\t}\n\n\t@After\n\tpublic void tearDown() throws Exception {\n\t\t// Not necessary strictly speaking since the references will be overwritten in\n\t\t// the next setUp call anyway and Java has automatic garbage collection.\n\t\tr = null;\n\t\tc1 = null;\n\t\tc2 = null;\n\t\tc3 = null;\n\t}\n\n\t/**\n\t * Test case for Cat getCat(int id).\n\t * Preconditions: r has no cats.\n\t * Execution steps: Call getCat(2).\n\t * Postconditions: Return value is null.\n\t */\n\n\t@Test\n\tpublic void testGetCatNullNumCats0() {\n\t\tassertNull(r.getCat(2));\n\t}\n\n\t/**\n\t * Test case for Cat getCat(int id).\n\t * Preconditions: c1, c2, and c3 are added to r using addCat(Cat c).\n\t * Execution steps: Call getCat(2).\n\t * Postconditions: Return value is not null.\n\t * Returned cat has an ID of 2.\n\t */\n\t\n\t@Test\n\tpublic void testGetCatNumCats3() {\n\t\tr.addCat(c1);\n\t\tr.addCat(c2);\n\t\tr.addCat(c3);\n\t\t\n\t\tassert(r.getCat(2).getId() == 2) : \"getcat shoud be 2\";\n\t}\n\n\t/**\n\t * Test case for boolean catAvailable(int id).\n\t * Preconditions: r has no cats.\n\t * Execution steps: Call catAvailable(2).\n\t * Postconditions: Return value is false.\n\t */\n\n\t@Test\n\tpublic void testCatAvailableFalseNumCats0() {\n\t\tassert(r.catAvailable(2)==false) : \"No cats but catAvailable(2) returns true\";\n\t}\n\n\t/**\n\t * Test case for boolean catAvailable(int id).\n\t * Preconditions: c1, c2, and c3 are added to r using addCat(Cat c).\n\t * c3 is rented.\n\t * c1 and c2 are not rented.\n\t * Execution steps: Call catAvailable(2).\n\t * Postconditions: Return value is true.\n\t 
*/\n\n\t@Test\n\tpublic void testCatAvailableTrueNumCats3() { \n\t\tr.addCat(c1);\n\t\tr.addCat(c2);\n\t\tr.addCat(c3);\n\t\tr.rentCat(3);\n\t\tassert(r.catAvailable(2) == true) : \"3 cats and cat 2 is not rented but catAvailable(2) returns false\";\n\t}\n \n\t/**\n\t * Test case for boolean catAvailable(int id).\n\t * Preconditions: c1, c2, and c3 are added to r using addCat(Cat c).\n\t * c2 is rented.\n\t * c1 and c3 are not rented.\n\t * Execution steps: Call catAvailable(2).\n\t * Postconditions: Return value is false.\n\t */\n\t\n\t@Test\n\tpublic void testCatAvailableFalseNumCats3() {\n\t\tr.addCat(c1);\n\t\tr.addCat(c2);\n\t\tr.addCat(c3);\n\t\tr.rentCat(2);;\n\t\tassert(r.catAvailable(2) == false) : \"3 cats and cat 2 is rented but catAvailable(2) returns true\";\n\t}\n\n\t/**\n\t * Test case for boolean catExists(int id).\n\t * Preconditions: r has no cats.\n\t * Execution steps: Call catExists(2).\n\t * Postconditions: Return value is false.\n\t */\n\n\t@Test\n\tpublic void testCatExistsFalseNumCats0() {\n\t\tassert(r.catExists(2)==false) : \"No cats but catExists(2) returns true\";\n\t}\n\n\t/**\n\t * Test case for boolean catExists(int id).\n\t * Preconditions: c1, c2, and c3 are added to r using addCat(Cat c).\n\t * Execution steps: Call catExists(2).\n\t * Postconditions: Return value is true.\n\t */\n\t\n\t@Test\n\tpublic void testCatExistsTrueNumCats3() {\n\t\tr.addCat(c1);\n\t\tr.addCat(c2);\n\t\tr.addCat(c3);\n\t\tassert(r.catExists(2)==true) : \"3 cats but catExists(2) returns false\";\n\t}\n\n\t/**\n\t * Test case for String listCats().\n\t * Preconditions: r has no cats.\n\t * Execution steps: Call listCats().\n\t * Postconditions: Return value is \"\".\n\t */\n\n\t@Test\n\tpublic void testListCatsNumCats0() {\n\t\tassert(r.listCats().equals(\"\")) : \"No cats but listCats() returns non-empty string expected:<[]> but was:<[empty]>\";\n\t}\n\n\t/**\n\t * Test case for String listCats().\n\t * Preconditions: c1, c2, and c3 are added to r using 
addCat(Cat c).\n\t * Execution steps: Call listCats().\n\t * Postconditions: Return value is \"ID 1. Jennyanydots\\nID 2. Old\n\t * Deuteronomy\\nID 3. Mistoffelees\\n\".\n\t */\n\t\n\t@Test\n\tpublic void testListCatsNumCats3() {\n\t\tr.addCat(c1);\n\t\tr.addCat(c2);\n\t\tr.addCat(c3);\n\t\tString ret = \"ID 1. Jennyanydots\\nID 2. Old Deuteronomy\\nID 3. Mistoffelees\\n\";\n\t\tassert(r.listCats().equals(ret)) : \"3 cats and listCats() does not return the expected string expected: but was:\";\n\t\t\n\t}\n\n\t/**\n\t * Test case for boolean rentCat(int id).\n\t * Preconditions: r has no cats.\n\t * Execution steps: Call rentCat(2).\n\t * Postconditions: Return value is false.\n\t */\n\n\t@Test\n\tpublic void testRentCatFailureNumCats0() {\n\t\tassert(r.rentCat(2) == false) : \"No cats but rentCat(2) returns true\";\n\t}\n\n\t/** \n\t * Test case for boolean rentCat(int id).\n\t * Preconditions: c1, c2, and c3 are added to r using addCat(Cat c).\n\t * c2 is rented.\n\t * Execution steps: Call rentCat(2).\n\t * Postconditions: Return value is false.\n\t * c1.rentCat(), c2.rentCat(), c3.rentCat() are never called.\n\t * \n\t * Hint: See Example/NoogieTest.java in testBadgerPlayCalled method to see an\n\t * example of behavior verification.\n\t */\n\t\n\t@Test\n\tpublic void testRentCatFailureNumCats3() {\n\t\tr.addCat(c1);\n\t\tr.addCat(c2);\n\t\tr.addCat(c3);\n\t\tr.rentCat(2);\n\t\t\n\t\t\n\t\tassert(r.rentCat(2)==false) : \"3 cats and cat 2 is rented but rentCat(2) returns true\";\n\t\tMockito.verify(c1,Mockito.times(0)).rentCat();\n\t\tMockito.verify(c2,Mockito.times(0)).rentCat();\n\t\tMockito.verify(c3,Mockito.times(0)).rentCat();\n\t}\n\n\t/**\n\t * Test case for boolean returnCat(int id).\n\t * Preconditions: r has no cats.\n\t * Execution steps: Call returnCat(2).\n\t * Postconditions: Return value is false.\n\t */\n\n\t@Test\n\tpublic void testReturnCatFailureNumCats0() { \n\t\tassert(r.returnCat(2)==false) : \"No cats but returnCat(2) returns 
true\";\n\t}\n\n\t/**\n\t * Test case for boolean returnCat(int id).\n\t * Preconditions: c1, c2, and c3 are added to r using addCat(Cat c).\n\t * c2 is rented.\n\t * Execution steps: Call returnCat(2).\n\t * Postconditions: Return value is true.\n\t * c2.returnCat() is called exactly once.\n\t * c1.returnCat() and c3.returnCat are never called.\n\t * \n\t * Hint: See Example/NoogieTest.java in testBadgerPlayCalled method to see an\n\t * example of behavior verification.\n\t */\n\t\n\t@Test\n\tpublic void testReturnCatNumCats3() {\n\t\tr.addCat(c1);\n\t\tr.addCat(c2);\n\t\tr.addCat(c3);\n\t\tc2.rentCat();;\n\t\t\n\t\t\n\t\tassert(r.returnCat(2)==true);\n\t\tMockito.verify(c1,Mockito.times(0)).returnCat();\n\t\tMockito.verify(c2,Mockito.times(1)).returnCat();\n\t\tMockito.verify(c3,Mockito.times(0)).returnCat();\n\t}\n}\n// Generated by Selenium IDE\nimport org.junit.Test;\nimport org.junit.Before;\nimport org.junit.After;\nimport static org.junit.Assert.*;\nimport static org.hamcrest.CoreMatchers.is;\nimport static org.hamcrest.core.IsNot.not;\nimport org.openqa.selenium.By;\nimport org.openqa.selenium.WebDriver;\nimport org.openqa.selenium.firefox.FirefoxDriver;\nimport org.openqa.selenium.chrome.ChromeDriver;\nimport org.openqa.selenium.chrome.ChromeOptions;\nimport org.openqa.selenium.remote.RemoteWebDriver;\nimport org.openqa.selenium.remote.DesiredCapabilities;\nimport org.openqa.selenium.Dimension;\nimport org.openqa.selenium.WebElement;\nimport org.openqa.selenium.interactions.Actions;\nimport org.openqa.selenium.support.ui.ExpectedConditions;\nimport org.openqa.selenium.support.ui.WebDriverWait;\nimport org.openqa.selenium.JavascriptExecutor;\nimport org.openqa.selenium.Alert;\nimport org.openqa.selenium.Keys;\nimport java.util.*;\nimport java.net.MalformedURLException;\nimport java.net.URL;\npublic class RedditCatsTest {\n private WebDriver driver;\n private Map vars;\n JavascriptExecutor js;\n @Before\n public void setUp() {\n ChromeOptions options = new 
ChromeOptions();\n options.addArguments(\"--headless\");\n options.addArguments(\"--disable-dev-shm-usage\"); // overcome limited resource problems\n driver = new ChromeDriver(options);\n js = (JavascriptExecutor) driver;\n vars = new HashMap();\n }\n @After\n public void tearDown() {\n driver.quit();\n }\n @Test\n public void fUNTITLE() {\n driver.get(\"https://www.reddit.com/r/cats/\");\n driver.manage().window().setSize(new Dimension(966, 990));\n driver.findElement(By.xpath(\"//div[@id=\\'SHORTCUT_FOCUSABLE_DIV\\']/div[2]/div/div/div/div[2]/div/div/div/div/div/h1\")).click();\n assertThat(driver.getTitle(), is(\"Cats\"));\n }\n @Test\n public void fUNJOINBUTTONEXISTS() {\n driver.get(\"https://www.reddit.com/r/cats/\");\n driver.manage().window().setSize(new Dimension(969, 990));\n driver.findElement(By.cssSelector(\".\\\\_3I4Wpl_rl6oTm02aWPZayD\")).click();\n assertThat(driver.findElement(By.xpath(\"//div[@id=\\'SHORTCUT_FOCUSABLE_DIV\\']/div[2]/div/div/div/div[2]/div/div/div/div/div[2]/button\")).getText(), is(\"JOIN\"));\n }\n @Test\n public void fUNRULE3() {\n driver.get(\"https://www.reddit.com/r/cats/\");\n driver.manage().window().setSize(new Dimension(977, 990));\n driver.findElement(By.cssSelector(\".\\\\_3I4Wpl_rl6oTm02aWPZayD\")).click();\n // The rules box is located in the bottom right corner\n assertThat(driver.findElement(By.xpath(\"//div[@id=\\'SHORTCUT_FOCUSABLE_DIV\\']/div[2]/div/div/div/div[2]/div[3]/div[2]/div/div[5]/div/div[2]/div[3]/div/div[2]/div\")).getText(), is(\"No NSFW, animal abuse, or cruelty\"));\n }\n @Test\n public void fUNRULES11ITEMS() {\n driver.get(\"https://www.reddit.com/r/cats/\");\n driver.manage().window().setSize(new Dimension(981, 990));\n driver.findElement(By.cssSelector(\".\\\\_3I4Wpl_rl6oTm02aWPZayD\")).click();\n {\n WebElement element = driver.findElement(By.cssSelector(\".\\\\_2RkQc9Gtsq3cPQNZLYv4zc\"));\n Actions builder = new Actions(driver);\n builder.moveToElement(element).perform();\n }\n // 11th element 
present\n {\n List elements = driver.findElements(By.xpath(\"//div[@id=\\'SHORTCUT_FOCUSABLE_DIV\\']/div[2]/div/div/div/div[2]/div[3]/div[2]/div/div[5]/div/div[2]/div[11]/div/div/div\"));\n assert(elements.size() > 0);\n }\n // 12th element not present\n {\n List elements = driver.findElements(By.xpath(\"//div[@id=\\'SHORTCUT_FOCUSABLE_DIV\\']/div[2]/div/div/div/div[2]/div[3]/div[2]/div/div[5]/div/div[2]/div[12]/div/div/div\"));\n assert(elements.size() == 0);\n }\n }\n @Test\n public void fUNSIGNUPLINK() {\n driver.get(\"https://www.reddit.com/r/cats/\");\n driver.manage().window().setSize(new Dimension(972, 990));\n driver.findElement(By.cssSelector(\".\\\\_3I4Wpl_rl6oTm02aWPZayD\")).click();\n // variable for sign up button\n {\n WebElement element = driver.findElement(By.xpath(\"//a[contains(@href, \\'https://www.reddit.com/register/?dest=https%3A%2F%2Fwww.reddit.com%2Fr%2Fcats%2F\\')]\"));\n String attribute = element.getAttribute(\"href\");\n vars.put(\"signUp\", attribute);\n }\n // link should be the same\n assertEquals(vars.get(\"signUp\").toString(), \"https://www.reddit.com/register/?dest=https%3A%2F%2Fwww.reddit.com%2Fr%2Fcats%2F\");\n }\n\n @Test\n public void fUNSEARCHSMELLYCAT() {\n driver.get(\"https://www.reddit.com/r/cats/\");\n driver.manage().window().setSize(new Dimension(974, 990));\n driver.findElement(By.cssSelector(\".\\\\_3I4Wpl_rl6oTm02aWPZayD\")).click();\n {\n WebElement element = driver.findElement(By.cssSelector(\".\\\\_2RkQc9Gtsq3cPQNZLYv4zc\"));\n Actions builder = new Actions(driver);\n builder.moveToElement(element).perform();\n }\n driver.findElement(By.id(\"header-search-bar\")).click();\n driver.findElement(By.id(\"header-search-bar\")).sendKeys(\"smelly cat\");\n driver.findElement(By.id(\"header-search-bar\")).sendKeys(Keys.ENTER);\n // list of results will display the search term \"smelly cat\" at the top above \"Search results\"\n assertThat(driver.findElement(By.cssSelector(\".\\\\_3j9XjJayuKq7dJ8huVnCuS\")).getText(), 
is(\"smelly cat\"));\n }\n \n}\n"},"directory_id":{"kind":"string","value":"104f72a2018693a712cd911ba23481e72bddc609"},"languages":{"kind":"list like","value":["Java"],"string":"[\n \"Java\"\n]"},"num_files":{"kind":"number","value":2,"string":"2"},"repo_language":{"kind":"string","value":"Java"},"repo_name":{"kind":"string","value":"vahob/cs1632"},"revision_id":{"kind":"string","value":"a06a1ab81435b2bc2418bb839e56cebffd844828"},"snapshot_id":{"kind":"string","value":"2d0b9d5f743f795f086c907e4c3c11efb6a836a8"}}},{"rowIdx":211,"cells":{"branch_name":{"kind":"string","value":"refs/heads/master"},"text":{"kind":"string","value":"id;\n }\n // validation assert\n /**\n * @ORM\\Column(type=\"text\")\n * @Assert\\Currency\n **/\n private $name;\n\n //getters and setters\n public function getName()\n {\n return $this->name;\n }\n\n public function setName($name)\n {\n $this->name = $name;\n }\n}\n# currency-calculator\n\n\t\tCurrency Calcylator - Readme file\nFor the needs of our project, we used PHP framework, Symfony 4 and we were programming mostly in PHP and JavaScript.\nAlso we used CSS and Bootstrap for stylesheet and twig template engine for our pages.\n\nFirst of all we built ourController.php for rendering the pages and get used it as our server. 
After we set the requirements we needed,\nwe created our routes: Home (for home page), login (for login page), admin (for administrator page) and delete \n(for deleting the currencies from administrator page).\n\nOn home page (templates/pages/index.html.twig) we have a form where there are the options of base currency,\namount (if it is empty, a prompt message shows up and reloads the page) and exchange.\nAfter user gives his options and click the calculation button, a JavaScript function make the calculation and send the proper message\non the screen.\n\nOn administrator page (templates/pages/admin.html.twig) we have access after log in authentication (templates/pages/login.html.twig),\nthe administrator has the ability to add and delete a currency for the home page. The currency must be validated.\n\nFurthermore we created a JavaScript file (public/index.js) with a function which makes the calculation of currencies after getting APIs\nfor them and sends the proper message to home page and also a callback function which deletes from the database and by extension from the\nscreen, the currencies from administrator page.\n\nFinally, for our database we created an Entity with the use of Composer (src/Enity/Currency.php).\nComposer also helped us for the annotations, form creation (for administrator page), validator and authentication\n(creation of config/security.yaml where you can find username (administrator) and password for login page).\nfunction currencyCalculation(){\n var base = document.querySelector(\".base\").value;\n var amount = document.querySelector(\".amount\").value;\n var exchange = document.querySelector(\".exchange\").value;\n var request = new XMLHttpRequest();\n // get API for the currencies\n request.open(\"GET\", \"https://api.exchangeratesapi.io/latest?base=\" + base);\n request.onload = function(){\n var data = JSON.parse(request.responseText);\n var keyNames = Object.keys(data.rates);\n var values = Object.values(data.rates);\n 
console.log(keyNames);\n if (amount === \"\"){\n alert(\"Please fill the 'Amount' box!\");\n location.reload();\n }\n if (base === exchange){\n var result = amount;\n } else {\n for (var i = 0; i < keyNames.length; i++){\n if (keyNames[i] === exchange){\n var result = (amount) * (values[i]);\n break;\n }\n }\n }\n\n document.querySelector(\".show-result\").innerHTML = \"Today, \" + data.date + \", \" + amount + \" \" + base + \" are equal to \" + result + \" \" + exchange + \"!\"\n };\n request.send();\n}\n// callback function for deleting currency\nvar currencies = document.querySelectorAll(\".admin-currencies\");\nfor (var i = 0; i < currencies.length; i++){\n currencies[i].addEventListener(\"click\", function(e){\n if (e.target.className === \"btn btn-danger btn-sm\"){\n var id = e.target.getAttribute(\"id\");\n var route = `/currency-calculator/public/delete/${id}`;\n\n var newReq = new XMLHttpRequest();\n newReq.open(\"DELETE\", route);\n newReq.onload = function(){\n location.reload();\n };\n newReq.send();\n }\n });\n}\ngetDoctrine()->getRepository(Currency::class)->findAll();\n\n return $this->render('pages/index.html.twig', array(\"currencies\" => $currencies));\n }\n /**\n * @Route(\"/login\", name=\"login\")\n * @Method({\"GET\", \"POST\"})\n */\n\n public function login(AuthenticationUtils $authenticationUtils){\n\n // get the login error if there is one\n $error = $authenticationUtils->getLastAuthenticationError();\n\n // last username entered by the user\n $lastUsername = $authenticationUtils->getLastUsername();\n\n return $this->render('pages/login.html.twig', [\n 'last_username' => $lastUsername,\n 'error' => $error,\n ]);\n }\n\n /**\n * @Route(\"/admin\", name=\"admin\")\n * @Method({\"GET\", \"POST\"})\n **/\n\n public function adminPage(Request $request, ValidatorInterface $validator){\n $currency = new Currency();\n\n // form creation\n $form = $this->createFormBuilder($currency)\n ->add(\"name\", TextType::class, array(\"attr\" => 
array(\"class\" => \"form-control admin-form\")))\n ->add(\"save\", SubmitType::class, array(\"label\" => \"Add\", \"attr\" => array(\"class\" => \"btn btn-primary btn-add\")))\n ->getForm();\n\n $form->handleRequest($request);\n\n //get data from the form to database\n if($form->isSubmitted() && $form->isValid()){\n $currency = $form->getData();\n $entityManager = $this->getDoctrine()->getManager();\n $entityManager->persist($currency);\n $entityManager->flush();\n\n return $this->redirectToRoute(\"admin\");\n }\n\n $currencies = $this->getDoctrine()->getRepository(Currency::class)->findAll();\n\n return $this->render('pages/admin.html.twig', array(\"form\" => $form->createView(), \"currencies\" => $currencies));\n }\n /**\n * @Route(\"/delete/{id}\", name=\"delete\")\n * @Method({\"DELETE\"})\n **/\n public function delete(Request $request, $id){\n\n //delete from database\n $currency = $this->getDoctrine()->getRepository(Currency::class)->find($id);\n $entityManager = $this->getDoctrine()->getManager();\n $entityManager->remove($currency);\n $entityManager->flush();\n\n $response = new Response();\n $response->send();\n\n }\n\n /**\n * @Route(\"/logout\", name=\"logout\")\n * @Method({\"GET\"})\n **/\n \n}\n\n?>\n"},"directory_id":{"kind":"string","value":"e37d474a4269594a2df3e29dcfd5c8b8fa28dddb"},"languages":{"kind":"list like","value":["Markdown","JavaScript","PHP"],"string":"[\n \"Markdown\",\n \"JavaScript\",\n 
\"PHP\"\n]"},"num_files":{"kind":"number","value":4,"string":"4"},"repo_language":{"kind":"string","value":"PHP"},"repo_name":{"kind":"string","value":"vstogiannis/currency-calculator"},"revision_id":{"kind":"string","value":"cbb95d61934679c5fa9a0a01ed3635020c6d5035"},"snapshot_id":{"kind":"string","value":"0cf8325d281354bb3aafc3a94c00f894066d95db"}}},{"rowIdx":212,"cells":{"branch_name":{"kind":"string","value":"refs/heads/master"},"text":{"kind":"string","value":"liyuan837/leolder/src/main/java/com/liyuan/demo/controller/HeroController.java\npackage com.liyuan.demo.controller;\n\nimport com.liyuan.demo.entity.Hero;\nimport com.liyuan.demo.service.HeroService;\nimport org.springframework.beans.factory.annotation.Autowired;\nimport org.springframework.web.bind.annotation.*;\nimport org.springframework.web.multipart.MultipartFile;\n\nimport javax.servlet.http.HttpServletRequest;\nimport javax.servlet.http.HttpServletResponse;\nimport java.io.*;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\n\n/**\n * @Author:LiYuan\n * @description:\n * @Date:Create in 15:28 2018/2/8\n * @Modified By:\n */\n@RestController\n@RequestMapping(\"/hero\")\npublic class HeroController {\n\n @Autowired\n private HeroService heroService;\n\n @GetMapping(\"/get\")\n public Map list(){\n Map modelMap = new HashMap<>();\n List list = heroService.queryAll();\n modelMap.put(\"list\",list);\n return modelMap;\n }\n\n @GetMapping(\"/get/{id}\")\n public Map find(@PathVariable(\"id\") Integer id){\n Map modelMap = new HashMap<>();\n Hero hero = heroService.findById(id);\n modelMap.put(\"hero\",hero);\n return modelMap;\n\n }\n\n @PostMapping(\"/post\")\n public Map post(@RequestBody Hero hero){\n Map modelMap = new HashMap<>();\n Integer id = heroService.saveHero(hero);\n hero.setId(id);\n modelMap.put(\"hero\",hero);\n return modelMap;\n }\n\n @PostMapping(\"/put\")\n public Map put(@RequestBody Hero hero){\n Map modelMap = new HashMap<>();\n Integer result = 
heroService.updateHero(hero);\n modelMap.put(\"hero\",hero);\n return modelMap;\n }\n @GetMapping(\"/delete/{id}\")\n public Map delete(@PathVariable(\"id\") Integer id){\n Map modelMap = new HashMap<>();\n Integer result = heroService.deleteHero(id);\n modelMap.put(\"result\",\"deleteSuccess\");\n return modelMap;\n\n }\n\n /**\n * 实现文件上传\n * */\n @RequestMapping(value=\"/fileUpload\",method = RequestMethod.POST)\n @ResponseBody\n public String fileUpload(MultipartFile file){\n\n if(file.isEmpty()){\n return \"false\";\n }\n String fileName = file.getOriginalFilename();\n\n String path = System.getProperty(\"user.dir\") + \"/uploadFile\" ;\n System.out.println(path);\n File dest = new File(path + \"/\" + fileName);\n if(!dest.getParentFile().exists()){ //判断文件父目录是否存在\n dest.getParentFile().mkdir();\n }\n try {\n file.transferTo(dest); //保存文件\n return \"true\";\n } catch (IllegalStateException e) {\n // TODO Auto-generated catch block\n e.printStackTrace();\n return \"false\";\n } catch (IOException e) {\n // TODO Auto-generated catch block\n e.printStackTrace();\n return \"false\";\n }\n }\n\n //文件下载相关代码\n @RequestMapping(\"/download\")\n public String downloadFile(HttpServletRequest request, HttpServletResponse response) {\n String fileName = \"1.png\";// 设置文件名,根据业务需要替换成要下载的文件名\n System.out.println(fileName);\n if (fileName != null) {\n //设置文件路径\n String realPath = \"D:/workspace_idea/leolder/leolder/uploadFile/\";\n File file = new File(realPath , fileName);\n if (file.exists()) {\n response.setContentType(\"application/force-download\");// 设置强制下载不打开\n response.addHeader(\"Content-Disposition\", \"attachment;fileName=\" + fileName);// 设置文件名\n byte[] buffer = new byte[1024];\n FileInputStream fis = null;\n BufferedInputStream bis = null;\n try {\n fis = new FileInputStream(file);\n bis = new BufferedInputStream(fis);\n OutputStream os = response.getOutputStream();\n int i = bis.read(buffer);\n while (i != -1) {\n os.write(buffer, 0, i);\n i = bis.read(buffer);\n 
}\n System.out.println(\"success\");\n } catch (Exception e) {\n e.printStackTrace();\n } finally {\n if (bis != null) {\n try {\n bis.close();\n } catch (IOException e) {\n e.printStackTrace();\n }\n }\n if (fis != null) {\n try {\n fis.close();\n } catch (IOException e) {\n e.printStackTrace();\n }\n }\n }\n }\n }\n return null;\n }\n\n}\n/src/main/java/wechat/util/CertHttpUtil.java\npackage wechat.util;\n\npublic class CertHttpUtil {\n//\n//\tprivate static int socketTimeout = 10000;// 连接超时时间,默认10秒\n//\tprivate static int connectTimeout = 30000;// 传输超时时间,默认30秒\n//\tprivate static RequestConfig requestConfig;// 请求器的配置\n//\tprivate static CloseableHttpClient httpClient;// HTTP请求器\n//\n//\tpublic static Map refundOrder(Order order) throws Exception{\n//\t\t//创建退款订单\n//\t\tRefundOrder refundOrder = new RefundOrder();\n//\t\trefundOrder.setAppid(ConstantsUtil.appid);\n//\t\trefundOrder.setMch_id(ConstantsUtil.mchId);\n//\t\trefundOrder.setNonce_str(UUID.randomUUID().toString().replace(\"-\", \"\").substring(0,32));\n//\t\trefundOrder.setOut_refund_no(ConstantsUtil.mchId);\n//\t\trefundOrder.setOut_refund_no(order.getBackcode());\n//\t\trefundOrder.setOut_trade_no(order.getOrdercode());\n//\t\trefundOrder.setTotal_fee((int) (1));\n//\t\trefundOrder.setRefund_fee((int) (1));\n//\t\trefundOrder.setOp_user_id(ConstantsUtil.mchId);\n//\t\trefundOrder.setSign(createSign(refundOrder));\n//\n//\t\t//将集合转换成xml\n//\t\tString requestXML =MessageUtil.messageToXml(refundOrder).replace(\"__\", \"_\");\n//\t\tSystem.out.println(\"请求:\"+requestXML);\n//\t\t//带证书的post\n////\t\tString resXml = CertHttpUtil.postData(ConstantsUtil.refundUrl, requestXML);\n//\t\tString resXml = postData(ConstantsUtil.refundUrl, requestXML);\n//\t\tSystem.out.println(\"结果:\"+resXml);\n//\t\t//解析xml为集合,请打断点查看resXml详细信息\n//\t\tMap resultMap = MessageUtil.parseXml(resXml);\n//\n//\t\treturn resultMap;\n//\t}\n//\n//\t/**\n//\t * 通过Https往API post xml数据\n//\t *\n//\t * @param url\n//\t * API地址\n//\t * @param 
xmlObj\n//\t * 要提交的XML数据对象\n//\t * @return\n//\t * @throws Exception\n//\t */\n//\tpublic static String postData(String url, String xmlObj) throws Exception {\n//\t\t// 加载证书\n//\t\ttry {\n//\t\t\tinitCert();\n//\t\t} catch (Exception e) {\n//\t\t\te.printStackTrace();\n//\t\t}\n//\t\tString result = null;\n//\t\tHttpPost httpPost = new HttpPost(url);\n//\t\t// 得指明使用UTF-8编码,否则到API服务器XML的中文不能被成功识别\n//\t\tStringEntity postEntity = new StringEntity(xmlObj, \"UTF-8\");\n//\t\thttpPost.addHeader(\"Content-Type\", \"text/xml\");\n//\t\thttpPost.setEntity(postEntity);\n//\t\t// 根据默认超时限制初始化requestConfig\n//\t\trequestConfig = RequestConfig.custom().setSocketTimeout(socketTimeout)\n//\t\t\t\t.setConnectTimeout(connectTimeout).build();\n//\t\t// 设置请求器的配置\n//\t\thttpPost.setConfig(requestConfig);\n//\t\ttry {\n//\t\t\tHttpResponse response = null;\n//\t\t\ttry {\n//\t\t\t\tresponse = httpClient.execute(httpPost);\n//\t\t\t} catch (IOException e) {\n//\t\t\t\te.printStackTrace();\n//\t\t\t}\n//\t\t\tHttpEntity entity = response.getEntity();\n//\t\t\ttry {\n//\t\t\t\tresult = EntityUtils.toString(entity, \"UTF-8\");\n//\t\t\t} catch (IOException e) {\n//\t\t\t\te.printStackTrace();\n//\t\t\t}\n//\t\t} finally {\n//\t\t\thttpPost.abort();\n//\t\t}\n//\t\treturn result;\n//\t}\n//\n//\t/**\n//\t * 加载证书\n//\t *\n//\t */\n//\tprivate static void initCert() throws Exception {\n//\t\t// 证书密码,默认为商户ID\n//\t\tString key = ConstantsUtil.mchId;\n//\t\t// 证书的路径\n//\t\tString path = ConstantsUtil.certPath;\n//\t\t// 指定读取证书格式为PKCS12\n//\t\tKeyStore keyStore = KeyStore.getInstance(\"PKCS12\");\n//\t\t// 读取本机存放的PKCS12证书文件\n//\t\tFileInputStream instream = new FileInputStream(new File(path));\n//\t\ttry {\n//\t\t\t// 指定PKCS12的密码(商户ID)\n//\t\t\tkeyStore.load(instream, key.toCharArray());\n//\t\t} finally {\n//\t\t\tinstream.close();\n//\t\t}\n//\t\tSSLContext sslcontext = SSLContexts.custom()\n//\t\t\t\t.loadKeyMaterial(keyStore, key.toCharArray()).build();\n//\t\t// 
指定TLS版本\n//\t\tSSLConnectionSocketFactory sslsf = new SSLConnectionSocketFactory(\n//\t\t\t\tsslcontext, new String[] { \"TLSv1\" }, null,\n//\t\t\t\tSSLConnectionSocketFactory.BROWSER_COMPATIBLE_HOSTNAME_VERIFIER);\n//\t\t// 设置httpclient的SSLSocketFactory\n//\t\thttpClient = HttpClients.custom().setSSLSocketFactory(sslsf).build();\n//\t}\n//\n//\t// 生成退款单sign\n//\tprivate static String createSign(RefundOrder refundOrder) {\n//\t\tSortedMap