{ // 获取包含Hugging Face文本的span元素 const spans = link.querySelectorAll('span.whitespace-nowrap, span.hidden.whitespace-nowrap'); spans.forEach(span => { if (span.textContent && span.textContent.trim().match(/Hugging\s*Face/i)) { span.textContent = 'AI快站'; } }); }); // 替换logo图片的alt属性 document.querySelectorAll('img[alt*="Hugging"], img[alt*="Face"]').forEach(img => { if (img.alt.match(/Hugging\s*Face/i)) { img.alt = 'AI快站 logo'; } }); } // 替换导航栏中的链接 function replaceNavigationLinks() { // 已替换标记,防止重复运行 if (window._navLinksReplaced) { return; } // 已经替换过的链接集合,防止重复替换 const replacedLinks = new Set(); // 只在导航栏区域查找和替换链接 const headerArea = document.querySelector('header') || document.querySelector('nav'); if (!headerArea) { return; } // 在导航区域内查找链接 const navLinks = headerArea.querySelectorAll('a'); navLinks.forEach(link => { // 如果已经替换过,跳过 if (replacedLinks.has(link)) return; const linkText = link.textContent.trim(); const linkHref = link.getAttribute('href') || ''; // 替换Spaces链接 - 仅替换一次 if ( (linkHref.includes('/spaces') || linkHref === '/spaces' || linkText === 'Spaces' || linkText.match(/^s*Spacess*$/i)) && linkText !== 'PDF TO Markdown' && linkText !== 'PDF TO Markdown' ) { link.textContent = 'PDF TO Markdown'; link.href = 'https://fast360.xyz'; link.setAttribute('target', '_blank'); link.setAttribute('rel', 'noopener noreferrer'); replacedLinks.add(link); } // 删除Posts链接 else if ( (linkHref.includes('/posts') || linkHref === '/posts' || linkText === 'Posts' || linkText.match(/^s*Postss*$/i)) ) { if (link.parentNode) { link.parentNode.removeChild(link); } replacedLinks.add(link); } // 替换Docs链接 - 仅替换一次 else if ( (linkHref.includes('/docs') || linkHref === '/docs' || linkText === 'Docs' || linkText.match(/^s*Docss*$/i)) && linkText !== 'Voice Cloning' ) { link.textContent = 'Voice Cloning'; link.href = 'https://vibevoice.info/'; replacedLinks.add(link); } // 删除Enterprise链接 else if ( (linkHref.includes('/enterprise') || linkHref === '/enterprise' || linkText === 'Enterprise' || 
linkText.match(/^s*Enterprises*$/i)) ) { if (link.parentNode) { link.parentNode.removeChild(link); } replacedLinks.add(link); } }); // 查找可能嵌套的Spaces和Posts文本 const textNodes = []; function findTextNodes(element) { if (element.nodeType === Node.TEXT_NODE) { const text = element.textContent.trim(); if (text === 'Spaces' || text === 'Posts' || text === 'Enterprise') { textNodes.push(element); } } else { for (const child of element.childNodes) { findTextNodes(child); } } } // 只在导航区域内查找文本节点 findTextNodes(headerArea); // 替换找到的文本节点 textNodes.forEach(node => { const text = node.textContent.trim(); if (text === 'Spaces') { node.textContent = node.textContent.replace(/Spaces/g, 'PDF TO Markdown'); } else if (text === 'Posts') { // 删除Posts文本节点 if (node.parentNode) { node.parentNode.removeChild(node); } } else if (text === 'Enterprise') { // 删除Enterprise文本节点 if (node.parentNode) { node.parentNode.removeChild(node); } } }); // 标记已替换完成 window._navLinksReplaced = true; } // 替换代码区域中的域名 function replaceCodeDomains() { // 特别处理span.hljs-string和span.njs-string元素 document.querySelectorAll('span.hljs-string, span.njs-string, span[class*="hljs-string"], span[class*="njs-string"]').forEach(span => { if (span.textContent && span.textContent.includes('huggingface.co')) { span.textContent = span.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 替换hljs-string类的span中的域名(移除多余的转义符号) document.querySelectorAll('span.hljs-string, span[class*="hljs-string"]').forEach(span => { if (span.textContent && span.textContent.includes('huggingface.co')) { span.textContent = span.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 替换pre和code标签中包含git clone命令的域名 document.querySelectorAll('pre, code').forEach(element => { if (element.textContent && element.textContent.includes('git clone')) { const text = element.innerHTML; if (text.includes('huggingface.co')) { element.innerHTML = text.replace(/huggingface.co/g, 'aifasthub.com'); } } }); // 处理特定的命令行示例 document.querySelectorAll('pre, 
code').forEach(element => { const text = element.innerHTML; if (text.includes('huggingface.co')) { // 针对git clone命令的专门处理 if (text.includes('git clone') || text.includes('GIT_LFS_SKIP_SMUDGE=1')) { element.innerHTML = text.replace(/huggingface.co/g, 'aifasthub.com'); } } }); // 特别处理模型下载页面上的代码片段 document.querySelectorAll('.flex.border-t, .svelte_hydrator, .inline-block').forEach(container => { const content = container.innerHTML; if (content && content.includes('huggingface.co')) { container.innerHTML = content.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 特别处理模型仓库克隆对话框中的代码片段 try { // 查找包含"Clone this model repository"标题的对话框 const cloneDialog = document.querySelector('.svelte_hydration_boundary, [data-target="MainHeader"]'); if (cloneDialog) { // 查找对话框中所有的代码片段和命令示例 const codeElements = cloneDialog.querySelectorAll('pre, code, span'); codeElements.forEach(element => { if (element.textContent && element.textContent.includes('huggingface.co')) { if (element.innerHTML.includes('huggingface.co')) { element.innerHTML = element.innerHTML.replace(/huggingface.co/g, 'aifasthub.com'); } else { element.textContent = element.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } } }); } // 更精确地定位克隆命令中的域名 document.querySelectorAll('[data-target]').forEach(container => { const codeBlocks = container.querySelectorAll('pre, code, span.hljs-string'); codeBlocks.forEach(block => { if (block.textContent && block.textContent.includes('huggingface.co')) { if (block.innerHTML.includes('huggingface.co')) { block.innerHTML = block.innerHTML.replace(/huggingface.co/g, 'aifasthub.com'); } else { block.textContent = block.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } } }); }); } catch (e) { // 错误处理但不打印日志 } } // 当DOM加载完成后执行替换 if (document.readyState === 'loading') { document.addEventListener('DOMContentLoaded', () => { replaceHeaderBranding(); replaceNavigationLinks(); replaceCodeDomains(); // 只在必要时执行替换 - 3秒后再次检查 setTimeout(() => { if (!window._navLinksReplaced) { 
console.log('[Client] 3秒后重新检查导航链接'); replaceNavigationLinks(); } }, 3000); }); } else { replaceHeaderBranding(); replaceNavigationLinks(); replaceCodeDomains(); // 只在必要时执行替换 - 3秒后再次检查 setTimeout(() => { if (!window._navLinksReplaced) { console.log('[Client] 3秒后重新检查导航链接'); replaceNavigationLinks(); } }, 3000); } // 增加一个MutationObserver来处理可能的动态元素加载 const observer = new MutationObserver(mutations => { // 检查是否导航区域有变化 const hasNavChanges = mutations.some(mutation => { // 检查是否存在header或nav元素变化 return Array.from(mutation.addedNodes).some(node => { if (node.nodeType === Node.ELEMENT_NODE) { // 检查是否是导航元素或其子元素 if (node.tagName === 'HEADER' || node.tagName === 'NAV' || node.querySelector('header, nav')) { return true; } // 检查是否在导航元素内部 let parent = node.parentElement; while (parent) { if (parent.tagName === 'HEADER' || parent.tagName === 'NAV') { return true; } parent = parent.parentElement; } } return false; }); }); // 只在导航区域有变化时执行替换 if (hasNavChanges) { // 重置替换状态,允许再次替换 window._navLinksReplaced = false; replaceHeaderBranding(); replaceNavigationLinks(); } }); // 开始观察document.body的变化,包括子节点 if (document.body) { observer.observe(document.body, { childList: true, subtree: true }); } else { document.addEventListener('DOMContentLoaded', () => { observer.observe(document.body, { childList: true, subtree: true }); }); } })(); `\n content += ''\n content = inlineCss(content, style + config.extraCss, config.inlineOpts)\n } else {\n content = `
${content}
`\n content = inlineCss(content, style, config.inlineOpts)\n }\n\n res.send(content)\n})\n\napp.listen(process.env.PORT || 3000)\n"},"avg_line_length":{"kind":"number","value":33.6883116883,"string":"33.688312"},"max_line_length":{"kind":"number","value":106,"string":"106"},"alphanum_fraction":{"kind":"number","value":0.6723207402,"string":"0.672321"},"score":{"kind":"number","value":3.140625,"string":"3.140625"}}},{"rowIdx":909,"cells":{"hexsha":{"kind":"string","value":"5f5df4c16e3cbba91cabd10a1877c24964949db2"},"size":{"kind":"number","value":2915,"string":"2,915"},"ext":{"kind":"string","value":"ts"},"lang":{"kind":"string","value":"TypeScript"},"max_stars_repo_path":{"kind":"string","value":"src/tests/utilities.test.ts"},"max_stars_repo_name":{"kind":"string","value":"jmgrady/TheCombine"},"max_stars_repo_head_hexsha":{"kind":"string","value":"4645d891f4825458553b94119a7b5731bed715d3"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"src/tests/utilities.test.ts"},"max_issues_repo_name":{"kind":"string","value":"jmgrady/TheCombine"},"max_issues_repo_head_hexsha":{"kind":"string","value":"4645d891f4825458553b94119a7b5731bed715d3"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"src/tests/utilities.test.ts"},"max_forks_repo_name":{"kind":"string","value":"jmgrady/TheCombine"},"max_forks_repo_head_hexsha":{"kind":"string","value":"4645d891f4825458553b94119a7b5731bed715d3"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"import * as utilities from \"utilities\";\n\ndescribe(\"utilities\", () => {\n describe(\"quicksort\", () => {\n const compareItem = (input: number) => {\n return input;\n };\n\n const numbers: number[] = [];\n for (let i = 0; i < 25; i++) numbers.push(Math.random());\n\n it(\"orders properly\", () => {\n const sortedNums = utilities.quicksort(numbers, compareItem);\n for (let i = 1; i < sortedNums.length; i++)\n expect(sortedNums[i - 1]).toBeLessThanOrEqual(sortedNums[i]);\n });\n });\n\n describe(\"getNowDateTimeString\", () => {\n // This tests will fail intermittently if there is a bug with the 0-prepend\n it(\"returns string of correct length\", () => {\n const expectedLength = \"YYYY-MM-DD_hh-mm-ss\".length;\n expect(utilities.getNowDateTimeString().length).toBe(expectedLength);\n });\n });\n\n describe(\"LevenshteinDistance\", () => {\n let finder: utilities.LevenshteinDistance;\n const testParams: utilities.LevenshteinDistParams = {\n delCost: 3,\n insCost: 4,\n subCost: 5,\n };\n\n beforeEach(() => {\n finder = new utilities.LevenshteinDistance(testParams);\n });\n\n describe(\"getDistance\", () => {\n const baseWord = \"testing\";\n\n test(\"with empty word\", () => {\n expect(finder.getDistance(\"\", \"\")).toEqual(0);\n expect(finder.getDistance(baseWord, \"\")).toEqual(\n baseWord.length * testParams.delCost\n );\n expect(finder.getDistance(\"\", baseWord)).toEqual(\n baseWord.length * testParams.insCost\n );\n });\n\n const similarCases: [string, string, number][] = [\n [\"same word\", baseWord, 0],\n [\"1 deletion\", \"testin\", testParams.delCost],\n [\"1 insertion\", \"testings\", testParams.insCost],\n [\"1 substitution\", \"tasting\", testParams.subCost],\n [\"2 substitutions\", \"tossing\", 2 * testParams.subCost],\n [\n \"1 insertion, 1 
deletion\",\n \"teasing\",\n testParams.insCost + testParams.delCost,\n ],\n [\n \"1 insertion, 1 substitution\",\n \"toasting\",\n testParams.insCost + testParams.subCost,\n ],\n ];\n test.each(similarCases)(\n \"with similar word: %p\",\n (_description: string, secondWord: string, expectedDist: number) => {\n expect(finder.getDistance(baseWord, secondWord)).toEqual(\n expectedDist\n );\n }\n );\n\n test(\"with much different words\", () => {\n const diffWord = \"QQQ\";\n expect(finder.getDistance(diffWord, baseWord)).toEqual(\n diffWord.length * testParams.subCost +\n (baseWord.length - diffWord.length) * testParams.insCost\n );\n expect(finder.getDistance(baseWord, diffWord)).toEqual(\n diffWord.length * testParams.subCost +\n (baseWord.length - diffWord.length) * testParams.delCost\n );\n });\n });\n });\n});\n"},"avg_line_length":{"kind":"number","value":31.6847826087,"string":"31.684783"},"max_line_length":{"kind":"number","value":79,"string":"79"},"alphanum_fraction":{"kind":"number","value":0.5780445969,"string":"0.578045"},"score":{"kind":"number","value":3.359375,"string":"3.359375"}}},{"rowIdx":910,"cells":{"hexsha":{"kind":"string","value":"dfd7c4d2364f15422ade24e6579947af63f87ec7"},"size":{"kind":"number","value":1585,"string":"1,585"},"ext":{"kind":"string","value":"ts"},"lang":{"kind":"string","value":"TypeScript"},"max_stars_repo_path":{"kind":"string","value":"backend/src/connect.database.ts"},"max_stars_repo_name":{"kind":"string","value":"ExiledNarwal28/gif-3112-project"},"max_stars_repo_head_hexsha":{"kind":"string","value":"806c697705c13d813a85dd643bd733dbd39af96d"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_stars_count":{"kind":"number","value":1,"string":"1"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2021-11-12T06:58:38.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2021-11-12T06:58:38.000Z"},"max_issues_repo_path":{"kind":"string","value":"backend/src/connect.database.ts"},"max_issues_repo_name":{"kind":"string","value":"ExiledNarwal28/gif-3112-project"},"max_issues_repo_head_hexsha":{"kind":"string","value":"806c697705c13d813a85dd643bd733dbd39af96d"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"backend/src/connect.database.ts"},"max_forks_repo_name":{"kind":"string","value":"ExiledNarwal28/gif-3112-project"},"max_forks_repo_head_hexsha":{"kind":"string","value":"806c697705c13d813a85dd643bd733dbd39af96d"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"import mongoose from 'mongoose';\nimport { logger } from './middlewares/logger';\n\nconst mongoURL = process.env.MONGO_URL || '';\n\nconst mongoOptions = {\n useFindAndModify: false,\n useNewUrlParser: true,\n useUnifiedTopology: true,\n user: process.env.MONGO_USERNAME,\n pass: process.env.MONGO_PASSWORD,\n};\n\nconst db = mongoose.connection;\n\ndb.on('connecting', () => logger.info('Connecting to MongoDB...'));\n\ndb.on('error', (error) =>\n logger.error(`Error in MongoDB connection : ${error}`),\n);\ndb.on('connected', () => logger.info('MongoDB connected!'));\n\ndb.once('open', async () => {\n logger.info('MongoDB connection opened!');\n});\n\ndb.on('reconnected', () => 
logger.info('MongoDB reconnected!'));\n\ndb.on('disconnected', () => {\n logger.info('MongoDB disconnected!');\n retryConnectionAfterTimeout();\n});\n\nconst MAX_ATTEMPTS = 10;\nconst FACTOR = 1.5;\nconst DEFAULT_RETRY_TIMEOUT = 5000;\nconst DEFAULT_ATTEMPTS = 0;\n\nlet retryTimeout = DEFAULT_RETRY_TIMEOUT;\nlet attempts = DEFAULT_ATTEMPTS;\n\nconst retryConnectionAfterTimeout = () => {\n if (attempts < MAX_ATTEMPTS) {\n logger.info(`Retrying connection in ${retryTimeout / 1000} seconds`);\n setTimeout(connectDatabase, retryTimeout);\n\n retryTimeout *= FACTOR;\n attempts++;\n } else {\n logger.info(`Max connection attempts (${MAX_ATTEMPTS}) reached!`);\n }\n};\n\nexport function connectDatabase() {\n mongoose\n .connect(mongoURL, mongoOptions)\n .then(() => {\n retryTimeout = DEFAULT_RETRY_TIMEOUT;\n attempts = DEFAULT_ATTEMPTS;\n })\n .catch(() => {\n retryConnectionAfterTimeout();\n });\n}\n"},"avg_line_length":{"kind":"number","value":24.3846153846,"string":"24.384615"},"max_line_length":{"kind":"number","value":73,"string":"73"},"alphanum_fraction":{"kind":"number","value":0.680126183,"string":"0.680126"},"score":{"kind":"number","value":3.015625,"string":"3.015625"}}},{"rowIdx":911,"cells":{"hexsha":{"kind":"string","value":"c36bc166ac8c5e32810efdfb68416109d184994a"},"size":{"kind":"number","value":1233,"string":"1,233"},"ext":{"kind":"string","value":"go"},"lang":{"kind":"string","value":"Go"},"max_stars_repo_path":{"kind":"string","value":"st600/fuzz.go"},"max_stars_repo_name":{"kind":"string","value":"larixsource/stgps"},"max_stars_repo_head_hexsha":{"kind":"string","value":"03af5b868f238dd04a10c161038759036daee6ac"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_stars_count":{"kind":"number","value":7,"string":"7"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2017-05-17T20:12:49.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2021-07-12T21:41:48.000Z"},"max_issues_repo_path":{"kind":"string","value":"st600/fuzz.go"},"max_issues_repo_name":{"kind":"string","value":"larixsource/stgps"},"max_issues_repo_head_hexsha":{"kind":"string","value":"03af5b868f238dd04a10c161038759036daee6ac"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"st600/fuzz.go"},"max_forks_repo_name":{"kind":"string","value":"larixsource/stgps"},"max_forks_repo_head_hexsha":{"kind":"string","value":"03af5b868f238dd04a10c161038759036daee6ac"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"number","value":3,"string":"3"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2016-11-29T23:30:08.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2018-12-04T13:19:58.000Z"},"content":{"kind":"string","value":"package st600\n\nfunc Fuzz(data []byte) int {\n\tp := ParseBytes(data, ParserOpts{})\n\n\tvar results []int\n\tfor p.Next() {\n\t\tframe := p.Msg()\n\t\tif frame == nil {\n\t\t\tpanic(\"nil frame\")\n\t\t}\n\t\tif len(frame.Frame) == 0 {\n\t\t\tpanic(\"empty raw frame\")\n\t\t}\n\t\tif frame.ParsingError != nil {\n\t\t\tresults = append(results, 0)\n\t\t\tcontinue\n\t\t}\n\t\tswitch frame.Type {\n\t\tcase STTReport:\n\t\t\tif frame.STT == nil {\n\t\t\t\tpanic(\"nil STT\")\n\t\t\t}\n\t\tcase EMGReport:\n\t\t\tif frame.EMG == nil {\n\t\t\t\tpanic(\"nil EMG\")\n\t\t\t}\n\t\tcase EVTReport:\n\t\t\tif frame.EVT == nil {\n\t\t\t\tpanic(\"nil 
EVT\")\n\t\t\t}\n\t\tcase ALTReport:\n\t\t\tif frame.ALT == nil {\n\t\t\t\tpanic(\"nil ALT\")\n\t\t\t}\n\t\tcase ALVReport:\n\t\t\tif frame.ALV == nil {\n\t\t\t\tpanic(\"nil ALV\")\n\t\t\t}\n\t\tcase UEXReport:\n\t\t\tif frame.UEX == nil {\n\t\t\t\tpanic(\"nil UEX\")\n\t\t\t}\n\t\tcase UnknownMsg:\n\t\tdefault:\n\t\t\tpanic(\"invalid Type\")\n\t\t}\n\n\t\t// good frame\n\t\tresults = append(results, 1)\n\t}\n\n\t// count results (zeroes and ones)\n\tzeroCount := 0\n\toneCount := 0\n\tfor _, r := range results {\n\t\tswitch r {\n\t\tcase 0:\n\t\t\tzeroCount++\n\t\tcase 1:\n\t\t\toneCount++\n\t\tdefault:\n\t\t\tpanic(\"fuzz programming error\")\n\t\t}\n\t}\n\n\tswitch {\n\tcase oneCount == 0:\n\t\treturn 0\n\tcase zeroCount == 0 || zeroCount == 1: // at most one error permitted\n\t\treturn 1\n\tdefault:\n\t\treturn 0\n\t}\n}\n"},"avg_line_length":{"kind":"number","value":16.2236842105,"string":"16.223684"},"max_line_length":{"kind":"number","value":70,"string":"70"},"alphanum_fraction":{"kind":"number","value":0.5831305758,"string":"0.583131"},"score":{"kind":"number","value":3.03125,"string":"3.03125"}}},{"rowIdx":912,"cells":{"hexsha":{"kind":"string","value":"a1a6cf56c78ce242de3be9a964be2f1ff6b74f8a"},"size":{"kind":"number","value":2736,"string":"2,736"},"ext":{"kind":"string","value":"go"},"lang":{"kind":"string","value":"Go"},"max_stars_repo_path":{"kind":"string","value":"cmd/ports.go"},"max_stars_repo_name":{"kind":"string","value":"rmohr/cli"},"max_stars_repo_head_hexsha":{"kind":"string","value":"a95a4b96a0c93899e2e6c3f1ef0ce911b118a382"},"max_stars_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n 
\"Apache-2.0\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"cmd/ports.go"},"max_issues_repo_name":{"kind":"string","value":"rmohr/cli"},"max_issues_repo_head_hexsha":{"kind":"string","value":"a95a4b96a0c93899e2e6c3f1ef0ce911b118a382"},"max_issues_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"cmd/ports.go"},"max_forks_repo_name":{"kind":"string","value":"rmohr/cli"},"max_forks_repo_head_hexsha":{"kind":"string","value":"a95a4b96a0c93899e2e6c3f1ef0ce911b118a382"},"max_forks_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"package cmd\n\nimport (\n\t\"fmt\"\n\t\"github.com/docker/docker/api/types\"\n\t\"github.com/docker/docker/client\"\n\t\"github.com/docker/go-connections/nat\"\n\t\"github.com/rmohr/cli/docker\"\n\t\"github.com/spf13/cobra\"\n\t\"strconv\"\n)\n\nconst (\n\tPORT_SSH = 2201\n\tPORT_REGISTRY = 5000\n\tPORT_OCP = 8443\n\tPORT_K8S = 6443\n\tPORT_VNC = 5901\n\n\tPORT_NAME_SSH = \"ssh\"\n\tPORT_NAME_OCP = \"ocp\"\n\tPORT_NAME_REGISTRY = \"registry\"\n\tPORT_NAME_K8S = \"k8s\"\n\tPORT_NAME_VNC = \"vnc\"\n)\n\nfunc NewPortCommand() *cobra.Command {\n\n\tport := &cobra.Command{\n\t\tUse: \"ports\",\n\t\tShort: \"ports shows exposed ports of the cluster\",\n\t\tLong: `ports shows exposed ports of the cluster\n\nIf no port name is specified, all exposed ports are printed.\nIf an extra port name is specified, only the exposed port is 
printed.\n\nKnown port names are 'ssh', 'registry', 'ocp' and 'k8s'.\n`,\n\t\tRunE: ports,\n\t\tArgs: func(cmd *cobra.Command, args []string) error {\n\t\t\tif len(args) > 1 {\n\t\t\t\treturn fmt.Errorf(\"only one port name can be specified at once\")\n\t\t\t}\n\n\t\t\tif len(args) == 1 {\n\t\t\t\tswitch args[0] {\n\t\t\t\tcase PORT_NAME_SSH, PORT_NAME_K8S, PORT_NAME_OCP, PORT_NAME_REGISTRY, PORT_NAME_VNC:\n\t\t\t\t\treturn nil\n\t\t\t\tdefault:\n\t\t\t\t\treturn fmt.Errorf(\"unknown port name %s\", args[0])\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn nil\n\t\t},\n\t}\n\treturn port\n}\n\nfunc ports(cmd *cobra.Command, args []string) error {\n\n\tprefix, err := cmd.Flags().GetString(\"prefix\")\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tcli, err := client.NewEnvClient()\n\tif err != nil {\n\t\treturn err\n\t}\n\tcontainer, err := docker.GetDDNSMasqContainer(cli, prefix)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tportName := \"\"\n\tif len(args) > 0 {\n\t\tportName = args[0]\n\t}\n\n\tif portName != \"\" {\n\t\terr = nil\n\t\tswitch portName {\n\t\tcase PORT_NAME_SSH:\n\t\t\terr = printPort(PORT_SSH, container.Ports)\n\t\tcase PORT_NAME_K8S:\n\t\t\terr = printPort(PORT_K8S, container.Ports)\n\t\tcase PORT_NAME_REGISTRY:\n\t\t\terr = printPort(PORT_REGISTRY, container.Ports)\n\t\tcase PORT_NAME_OCP:\n\t\t\terr = printPort(PORT_OCP, container.Ports)\n\t\tcase PORT_NAME_VNC:\n\t\t\terr = printPort(PORT_VNC, container.Ports)\n\t\t}\n\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t} else {\n\t\tfor _, p := range container.Ports {\n\t\t\tfmt.Printf(\"%d/%s -> %s:%d\\n\", p.PrivatePort, p.Type, p.IP, p.PublicPort)\n\t\t}\n\t}\n\n\treturn nil\n}\n\nfunc getPort(port uint16, ports []types.Port) (uint16, error) {\n\tfor _, p := range ports {\n\t\tif p.PrivatePort == port {\n\t\t\treturn p.PublicPort, nil\n\t\t}\n\t}\n\treturn 0, fmt.Errorf(\"port is not exposed\")\n}\n\nfunc printPort(port uint16, ports []types.Port) error {\n\tp, err := getPort(port, ports)\n\tif err != nil 
{\n\t\treturn err\n\t}\n\tfmt.Println(p)\n\treturn nil\n}\n\nfunc tcpPortOrDie(port int) nat.Port {\n\tp, err := nat.NewPort(\"tcp\", strconv.Itoa(port))\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn p\n}\n"},"avg_line_length":{"kind":"number","value":20.5714285714,"string":"20.571429"},"max_line_length":{"kind":"number","value":88,"string":"88"},"alphanum_fraction":{"kind":"number","value":0.6549707602,"string":"0.654971"},"score":{"kind":"number","value":3.125,"string":"3.125"}}},{"rowIdx":913,"cells":{"hexsha":{"kind":"string","value":"bd3a662fb349989868d51eb2243cb72e1eac5b28"},"size":{"kind":"number","value":2124,"string":"2,124"},"ext":{"kind":"string","value":"rs"},"lang":{"kind":"string","value":"Rust"},"max_stars_repo_path":{"kind":"string","value":"web-server/sgx-wallet-impl/src/schema/types.rs"},"max_stars_repo_name":{"kind":"string","value":"ntls-io/nautilus-wallet"},"max_stars_repo_head_hexsha":{"kind":"string","value":"31a6a534c920d58548a8ac5869b3cb918d3b7c11"},"max_stars_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_stars_count":{"kind":"number","value":1,"string":"1"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2022-01-30T03:54:55.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2022-01-30T03:54:55.000Z"},"max_issues_repo_path":{"kind":"string","value":"web-server/sgx-wallet-impl/src/schema/types.rs"},"max_issues_repo_name":{"kind":"string","value":"ntls-io/nautilus-wallet"},"max_issues_repo_head_hexsha":{"kind":"string","value":"31a6a534c920d58548a8ac5869b3cb918d3b7c11"},"max_issues_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n 
\"Apache-2.0\"\n]"},"max_issues_count":{"kind":"number","value":32,"string":"32"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2021-11-15T08:43:10.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2022-03-20T22:35:56.000Z"},"max_forks_repo_path":{"kind":"string","value":"web-server/sgx-wallet-impl/src/schema/types.rs"},"max_forks_repo_name":{"kind":"string","value":"ntls-io/nautilus-wallet"},"max_forks_repo_head_hexsha":{"kind":"string","value":"31a6a534c920d58548a8ac5869b3cb918d3b7c11"},"max_forks_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_forks_count":{"kind":"number","value":2,"string":"2"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2021-11-21T19:19:08.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2021-11-22T09:17:01.000Z"},"content":{"kind":"string","value":"//! Supporting data types.\n\nuse std::boxed::Box;\nuse std::prelude::v1::String;\n\nuse ripple_keypairs::{Algorithm, EntropyArray};\nuse serde::{Deserialize, Serialize};\n\npub type Bytes = Box<[u8]>;\n\n/// Nautilus Wallet ID.\npub type WalletId = String;\n\n/// A wallet owner's authenticating PIN.\npub type WalletPin = String;\n\n/// Algorand account seed, as bytes.\npub type AlgorandAccountSeedBytes = [u8; 32];\n\n/// Algorand account address, as bytes.\npub type AlgorandAddressBytes = [u8; 32];\n\n/// Algorand account address, as base32 with checksum.\npub type AlgorandAddressBase32 = String;\n\n/// XRPL key type (signing algorithm).\n///\n/// Docs: \n#[derive(Copy, Clone, Eq, PartialEq, Debug)] // core\n#[derive(Deserialize, Serialize)] // serde\n#[serde(rename_all = \"lowercase\")]\npub enum XrplKeyType {\n Secp256k1,\n Ed25519,\n}\n\n/// Default to `secp256k1`, like the XRP Ledger.\nimpl Default for XrplKeyType {\n fn default() -> Self {\n Self::Secp256k1\n }\n}\n\n// Convert between our representation and ripple-keypairs.\n\n/// Convert from 
`&Algorithm`, as used by ripple-keypairs.\nimpl From<&Algorithm> for XrplKeyType {\n fn from(algorithm: &Algorithm) -> Self {\n match algorithm {\n Algorithm::Secp256k1 => Self::Secp256k1,\n Algorithm::Ed25519 => Self::Ed25519,\n }\n }\n}\n\n/// Convert to `&'static Algorithm`, as expected by ripple-keypairs.\nimpl From for &'static Algorithm {\n fn from(key_type: XrplKeyType) -> Self {\n match key_type {\n XrplKeyType::Secp256k1 => &Algorithm::Secp256k1,\n XrplKeyType::Ed25519 => &Algorithm::Ed25519,\n }\n }\n}\n\n/// XRP account seed, as bytes.\npub type XrplAccountSeedBytes = EntropyArray;\n\n/// XRP account address, as base58 with checksum (\"Base58Check\").\n///\n/// Docs: \npub type XrplAddressBase58 = String;\n\n/// XRP public key, as a hexadecimal string. Used to prepare unsigned transactions.\n///\n/// Docs: \npub type XrplPublicKeyHex = String;\n"},"avg_line_length":{"kind":"number","value":27.2307692308,"string":"27.230769"},"max_line_length":{"kind":"number","value":83,"string":"83"},"alphanum_fraction":{"kind":"number","value":0.6822033898,"string":"0.682203"},"score":{"kind":"number","value":3.09375,"string":"3.09375"}}},{"rowIdx":914,"cells":{"hexsha":{"kind":"string","value":"653e59fcbdd6ab10a8f9cfbeb5d59f5f53315a37"},"size":{"kind":"number","value":6114,"string":"6,114"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"src/trainer/transformations.py"},"max_stars_repo_name":{"kind":"string","value":"tiborkubik/Robust-Teeth-Detection-in-3D-Dental-Scans-by-Automated-Multi-View-Landmarking"},"max_stars_repo_head_hexsha":{"kind":"string","value":"c7d9fa29b3b94ea786da5f4ec11a11520c1b882a"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_stars_count":{"kind":"number","value":2,"string":"2"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2022-02-20T23:45:47.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2022-03-14T07:36:53.000Z"},"max_issues_repo_path":{"kind":"string","value":"src/trainer/transformations.py"},"max_issues_repo_name":{"kind":"string","value":"tiborkubik/Robust-Teeth-Detection-in-3D-Dental-Scans-by-Automated-Multi-View-Landmarking"},"max_issues_repo_head_hexsha":{"kind":"string","value":"c7d9fa29b3b94ea786da5f4ec11a11520c1b882a"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"src/trainer/transformations.py"},"max_forks_repo_name":{"kind":"string","value":"tiborkubik/Robust-Teeth-Detection-in-3D-Dental-Scans-by-Automated-Multi-View-Landmarking"},"max_forks_repo_head_hexsha":{"kind":"string","value":"c7d9fa29b3b94ea786da5f4ec11a11520c1b882a"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"\"\"\"\n :filename transformations.py\n :author Tibor Kubik\n :email xkubik34@stud.fit.vutbr.cz\nfrom\n Classes of custom transformations that are applied during the training as additional augmentation of the depth maps.\n\"\"\"\n\nimport torch\nimport random\nimport numpy as np\nimport torch.nn.functional as F\n\nfrom random import randrange\nfrom skimage.transform import resize, warp, AffineTransform\n\n\nclass Normalize(object):\n \"\"\"Normalization of a depth map in the value of [0, 1] for each pixel.\"\"\"\n def __init__(self, input_type):\n self.input_type = 
input_type\n\n def __call__(self, sample):\n\n if self.input_type == 'geom':\n image, landmarks, label = sample['image'], sample['landmarks'], sample['label']\n\n mean, std = image.mean([1, 2]), image.std([1, 2])\n\n # TODO?\n\n return {'image': image,\n 'landmarks': landmarks,\n 'label': label}\n\n\nclass ToTensor(object):\n \"\"\"Transformation of a training sample into a torch tensor instance.\"\"\"\n def __init__(self, input_type):\n self.input_type = input_type\n\n def __call__(self, sample):\n image, landmarks, label = sample['image'], sample['landmarks'], sample['label']\n\n image = torch.from_numpy(image.copy())\n\n if self.input_type != 'depth+geom':\n image = image.unsqueeze(1)\n image = image.permute(1, 0, 2)\n else:\n image = image.permute(2, 0, 1)\n\n landmarks = np.asarray(landmarks)\n landmarks = torch.from_numpy(landmarks.copy())\n\n return {'image': image,\n 'landmarks': landmarks,\n 'label': label}\n\n\nclass Resize(object):\n \"\"\"Resizing of the input sample into provided dimensions.\"\"\"\n\n def __init__(self, width, height, input_type='image'):\n assert isinstance(width, int)\n assert isinstance(height, int)\n\n self.width = width\n self.height = height\n self.type = input_type\n\n def __call__(self, sample):\n image, landmarks, label = sample['image'], sample['landmarks'], sample['label']\n resized_landmarks = landmarks.copy()\n\n if self.type == 'image':\n image = resize(image, (self.height, self.width), anti_aliasing=True)\n if self.type == 'landmarks':\n resized_landmarks = []\n for landmark in landmarks:\n landmark_resized = resize(landmark, (self.height, self.width), anti_aliasing=True)\n resized_landmarks.append(landmark_resized)\n\n return {'image': image,\n 'landmarks': resized_landmarks,\n 'label': label}\n\n\nclass RandomTranslating(object):\n \"\"\"Randomly translate the input sample from range [-10 px, 10 px] with provided probability.\"\"\"\n\n def __init__(self, p=0.5):\n assert isinstance(p, float)\n\n self.p = p\n\n def 
__call__(self, sample):\n image, landmarks, label = sample['image'], sample['landmarks'], sample['label']\n translated_landmarks = landmarks.copy()\n\n if np.random.rand(1) < self.p:\n n1 = randrange(-10, 10)\n n2 = randrange(-10, 10)\n\n t = AffineTransform(translation=(n1, n2))\n\n image = warp(image, t.inverse)\n\n translated_landmarks = []\n for landmark in landmarks:\n translated_landmarks.append(warp(landmark, t.inverse))\n\n return {'image': image,\n 'landmarks': translated_landmarks,\n 'label': label}\n\n\nclass RandomScaling(object):\n \"\"\"Randomly scales the input sample with scale index from range [0.90, 1.10] with provided probability.\"\"\"\n\n def __init__(self, p=0.5):\n assert isinstance(p, float)\n\n self.p = p\n\n def __call__(self, sample):\n image, landmarks, label = sample['image'], sample['landmarks'], sample['label']\n scaled_landmarks = landmarks.copy()\n\n if np.random.rand(1) < self.p:\n n = random.uniform(0.90, 1.10)\n t = AffineTransform(scale=(n, n))\n\n image = warp(image, t.inverse)\n\n scaled_landmarks = []\n for landmark in landmarks:\n scaled_landmarks.append(warp(landmark, t.inverse))\n\n return {'image': image,\n 'landmarks': scaled_landmarks,\n 'label': label}\n\n\nclass RandomRotation(object):\n \"\"\"Randomly rotates the input sample from range [−11.25 deg, 11.25 deg] with provided probability.\"\"\"\n\n def __init__(self, p=0.5):\n assert isinstance(p, float)\n\n self.p = p\n\n def __call__(self, sample):\n image, landmarks, label = sample['image'], sample['landmarks'], sample['label']\n\n rnd_num1 = randrange(-32, -6)\n rnd_num2 = randrange(6, 32)\n rnd_num = random.choice([rnd_num1, rnd_num2])\n\n if np.random.rand(1) < self.p:\n rotated_image = self.rotate(x=image.unsqueeze(0).type(torch.FloatTensor), theta=np.pi/rnd_num)\n\n rotated_landmarks = []\n for _, landmark in enumerate(landmarks):\n rotated_landmark = self.rotate(x=landmark.unsqueeze(0).unsqueeze(0).type(torch.FloatTensor), theta=np.pi/rnd_num)\n 
rotated_landmarks.append(rotated_landmark.squeeze(0))\n\n result = torch.cat(rotated_landmarks, dim=0)\n\n return {'image': rotated_image.squeeze(0),\n 'landmarks': result,\n 'label': label}\n\n return {'image': image,\n 'landmarks': landmarks,\n 'label': label}\n\n @staticmethod\n def get_rotation_matrix(theta):\n \"\"\"Returns a tensor rotation matrix with given theta value.\"\"\"\n\n theta = torch.tensor(theta)\n\n return torch.tensor([[torch.cos(theta), -torch.sin(theta), 0],\n [torch.sin(theta), torch.cos(theta), 0]])\n\n def rotate(self, x, theta):\n rot_mat = self.get_rotation_matrix(theta)[None, ...].repeat(x.shape[0], 1, 1)\n grid = F.affine_grid(rot_mat, x.size(), align_corners=False)\n x = F.grid_sample(x, grid, align_corners=False)\n\n return x\n"},"avg_line_length":{"kind":"number","value":31.6787564767,"string":"31.678756"},"max_line_length":{"kind":"number","value":129,"string":"129"},"alphanum_fraction":{"kind":"number","value":0.5966633955,"string":"0.596663"},"score":{"kind":"number","value":3.21875,"string":"3.21875"}}},{"rowIdx":915,"cells":{"hexsha":{"kind":"string","value":"b2c14d3bb32a9d0a97a9d773d034e8784a7e69a4"},"size":{"kind":"number","value":5641,"string":"5,641"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"lljs.py"},"max_stars_repo_name":{"kind":"string","value":"Peter9192/wind_analytics"},"max_stars_repo_head_hexsha":{"kind":"string","value":"604136be1c2ef1155bdb7579c7d123525dbe10d8"},"max_stars_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n 
\"Apache-2.0\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"lljs.py"},"max_issues_repo_name":{"kind":"string","value":"Peter9192/wind_analytics"},"max_issues_repo_head_hexsha":{"kind":"string","value":"604136be1c2ef1155bdb7579c7d123525dbe10d8"},"max_issues_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"lljs.py"},"max_forks_repo_name":{"kind":"string","value":"Peter9192/wind_analytics"},"max_forks_repo_head_hexsha":{"kind":"string","value":"604136be1c2ef1155bdb7579c7d123525dbe10d8"},"max_forks_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"\"\"\" Identify low-level jets in wind profile data.\n\nPeter Kalverla\nDecember 2020\n\"\"\"\n\nimport numpy as np\nimport xarray as xr\n\n\ndef detect_llj(x, axis=None, falloff=0, output='strength', inverse=False):\n \"\"\" Identify maxima in wind profiles.\n\n args:\n - x : ndarray with wind profile data\n - axis : specifies the vertical dimension\n is internally used with np.apply_along_axis\n - falloff : threshold for labeling as low-level jet\n default 0; can be masked later, e.g. 
llj[falloff>2.0]\n - output : specifiy return type: 'strength' or 'index'\n\n returns (depending on argument):\n - strength : 0 if no maximum identified, otherwise falloff strength\n - index : nan if no maximum identified, otherwise index along\n , to get the height of the jet etc.\n \"\"\"\n def inner(x, output):\n if inverse:\n x = x[::-1, ...]\n\n # Identify local maxima\n x = x[~np.isnan(x)]\n dx = x[1:] - x[:-1]\n ind = np.where((np.hstack((dx, 0)) < 0) & (np.hstack((0, dx)) >= 0))[0]\n\n # Last value of x cannot be llj\n if ind.size and ind[-1] == x.size - 1:\n ind = ind[:-1]\n\n # Compute the falloff strength for each local maxima\n if ind.size: # this assumes height increases along axis!!!\n strength = np.array([x[i] - min(x[i:]) for i in ind])\n imax = np.argmax(strength)\n\n # Return jet_strength and index of maximum:\n if output == 'strength':\n r = max(strength) if ind.size else 0\n elif output == 'index':\n r = ind[imax] if ind.size else 0\n\n return r\n\n # Wrapper interface to apply 1d function to ndarray\n return np.apply_along_axis(inner, axis, x, output=output)\n\n\ndef detect_llj_vectorized(xs,\n axis=-1,\n output='falloff',\n mask_inv=False,\n inverse=False):\n \"\"\" Identify local maxima in wind profiles.\n\n args:\n - x : ndarray with wind profile data\n - axis : specifies the vertical dimension\n - output : specifiy return type: 'falloff', 'strength' or 'index'\n - mask_inv : use np.ma to mask nan values\n\n returns (depending on argument and whether llj is identified):\n - falloff : 0 or largest difference between local max and subseq min\n - strength : 0 or wind speed at jet height\n - index : -1 or index along \n \"\"\"\n # Move to first dimension, to easily index and iterate over it.\n xv = np.rollaxis(xs, axis)\n\n if inverse:\n xv = xv[::-1, ...]\n\n if mask_inv:\n xv = np.ma.masked_invalid(xv)\n\n # Set initial arrays\n min_elem = xv[-1].copy()\n max_elem = np.zeros(min_elem.shape)\n max_diff = np.zeros(min_elem.shape)\n max_idx = 
np.ones(min_elem.shape, dtype=int) * (-1)\n\n # Start at end of array and search backwards for larger differences.\n for i, elem in reversed(list(enumerate(xv))):\n min_elem = np.minimum(elem, min_elem)\n new_max_identified = elem - min_elem > max_diff\n max_diff = np.where(new_max_identified, elem - min_elem, max_diff)\n max_elem = np.where(new_max_identified, elem, max_elem)\n max_idx = np.where(new_max_identified, i, max_idx)\n\n if output == 'falloff':\n r = max_diff\n elif output == 'strength':\n r = max_elem\n elif output == 'index':\n r = max_idx\n else:\n raise ValueError('Invalid argument for : %s' % output)\n\n return r\n\n\ndef detect_llj_xarray(da, inverse=False):\n \"\"\" Identify local maxima in wind profiles.\n\n args:\n - da : xarray.DataArray with wind profile data\n - inverse : to flip the array if the data is stored upside down\n\n returns: : xarray.Dataset with vertical dimension removed containing:\n - falloff : 0 or largest difference between local max and subseq min\n - strength : 0 or wind speed at jet height\n - index : -1 or index along \n\n Note: vertical dimension should be labeled 'level' and axis=1\n \"\"\"\n # Move to first dimension, to easily index and iterate over it.\n xv = np.rollaxis(da.values, 1)\n\n if inverse:\n xv = xv[::-1, ...]\n\n # Set initial arrays\n min_elem = xv[-1].copy()\n max_elem = np.zeros(min_elem.shape)\n max_diff = np.zeros(min_elem.shape)\n max_idx = np.ones(min_elem.shape, dtype=int) * (-1)\n\n # Start at end of array and search backwards for larger differences.\n for i, elem in reversed(list(enumerate(xv))):\n min_elem = np.minimum(elem, min_elem)\n new_max_identified = elem - min_elem > max_diff\n max_diff = np.where(new_max_identified, elem - min_elem, max_diff)\n max_elem = np.where(new_max_identified, elem, max_elem)\n max_idx = np.where(new_max_identified, i, max_idx)\n\n # Combine the results in a dataframe\n get_height = lambda i: np.where(i > 0, da.level.values[i], da.level.values[\n -1])\n dims = 
da.isel(level=0).drop('level').dims\n coords = da.isel(level=0).drop('level').coords\n lljs = xr.Dataset(\n {\n 'falloff': (dims, max_diff),\n 'strength': (dims, max_elem),\n 'level': (dims, get_height(max_idx)),\n },\n coords=coords)\n\n print(\n 'Beware! Level is also filled if no jet is detected! '\n 'Use ds.sel(level=lljs.level).where(lljs.falloff>0) to get rid of them'\n )\n\n return lljs"},"avg_line_length":{"kind":"number","value":34.8209876543,"string":"34.820988"},"max_line_length":{"kind":"number","value":80,"string":"80"},"alphanum_fraction":{"kind":"number","value":0.5919163269,"string":"0.591916"},"score":{"kind":"number","value":3.21875,"string":"3.21875"}}},{"rowIdx":916,"cells":{"hexsha":{"kind":"string","value":"08963ea78b7a7c8e6fdbd6c088271a84b9c0754d"},"size":{"kind":"number","value":5743,"string":"5,743"},"ext":{"kind":"string","value":"go"},"lang":{"kind":"string","value":"Go"},"max_stars_repo_path":{"kind":"string","value":"pkg/langserver/diagnostics.go"},"max_stars_repo_name":{"kind":"string","value":"lizelive/yodk"},"max_stars_repo_head_hexsha":{"kind":"string","value":"dc37d204598e1ff39ccbbd3bec5ba897486c2df6"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":59,"string":"59"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2019-11-19T08:58:08.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2021-10-02T20:23:48.000Z"},"max_issues_repo_path":{"kind":"string","value":"pkg/langserver/diagnostics.go"},"max_issues_repo_name":{"kind":"string","value":"lizelive/yodk"},"max_issues_repo_head_hexsha":{"kind":"string","value":"dc37d204598e1ff39ccbbd3bec5ba897486c2df6"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_issues_count":{"kind":"number","value":114,"string":"114"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2019-11-05T08:15:53.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2021-12-27T21:20:52.000Z"},"max_forks_repo_path":{"kind":"string","value":"pkg/langserver/diagnostics.go"},"max_forks_repo_name":{"kind":"string","value":"lizelive/yodk"},"max_forks_repo_head_hexsha":{"kind":"string","value":"dc37d204598e1ff39ccbbd3bec5ba897486c2df6"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"number","value":14,"string":"14"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2020-08-01T17:42:41.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2021-10-21T04:24:45.000Z"},"content":{"kind":"string","value":"package langserver\n\nimport (\n\t\"context\"\n\t\"log\"\n\t\"net/url\"\n\t\"path/filepath\"\n\t\"strings\"\n\n\t\"github.com/dbaumgarten/yodk/pkg/lsp\"\n\t\"github.com/dbaumgarten/yodk/pkg/nolol\"\n\t\"github.com/dbaumgarten/yodk/pkg/nolol/nast\"\n\t\"github.com/dbaumgarten/yodk/pkg/optimizers\"\n\t\"github.com/dbaumgarten/yodk/pkg/parser\"\n\t\"github.com/dbaumgarten/yodk/pkg/parser/ast\"\n\t\"github.com/dbaumgarten/yodk/pkg/validators\"\n)\n\n// fs is a special filesystem that retrieves the main file from the cache and all\n// other files from the filesystem. 
It is used when compiling a nolol file, as nolol files may\n// depend on files from the file-system using includes\ntype fs struct {\n\t*nolol.DiskFileSystem\n\tls *LangServer\n\tMainfile string\n}\n\nfunc getFilePath(u lsp.DocumentURI) string {\n\tur, _ := url.Parse(string(u))\n\ts := filepath.FromSlash(ur.Path)\n\n\tif !strings.HasSuffix(s, \"\\\\\\\\\") {\n\t\ts = strings.TrimPrefix(s, \"\\\\\")\n\t}\n\treturn s\n}\n\nfunc newfs(ls *LangServer, mainfile lsp.DocumentURI) *fs {\n\treturn &fs{\n\t\tls: ls,\n\t\tDiskFileSystem: &nolol.DiskFileSystem{\n\t\t\tDir: filepath.Dir(getFilePath(mainfile)),\n\t\t},\n\t\tMainfile: string(mainfile),\n\t}\n}\n\nfunc (f fs) Get(name string) (string, error) {\n\tif name == f.Mainfile {\n\t\treturn f.ls.cache.Get(lsp.DocumentURI(name))\n\t}\n\treturn f.DiskFileSystem.Get(name)\n}\n\nfunc convertToErrorlist(errs error) parser.Errors {\n\tif errs == nil {\n\t\treturn make(parser.Errors, 0)\n\t}\n\tswitch e := errs.(type) {\n\tcase parser.Errors:\n\t\treturn e\n\tcase *parser.Error:\n\t\t// if it is a single error, convert it to a one-element list\n\t\terrlist := make(parser.Errors, 1)\n\t\terrlist[0] = e\n\t\treturn errlist\n\tdefault:\n\t\tlog.Printf(\"Unknown error type: %T\\n (%s)\", errs, errs.Error())\n\t\treturn nil\n\t}\n}\n\nfunc convertErrorsToDiagnostics(errs parser.Errors, source string, severity lsp.DiagnosticSeverity) []lsp.Diagnostic {\n\tdiags := make([]lsp.Diagnostic, 0)\n\n\tfor _, err := range errs {\n\t\tdiag := convertErrorToDiagnostic(err, source, severity)\n\t\tdiags = append(diags, diag)\n\t}\n\n\treturn diags\n}\n\nfunc convertErrorToDiagnostic(err *parser.Error, source string, severity lsp.DiagnosticSeverity) lsp.Diagnostic {\n\treturn lsp.Diagnostic{\n\t\tSource: source,\n\t\tMessage: err.Message,\n\t\tSeverity: severity,\n\t\tRange: lsp.Range{\n\t\t\tStart: lsp.Position{\n\t\t\t\tLine: float64(err.StartPosition.Line) - 1,\n\t\t\t\tCharacter: float64(err.StartPosition.Coloumn) - 1,\n\t\t\t},\n\t\t\tEnd: 
lsp.Position{\n\t\t\t\tLine: float64(err.EndPosition.Line) - 1,\n\t\t\t\tCharacter: float64(err.EndPosition.Coloumn) - 1,\n\t\t\t},\n\t\t},\n\t}\n}\n\nfunc (s *LangServer) validateCodeLength(uri lsp.DocumentURI, text string, parsed *ast.Program) []lsp.Diagnostic {\n\t// check if the code-length of yolol-code is OK\n\tif s.settings.Yolol.LengthChecking.Mode != LengthCheckModeOff {\n\t\tlengtherror := validators.ValidateCodeLength(text)\n\n\t\t// check if the code is small enough after optimizing it\n\t\tif lengtherror != nil && s.settings.Yolol.LengthChecking.Mode == LengthCheckModeOptimize && parsed != nil {\n\n\t\t\topt := optimizers.NewCompoundOptimizer()\n\t\t\terr := opt.Optimize(parsed)\n\t\t\tif err == nil {\n\t\t\t\tprinter := parser.Printer{}\n\t\t\t\toptimized, err := printer.Print(parsed)\n\t\t\t\tif err == nil {\n\t\t\t\t\tlengtherror = validators.ValidateCodeLength(optimized)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tif lengtherror != nil {\n\t\t\terr := lengtherror.(*parser.Error)\n\t\t\tdiag := convertErrorToDiagnostic(err, \"validator\", lsp.SeverityWarning)\n\t\t\treturn []lsp.Diagnostic{diag}\n\n\t\t}\n\t}\n\treturn []lsp.Diagnostic{}\n}\n\nfunc (s *LangServer) validateAvailableOperations(uri lsp.DocumentURI, parsed ast.Node) []lsp.Diagnostic {\n\tchipType, _ := validators.AutoChooseChipType(s.settings.Yolol.ChipType, string(uri))\n\terr := validators.ValidateAvailableOperations(parsed, chipType)\n\n\tif err != nil {\n\t\terrors := convertToErrorlist(err)\n\t\treturn convertErrorsToDiagnostics(errors, \"validator\", lsp.SeverityError)\n\t}\n\n\treturn []lsp.Diagnostic{}\n}\n\nfunc (s *LangServer) Diagnose(ctx context.Context, uri lsp.DocumentURI) {\n\n\tgo func() {\n\t\tvar parserError error\n\t\tvar validationDiagnostics []lsp.Diagnostic\n\t\tvar diagRes DiagnosticResults\n\t\ttext, _ := s.cache.Get(uri)\n\n\t\tprevDiag, err := s.cache.GetDiagnostics(uri)\n\t\tif err == nil {\n\t\t\tdiagRes = *prevDiag\n\t\t}\n\n\t\tif strings.HasSuffix(string(uri), 
\".yolol\") {\n\t\t\tp := parser.NewParser()\n\t\t\tvar parsed *ast.Program\n\t\t\tparsed, parserError = p.Parse(text)\n\n\t\t\tif parsed != nil {\n\t\t\t\tdiagRes.Variables = findUsedVariables(parsed)\n\t\t\t}\n\n\t\t\tif parserError == nil {\n\t\t\t\tvalidationDiagnostics = s.validateAvailableOperations(uri, parsed)\n\t\t\t\tvalidationDiagnostics = append(validationDiagnostics, s.validateCodeLength(uri, text, parsed)...)\n\t\t\t}\n\n\t\t} else if strings.HasSuffix(string(uri), \".nolol\") {\n\t\t\tmainfile := string(uri)\n\t\t\tconverter := nolol.NewConverter()\n\t\t\tconverter.SetChipType(s.settings.Yolol.ChipType)\n\t\t\tincluded := converter.LoadFileEx(mainfile, newfs(s, uri)).ProcessIncludes()\n\t\t\tparserError = included.Error()\n\n\t\t\tif parserError == nil {\n\t\t\t\tintermediate := included.GetIntermediateProgram()\n\t\t\t\t// Analyze() will mutate the ast, so we create a copy of it\n\t\t\t\tanalyse := nast.CopyAst(intermediate).(*nast.Program)\n\t\t\t\tanalysis, err := nolol.Analyse(analyse)\n\t\t\t\tif err == nil {\n\t\t\t\t\tdiagRes.AnalysisReport = analysis\n\t\t\t\t}\n\n\t\t\t\tparserError = included.ProcessCodeExpansion().ProcessNodes().ProcessLineNumbers().ProcessFinalize().Error()\n\t\t\t}\n\t\t} else {\n\t\t\treturn\n\t\t}\n\n\t\ts.cache.SetDiagnostics(uri, diagRes)\n\n\t\tparserErrors := convertToErrorlist(parserError)\n\t\tif parserErrors == nil {\n\t\t\treturn\n\t\t}\n\n\t\tdiags := convertErrorsToDiagnostics(parserErrors, \"parser\", lsp.SeverityError)\n\t\tif validationDiagnostics != nil {\n\t\t\tdiags = append(diags, validationDiagnostics...)\n\t\t}\n\n\t\ts.client.PublishDiagnostics(ctx, &lsp.PublishDiagnosticsParams{\n\t\t\tURI: uri,\n\t\t\tDiagnostics: 
diags,\n\t\t})\n\n\t}()\n}\n"},"avg_line_length":{"kind":"number","value":27.2180094787,"string":"27.218009"},"max_line_length":{"kind":"number","value":118,"string":"118"},"alphanum_fraction":{"kind":"number","value":0.7050322131,"string":"0.705032"},"score":{"kind":"number","value":3.046875,"string":"3.046875"}}},{"rowIdx":917,"cells":{"hexsha":{"kind":"string","value":"62725e36e9c4383385bbd866c1009e735beeb419"},"size":{"kind":"number","value":2979,"string":"2,979"},"ext":{"kind":"string","value":"rs"},"lang":{"kind":"string","value":"Rust"},"max_stars_repo_path":{"kind":"string","value":"lexical-core/src/itoa/naive.rs"},"max_stars_repo_name":{"kind":"string","value":"ignatenkobrain/rust-lexical"},"max_stars_repo_head_hexsha":{"kind":"string","value":"fefe81850e5678450ec0f001f562b182694caadf"},"max_stars_repo_licenses":{"kind":"list like","value":["Apache-2.0","MIT"],"string":"[\n \"Apache-2.0\",\n \"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"lexical-core/src/itoa/naive.rs"},"max_issues_repo_name":{"kind":"string","value":"ignatenkobrain/rust-lexical"},"max_issues_repo_head_hexsha":{"kind":"string","value":"fefe81850e5678450ec0f001f562b182694caadf"},"max_issues_repo_licenses":{"kind":"list like","value":["Apache-2.0","MIT"],"string":"[\n \"Apache-2.0\",\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"lexical-core/src/itoa/naive.rs"},"max_forks_repo_name":{"kind":"string","value":"ignatenkobrain/rust-lexical"},"max_forks_repo_head_hexsha":{"kind":"string","value":"fefe81850e5678450ec0f001f562b182694caadf"},"max_forks_repo_licenses":{"kind":"list like","value":["Apache-2.0","MIT"],"string":"[\n \"Apache-2.0\",\n 
\"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"//! Slow, simple lexical integer-to-string conversion routine.\n\nuse util::*;\n\n// Naive itoa algorithm.\nmacro_rules! naive_algorithm {\n ($value:ident, $radix:ident, $buffer:ident, $index:ident) => ({\n while $value >= $radix {\n let r = ($value % $radix).as_usize();\n $value /= $radix;\n\n // This is always safe, since r must be [0, radix).\n $index -= 1;\n unchecked_index_mut!($buffer[$index] = digit_to_char(r));\n }\n\n // Decode last digit.\n let r = ($value % $radix).as_usize();\n // This is always safe, since r must be [0, radix).\n $index -= 1;\n unchecked_index_mut!($buffer[$index] = digit_to_char(r));\n });\n}\n\n// Naive implementation for radix-N numbers.\n// Precondition: `value` must be non-negative and mutable.\nperftools_inline!{\nfn naive(mut value: T, radix: u32, buffer: &mut [u8])\n -> usize\n where T: UnsignedInteger\n{\n // Decode all but last digit, 1 at a time.\n let mut index = buffer.len();\n let radix: T = as_cast(radix);\n naive_algorithm!(value, radix, buffer, index);\n index\n}}\n\npub(crate) trait Naive {\n // Export integer to string.\n fn naive(self, radix: u32, buffer: &mut [u8]) -> usize;\n}\n\n// Implement naive for type.\nmacro_rules! naive_impl {\n ($($t:ty)*) => ($(\n impl Naive for $t {\n perftools_inline_always!{\n fn naive(self, radix: u32, buffer: &mut [u8]) -> usize {\n naive(self, radix, buffer)\n }}\n }\n )*);\n}\n\nnaive_impl! 
{ u8 u16 u32 u64 usize }\n\n// Naive implementation for 128-bit radix-N numbers.\n// Precondition: `value` must be non-negative and mutable.\nperftools_inline!{\n#[cfg(has_i128)]\nfn naive_u128(value: u128, radix: u32, buffer: &mut [u8])\n -> usize\n{\n // Decode all but last digit, 1 at a time.\n let (divisor, digits_per_iter, d_cltz) = u128_divisor(radix);\n let radix: u64 = as_cast(radix);\n\n // To deal with internal 0 values or values with internal 0 digits set,\n // we store the starting index, and if not all digits are written,\n // we just skip down `digits` digits for the next value.\n let mut index = buffer.len();\n let mut start_index = index;\n let (value, mut low) = u128_divrem(value, divisor, d_cltz);\n naive_algorithm!(low, radix, buffer, index);\n if value != 0 {\n start_index -= digits_per_iter;\n index = index.min(start_index);\n let (value, mut mid) = u128_divrem(value, divisor, d_cltz);\n naive_algorithm!(mid, radix, buffer, index);\n\n if value != 0 {\n start_index -= digits_per_iter;\n index = index.min(start_index);\n let mut high = value as u64;\n naive_algorithm!(high, radix, buffer, index);\n }\n }\n index\n}}\n\n#[cfg(has_i128)]\nimpl Naive for u128 {\n perftools_inline_always!{\n fn naive(self, radix: u32, buffer: &mut [u8]) -> usize {\n naive_u128(self, radix, buffer)\n 
}}\n}\n"},"avg_line_length":{"kind":"number","value":30.0909090909,"string":"30.090909"},"max_line_length":{"kind":"number","value":75,"string":"75"},"alphanum_fraction":{"kind":"number","value":0.5981873112,"string":"0.598187"},"score":{"kind":"number","value":3.28125,"string":"3.28125"}}},{"rowIdx":918,"cells":{"hexsha":{"kind":"string","value":"330e71b3b4bc8bea5b484ce2931ef4411a75120b"},"size":{"kind":"number","value":2311,"string":"2,311"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"2d-lin_sep.py"},"max_stars_repo_name":{"kind":"string","value":"rzepinskip/optimization-svm"},"max_stars_repo_head_hexsha":{"kind":"string","value":"9682980e19d5fc9f09353aa1284e86874e954aec"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"2d-lin_sep.py"},"max_issues_repo_name":{"kind":"string","value":"rzepinskip/optimization-svm"},"max_issues_repo_head_hexsha":{"kind":"string","value":"9682980e19d5fc9f09353aa1284e86874e954aec"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"number","value":2,"string":"2"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2020-01-16T21:35:43.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2020-03-24T18:02:41.000Z"},"max_forks_repo_path":{"kind":"string","value":"2d-lin_sep.py"},"max_forks_repo_name":{"kind":"string","value":"rzepinskip/optimization-svm"},"max_forks_repo_head_hexsha":{"kind":"string","value":"9682980e19d5fc9f09353aa1284e86874e954aec"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"import numpy as np\nfrom matplotlib import pyplot as plt\nfrom optsvm.svm import SVM\n\nx_neg = np.array([[3, 4], [1, 4], [2, 3]])\ny_neg = np.array([-1, -1, -1])\nx_pos = np.array([[6, -1], [7, -1], [5, -3]])\ny_pos = np.array([1, 1, 1])\nx1 = np.linspace(-10, 10)\nx = np.vstack((np.linspace(-10, 10), np.linspace(-10, 10)))\n\n# Data for the next section\nX = np.vstack((x_neg, x_pos))\ny = np.concatenate((y_neg, y_pos))\n\n# Plot\nfig = plt.figure(figsize=(10, 10))\nplt.scatter(x_neg[:, 0], x_neg[:, 1], marker=\"x\", color=\"r\", label=\"Negative -1\")\nplt.scatter(x_pos[:, 0], x_pos[:, 1], marker=\"o\", color=\"b\", label=\"Positive +1\")\nplt.plot(x1, x1 - 3, color=\"darkblue\")\nplt.plot(x1, x1 - 7, linestyle=\"--\", alpha=0.3, color=\"b\")\nplt.plot(x1, x1 + 1, linestyle=\"--\", alpha=0.3, color=\"r\")\nplt.xlim(-2, 12)\nplt.ylim(-7, 7)\nplt.xticks(np.arange(0, 10, step=1))\nplt.yticks(np.arange(-5, 5, step=1))\n\n# Lines\nplt.axvline(0, color=\"black\", alpha=0.5)\nplt.axhline(0, color=\"black\", alpha=0.5)\nplt.plot([2, 6], [3, -1], linestyle=\"-\", color=\"darkblue\", alpha=0.5)\nplt.plot([4, 6], [1, 1], [6, 6], [1, -1], linestyle=\":\", color=\"darkblue\", alpha=0.5)\nplt.plot(\n [0, 1.5], [0, -1.5], [6, 6], [1, -1], linestyle=\":\", color=\"darkblue\", alpha=0.5\n)\n\n# Annotations\nplt.annotate(s=\"$A \\ (6,-1)$\", xy=(5, -1), xytext=(6, -1.5))\nplt.annotate(\n s=\"$B \\ (2,3)$\", xy=(2, 3), xytext=(2, 3.5)\n) # , arrowprops = {'width':.2, 'headwidth':8})\nplt.annotate(s=\"$2$\", xy=(5, 1.2), xytext=(5, 1.2))\nplt.annotate(s=\"$2$\", xy=(6.2, 0.5), xytext=(6.2, 0.5))\nplt.annotate(s=\"$2\\sqrt{2}$\", xy=(4.5, -0.5), xytext=(4.5, -0.5))\nplt.annotate(s=\"$2\\sqrt{2}$\", xy=(2.5, 1.5), xytext=(2.5, 1.5))\nplt.annotate(s=\"$w^Tx + b = 0$\", xy=(8, 4.5), 
xytext=(8, 4.5))\nplt.annotate(\n s=\"$(\\\\frac{1}{4},-\\\\frac{1}{4}) \\\\binom{x_1}{x_2}- \\\\frac{3}{4} = 0$\",\n xy=(7.5, 4),\n xytext=(7.5, 4),\n)\nplt.annotate(s=\"$\\\\frac{3}{\\sqrt{2}}$\", xy=(0.5, -1), xytext=(0.5, -1))\n\n# Labels and show\nplt.xlabel(\"$x_1$\")\nplt.ylabel(\"$x_2$\")\nplt.legend(loc=\"lower right\")\nplt.show()\n\nsvm = SVM(C=10)\n\nsvm.fit(X, y)\n\n# Display results\nprint(\"---Our results\")\nprint(\"w = \", svm.w_.flatten())\nprint(\"b = \", svm.b_)\n\nfrom sklearn.svm import SVC\n\nclf = SVC(C=10, kernel=\"linear\")\nclf.fit(X, y.ravel())\n\nprint(\"---SVM library\")\nprint(\"w = \", clf.coef_)\nprint(\"b = \", clf.intercept_)\n"},"avg_line_length":{"kind":"number","value":30.012987013,"string":"30.012987"},"max_line_length":{"kind":"number","value":85,"string":"85"},"alphanum_fraction":{"kind":"number","value":0.5685850281,"string":"0.568585"},"score":{"kind":"number","value":3.0625,"string":"3.0625"}}},{"rowIdx":919,"cells":{"hexsha":{"kind":"string","value":"aef7e7c2a975620456846d7b4b268a42a96b390e"},"size":{"kind":"number","value":1501,"string":"1,501"},"ext":{"kind":"string","value":"sql"},"lang":{"kind":"string","value":"SQL"},"max_stars_repo_path":{"kind":"string","value":"sql/error_info.sql"},"max_stars_repo_name":{"kind":"string","value":"pllua/pllua-deprecated"},"max_stars_repo_head_hexsha":{"kind":"string","value":"942388ac518b6890063e8158c7f5eb52712a67f2"},"max_stars_repo_licenses":{"kind":"list like","value":["PostgreSQL","Unlicense","MIT"],"string":"[\n \"PostgreSQL\",\n \"Unlicense\",\n 
\"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"sql/error_info.sql"},"max_issues_repo_name":{"kind":"string","value":"pllua/pllua-deprecated"},"max_issues_repo_head_hexsha":{"kind":"string","value":"942388ac518b6890063e8158c7f5eb52712a67f2"},"max_issues_repo_licenses":{"kind":"list like","value":["PostgreSQL","Unlicense","MIT"],"string":"[\n \"PostgreSQL\",\n \"Unlicense\",\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"sql/error_info.sql"},"max_forks_repo_name":{"kind":"string","value":"pllua/pllua-deprecated"},"max_forks_repo_head_hexsha":{"kind":"string","value":"942388ac518b6890063e8158c7f5eb52712a67f2"},"max_forks_repo_licenses":{"kind":"list like","value":["PostgreSQL","Unlicense","MIT"],"string":"[\n \"PostgreSQL\",\n \"Unlicense\",\n \"MIT\"\n]"},"max_forks_count":{"kind":"number","value":1,"string":"1"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2021-06-24T02:03:18.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2021-06-24T02:03:18.000Z"},"content":{"kind":"string","value":"do $$ \nlocal testfunc = function () error(\"my error\") end\nlocal f = function()\n\tlocal status, err = pcall(testfunc)\n\tif (err) then\n\t\terror(err)\n\tend\nend\nf()\n$$language pllua;\n\ncreate or replace function pg_temp.function_with_error() returns integer as $$\n\tlocal testfunc = function () error(\"my error\") end\n\tlocal f = function()\n\t\tlocal status, err = pcall(testfunc)\n\t\tif (err) then\n\t\t\terror(err)\n\t\tend\n\tend\n\tf()\n$$language plluau;\n\ncreate or replace function pg_temp.second_function() returns void as $$\n\tlocal k = server.execute('select 
pg_temp.function_with_error()') [0]\n$$language plluau;\n\ndo $$ \n\tserver.execute('select pg_temp.second_function()') \n$$language pllua;\n\ndo $$\nlocal status, err = subtransaction(function() assert(1==2) end)\nif (err) then\n error(err)\nend\n$$language pllua;\n\ndo $$\ninfo({message=\"info message\", hint=\"info hint\", detail=\"info detail\"})\n$$language pllua;\n\ndo $$\ninfo(\"info message\")\n$$language pllua;\n\ndo $$\nwarning({message=\"warning message\", hint=\"warning hint\", detail=\"warning detail\"})\n$$language pllua;\n\ndo $$\nwarning(\"warning message\")\n$$language pllua;\n\ndo $$\nerror({message=\"error message\", hint=\"error hint\", detail=\"error detail\"})\n$$language pllua;\n\ndo $$\nerror(\"error message\")\n$$language pllua;\n\ndo $$\ninfo()\n$$language pllua;\n\ndo $$\nwarning()\n$$language pllua;\n\ndo $$\nerror()\n$$language pllua;\n\ndo $$\nlocal status, err = subtransaction(function() local _ = fromstring('no_type_text','qwerty') end)\nif (err) then\n print(err)\nend\n$$ language pllua\n\n"},"avg_line_length":{"kind":"number","value":18.5308641975,"string":"18.530864"},"max_line_length":{"kind":"number","value":96,"string":"96"},"alphanum_fraction":{"kind":"number","value":0.690206529,"string":"0.690207"},"score":{"kind":"number","value":3.328125,"string":"3.328125"}}},{"rowIdx":920,"cells":{"hexsha":{"kind":"string","value":"6364a84db8d9c16cc598090ab621288ec02090e2"},"size":{"kind":"number","value":8303,"string":"8,303"},"ext":{"kind":"string","value":"kt"},"lang":{"kind":"string","value":"Kotlin"},"max_stars_repo_path":{"kind":"string","value":"snakebroadcast/src/main/java/com/uiza/sdkbroadcast/helpers/ICameraHelper.kt"},"max_stars_repo_name":{"kind":"string","value":"uizaio/snake.android.sdk"},"max_stars_repo_head_hexsha":{"kind":"string","value":"681adca006f61e38151cf73cf711a951f05c57dc"},"max_stars_repo_licenses":{"kind":"list like","value":["BSD-2-Clause"],"string":"[\n 
\"BSD-2-Clause\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"snakebroadcast/src/main/java/com/uiza/sdkbroadcast/helpers/ICameraHelper.kt"},"max_issues_repo_name":{"kind":"string","value":"uizaio/snake.android.sdk"},"max_issues_repo_head_hexsha":{"kind":"string","value":"681adca006f61e38151cf73cf711a951f05c57dc"},"max_issues_repo_licenses":{"kind":"list like","value":["BSD-2-Clause"],"string":"[\n \"BSD-2-Clause\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"snakebroadcast/src/main/java/com/uiza/sdkbroadcast/helpers/ICameraHelper.kt"},"max_forks_repo_name":{"kind":"string","value":"uizaio/snake.android.sdk"},"max_forks_repo_head_hexsha":{"kind":"string","value":"681adca006f61e38151cf73cf711a951f05c57dc"},"max_forks_repo_licenses":{"kind":"list like","value":["BSD-2-Clause"],"string":"[\n \"BSD-2-Clause\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"package com.uiza.sdkbroadcast.helpers\n\nimport android.content.Context\nimport android.view.MotionEvent\nimport com.pedro.encoder.input.gl.render.filters.BaseFilterRender\nimport com.pedro.encoder.input.video.CameraHelper.Facing\nimport com.pedro.rtplibrary.view.OpenGlView\nimport com.uiza.sdkbroadcast.interfaces.UZCameraChangeListener\nimport com.uiza.sdkbroadcast.interfaces.UZCameraOpenException\nimport com.uiza.sdkbroadcast.interfaces.UZRecordListener\nimport com.uiza.sdkbroadcast.interfaces.UZTakePhotoCallback\nimport com.uiza.sdkbroadcast.profile.AudioAttributes\nimport com.uiza.sdkbroadcast.profile.VideoAttributes\nimport 
com.uiza.sdkbroadcast.profile.VideoSize\nimport java.io.IOException\n\ninterface ICameraHelper {\n val mOpenGlView: OpenGlView?\n\n /**\n * @param reTries retry connect reTries times\n */\n fun setConnectReTries(reTries: Int)\n fun setUZCameraChangeListener(uzCameraChangeListener: UZCameraChangeListener?)\n fun setUZRecordListener(uzRecordListener: UZRecordListener?)\n fun replaceView(openGlView: OpenGlView?)\n fun replaceView(context: Context?)\n fun setVideoAttributes(attributes: VideoAttributes?)\n fun setAudioAttributes(attributes: AudioAttributes?)\n fun setLandscape(landscape: Boolean)\n\n /**\n * Set filter in position 0.\n *\n * @param filterReader filter to set. You can modify parameters to filter after set it to stream.\n */\n fun setFilter(filterReader: BaseFilterRender?)\n\n /**\n * @param filterPosition position of filter\n * @param filterReader filter to set. You can modify parameters to filter after set it to stream.\n */\n fun setFilter(filterPosition: Int, filterReader: BaseFilterRender?)\n\n /**\n * Get Anti alias is enabled.\n *\n * @return true is enabled, false is disabled.\n */\n val isAAEnabled: Boolean\n\n /**\n * Enable or disable Anti aliasing (This method use FXAA).\n *\n * @param aAEnabled true is AA enabled, false is AA disabled. 
False by default.\n */\n fun enableAA(aAEnabled: Boolean)\n\n /**\n * get Stream Width\n */\n val streamWidth: Int\n\n /**\n * get Stream Height\n */\n val streamHeight: Int\n\n /**\n * Enable a muted microphone, can be called before, while and after broadcast.\n */\n fun enableAudio()\n\n /**\n * Mute microphone, can be called before, while and after broadcast.\n */\n fun disableAudio()\n\n /**\n * Get mute state of microphone.\n *\n * @return true if muted, false if enabled\n */\n val isAudioMuted: Boolean\n\n /**\n * You will do a portrait broadcast\n *\n * @return true if success, false if you get a error (Normally because the encoder selected\n * doesn't support any configuration seated or your device hasn't a H264 encoder).\n */\n fun prepareBroadCast(): Boolean\n\n /**\n * @param isLandscape boolean\n * @return true if success, false if you get a error (Normally because the encoder selected\n * doesn't support any configuration seated or your device hasn't a H264 encoder).\n */\n fun prepareBroadCast(isLandscape: Boolean): Boolean\n\n /**\n * Call this method before use [.startBroadCast].\n *\n * @param audioAttributes [AudioAttributes] If null you will do a broadcast without audio.\n * @param videoAttributes [VideoAttributes]\n * @param isLandscape boolean you will broadcast is landscape\n * @return true if success, false if you get a error (Normally because the encoder selected\n * doesn't support any configuration seated or your device hasn't a AAC encoder).\n */\n fun prepareBroadCast(\n audioAttributes: AudioAttributes?,\n videoAttributes: VideoAttributes,\n isLandscape: Boolean\n ): Boolean\n\n /**\n * Get video camera state\n *\n * @return true if disabled, false if enabled\n */\n val isVideoEnabled: Boolean\n\n /**\n * Need be called after [.prepareBroadCast] or/and [.prepareBroadCast].\n *\n * @param broadCastUrl of the broadcast like: rtmp://ip:port/application/stream_name\n *\n *\n * RTMP: rtmp://192.168.1.1:1935/fmp4/live_stream_name\n * 
[.startPreview] to resolution seated in\n * [.prepareBroadCast].\n * If you never startPreview this method [.startPreview] for you to resolution seated in\n * [.prepareBroadCast].\n */\n fun startBroadCast(broadCastUrl: String?)\n\n /**\n * Stop BroadCast started with [.startBroadCast]\n */\n fun stopBroadCast()\n\n /**\n * Get broadcast state.\n *\n * @return true if broadcasting, false if not broadcasting.\n */\n val isBroadCasting: Boolean\n\n /**\n * @return list of [VideoSize]\n */\n val supportedResolutions: List?\n\n /**\n * Switch camera used. Can be called on preview or while stream, ignored with preview off.\n *\n * @throws UZCameraOpenException If the other camera doesn't support same resolution.\n */\n @Throws(UZCameraOpenException::class)\n fun switchCamera()\n\n /**\n * Start camera preview. Ignored, if stream or preview is started.\n * resolution of preview 640x480\n *\n * @param cameraFacing front or back camera. Like: [com.pedro.encoder.input.video.CameraHelper.Facing.BACK]\n * [com.pedro.encoder.input.video.CameraHelper.Facing.FRONT]\n */\n fun startPreview(cameraFacing: Facing?)\n\n /**\n * Start camera preview. Ignored, if stream or preview is started.\n *\n * @param cameraFacing front or back camera. Like: [com.pedro.encoder.input.video.CameraHelper.Facing.BACK]\n * [com.pedro.encoder.input.video.CameraHelper.Facing.FRONT]\n * @param width of preview in px.\n * @param height of preview in px.\n */\n fun startPreview(cameraFacing: Facing?, width: Int, height: Int)\n\n /**\n * is Front Camera\n */\n val isFrontCamera: Boolean\n\n /**\n * check is on preview\n *\n * @return true if onpreview, false if not preview.\n */\n val isOnPreview: Boolean\n\n /**\n * Stop camera preview. Ignored if streaming or already stopped. 
You need call it after\n *\n * stopStream to release camera properly if you will close activity.\n */\n fun stopPreview()\n\n /**\n * Get record state.\n *\n * @return true if recording, false if not recoding.\n */\n val isRecording: Boolean\n\n /**\n * Start record a MP4 video. Need be called while stream.\n *\n * @param savePath where file will be saved.\n * @throws IOException If you init it before start stream.\n */\n @Throws(IOException::class)\n fun startRecord(savePath: String?)\n\n /**\n * Stop record MP4 video started with @startRecord. If you don't call it file will be unreadable.\n */\n fun stopRecord()\n\n /**\n * take a photo\n *\n * @param callback [UZTakePhotoCallback]\n */\n fun takePhoto(callback: UZTakePhotoCallback?)\n\n /**\n * Set video bitrate of H264 in kb while stream.\n *\n * @param bitrate H264 in kb.\n */\n fun setVideoBitrateOnFly(bitrate: Int)\n\n /**\n * @return bitrate in kps\n */\n val bitrate: Int\n fun reTry(delay: Long, reason: String?): Boolean\n\n /**\n * Check support Flashlight\n * if use Camera1 always return false\n *\n * @return true if support, false if not support.\n */\n val isLanternSupported: Boolean\n\n /**\n * required: \n */\n @Throws(Exception::class)\n fun enableLantern()\n\n /**\n * required: \n */\n fun disableLantern()\n val isLanternEnabled: Boolean\n\n /**\n * Return max zoom level\n *\n * @return max zoom level\n */\n val maxZoom: Float\n /**\n * Return current zoom level\n *\n * @return current zoom level\n */\n /**\n * Set zoomIn or zoomOut to camera.\n * Use this method if you use a zoom slider.\n *\n * @param level Expected to be >= 1 and <= max zoom level\n * @see Camera2Base.getMaxZoom\n */\n var zoom: Float\n\n /**\n * Set zoomIn or zoomOut to camera.\n *\n * @param event motion event. 
Expected to get event.getPointerCount() > 1\n */\n fun setZoom(event: MotionEvent?)\n}\n"},"avg_line_length":{"kind":"number","value":28.8298611111,"string":"28.829861"},"max_line_length":{"kind":"number","value":111,"string":"111"},"alphanum_fraction":{"kind":"number","value":0.6574732025,"string":"0.657473"},"score":{"kind":"number","value":3.046875,"string":"3.046875"}}},{"rowIdx":921,"cells":{"hexsha":{"kind":"string","value":"f14ffd020b17c8f1a5a89a1bc7cc4ba4f22a988b"},"size":{"kind":"number","value":38965,"string":"38,965"},"ext":{"kind":"string","value":"rb"},"lang":{"kind":"string","value":"Ruby"},"max_stars_repo_path":{"kind":"string","value":"lib/orm_asciidoctor.rb"},"max_stars_repo_name":{"kind":"string","value":"gvaish/orm_asciidoctor"},"max_stars_repo_head_hexsha":{"kind":"string","value":"f9b2fc999d4c620007462ee7b4bbe71beaf45de6"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":1,"string":"1"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2017-01-16T06:06:53.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2017-01-16T06:06:53.000Z"},"max_issues_repo_path":{"kind":"string","value":"lib/orm_asciidoctor.rb"},"max_issues_repo_name":{"kind":"string","value":"gvaish/orm_asciidoctor"},"max_issues_repo_head_hexsha":{"kind":"string","value":"f9b2fc999d4c620007462ee7b4bbe71beaf45de6"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"lib/orm_asciidoctor.rb"},"max_forks_repo_name":{"kind":"string","value":"gvaish/orm_asciidoctor"},"max_forks_repo_head_hexsha":{"kind":"string","value":"f9b2fc999d4c620007462ee7b4bbe71beaf45de6"},"max_forks_repo_licenses":{"kind":"list 
like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"RUBY_ENGINE = 'unknown' unless defined? RUBY_ENGINE\nrequire 'strscan'\nrequire 'set'\n\n$:.unshift(File.dirname(__FILE__))\n\n# Public: Methods for parsing Asciidoc input files and rendering documents\n# using eRuby templates.\n#\n# Asciidoc documents comprise a header followed by zero or more sections.\n# Sections are composed of blocks of content. For example:\n#\n# = Doc Title\n#\n# == Section 1\n#\n# This is a paragraph block in the first section.\n#\n# == Section 2\n#\n# This section has a paragraph block and an olist block.\n#\n# . Item 1\n# . Item 2\n#\n# Examples:\n#\n# Use built-in templates:\n#\n# lines = File.readlines(\"your_file.asc\")\n# doc = Asciidoctor::Document.new(lines)\n# html = doc.render\n# File.open(\"your_file.html\", \"w+\") do |file|\n# file.puts html\n# end\n#\n# Use custom (Tilt-supported) templates:\n#\n# lines = File.readlines(\"your_file.asc\")\n# doc = Asciidoctor::Document.new(lines, :template_dir => 'templates')\n# html = doc.render\n# File.open(\"your_file.html\", \"w+\") do |file|\n# file.puts html\n# end\nmodule Asciidoctor\n\n module SafeMode\n\n # A safe mode level that disables any of the security features enforced\n # by Asciidoctor (Ruby is still subject to its own restrictions).\n UNSAFE = 0;\n\n # A safe mode level that closely parallels safe mode in AsciiDoc. This value\n # prevents access to files which reside outside of the parent directory of\n # the source file and disables any macro other than the include::[] macro.\n SAFE = 1;\n\n # A safe mode level that disallows the document from setting attributes\n # that would affect the rendering of the document, in addition to all the\n # security features of SafeMode::SAFE. 
For instance, this level disallows\n # changing the backend or the source-highlighter using an attribute defined\n # in the source document. This is the most fundamental level of security\n # for server-side deployments (hence the name).\n SERVER = 10;\n\n # A safe mode level that disallows the document from attempting to read\n # files from the file system and including the contents of them into the\n # document, in additional to all the security features of SafeMode::SERVER.\n # For instance, this level disallows use of the include::[] macro and the\n # embedding of binary content (data uri), stylesheets and JavaScripts\n # referenced by the document.(Asciidoctor and trusted extensions may still\n # be allowed to embed trusted content into the document).\n #\n # Since Asciidoctor is aiming for wide adoption, this level is the default\n # and is recommended for server-side deployments.\n SECURE = 20;\n\n # A planned safe mode level that disallows the use of passthrough macros and\n # prevents the document from setting any known attributes, in addition to all\n # the security features of SafeMode::SECURE.\n #\n # Please note that this level is not currently implemented (and therefore not\n # enforced)!\n #PARANOID = 100;\n\n end\n\n # Flags to control compliance with the behavior of AsciiDoc\n module Compliance\n # AsciiDoc supports both single-line and underlined\n # section titles.\n # This option disables the underlined variant.\n # Compliance value: true\n @underline_style_section_titles = true\n class << self\n attr_accessor :underline_style_section_titles\n end\n\n # Asciidoctor will recognize commonly-used Markdown syntax\n # to the degree it does not interfere with existing\n # AsciiDoc syntax and behavior.\n # Compliance value: false\n @markdown_syntax = true\n class << self\n attr_accessor :markdown_syntax\n end\n end\n\n # The root path of the Asciidoctor gem\n ROOT_PATH = File.expand_path(File.join(File.dirname(__FILE__), '..'))\n\n # Flag to indicate 
whether encoding of external strings needs to be forced to UTF-8\n # _All_ input data must be force encoded to UTF-8 if Encoding.default_external is *not* UTF-8\n # Address failures performing string operations that are reported as \"invalid byte sequence in US-ASCII\" \n # Ruby 1.8 doesn't seem to experience this problem (perhaps because it isn't validating the encodings)\n FORCE_ENCODING = RUBY_VERSION > '1.9' && Encoding.default_external != Encoding::UTF_8\n\n # Flag to indicate that line length should be calculated using a unicode mode hint\n FORCE_UNICODE_LINE_LENGTH = RUBY_VERSION < '1.9'\n\n # The endline character to use when rendering output\n EOL = \"\\n\"\n\n # The default document type\n # Can influence markup generated by render templates\n DEFAULT_DOCTYPE = 'article'\n\n # The backend determines the format of the rendered output, default to htmlbook (for ORM)\n DEFAULT_BACKEND = 'htmlbook'\n\n DEFAULT_STYLESHEET_KEYS = ['', 'DEFAULT'].to_set\n\n DEFAULT_STYLESHEET_NAME = 'asciidoctor.css'\n\n # Pointers to the preferred version for a given backend.\n BACKEND_ALIASES = {\n 'html' => 'html5',\n 'docbook' => 'docbook45',\n 'htmlbook' => 'htmlbook'\n }\n\n # Default page widths for calculating absolute widths\n DEFAULT_PAGE_WIDTHS = {\n 'docbook' => 425\n }\n\n # Default extensions for the respective base backends\n DEFAULT_EXTENSIONS = {\n 'html' => '.html',\n 'htmlbook' => '.html',\n 'docbook' => '.xml',\n 'asciidoc' => '.ad',\n 'markdown' => '.md'\n }\n\n # Set of file extensions recognized as AsciiDoc documents (stored as a truth hash)\n ASCIIDOC_EXTENSIONS = {\n '.asciidoc' => true,\n '.adoc' => true,\n '.ad' => true,\n '.asc' => true,\n '.txt' => true\n }\n\n SECTION_LEVELS = {\n '=' => 0,\n '-' => 1,\n '~' => 2,\n '^' => 3,\n '+' => 4\n }\n\n ADMONITION_STYLES = ['NOTE', 'TIP', 'IMPORTANT', 'WARNING', 'CAUTION'].to_set\n\n PARAGRAPH_STYLES = ['comment', 'example', 'literal', 'listing', 'normal', 'pass', 'quote', 'sidebar', 'source', 'verse', 
'abstract', 'partintro'].to_set\n\n VERBATIM_STYLES = ['literal', 'listing', 'source', 'verse'].to_set\n\n DELIMITED_BLOCKS = {\n '--' => [:open, ['comment', 'example', 'literal', 'listing', 'pass', 'quote', 'sidebar', 'source', 'verse', 'admonition', 'abstract', 'partintro'].to_set],\n '----' => [:listing, ['literal', 'source'].to_set],\n '....' => [:literal, ['listing', 'source'].to_set],\n '====' => [:example, ['admonition'].to_set],\n '****' => [:sidebar, Set.new],\n '____' => [:quote, ['verse'].to_set],\n '\"\"' => [:quote, ['verse'].to_set],\n '++++' => [:pass, Set.new],\n '|===' => [:table, Set.new],\n ',===' => [:table, Set.new],\n ':===' => [:table, Set.new],\n '!===' => [:table, Set.new],\n '////' => [:comment, Set.new],\n '```' => [:fenced_code, Set.new],\n '~~~' => [:fenced_code, Set.new]\n }\n\n DELIMITED_BLOCK_LEADERS = DELIMITED_BLOCKS.keys.map {|key| key[0..1] }.to_set\n\n BREAK_LINES = {\n '\\'' => :ruler,\n '-' => :ruler,\n '*' => :ruler,\n '_' => :ruler,\n '<' => :page_break\n }\n\n #LIST_CONTEXTS = [:ulist, :olist, :dlist, :colist]\n\n NESTABLE_LIST_CONTEXTS = [:ulist, :olist, :dlist]\n\n # TODO validate use of explicit style name above ordered list (this list is for selecting an implicit style)\n ORDERED_LIST_STYLES = [:arabic, :loweralpha, :lowerroman, :upperalpha, :upperroman] #, :lowergreek]\n\n ORDERED_LIST_MARKER_PATTERNS = {\n :arabic => /\\d+[.>]/,\n :loweralpha => /[a-z]\\./,\n :lowerroman => /[ivx]+\\)/,\n :upperalpha => /[A-Z]\\./,\n :upperroman => /[IVX]+\\)/\n #:lowergreek => /[a-z]\\]/\n }\n\n ORDERED_LIST_KEYWORDS = {\n 'loweralpha' => 'a',\n 'lowerroman' => 'i',\n 'upperalpha' => 'A',\n 'upperroman' => 'I'\n #'lowergreek' => 'a'\n #'arabic' => '1'\n #'decimal' => '1'\n }\n\n LIST_CONTINUATION = '+'\n\n LINE_BREAK = ' +'\n\n # attributes which be changed within the content of the document (but not\n # header) because it has semantic meaning; ex. 
numbered\n FLEXIBLE_ATTRIBUTES = %w(numbered)\n\n # NOTE allows for empty space in line as it could be left by the template engine\n BLANK_LINE_PATTERN = /^[[:blank:]]*\\n/\n\n LINE_FEED_ENTITY = '&#10;' # or &#x0A;\n\n # Flags to control compliance with the behavior of AsciiDoc\n COMPLIANCE = {\n # AsciiDoc terminates paragraphs adjacent to\n # block content (delimiter or block attribute list)\n # Compliance value: true\n # TODO what about literal paragraph?\n :block_terminates_paragraph => true,\n\n # AsciiDoc does not treat paragraphs labeled with a\n # verbatim style (literal, listing, source, verse)\n # as verbatim; override this behavior\n # Compliance value: false\n :strict_verbatim_paragraphs => true,\n\n # AsciiDoc allows start and end delimiters around\n # a block to be different lengths\n # this option requires that they be the same\n # Compliance value: false\n :congruent_block_delimiters => true,\n\n # AsciiDoc drops lines that contain references to missing attributes.\n # This behavior is not intuitive to most writers\n # Compliance value: 'drop-line'\n :attribute_missing => 'skip',\n\n # AsciiDoc drops lines that contain an attribute unassignemnt.\n # This behavior may need to be tuned depending on the circumstances.\n # Compliance value: 'drop-line'\n :attribute_undefined => 'drop-line',\n }\n\n # The following pattern, which appears frequently, captures the contents between square brackets,\n # ignoring escaped closing brackets (closing brackets prefixed with a backslash '\\' character)\n #\n # Pattern:\n # (?:\\[((?:\\\\\\]|[^\\]])*?)\\])\n # Matches:\n # [enclosed text here] or [enclosed [text\\] here]\n REGEXP = {\n # NOTE: this is a inline admonition note\n :admonition_inline => /^(#{ADMONITION_STYLES.to_a * '|'}):\\s/,\n\n # [[Foo]]\n :anchor => /^\\[\\[([^\\s\\[\\]]+)\\]\\]$/,\n\n # Foowhatevs [[Bar]]\n :anchor_embedded => /^(.*?)\\s*\\[\\[([^\\[\\]]+)\\]\\]$/,\n\n # [[ref]] (anywhere inline)\n :anchor_macro => 
/\\\\?\\[\\[([\\w\":].*?)\\]\\]/,\n\n # matches any unbounded block delimiter:\n # listing, literal, example, sidebar, quote, passthrough, table, fenced code\n # does not include open block or air quotes\n # TIP position the most common blocks towards the front of the pattern\n :any_blk => %r{^(?:(?:-|\\.|=|\\*|_|\\+|/){4,}|[\\|,;!]={3,}|(?:`|~){3,}.*)$},\n\n # detect a list item of any sort\n # [[:graph:]] is a non-blank character\n :any_list => /^(?:\n [[:blank:]]+[[:graph:]]|\n [[:blank:]]*(?:-|(?:\\*|\\.){1,5}|\\d+\\.|[A-Za-z]\\.|[IVXivx]+\\))[[:blank:]]+[[:graph:]]|\n [[:blank:]]*.*?(?::{2,4}|;;)(?:[[:blank:]]+[[:graph:]]|$)\n )/x,\n\n # :foo: bar\n # :Author: Dan\n # :numbered!:\n # :long-entry: Attribute value lines ending in ' +'\n # are joined together as a single value,\n # collapsing the line breaks and indentation to\n # a single space.\n :attr_entry => /^:(!?\\w.*?):(?:[[:blank:]]+(.*))?$/,\n\n # An attribute list above a block element\n #\n # Can be strictly positional:\n # [quote, Adam Smith, Wealth of Nations]\n # Or can have name/value pairs\n # [NOTE, caption=\"Good to know\"]\n # Can be defined by an attribute\n # [{lead}]\n :blk_attr_list => /^\\[(|[[:blank:]]*[\\w\\{,.#\"'%].*)\\]$/,\n\n # block attribute list or block id (bulk query)\n :attr_line => /^\\[(|[[:blank:]]*[\\w\\{,.#\"'%].*|\\[[^\\[\\]]*\\])\\]$/,\n\n # attribute reference\n # {foo}\n # {counter:pcount:1}\n # {set:foo:bar}\n # {set:name!}\n :attr_ref => /(\\\\)?\\{((set|counter2?):.+?|\\w+(?:[\\-]\\w+)*)(\\\\)?\\}/,\n\n # The author info line the appears immediately following the document title\n # John Doe \n :author_info => /^(\\w[\\w\\-'.]*)(?: +(\\w[\\w\\-'.]*))?(?: +(\\w[\\w\\-'.]*))?(?: +<([^>]+)>)?$/,\n\n # [[[Foo]]] (anywhere inline)\n :biblio_macro => /\\\\?\\[\\[\\[([\\w:][\\w:.-]*?)\\]\\]\\]/,\n\n # callout reference inside literal text\n # <1> (optionally prefixed by //, # or ;; line comment chars)\n # <1> <2> (multiple callouts on one line)\n # (for XML-based 
languages)\n # special characters are already be replaced at this point during render\n :callout_render => /(?:(?:\\/\\/|#|;;) ?)?(\\\\)?&lt;!?(--|)(\\d+)\\2&gt;(?=(?: ?\\\\?&lt;!?\\2\\d+\\2&gt;)*$)/,\n # ...but not while scanning\n :callout_quick_scan => /\\\\?(?=(?: ?\\\\?)*$)/,\n :callout_scan => /(?:(?:\\/\\/|#|;;) ?)?(\\\\)?(?=(?: ?\\\\?)*$)/,\n\n # <1> Foo\n :colist => /^[[:blank:]]+(.*)/,\n\n # ////\n # comment block\n # ////\n :comment_blk => %r{^/{4,}$},\n\n # // (and then whatever)\n :comment => %r{^//(?:[^/]|$)},\n\n # one,two;three;four\n :ssv_or_csv_delim => /,|;/,\n\n # one two\tthree\n :space_delim => /([^\\\\])[[:blank:]]+/,\n\n # Ctrl + Alt+T\n # Ctrl,T\n :kbd_delim => /(?:\\+|,)(?=[[:blank:]]*[^\\1])/,\n\n # one\\ two\\\tthree\n :escaped_space => /\\\\([[:blank:]])/,\n\n # 29\n :digits => /^\\d+$/,\n\n # foo:: || foo::: || foo:::: || foo;;\n # Should be followed by a definition, on the same line...\n # foo:: That which precedes 'bar' (see also, <>)\n # ...or on a separate line\n # foo::\n # That which precedes 'bar' (see also, <>)\n # The term may be an attribute reference\n # {term_foo}:: {def_foo}\n # NOTE negative match for comment line is intentional since that isn't handled when looking for next list item\n # QUESTION should we check for line comment in regex or when scanning the lines?\n :dlist => /^(?!\\/\\/)[[:blank:]]*(.*?)(:{2,4}|;;)(?:[[:blank:]]+(.*))?$/,\n :dlist_siblings => {\n # (?:.*?[^:])? 
- a non-capturing group which grabs longest sequence of characters that doesn't end w/ colon\n '::' => /^(?!\\/\\/)[[:blank:]]*((?:.*[^:])?)(::)(?:[[:blank:]]+(.*))?$/,\n ':::' => /^(?!\\/\\/)[[:blank:]]*((?:.*[^:])?)(:::)(?:[[:blank:]]+(.*))?$/,\n '::::' => /^(?!\\/\\/)[[:blank:]]*((?:.*[^:])?)(::::)(?:[[:blank:]]+(.*))?$/,\n ';;' => /^(?!\\/\\/)[[:blank:]]*(.*)(;;)(?:[[:blank:]]+(.*))?$/\n },\n\n :illegal_sectid_chars => /&(?:[[:alpha:]]+|#[[:digit:]]+|#x[[:alnum:]]+);|\\W+?/,\n\n # footnote:[text]\n # footnoteref:[id,text]\n # footnoteref:[id]\n :footnote_macro => /\\\\?(footnote|footnoteref):\\[((?:\\\\\\]|[^\\]])*?)\\]/,\n\n # gist::123456[]\n :generic_blk_macro => /^(\\w[\\w\\-]*)::(\\S+?)\\[((?:\\\\\\]|[^\\]])*?)\\]$/,\n\n # kbd:[F3]\n # kbd:[Ctrl+Shift+T]\n # kbd:[Ctrl+\\]]\n # kbd:[Ctrl,T]\n # btn:[Save]\n :kbd_btn_macro => /\\\\?(?:kbd|btn):\\[((?:\\\\\\]|[^\\]])+?)\\]/,\n\n # menu:File[New...]\n # menu:View[Page Style > No Style]\n # menu:View[Page Style, No Style]\n :menu_macro => /\\\\?menu:(\\w|\\w.*?\\S)\\[[[:blank:]]*(.+?)?\\]/,\n\n # \"File > New...\"\n :menu_inline_macro => /\\\\?\"(\\w[^\"]*?[[:blank:]]*&gt;[[:blank:]]*[^\"[:blank:]][^\"]*)\"/,\n\n # image::filename.png[Caption]\n # video::http://youtube.com/12345[Cats vs Dogs]\n :media_blk_macro => /^(image|video|audio)::(\\S+?)\\[((?:\\\\\\]|[^\\]])*?)\\]$/,\n\n # image:filename.png[Alt Text]\n # image:http://example.com/images/filename.png[Alt Text]\n # image:filename.png[More [Alt\\] Text] (alt text becomes \"More [Alt] Text\")\n # icon:github[large]\n :image_macro => /\\\\?(?:image|icon):([^:\\[][^\\[]*)\\[((?:\\\\\\]|[^\\]])*?)\\]/,\n\n # indexterm:[Tigers,Big cats]\n # (((Tigers,Big cats)))\n :indexterm_macro => /\\\\?(?:indexterm:(?:\\[((?:\\\\\\]|[^\\]])*?)\\])|\\(\\(\\((.*?)\\)\\)\\)(?!\\)))/m,\n\n # indexterm2:[Tigers]\n # ((Tigers))\n :indexterm2_macro => /\\\\?(?:indexterm2:(?:\\[((?:\\\\\\]|[^\\]])*?)\\])|\\(\\((.*?)\\)\\)(?!\\)))/m,\n\n # whitespace at the beginning of the line\n 
:leading_blanks => /^([[:blank:]]*)/,\n\n # leading parent directory references in path\n :leading_parent_dirs => /^(?:\\.\\.\\/)*/,\n\n # + From the Asciidoc User Guide: \"A plus character preceded by at\n # least one space character at the end of a non-blank line forces\n # a line break. It generates a line break (br) tag for HTML outputs.\n #\n # + (would not match because there's no space before +)\n # + (would match and capture '')\n # Foo + (would and capture 'Foo')\n :line_break => /^(.*)[[:blank:]]\\+$/,\n\n # inline link and some inline link macro\n # FIXME revisit!\n :link_inline => %r{(^|link:|\\s|>|&lt;|[\\(\\)\\[\\]])(\\\\?(?:https?|ftp|irc)://[^\\s\\[\\]<]*[^\\s.,\\[\\]<])(?:\\[((?:\\\\\\]|[^\\]])*?)\\])?},\n\n # inline link macro\n # link:path[label]\n :link_macro => /\\\\?(?:link|mailto):([^\\s\\[]+)(?:\\[((?:\\\\\\]|[^\\]])*?)\\])/,\n\n # inline email address\n # doc.writer@asciidoc.org\n :email_inline => /[\\\\>:]?\\w[\\w.%+-]*@[[:alnum:]][[:alnum:].-]*\\.[[:alpha:]]{2,4}\\b/, \n\n # Foo or one-or-more-spaces-or-tabs then whatever\n :lit_par => /^([[:blank:]]+.*)$/,\n\n # . Foo (up to 5 consecutive dots)\n # 1. Foo (arabic, default)\n # a. Foo (loweralpha)\n # A. Foo (upperalpha)\n # i. Foo (lowerroman)\n # I. 
Foo (upperroman)\n # REVIEW leading space has already been stripped, so may not need in regex\n :olist => /^[[:blank:]]*(\\.{1,5}|\\d+\\.|[A-Za-z]\\.|[IVXivx]+\\))[[:blank:]]+(.*)$/,\n\n # ''' (ruler)\n # <<< (pagebreak)\n :break_line => /^('|<){3,}$/,\n\n # ''' or ' ' ' (ruler)\n # --- or - - - (ruler)\n # *** or * * * (ruler)\n # <<< (pagebreak)\n :break_line_plus => /^(?:'|<){3,}$|^ {0,3}([-\\*_])( *)\\1\\2\\1$/,\n\n # inline passthrough macros\n # +++text+++\n # $$text$$\n # pass:quotes[text]\n :pass_macro => /\\\\?(?:(\\+{3}|\\${2})(.*?)\\1|pass:([a-z,]*)\\[((?:\\\\\\]|[^\\]])*?)\\])/m,\n\n # passthrough macro allowed in value of attribute assignment\n # pass:[text]\n :pass_macro_basic => /^pass:([a-z,]*)\\[(.*)\\]$/,\n\n # inline literal passthrough macro\n # `text`\n :pass_lit => /(^|[^`\\w])(?:\\[([^\\]]+?)\\])?(\\\\?`([^`\\s]|[^`\\s].*?\\S)`)(?![`\\w])/m,\n\n # placeholder for extracted passthrough text\n :pass_placeholder => /\\e(\\d+)\\e/,\n\n # The document revision info line the appears immediately following the\n # document title author info line, if present\n # v1.0, 2013-01-01: Ring in the new year release\n :revision_info => /^(?:\\D*(.*?),)?(?:\\s*(?!:)(.*?))(?:\\s*(?!^):\\s*(.*))?$/,\n\n # \\' within a word\n :single_quote_esc => /(\\w)\\\\'(\\w)/,\n # an alternative if our backend generated single-quoted html/xml attributes\n #:single_quote_esc => /(\\w|=)\\\\'(\\w)/,\n\n # used for sanitizing attribute names\n :illegal_attr_name_chars => /[^\\w\\-]/,\n\n # 1*h,2*,^3e\n :table_colspec => /^(?:(\\d+)\\*)?([<^>](?:\\.[<^>]?)?|(?:[<^>]?\\.)?[<^>])?(\\d+%?)?([a-z])?$/,\n\n # 2.3+<.>m\n # TODO might want to use step-wise scan rather than this mega-regexp\n :table_cellspec => {\n :start => /^[[:blank:]]*(?:(\\d+(?:\\.\\d*)?|(?:\\d*\\.)?\\d+)([*+]))?([<^>](?:\\.[<^>]?)?|(?:[<^>]?\\.)?[<^>])?([a-z])?\\|/,\n :end => /[[:blank:]]+(?:(\\d+(?:\\.\\d*)?|(?:\\d*\\.)?\\d+)([*+]))?([<^>](?:\\.[<^>]?)?|(?:[<^>]?\\.)?[<^>])?([a-z])?$/\n },\n\n # docbook45\n # 
html5\n :trailing_digit => /[[:digit:]]+$/,\n\n # .Foo but not . Foo or ..Foo\n :blk_title => /^\\.([^\\s.].*)$/,\n\n # matches double quoted text, capturing quote char and text (single-line)\n :dbl_quoted => /^(\"|)(.*)\\1$/,\n\n # matches double quoted text, capturing quote char and text (multi-line)\n :m_dbl_quoted => /^(\"|)(.*)\\1$/m,\n\n # == Foo\n # ^ yields a level 2 title\n #\n # == Foo ==\n # ^ also yields a level 2 title\n #\n # both equivalent to this two-line version:\n # Foo\n # ~~~\n #\n # match[1] is the delimiter, whose length determines the level\n # match[2] is the title itself\n # match[3] is an inline anchor, which becomes the section id\n :section_title => /^((?:=|#){1,6})\\s+(\\S.*?)(?:\\s*\\[\\[([^\\[]+)\\]\\])?(?:\\s+\\1)?$/,\n\n # does not begin with a dot and has at least one alphanumeric character\n :section_name => /^((?=.*\\w+.*)[^.].*?)$/,\n\n # ====== || ------ || ~~~~~~ || ^^^^^^ || ++++++\n # TODO build from SECTION_LEVELS keys\n :section_underline => /^(?:=|-|~|\\^|\\+)+$/,\n\n # toc::[]\n # toc::[levels=2]\n :toc => /^toc::\\[(.*?)\\]$/,\n\n # * Foo (up to 5 consecutive asterisks)\n # - Foo\n # REVIEW leading space has already been stripped, so may not need in regex\n :ulist => /^[[:blank:]]*(-|\\*{1,5})[[:blank:]]+(.*)$/,\n\n # inline xref macro\n # <> (special characters have already been escaped, hence the entity references)\n # xref:id[reftext]\n :xref_macro => /\\\\?(?:&lt;&lt;([\\w\":].*?)&gt;&gt;|xref:([\\w\":].*?)\\[(.*?)\\])/m,\n\n # ifdef::basebackend-html[]\n # ifndef::theme[]\n # ifeval::[\"{asciidoctor-version}\" >= \"0.1.0\"]\n # ifdef::asciidoctor[Asciidoctor!]\n # endif::theme[]\n # endif::basebackend-html[]\n # endif::[]\n :ifdef_macro => /^[\\\\]?(ifdef|ifndef|ifeval|endif)::(\\S*?(?:([,\\+])\\S+?)?)\\[(.+)?\\]$/,\n\n # \"{asciidoctor-version}\" >= \"0.1.0\"\n :eval_expr => /^(\\S.*?)[[:blank:]]*(==|!=|<=|>=|<|>)[[:blank:]]*(\\S.*)$/,\n # ...or if we want to be more strict up front about what's on each side\n 
#:eval_expr => /^(true|false|(\"|'|)\\{\\w+(?:\\-\\w+)*\\}\\2|(\"|')[^\\3]*\\3|\\-?\\d+(?:\\.\\d+)*)[[:blank:]]*(==|!=|<=|>=|<|>)[[:blank:]]*(true|false|(\"|'|)\\{\\w+(?:\\-\\w+)*\\}\\6|(\"|')[^\\7]*\\7|\\-?\\d+(?:\\.\\d+)*)$/,\n\n # include::chapter1.ad[]\n # include::example.txt[lines=1;2;5..10]\n :include_macro => /^\\\\?include::([^\\[]+)\\[(.*?)\\]$/,\n\n # http://domain\n # https://domain\n # data:info\n :uri_sniff => %r{\\A[[:alpha:]][[:alnum:].+-]*:/*},\n\n :uri_encode_chars => /[^\\w\\-.!~*';:@=+$,()\\[\\]]/,\n\n :mantitle_manvolnum => /^(.*)\\((.*)\\)$/,\n\n :manname_manpurpose => /^(.*?)[[:blank:]]+-[[:blank:]]+(.*)$/\n }\n\n INTRINSICS = Hash.new{|h,k| STDERR.puts \"Missing intrinsic: #{k.inspect}\"; \"{#{k}}\"}.merge(\n {\n 'startsb' => '[',\n 'endsb' => ']',\n 'brvbar' => '|',\n 'caret' => '^',\n 'asterisk' => '*',\n 'tilde' => '~',\n 'plus' => '&#43;',\n 'apostrophe' => '\\'',\n 'backslash' => '\\\\',\n 'backtick' => '`',\n 'empty' => '',\n 'sp' => ' ',\n 'space' => ' ',\n 'two-colons' => '::',\n 'two-semicolons' => ';;',\n 'nbsp' => '&#160;',\n 'deg' => '&#176;',\n 'zwsp' => '&#8203;',\n 'quot' => '&#34;',\n 'apos' => '&#39;',\n 'lsquo' => '&#8216;',\n 'rsquo' => '&#8217;',\n 'ldquo' => '&#8220;',\n 'rdquo' => '&#8221;',\n 'wj' => '&#8288;',\n 'amp' => '&',\n 'lt' => '<',\n 'gt' => '>'\n }\n )\n\n SPECIAL_CHARS = {\n '<' => '&lt;',\n '>' => '&gt;',\n '&' => '&amp;'\n }\n\n SPECIAL_CHARS_PATTERN = /[#{SPECIAL_CHARS.keys.join}]/\n #SPECIAL_CHARS_PATTERN = /(?:<|>|&(?![[:alpha:]]{2,};|#[[:digit:]]{2,}+;|#x[[:alnum:]]{2,}+;))/\n\n # unconstrained quotes:: can appear anywhere\n # constrained quotes:: must be bordered by non-word characters\n # NOTE these substituions are processed in the order they appear here and\n # the order in which they are replaced is important\n QUOTE_SUBS = [\n\n # **strong**\n [:strong, :unconstrained, /\\\\?(?:\\[([^\\]]+?)\\])?\\*\\*(.+?)\\*\\*/m],\n\n # *strong*\n [:strong, :constrained, 
/(^|[^\\w;:}])(?:\\[([^\\]]+?)\\])?\\*(\\S|\\S.*?\\S)\\*(?=\\W|$)/m],\n\n # ``double-quoted''\n [:double, :constrained, /(^|[^\\w;:}])(?:\\[([^\\]]+?)\\])?``(\\S|\\S.*?\\S)''(?=\\W|$)/m],\n\n # 'emphasis'\n [:emphasis, :constrained, /(^|[^\\w;:}])(?:\\[([^\\]]+?)\\])?'(\\S|\\S.*?\\S)'(?=\\W|$)/m],\n\n # `single-quoted'\n [:single, :constrained, /(^|[^\\w;:}])(?:\\[([^\\]]+?)\\])?`(\\S|\\S.*?\\S)'(?=\\W|$)/m],\n\n # ++monospaced++\n [:monospaced, :unconstrained, /\\\\?(?:\\[([^\\]]+?)\\])?\\+\\+(.+?)\\+\\+/m],\n\n # +monospaced+\n [:monospaced, :constrained, /(^|[^\\w;:}])(?:\\[([^\\]]+?)\\])?\\+(\\S|\\S.*?\\S)\\+(?=\\W|$)/m],\n\n # __emphasis__\n [:emphasis, :unconstrained, /\\\\?(?:\\[([^\\]]+?)\\])?\\_\\_(.+?)\\_\\_/m],\n\n # _emphasis_\n [:emphasis, :constrained, /(^|[^\\w;:}])(?:\\[([^\\]]+?)\\])?_(\\S|\\S.*?\\S)_(?=\\W|$)/m],\n\n # ##unquoted##\n [:none, :unconstrained, /\\\\?(?:\\[([^\\]]+?)\\])?##(.+?)##/m],\n\n # #unquoted#\n [:none, :constrained, /(^|[^\\w;:}])(?:\\[([^\\]]+?)\\])?#(\\S|\\S.*?\\S)#(?=\\W|$)/m],\n\n # ^superscript^\n [:superscript, :unconstrained, /\\\\?(?:\\[([^\\]]+?)\\])?\\^(.+?)\\^/m],\n\n # ~subscript~\n [:subscript, :unconstrained, /\\\\?(?:\\[([^\\]]+?)\\])?\\~(.+?)\\~/m]\n ]\n\n # NOTE in Ruby 1.8.7, [^\\\\] does not match start of line,\n # so we need to match it explicitly\n # order is significant\n REPLACEMENTS = [\n # (C)\n [/\\\\?\\(C\\)/, '&#169;', :none],\n # (R)\n [/\\\\?\\(R\\)/, '&#174;', :none],\n # (TM)\n [/\\\\?\\(TM\\)/, '&#8482;', :none],\n # foo -- bar\n [/(^|\\n| |\\\\)--( |\\n|$)/, '&#8201;&#8212;&#8201;', :none],\n # foo--bar\n [/(\\w)\\\\?--(?=\\w)/, '&#8212;', :leading],\n # ellipsis\n [/\\\\?\\.\\.\\./, '&#8230;', :leading],\n # single quotes\n [/(\\w)\\\\?'(\\w)/, '&#8217;', :bounding],\n # right arrow ->\n [/\\\\?-&gt;/, '&#8594;', :none],\n # right double arrow =>\n [/\\\\?=&gt;/, '&#8658;', :none],\n # left arrow <-\n [/\\\\?&lt;-/, '&#8592;', :none],\n # right left arrow <=\n [/\\\\?&lt;=/, '&#8656;', 
:none],\n # restore entities\n [/\\\\?(&)amp;((?:[[:alpha:]]+|#[[:digit:]]+|#x[[:alnum:]]+);)/, '', :bounding]\n ]\n\n # Public: Parse the AsciiDoc source input into an Asciidoctor::Document\n #\n # Accepts input as an IO (or StringIO), String or String Array object. If the\n # input is a File, information about the file is stored in attributes on the\n # Document object.\n #\n # input - the AsciiDoc source as a IO, String or Array.\n # options - a String, Array or Hash of options to control processing (default: {})\n # String and Array values are converted into a Hash.\n # See Asciidoctor::Document#initialize for details about options.\n #\n # returns the Asciidoctor::Document\n def self.load(input, options = {})\n if (monitor = options.fetch(:monitor, false))\n start = Time.now\n end\n\n attrs = (options[:attributes] ||= {})\n if attrs.is_a?(Hash) || (RUBY_ENGINE == 'jruby' && attrs.is_a?(Java::JavaUtil::Map))\n # all good; placed here as optimization\n elsif attrs.is_a? Array\n attrs = options[:attributes] = attrs.inject({}) do |accum, entry|\n k, v = entry.split '=', 2\n accum[k] = v || ''\n accum\n end\n elsif attrs.is_a? String\n # convert non-escaped spaces into null character, so we split on the\n # correct spaces chars, and restore escaped spaces\n attrs = attrs.gsub(REGEXP[:space_delim], \"\\\\1\\0\").gsub(REGEXP[:escaped_space], '\\1')\n\n attrs = options[:attributes] = attrs.split(\"\\0\").inject({}) do |accum, entry|\n k, v = entry.split '=', 2\n accum[k] = v || ''\n accum\n end\n elsif attrs.respond_to?('keys') && attrs.respond_to?('[]')\n # convert it to a Hash as we know it\n original_attrs = attrs\n attrs = options[:attributes] = {}\n original_attrs.keys.each do |key|\n attrs[key] = original_attrs[key]\n end\n else\n raise ArgumentError, \"illegal type for attributes option: #{attrs.class.ancestors}\"\n end\n\n lines = nil\n if input.is_a? 
File\n lines = input.readlines\n input_mtime = input.mtime\n input_path = File.expand_path(input.path)\n # hold off on setting infile and indir until we get a better sense of their purpose\n attrs['docfile'] = input_path\n attrs['docdir'] = File.dirname(input_path)\n attrs['docname'] = File.basename(input_path, File.extname(input_path))\n attrs['docdate'] = docdate = input_mtime.strftime('%Y-%m-%d')\n attrs['doctime'] = doctime = input_mtime.strftime('%H:%M:%S %Z')\n attrs['docdatetime'] = %(#{docdate} #{doctime})\n elsif input.respond_to?(:readlines)\n input.rewind rescue nil\n lines = input.readlines\n elsif input.is_a?(String)\n lines = input.lines.entries\n elsif input.is_a?(Array)\n lines = input.dup\n else\n raise \"Unsupported input type: #{input.class}\"\n end\n\n if monitor\n read_time = Time.now - start\n start = Time.now\n end\n\n doc = Document.new(lines, options) \n if monitor\n parse_time = Time.now - start\n monitor[:read] = read_time\n monitor[:parse] = parse_time\n monitor[:load] = read_time + parse_time\n end\n doc\n end\n\n # Public: Parse the contents of the AsciiDoc source file into an Asciidoctor::Document\n #\n # Accepts input as an IO, String or String Array object. If the\n # input is a File, information about the file is stored in\n # attributes on the Document.\n #\n # input - the String AsciiDoc source filename\n # options - a String, Array or Hash of options to control processing (default: {})\n # String and Array values are converted into a Hash.\n # See Asciidoctor::Document#initialize for details about options.\n #\n # returns the Asciidoctor::Document\n def self.load_file(filename, options = {})\n Asciidoctor.load(File.new(filename), options)\n end\n\n # Public: Parse the AsciiDoc source input into an Asciidoctor::Document and render it\n # to the specified backend format\n #\n # Accepts input as an IO, String or String Array object. 
If the\n # input is a File, information about the file is stored in\n # attributes on the Document.\n #\n # If the :in_place option is true, and the input is a File, the output is\n # written to a file adjacent to the input file, having an extension that\n # corresponds to the backend format. Otherwise, if the :to_file option is\n # specified, the file is written to that file. If :to_file is not an absolute\n # path, it is resolved relative to :to_dir, if given, otherwise the\n # Document#base_dir. If the target directory does not exist, it will not be\n # created unless the :mkdirs option is set to true. If the file cannot be\n # written because the target directory does not exist, or because it falls\n # outside of the Document#base_dir in safe mode, an IOError is raised.\n #\n # If the output is going to be written to a file, the header and footer are\n # rendered unless specified otherwise (writing to a file implies creating a\n # standalone document). Otherwise, the header and footer are not rendered by\n # default and the rendered output is returned.\n #\n # input - the String AsciiDoc source filename\n # options - a String, Array or Hash of options to control processing (default: {})\n # String and Array values are converted into a Hash.\n # See Asciidoctor::Document#initialize for details about options.\n #\n # returns the Document object if the rendered result String is written to a\n # file, otherwise the rendered result String\n def self.render(input, options = {})\n in_place = options.delete(:in_place) || false\n to_file = options.delete(:to_file)\n to_dir = options.delete(:to_dir)\n mkdirs = options.delete(:mkdirs) || false\n monitor = options.fetch(:monitor, false)\n\n write_in_place = in_place && input.is_a?(File)\n write_to_target = to_file || to_dir\n stream_output = !to_file.nil? 
&& to_file.respond_to?(:write)\n\n if write_in_place && write_to_target\n raise ArgumentError, 'the option :in_place cannot be used with either the :to_dir or :to_file option'\n end\n\n if !options.has_key?(:header_footer) && (write_in_place || write_to_target)\n options[:header_footer] = true\n end\n\n doc = Asciidoctor.load(input, options)\n\n if to_file == '/dev/null'\n return doc\n elsif write_in_place\n to_file = File.join(File.dirname(input.path), \"#{doc.attributes['docname']}#{doc.attributes['outfilesuffix']}\")\n elsif !stream_output && write_to_target\n working_dir = options.has_key?(:base_dir) ? File.expand_path(options[:base_dir]) : File.expand_path(Dir.pwd)\n # QUESTION should the jail be the working_dir or doc.base_dir???\n jail = doc.safe >= SafeMode::SAFE ? working_dir : nil\n if to_dir\n to_dir = doc.normalize_system_path(to_dir, working_dir, jail, :target_name => 'to_dir', :recover => false)\n if to_file\n to_file = doc.normalize_system_path(to_file, to_dir, nil, :target_name => 'to_dir', :recover => false)\n # reestablish to_dir as the final target directory (in the case to_file had directory segments)\n to_dir = File.dirname(to_file)\n else\n to_file = File.join(to_dir, \"#{doc.attributes['docname']}#{doc.attributes['outfilesuffix']}\")\n end\n elsif to_file\n to_file = doc.normalize_system_path(to_file, working_dir, jail, :target_name => 'to_dir', :recover => false)\n # establish to_dir as the final target directory (in the case to_file had directory segments)\n to_dir = File.dirname(to_file)\n end\n\n if !File.directory? 
to_dir\n if mkdirs\n Helpers.require_library 'fileutils'\n FileUtils.mkdir_p to_dir\n else\n raise IOError, \"target directory does not exist: #{to_dir}\"\n end\n end\n end\n\n start = Time.now if monitor\n output = doc.render\n\n if monitor\n render_time = Time.now - start\n monitor[:render] = render_time\n monitor[:load_render] = monitor[:load] + render_time\n end\n\n if to_file\n start = Time.now if monitor\n if stream_output\n to_file.write output.rstrip\n # ensure there's a trailing endline\n to_file.write EOL\n else\n File.open(to_file, 'w') {|file| file.write output }\n # these assignments primarily for testing, diagnostics or reporting\n doc.attributes['outfile'] = outfile = File.expand_path(to_file)\n doc.attributes['outdir'] = File.dirname(outfile)\n end\n if monitor\n write_time = Time.now - start\n monitor[:write] = write_time\n monitor[:total] = monitor[:load_render] + write_time\n end\n\n # NOTE document cannot control this behavior if safe >= SafeMode::SERVER\n if !stream_output && doc.safe < SafeMode::SECURE && (doc.attr? 'basebackend-html') &&\n (doc.attr? 'linkcss') && (doc.attr? 'copycss')\n copy_asciidoctor_stylesheet = DEFAULT_STYLESHEET_KEYS.include?(stylesheet = (doc.attr 'stylesheet'))\n #copy_user_stylesheet = !copy_asciidoctor_stylesheet && (doc.attr? 'copycss')\n copy_coderay_stylesheet = (doc.attr? 'source-highlighter', 'coderay') && (doc.attr 'coderay-css', 'class') == 'class'\n copy_pygments_stylesheet = (doc.attr? 'source-highlighter', 'pygments') && (doc.attr 'pygments-css', 'class') == 'class'\n if copy_asciidoctor_stylesheet || copy_coderay_stylesheet || copy_pygments_stylesheet\n Helpers.require_library 'fileutils'\n outdir = doc.attr('outdir')\n stylesdir = doc.normalize_system_path(doc.attr('stylesdir'), outdir,\n doc.safe >= SafeMode::SAFE ? 
outdir : nil)\n Helpers.mkdir_p stylesdir if mkdirs\n if copy_asciidoctor_stylesheet\n File.open(File.join(stylesdir, DEFAULT_STYLESHEET_NAME), 'w') {|f|\n f.write Asciidoctor::HTML5.default_asciidoctor_stylesheet\n }\n end\n\n #if copy_user_stylesheet\n #end\n\n if copy_coderay_stylesheet\n File.open(File.join(stylesdir, 'asciidoctor-coderay.css'), 'w') {|f|\n f.write Asciidoctor::HTML5.default_coderay_stylesheet\n }\n end\n\n if copy_pygments_stylesheet\n File.open(File.join(stylesdir, 'asciidoctor-pygments.css'), 'w') {|f|\n f.write Asciidoctor::HTML5.pygments_stylesheet(doc.attr 'pygments-style')\n }\n end\n end\n end\n\n if !stream_output && doc.safe < SafeMode::SECURE && (doc.attr? 'basebackend-htmlbook') &&\n (doc.attr? 'linkcss') && (doc.attr? 'copycss')\n copy_asciidoctor_stylesheet = DEFAULT_STYLESHEET_KEYS.include?(stylesheet = (doc.attr 'stylesheet'))\n #copy_user_stylesheet = !copy_asciidoctor_stylesheet && (doc.attr? 'copycss')\n copy_coderay_stylesheet = (doc.attr? 'source-highlighter', 'coderay') && (doc.attr 'coderay-css', 'class') == 'class'\n copy_pygments_stylesheet = (doc.attr? 'source-highlighter', 'pygments') && (doc.attr 'pygments-css', 'class') == 'class'\n if copy_asciidoctor_stylesheet || copy_coderay_stylesheet || copy_pygments_stylesheet\n Helpers.require_library 'fileutils'\n outdir = doc.attr('outdir')\n stylesdir = doc.normalize_system_path(doc.attr('stylesdir'), outdir,\n doc.safe >= SafeMode::SAFE ? 
outdir : nil)\n Helpers.mkdir_p stylesdir if mkdirs\n if copy_asciidoctor_stylesheet\n File.open(File.join(stylesdir, DEFAULT_STYLESHEET_NAME), 'w') {|f|\n f.write Asciidoctor::HTMLBook.default_asciidoctor_stylesheet\n }\n end\n\n #if copy_user_stylesheet\n #end\n\n if copy_coderay_stylesheet\n File.open(File.join(stylesdir, 'asciidoctor-coderay.css'), 'w') {|f|\n f.write Asciidoctor::HTMLBook.default_coderay_stylesheet\n }\n end\n\n if copy_pygments_stylesheet\n File.open(File.join(stylesdir, 'asciidoctor-pygments.css'), 'w') {|f|\n f.write Asciidoctor::HTMLBook.pygments_stylesheet(doc.attr 'pygments-style')\n }\n end\n end\n end\n doc\n else\n output\n end\n end\n\n # Public: Parse the contents of the AsciiDoc source file into an Asciidoctor::Document\n # and render it to the specified backend format\n #\n # input - the String AsciiDoc source filename\n # options - a String, Array or Hash of options to control processing (default: {})\n # String and Array values are converted into a Hash.\n # See Asciidoctor::Document#initialize for details about options.\n #\n # returns the Document object if the rendered result String is written to a\n # file, otherwise the rendered result String\n def self.render_file(filename, options = {})\n Asciidoctor.render(File.new(filename), options)\n end\n\n # modules\n require 'orm_asciidoctor/debug'\n require 'orm_asciidoctor/substituters'\n require 'orm_asciidoctor/helpers'\n\n # abstract classes\n require 'orm_asciidoctor/abstract_node'\n require 'orm_asciidoctor/abstract_block'\n\n # concrete classes\n require 'orm_asciidoctor/attribute_list'\n require 'orm_asciidoctor/backends/base_template'\n require 'orm_asciidoctor/block'\n require 'orm_asciidoctor/callouts'\n require 'orm_asciidoctor/document'\n require 'orm_asciidoctor/inline'\n require 'orm_asciidoctor/lexer'\n require 'orm_asciidoctor/list'\n require 'orm_asciidoctor/path_resolver'\n require 'orm_asciidoctor/reader'\n require 'orm_asciidoctor/renderer'\n require 
'orm_asciidoctor/section'\n require 'orm_asciidoctor/table'\n\n # info\n require 'orm_asciidoctor/version'\nend\n"},"avg_line_length":{"kind":"number","value":36.7594339623,"string":"36.759434"},"max_line_length":{"kind":"number","value":210,"string":"210"},"alphanum_fraction":{"kind":"number","value":0.5649428975,"string":"0.564943"},"score":{"kind":"number","value":3.015625,"string":"3.015625"}}},{"rowIdx":922,"cells":{"hexsha":{"kind":"string","value":"fb8ef83325d631383360c1adf5cff235fb28d90c"},"size":{"kind":"number","value":2710,"string":"2,710"},"ext":{"kind":"string","value":"c"},"lang":{"kind":"string","value":"C"},"max_stars_repo_path":{"kind":"string","value":"src/pal/tests/palsuite/threading/SwitchToThread/test1/test1.c"},"max_stars_repo_name":{"kind":"string","value":"CyberSys/coreclr-mono"},"max_stars_repo_head_hexsha":{"kind":"string","value":"83b2cb83b32faa45b4f790237b5c5e259692294a"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":277,"string":"277"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2015-01-04T20:42:36.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2022-03-21T06:52:03.000Z"},"max_issues_repo_path":{"kind":"string","value":"src/pal/tests/palsuite/threading/SwitchToThread/test1/test1.c"},"max_issues_repo_name":{"kind":"string","value":"CyberSys/coreclr-mono"},"max_issues_repo_head_hexsha":{"kind":"string","value":"83b2cb83b32faa45b4f790237b5c5e259692294a"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_issues_count":{"kind":"number","value":31,"string":"31"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2015-01-05T08:00:38.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2016-01-05T01:18:59.000Z"},"max_forks_repo_path":{"kind":"string","value":"src/pal/tests/palsuite/threading/SwitchToThread/test1/test1.c"},"max_forks_repo_name":{"kind":"string","value":"CyberSys/coreclr-mono"},"max_forks_repo_head_hexsha":{"kind":"string","value":"83b2cb83b32faa45b4f790237b5c5e259692294a"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"number","value":46,"string":"46"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2015-01-21T00:41:59.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2021-03-23T07:00:01.000Z"},"content":{"kind":"string","value":"//\n// Copyright (c) Microsoft. All rights reserved.\n// Licensed under the MIT license. See LICENSE file in the project root for full license information. 
\n//\n\n/*=============================================================================\n**\n** Source: test1.c\n**\n** Purpose: Test to ensure SwitchToThread works, without \n** causing test to hang\n**\n** Dependencies: PAL_Initialize\n** Fail\n** SwitchToThread\n** WaitForMultipleObject\n** CreateThread\n** GetLastError\n** \n\n**\n**===========================================================================*/\n\n\n#include \n#define THREAD_COUNT 10\n#define REPEAT_COUNT 1000\n#define TIMEOUT 60000\nvoid PALAPI Run_Thread(LPVOID lpParam);\n\n/**\n * main\n *\n * executable entry point\n */\nINT __cdecl main( INT argc, CHAR **argv )\n{\n DWORD dwParam;\n HANDLE hThread[THREAD_COUNT];\n DWORD threadId[THREAD_COUNT];\n \n int i = 0; \n int returnCode = 0;\n\n /*PAL initialization */\n if( (PAL_Initialize(argc, argv)) != 0 )\n {\n\t return FAIL;\n }\n\n\n for( i = 0; i < THREAD_COUNT; i++ )\n {\n dwParam = (int) i;\n //Create thread\n hThread[i] = CreateThread(\n NULL, /* no security attributes */\n 0, /* use default stack size */\n (LPTHREAD_START_ROUTINE)Run_Thread,/* thread function */\n (LPVOID)dwParam, /* argument to thread function */\n 0, /* use default creation flags */\n &threadId[i] /* returns the thread identifier*/ \n );\n\n if(hThread[i] == NULL)\n {\n Fail(\"Create Thread failed for iteration %d GetLastError value is %d\\n\", i, GetLastError());\n }\n \n } \n\n\n returnCode = WaitForMultipleObjects(THREAD_COUNT, hThread, TRUE, TIMEOUT);\n if( WAIT_OBJECT_0 != returnCode )\n {\n Trace(\"Wait for Object(s) returned %d, expected value is %d, and GetLastError value is %d\\n\", returnCode, WAIT_OBJECT_0, GetLastError());\n }\n\n PAL_Terminate();\n return PASS;\n\n}\n\nvoid PALAPI Run_Thread (LPVOID lpParam)\n{\n int i = 0;\n int Id=(int)lpParam;\n\n for(i=0; i < REPEAT_COUNT; i++ )\n {\n // No Last Error is set..\n if(!SwitchToThread())\n {\n Trace( \"The operating system did not switch execution to another thread,\"\n \"for thread id[%d], iteration 
[%d]\\n\", Id, i );\n }\n }\n}\n"},"avg_line_length":{"kind":"number","value":27.3737373737,"string":"27.373737"},"max_line_length":{"kind":"number","value":146,"string":"146"},"alphanum_fraction":{"kind":"number","value":0.4826568266,"string":"0.482657"},"score":{"kind":"number","value":3.171875,"string":"3.171875"}}},{"rowIdx":923,"cells":{"hexsha":{"kind":"string","value":"15b1bd569f5f103bf58c3e319ea2f77c1372ccb0"},"size":{"kind":"number","value":2783,"string":"2,783"},"ext":{"kind":"string","value":"kt"},"lang":{"kind":"string","value":"Kotlin"},"max_stars_repo_path":{"kind":"string","value":"lib/src/main/kotlin/com/amarland/iconvector/lib/Utils.kt"},"max_stars_repo_name":{"kind":"string","value":"amarland/iconvector"},"max_stars_repo_head_hexsha":{"kind":"string","value":"e05c63280dc922ce6bd3ea961c53ce1f9a415213"},"max_stars_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"lib/src/main/kotlin/com/amarland/iconvector/lib/Utils.kt"},"max_issues_repo_name":{"kind":"string","value":"amarland/iconvector"},"max_issues_repo_head_hexsha":{"kind":"string","value":"e05c63280dc922ce6bd3ea961c53ce1f9a415213"},"max_issues_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"lib/src/main/kotlin/com/amarland/iconvector/lib/Utils.kt"},"max_forks_repo_name":{"kind":"string","value":"amarland/iconvector"},"max_forks_repo_head_hexsha":{"kind":"string","value":"e05c63280dc922ce6bd3ea961c53ce1f9a415213"},"max_forks_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n 
\"Apache-2.0\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"/*\n * Copyright 2021 Anthony Marland\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage com.amarland.iconvector.lib\n\nimport kotlin.math.roundToInt\nimport com.amarland.iconvector.lib.IconVGIntermediateRepresentation as IR\n\ninternal fun IntArray.insert(index: Int, value: Int): IntArray {\n require(index in 0..size)\n\n return IntArray(size + 1).apply {\n val source = this@insert\n val destination = this\n destination[index] = value\n if (index == 0) {\n System.arraycopy(source, 0, destination, 1, source.size)\n } else {\n System.arraycopy(source, 0, destination, 0, index)\n if (index < source.size - 1) {\n System.arraycopy(source, index, destination, index + 1, source.size - index)\n }\n }\n }\n}\n\ninternal fun FloatArray.insert(index: Int, value: Float): FloatArray {\n require(index in 0..size)\n\n return FloatArray(size + 1).apply {\n val source = this@insert\n val destination = this\n destination[index] = value\n if (index == 0) {\n System.arraycopy(source, 0, destination, 1, source.size)\n } else {\n System.arraycopy(source, 0, destination, 0, index)\n if (index < source.size - 1) {\n System.arraycopy(source, index, destination, index + 1, source.size - index)\n }\n }\n }\n}\n\nfun argbColorToHexString(argb: UInt) =\n '#' + 
((((argb shr 24) / 255U) * (argb and 0x00FFFFFFU)).toString(16)\n .padStart(length = 6, padChar = '0'))\n\nfun Iterable.toSvgPathDataString(decimalPlaces: Int = Int.MAX_VALUE) =\n buildString {\n joinTo(this, separator = \" \") { segment ->\n var index = 0\n segment.arguments.joinToString(\n separator = \" \",\n prefix = \"${segment.command.value} \"\n ) { value ->\n if (segment.command == IR.Path.Command.ARC_TO && (index == 3 || index == 4)) {\n value.roundToInt().toString()\n } else if (decimalPlaces in 0..5) {\n \"%.${decimalPlaces}f\".format(value)\n } else {\n value.toString()\n }.also { index++ }\n }\n }\n }\n"},"avg_line_length":{"kind":"number","value":34.7875,"string":"34.7875"},"max_line_length":{"kind":"number","value":94,"string":"94"},"alphanum_fraction":{"kind":"number","value":0.5889328063,"string":"0.588933"},"score":{"kind":"number","value":3.078125,"string":"3.078125"}}},{"rowIdx":924,"cells":{"hexsha":{"kind":"string","value":"8588ad3fb52fe92ae1e82e6a5e5390afc41987b0"},"size":{"kind":"number","value":3088,"string":"3,088"},"ext":{"kind":"string","value":"js"},"lang":{"kind":"string","value":"JavaScript"},"max_stars_repo_path":{"kind":"string","value":"rollup.config.js"},"max_stars_repo_name":{"kind":"string","value":"icesjs/theme"},"max_stars_repo_head_hexsha":{"kind":"string","value":"cac4488165bc1379d26d948cc8b448d6ef03c53c"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"rollup.config.js"},"max_issues_repo_name":{"kind":"string","value":"icesjs/theme"},"max_issues_repo_head_hexsha":{"kind":"string","value":"cac4488165bc1379d26d948cc8b448d6ef03c53c"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"rollup.config.js"},"max_forks_repo_name":{"kind":"string","value":"icesjs/theme"},"max_forks_repo_head_hexsha":{"kind":"string","value":"cac4488165bc1379d26d948cc8b448d6ef03c53c"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"import * as path from 'path'\nimport * as fs from 'fs'\nimport cp from 'child_process'\nimport typescript from '@rollup/plugin-typescript'\nimport externals from 'rollup-plugin-node-externals'\nimport pkg from './package.json'\n\nconst isEnvDevelopment = process.env.NODE_ENV === 'development'\nconst input = 'src/index.ts'\nconst webpackPlugin = `@ices/theme-webpack-plugin`\nconst sourcemap = !isEnvDevelopment || 'inline'\n\nfunction writeFileSync(filePath, content) {\n const unExistsDirs = []\n let file = filePath\n while (!fs.existsSync((file = path.dirname(file)))) {\n unExistsDirs.unshift(file)\n }\n for (const dir of unExistsDirs) {\n fs.mkdirSync(dir)\n }\n fs.writeFileSync(filePath, content)\n}\n\nfunction makeFakeThemeFile() {\n writeFileSync(\n path.join(path.resolve(path.dirname(pkg.main)), 'theme.js'),\n `// Auto generated code\nthrow new Error(\n \\`Please add ThemeWebpackPlugin from '${webpackPlugin}' to your config of webpack first:\n // webpack.config.js\n const ThemeWebpackPlugin = require('${webpackPlugin}')\n module.exports = {\n plugins: [new ThemeWebpackPlugin()]\n }\\`\n)\n`\n )\n}\n\nfunction makeTypesFile() {\n cp.execSync('yarn types', { stdio: 'ignore' })\n const paths = ['types/react', 'types/vue']\n for (const p of paths) {\n const dir = path.resolve(p)\n if (fs.existsSync(dir)) 
{\n for (const dts of fs.readdirSync(dir)) {\n fs.renameSync(path.join(dir, dts), path.join(path.resolve(p.replace(/^types\\//, '')), dts))\n }\n }\n }\n}\n\nfunction getPlugins(format, makeTypes) {\n return [\n externals({\n builtins: true,\n deps: true,\n peerDeps: true,\n exclude: 'tslib',\n }),\n typescript({\n removeComments: true,\n noUnusedLocals: !isEnvDevelopment,\n target: 'es5',\n }),\n makeTypes && {\n name: 'make-types',\n generateBundle: makeTypesFile,\n },\n ].filter(Boolean)\n}\n\nmakeFakeThemeFile()\n\nexport default [\n {\n input,\n external: ['./theme'],\n output: {\n file: pkg.module,\n format: 'es',\n sourcemap,\n },\n plugins: getPlugins('es'),\n },\n {\n input,\n external: ['./theme'],\n output: {\n file: pkg.main,\n exports: 'auto',\n format: 'cjs',\n sourcemap,\n },\n plugins: getPlugins('cjs'),\n },\n {\n input: 'src/react/index.tsx',\n external: ['../index'],\n output: {\n file: 'react/index.js',\n paths: (id) => {\n if (id === path.resolve('src/index')) {\n return path.relative(path.resolve('react'), path.resolve(pkg.module)).replace(/\\\\/g, '/')\n }\n return id\n },\n format: 'es',\n sourcemap,\n },\n plugins: getPlugins('es'),\n },\n {\n input: 'src/react/index.tsx',\n external: ['../index'],\n output: {\n file: 'react/index.cjs.js',\n paths: (id) => {\n if (id === path.resolve('src/index')) {\n return path.relative(path.resolve('react'), path.resolve(pkg.main)).replace(/\\\\/g, '/')\n }\n return id\n },\n exports: 'auto',\n format: 'cjs',\n sourcemap,\n },\n plugins: getPlugins('cjs', true),\n 
},\n]\n"},"avg_line_length":{"kind":"number","value":23.3939393939,"string":"23.393939"},"max_line_length":{"kind":"number","value":99,"string":"99"},"alphanum_fraction":{"kind":"number","value":0.5913212435,"string":"0.591321"},"score":{"kind":"number","value":3.1875,"string":"3.1875"}}},{"rowIdx":925,"cells":{"hexsha":{"kind":"string","value":"a101f6805c4eee7efe596729930038f01c347ba2"},"size":{"kind":"number","value":2914,"string":"2,914"},"ext":{"kind":"string","value":"asm"},"lang":{"kind":"string","value":"Assembly"},"max_stars_repo_path":{"kind":"string","value":"Mips/fizzbuzz.asm"},"max_stars_repo_name":{"kind":"string","value":"Mystic-Developer/FizzBuzz-EVERYTHING"},"max_stars_repo_head_hexsha":{"kind":"string","value":"2daed09d9d7b5e25e1027b7a740c179e653f74af"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"Mips/fizzbuzz.asm"},"max_issues_repo_name":{"kind":"string","value":"Mystic-Developer/FizzBuzz-EVERYTHING"},"max_issues_repo_head_hexsha":{"kind":"string","value":"2daed09d9d7b5e25e1027b7a740c179e653f74af"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"Mips/fizzbuzz.asm"},"max_forks_repo_name":{"kind":"string","value":"Mystic-Developer/FizzBuzz-EVERYTHING"},"max_forks_repo_head_hexsha":{"kind":"string","value":"2daed09d9d7b5e25e1027b7a740c179e653f74af"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":".data\n fizz: .asciiz \"Fizz\"\n buzz: .asciiz \"Buzz\"\n fizzbuzz: .asciiz \"FizzBuzz\"\n new_line: .asciiz \"\\n\"\n prompt: .asciiz \"Please enter the highest number you want to go for FizzBuzz: \"\n \n.text\n addi $s0, $zero, 5 # s0 = 5;\n addi $s1, $zero, 3\t# s1 = 3;\n addi $t0, $zero, 1\t# t0 = 1; This will be the while loop counter!\n \n # sends user prompt message\n li $v0, 4\n la $a0, prompt\n syscall\n \n # gets number from user and stores it in $v0\n li $v0, 5\n syscall\n \n # moves users input from $v0 to $s4\n move $s4, $v0 \n \n while: \n \tbgt $t0, $s4, exit\t# while t0 < 100\n \t\n \tdiv $t0, $s1\t# divide $t0 by 3\n \tmfhi $s2\t# store the remainder of $t0 / 3 in $s2\n \t\n \tdiv $t0, $s0\t# divide $t0 by 5\n \tmfhi $s3\t# store the remainder of $t0 / 5 in $s3\n \t\n \tbeq $s2, $zero, three_is_good_check_five_for_fizzbuzz # if $s2 is equal to 0, then go check $s3 and see if remainder of 5 checks out for fizzbuzz\t\n \tfive_wasnt_good_continue_looping: # if s3 didn't check out for fizzbuzz return to this point of while loop\n \t\n \tdiv $t0, $s1\t\t# divide $t0 by 3\n\tmfhi $s2\t\t# store the remainder of $t0 / 3 in $s2\n\t\n\tbeq $s2, $zero, print_fizz #if $s2 is equal to 0, then print fizz\n\t\n\tdiv $t0, $s0\t\t# divide $t0 by 5\n\tmfhi $s2\t\t# store the remainder of $t0 / 5 in $s2\n\t\n\tbeq $s2, $zero, print_buzz # if $s2 is equal to 0, then print buzz\n\t\n\t# prints $t0 current number\n \tli $v0, 1\t\n \tadd $a0, $t0, $zero\n \tsyscall\n \t\n \t# prints a newline\n \tli $v0, 4\n \tla $a0, new_line\n \tsyscall\n \t\n \taddi $t0, $t0, 1\t# increases t0 counter by 1\n \t\n \tj while\t\t\t# jumps back to top of while loop\n \n \n three_is_good_check_five_for_fizzbuzz:\n \tbeq $s3, $zero, print_fizzbuzz\t# checks to see if the remainder of t0/5, 
stored in $s3 is 0. If so jumps to print fizzbuzz!\n \t\n \tj five_wasnt_good_continue_looping # if it wasnt 0, then you return to the while loop\n \n print_fizzbuzz:\n \t\n \t# prints fizzbuzz\n \tli $v0, 4\n \tla $a0, fizzbuzz\n \tsyscall\n \t\n \t# prints a newline\n \tli $v0, 4\n \tla $a0, new_line\n \tsyscall \n \t\n \taddi $t0, $t0, 1\t# increases t0 counter by 1\n \t\n \tj while\t\t\t# jumps back to top of while loop\n \n print_fizz:\n \t# prints fizz\n \tli $v0, 4\n \tla $a0, fizz\n \tsyscall\n \t\n \t# prints a newline\n \tli $v0, 4\n \tla $a0, new_line\n \tsyscall \n \t\n \taddi $t0, $t0, 1\t# increases t0 counter by 1\n \t\n \tj while\t\t\t# jumps back to top of while loop\n \n \n print_buzz:\n \t#prints buzz\n \tli $v0, 4\n \tla $a0, buzz\n \tsyscall\n \t\n \t# prints a newline\n \tli $v0, 4\n \tla $a0, new_line\n \tsyscall \n \t\n \taddi $t0, $t0, 1\t# increases t0 counter by 1\n \t\n \tj while\t\t\t# jumps back to top of while loop\n \n \n exit:\n \t# ends program\n \tli $v0, 10\n \tsyscall\t\n \n \n \n \n"},"avg_line_length":{"kind":"number","value":23.6910569106,"string":"23.691057"},"max_line_length":{"kind":"number","value":151,"string":"151"},"alphanum_fraction":{"kind":"number","value":0.5638297872,"string":"0.56383"},"score":{"kind":"number","value":3.171875,"string":"3.171875"}}},{"rowIdx":926,"cells":{"hexsha":{"kind":"string","value":"0b6ffbf766a563164a019a52f34be9e1263ae173"},"size":{"kind":"number","value":4197,"string":"4,197"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"core/env.py"},"max_stars_repo_name":{"kind":"string","value":"ayyuriss/EigenFunctions"},"max_stars_repo_head_hexsha":{"kind":"string","value":"8cb6c22871fcddb633392c0a12691e960dad5143"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"core/env.py"},"max_issues_repo_name":{"kind":"string","value":"ayyuriss/EigenFunctions"},"max_issues_repo_head_hexsha":{"kind":"string","value":"8cb6c22871fcddb633392c0a12691e960dad5143"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"core/env.py"},"max_forks_repo_name":{"kind":"string","value":"ayyuriss/EigenFunctions"},"max_forks_repo_head_hexsha":{"kind":"string","value":"8cb6c22871fcddb633392c0a12691e960dad5143"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"import xxhash\nimport numpy as np\nfrom base.grid import SimpleGRID\nimport scipy.sparse as SP\n\nh = xxhash.xxh64()\n \ns_to_i = lambda x,size : size*x[0]+x[1]\ni_to_s = lambda x,size : (x%size,x//size)\n\ndef hash(x):\n h.reset()\n h.update(x)\n return h.digest()\n\nclass Indexer(object):\n \n def __init__(self):\n self.total = 0\n self.dict = {}\n \n def get(self,hs):\n val = self.dict.get(hs,-1)\n if val == -1:\n val = self.total\n self.dict[hs] = val\n self.total += 1\n return val\n \n def reset(self):\n self.__init__()\n \nclass HashIndexer(object):\n \n def __init__(self):\n self.total = 0\n self.dict = {}\n \n def get(self,state):\n hs=hash(state)\n val = self.dict.get(hs,-1)\n if val == -1:\n val = self.total\n self.dict[hs] = val\n self.total += 1\n return val\n def reset(self):\n self.__init__()\n\ndef get_graph(size):\n 
\n env = SimpleGRID(grid_size=size,max_time=5000)\n input_shape = env.observation_space.shape\n min_batch = size**2-size\n\n indexer = Indexer()\n W = np.zeros((min_batch,min_batch))\n states = np.zeros(min_batch).astype(int)\n data = np.zeros((min_batch,)+input_shape)\n\n while indexer.total 0)[0]\n n_target = float(len(idx_target))\n rank_list = np.array(_score_to_rank(y_score))\n target_rank_list = rank_list[idx_target]\n\n temp_sum_2 = 0\n for target_rank in target_rank_list:\n mm = sum([1 for ii in idx_target\n if rank_list[ii] <= target_rank])/float(target_rank)\n temp_sum_2 += mm\n temp_sum += temp_sum_2/n_target\n\n measure = temp_sum/p\n return measure\n\n\ndef map(Y_target, Y_score):\n return mean_average_precision(Y_target, Y_score)\n\n\ndef map_x(Y_target, Y_score):\n return mean_average_precision(Y_target, Y_score)\n\n\ndef map_y(Y_target, Y_score):\n return mean_average_precision(np.array(Y_target).T,\n np.array(Y_score).T)\n"},"avg_line_length":{"kind":"number","value":24.9136690647,"string":"24.913669"},"max_line_length":{"kind":"number","value":74,"string":"74"},"alphanum_fraction":{"kind":"number","value":0.636442391,"string":"0.636442"},"score":{"kind":"number","value":3.59375,"string":"3.59375"}}},{"rowIdx":928,"cells":{"hexsha":{"kind":"string","value":"dfe7872eca28b5a45f2dcfe0d8b6eff301974230"},"size":{"kind":"number","value":4858,"string":"4,858"},"ext":{"kind":"string","value":"ts"},"lang":{"kind":"string","value":"TypeScript"},"max_stars_repo_path":{"kind":"string","value":"packages/@glimmer/syntax/lib/traversal/traverse.ts"},"max_stars_repo_name":{"kind":"string","value":"cyk/glimmer-vm"},"max_stars_repo_head_hexsha":{"kind":"string","value":"e30b3d64232a0972365c8cbb1dd8e236a0f33dcf"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"packages/@glimmer/syntax/lib/traversal/traverse.ts"},"max_issues_repo_name":{"kind":"string","value":"cyk/glimmer-vm"},"max_issues_repo_head_hexsha":{"kind":"string","value":"e30b3d64232a0972365c8cbb1dd8e236a0f33dcf"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"packages/@glimmer/syntax/lib/traversal/traverse.ts"},"max_forks_repo_name":{"kind":"string","value":"cyk/glimmer-vm"},"max_forks_repo_head_hexsha":{"kind":"string","value":"e30b3d64232a0972365c8cbb1dd8e236a0f33dcf"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"import visitorKeys from '../types/visitor-keys';\nimport {\n cannotRemoveNode,\n cannotReplaceNode,\n cannotReplaceOrRemoveInKeyHandlerYet,\n} from './errors';\nimport { Node, NodeType, ParentNode, ChildKey } from '../types/nodes';\nimport { NodeVisitor, NodeFunction, NodeHandler, KeyFunction, KeyHandler } from '../types/visitor';\n\nfunction getEnterFunction(handler: KeyHandler): KeyFunction | undefined;\nfunction getEnterFunction(handler: NodeHandler): NodeFunction | undefined;\nfunction getEnterFunction(\n handler: NodeHandler | KeyHandler\n): NodeFunction | KeyFunction | undefined {\n return typeof handler === 'function' ? 
handler : handler.enter;\n}\n\nfunction getExitFunction(handler: KeyHandler): KeyFunction | undefined;\nfunction getExitFunction(handler: NodeHandler): NodeFunction | undefined;\nfunction getExitFunction(\n handler: NodeHandler | KeyHandler\n): NodeFunction | KeyFunction | undefined {\n return typeof handler !== 'function' ? handler.exit : undefined;\n}\n\nfunction getKeyHandler(handler: NodeHandler, key: ChildKey): KeyHandler | undefined {\n let keyVisitor = typeof handler !== 'function' ? handler.keys : undefined;\n if (keyVisitor === undefined) return;\n let keyHandler = keyVisitor[key];\n if (keyHandler !== undefined) {\n // widen specific key to all keys\n return keyHandler as KeyHandler;\n }\n return keyVisitor.All;\n}\n\nfunction getNodeHandler(visitor: NodeVisitor, nodeType: NodeType): NodeHandler | undefined {\n let handler = visitor[nodeType];\n if (handler !== undefined) {\n // widen specific Node to all nodes\n return handler as NodeHandler;\n }\n return visitor.All;\n}\n\nfunction visitNode(visitor: NodeVisitor, node: Node): Node | Node[] | undefined | null | void {\n let handler = getNodeHandler(visitor, node.type);\n let enter: NodeFunction | undefined;\n let exit: NodeFunction | undefined;\n\n if (handler !== undefined) {\n enter = getEnterFunction(handler);\n exit = getExitFunction(handler);\n }\n\n let result: Node | Node[] | undefined | null | void;\n if (enter !== undefined) {\n result = enter(node);\n }\n\n if (result !== undefined && result !== null) {\n if (JSON.stringify(node) === JSON.stringify(result)) {\n result = undefined;\n } else if (Array.isArray(result)) {\n return visitArray(visitor, result) || result;\n } else {\n return visitNode(visitor, result) || result;\n }\n }\n\n if (result === undefined) {\n let keys = visitorKeys[node.type];\n\n for (let i = 0; i < keys.length; i++) {\n // we know if it has child keys we can widen to a ParentNode\n visitKey(visitor, handler, node as ParentNode, keys[i]);\n }\n\n if (exit !== undefined) 
{\n result = exit(node);\n }\n }\n\n return result;\n}\n\nfunction visitKey(\n visitor: NodeVisitor,\n handler: NodeHandler | undefined,\n node: ParentNode,\n key: ChildKey\n) {\n let value = node[key] as Node | Node[] | null | undefined;\n if (!value) {\n return;\n }\n\n let keyEnter: KeyFunction | undefined;\n let keyExit: KeyFunction | undefined;\n\n if (handler !== undefined) {\n let keyHandler = getKeyHandler(handler, key);\n if (keyHandler !== undefined) {\n keyEnter = getEnterFunction(keyHandler);\n keyExit = getExitFunction(keyHandler);\n }\n }\n\n if (keyEnter !== undefined) {\n if (keyEnter(node, key) !== undefined) {\n throw cannotReplaceOrRemoveInKeyHandlerYet(node, key);\n }\n }\n\n if (Array.isArray(value)) {\n visitArray(visitor, value);\n } else {\n let result = visitNode(visitor, value);\n if (result !== undefined) {\n assignKey(node, key, result);\n }\n }\n\n if (keyExit !== undefined) {\n if (keyExit(node, key) !== undefined) {\n throw cannotReplaceOrRemoveInKeyHandlerYet(node, key);\n }\n }\n}\n\nfunction visitArray(visitor: NodeVisitor, array: Node[]) {\n for (let i = 0; i < array.length; i++) {\n let result = visitNode(visitor, array[i]);\n if (result !== undefined) {\n i += spliceArray(array, i, result) - 1;\n }\n }\n}\n\nfunction assignKey(node: Node, key: ChildKey, result: Node | Node[] | null) {\n if (result === null) {\n throw cannotRemoveNode(node[key], node, key);\n } else if (Array.isArray(result)) {\n if (result.length === 1) {\n node[key] = result[0];\n } else {\n if (result.length === 0) {\n throw cannotRemoveNode(node[key], node, key);\n } else {\n throw cannotReplaceNode(node[key], node, key);\n }\n }\n } else {\n node[key] = result;\n }\n}\n\nfunction spliceArray(array: Node[], index: number, result: Node | Node[] | null) {\n if (result === null) {\n array.splice(index, 1);\n return 0;\n } else if (Array.isArray(result)) {\n array.splice(index, 1, ...result);\n return result.length;\n } else {\n array.splice(index, 1, result);\n 
return 1;\n }\n}\n\nexport default function traverse(node: Node, visitor: NodeVisitor) {\n visitNode(visitor, node);\n}\n"},"avg_line_length":{"kind":"number","value":27.9195402299,"string":"27.91954"},"max_line_length":{"kind":"number","value":99,"string":"99"},"alphanum_fraction":{"kind":"number","value":0.6566488267,"string":"0.656649"},"score":{"kind":"number","value":3.09375,"string":"3.09375"}}},{"rowIdx":929,"cells":{"hexsha":{"kind":"string","value":"16b7512b11bf197d8a97456fb0175e50565b85ad"},"size":{"kind":"number","value":2218,"string":"2,218"},"ext":{"kind":"string","value":"ts"},"lang":{"kind":"string","value":"TypeScript"},"max_stars_repo_path":{"kind":"string","value":"src/core/virtual-node.ts"},"max_stars_repo_name":{"kind":"string","value":"xyzingh/EvNet"},"max_stars_repo_head_hexsha":{"kind":"string","value":"fbc14dd4ef10b237a322dd54763d25f160f4aa82"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":1,"string":"1"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2020-09-10T07:23:30.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2020-09-10T07:23:30.000Z"},"max_issues_repo_path":{"kind":"string","value":"src/core/virtual-node.ts"},"max_issues_repo_name":{"kind":"string","value":"xyzingh/EvNet"},"max_issues_repo_head_hexsha":{"kind":"string","value":"fbc14dd4ef10b237a322dd54763d25f160f4aa82"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"src/core/virtual-node.ts"},"max_forks_repo_name":{"kind":"string","value":"xyzingh/EvNet"},"max_forks_repo_head_hexsha":{"kind":"string","value":"fbc14dd4ef10b237a322dd54763d25f160f4aa82"},"max_forks_repo_licenses":{"kind":"list 
like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"import {\n VirtualNodeAction,\n VirtualNode,\n Node,\n VirtualNodeActionTypes,\n Port,\n ElementType,\n} from 'core/types';\nimport { isVirtualPort } from 'core/utilities';\nimport { PortSet } from 'core/portset';\nimport { VirtualPort } from 'core/virtual-port';\n\n/**\n * Record the actions taking place on VirtualNode.\n * @internal\n */\nexport class VirtualNodeActionQueue {\n public queue: VirtualNodeAction[] = [];\n\n public add(action: VirtualNodeAction): void {\n this.queue.push(action);\n }\n public shift(): VirtualNodeAction | undefined {\n return this.queue.shift();\n }\n public clear(): VirtualNodeAction[] {\n const result = this.queue;\n this.queue = [];\n return result;\n }\n\n public replaceVirtualNodeWithRealNode(\n virtualNode: VirtualNode,\n realNode: Node,\n doActionsIfPossible = true,\n ): void {\n const len = this.queue.length;\n for (let i = 0; i < len; ++i) {\n const action = this.queue[i];\n switch (action.type) {\n case VirtualNodeActionTypes.PipeAction:\n if (action.from.node === virtualNode) {\n action.from = realNode.ports.get(action.from.name);\n }\n if (action.to.node === virtualNode) {\n action.to = realNode.ports.get(action.to.name);\n }\n if (\n doActionsIfPossible &&\n !isVirtualPort(action.from) &&\n !isVirtualPort(action.to)\n ) {\n this.doAction(action);\n this.queue.splice(i, 1);\n }\n break;\n }\n }\n }\n\n public doAction(action: VirtualNodeAction): void {\n switch (action.type) {\n case VirtualNodeActionTypes.PipeAction:\n (action.from as Port).pipe(action.to as Port);\n break;\n }\n }\n}\n\nexport const virtualNodeActionQueue = new VirtualNodeActionQueue();\n\nexport class NextNode implements VirtualNode {\n public readonly type = ElementType.VirtualNode;\n public readonly brand = 'NextNode';\n 
public readonly ports: PortSet = new PortSet(\n VirtualPort,\n false,\n this,\n );\n}\n"},"avg_line_length":{"kind":"number","value":27.3827160494,"string":"27.382716"},"max_line_length":{"kind":"number","value":74,"string":"74"},"alphanum_fraction":{"kind":"number","value":0.5825067628,"string":"0.582507"},"score":{"kind":"number","value":3.09375,"string":"3.09375"}}},{"rowIdx":930,"cells":{"hexsha":{"kind":"string","value":"b2de089e75f188f3482c29fc33bcbb7a91997599"},"size":{"kind":"number","value":27975,"string":"27,975"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"src/app.py"},"max_stars_repo_name":{"kind":"string","value":"chunyuyuan/NEWS_2019_network-master"},"max_stars_repo_head_hexsha":{"kind":"string","value":"0eec84b383156c82fbd64d900dce578700575d99"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"src/app.py"},"max_issues_repo_name":{"kind":"string","value":"chunyuyuan/NEWS_2019_network-master"},"max_issues_repo_head_hexsha":{"kind":"string","value":"0eec84b383156c82fbd64d900dce578700575d99"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"src/app.py"},"max_forks_repo_name":{"kind":"string","value":"chunyuyuan/NEWS_2019_network-master"},"max_forks_repo_head_hexsha":{"kind":"string","value":"0eec84b383156c82fbd64d900dce578700575d99"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"from flask import Flask, request, render_template, send_file, Response\n\nimport io\n\nimport base64\n\nimport csv\n\nimport json\n\nimport time\n\nfrom collections import OrderedDict\n\nimport numpy\n\nimport pandas as pd\n\nfrom numpy import genfromtxt\n\nfrom flask import jsonify\n\nfrom flask_cors import CORS\n\nfrom LoadingNetwork import EchoWebSocket\n\nimport shutil\n\nimport gc\n\nfrom tornado.wsgi import WSGIContainer\n\nfrom tornado.web import Application, FallbackHandler\n\nfrom tornado.websocket import WebSocketHandler\n\nfrom tornado.ioloop import IOLoop\n\n\n\napp = Flask('flasknado')\n\n#app = Flask(__name__)\n\napp.debug = True\n\nCORS(app)\n\n\n\n\n\n##initial netwrok csv data############################\n\nrawdata = open('NetworkWithDistance.txt')\n\nwith open('NetworkWithDistance.txt') as f:\n\n rawdata = f.readlines()\n\n# you may also want to remove whitespace characters like `\\n` at the end\n\n# of each line\n\nrawdata = [x.strip() for x in rawdata]\n\nmy_data = genfromtxt('networkwithdist.csv', delimiter=',')\n\n# my_data=numpy.delete(my_data,(0),axis=0)\n\nheader = ['id', 'id_to', 'lon', 'lat', 'basinid']\n\nframe = pd.DataFrame(my_data, columns=header)\n\ndata = []\n\nMY_GLOBAL = []\n\nwith open('tempcsv.csv') as f:\n\n for line in f:\n\n temp = line.strip().split(',')\n\n data.append(temp)\n\n#############################\n\ndata1 = []\n\nwith open('MyFile1.txt') as f:\n\n r = 0\n\n for line in f:\n\n if(r > 0):\n\n data2 = []\n\n # print(line)\n\n temp = line.split(\"\\\",\")\n\n data2.append(temp[0][1:])\n\n temp1 = temp[1].split(\",[\")\n\n data2.append(temp1[0])\n\n data2.append(temp1[1][:-2])\n\n data1.append(data2)\n\n r += 1\n\nheader = ['celllist', 'cellid', 'cellto']\n\nframe_celllist = pd.DataFrame(data1, 
columns=header)\n\nframe_celllist = frame_celllist.drop_duplicates()\n\ndel data1[:]\n\n##################\n\ndata_c = []\n\nwith open('powerplant_cell_loc.csv') as f:\n\n r = 0\n\n for line in f:\n\n if(r > 0):\n\n\n\n data_cc = line.split(\",\")\n\n data_c.append(data_cc)\n\n\n\n # print(line)\n\n r += 1\n\nheader = ['cellid', 'loc']\n\nframe_cell = pd.DataFrame(data_c, columns=header)\n\nframe_cell = frame_cell.drop_duplicates()\n\ndel data_c[:]\n\n\n\n########################################################\n\nimport os\n\nimport sys\n\nfrom SimpleHTTPServer import SimpleHTTPRequestHandler\n\nimport BaseHTTPServer\n\n\n\n# class MyHTTPRequestHandler(SimpleHTTPRequestHandler):\n\n# def translate_path(self,path):\n\n# path = SimpleHTTPRequestHandler.translate_path(self,path)\n\n# if os.path.isdir(path):\n\n# for base in \"index\", \"default\":\n\n# for ext in \".html\", \".htm\", \".txt\":\n\n# index = path + \"/\" + base + ext\n\n# if os.path.exists(index):\n\n# return index\n\n# return path\n\n\n\n# def test(HandlerClass = MyHTTPRequestHandler,\n\n# ServerClass = BaseHTTPServer.HTTPServer):\n\n# BaseHTTPServer.test(HandlerClass, ServerClass)\n\n\n\n\n\n##################travesal network upstream############\n\n'''def find_upstream(value):\n\n gc.collect()\n\n ii=0\n\n li = []\n\n temp=[]\n\n a=frame.ix[int(value)]\n\n temp.append(a)\n\n #print(MY_GLOBAL)\n\n MY_GLOBAL[:]=[]\n\n #x=data[int(value)]\n\n\n\n #x=frame[frame['id']==a['id_to']]\n\n #print x\n\n\n\n i=0\n\n z=0\n\n zz=0\n\n while zz 0:\n\n print(templist[z].strip())\n\n curid = templist[z].strip()\n\n if curid != str(dcellid):\n\n z -= 1\n\n else:\n\n print(z)\n\n break\n\n\n\n while z > 0:\n\n curid = templist[z].strip()\n\n # print(curid,templist)\n\n curidloc = frame_cell[frame_cell['cellid'] == curid]['loc'].tolist()\n\n curidloc1 = curidloc[0].split(\"_\")\n\n # print(curidloc1[0],curidloc1[1][:-1],curidloc[0])\n\n\n\n temp = frame_celllist[frame_celllist['cellid']\n\n == 
templist[z].strip()]['cellto'].tolist()\n\n z -= 1\n\n print(temp)\n\n temp = temp[0].split(\",\")\n\n\n\n if len(temp) == 1 and temp[0][:-1] == \"none\":\n\n # print(temp[0])\n\n z -= 1\n\n continue\n\n else:\n\n zz = 0\n\n aaaa = 'false'\n\n while zz < len(temp):\n\n # print(temp[zz],temp)\n\n x = temp[zz]\n\n zz += 1\n\n if zz == len(temp):\n\n if x[:-1] == curid:\n\n aaaa = 'true'\n\n nextloc = frame_cell[frame_cell['cellid']\n\n == x[:-1]]['loc'].tolist()\n\n else:\n\n if x == curid:\n\n aaaa = 'true'\n\n nextloc = frame_cell[frame_cell['cellid']\n\n == x]['loc'].tolist()\n\n if aaaa == 'true':\n\n nextloc1 = nextloc[0].split(\"_\")\n\n\n\n # print(nextloc1[0],nextloc1[1][:-1],nextloc1)\n\n jstring += '{\"type\": \"Feature\",\"geometry\": { \"type\": \"MultiLineString\", \"coordinates\": [[[' + str(curidloc1[0]) + ',' + str(curidloc1[1][:-1]) + '],[' + str(\n\n nextloc1[0]) + ',' + str(nextloc1[1][:-1]) + ']]]},\"properties\": {\"lat\":' + str(curidloc1[1][:-1]) + ',\"lon\": ' + str(curidloc1[0]) + '}},'\n\n # jstring+='{\"type\": \"Feature\",\"geometry\": { \"type\": \"MultiLineString\", \"coordinates\": [[['+str(float(xx['lon']))+','+str(float(xx['lat']))+'],['+str(float(item['lon']))+','+str(float(item['lat']))+']]]},\"properties\": {\"id_to\": '+str(int(xx['id_to']))+',\"id\":'+str(int(xx['id']))+',\"lat\":'+str(float(xx['lat']))+',\"lon\": '+str(float(xx['lon']))+'}},';\n\n print(jstring)\n\n if len(jstring) > 0:\n\n\n\n return jstring[:-1], 200\n\n else:\n\n return jstring, 200\n\n\n\n\n\n\n\n\n\n@app.route(\"/\", methods=['GET', 'POST'])\n\ndef index():\n\n print(request)\n\n return render_template('test1.html')\n\n\n\n\n\n@app.route(\"/api/\", methods=['GET', 'POST'])\n\ndef update():\n\n print(request.method)\n\n if request.method == \"POST\":\n\n\n\n source = request.form[\"source\"]\n\n dist = request.form[\"dist\"]\n\n pic = request.form[\"pic\"]\n\n downfirst = request.form[\"downfirst\"]\n\n\n\n pp = request.form[\"pp\"]\n\n print(pp, source, 
dist, downfirst, pic)\n\n\n\n if(pp == 'yes'):\n\n upstream = request.form[\"upstream\"]\n\n if(upstream == 'yes'):\n\n\n\n ucellid = request.form[\"ucellid\"]\n\n re, ii = find_upstream_pp(ucellid)\n\n # print(re)\n\n return json.dumps(re), ii\n\n\n\n # if(upstream=='no'):\n\n\n\n ### ucellid = request.form[\"ucellid\"]\n\n # dcellid = request.form[\"dcellid\"]\n\n # re,ii=find_downstream_pp(ucellid,dcellid)\n\n # print(re)\n\n\n\n # if(pp=='no'):\n\n source = request.form[\"source\"]\n\n dist = request.form[\"dist\"]\n\n pic = request.form[\"pic\"]\n\n downfirst = request.form[\"downfirst\"]\n\n\n\n #print dist\n\n if(downfirst == 'no'):\n\n if(source == 'yes'):\n\n sourceid = request.form[\"sourceid\"]\n\n #print sourceid\n\n import time\n\n start = time. time()\n\n\n\n re, ii = find_upstream(sourceid)\n\n end = time. time()\n\n #print ii,(end-start)\n\n # print(re)\n\n # print(MY_GLOBAL)\n\n\n\n return json.dumps(re), ii\n\n\n\n if(dist == 'yes'):\n\n distid = request.form[\"distid\"]\n\n sourceid = request.form[\"sourceid\"]\n\n MY_GLOBAL[:] = []\n\n#print distid,sourceid\n\n re, ii = find_downstream(int(distid), int(sourceid))\n\n print (re)\n\n gc.collect()\n\n MY_GLOBAL.append(100)\n\n return json.dumps(re, sort_keys=False, indent=4), ii\n\n if(downfirst == 'yes'):\n\n\n\n if(dist == 'yes'):\n\n distid = request.form[\"distid\"]\n\n sourceid = request.form[\"sourceid\"]\n\n MY_GLOBAL[:] = []\n\n#print distid,sourceid\n\n re, ii = find_downstream1(int(distid))\n\n print (re)\n\n gc.collect()\n\n MY_GLOBAL.append(100)\n\n return json.dumps(re, sort_keys=False, indent=4), ii\n\n\n\n if(pic == 'yes'):\n\n #print request.form\n\n MY_GLOBAL[:] = []\n\n start1 = request.form[\"dist_lat\"]\n\n start2 = request.form[\"dist_lon\"]\n\n goal1 = request.form[\"source_lat\"]\n\n goal2 = request.form[\"source_lon\"]\n\n fromdate = request.form[\"from\"]\n\n todate = request.form[\"to\"]\n\n\n import time\n\n before = time.time()\n\n output, str1, str2, str3 = 
LoadingNetwork.main(\n\n [start1, start2], [goal1, goal2], fromdate, todate, rawdata)\n\n #print str1,str2,str3\n\n after = time.time()\n\n print (\"time,\", after - before)\n\n\n\n if(isinstance(output, str)):\n\n\n return output, 201\n\n else:\n\n # gc.collect()\n\n #print base64.b64encode(output.getvalue())\n\n return base64.b64encode(\n\n output.getvalue()) + \"***\" + str1 + \"***\" + str2 + \"***\" + str3, 200\n\n\n\n\n\n\n\n\nclass WebSocket(WebSocketHandler):\n\n\n\n\n\n def on_message(self, message):\n\n # self.write_message(\"Received: \" + message)\n\n # self.write_message(\"Received2: \" + message)\n\n # m=message.split(\"&\")\n\n \n\n print(\"Received message: \" + m[0])\n\n print(\"Received message: \" + m[1])\n\n print(\"Received message: \" + m[2])\n\n print(\"Received message: \" + m[3])\n\n print(\"Received message: \" + m[4])\n\n print(\"Received message: \" + m[5])\n\n print(\"Received message: \" + m[6])\n\n\n\n m=message[1:-1].split(\"&\")\n\n \n\n source = m[0].split(\"=\")[1]\n\n value = m[1].split(\"=\")[1]\n\n dist = m[2].split(\"=\")[1]\n\n value1 = m[3].split(\"=\")[1]\n\n pic = m[4].split(\"=\")[1]\n\n downfirst = m[5].split(\"=\")[1]\n\n\n\n pp = m[6].split(\"=\")\n\n print(pp, source, dist, downfirst, pic,value,value1)\n\n###################################upstram##########################3\n\n if(downfirst == 'no'):\n\n if(source == 'yes'):\n\n\n##################\n\n gc.collect()\n\n ii = 0\n\n li = []\n\n temp = []\n\n a = frame.ix[int(value)]\n\n temp.append(int(value))\n\n\n\n \n\n\n\n\n\n i = 0\n\n z = 0\n\n zz = 0\n\n\n\n jstring = ''\n\n \n\n while z < len(temp):\n\n\n\n item = frame.ix[temp[z]]\n\n z += 1\n\n\n\n x = data[int(float(item['id']))]\n\n #print x\n\n \n\n i = 1\n\n\n\n while i < len(x):\n\n\n\n\n\n xx = frame.ix[int(float(x[i]))]\n\n jstring += '{\"type\": \"Feature\",\"geometry\": { \"type\": \"MultiLineString\", \"coordinates\": [[[' + str(float(xx['lon'])) + ',' + str(float(xx['lat'])) + '],[' + 
str(float(item['lon'])) + ',' + str(\n\n float(item['lat'])) + ']]]},\"properties\": {\"id_to\": ' + str(int(xx['id_to'])) + ',\"id\":' + str(int(xx['id'])) + ',\"lat\":' + str(float(xx['lat'])) + ',\"lon\": ' + str(float(xx['lon'])) + '}},'\n\n\n\n\n\n\n\n ii += 1\n\n temp.append(int(float(x[i])))\n\n i += 1\n\n if(len(jstring)>1500000):\n\n zz+=5\n\n self.write_message( jstring[:-1])\n\n self.write_message( '~'+str(zz*1.0/100))\n\n jstring = ''\n\n \n\n \n\n self.write_message( jstring[:-1])\n\n self.write_message( '~1')\n\n \n\n \n\n\n\n############################downstream#########################\n\n if(dist == 'yes'):\n\n\n\n########################################################################\n\n ii = 0\n\n li = []\n\n temp = []\n\n jstring = ''\n\n # MY_GLOBAL[:]=[]\n\n a = frame.ix[int(value1)]\n\n temp.append(a)\n\n check = True\n\n z = 0\n\n zz=0\n\n while z < len(temp) and check:\n\n item = temp[z]\n\n z += 1\n\n if(item['id_to'] == int(value)):\n\n check = False\n\n\n x = frame.ix[frame['id'] == item['id_to']]\n\n #print x\n\n\n\n i = 0\n\n while i < len(x):\n\n # d = OrderedDict()\n\n xx = x.ix[x.index[i]]\n\n jstring += '{\"type\": \"Feature\",\"geometry\": { \"type\": \"MultiLineString\", \"coordinates\": [[[' + str(float(xx['lon'])) + ',' + str(float(xx['lat'])) + '],[' + str(float(item['lon'])) + ',' + str(\n\n float(item['lat'])) + ']]]},\"properties\": {\"id_to\": ' + str(int(xx['id_to'])) + ',\"id\":' + str(int(xx['id'])) + ',\"lat\":' + str(float(xx['lat'])) + ',\"lon\": ' + str(float(xx['lon'])) + '}},'\n\n\n\n\n i += 1\n\n ii += 1\n\n\n\n temp.append(xx)\n\n\n if(len(jstring)>150000):\n\n zz+=5\n\n self.write_message( jstring[:-1])\n\n self.write_message( '~'+str(zz*1.0/100))\n\n jstring = ''\n\n \n\n \n\n self.write_message( jstring[:-1])\n\n self.write_message( '~1')\n\n\n\n##########################downfirst##############################################\n\n if(downfirst == 'yes'):\n\n if(dist == 'yes'):\n\n ii = 0\n\n li = []\n\n temp 
= []\n\n jstring = ''\n\n # MY_GLOBAL[:]=[]\n\n a = frame.ix[int(value1)]\n\n temp.append(a)\n\n\n\n z = 0\n\n zz=0\n\n while z < len(temp) :\n\n item = temp[z]\n\n z += 1\n\n\n\n # break\n\n ## print item\n\n # if(item['id']==sourceid):\n\n # check=False\n\n x = frame.ix[frame['id'] == item['id_to']]\n\n #print x\n\n\n\n i = 0\n\n while i < len(x):\n\n # d = OrderedDict()\n\n xx = x.ix[x.index[i]]\n\n jstring += '{\"type\": \"Feature\",\"geometry\": { \"type\": \"MultiLineString\", \"coordinates\": [[[' + str(float(xx['lon'])) + ',' + str(float(xx['lat'])) + '],[' + str(float(item['lon'])) + ',' + str(\n\n float(item['lat'])) + ']]]},\"properties\": {\"id_to\": ' + str(int(xx['id_to'])) + ',\"id\":' + str(int(xx['id'])) + ',\"lat\":' + str(float(xx['lat'])) + ',\"lon\": ' + str(float(xx['lon'])) + '}},'\n\n\n\n # d['type'] = 'Feature'\n\n # d['geometry'] = {\n\n # 'type': 'MultiLineString',\n\n # 'coordinates': [[[float(xx['lon']),float(xx['lat'])],[float(item['lon']), float(item['lat'])]]]\n\n # }\n\n # d['properties'] = { \"id\":int(xx['id']),\"id_to\":int(xx['id_to']),\"lon\": float(xx['lon']),\"lat\": float(xx['lat'])\n\n # }\n\n # li.append(d)\n\n # d=OrderedDict()\n\n i += 1\n\n ii += 1\n\n\n\n temp.append(xx)\n\n # if(item['id']==sourceid):\n\n # check=False\n\n # MY_GLOBAL.append(100)\n\n\n\n # d = OrderedDict()\n\n # d['type'] = 'FeatureCollection'\n\n # d['features'] = li\n\n # print li\n\n # if (check==False):\n\n if(len(jstring)>150000):\n\n zz+=5\n\n self.write_message( jstring[:-1])\n\n self.write_message( '~'+str(zz*1.0/100))\n\n jstring = ''\n\n\n self.write_message( jstring[:-1])\n\n self.write_message( '~1')\n # if(downfirst == 'yes'):\n if(pic == 'yes'):\n\n #print request.form\n\n #\"&dist_lat=\"+dist_lat+\"&dist_lon=\"+dist_lon+\"&source_lat=\"+source_lat+\"&source_lon=\"+source_lon+\"&from=\"+value3.value+\"&to=\"+value4.value); \n#m[6].split(\"=\")\n # start1 = request.form[\"dist_lat\"]\n\n # start2 = request.form[\"dist_lon\"]\n\n # goal1 
= request.form[\"source_lat\"]\n\n # goal2 = request.form[\"source_lon\"]\n\n # fromdate = request.form[\"from\"]\n\n # todate = request.form[\"to\"]\n start1 = m[7].split(\"=\")[1]\n\n start2 = m[8].split(\"=\")[1]\n\n goal1 =m[9].split(\"=\")[1]\n\n goal2 = m[10].split(\"=\")[1]\n\n fromdate = m[11].split(\"=\")[1]\n\n todate = m[12].split(\"=\")[1] \n print(start1,start2,goal1,goal2,fromdate,todate)\n\n\n import time\n\n before = time.time()\n\n output, str1, str2, str3 = LoadingNetwork.main(\n\n [start1, start2], [goal1, goal2], fromdate, todate, rawdata)\n\n #print str1,str2,str3\n # print(output)\n\n after = time.time()\n\n print (\"time,\", after - before)\n\n\n\n # if(isinstance(output, str)):\n\n\n # return output, 201\n\n # else:\n\n # gc.collect()\n\n #print base64.b64encode(output.getvalue())\n\n # return base64.b64encode(\n\n # output.getvalue()) + \"***\" + str1 + \"***\" + str2 + \"***\" + str3, 200\n#\n\n\n\n\n\n\n\n\n\n\n\nif __name__ == \"__main__\":\n\n container = WSGIContainer(app)\n\n server = Application([\n\n (r'/websocket/', WebSocket),\n (r'/we/', EchoWebSocket),\n\n (r'.*', FallbackHandler, dict(fallback=container))\n\n ])\n\n server.listen(5000)\n\n IOLoop.instance().start()\n\n\n\n # 
test()\n\n"},"avg_line_length":{"kind":"number","value":16.9135429262,"string":"16.913543"},"max_line_length":{"kind":"number","value":360,"string":"360"},"alphanum_fraction":{"kind":"number","value":0.4337801609,"string":"0.43378"},"score":{"kind":"number","value":3.484375,"string":"3.484375"}}},{"rowIdx":931,"cells":{"hexsha":{"kind":"string","value":"437bedebb256fe41023d70a8b4d2313d4a24d111"},"size":{"kind":"number","value":11383,"string":"11,383"},"ext":{"kind":"string","value":"go"},"lang":{"kind":"string","value":"Go"},"max_stars_repo_path":{"kind":"string","value":"service/endpoint.go"},"max_stars_repo_name":{"kind":"string","value":"jirenius/rest2res"},"max_stars_repo_head_hexsha":{"kind":"string","value":"9a64fa84fea1eb741368be5875aea096f59ce32a"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":12,"string":"12"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2019-05-07T01:30:30.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2021-12-12T14:57:06.000Z"},"max_issues_repo_path":{"kind":"string","value":"service/endpoint.go"},"max_issues_repo_name":{"kind":"string","value":"jirenius/rest2res"},"max_issues_repo_head_hexsha":{"kind":"string","value":"9a64fa84fea1eb741368be5875aea096f59ce32a"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"service/endpoint.go"},"max_forks_repo_name":{"kind":"string","value":"jirenius/rest2res"},"max_forks_repo_head_hexsha":{"kind":"string","value":"9a64fa84fea1eb741368be5875aea096f59ce32a"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"package service\n\nimport (\n\t\"encoding/json\"\n\t\"errors\"\n\t\"fmt\"\n\t\"io/ioutil\"\n\t\"net/http\"\n\t\"reflect\"\n\t\"strconv\"\n\t\"strings\"\n\t\"sync\"\n\t\"time\"\n\n\tres \"github.com/jirenius/go-res\"\n\t\"github.com/jirenius/timerqueue\"\n)\n\ntype endpoint struct {\n\ts *Service\n\turl string\n\turlParams []string\n\trefreshCount int\n\tcachedURLs map[string]*cachedResponse\n\taccess res.AccessHandler\n\ttimeout time.Duration\n\tgroup string\n\tresetPatterns []string\n\ttq *timerqueue.Queue\n\tmu sync.RWMutex\n\tnode\n}\n\ntype cachedResponse struct {\n\treloads int\n\treqParams map[string]string\n\tcrs map[string]cachedResource\n\trerr *res.Error\n}\n\ntype cachedResource struct {\n\ttyp resourceType\n\tmodel map[string]interface{}\n\tcollection []interface{}\n}\n\ntype resourceType byte\n\nconst defaultRefreshDuration = time.Second * 3\n\nconst (\n\tresourceTypeUnset resourceType = iota\n\tresourceTypeModel\n\tresourceTypeCollection\n)\n\nfunc newEndpoint(s *Service, cep *EndpointCfg) (*endpoint, error) {\n\tif cep.URL == \"\" {\n\t\treturn nil, errors.New(\"missing url\")\n\t}\n\tif cep.Pattern == \"\" {\n\t\treturn nil, errors.New(\"missing pattern\")\n\t}\n\n\turlParams, err := urlParams(cep.URL)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tep := &endpoint{\n\t\ts: s,\n\t\turl: cep.URL,\n\t\turlParams: urlParams,\n\t\trefreshCount: cep.RefreshCount,\n\t\tcachedURLs: make(map[string]*cachedResponse),\n\t\taccess: cep.Access,\n\t\ttimeout: time.Millisecond * time.Duration(cep.Timeout),\n\t}\n\tep.tq = timerqueue.New(ep.handleRefresh, time.Millisecond*time.Duration(cep.RefreshTime))\n\n\treturn ep, nil\n}\n\nfunc (ep *endpoint) handler() res.Handler {\n\treturn res.Handler{\n\t\tAccess: ep.access,\n\t\tGetResource: ep.getResource,\n\t\tGroup: 
ep.url,\n\t}\n}\n\nfunc (ep *endpoint) handleRefresh(i interface{}) {\n\tep.s.Debugf(\"Refreshing %s\", i)\n\n\turl := i.(string)\n\n\t// Check if url is cached\n\tep.mu.RLock()\n\tcresp, ok := ep.cachedURLs[url]\n\tep.mu.RUnlock()\n\tif !ok {\n\t\tep.s.Logf(\"Url %s not found in cache on refresh\", url)\n\t\treturn\n\t}\n\n\tparams := cresp.reqParams\n\n\tep.s.res.WithGroup(url, func(s *res.Service) {\n\t\tcresp.reloads++\n\t\tif cresp.rerr != nil || cresp.reloads > ep.refreshCount {\n\t\t\t// Reset resources\n\t\t\tep.mu.Lock()\n\t\t\tdelete(ep.cachedURLs, url)\n\t\t\tep.mu.Unlock()\n\n\t\t\tresetResources := make([]string, len(ep.resetPatterns))\n\t\t\tfor i, rp := range ep.resetPatterns {\n\t\t\t\tfor _, param := range ep.urlParams {\n\t\t\t\t\trp = strings.Replace(rp, \"${\"+param+\"}\", params[param], 1)\n\t\t\t\t}\n\t\t\t\tresetResources[i] = rp\n\t\t\t}\n\t\t\tep.s.res.Reset(resetResources, nil)\n\t\t\treturn\n\t\t}\n\n\t\tdefer ep.tq.Add(i)\n\n\t\tncresp := ep.getURL(url, params)\n\t\tif ncresp.rerr != nil {\n\t\t\tep.s.Logf(\"Error refreshing url %s:\\n\\t%s\", url, ncresp.rerr.Message)\n\t\t\treturn\n\t\t}\n\n\t\tfor rid, nv := range ncresp.crs {\n\t\t\tv, ok := cresp.crs[rid]\n\t\t\tif ok {\n\t\t\t\tr, err := ep.s.res.Resource(rid)\n\t\t\t\tif err != nil {\n\t\t\t\t\t// This shouldn't be possible. 
Let's panic.\n\t\t\t\t\tpanic(fmt.Sprintf(\"error getting res resource %s:\\n\\t%s\", rid, err))\n\t\t\t\t}\n\n\t\t\t\tupdateResource(v, nv, r)\n\t\t\t\tdelete(cresp.crs, rid)\n\t\t\t}\n\t\t}\n\n\t\t// for rid := range cresp.crs {\n\t\t// \tr, err := ep.s.res.Resource(rid)\n\t\t// \tr.DeleteEvent()\n\t\t// }\n\n\t\t// Replacing the old cachedResources with the new ones\n\t\tcresp.crs = ncresp.crs\n\t})\n}\n\nfunc updateResource(v, nv cachedResource, r res.Resource) {\n\tswitch v.typ {\n\tcase resourceTypeModel:\n\t\tupdateModel(v.model, nv.model, r)\n\tcase resourceTypeCollection:\n\t\tupdateCollection(v.collection, nv.collection, r)\n\t}\n}\n\nfunc updateModel(a, b map[string]interface{}, r res.Resource) {\n\tch := make(map[string]interface{})\n\tfor k := range a {\n\t\tif _, ok := b[k]; !ok {\n\t\t\tch[k] = res.DeleteAction\n\t\t}\n\t}\n\n\tfor k, v := range b {\n\t\tov, ok := a[k]\n\t\tif !(ok && reflect.DeepEqual(v, ov)) {\n\t\t\tch[k] = v\n\t\t}\n\t}\n\n\tr.ChangeEvent(ch)\n}\n\nfunc updateCollection(a, b []interface{}, r res.Resource) {\n\tvar i, j int\n\t// Do a LCS matric calculation\n\t// https://en.wikipedia.org/wiki/Longest_common_subsequence_problem\n\ts := 0\n\tm := len(a)\n\tn := len(b)\n\n\t// Trim of matches at the start and end\n\tfor s < m && s < n && reflect.DeepEqual(a[s], b[s]) {\n\t\ts++\n\t}\n\n\tif s == m && s == n {\n\t\treturn\n\t}\n\n\tfor s < m && s < n && reflect.DeepEqual(a[m-1], b[n-1]) {\n\t\tm--\n\t\tn--\n\t}\n\n\tvar aa, bb []interface{}\n\tif s > 0 || m < len(a) {\n\t\taa = a[s:m]\n\t\tm = m - s\n\t} else {\n\t\taa = a\n\t}\n\tif s > 0 || n < len(b) {\n\t\tbb = b[s:n]\n\t\tn = n - s\n\t} else {\n\t\tbb = b\n\t}\n\n\t// Create matrix and initialize it\n\tw := m + 1\n\tc := make([]int, w*(n+1))\n\n\tfor i = 0; i < m; i++ {\n\t\tfor j = 0; j < n; j++ {\n\t\t\tif reflect.DeepEqual(aa[i], bb[j]) {\n\t\t\t\tc[(i+1)+w*(j+1)] = c[i+w*j] + 1\n\t\t\t} else {\n\t\t\t\tv1 := c[(i+1)+w*j]\n\t\t\t\tv2 := c[i+w*(j+1)]\n\t\t\t\tif v2 > v1 
{\n\t\t\t\t\tc[(i+1)+w*(j+1)] = v2\n\t\t\t\t} else {\n\t\t\t\t\tc[(i+1)+w*(j+1)] = v1\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\tidx := m + s\n\ti = m\n\tj = n\n\trm := 0\n\n\tvar adds [][3]int\n\taddCount := n - c[w*(n+1)-1]\n\tif addCount > 0 {\n\t\tadds = make([][3]int, 0, addCount)\n\t}\nLoop:\n\tfor {\n\t\tm = i - 1\n\t\tn = j - 1\n\t\tswitch {\n\t\tcase i > 0 && j > 0 && reflect.DeepEqual(aa[m], bb[n]):\n\t\t\tidx--\n\t\t\ti--\n\t\t\tj--\n\t\tcase j > 0 && (i == 0 || c[i+w*n] >= c[m+w*j]):\n\t\t\tadds = append(adds, [3]int{n, idx, rm})\n\t\t\tj--\n\t\tcase i > 0 && (j == 0 || c[i+w*n] < c[m+w*j]):\n\t\t\tidx--\n\t\t\tr.RemoveEvent(idx)\n\t\t\trm++\n\t\t\ti--\n\t\tdefault:\n\t\t\tbreak Loop\n\t\t}\n\t}\n\n\t// Do the adds\n\tl := len(adds) - 1\n\tfor i := l; i >= 0; i-- {\n\t\tadd := adds[i]\n\t\tr.AddEvent(bb[add[0]], add[1]-rm+add[2]+l-i)\n\t}\n}\n\nfunc (ep *endpoint) getResource(r res.GetRequest) {\n\t// Replace param placeholders\n\turl := ep.url\n\tfor _, param := range ep.urlParams {\n\t\turl = strings.Replace(url, \"${\"+param+\"}\", r.PathParam(param), 1)\n\t}\n\n\t// Check if url is cached\n\tep.mu.RLock()\n\tcresp, ok := ep.cachedURLs[url]\n\tep.mu.RUnlock()\n\tif !ok {\n\t\tif ep.timeout > 0 {\n\t\t\tr.Timeout(ep.timeout)\n\t\t}\n\t\tcresp = ep.cacheURL(url, r.PathParams())\n\t}\n\n\t// Return any encountered error when getting the endpoint\n\tif cresp.rerr != nil {\n\t\tr.Error(cresp.rerr)\n\t\treturn\n\t}\n\n\t// Check if resource exists\n\tcr, ok := cresp.crs[r.ResourceName()]\n\tif !ok {\n\t\tr.NotFound()\n\t\treturn\n\t}\n\n\tswitch cr.typ {\n\tcase resourceTypeModel:\n\t\tr.Model(cr.model)\n\tcase resourceTypeCollection:\n\t\tr.Collection(cr.collection)\n\t}\n}\n\nfunc (ep *endpoint) cacheURL(url string, reqParams map[string]string) *cachedResponse {\n\tcresp := ep.getURL(url, reqParams)\n\tep.mu.Lock()\n\tep.cachedURLs[url] = cresp\n\tep.mu.Unlock()\n\tep.tq.Add(url)\n\n\treturn cresp\n}\n\nfunc (ep *endpoint) getURL(url string, reqParams 
map[string]string) *cachedResponse {\n\tcr := cachedResponse{reqParams: reqParams}\n\t// Make HTTP request\n\tresp, err := http.Get(url)\n\tif err != nil {\n\t\tep.s.Debugf(\"Error fetching endpoint: %s\\n\\t%s\", url, err)\n\t\tcr.rerr = res.InternalError(err)\n\t\treturn &cr\n\t}\n\tdefer resp.Body.Close()\n\n\t// Handle non-2XX status codes\n\tif resp.StatusCode == 404 {\n\t\tcr.rerr = res.ErrNotFound\n\t\treturn &cr\n\t}\n\tif resp.StatusCode < 200 || resp.StatusCode >= 300 {\n\t\tcr.rerr = res.InternalError(fmt.Errorf(\"unexpected response code: %d\", resp.StatusCode))\n\t\treturn &cr\n\t}\n\n\t// Read body\n\tbody, err := ioutil.ReadAll(resp.Body)\n\tif err != nil {\n\t\tcr.rerr = res.InternalError(err)\n\t\treturn &cr\n\t}\n\t// Unmarshal body\n\tvar v value\n\tif err = json.Unmarshal(body, &v); err != nil {\n\t\tcr.rerr = res.InternalError(err)\n\t\treturn &cr\n\t}\n\n\t// Traverse the data\n\tcrs := make(map[string]cachedResource)\n\terr = ep.traverse(crs, v, nil, reqParams)\n\tif err != nil {\n\t\tcr.rerr = res.InternalError(fmt.Errorf(\"invalid data structure for %s: %s\", url, err))\n\t\treturn &cr\n\t}\n\n\tcr.crs = crs\n\treturn &cr\n}\n\nfunc (ep *endpoint) traverse(crs map[string]cachedResource, v value, path []string, reqParams map[string]string) error {\n\tvar err error\n\tswitch v.typ {\n\tcase valueTypeObject:\n\t\t_, err = traverseModel(crs, v, path, &ep.node, reqParams, \"\")\n\tcase valueTypeArray:\n\t\t_, err = traverseCollection(crs, v, path, &ep.node, reqParams, \"\")\n\tdefault:\n\t\treturn errors.New(\"endpoint didn't respond with a json object or array\")\n\t}\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}\n\nfunc traverseModel(crs map[string]cachedResource, v value, path []string, n *node, reqParams map[string]string, pathPart string) (res.Ref, error) {\n\tif n.typ != resourceTypeModel {\n\t\treturn \"\", fmt.Errorf(\"expected a model at %s\", pathStr(path))\n\t}\n\n\t// Append path part\n\tswitch n.ptyp {\n\tcase 
pathTypeDefault:\n\t\tpath = append(path, pathPart)\n\tcase pathTypeProperty:\n\t\tidv, ok := v.obj[n.idProp]\n\t\tif !ok {\n\t\t\treturn \"\", fmt.Errorf(\"missing id property %s at:\\n\\t%s\", n.idProp, pathStr(path))\n\t\t}\n\t\tswitch idv.typ {\n\t\tcase valueTypeString:\n\t\t\tvar idstr string\n\t\t\terr := json.Unmarshal(idv.raw, &idstr)\n\t\t\tif err != nil {\n\t\t\t\treturn \"\", err\n\t\t\t}\n\t\t\tpath = append(path, idstr)\n\t\tcase valueTypeNumber:\n\t\t\tpath = append(path, string(idv.raw))\n\t\tdefault:\n\t\t\treturn \"\", fmt.Errorf(\"invalid id value for property %s at:\\n\\t%s\", n.idProp, pathStr(path))\n\t\t}\n\t\tpath = append(path)\n\t}\n\n\tmodel := make(map[string]interface{})\n\tfor k, kv := range v.obj {\n\t\t// Get next node\n\t\tnext := n.nodes[k]\n\t\tif next == nil {\n\t\t\tnext = n.param\n\t\t}\n\n\t\tswitch kv.typ {\n\t\tcase valueTypeObject:\n\t\t\tif next != nil {\n\t\t\t\tref, err := traverseModel(crs, kv, path, next, reqParams, k)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn \"\", err\n\t\t\t\t}\n\t\t\t\tmodel[k] = ref\n\t\t\t}\n\t\tcase valueTypeArray:\n\t\t\tif next != nil {\n\t\t\t\tref, err := traverseCollection(crs, kv, path, next, reqParams, k)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn \"\", err\n\t\t\t\t}\n\t\t\t\tmodel[k] = ref\n\t\t\t}\n\t\tdefault:\n\t\t\tif next != nil {\n\t\t\t\treturn \"\", fmt.Errorf(\"unexpected primitive value for property %s at %s\", k, pathStr(path))\n\t\t\t}\n\t\t\tmodel[k] = kv\n\t\t}\n\t}\n\n\t// Create rid\n\tp := make([]interface{}, len(n.params))\n\tfor j, pp := range n.params {\n\t\tswitch pp.typ {\n\t\tcase paramTypeURL:\n\t\t\tp[j] = reqParams[pp.name]\n\t\tcase paramTypePath:\n\t\t\tp[j] = path[pp.idx]\n\t\t}\n\t}\n\trid := fmt.Sprintf(n.pattern, p...)\n\n\tcrs[rid] = cachedResource{\n\t\ttyp: resourceTypeModel,\n\t\tmodel: model,\n\t}\n\treturn res.Ref(rid), nil\n}\n\nfunc traverseCollection(crs map[string]cachedResource, v value, path []string, n *node, reqParams map[string]string, 
pathPart string) (res.Ref, error) {\n\tif n.typ != resourceTypeCollection {\n\t\treturn \"\", fmt.Errorf(\"expected a collection at %s\", pathStr(path))\n\t}\n\n\tif n.ptyp != pathTypeRoot {\n\t\t// Append path part\n\t\tpath = append(path, pathPart)\n\t}\n\n\tcollection := make([]interface{}, len(v.arr))\n\tfor j, kv := range v.arr {\n\t\tnext := n.param\n\n\t\tswitch kv.typ {\n\t\tcase valueTypeObject:\n\t\t\tif next != nil {\n\t\t\t\tref, err := traverseModel(crs, kv, path, next, reqParams, strconv.Itoa(j))\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn \"\", err\n\t\t\t\t}\n\t\t\t\tcollection[j] = ref\n\t\t\t}\n\t\tcase valueTypeArray:\n\t\t\tif next != nil {\n\t\t\t\tref, err := traverseCollection(crs, kv, path, next, reqParams, strconv.Itoa(j))\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn \"\", err\n\t\t\t\t}\n\t\t\t\tcollection[j] = ref\n\t\t\t}\n\t\tdefault:\n\t\t\tif next != nil {\n\t\t\t\treturn \"\", fmt.Errorf(\"unexpected primitive value for element %d at %s\", j, pathStr(path))\n\t\t\t}\n\t\t\tcollection[j] = kv\n\t\t}\n\t}\n\n\t// Create rid\n\tp := make([]interface{}, len(n.params))\n\tfor k, pp := range n.params {\n\t\tswitch pp.typ {\n\t\tcase paramTypeURL:\n\t\t\tp[k] = reqParams[pp.name]\n\t\tcase paramTypePath:\n\t\t\tp[k] = path[pp.idx]\n\t\t}\n\t}\n\trid := fmt.Sprintf(n.pattern, p...)\n\n\tcrs[rid] = cachedResource{\n\t\ttyp: resourceTypeCollection,\n\t\tcollection: collection,\n\t}\n\treturn res.Ref(rid), nil\n}\n\nfunc pathStr(path []string) string {\n\tif len(path) == 0 {\n\t\treturn \"endpoint root\"\n\t}\n\treturn strings.Join(path, 
\".\")\n}\n"},"avg_line_length":{"kind":"number","value":21.1187384045,"string":"21.118738"},"max_line_length":{"kind":"number","value":152,"string":"152"},"alphanum_fraction":{"kind":"number","value":0.6128437143,"string":"0.612844"},"score":{"kind":"number","value":3.125,"string":"3.125"}}},{"rowIdx":932,"cells":{"hexsha":{"kind":"string","value":"77d7b261ad1febba3308206e114ebff010e09836"},"size":{"kind":"number","value":6061,"string":"6,061"},"ext":{"kind":"string","value":"rs"},"lang":{"kind":"string","value":"Rust"},"max_stars_repo_path":{"kind":"string","value":"src/lib.rs"},"max_stars_repo_name":{"kind":"string","value":"y-fujii/nanore"},"max_stars_repo_head_hexsha":{"kind":"string","value":"e9ee360de11c49109a45829cb85d4c21db270e22"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"src/lib.rs"},"max_issues_repo_name":{"kind":"string","value":"y-fujii/nanore"},"max_issues_repo_head_hexsha":{"kind":"string","value":"e9ee360de11c49109a45829cb85d4c21db270e22"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"src/lib.rs"},"max_forks_repo_name":{"kind":"string","value":"y-fujii/nanore"},"max_forks_repo_head_hexsha":{"kind":"string","value":"e9ee360de11c49109a45829cb85d4c21db270e22"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"// (c) Yasuhiro Fujii , under MIT 
License.\nuse std::*;\n\n\npub enum RegEx<'a, T, U: Copy = ()> {\n\tEps,\n\tAtom( Box bool> ),\n\tAlt( Box>, Box> ),\n\tSeq( Box>, Box>, usize ),\n\tRepeat( Box>, usize ),\n\tWeight( isize ),\n\tMark( U ),\n}\n\nimpl<'a, T, U: Copy> ops::Add for Box> {\n\ttype Output = Box>;\n\n\tfn add( self, other: Self ) -> Self::Output {\n\t\tBox::new( RegEx::Alt( self, other ) )\n\t}\n}\n\nimpl<'a, T, U: Copy> ops::Mul for Box> {\n\ttype Output = Box>;\n\n\tfn mul( self, other: Self ) -> Self::Output {\n\t\tBox::new( RegEx::Seq( self, other, usize::MAX ) )\n\t}\n}\n\npub fn eps<'a, T, U: Copy>() -> Box> {\n\tBox::new( RegEx::Eps )\n}\n\npub fn atom<'a, T, U: Copy, F: 'a + Fn( usize, &T ) -> bool>( f: F ) -> Box> {\n\tBox::new( RegEx::Atom( Box::new( f ) ) )\n}\n\npub fn rep<'a, T, U: Copy>( e0: Box> ) -> Box> {\n\tBox::new( RegEx::Repeat( e0, usize::MAX ) )\n}\n\npub fn weight<'a, T, U: Copy>( w: isize ) -> Box> {\n\tBox::new( RegEx::Weight( w ) )\n}\n\npub fn mark<'a, T, U: Copy>( m: U ) -> Box> {\n\tBox::new( RegEx::Mark( m ) )\n}\n\npub fn opt<'a, T, U: Copy>( e0: Box> ) -> Box> {\n\teps() + e0\n}\n\npub fn any<'a, T, U: Copy>() -> Box> {\n\tatom( move |_, _| true )\n}\n\npub fn val<'a, T: 'a + PartialEq, U: Copy>( v0: T ) -> Box> {\n\tatom( move |_, v| *v == v0 )\n}\n\npub struct RegExRoot<'a, T, U: Copy = ()> {\n\tregex: Box>,\n\tnstate: usize,\n}\n\nimpl<'a, T, U: Copy> RegExRoot<'a, T, U> {\n\tpub fn new( mut e: Box> ) -> RegExRoot<'a, T, U> {\n\t\tlet n = Self::renumber( &mut e, 0 );\n\t\tRegExRoot{\n\t\t\tregex: e,\n\t\t\tnstate: n,\n\t\t}\n\t}\n\n\tfn renumber( e: &mut RegEx<'a, T, U>, i: usize ) -> usize {\n\t\tmatch *e {\n\t\t\tRegEx::Eps => i,\n\t\t\tRegEx::Atom( _ ) => i,\n\t\t\tRegEx::Alt( ref mut e0, ref mut e1 ) => {\n\t\t\t\tSelf::renumber( e1, Self::renumber( e0, i ) )\n\t\t\t}\n\t\t\tRegEx::Seq( ref mut e0, ref mut e1, ref mut s ) => {\n\t\t\t\t*s = Self::renumber( e0, i );\n\t\t\t\tSelf::renumber( e1, *s + 1 )\n\t\t\t}\n\t\t\tRegEx::Repeat( ref mut e0, ref 
mut s ) => {\n\t\t\t\t*s = i;\n\t\t\t\tSelf::renumber( e0, i + 1 )\n\t\t\t}\n\t\t\tRegEx::Weight( _ ) => i,\n\t\t\tRegEx::Mark( _ ) => i,\n\t\t}\n\t}\n}\n\nstruct Path( usize, T, Option>> );\n\n#[derive( Clone )]\nstruct State( isize, Option>> );\n\n#[derive( Clone )]\npub struct Matcher<'a, T, U: Copy = ()> {\n\troot: &'a RegExRoot<'a, T, U>,\n\tindex: usize,\n\ts0: State,\n\tstates: Vec>,\n\ts1: State,\n}\n\nimpl<'a, T, U: Copy> Matcher<'a, T, U> {\n\tpub fn new( root: &'a RegExRoot<'a, T, U> ) -> Matcher<'a, T, U> {\n\t\tlet mut this = Matcher{\n\t\t\troot: root,\n\t\t\tindex: 0,\n\t\t\ts0: State( 0, None ),\n\t\t\tstates: vec![ State( isize::MAX, None ); root.nstate ],\n\t\t\ts1: State( isize::MAX, None ),\n\t\t};\n\t\tthis.s1 = this.propagate( &root.regex, State( 0, None ) );\n\t\tthis\n\t}\n\n\tpub fn feed( &mut self, v: &T ) {\n\t\tlet s0 = mem::replace( &mut self.s0, State( isize::MAX, None ) );\n\t\tlet s1 = self.shift( &self.root.regex, v, s0 );\n\t\tself.index += 1;\n\t\tlet s2 = self.propagate( &self.root.regex, State( isize::MAX, None ) );\n\t\tself.s1 = Self::choice( s1, s2 );\n\t}\n\n\tpub fn feed_iter<'b, Iter: IntoIterator>( &mut self, iter: Iter ) where 'a: 'b {\n\t\tfor v in iter {\n\t\t\tself.feed( v );\n\t\t}\n\t}\n\n\tpub fn is_match( &self ) -> bool {\n\t\tself.s1.0 != isize::MAX\n\t}\n\n\tpub fn is_alive( &self ) -> bool {\n\t\tself.s0.0 != isize::MAX ||\n\t\tself.s1.0 != isize::MAX ||\n\t\tself.states.iter().any( |s| s.0 != isize::MAX )\n\t}\n\n\tpub fn path( &self ) -> Vec<(usize, U)> {\n\t\tlet mut result = Vec::new();\n\t\tlet mut it = self.s1.1.clone();\n\t\twhile let Some( e ) = it {\n\t\t\tresult.push( (e.0, e.1) );\n\t\t\tit = e.2.clone();\n\t\t}\n\t\tresult.reverse();\n\t\tresult\n\t}\n\n\tfn choice( s0: State, s1: State ) -> State {\n\t\tif s1.0 < s0.0 { s1 } else { s0 }\n\t}\n\n\tfn choice_inplace( s0: &mut State, s1: State ) {\n\t\tif s1.0 < s0.0 {\n\t\t\t*s0 = s1;\n\t\t}\n\t}\n\n\t// handle epsilon transition.\n\tfn propagate( 
&mut self, e: &RegEx<'a, T, U>, s0: State ) -> State {\n\t\tmatch *e {\n\t\t\tRegEx::Eps => s0,\n\t\t\tRegEx::Atom( _ ) => State( isize::MAX, None ),\n\t\t\tRegEx::Alt( ref e0, ref e1 ) => {\n\t\t\t\tlet s1 = self.propagate( e0, s0.clone() );\n\t\t\t\tlet s2 = self.propagate( e1, s0 );\n\t\t\t\tSelf::choice( s1, s2 )\n\t\t\t}\n\t\t\tRegEx::Seq( ref e0, ref e1, s ) => {\n\t\t\t\tlet s1 = self.propagate( e0, s0 );\n\t\t\t\tSelf::choice_inplace( &mut self.states[s], s1 );\n\t\t\t\tlet s2 = self.states[s].clone();\n\t\t\t\tself.propagate( e1, s2 )\n\t\t\t}\n\t\t\tRegEx::Repeat( ref e0, s ) => {\n\t\t\t\tSelf::choice_inplace( &mut self.states[s], s0 );\n\t\t\t\tlet s1 = self.states[s].clone();\n\t\t\t\tlet s2 = self.propagate( e0, s1 );\n\t\t\t\tSelf::choice_inplace( &mut self.states[s], s2 );\n\t\t\t\tself.states[s].clone()\n\t\t\t}\n\t\t\tRegEx::Weight( w ) => {\n\t\t\t\tlet dw = if s0.0 != isize::MAX { w } else { 0 };\n\t\t\t\tState( s0.0 + dw, s0.1 )\n\t\t\t}\n\t\t\tRegEx::Mark( m ) => {\n\t\t\t\tState( s0.0, Some( rc::Rc::new( Path( self.index, m, s0.1 ) ) ) )\n\t\t\t}\n\t\t}\n\t}\n\n\t// handle normal transition.\n\tfn shift( &mut self, e: &RegEx<'a, T, U>, v: &T, s0: State ) -> State {\n\t\tmatch *e {\n\t\t\tRegEx::Eps => State( isize::MAX, None ),\n\t\t\tRegEx::Atom( ref f ) => {\n\t\t\t\tif s0.0 != isize::MAX && f( self.index, v ) {\n\t\t\t\t\ts0\n\t\t\t\t}\n\t\t\t\telse {\n\t\t\t\t\tState( isize::MAX, None )\n\t\t\t\t}\n\t\t\t}\n\t\t\tRegEx::Alt( ref e0, ref e1 ) => {\n\t\t\t\tlet s1 = self.shift( e0, v, s0.clone() );\n\t\t\t\tlet s2 = self.shift( e1, v, s0 );\n\t\t\t\tSelf::choice( s1, s2 )\n\t\t\t}\n\t\t\tRegEx::Seq( ref e0, ref e1, s ) => {\n\t\t\t\tlet s1 = self.shift( e0, v, s0 );\n\t\t\t\tlet s2 = mem::replace( &mut self.states[s], s1 );\n\t\t\t\tself.shift( e1, v, s2 )\n\t\t\t}\n\t\t\tRegEx::Repeat( ref e0, s ) => {\n\t\t\t\tlet s1 = mem::replace( &mut self.states[s], State( isize::MAX, None ) );\n\t\t\t\tself.states[s] = self.shift( e0, v, s1 
);\n\t\t\t\tState( isize::MAX, None )\n\t\t\t}\n\t\t\tRegEx::Weight( _ ) => State( isize::MAX, None ),\n\t\t\tRegEx::Mark( _ ) => State( isize::MAX, None ),\n\t\t}\n\t}\n}\n"},"avg_line_length":{"kind":"number","value":25.7914893617,"string":"25.791489"},"max_line_length":{"kind":"number","value":95,"string":"95"},"alphanum_fraction":{"kind":"number","value":0.5198812077,"string":"0.519881"},"score":{"kind":"number","value":3.125,"string":"3.125"}}},{"rowIdx":933,"cells":{"hexsha":{"kind":"string","value":"e5243bcaf71ccb5aaa825244f3d85a3ec0bf22aa"},"size":{"kind":"number","value":1540,"string":"1,540"},"ext":{"kind":"string","value":"ts"},"lang":{"kind":"string","value":"TypeScript"},"max_stars_repo_path":{"kind":"string","value":"src/services/input.ts"},"max_stars_repo_name":{"kind":"string","value":"petli-full/awk-vscode"},"max_stars_repo_head_hexsha":{"kind":"string","value":"849f40f8b6ad81a5472817688b702d15f916c477"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":2,"string":"2"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2021-03-26T15:36:14.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2021-07-28T05:48:07.000Z"},"max_issues_repo_path":{"kind":"string","value":"src/services/input.ts"},"max_issues_repo_name":{"kind":"string","value":"petli-full/awk-vscode"},"max_issues_repo_head_hexsha":{"kind":"string","value":"849f40f8b6ad81a5472817688b702d15f916c477"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_issues_count":{"kind":"number","value":1,"string":"1"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2021-07-28T05:50:13.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2021-08-09T02:03:48.000Z"},"max_forks_repo_path":{"kind":"string","value":"src/services/input.ts"},"max_forks_repo_name":{"kind":"string","value":"petli-full/awk-vscode"},"max_forks_repo_head_hexsha":{"kind":"string","value":"849f40f8b6ad81a5472817688b702d15f916c477"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"number","value":1,"string":"1"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2021-03-26T15:36:17.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2021-03-26T15:36:17.000Z"},"content":{"kind":"string","value":"import * as vscode from 'vscode';\n\n\ninterface Input {\n load: () => void;\n reset: () => void;\n ready: (text: string) => Thenable;\n get: () => string;\n getFilename: () => string;\n};\n\nlet _input = '';\nlet _filename = '';\n\nconst load = () => {\n const doc = vscode.window.activeTextEditor?.document;\n _input = (doc ? doc.getText() : '').trim();\n _filename = (doc ? 
doc.fileName : '').trim();\n};\n\nlet editor$: null | Thenable = null;\nlet _editor: null | vscode.TextEditor = null;\n\nconst reset = () => {\n _input = '';\n editor$ = null;\n _editor = null;\n};\n\nconst ready = (text: string): Thenable => {\n if (editor$ === null || (_editor !== null && _editor.document.isClosed)) {\n _input = text;\n editor$ = vscode.workspace.openTextDocument({ language: 'plaintext', content: '' }).then(doc => {\n return vscode.window.showTextDocument(doc).then(editor => {\n editor.edit(builder => {\n builder.insert(doc.positionAt(0), text);\n }).then(() => editor);\n _editor = editor;\n return editor;\n });\n });\n return editor$;\n } else if (_editor === null) {\n return editor$.then(() => ready(text));\n }\n\n return editor$;\n};\n\nconst get = (): string => {\n return _input;\n};\n\nconst getFilename = (): string => {\n return _filename;\n};\n\n\nexport const input: Input = {\n load,\n reset,\n ready,\n get,\n getFilename,\n};\n"},"avg_line_length":{"kind":"number","value":23.3333333333,"string":"23.333333"},"max_line_length":{"kind":"number","value":105,"string":"105"},"alphanum_fraction":{"kind":"number","value":0.5402597403,"string":"0.54026"},"score":{"kind":"number","value":3.015625,"string":"3.015625"}}},{"rowIdx":934,"cells":{"hexsha":{"kind":"string","value":"9bd21b0a0645626b2270758effb332d3ec926934"},"size":{"kind":"number","value":2395,"string":"2,395"},"ext":{"kind":"string","value":"js"},"lang":{"kind":"string","value":"JavaScript"},"max_stars_repo_path":{"kind":"string","value":"api/controllers/expenseController.js"},"max_stars_repo_name":{"kind":"string","value":"SebastianDochia/budget-app"},"max_stars_repo_head_hexsha":{"kind":"string","value":"7b03c3bf03866f30756eaa0dbf7669fc74767aac"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"api/controllers/expenseController.js"},"max_issues_repo_name":{"kind":"string","value":"SebastianDochia/budget-app"},"max_issues_repo_head_hexsha":{"kind":"string","value":"7b03c3bf03866f30756eaa0dbf7669fc74767aac"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"api/controllers/expenseController.js"},"max_forks_repo_name":{"kind":"string","value":"SebastianDochia/budget-app"},"max_forks_repo_head_hexsha":{"kind":"string","value":"7b03c3bf03866f30756eaa0dbf7669fc74767aac"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"'use strict';\n\nconst firebase = require('../db');\nconst Expense = require('../models/expense');\nconst firestore = firebase.firestore();\n\nconst addExpense = async (req, res, next) => {\n try {\n const data = req.body;\n await firestore.collection('expenses').doc().set(data);\n res.send('Expense saved successfuly');\n } catch (error) {\n res.status(400).send(error.message);\n }\n}\n\nconst getAllExpenses = async (req, res, next) => {\n try {\n const expenses = await firestore.collection('expenses');\n const data = await expenses.get();\n const expensesArray = [];\n if(data.empty) {\n res.status(404).send('No expense found');\n }else {\n data.forEach(doc => {\n const expense = new Expense(\n doc.id,\n doc.data().body.name,\n doc.data().body.value,\n doc.data().body.date,\n 
doc.data().body.category,\n );\n expensesArray.push(expense);\n });\n res.send(expensesArray);\n }\n } catch (error) {\n res.status(400).send(error.message);\n }\n}\n\nconst getExpense = async (req, res, next) => {\n try {\n const id = req.params.id;\n const expense = await firestore.collection('expense').doc(id);\n const data = await expense.get();\n if(!data.exists) {\n res.status(404).send('Expense with the given ID not found');\n }else {\n res.send(data.data());\n }\n } catch (error) {\n res.status(400).send(error.message);\n }\n}\n\nconst updateExpense = async (req, res, next) => {\n try {\n const id = req.params.id;\n const data = req.body;\n const expense = await firestore.collection('expenses').doc(id);\n await expense.update(data);\n res.send('Expense updated successfuly'); \n } catch (error) {\n res.status(400).send(error.message);\n }\n}\n\nconst deleteExpense = async (req, res, next) => {\n try {\n const id = req.params.id;\n await firestore.collection('expenses').doc(id).delete();\n res.send('Expense deleted successfuly');\n } catch (error) {\n res.status(400).send(error.message);\n }\n}\n\nmodule.exports = {\n addExpense,\n getAllExpenses,\n getExpense,\n updateExpense,\n 
deleteExpense,\n}"},"avg_line_length":{"kind":"number","value":28.1764705882,"string":"28.176471"},"max_line_length":{"kind":"number","value":72,"string":"72"},"alphanum_fraction":{"kind":"number","value":0.5423799582,"string":"0.54238"},"score":{"kind":"number","value":3.03125,"string":"3.03125"}}},{"rowIdx":935,"cells":{"hexsha":{"kind":"string","value":"369fa64ebc77cae1df42bea6460e705c18ab2ee7"},"size":{"kind":"number","value":2988,"string":"2,988"},"ext":{"kind":"string","value":"rs"},"lang":{"kind":"string","value":"Rust"},"max_stars_repo_path":{"kind":"string","value":"src/chess/moves/rook.rs"},"max_stars_repo_name":{"kind":"string","value":"bloatoo/zchess"},"max_stars_repo_head_hexsha":{"kind":"string","value":"e7cacb4e36d4646b6ef086c1e634a8c314223744"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":2,"string":"2"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2021-12-28T22:41:51.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2021-12-29T10:02:33.000Z"},"max_issues_repo_path":{"kind":"string","value":"src/chess/moves/rook.rs"},"max_issues_repo_name":{"kind":"string","value":"bloatoo/zchess"},"max_issues_repo_head_hexsha":{"kind":"string","value":"e7cacb4e36d4646b6ef086c1e634a8c314223744"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"src/chess/moves/rook.rs"},"max_forks_repo_name":{"kind":"string","value":"bloatoo/zchess"},"max_forks_repo_head_hexsha":{"kind":"string","value":"e7cacb4e36d4646b6ef086c1e634a8c314223744"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"use crate::chess::utils::calculate_squares_to_edge;\nuse crate::chess::{board::Edge, Board, Move, Piece};\n\npub const ROOK_MOVES: &[Move] = &[\n Move {\n x: 8,\n y: 0,\n constraints: &[],\n },\n Move {\n x: 0,\n y: 8,\n constraints: &[],\n },\n];\n\npub fn generate_rook_moves(board: &Board, sq: usize, piece: &Piece) -> Vec {\n let mut moves = vec![];\n\n for mv in ROOK_MOVES.iter() {\n if mv.x == 0 {\n let top_edge = calculate_squares_to_edge(Edge::Top, sq);\n let mut valid = true;\n\n for i in 1..=top_edge {\n if !valid {\n continue;\n }\n let final_sq = sq + i as usize * 8;\n\n match board.piece_at(final_sq) {\n Some(p) => {\n if p.side() != piece.side() {\n moves.push(final_sq);\n }\n valid = false;\n }\n None => moves.push(final_sq),\n };\n }\n\n let bottom_edge = calculate_squares_to_edge(Edge::Bottom, sq);\n let mut valid = true;\n\n for i in 1..=bottom_edge {\n if !valid {\n continue;\n }\n\n let final_sq = sq - i * 8;\n match board.piece_at(final_sq) {\n Some(p) => {\n if p.side() != piece.side() {\n moves.push(final_sq);\n }\n valid = false;\n }\n None => moves.push(final_sq),\n }\n }\n } else {\n let right_edge = calculate_squares_to_edge(Edge::Right, sq);\n let mut valid = true;\n for i in 1..=right_edge {\n if !valid {\n continue;\n }\n\n let final_sq = sq + i;\n\n match board.piece_at(final_sq) {\n Some(p) => {\n if p.side() != piece.side() {\n moves.push(final_sq);\n }\n valid = false;\n }\n None => moves.push(final_sq),\n }\n }\n\n let left_edge = calculate_squares_to_edge(Edge::Left, sq);\n let mut valid = true;\n\n for i in 1..=left_edge {\n if !valid {\n continue;\n }\n\n let final_sq = sq - i;\n\n match board.piece_at(final_sq) {\n Some(p) => {\n if p.side() != piece.side() {\n moves.push(final_sq);\n }\n valid = false;\n }\n None => 
moves.push(final_sq),\n }\n }\n }\n }\n\n moves\n}\n"},"avg_line_length":{"kind":"number","value":27.9252336449,"string":"27.925234"},"max_line_length":{"kind":"number","value":83,"string":"83"},"alphanum_fraction":{"kind":"number","value":0.3584337349,"string":"0.358434"},"score":{"kind":"number","value":3,"string":"3"}}},{"rowIdx":936,"cells":{"hexsha":{"kind":"string","value":"404cb8362bcfdd803cb1b26c1c5bdcf7eb8586c2"},"size":{"kind":"number","value":1977,"string":"1,977"},"ext":{"kind":"string","value":"kt"},"lang":{"kind":"string","value":"Kotlin"},"max_stars_repo_path":{"kind":"string","value":"kotlin/src/test/kotlin/nonamedb/test/specs/unit/storage/engines/MemoryEngineSpec.kt"},"max_stars_repo_name":{"kind":"string","value":"sndnv/nonamedb"},"max_stars_repo_head_hexsha":{"kind":"string","value":"e229ea19df94a89ddd135cf9e2b5bce5f1aa0f6f"},"max_stars_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_stars_count":{"kind":"number","value":1,"string":"1"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2019-04-21T05:29:54.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2019-04-21T05:29:54.000Z"},"max_issues_repo_path":{"kind":"string","value":"kotlin/src/test/kotlin/nonamedb/test/specs/unit/storage/engines/MemoryEngineSpec.kt"},"max_issues_repo_name":{"kind":"string","value":"sndnv/nonamedb"},"max_issues_repo_head_hexsha":{"kind":"string","value":"e229ea19df94a89ddd135cf9e2b5bce5f1aa0f6f"},"max_issues_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n 
\"Apache-2.0\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"kotlin/src/test/kotlin/nonamedb/test/specs/unit/storage/engines/MemoryEngineSpec.kt"},"max_forks_repo_name":{"kind":"string","value":"sndnv/nonamedb"},"max_forks_repo_head_hexsha":{"kind":"string","value":"e229ea19df94a89ddd135cf9e2b5bce5f1aa0f6f"},"max_forks_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"package nonamedb.test.specs.unit.storage.engines\n\nimport io.kotlintest.*\nimport io.kotlintest.specs.StringSpec\nimport nonamedb.storage.Done\nimport nonamedb.storage.engines.MemoryEngine\n\nclass MemoryEngineSpec : StringSpec(){\n init {\n val timeout = 5.seconds\n val testKey = \"some key\"\n val testValue = \"some value\".toByteArray()\n val updatedTestValue = \"some updated value\".toByteArray()\n val testEngine = MemoryEngine()\n\n \"should fail to retrieve missing data\" {\n val result = testEngine.get(testKey)\n eventually(timeout) {\n result.getCompleted() shouldBe null\n }\n }\n\n \"should successfully add data\" {\n val result = testEngine.put(testKey, testValue)\n eventually(timeout) {\n result.getCompleted() shouldBe Done\n }\n }\n\n \"should successfully retrieve data\" {\n val result = testEngine.get(testKey)\n eventually(timeout) {\n result.getCompleted() shouldBe testValue\n }\n }\n\n \"should successfully update data\" {\n val result = testEngine.put(testKey, updatedTestValue)\n eventually(timeout) {\n result.getCompleted() shouldBe Done\n }\n }\n\n \"should successfully retrieve updated data\" {\n val result = testEngine.get(testKey)\n eventually(timeout) {\n result.getCompleted() shouldBe 
updatedTestValue\n }\n }\n\n \"should successfully remove data\" {\n val result = testEngine.put(testKey, \"\".toByteArray())\n eventually(timeout) {\n result.getCompleted() shouldBe Done\n }\n }\n\n \"should fail to retrieve removed data\" {\n val result = testEngine.get(testKey)\n eventually(timeout) {\n result.getCompleted() shouldBe null\n }\n }\n }\n}\n"},"avg_line_length":{"kind":"number","value":29.9545454545,"string":"29.954545"},"max_line_length":{"kind":"number","value":66,"string":"66"},"alphanum_fraction":{"kind":"number","value":0.569549823,"string":"0.56955"},"score":{"kind":"number","value":3.171875,"string":"3.171875"}}},{"rowIdx":937,"cells":{"hexsha":{"kind":"string","value":"7f1610e61fe4d126e2a9d2e38f0b4661c65c9e03"},"size":{"kind":"number","value":993,"string":"993"},"ext":{"kind":"string","value":"go"},"lang":{"kind":"string","value":"Go"},"max_stars_repo_path":{"kind":"string","value":"cmd/util/pool_test.go"},"max_stars_repo_name":{"kind":"string","value":"pop/wash"},"max_stars_repo_head_hexsha":{"kind":"string","value":"16e54fa159f28802fadef1d0d5632d32b07c6f86"},"max_stars_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_stars_count":{"kind":"number","value":180,"string":"180"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2019-03-19T16:54:48.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2022-03-21T01:20:21.000Z"},"max_issues_repo_path":{"kind":"string","value":"cmd/util/pool_test.go"},"max_issues_repo_name":{"kind":"string","value":"pop/wash"},"max_issues_repo_head_hexsha":{"kind":"string","value":"16e54fa159f28802fadef1d0d5632d32b07c6f86"},"max_issues_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n 
\"Apache-2.0\"\n]"},"max_issues_count":{"kind":"number","value":413,"string":"413"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2019-03-19T17:05:50.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2021-07-01T16:45:26.000Z"},"max_forks_repo_path":{"kind":"string","value":"cmd/util/pool_test.go"},"max_forks_repo_name":{"kind":"string","value":"pop/wash"},"max_forks_repo_head_hexsha":{"kind":"string","value":"16e54fa159f28802fadef1d0d5632d32b07c6f86"},"max_forks_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_forks_count":{"kind":"number","value":39,"string":"39"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2019-03-19T16:55:47.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2022-01-28T10:57:23.000Z"},"content":{"kind":"string","value":"package cmdutil\n\nimport (\n\t\"sync\"\n\t\"testing\"\n\n\t\"github.com/stretchr/testify/assert\"\n)\n\n// Test that a pool with a single worker finishes.\nfunc TestPool1(t *testing.T) {\n\tp := NewPool(1)\n\n\tval := 0\n\tp.Submit(func() {\n\t\tval++\n\t\tp.Done()\n\t})\n\n\tp.Finish()\n\tassert.Equal(t, 1, val)\n}\n\n// Test that a pool with two workers executes them concurrently and finishes.\nfunc TestPool2(t *testing.T) {\n\tp := NewPool(2)\n\n\tvar mux1, mux2 sync.Mutex\n\tval := 0\n\t// Start with both mutexes locked. 
In sequence wait on one and unlock the other so that both\n\t// functions must run concurrently to correctly unlock them.\n\tmux1.Lock()\n\tmux2.Lock()\n\tp.Submit(func() {\n\t\t// Wait on 1.\n\t\tmux1.Lock()\n\t\tval++\n\t\t// Signal 2.\n\t\tmux2.Unlock()\n\t\tp.Done()\n\t})\n\n\tp.Submit(func() {\n\t\t// Signal 1.\n\t\tmux1.Unlock()\n\t\t// Wait on 2.\n\t\tmux2.Lock()\n\t\tval++\n\t\tp.Done()\n\t})\n\t// At the end both mutexes are again locked.\n\n\t// Wait for completion and ensure both functions have updated the value.\n\tp.Finish()\n\tassert.Equal(t, 2, val)\n}\n"},"avg_line_length":{"kind":"number","value":17.4210526316,"string":"17.421053"},"max_line_length":{"kind":"number","value":93,"string":"93"},"alphanum_fraction":{"kind":"number","value":0.6505538771,"string":"0.650554"},"score":{"kind":"number","value":3.3125,"string":"3.3125"}}},{"rowIdx":938,"cells":{"hexsha":{"kind":"string","value":"9671d678b496b5bd1714a0e06a8fe25b3fbd9372"},"size":{"kind":"number","value":2769,"string":"2,769"},"ext":{"kind":"string","value":"php"},"lang":{"kind":"string","value":"PHP"},"max_stars_repo_path":{"kind":"string","value":"app/Http/Controllers/LabelsResourceController.php"},"max_stars_repo_name":{"kind":"string","value":"austin-dudzik/spark"},"max_stars_repo_head_hexsha":{"kind":"string","value":"4f2014121473ffe1a1f5929636b85272ff2ef421"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"app/Http/Controllers/LabelsResourceController.php"},"max_issues_repo_name":{"kind":"string","value":"austin-dudzik/spark"},"max_issues_repo_head_hexsha":{"kind":"string","value":"4f2014121473ffe1a1f5929636b85272ff2ef421"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"app/Http/Controllers/LabelsResourceController.php"},"max_forks_repo_name":{"kind":"string","value":"austin-dudzik/spark"},"max_forks_repo_head_hexsha":{"kind":"string","value":"4f2014121473ffe1a1f5929636b85272ff2ef421"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":" Label::query()->\n with(['tasks'])->\n where('user_id', '=', Auth::id())->\n get()\n ]);\n }\n\n /**\n * Store a new label in the database\n *\n * @param Request $request\n */\n public function store(Request $request)\n {\n // Validate the request\n $fields = $request->validateWithBag('new_label', [\n 'name' => 'required',\n 'color' => 'required',\n ]);\n\n // Assign the user ID to the request\n $fields['user_id'] = Auth::id();\n\n // Create the label\n Label::query()->create($fields);\n\n // Redirect back\n return redirect()->back();\n }\n\n /**\n * Display the specified label\n *\n * @param Label $label\n */\n public function show(Label $label)\n {\n // Return the single label view\n return view('label-single', [\n 'label' => Label::query()->\n where('user_id', '=', Auth::id())->\n where('id', '=', $label->id)->\n first(),\n 'tasks' => Task::query()->\n where('user_id', '=', Auth::id())->\n where('label_id', '=', $label->id)->\n whereNull('completed')->\n orderBy($this->getSorters()->sort_by, $this->getSorters()->order_by)->\n get(),\n ]);\n }\n\n /**\n * Update the specified resource in storage.\n *\n * @param Request $request\n * @param Label $label\n */\n public function update(Request $request, Label $label)\n {\n // Validate the request\n $fields = 
$request->validateWithBag('edit_label_' . $label->id, [\n 'name' => 'required',\n 'color' => 'required',\n ]);\n\n // Update the label\n Label::query()->find($label->id)->update($fields);\n\n // Redirect to index with success\n return redirect()->back();\n }\n\n /**\n * Remove the specified label from storage.\n *\n * @param Label $label\n */\n public function destroy(Label $label)\n {\n // Find and delete existing label\n Label::query()->find($label->id)->delete();\n\n // Remove label from tasks\n Task::query()->\n where('label_id', '=', $label->id)->\n update(['label_id' => null]);\n\n // Redirect to index\n return redirect()->back();\n }\n}\n"},"avg_line_length":{"kind":"number","value":23.8706896552,"string":"23.87069"},"max_line_length":{"kind":"number","value":82,"string":"82"},"alphanum_fraction":{"kind":"number","value":0.5066811123,"string":"0.506681"},"score":{"kind":"number","value":3.15625,"string":"3.15625"}}},{"rowIdx":939,"cells":{"hexsha":{"kind":"string","value":"70dc725ccf32e9d60106d386ebd957e8de6b19cd"},"size":{"kind":"number","value":2342,"string":"2,342"},"ext":{"kind":"string","value":"c"},"lang":{"kind":"string","value":"C"},"max_stars_repo_path":{"kind":"string","value":"Array/wuchao/other/7-16 c实现string函数/StringUtils.c"},"max_stars_repo_name":{"kind":"string","value":"JessonYue/LeetCodeLearning"},"max_stars_repo_head_hexsha":{"kind":"string","value":"3c22a4fcdfe8b47f9f64b939c8b27742c4e30b79"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":39,"string":"39"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2020-05-31T06:14:39.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2021-01-09T11:06:39.000Z"},"max_issues_repo_path":{"kind":"string","value":"Array/wuchao/other/7-16 
c实现string函数/StringUtils.c"},"max_issues_repo_name":{"kind":"string","value":"JessonYue/LeetCodeLearning"},"max_issues_repo_head_hexsha":{"kind":"string","value":"3c22a4fcdfe8b47f9f64b939c8b27742c4e30b79"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"number","value":7,"string":"7"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2020-06-02T11:04:14.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2020-06-11T14:11:58.000Z"},"max_forks_repo_path":{"kind":"string","value":"Array/wuchao/other/7-16 c实现string函数/StringUtils.c"},"max_forks_repo_name":{"kind":"string","value":"JessonYue/LeetCodeLearning"},"max_forks_repo_head_hexsha":{"kind":"string","value":"3c22a4fcdfe8b47f9f64b939c8b27742c4e30b79"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"number","value":20,"string":"20"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2020-05-31T06:21:57.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2020-10-01T04:48:38.000Z"},"content":{"kind":"string","value":"//\n// Created by 吴超 on 2020/7/16.\n//\n#include \n#include \n\n/**\n * C语言实现strcpy(字符串复制)、strcat(字符串链接)、strstr(字符串包含)、strchr(字符出现位置)、memcpy(拷贝)\n */\nchar *strcpy(char *source);\n\nchar *strcat(char *dest, char *source);\n\nint strstr(char *dest, char *source);\n\nint strchr(char *source, char target);\n\nvoid* memcpy(char *dest, char* source, size_t size);\n\nsize_t stringLen(char *source);\n\nchar *strcpy(char *source) {\n char *dest = malloc(stringLen(source) * sizeof(char));\n int i = 0;\n while (source[i] != '\\0') {\n dest[i] = source[i];\n i++;\n }\n return dest;\n}\n\nchar *strcat(char *dest, char *source) {\n int destLen = stringLen(dest);\n int sourceLen = stringLen(source);\n char *result = malloc((destLen + sourceLen) * sizeof(char));\n int i = 0;\n while (i < destLen) {\n 
result[i] = dest[i];\n i++;\n }\n int j = 0;\n while (j < sourceLen) {\n result[i] = source[j];\n i++;\n j++;\n }\n return result;\n}\n\nint strstr(char *dest, char *source){\n int i = 0;\n int j = 0;\n int destLen = stringLen(dest);\n int sourceLen = stringLen(source);\n if(sourceLen>destLen) return -1;\n while(irequire('../type/run-config')\nconst logger = require('../middleware/logger')\n\nWalletServer.ConfigureServerURL(Config.walletServerUrl)\nWalletServer.ConfigureAuthorizationHeader(Config.serverAuthHeader)\n\nconst filename = 'bitcoin-s-backup.zip'\n\nexports.downloadBackup = (req: Request, res: Response) => {\n // const r = req.body // don't currently care about request\n logger.info('downloadBackup ' + Config.backupDirectory)\n\n const fullPath = path.join(Config.backupDirectory, filename)\n\n logger.info('fullPath: ' + fullPath + ' walletServerUrl: ' + Config.walletServerUrl)\n\n // logger.info('auth header: ' + res.getHeader('Authorization'))\n\n // Sanity check\n try {\n fs.accessSync(Config.backupDirectory) // Will throw error if directory does not exist\n } catch (err) {\n logger.error('downloadBackup backupDirectory is not accessible ' + Config.backupDirectory)\n res.end() // Blob size 0 returned\n }\n\n // Use wallet-ts to create backup\n WalletServer.ZipDataDir(fullPath).then(result => {\n logger.info('ZipDataDir() complete')\n if (result.result === null) { // success case\n // Sanity check\n try {\n fs.accessSync(fullPath) // Will throw error if file does not exist\n } catch (err) {\n logger.error('downloadBackup fullPath is not accessible ' + fullPath)\n res.end() // Blob size 0 returned\n }\n\n const readStream = fs.createReadStream(fullPath)\n readStream.on('open', () =>\n res.setHeader('Content-Type', 'application/zip; charset=utf-8'))\n readStream.on('error',\n (err) => { logger.error('readStream error ' + err) })\n readStream.on('end', () => {\n // Always delete backup zip after sending\n fs.unlink(fullPath, function() {\n // Nothing to 
do\n })\n })\n readStream.pipe(res)\n } else {\n logger.error('downloadBackup ZipDataDir failed')\n res.end() // Blob size 0 returned\n }\n })\n}\n"},"avg_line_length":{"kind":"number","value":32.0597014925,"string":"32.059701"},"max_line_length":{"kind":"number","value":94,"string":"94"},"alphanum_fraction":{"kind":"number","value":0.6643389199,"string":"0.664339"},"score":{"kind":"number","value":3.25,"string":"3.25"}}},{"rowIdx":941,"cells":{"hexsha":{"kind":"string","value":"c6d25c4f72141f03ee058e94885ee553877d91e6"},"size":{"kind":"number","value":930,"string":"930"},"ext":{"kind":"string","value":"rb"},"lang":{"kind":"string","value":"Ruby"},"max_stars_repo_path":{"kind":"string","value":"spec/controllers/admin/choices_controller_spec.rb"},"max_stars_repo_name":{"kind":"string","value":"ianfleeton/zmey"},"max_stars_repo_head_hexsha":{"kind":"string","value":"d533ea22a6bbc051d6743aafb63beb3d69d8825c"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"spec/controllers/admin/choices_controller_spec.rb"},"max_issues_repo_name":{"kind":"string","value":"ianfleeton/zmey"},"max_issues_repo_head_hexsha":{"kind":"string","value":"d533ea22a6bbc051d6743aafb63beb3d69d8825c"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_issues_count":{"kind":"number","value":8,"string":"8"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2015-03-19T13:05:58.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2021-08-10T18:34:30.000Z"},"max_forks_repo_path":{"kind":"string","value":"spec/controllers/admin/choices_controller_spec.rb"},"max_forks_repo_name":{"kind":"string","value":"ianfleeton/zmey"},"max_forks_repo_head_hexsha":{"kind":"string","value":"d533ea22a6bbc051d6743aafb63beb3d69d8825c"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"require \"rails_helper\"\n\nmodule Admin\n RSpec.describe ChoicesController, type: :controller do\n before do\n logged_in_as_admin\n end\n\n describe \"GET new\" do\n it \"instantiates a new Choice\" do\n allow(controller).to receive(:feature_valid?)\n expect(Choice).to receive(:new).and_return(double(Choice).as_null_object)\n get \"new\"\n end\n\n it \"sets @choice.feature_id to the feature_id supplied as a parameter\" do\n choice = Choice.new\n allow(Choice).to receive(:new).and_return(choice)\n get \"new\", params: {feature_id: 123}\n expect(choice.feature_id).to eq 123\n end\n\n context \"when the feature is invalid\" do\n it \"redirects to the products page\" do\n allow(controller).to receive(:feature_valid?).and_return(false)\n get \"new\"\n expect(response).to redirect_to(admin_products_path)\n end\n end\n end\n 
end\nend\n"},"avg_line_length":{"kind":"number","value":28.1818181818,"string":"28.181818"},"max_line_length":{"kind":"number","value":81,"string":"81"},"alphanum_fraction":{"kind":"number","value":0.6494623656,"string":"0.649462"},"score":{"kind":"number","value":3.046875,"string":"3.046875"}}},{"rowIdx":942,"cells":{"hexsha":{"kind":"string","value":"4009b06c7f2a79a55036a8541e8d8b3d6f9a817d"},"size":{"kind":"number","value":19880,"string":"19,880"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"ost/s1/grd_batch.py"},"max_stars_repo_name":{"kind":"string","value":"KBodolai/OpenSarToolkit"},"max_stars_repo_head_hexsha":{"kind":"string","value":"29af1df36f10f28a17b56f39ad67f0c7f530b93a"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"ost/s1/grd_batch.py"},"max_issues_repo_name":{"kind":"string","value":"KBodolai/OpenSarToolkit"},"max_issues_repo_head_hexsha":{"kind":"string","value":"29af1df36f10f28a17b56f39ad67f0c7f530b93a"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"ost/s1/grd_batch.py"},"max_forks_repo_name":{"kind":"string","value":"KBodolai/OpenSarToolkit"},"max_forks_repo_head_hexsha":{"kind":"string","value":"29af1df36f10f28a17b56f39ad67f0c7f530b93a"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"#! /usr/bin/env python\n# -*- coding: utf-8 -*-\n\n\"\"\"Batch processing for GRD products\n\n\"\"\"\n\nimport os\nimport json\nimport itertools\nimport logging\nimport pandas as pd\nfrom pathlib import Path\n\nfrom godale._concurrent import Executor\n\nfrom ost import Sentinel1Scene\nfrom ost.s1 import grd_to_ard\nfrom ost.helpers import raster as ras\nfrom ost.generic import ts_extent\nfrom ost.generic import ts_ls_mask\nfrom ost.generic import ard_to_ts\nfrom ost.generic import timescan\nfrom ost.generic import mosaic\n\nlogger = logging.getLogger(__name__)\n\n\ndef _create_processing_dict(inventory_df):\n \"\"\"Function that creates a dictionary to handle GRD batch processing\n\n This helper function takes the inventory dataframe and creates\n a dictionary with the track as key, and all the files to process as\n a list, whereas the list is\n\n :param inventory_df:\n :return:\n \"\"\"\n\n # initialize empty dictionary\n dict_scenes = {}\n\n # get relative orbits and loop through each\n track_list = inventory_df[\"relativeorbit\"].unique()\n\n for track in track_list:\n\n # get acquisition dates and loop through each\n acquisition_dates = inventory_df[\"acquisitiondate\"][\n inventory_df[\"relativeorbit\"] == track\n ].unique()\n\n # loop through dates\n for i, acquisition_date in enumerate(acquisition_dates):\n\n # get the scene ids per acquisition_date and write into a list\n single_id = inventory_df[\"identifier\"][\n (inventory_df[\"relativeorbit\"] == track)\n & (inventory_df[\"acquisitiondate\"] == acquisition_date)\n ].tolist()\n\n # add this list to the dictionary and associate the track number\n # as dict key\n dict_scenes[f\"{track}_{i+1}\"] = single_id\n\n return dict_scenes\n\n\ndef create_processed_df(inventory_df, list_of_scenes, outfile, out_ls, 
error):\n\n df = pd.DataFrame(columns=[\"identifier\", \"outfile\", \"out_ls\", \"error\"])\n\n for scene in list_of_scenes:\n\n temp_df = pd.DataFrame()\n # get scene_id\n temp_df[\"identifier\"] = inventory_df.identifier[\n inventory_df.identifier == scene\n ].values\n # fill outfiles/error\n temp_df[\"outfile\"] = outfile\n temp_df[\"out_ls\"] = out_ls\n temp_df[\"error\"] = error\n\n # append to final df and delete temp_df for next loop\n df = pd.concat([df, temp_df])\n del temp_df\n\n return df\n\n\ndef grd_to_ard_batch(inventory_df, config_file):\n\n # load relevant config parameters\n with open(config_file, \"r\") as file:\n config_dict = json.load(file)\n download_dir = Path(config_dict[\"download_dir\"])\n data_mount = Path(config_dict[\"data_mount\"])\n\n # where all frames are grouped into acquisitions\n processing_dict = _create_processing_dict(inventory_df)\n processing_df = pd.DataFrame(columns=[\"identifier\", \"outfile\", \"out_ls\", \"error\"])\n\n iter_list = []\n for _, list_of_scenes in processing_dict.items():\n\n # get the paths to the file\n scene_paths = [\n Sentinel1Scene(scene).get_path(download_dir, data_mount)\n for scene in list_of_scenes\n ]\n\n iter_list.append(scene_paths)\n\n # now we run with godale, which works also with 1 worker\n executor = Executor(\n executor=config_dict[\"executor_type\"], max_workers=config_dict[\"max_workers\"]\n )\n\n for task in executor.as_completed(\n func=grd_to_ard.grd_to_ard,\n iterable=iter_list,\n fargs=(\n [\n str(config_file),\n ]\n ),\n ):\n\n list_of_scenes, outfile, out_ls, error = task.result()\n\n # return the info of processing as dataframe\n temp_df = create_processed_df(\n inventory_df, list_of_scenes, outfile, out_ls, error\n )\n\n processing_df = pd.concat([processing_df, temp_df])\n\n return processing_df\n\n\ndef ards_to_timeseries(inventory_df, config_file):\n\n with open(config_file) as file:\n config_dict = json.load(file)\n ard = config_dict[\"processing\"][\"single_ARD\"]\n 
ard_mt = config_dict[\"processing\"][\"time-series_ARD\"]\n\n # create all extents\n _create_extents(inventory_df, config_file)\n\n # update extents in case of ls_mask\n if ard[\"create_ls_mask\"] or ard_mt[\"apply_ls_mask\"]:\n _create_mt_ls_mask(inventory_df, config_file)\n\n # finally create time-series\n _create_timeseries(inventory_df, config_file)\n\n\ndef _create_extents(inventory_df, config_file):\n\n with open(config_file, \"r\") as file:\n config_dict = json.load(file)\n processing_dir = Path(config_dict[\"processing_dir\"])\n\n iter_list = []\n for track in inventory_df.relativeorbit.unique():\n\n # get the burst directory\n track_dir = processing_dir / track\n\n list_of_extents = list(track_dir.glob(\"*/*/*bounds.json\"))\n\n # if extent does not already exist, add to iterable\n if not (track_dir / f\"{track}.min_bounds.json\").exists():\n iter_list.append(list_of_extents)\n\n # now we run with godale, which works also with 1 worker\n executor = Executor(\n executor=config_dict[\"executor_type\"], max_workers=os.cpu_count()\n )\n\n out_dict = {\"track\": [], \"list_of_scenes\": [], \"extent\": []}\n for task in executor.as_completed(\n func=ts_extent.mt_extent,\n iterable=iter_list,\n fargs=(\n [\n str(config_file),\n ]\n ),\n ):\n track, list_of_scenes, extent = task.result()\n out_dict[\"track\"].append(track)\n out_dict[\"list_of_scenes\"].append(list_of_scenes)\n out_dict[\"extent\"].append(extent)\n\n return pd.DataFrame.from_dict(out_dict)\n\n\ndef _create_extents_old(inventory_df, config_file):\n\n with open(config_file, \"r\") as file:\n config_dict = json.load(file)\n processing_dir = Path(config_dict[\"processing_dir\"])\n\n iter_list = []\n for track in inventory_df.relativeorbit.unique():\n\n # get the burst directory\n track_dir = processing_dir / track\n\n # get common burst extent\n list_of_scenes = list(track_dir.glob(\"**/*img\"))\n\n list_of_scenes = [str(x) for x in list_of_scenes if \"layover\" not in str(x)]\n\n # if extent does not 
already exist, add to iterable\n if not (track_dir / f\"{track}.extent.gpkg\").exists():\n iter_list.append(list_of_scenes)\n\n # now we run with godale, which works also with 1 worker\n executor = Executor(\n executor=config_dict[\"executor_type\"], max_workers=config_dict[\"max_workers\"]\n )\n\n out_dict = {\"track\": [], \"list_of_scenes\": [], \"extent\": []}\n for task in executor.as_completed(\n func=ts_extent.mt_extent,\n iterable=iter_list,\n fargs=(\n [\n str(config_file),\n ]\n ),\n ):\n track, list_of_scenes, extent = task.result()\n out_dict[\"track\"].append(track)\n out_dict[\"list_of_scenes\"].append(list_of_scenes)\n out_dict[\"extent\"].append(extent)\n\n return pd.DataFrame.from_dict(out_dict)\n\n\ndef _create_mt_ls_mask(inventory_df, config_file):\n \"\"\"Helper function to union the Layover/Shadow masks of a Time-series\n\n This function creates a\n\n :param inventory_df:\n :param config_file:\n :return:\n \"\"\"\n with open(config_file, \"r\") as file:\n config_dict = json.load(file)\n processing_dir = Path(config_dict[\"processing_dir\"])\n\n iter_list = []\n for track in inventory_df.relativeorbit.unique():\n\n # get the burst directory\n track_dir = processing_dir / track\n\n # get common burst extent\n list_of_masks = list(track_dir.glob(\"*/*/*_ls_mask.json\"))\n\n # if extent does not already exist, add to iterable\n if not (track_dir / f\"{track}.ls_mask.json\").exists():\n iter_list.append(list_of_masks)\n\n # now we run with godale, which works also with 1 worker\n executor = Executor(\n executor=config_dict[\"executor_type\"], max_workers=os.cpu_count()\n )\n\n for task in executor.as_completed(func=ts_ls_mask.mt_layover, iterable=iter_list):\n task.result()\n\n\ndef _create_mt_ls_mask_old(inventory_df, config_file):\n\n with open(config_file, \"r\") as file:\n config_dict = json.load(file)\n processing_dir = Path(config_dict[\"processing_dir\"])\n\n iter_list = []\n for track in inventory_df.relativeorbit.unique():\n\n # get the 
burst directory\n track_dir = processing_dir / track\n\n # get common burst extent\n list_of_scenes = list(track_dir.glob(\"**/*img\"))\n\n list_of_layover = [str(x) for x in list_of_scenes if \"layover\" in str(x)]\n\n iter_list.append(list_of_layover)\n\n # now we run with godale, which works also with 1 worker\n executor = Executor(\n executor=config_dict[\"executor_type\"], max_workers=config_dict[\"max_workers\"]\n )\n\n out_dict = {\"track\": [], \"list_of_layover\": [], \"ls_mask\": [], \"ls_extent\": []}\n for task in executor.as_completed(\n func=ts_ls_mask.mt_layover,\n iterable=iter_list,\n fargs=(\n [\n str(config_file),\n ]\n ),\n ):\n track, list_of_layover, ls_mask, ls_extent = task.result()\n out_dict[\"track\"].append(track)\n out_dict[\"list_of_layover\"].append(list_of_layover)\n out_dict[\"ls_mask\"].append(list_of_layover)\n out_dict[\"ls_extent\"].append(ls_extent)\n\n return pd.DataFrame.from_dict(out_dict)\n\n\ndef _create_timeseries(inventory_df, config_file):\n \"\"\"Helper function to create Timeseries out of OST ARD products\n\n Based on the inventory GeoDataFrame and the configuration file,\n this function triggers the time-series processing for all bursts/tracks\n within the respective project. Each product/polarisation is treated\n singularly.\n\n Based on the ARD type/configuration settings, the function uses\n SNAP's Create-Stack function to unify the grid of each scene and\n applies a multi-temporal speckle filter if selected.\n\n The output are single GeoTiff files, whereas there is the possibility to\n reduce the data by converting the data format into uint8 or uint16.\n This is done by linearly stretching the data between -30 and +5\n for backscatter, 0 and 1 for coherence, polarimetric anisotropy #\n and entropy, as well 0 and 90 for polarimetric alpha channel. 
All\n the data is cropped to the same extent based on the minimum bounds layer.\n\n This function executes the underlying functions using the godale framework\n for parallel execution. Executor type and number of parallel processes is\n defined within the configuration file.\n\n\n :param inventory_df:\n :type GeoDataFrame\n :param config_file:\n :type str/Path\n :return:\n \"\"\"\n with open(config_file, \"r\") as file:\n config_dict = json.load(file)\n processing_dir = Path(config_dict[\"processing_dir\"])\n\n iter_list = []\n for track in inventory_df.relativeorbit.unique():\n\n # get the burst directory\n track_dir = processing_dir / track\n\n for pol in [\"VV\", \"VH\", \"HH\", \"HV\"]:\n\n # see if there is actually any imagery in thi polarisation\n list_of_files = sorted(\n str(file) for file in list(track_dir.glob(f\"20*/*data*/*ma0*{pol}*img\"))\n )\n\n if len(list_of_files) <= 1:\n continue\n\n # create list of dims if polarisation is present\n list_of_dims = sorted(\n str(dim) for dim in list(track_dir.glob(\"20*/*bs*dim\"))\n )\n\n iter_list.append([list_of_dims, track, \"bs\", pol])\n\n executor = Executor(\n executor=config_dict[\"executor_type\"], max_workers=config_dict[\"max_workers\"]\n )\n\n out_dict = {\n \"track\": [],\n \"list_of_dims\": [],\n \"out_files\": [],\n \"out_vrt\": [],\n \"product\": [],\n \"error\": [],\n }\n for task in executor.as_completed(\n func=ard_to_ts.gd_ard_to_ts,\n iterable=iter_list,\n fargs=(\n [\n str(config_file),\n ]\n ),\n ):\n track, list_of_dims, out_files, out_vrt, product, error = task.result()\n out_dict[\"track\"].append(track)\n out_dict[\"list_of_dims\"].append(list_of_dims)\n out_dict[\"out_files\"].append(out_files)\n out_dict[\"out_vrt\"].append(out_vrt)\n out_dict[\"product\"].append(product)\n out_dict[\"error\"].append(error)\n\n return pd.DataFrame.from_dict(out_dict)\n\n\ndef timeseries_to_timescan(inventory_df, config_file):\n\n # load ard parameters\n with open(config_file, \"r\") as file:\n 
config_dict = json.load(file)\n processing_dir = Path(config_dict[\"processing_dir\"])\n ard = config_dict[\"processing\"][\"single_ARD\"]\n ard_mt = config_dict[\"processing\"][\"time-series_ARD\"]\n ard_tscan = config_dict[\"processing\"][\"time-scan_ARD\"]\n\n # get the db scaling right\n to_db = ard[\"to_db\"]\n if ard[\"to_db\"] or ard_mt[\"to_db\"]:\n to_db = True\n\n dtype_conversion = True if ard_mt[\"dtype_output\"] != \"float32\" else False\n\n iter_list, vrt_iter_list = [], []\n for track in inventory_df.relativeorbit.unique():\n\n # get track directory\n track_dir = processing_dir / track\n # define and create Timescan directory\n timescan_dir = track_dir / \"Timescan\"\n timescan_dir.mkdir(parents=True, exist_ok=True)\n\n # loop thorugh each polarization\n for polar in [\"VV\", \"VH\", \"HH\", \"HV\"]:\n\n if (timescan_dir / f\".bs.{polar}.processed\").exists():\n logger.info(f\"Timescans for track {track} already processed.\")\n continue\n\n # get timeseries vrt\n time_series = track_dir / \"Timeseries\" / f\"Timeseries.bs.{polar}.vrt\"\n\n if not time_series.exists():\n continue\n\n # create a datelist for harmonics\n scene_list = list(track_dir.glob(f\"Timeseries/*bs.{polar}.tif\"))\n\n # create a datelist for harmonics calculation\n datelist = []\n for file in sorted(scene_list):\n datelist.append(file.name.split(\".\")[1])\n\n # define timescan prefix\n timescan_prefix = timescan_dir / f\"bs.{polar}\"\n\n iter_list.append(\n [\n time_series,\n timescan_prefix,\n ard_tscan[\"metrics\"],\n dtype_conversion,\n to_db,\n ard_tscan[\"remove_outliers\"],\n datelist,\n ]\n )\n\n vrt_iter_list.append(timescan_dir)\n\n # now we run with godale, which works also with 1 worker\n executor = Executor(\n executor=config_dict[\"executor_type\"], max_workers=config_dict[\"max_workers\"]\n )\n\n # run timescan creation\n out_dict = {\"track\": [], \"prefix\": [], \"metrics\": [], \"error\": []}\n for task in executor.as_completed(func=timescan.gd_mt_metrics, 
iterable=iter_list):\n burst, prefix, metrics, error = task.result()\n out_dict[\"track\"].append(burst)\n out_dict[\"prefix\"].append(prefix)\n out_dict[\"metrics\"].append(metrics)\n out_dict[\"error\"].append(error)\n\n timescan_df = pd.DataFrame.from_dict(out_dict)\n\n # run vrt creation\n for task in executor.as_completed(\n func=ras.create_tscan_vrt,\n iterable=vrt_iter_list,\n fargs=(\n [\n str(config_file),\n ]\n ),\n ):\n task.result()\n\n return timescan_df\n\n\ndef mosaic_timeseries(inventory_df, config_file):\n\n print(\" -----------------------------------\")\n logger.info(\"Mosaicking Time-series layers\")\n print(\" -----------------------------------\")\n\n # -------------------------------------\n # 1 load project config\n with open(config_file, \"r\") as ard_file:\n config_dict = json.load(ard_file)\n processing_dir = Path(config_dict[\"processing_dir\"])\n\n # create output folder\n ts_dir = processing_dir / \"Mosaic\" / \"Timeseries\"\n ts_dir.mkdir(parents=True, exist_ok=True)\n\n # loop through polarisations\n iter_list, vrt_iter_list = [], []\n for p in [\"VV\", \"VH\", \"HH\", \"HV\"]:\n\n tracks = inventory_df.relativeorbit.unique()\n nr_of_ts = len(\n list((processing_dir / f\"{tracks[0]}\" / \"Timeseries\").glob(f\"*.{p}.tif\"))\n )\n\n if not nr_of_ts >= 1:\n continue\n\n outfiles = []\n for i in range(1, nr_of_ts + 1):\n\n filelist = list(processing_dir.glob(f\"*/Timeseries/{i:02d}.*.{p}.tif\"))\n filelist = [str(file) for file in filelist if \"Mosaic\" not in str(file)]\n\n # create\n datelist = []\n for file in filelist:\n datelist.append(Path(file).name.split(\".\")[1])\n\n filelist = \" \".join(filelist)\n start, end = sorted(datelist)[0], sorted(datelist)[-1]\n\n if start == end:\n outfile = ts_dir / f\"{i:02d}.{start}.bs.{p}.tif\"\n else:\n outfile = ts_dir / f\"{i:02d}.{start}-{end}.bs.{p}.tif\"\n\n check_file = outfile.parent / f\".{outfile.stem}.processed\"\n\n outfiles.append(outfile)\n\n if check_file.exists():\n 
logger.info(f\"Mosaic layer {outfile.name} already processed.\")\n continue\n\n logger.info(f\"Mosaicking layer {outfile.name}.\")\n iter_list.append([filelist, outfile, config_file])\n\n vrt_iter_list.append([ts_dir, p, outfiles])\n\n # now we run with godale, which works also with 1 worker\n executor = Executor(\n executor=config_dict[\"executor_type\"], max_workers=config_dict[\"max_workers\"]\n )\n\n # run mosaicking\n for task in executor.as_completed(func=mosaic.gd_mosaic, iterable=iter_list):\n task.result()\n\n # run mosaicking vrts\n for task in executor.as_completed(\n func=mosaic.create_timeseries_mosaic_vrt, iterable=vrt_iter_list\n ):\n task.result()\n\n\ndef mosaic_timescan(config_file):\n\n # load ard parameters\n with open(config_file, \"r\") as ard_file:\n config_dict = json.load(ard_file)\n processing_dir = Path(config_dict[\"processing_dir\"])\n metrics = config_dict[\"processing\"][\"time-scan_ARD\"][\"metrics\"]\n\n if \"harmonics\" in metrics:\n metrics.remove(\"harmonics\")\n metrics.extend([\"amplitude\", \"phase\", \"residuals\"])\n\n if \"percentiles\" in metrics:\n metrics.remove(\"percentiles\")\n metrics.extend([\"p95\", \"p5\"])\n\n # create out directory of not existent\n tscan_dir = processing_dir / \"Mosaic\" / \"Timescan\"\n tscan_dir.mkdir(parents=True, exist_ok=True)\n\n # loop through all pontial proucts\n iter_list = []\n for polar, metric in itertools.product([\"VV\", \"HH\", \"VH\", \"HV\"], metrics):\n\n # create a list of files based on polarisation and metric\n filelist = list(processing_dir.glob(f\"*/Timescan/*bs.{polar}.{metric}.tif\"))\n\n # break loop if there are no files\n if not len(filelist) >= 2:\n continue\n\n # get number\n filelist = \" \".join([str(file) for file in filelist])\n outfile = tscan_dir / f\"bs.{polar}.{metric}.tif\"\n check_file = outfile.parent / f\".{outfile.stem}.processed\"\n\n if check_file.exists():\n logger.info(f\"Mosaic layer {outfile.name} already processed.\")\n continue\n\n 
iter_list.append([filelist, outfile, config_file])\n\n # now we run with godale, which works also with 1 worker\n executor = Executor(\n executor=config_dict[\"executor_type\"], max_workers=config_dict[\"max_workers\"]\n )\n\n # run mosaicking\n for task in executor.as_completed(func=mosaic.gd_mosaic, iterable=iter_list):\n task.result()\n\n ras.create_tscan_vrt(tscan_dir, config_file)\n"},"avg_line_length":{"kind":"number","value":31.3070866142,"string":"31.307087"},"max_line_length":{"kind":"number","value":88,"string":"88"},"alphanum_fraction":{"kind":"number","value":0.6247484909,"string":"0.624748"},"score":{"kind":"number","value":3.359375,"string":"3.359375"}}},{"rowIdx":943,"cells":{"hexsha":{"kind":"string","value":"9bc1651f5e75769617029e484b804f33c2d105f0"},"size":{"kind":"number","value":4748,"string":"4,748"},"ext":{"kind":"string","value":"js"},"lang":{"kind":"string","value":"JavaScript"},"max_stars_repo_path":{"kind":"string","value":"web/app/sw.js"},"max_stars_repo_name":{"kind":"string","value":"nicolasparada/nakama"},"max_stars_repo_head_hexsha":{"kind":"string","value":"8d5aa79aa5ab30d405354408eb60399a345a20e1"},"max_stars_repo_licenses":{"kind":"list like","value":["0BSD"],"string":"[\n \"0BSD\"\n]"},"max_stars_count":{"kind":"number","value":183,"string":"183"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2017-09-16T12:59:48.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2022-03-21T02:19:19.000Z"},"max_issues_repo_path":{"kind":"string","value":"web/app/sw.js"},"max_issues_repo_name":{"kind":"string","value":"arjundoye/nakama"},"max_issues_repo_head_hexsha":{"kind":"string","value":"31403b79476bf5176b4f18374cf87dbd67c85008"},"max_issues_repo_licenses":{"kind":"list like","value":["ISC"],"string":"[\n 
\"ISC\"\n]"},"max_issues_count":{"kind":"number","value":50,"string":"50"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2019-02-01T06:16:40.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2021-12-26T18:30:04.000Z"},"max_forks_repo_path":{"kind":"string","value":"web/app/sw.js"},"max_forks_repo_name":{"kind":"string","value":"nakamauwu/nakama"},"max_forks_repo_head_hexsha":{"kind":"string","value":"a0b481441d3400602b83c9f6681e6fb3f871ef05"},"max_forks_repo_licenses":{"kind":"list like","value":["0BSD"],"string":"[\n \"0BSD\"\n]"},"max_forks_count":{"kind":"number","value":28,"string":"28"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2018-01-15T16:44:09.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2022-03-10T21:09:51.000Z"},"content":{"kind":"string","value":"const OFFLINE_VERSION = 1\nconst CACHE_NAME = \"offline\"\nconst OFFLINE_URL = \"/offline.html\"\n\nself.addEventListener(\"install\", ev => {\n ev.waitUntil(cacheOfflinePage())\n self.skipWaiting()\n})\n\nasync function cacheOfflinePage() {\n const cache = await caches.open(CACHE_NAME)\n await cache.add(new Request(OFFLINE_URL, { cache: \"reload\" }))\n}\n\nself.addEventListener(\"activate\", ev => {\n ev.waitUntil(enableNavigationPreload())\n self.clients.claim()\n})\n\nasync function enableNavigationPreload() {\n if (\"navigationPreload\" in self.registration) {\n await self.registration.navigationPreload.enable()\n }\n}\n\nself.addEventListener(\"fetch\", ev => {\n if (ev.request.mode === \"navigate\") {\n ev.respondWith(networkWithOfflineNavigationFallback(ev))\n }\n})\n\nself.addEventListener(\"push\", ev => {\n if (!ev.data) {\n return\n }\n\n const n = ev.data.json()\n if (!n) {\n return\n }\n\n ev.waitUntil(showNotification(n))\n})\n\nself.addEventListener(\"notificationclick\", ev => {\n ev.notification.close()\n ev.waitUntil(openNotificationsPage(ev.notification.data))\n})\n\nasync function 
showNotification(n) {\n const title = notificationTitle(n)\n const body = notificationBody(n)\n return self.registration.showNotification(title, {\n body,\n tag: n.id,\n timestamp: n.issuedAt,\n data: n,\n icon: location.origin + \"/icons/logo-circle-512.png\",\n }).then(() => {\n if (\"setAppBadge\" in navigator) {\n return navigator.setAppBadge()\n }\n })\n}\n\nasync function openNotificationsPage(n) {\n return clients.matchAll({\n type: \"window\"\n }).then(clientList => {\n const pathname = notificationPathname(n)\n for (const client of clientList) {\n if (client.url === pathname && \"focus\" in client) {\n return client.focus()\n }\n }\n\n for (const client of clientList) {\n if (client.url === \"/notifications\" && \"focus\" in client) {\n return client.focus()\n }\n }\n\n for (const client of clientList) {\n if (\"focused\" in client && client.focused) {\n return client.navigate(pathname).then(client => \"focus\" in client ? client.focus() : client)\n }\n if (\"visibilityState\" in client && client.visibilityState === \"visible\") {\n return client.navigate(pathname).then(client => \"focus\" in client ? 
client.focus() : client)\n }\n }\n\n if (\"openWindow\" in clients) {\n return clients.openWindow(pathname)\n }\n }).then(client => client.postMessage({\n type: \"notificationclick\",\n detail: n,\n }).then(() => {\n if (\"clearAppBadge\" in navigator) {\n return navigator.clearAppBadge()\n }\n }))\n}\n\nfunction notificationPathname(n) {\n if (typeof n.postID === \"string\" && n.postID !== \"\") {\n return \"/posts/\" + encodeURIComponent(n.postID)\n }\n\n if (n.type === \"follow\") {\n return \"/@\" + encodeURIComponent(n.actors[0])\n }\n\n return \"/notifications\"\n}\n\nasync function networkWithOfflineNavigationFallback(ev) {\n try {\n const preloadResponse = await ev.preloadResponse\n if (preloadResponse) {\n return preloadResponse\n }\n\n const networkResponse = await fetch(ev.request)\n return networkResponse\n } catch (error) {\n const cache = await caches.open(CACHE_NAME)\n const cachedResponse = await cache.match(OFFLINE_URL)\n return cachedResponse\n }\n}\n\nfunction notificationTitle(n) {\n switch (n.type) {\n case \"follow\":\n return \"New follow\"\n case \"comment\":\n return \"New commented\"\n case \"post_mention\":\n return \"New post mention\"\n case \"comment_mention\":\n return \"New comment mention\"\n }\n return \"New notification\"\n}\n\nfunction notificationBody(n) {\n const getActors = () => {\n const aa = n.actors\n switch (aa.length) {\n case 0:\n return \"Someone\"\n case 1:\n return aa[0]\n case 2:\n return `${aa[0]} and ${aa[1]}`\n }\n\n return `${aa[0]} and ${aa.length - 1} others`\n }\n\n const getAction = () => {\n switch (n.type) {\n case \"follow\":\n return \"followed you\"\n case \"comment\":\n return \"commented in a post\"\n case \"post_mention\":\n return \"mentioned you in a post\"\n case \"comment_mention\":\n return \"mentioned you in a comment\"\n }\n return \"did something\"\n }\n\n return getActors() + \" \" + 
getAction()\n}\n"},"avg_line_length":{"kind":"number","value":26.6741573034,"string":"26.674157"},"max_line_length":{"kind":"number","value":108,"string":"108"},"alphanum_fraction":{"kind":"number","value":0.5671861837,"string":"0.567186"},"score":{"kind":"number","value":3.046875,"string":"3.046875"}}},{"rowIdx":944,"cells":{"hexsha":{"kind":"string","value":"5b1d05ff04a50d724df5ca369fe808fe7708e5a2"},"size":{"kind":"number","value":1293,"string":"1,293"},"ext":{"kind":"string","value":"kt"},"lang":{"kind":"string","value":"Kotlin"},"max_stars_repo_path":{"kind":"string","value":"klay-demo/src/main/kotlin/klay/tests/core/CanvasStressTest.kt"},"max_stars_repo_name":{"kind":"string","value":"cdietze/klay"},"max_stars_repo_head_hexsha":{"kind":"string","value":"72031aa267cd304a0612b31c871e2f5cf73d2c4c"},"max_stars_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_stars_count":{"kind":"number","value":4,"string":"4"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2018-09-30T06:36:13.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2019-03-26T02:37:25.000Z"},"max_issues_repo_path":{"kind":"string","value":"klay-demo/src/main/kotlin/klay/tests/core/CanvasStressTest.kt"},"max_issues_repo_name":{"kind":"string","value":"cdietze/klay"},"max_issues_repo_head_hexsha":{"kind":"string","value":"72031aa267cd304a0612b31c871e2f5cf73d2c4c"},"max_issues_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n 
\"Apache-2.0\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"klay-demo/src/main/kotlin/klay/tests/core/CanvasStressTest.kt"},"max_forks_repo_name":{"kind":"string","value":"cdietze/klay"},"max_forks_repo_head_hexsha":{"kind":"string","value":"72031aa267cd304a0612b31c871e2f5cf73d2c4c"},"max_forks_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"package klay.tests.core\n\nimport euklid.f.MathUtil\nimport klay.core.Clock\nimport klay.scene.CanvasLayer\nimport kotlin.math.cos\nimport kotlin.math.sin\n\nclass CanvasStressTest(game: TestsGame) : Test(game, \"Canvas Stress\", \"Animates a full-screen sized canvas, forcing a massive reupload of image data to \" + \"the GPU on every frame.\") {\n\n override fun init() {\n val clayer = CanvasLayer(game.graphics, game.graphics.viewSize)\n game.rootLayer.add(clayer)\n\n var noSegs = 30\n var direction = 1\n conns.add(game.update.connect { clock: Clock ->\n val canvas = clayer.begin()\n canvas.clear()\n canvas.setStrokeWidth(3f)\n canvas.setStrokeColor(0x88ff0000.toInt())\n\n noSegs += direction\n if (noSegs > 50) direction = -1\n if (noSegs < 20) direction = 1\n\n val r = 100f\n for (ii in 0..noSegs - 1) {\n val angle = 2 * MathUtil.PI * ii / noSegs\n val viewSize = game.plat.graphics.viewSize\n val x = r * cos(angle) + viewSize.width / 2\n val y = r * sin(angle) + viewSize.height / 2\n canvas.strokeCircle(x, y, 100f)\n }\n\n clayer.end() // reupload the image data\n })\n 
}\n}\n"},"avg_line_length":{"kind":"number","value":32.325,"string":"32.325"},"max_line_length":{"kind":"number","value":184,"string":"184"},"alphanum_fraction":{"kind":"number","value":0.5769528229,"string":"0.576953"},"score":{"kind":"number","value":3.234375,"string":"3.234375"}}},{"rowIdx":945,"cells":{"hexsha":{"kind":"string","value":"709bddbffae043056480c6a72ff06e06383b5294"},"size":{"kind":"number","value":1671,"string":"1,671"},"ext":{"kind":"string","value":"go"},"lang":{"kind":"string","value":"Go"},"max_stars_repo_path":{"kind":"string","value":"src/snippetgen/common/metadata/metadata_test.go"},"max_stars_repo_name":{"kind":"string","value":"cclauss/discovery-artifact-manager"},"max_stars_repo_head_hexsha":{"kind":"string","value":"9eb6bcef290ef87006758349c725d440fbfc85d6"},"max_stars_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_stars_count":{"kind":"number","value":38,"string":"38"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2017-07-20T17:54:08.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2022-02-20T02:16:31.000Z"},"max_issues_repo_path":{"kind":"string","value":"src/snippetgen/common/metadata/metadata_test.go"},"max_issues_repo_name":{"kind":"string","value":"cclauss/discovery-artifact-manager"},"max_issues_repo_head_hexsha":{"kind":"string","value":"9eb6bcef290ef87006758349c725d440fbfc85d6"},"max_issues_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n 
\"Apache-2.0\"\n]"},"max_issues_count":{"kind":"number","value":183,"string":"183"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2017-03-23T17:17:24.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2022-02-09T00:07:17.000Z"},"max_forks_repo_path":{"kind":"string","value":"src/snippetgen/common/metadata/metadata_test.go"},"max_forks_repo_name":{"kind":"string","value":"cclauss/discovery-artifact-manager"},"max_forks_repo_head_hexsha":{"kind":"string","value":"9eb6bcef290ef87006758349c725d440fbfc85d6"},"max_forks_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_forks_count":{"kind":"number","value":42,"string":"42"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2017-03-23T19:20:20.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2022-02-20T02:17:09.000Z"},"content":{"kind":"string","value":"package metadata\n\nimport \"testing\"\n\nfunc TestGetLanguage(t *testing.T) {\n\tfor _, l := range AllowedLanguages {\n\t\tif g, ok := GetLanguage(l.Name); !ok {\n\t\t\tt.Errorf(\"language defined but not found: %s\", l.Name)\n\t\t} else if g != l {\n\t\t\tt.Errorf(\"found wrong language, expected %v, found %v\", l, g)\n\t\t}\n\t}\n}\n\nfunc TestNoLanguage(t *testing.T) {\n\tlangs := [...]string{\"foobar language\"}\n\tfor _, l := range langs {\n\t\tif _, exist := GetLanguage(l); exist {\n\t\t\tt.Errorf(\"language found but should not exist: %s\", l)\n\t\t}\n\t}\n}\n\nfunc TestRequiredLanguages(t *testing.T) {\n\tfor _, l := range RequiredLanguages {\n\t\tif !l.Required {\n\t\t\tt.Errorf(\"language is required but not marked required: %s\", l.Name)\n\t\t}\n\t\tif g, ok := GetLanguage(l.Name); !ok {\n\t\t\tt.Errorf(\"language required but not defined: %s\", l.Name)\n\t\t} else if l != g {\n\t\t\tt.Errorf(\"required language different from the definition: %s\", l.Name)\n\t\t}\n\t}\n\n\tfor _, l := range AllowedLanguages {\n\t\tfound := 
false\n\t\tfor _, r := range RequiredLanguages {\n\t\t\tif l.Name == r.Name {\n\t\t\t\tfound = true\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\tif l.Required && !found {\n\t\t\tt.Errorf(\"language marked required but not in RequiredLanguages: %s\", l.Name)\n\t\t}\n\t}\n}\n\nfunc TestGetLanguageFromExt(t *testing.T) {\n\tfor _, l := range AllowedLanguages {\n\t\tif g, ok := GetLanguageFromExt(l.Ext); !ok {\n\t\t\tt.Errorf(\"cannot look up extension: %s\", l.Ext)\n\t\t} else if l != g {\n\t\t\tt.Errorf(\"language different from definition: %s\", l.Name)\n\t\t}\n\t}\n}\n\nfunc TestNoLanguageFromExt(t *testing.T) {\n\tlangs := [...]string{\"foo\", \"bar\"}\n\tfor _, l := range langs {\n\t\tif _, exist := GetLanguageFromExt(l); exist {\n\t\t\tt.Errorf(\"language found but should not exist: %s\", l)\n\t\t}\n\t}\n}\n"},"avg_line_length":{"kind":"number","value":24.5735294118,"string":"24.573529"},"max_line_length":{"kind":"number","value":80,"string":"80"},"alphanum_fraction":{"kind":"number","value":0.6301615799,"string":"0.630162"},"score":{"kind":"number","value":3.03125,"string":"3.03125"}}},{"rowIdx":946,"cells":{"hexsha":{"kind":"string","value":"a16097ae40d34fe1e6000e4d3e46862218e86dca"},"size":{"kind":"number","value":1405,"string":"1,405"},"ext":{"kind":"string","value":"go"},"lang":{"kind":"string","value":"Go"},"max_stars_repo_path":{"kind":"string","value":"2020/6/main.go"},"max_stars_repo_name":{"kind":"string","value":"bobcatfish/adventofcode2016"},"max_stars_repo_head_hexsha":{"kind":"string","value":"20f781f256f4d649544c1f6e28da0d73e3bbc9e3"},"max_stars_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n 
\"Apache-2.0\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"2020/6/main.go"},"max_issues_repo_name":{"kind":"string","value":"bobcatfish/adventofcode2016"},"max_issues_repo_head_hexsha":{"kind":"string","value":"20f781f256f4d649544c1f6e28da0d73e3bbc9e3"},"max_issues_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_issues_count":{"kind":"number","value":3,"string":"3"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2017-12-02T17:58:41.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2020-12-11T04:33:02.000Z"},"max_forks_repo_path":{"kind":"string","value":"2020/6/main.go"},"max_forks_repo_name":{"kind":"string","value":"bobcatfish/adventofcode2016"},"max_forks_repo_head_hexsha":{"kind":"string","value":"20f781f256f4d649544c1f6e28da0d73e3bbc9e3"},"max_forks_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"package main\n\nimport (\n\t\"fmt\"\n\t\"io/ioutil\"\n\t\"log\"\n\t\"strings\"\n)\n\nfunc load() ([][]string, error) {\n\tdata, err := ioutil.ReadFile(\"input.txt\")\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"couldn't read file: %v\", err)\n\t}\n\n\tpp := [][]string{}\n\tfor _, v := range strings.Split(string(data), \"\\n\\n\") {\n\t\tpp = append(pp, strings.Fields(v))\n\t}\n\treturn pp, nil\n}\n\nfunc getCounts1(a [][]string) []map[rune]struct{} {\n\tmm := []map[rune]struct{}{}\n\tfor _, ss := range a {\n\t\tm := map[rune]struct{}{}\n\t\tfor _, s := range ss {\n\t\t\tfor _, r := range s {\n\t\t\t\tm[r] = struct{}{}\n\t\t\t}\n\t\t}\n\t\tmm = append(mm, m)\n\t}\n\treturn 
mm\n}\n\nfunc getCounts2(a [][]string) []map[rune]struct{} {\n\tmm := []map[rune]struct{}{}\n\tfor _, ss := range a {\n\t\tm := map[rune]struct{}{}\n\t\tfor i, s := range ss {\n\t\t\tif i == 0 {\n\t\t\t\tfor _, r := range s {\n\t\t\t\t\tm[r] = struct{}{}\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tfor r, _ := range m {\n\t\t\t\t\tfound := false\n\t\t\t\t\tfor _, rr := range s {\n\t\t\t\t\t\tif r == rr {\n\t\t\t\t\t\t\tfound = true\n\t\t\t\t\t\t\tbreak\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t\tif !found {\n\t\t\t\t\t\tdelete(m, r)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tmm = append(mm, m)\n\t}\n\treturn mm\n}\n\nfunc sum(counts []map[rune]struct{}) int {\n\tc := 0\n\tfor _, m := range counts {\n\t\tc += len(m)\n\t}\n\treturn c\n}\n\nfunc main() {\n\ta, err := load()\n\tif err != nil {\n\t\tlog.Fatalf(\"Couldn't load nums from file: %v\", err)\n\t}\n\tcounts1 := getCounts1(a)\n\tfmt.Println(sum(counts1))\n\n\tcounts2 := getCounts2(a)\n\tfmt.Println(sum(counts2))\n}\n"},"avg_line_length":{"kind":"number","value":16.5294117647,"string":"16.529412"},"max_line_length":{"kind":"number","value":56,"string":"56"},"alphanum_fraction":{"kind":"number","value":0.5181494662,"string":"0.518149"},"score":{"kind":"number","value":3.296875,"string":"3.296875"}}},{"rowIdx":947,"cells":{"hexsha":{"kind":"string","value":"70adac2679061c3253e42a94836a83bce765d64c"},"size":{"kind":"number","value":1486,"string":"1,486"},"ext":{"kind":"string","value":"go"},"lang":{"kind":"string","value":"Go"},"max_stars_repo_path":{"kind":"string","value":"pkg/storage/marker.go"},"max_stars_repo_name":{"kind":"string","value":"asecurityteam/vpcflow-digesterd"},"max_stars_repo_head_hexsha":{"kind":"string","value":"7143c659e26b26e88fd04b04e03250b783d5c9ef"},"max_stars_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n 
\"Apache-2.0\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"pkg/storage/marker.go"},"max_issues_repo_name":{"kind":"string","value":"asecurityteam/vpcflow-digesterd"},"max_issues_repo_head_hexsha":{"kind":"string","value":"7143c659e26b26e88fd04b04e03250b783d5c9ef"},"max_issues_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_issues_count":{"kind":"number","value":6,"string":"6"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2019-05-17T20:31:27.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2021-02-01T22:21:42.000Z"},"max_forks_repo_path":{"kind":"string","value":"pkg/storage/marker.go"},"max_forks_repo_name":{"kind":"string","value":"asecurityteam/vpcflow-digesterd"},"max_forks_repo_head_hexsha":{"kind":"string","value":"7143c659e26b26e88fd04b04e03250b783d5c9ef"},"max_forks_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"package storage\n\nimport (\n\t\"bytes\"\n\t\"context\"\n\t\"sync\"\n\t\"time\"\n\n\t\"github.com/aws/aws-sdk-go/aws\"\n\t\"github.com/aws/aws-sdk-go/service/s3\"\n\t\"github.com/aws/aws-sdk-go/service/s3/s3iface\"\n\t\"github.com/aws/aws-sdk-go/service/s3/s3manager\"\n\t\"github.com/aws/aws-sdk-go/service/s3/s3manager/s3manageriface\"\n)\n\n// ProgressMarker is an implementation of Marker which allows for marking/unmarking of digests in progress\ntype ProgressMarker struct {\n\tBucket string\n\tClient s3iface.S3API\n\tuploader s3manageriface.UploaderAPI\n\tlock sync.Mutex\n\tnow func() time.Time\n}\n\n// Mark flags the digest identified by key as being \"in 
progress\"\nfunc (m *ProgressMarker) Mark(ctx context.Context, key string) error {\n\tm.initUploader()\n\tnow := m.now\n\tif now == nil {\n\t\tnow = time.Now\n\t}\n\t_, err := m.uploader.UploadWithContext(ctx, &s3manager.UploadInput{\n\t\tBucket: aws.String(m.Bucket),\n\t\tKey: aws.String(key + inProgressSuffix),\n\t\tBody: bytes.NewReader([]byte(now().Format(time.RFC3339Nano))),\n\t})\n\treturn err\n}\n\n// Unmark flags the digest identified by key as not being \"in progress\"\nfunc (m *ProgressMarker) Unmark(ctx context.Context, key string) error {\n\t_, err := m.Client.DeleteObjectWithContext(ctx, &s3.DeleteObjectInput{\n\t\tBucket: aws.String(m.Bucket),\n\t\tKey: aws.String(key + inProgressSuffix),\n\t})\n\treturn err\n}\n\nfunc (m *ProgressMarker) initUploader() {\n\tm.lock.Lock()\n\tdefer m.lock.Unlock()\n\tif m.uploader == nil {\n\t\tm.uploader = s3manager.NewUploaderWithClient(m.Client)\n\t}\n}\n"},"avg_line_length":{"kind":"number","value":26.5357142857,"string":"26.535714"},"max_line_length":{"kind":"number","value":106,"string":"106"},"alphanum_fraction":{"kind":"number","value":0.7200538358,"string":"0.720054"},"score":{"kind":"number","value":3,"string":"3"}}},{"rowIdx":948,"cells":{"hexsha":{"kind":"string","value":"b2ffff75cff848e9cc4d8a6143bf4d9bf43e64d3"},"size":{"kind":"number","value":5702,"string":"5,702"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"sapy_script/SAP.py"},"max_stars_repo_name":{"kind":"string","value":"fkfouri/sapy_script"},"max_stars_repo_head_hexsha":{"kind":"string","value":"476041288367e2098b955bc2377f442ce503e822"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_stars_count":{"kind":"number","value":3,"string":"3"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2018-12-03T15:51:54.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2020-11-20T01:05:39.000Z"},"max_issues_repo_path":{"kind":"string","value":"sapy_script/SAP.py"},"max_issues_repo_name":{"kind":"string","value":"whrocha/sapy_script"},"max_issues_repo_head_hexsha":{"kind":"string","value":"476041288367e2098b955bc2377f442ce503e822"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"sapy_script/SAP.py"},"max_forks_repo_name":{"kind":"string","value":"whrocha/sapy_script"},"max_forks_repo_head_hexsha":{"kind":"string","value":"476041288367e2098b955bc2377f442ce503e822"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"number","value":3,"string":"3"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2018-07-28T21:53:32.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2018-08-22T13:51:17.000Z"},"content":{"kind":"string","value":"from multiprocessing import Pool, Manager\nfrom time import sleep\nfrom wmi import WMI\nfrom win32com.client import GetObject\nfrom subprocess import Popen\nfrom collections import Iterable\nfrom tqdm import tqdm\nfrom os import getpid\nfrom sapy_script.Session import Session\n\n\nsession_process = None\nall_processes_id = []\n\n\ndef _on_init(sid, p_ids):\n p_ids.append(getpid())\n global session_process\n app = SAP.app()\n i = 0\n while True:\n con = app.Children(i)\n if con.Children(0).Info.systemsessionid == sid:\n session = con.Children(p_ids.index(getpid()))\n session_process = Session(session)\n break\n i = i + 1\n\n\ndef 
_task_executor(task):\n task['func'](task['data'])\n\n\nclass SAP:\n\n def __init__(self, max_sessions=16):\n self._con = None\n self._tasks = []\n self.max_sessions = max_sessions\n self.session = lambda i=0: Session(self._con.Children(i))\n\n @staticmethod\n def app():\n \"\"\"Open SAPGui\"\"\"\n wmi_obj = WMI()\n\n sap_exists = len(wmi_obj.Win32_Process(name='saplgpad.exe')) > 0\n\n if not sap_exists:\n\n Popen(['C:\\Program Files (x86)\\SAP\\FrontEnd\\SAPgui\\saplgpad.exe'])\n\n while True:\n try:\n #temp = GetObject(\"SAPGUI\").GetScriptingEngine\n #temp.Change(\"teste 456\", \"\", \"\", \"\", \"\", \".\\LocalSystem\", \"\")\n #objService.Change(,, , , , , \".\\LocalSystem\", \"\")\n return GetObject(\"SAPGUI\").GetScriptingEngine\n except:\n sleep(1)\n pass\n\n def connect(self, environment, client=None, user=None, password=None, lang=None, force=False):\n con = SAP.app().OpenConnection(environment, True)\n session = Session(con.Children(0))\n\n if client is not None:\n session.findById(\"wnd[0]/usr/txtRSYST-MANDT\").Text = client\n\n if user is not None:\n session.findById(\"wnd[0]/usr/txtRSYST-BNAME\").Text = user\n\n if password is not None:\n session.findById(\"wnd[0]/usr/pwdRSYST-BCODE\").Text = password\n\n if lang is not None:\n session.findById(\"wnd[0]/usr/txtRSYST-LANGU\").Text = lang\n\n session.findById(\"wnd[0]\").sendVKey(0)\n\n # Eventual tela de mudanca de senha\n change_pwd = False\n try:\n session.findById(\"wnd[1]/usr/pwdRSYST-NCODE\").text = ''\n session.findById(\"wnd[1]/usr/pwdRSYST-NCOD2\").text = ''\n change_pwd = True\n except:\n pass\n\n if change_pwd:\n raise ValueError('Please, set a new Password')\n\n # Derruba conexão SAP\n if force:\n try:\n session.findById(\"wnd[1]/usr/radMULTI_LOGON_OPT1\").select()\n session.findById(\"wnd[1]/tbar[0]/btn[0]\").press()\n except:\n pass\n else:\n try:\n session.findById(\"wnd[1]/usr/radMULTI_LOGON_OPT1\").select()\n session.findById(\"wnd[1]\").sendVKey(12)\n return False\n except:\n pass\n\n 
# Teste da Conexao\n if session.is_connected():\n self._con = con\n return True\n\n self._con = None\n return False\n\n @property\n def connected(self):\n return self.session().is_connected()\n\n @staticmethod\n def session():\n global session_process\n return session_process\n\n def sid(self):\n return self.session().Info.systemsessionid\n\n def logout(self):\n session = self.session()\n session.findById(\"wnd[0]/tbar[0]/okcd\").text = \"/nex\"\n session.findById(\"wnd[0]\").sendVKey(0)\n del session\n self._con = None\n\n @property\n def number_of_sessions(self):\n return 0 if self._con is None else len(self._con.Children)\n\n @number_of_sessions.setter\n def number_of_sessions(self, value):\n size = self.number_of_sessions\n if size == 0:\n return\n\n value = min(max(int(value), 1), self.max_sessions)\n minus = value < size\n arr = list(range(size, value))\n arr.extend(reversed(range(value, size)))\n\n for i in arr:\n if minus:\n session = self.session(i)\n session.findById(\"wnd[0]/tbar[0]/okcd\").text = \"/i\"\n session.findById(\"wnd[0]\").sendVKey(0)\n else:\n self.session().createSession()\n sleep(0.5)\n\n def clear_tasks(self):\n self._tasks = []\n\n def add_task(self, func, data):\n for dt in data:\n self._tasks.append({'func': func, 'data': dt})\n\n def execute_tasks(self, resize_sessions=False):\n total = len(self._tasks)\n if total == 0:\n return\n\n if resize_sessions:\n self.number_of_sessions = total\n\n size = self.number_of_sessions\n\n if size == 0:\n return\n sess_manager = Manager().list([])\n\n pool = Pool(processes=self.number_of_sessions, initializer=_on_init, initargs=(self.sid(), sess_manager))\n response = list(tqdm(pool.imap_unordered(_task_executor, self._tasks)))\n pool.close()\n pool.join()\n return list(response)\n\n def execute_function(self, func, data, resize_sessions=False):\n if not isinstance(data, Iterable):\n data = [data]\n self.clear_tasks()\n self.add_task(func=func, data=data)\n response = 
self.execute_tasks(resize_sessions=resize_sessions)\n self.clear_tasks()\n return response\n\n @staticmethod\n def multi_arguments(func):\n def convert_args(pr):\n return func(**pr)\n\n return convert_args\n"},"avg_line_length":{"kind":"number","value":28.368159204,"string":"28.368159"},"max_line_length":{"kind":"number","value":113,"string":"113"},"alphanum_fraction":{"kind":"number","value":0.5738337425,"string":"0.573834"},"score":{"kind":"number","value":3.3125,"string":"3.3125"}}},{"rowIdx":949,"cells":{"hexsha":{"kind":"string","value":"bcbdeed076feb6d35baafa8510638704f681698f"},"size":{"kind":"number","value":1748,"string":"1,748"},"ext":{"kind":"string","value":"js"},"lang":{"kind":"string","value":"JavaScript"},"max_stars_repo_path":{"kind":"string","value":"lib/stateTransition/StateTransitionFactory.js"},"max_stars_repo_name":{"kind":"string","value":"jawid-h/js-dpp"},"max_stars_repo_head_hexsha":{"kind":"string","value":"47ae40331054a80569a4fe50ce7821f9636c2377"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"lib/stateTransition/StateTransitionFactory.js"},"max_issues_repo_name":{"kind":"string","value":"jawid-h/js-dpp"},"max_issues_repo_head_hexsha":{"kind":"string","value":"47ae40331054a80569a4fe50ce7821f9636c2377"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"lib/stateTransition/StateTransitionFactory.js"},"max_forks_repo_name":{"kind":"string","value":"jawid-h/js-dpp"},"max_forks_repo_head_hexsha":{"kind":"string","value":"47ae40331054a80569a4fe50ce7821f9636c2377"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"const { decode } = require('../util/serializer');\n\nconst InvalidStateTransitionError = require('./errors/InvalidStateTransitionError');\n\nclass StateTransitionFactory {\n /**\n * @param {validateStateTransitionStructure} validateStateTransitionStructure\n * @param {createStateTransition} createStateTransition\n */\n constructor(validateStateTransitionStructure, createStateTransition) {\n this.validateStateTransitionStructure = validateStateTransitionStructure;\n this.createStateTransition = createStateTransition;\n }\n\n /**\n * Create State Transition from plain object\n *\n * @param {RawDataContractStateTransition|RawDocumentsStateTransition} rawStateTransition\n * @param {Object} options\n * @param {boolean} [options.skipValidation=false]\n * @return {DataContractStateTransition|DocumentsStateTransition}\n */\n async createFromObject(rawStateTransition, options = {}) {\n const opts = Object.assign({ skipValidation: false }, options);\n\n if (!opts.skipValidation) {\n const result = await this.validateStateTransitionStructure(rawStateTransition);\n\n if (!result.isValid()) {\n throw new InvalidStateTransitionError(result.getErrors(), rawStateTransition);\n }\n }\n\n return this.createStateTransition(rawStateTransition);\n }\n\n /**\n * Create State Transition from 
string/buffer\n *\n * @param {Buffer|string} payload\n * @param {Object} options\n * @param {boolean} [options.skipValidation=false]\n * @return {DataContractStateTransition|DocumentsStateTransition}\n */\n async createFromSerialized(payload, options = { }) {\n const rawStateTransition = decode(payload);\n\n return this.createFromObject(rawStateTransition, options);\n }\n}\n\nmodule.exports = StateTransitionFactory;\n"},"avg_line_length":{"kind":"number","value":32.9811320755,"string":"32.981132"},"max_line_length":{"kind":"number","value":91,"string":"91"},"alphanum_fraction":{"kind":"number","value":0.7459954233,"string":"0.745995"},"score":{"kind":"number","value":3.28125,"string":"3.28125"}}},{"rowIdx":950,"cells":{"hexsha":{"kind":"string","value":"37fc6856bfcabca29c5192ab7cc313e4a14ce07e"},"size":{"kind":"number","value":5872,"string":"5,872"},"ext":{"kind":"string","value":"swift"},"lang":{"kind":"string","value":"Swift"},"max_stars_repo_path":{"kind":"string","value":"VGLabel/Classes/VGLabelComponent.swift"},"max_stars_repo_name":{"kind":"string","value":"JJson/VGLabel"},"max_stars_repo_head_hexsha":{"kind":"string","value":"4511f8f01189c4ad96f0b9423a15845f49e849d0"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":1,"string":"1"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2017-11-15T07:04:54.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2017-11-15T07:04:54.000Z"},"max_issues_repo_path":{"kind":"string","value":"VGLabel/Classes/VGLabelComponent.swift"},"max_issues_repo_name":{"kind":"string","value":"JJson/VGLabel"},"max_issues_repo_head_hexsha":{"kind":"string","value":"4511f8f01189c4ad96f0b9423a15845f49e849d0"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"VGLabel/Classes/VGLabelComponent.swift"},"max_forks_repo_name":{"kind":"string","value":"JJson/VGLabel"},"max_forks_repo_head_hexsha":{"kind":"string","value":"4511f8f01189c4ad96f0b9423a15845f49e849d0"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"number","value":1,"string":"1"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2022-03-24T08:38:55.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2022-03-24T08:38:55.000Z"},"content":{"kind":"string","value":"//\n// VGLabelComponent.swift\n// VGLabel\n//\n// Created by Vein on 2017/11/7.\n// Copyright © 2017年 Vein. All rights reserved.\n//\n\nimport Foundation\n\npublic class VGLabelComponent: NSObject {\n internal var componentIndex: Int = 0\n internal var text: String = \"\"\n internal var tagLabel: String?\n internal var attributes: [String: String]?\n internal var position: Int = 0\n \n class func compomemt(_ text: String, tag: String, attributes: [String: String]) -> VGLabelComponent {\n return VGLabelComponent(text, tag: tag, attributes: attributes)\n }\n \n init(_ text: String, tag: String, attributes: [String: String]) {\n self.text = text\n self.attributes = attributes\n tagLabel = tag\n }\n \n class func compomemt(_ tag: String, position: Int, attributes: [String: String]) -> VGLabelComponent {\n return VGLabelComponent(tag, position: position, attributes: attributes)\n }\n \n init(_ tag: String, position: Int, attributes: [String: String]) {\n self.attributes = attributes\n self.position = position\n tagLabel = tag\n }\n \n override public var description : String {\n var desc = \"text: \\(text), position: \\(position)\"\n if let tag = tagLabel {\n desc += \", tag: \\(tag)\"\n }\n 
\n if let att = attributes {\n desc += \", attributes: \\(att)\"\n }\n \n return desc\n }\n}\n\npublic class VGLabelExtractedComponent: NSObject {\n internal var textComponents: [VGLabelComponent]?\n internal var plainText: String?\n \n class func labelExtractedComponent(_ textComponents: [VGLabelComponent], plainText: String) -> VGLabelExtractedComponent {\n let extractedComponent = VGLabelExtractedComponent()\n extractedComponent.textComponents = textComponents\n extractedComponent.plainText = plainText\n return extractedComponent\n }\n \n class func extractTextStyle(_ data: String, paragraphReplacement: String) -> VGLabelExtractedComponent {\n var text: NSString? = nil\n var tag: String? = nil\n var styleData = data\n \n var components = [VGLabelComponent]()\n var lastPosition = 0\n let scanner = Scanner(string: data)\n \n while !scanner.isAtEnd {\n scanner.scanUpTo(\"<\", into: nil)\n scanner.scanUpTo(\">\", into: &text)\n \n let delimiter = String(format: \"%@>\", text!)\n let position = (styleData as NSString).range(of: delimiter).location\n \n if position != NSNotFound {\n if delimiter.range(of: \"\")\n }\n \n if text?.range(of: \" 0 {\n let key = pair[0].lowercased()\n \n if pair.count >= 2 {\n // Trim \" charactere\n var value = (pair[1...pair.count - 1]).joined(separator: \"=\")\n value = (value as NSString).replacingOccurrences(of: \"\\\"\", with: \"\", options: .literal, range: NSRange(location: 0, length: 1))\n value = (value as NSString).replacingOccurrences(of: \"\\\"\", with: \"\", options: .literal, range: NSRange(location: value.count-1, length: 1))\n \n attributes[key] = value\n } else if pair.count == 1 {\n attributes[key] = key\n }\n }\n }\n let component = VGLabelComponent.compomemt(\"\", tag: tag!, attributes: attributes)\n component.position = position\n components.append(component)\n }\n lastPosition = position\n }\n }\n return VGLabelExtractedComponent.labelExtractedComponent(components, plainText: styleData)\n 
}\n}\n"},"avg_line_length":{"kind":"number","value":42.8613138686,"string":"42.861314"},"max_line_length":{"kind":"number","value":171,"string":"171"},"alphanum_fraction":{"kind":"number","value":0.5357629428,"string":"0.535763"},"score":{"kind":"number","value":3.03125,"string":"3.03125"}}},{"rowIdx":951,"cells":{"hexsha":{"kind":"string","value":"f2cbc37b94c15a4b34d5f11c14e4a7235926cef3"},"size":{"kind":"number","value":2355,"string":"2,355"},"ext":{"kind":"string","value":"lua"},"lang":{"kind":"string","value":"Lua"},"max_stars_repo_path":{"kind":"string","value":"src/cards/theft.lua"},"max_stars_repo_name":{"kind":"string","value":"Barocena/unotfm"},"max_stars_repo_head_hexsha":{"kind":"string","value":"cf0e59eb08a04818a8bd3d838b04b06378aa6fb6"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":4,"string":"4"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2020-07-08T20:04:29.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2022-02-13T14:19:14.000Z"},"max_issues_repo_path":{"kind":"string","value":"src/cards/theft.lua"},"max_issues_repo_name":{"kind":"string","value":"Barocena/unotfm"},"max_issues_repo_head_hexsha":{"kind":"string","value":"cf0e59eb08a04818a8bd3d838b04b06378aa6fb6"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"src/cards/theft.lua"},"max_forks_repo_name":{"kind":"string","value":"Barocena/unotfm"},"max_forks_repo_head_hexsha":{"kind":"string","value":"cf0e59eb08a04818a8bd3d838b04b06378aa6fb6"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_forks_count":{"kind":"number","value":4,"string":"4"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2021-07-22T05:04:43.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2022-01-21T20:13:16.000Z"},"content":{"kind":"string","value":"function theft(n)\n\tif ROUND.gameMode.hell and ROUND.accumulated then\n\t\tROUND.accumulated = nil\n\t\taddBlockPoint(ROUND.chair[n].owner)\n\tend\n\tlocal image = {tfm.exec.addImage(IMG.misc.genericLayer, \"!1000\", 0, 0, ROUND.chair[n].owner)}\n\tlocal option = {}\n\tfor i, v in pairs(ROUND.chair) do\n\t\tif (v.mode ~= \"DELETED\") and i ~= n then\n\t\t\ttable.insert(image, tfm.exec.addImage(IMG.misc.target, \"!1000\", v.x-50, 30))\n\t\t\toption[i] = true\n\t\tend\n\tend\n\tROUND.chair[n].action = {\n\t\tname = \"THEFT\",\n\t\timg = image,\n\t\top = option,\n\t\tfunc = makeTheft\n\t}\n\tresetTimer()\n\tupdateTurnTimer()\nend\n\nfunction makeTheft(n, target)\n\tif target then\n\t\tlocal temp = {}\n\t\tif n ~= target and ROUND.chair[target].mode ~= \"DELETED\" then\n\t\t\tif ROUND.chair[n].action then\n\t\t\t\tfor i, v in pairs(ROUND.chair[n].action.img) do\n\t\t\t\t\ttfm.exec.removeImage(v)\n\t\t\t\tend\n\t\t\tend\n\t\t\tROUND.chair[n].action = false\n\t\t\t--\n\t\t\tlocal found = {}\n\t\t\tfor i, v in pairs(ROUND.chair[target].hand) do\n\t\t\t\tif v[1] == \"black\" then\n\t\t\t\t\ttable.insert(found, i)\n\t\t\t\tend\n\t\t\tend\n\t\t\t\n\t\t\tif #found > 0 then\n\t\t\t\tlocal rand = found[math.random(#found)]\n\t\t\t\ttemp = table.remove(ROUND.chair[target].hand, rand)\n\t\t\t\tdiscardEffect(target, rand)\n\t\t\t\ttable.insert(ROUND.chair[n].hand, temp)\n\t\t\t\t\n\t\t\t\tmissCard(n, temp, 2000)\n\t\t\t\texplosion(5, ROUND.chair[n].x, 100, 5, 10)\n\t\t\t\tsortHand(ROUND.chair[n].hand)\n\t\t\t\tROUND.chair[n].uno = nil\n\t\t\t\tupdateHand(n)\n\t\t\t\tupdateHand(target)\n\t\t\t\tif #ROUND.chair[target].hand == 1 then\n\t\t\t\t\tROUND.chair[target].uno = 
\"uno\"\n\t\t\t\tend\n\t\t\t\tif isIlluminati(n) then\n\t\t\t\t\tunlockChair(ROUND.chair[n].owner, \"illuminati\")\n\t\t\t\tend\n\t\t\t\tif temp[2] == \"draw4\" then\n\t\t\t\t\tif ROUND.chair[n].theft then\n\t\t\t\t\t\tunlockChair(ROUND.chair[n].owner, \"gaga\")\n\t\t\t\t\telse\n\t\t\t\t\t\tROUND.chair[n].theft = true\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\t\tupdateScore(n)\n\t\t\t\tupdateScore(target)\n\t\t\t\tshowCardsGainned(n, 1)\n\t\t\t\tshowCardsGainned(target, -1)\n\t\t\tend\n\t\t\t--\n\t\t\t\n\t\t\tif #ROUND.chair[target].hand == 0 then\n\t\t\t\tlocal name = ROUND.chair[target].owner\n\t\t\t\tendGame(ROUND.chair[target].owner)\n\t\t\t\tunlockChair(target, \"gift\")\n\t\t\telse\n\t\t\t\tpassTurn()\n\t\t\t\tROUND.chair[n].confuse = false\n\t\t\t\tbatataTimer(n)\n\t\t\t\tupdateShadow(n)\n\t\t\tend\n\t\tend\n\telse\n\t\tif ROUND.chair[n].action then\n\t\t\tfor i, v in pairs(ROUND.chair[n].action.img) do\n\t\t\t\ttfm.exec.removeImage(v)\n\t\t\tend\n\t\t\tROUND.chair[n].action = false\n\t\tend\n\t\tpassTurn()\n\t\tbatataTimer(n)\n\t\tupdateShadow(n)\n\tend\nend"},"avg_line_length":{"kind":"number","value":24.53125,"string":"24.53125"},"max_line_length":{"kind":"number","value":94,"string":"94"},"alphanum_fraction":{"kind":"number","value":0.638641189,"string":"0.638641"},"score":{"kind":"number","value":3.109375,"string":"3.109375"}}},{"rowIdx":952,"cells":{"hexsha":{"kind":"string","value":"3ab55339a3a98a636c9974ba92fd90088eaec34e"},"size":{"kind":"number","value":1852,"string":"1,852"},"ext":{"kind":"string","value":"sql"},"lang":{"kind":"string","value":"SQL"},"max_stars_repo_path":{"kind":"string","value":"oracle/ora/awr/awrload.sql"},"max_stars_repo_name":{"kind":"string","value":"hyee/dbcli"},"max_stars_repo_head_hexsha":{"kind":"string","value":"a39fdffdc5a15b9a6e17dc8e6e852003a8dedf0d"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_stars_count":{"kind":"number","value":44,"string":"44"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2015-05-07T16:11:03.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2021-09-19T08:59:20.000Z"},"max_issues_repo_path":{"kind":"string","value":"oracle/ora/awr/awrload.sql"},"max_issues_repo_name":{"kind":"string","value":"hyee/dbcli"},"max_issues_repo_head_hexsha":{"kind":"string","value":"a39fdffdc5a15b9a6e17dc8e6e852003a8dedf0d"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"number","value":8,"string":"8"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2015-05-08T03:38:03.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2020-05-22T11:00:47.000Z"},"max_forks_repo_path":{"kind":"string","value":"oracle/ora/awr/awrload.sql"},"max_forks_repo_name":{"kind":"string","value":"hyee/dbcli"},"max_forks_repo_head_hexsha":{"kind":"string","value":"a39fdffdc5a15b9a6e17dc8e6e852003a8dedf0d"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"number","value":24,"string":"24"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2015-05-07T16:17:41.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2022-01-02T13:10:29.000Z"},"content":{"kind":"string","value":"/*[[Import AWR repository dump. 
Usage: @@NAME []\n --[[\n @ARGS: 2\n --]]\n]]*/\nSET SQLTIMEOUT 7200\n\nDECLARE\n dir VARCHAR2(128) := :V1;\n file VARCHAR2(512) := :V2;\n did INT := :V3;\n root VARCHAR2(512);\n dump BFILE;\n len NUMBER;\n stage VARCHAR2(30) := 'DBCLI_AWR';\nBEGIN\n SELECT MAX(directory_name), MAX(directory_path)\n INTO dir, root\n FROM ALL_DIRECTORIES\n WHERE upper(directory_name) = upper(dir);\n IF dir IS NULL THEN\n raise_application_error(-20001, 'Cannot access directory: ' || :V1);\n END IF;\n\n IF NOT regexp_like(root, '[\\\\/]$') THEN\n root := root || CASE WHEN root LIKE '%/%' THEN '/' ELSE '\\' END;\n END IF;\n\n dump := bfilename(dir, file||'.dmp');\n BEGIN \n dbms_lob.fileopen(dump);\n len := dbms_lob.getlength(dump);\n dbms_lob.fileclose(dump);\n EXCEPTION WHEN OTHERS THEN \n raise_application_error(-20001, 'Cannot access file: ' || root || file || '.dmp');\n END;\n\n $IF DBMS_DB_VERSION.VERSION>18 $THEN\n sys.dbms_workload_repository.awr_imp(dmpfile => file, dmpdir => dir, new_dbid => did);\n $ELSE\n BEGIN\n stage := CASE sys_context('userenv', 'con_name') WHEN 'CDB$ROOT' THEN 'C##' END || stage;\n EXCEPTION WHEN OTHERS NULL;\n END;\n $IF DBMS_DB_VERSION.VERSION>17 $THEN\n sys.dbms_workload_repository.load(schname => stage, dmpfile => file, dmpdir => dir, new_dbid => did);\n $ELSE\n dbms_swrf_internal.awr_load(schname => stage,dmpfile => file, dmpdir => dir);\n dbms_swrf_internal.move_to_awr(schname => stage, new_dbid => did);\n dbms_swrf_internal.clear_awr_dbid;\n $END\n $END\n dbms_output.put_line('AWR repository is imported from ' || root || file || 
'.dmp');\nEND;\n/"},"avg_line_length":{"kind":"number","value":33.6727272727,"string":"33.672727"},"max_line_length":{"kind":"number","value":113,"string":"113"},"alphanum_fraction":{"kind":"number","value":0.5998920086,"string":"0.599892"},"score":{"kind":"number","value":3.015625,"string":"3.015625"}}},{"rowIdx":953,"cells":{"hexsha":{"kind":"string","value":"28606435e1e5e3f888988d0dc49fb37281a76692"},"size":{"kind":"number","value":9578,"string":"9,578"},"ext":{"kind":"string","value":"lua"},"lang":{"kind":"string","value":"Lua"},"max_stars_repo_path":{"kind":"string","value":"modules/libraries/backend/pulseaudio/init.lua"},"max_stars_repo_name":{"kind":"string","value":"SkyyySi/norsome2"},"max_stars_repo_head_hexsha":{"kind":"string","value":"74e5977868230ddc9185f1d27a3d849440e9fb15"},"max_stars_repo_licenses":{"kind":"list like","value":["Unlicense"],"string":"[\n \"Unlicense\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"modules/libraries/backend/pulseaudio/init.lua"},"max_issues_repo_name":{"kind":"string","value":"SkyyySi/norsome2"},"max_issues_repo_head_hexsha":{"kind":"string","value":"74e5977868230ddc9185f1d27a3d849440e9fb15"},"max_issues_repo_licenses":{"kind":"list like","value":["Unlicense"],"string":"[\n \"Unlicense\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"modules/libraries/backend/pulseaudio/init.lua"},"max_forks_repo_name":{"kind":"string","value":"SkyyySi/norsome2"},"max_forks_repo_head_hexsha":{"kind":"string","value":"74e5977868230ddc9185f1d27a3d849440e9fb15"},"max_forks_repo_licenses":{"kind":"list like","value":["Unlicense"],"string":"[\n 
\"Unlicense\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"#!/usr/bin/env lua5.3\nlocal awful = require(\"awful\")\nlocal wibox = require(\"wibox\")\nlocal gears = require(\"gears\")\nlocal naughty = require(\"naughty\")\n\n-- Define the base module\nlocal pulseaudio = {\n\t-- Create a new instance\n\tnew = function(self, selected_cli)\n\t\tlocal object = {\n\t\t\t-- There are multiple pulseaudio clis available, see below.\n\t\t\tselected_cli = selected_cli or \"pamixer\"\n\t\t}\n\t\tsetmetatable(object, self)\n\t\tself.__index = self\n\t\treturn object\n\tend\n}\n\n-- Signals used by this module:\n-- >\t\"pulseaudio::get_volume\": used for widgets to be notified when\n-- | \tthe volume was changed by another part of the code.\n\n-- Commands used to call the specific cli tools used to mamange pulseaudio.\n-- By default, this is configured for pamixer, but on Debian, you may want to\n-- override this with pulsemixer.\npulseaudio.pamixer = {}\npulseaudio.pulsemixer = {}\n\n--------------------------------------------------\n--- Get volume ---\n--------------------------------------------------\n-- Get the current playback volume. Callback must be nil or a function accepting\n-- a number value (the volume in percent). Note that this function is not\n-- intended to be called directly. 
Use the \"pulseaudio::get_volume\"-signal instead.\nfunction pulseaudio.pamixer.get_volume(callback)\n\tawful.spawn.easy_async({\"pamixer\", \"--get-volume\"}, function(volume)\n\t\tvolume = tonumber(volume)\n\n\t\tif callback then\n\t\t\tcallback(volume)\n\t\tend\n\n\t\tawesome.emit_signal(\"pulseaudio::get_volume\", volume)\n\tend)\nend\n\n-- Run it automatically in a loop.\npulseaudio.pamixer.volume_updater = gears.timer {\n\ttimeout = 0.3,\n\tcall_now = true,\n\tautostart = true,\n\tcallback = function()\n\t\tpulseaudio.pamixer.get_volume()\n\tend,\n}\n\n-- Signal that the timer is alreay running.\nawesome.emit_signal(\"pulseaudio::volume_updater_running\", true)\n\n-- Prevent multiple instances to needlessly run at the same time.\nawesome.connect_signal(\"pulseaudio::volume_updater_running\", function(status)\n\tif status then\n\t\tnotify(\"Stopped already running update timer!\")\n\t\tpulseaudio.pamixer.volume_updater:stop()\n\tend\nend)\n\n--------------------------------------------------\n--- Set volume ---\n--------------------------------------------------\n-- Set the current playback volume.\nfunction pulseaudio.pamixer.set_volume(volume)\n\tawful.spawn({\"pamixer\", \"--set-volume\", tostring(volume)})\nend\n\n-- Connect it to a signal.\nawesome.connect_signal(\"pulseaudio::set_volume\", function(volume)\n\tpulseaudio.pamixer.set_volume(volume)\nend)\n\n--------------------------------------------------\n--- Increment volume ---\n--------------------------------------------------\n-- Increment the volume by n or 1\nfunction pulseaudio.pamixer.increase_volume(volume)\n\tif not volume then volume = 1 end\n\tawful.spawn({\"pamixer\", \"--increase\", tostring(volume)})\nend\n\n-- Connect it to a signal.\nawesome.connect_signal(\"pulseaudio::increase_volume\", function(volume)\n\tpulseaudio.pamixer.increase_volume(volume)\nend)\n\n--------------------------------------------------\n--- Decrement volume 
---\n--------------------------------------------------\n-- Decrement the volume by n or 1\nfunction pulseaudio.pamixer.decrease_volume(volume)\n\tif not volume then volume = 1 end\n\tawful.spawn({\"pamixer\", \"--decrease\", tostring(volume)})\nend\n\n-- Connect it to a signal.\nawesome.connect_signal(\"pulseaudio::decrease_volume\", function(volume)\n\tpulseaudio.pamixer.decrease_volume(volume)\nend)\n\n--------------------------------------------------\n--- Mute status ---\n--------------------------------------------------\n-- Get the current mute status. true = muted, false = unmuted.\n-- Callback must be nil or a function accepting a boolean value.\n-- Note that this function is not intended to be called directly.\n-- Use the \"pulseaudio::get_mute\"-signal instead.\nfunction pulseaudio.pamixer.get_mute(callback)\n\tawful.spawn.easy_async({\"pamixer\", \"--get-mute\"}, function(status)\n\t\t-- cli commands always return a string and lua does not have a\n\t\t-- `tobool()`-builtin.\n\t\tif status == \"true\" then\n\t\t\tstatus = true\n\t\telse\n\t\t\tstatus = false\n\t\tend\n\n\t\tif callback then\n\t\t\tcallback(tonumber(status))\n\t\tend\n\n\t\tawesome.emit_signal(\"pulseaudio::get_mute\", status)\n\tend)\nend\n\n--------------------------------------------------\n--- Mute ---\n--------------------------------------------------\n-- Mute the volume\nfunction pulseaudio.pamixer.mute()\n\tawful.spawn({\"pamixer\", \"--mute\"})\nend\n\n-- Connect it to a signal.\nawesome.connect_signal(\"pulseaudio::mute\", function()\n\tpulseaudio.pamixer.mute()\nend)\n\n--------------------------------------------------\n--- Unmute ---\n--------------------------------------------------\n-- Unmute the volume\nfunction pulseaudio.pamixer.unmute()\n\tawful.spawn({\"pamixer\", \"--mute\"})\nend\n\n-- Connect it to a signal.\nawesome.connect_signal(\"pulseaudio::unmute\", function()\n\tpulseaudio.pamixer.mute()\nend)\n\n--------------------------------------------------\n--- Toggle 
mute ---\n--------------------------------------------------\n-- Unmute the volume\nfunction pulseaudio.pamixer.unmute()\n\tawful.spawn({\"pamixer\", \"--toggle-mute\"})\nend\n\n-- Connect it to a signal.\nawesome.connect_signal(\"pulseaudio::toggle_mute\", function()\n\tpulseaudio.pamixer.mute()\nend)\n\n--------------------------------------------------\n--- Widgets ---\n--------------------------------------------------\n-- A few basic widgets for managing volume. Can be used as templates for\n-- creating your own, more advanced (or nicer looking) ones.\npulseaudio.widget = {}\n\n--------------------------------------------------\n--- Slider ---\n--------------------------------------------------\n-- A basic volume slider based on wibox.widget.slider\n-- The first argument must be arguments for wibox.widget.slider\n-- as a table or nil.\nfunction pulseaudio.widget.volume_slider(args) --> table\n\t-- Set a few basic default options if none are given.\n\tif not args then\n\t\targs = {\n\t\t\tminimum = 0,\n\t\t\tvalue = 50,\n\t\t\tmaximum = 100,\n\t\t\tforced_width = 200,\n\t\t}\n\tend\n\n\t-- Create a new slider widget.\n\tlocal slider = wibox.widget.slider(args)\n\n\t-- Connect the widget to the volume signal.\n\tawesome.connect_signal(\"pulseaudio::get_volume\", function(volume)\n\t\tslider:set_value(volume)\n\tend)\n\n\t-- Allow to set the volume using the mouse wheel.\n\tslider:connect_signal(\"button::press\", function(_,_,_,button)\n\t\tif button == 4 then\n\t\t\tawesome.emit_signal(\"pulseaudio::increase_volume\", 5)\n\t\telseif button == 5 then\n\t\t\tawesome.emit_signal(\"pulseaudio::decrease_volume\", 5)\n\t\tend\n\tend)\n\n\t-- Change the system volume when the slider value changes.\n\tslider:connect_signal(\"property::value\", function()\n\t\tawesome.emit_signal(\"pulseaudio::set_volume\", slider.value)\n\tend)\n\n\treturn slider\nend\n\n--------------------------------------------------\n--- Text label 
---\n--------------------------------------------------\nfunction pulseaudio.widget.volume_label(args) --> table\n\t-- Create a new slider widget.\n\tlocal label = wibox.widget.textbox(args)\n\n\t-- Connect the widget to the volume signal.\n\tawesome.connect_signal(\"pulseaudio::get_volume\", function(volume)\n\t\tlabel:set_text(volume)\n\tend)\n\n\t-- Allow to (un-)mute by left clicking the label and to\n\t-- set the volume using the mouse wheel.\n\tlabel:connect_signal(\"button::press\", function(_,_,_,button)\n\t\tif button == 1 then\n\t\t\tawesome.emit_signal(\"pulseaudio::toggle_mute\")\n\t\telseif button == 4 then\n\t\t\tawesome.emit_signal(\"pulseaudio::increase_volume\", 5)\n\t\telseif button == 5 then\n\t\t\tawesome.emit_signal(\"pulseaudio::decrease_volume\", 5)\n\t\tend\n\tend)\n\n\treturn label\nend\n\n--------------------------------------------------\n--- Status notifications ---\n--------------------------------------------------\npulseaudio.notification = {\n\tvolume = { enabled = false },\n\tmute = { enabled = false },\n}\n\nfunction pulseaudio.notification.volume:enable()\n\tself.enabled = true\n\tawesome.connect_signal(\"pulseaudio::get_volume\", function(volume)\n\t\tnaughty.notification {\n\t\t\ttitle = \"Volume change\",\n\t\t\tmessage = \"Current volume: \" .. 
tostring(volume),\n\t\t\tcategory = \"pulseaudio.volume\",\n\t\t\tapp_name = \"pulseaudio_cli_awesome_bindings\",\n\t\t\ttimeout = 1,\n\t\t}\n\tend)\nend\n\nfunction pulseaudio.notification.volume:disable()\n\tself.enabled = false\n\tawesome.disconnect_signal(\"pulseaudio::get_volume\")\nend\n\nfunction pulseaudio.notification.volume:toggle()\n\tif self.enabled then\n\t\tself:disable()\n\telse\n\t\tself:enable()\n\tend\nend\n\nfunction pulseaudio.notification.mute:enable()\n\tself.enabled = true\n\tawesome.connect_signal(\"pulseaudio::get_mute\", function(muted)\n\t\tlocal title = \"Unmuted\"\n\t\tlocal text = \"System unmuted\"\n\n\t\tif muted then\n\t\t\ttitle = \"Muted\"\n\t\t\ttext = \"System muted\"\n\t\tend\n\n\t\tnaughty.notification {\n\t\t\ttitle = title,\n\t\t\tmessage = text,\n\t\t\tcategory = \"pulseaudio.mute\",\n\t\t\tapp_name = \"pulseaudio_cli_awesome_bindings\",\n\t\t\ttimeout = 1,\n\t\t}\n\tend)\nend\n\nfunction pulseaudio.notification.mute:disable()\n\tself.enabled = false\n\tawesome.disconnect_signal(\"pulseaudio::get_mute\")\nend\n\nfunction pulseaudio.notification.mute:toggle()\n\tif self.enabled then\n\t\tself:disable()\n\telse\n\t\tself:enable()\n\tend\nend\n\n-- Select the correct cli tool.\nif pulseaudio.current_cli == \"pamixer\" then\n\tpulseaudio.cli = pulseaudio.pamixer\nelseif pulseaudio.current_cli == \"pulsemixer\" then\n\tpulseaudio.cli = pulseaudio.pulsemixer\nend\n\nreturn 
pulseaudio:new()\n"},"avg_line_length":{"kind":"number","value":29.2905198777,"string":"29.29052"},"max_line_length":{"kind":"number","value":83,"string":"83"},"alphanum_fraction":{"kind":"number","value":0.609313009,"string":"0.609313"},"score":{"kind":"number","value":3.140625,"string":"3.140625"}}},{"rowIdx":954,"cells":{"hexsha":{"kind":"string","value":"99412084ad8354eeac28b286ec51d9199233a7ad"},"size":{"kind":"number","value":1220,"string":"1,220"},"ext":{"kind":"string","value":"h"},"lang":{"kind":"string","value":"C"},"max_stars_repo_path":{"kind":"string","value":"include/Crc.h"},"max_stars_repo_name":{"kind":"string","value":"Tomash667/carpglib"},"max_stars_repo_head_hexsha":{"kind":"string","value":"c8701b170e4e2cbdb4d52fe3b7c8529afb3e97ed"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":4,"string":"4"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2019-08-18T19:33:04.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2021-08-07T02:12:54.000Z"},"max_issues_repo_path":{"kind":"string","value":"include/Crc.h"},"max_issues_repo_name":{"kind":"string","value":"Tomash667/carpglib"},"max_issues_repo_head_hexsha":{"kind":"string","value":"c8701b170e4e2cbdb4d52fe3b7c8529afb3e97ed"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"number","value":5,"string":"5"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2019-08-14T05:45:56.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2021-03-15T07:47:24.000Z"},"max_forks_repo_path":{"kind":"string","value":"include/Crc.h"},"max_forks_repo_name":{"kind":"string","value":"Tomash667/carpglib"},"max_forks_repo_head_hexsha":{"kind":"string","value":"c8701b170e4e2cbdb4d52fe3b7c8529afb3e97ed"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_forks_count":{"kind":"number","value":2,"string":"2"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2019-10-05T02:36:35.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2021-02-15T20:20:09.000Z"},"content":{"kind":"string","value":"#pragma once\n\n//-----------------------------------------------------------------------------\nclass Crc\n{\npublic:\n\tCrc() : m_crc(CRC32_NEGL) {}\n\tvoid Update(const byte *input, size_t length);\n\tuint Get() const { return ~m_crc; }\n\toperator uint() const { return Get(); }\n\n\t// Don't use for types with padding!\n\ttemplate\n\tvoid Update(const T& item)\n\t{\n\t\tUpdate((const byte*)&item, sizeof(item));\n\t}\n\n\ttemplate<>\n\tvoid Update(const string& str)\n\t{\n\t\tif(!str.empty())\n\t\t\tUpdate((const byte*)str.c_str(), str.length());\n\t}\n\n\ttemplate<>\n\tvoid Update(const cstring& str)\n\t{\n\t\tassert(str);\n\t\tUpdate((const byte*)str, strlen(str));\n\t}\n\n\t// Don't use for types with padding!\n\ttemplate\n\tvoid Update(const vector& v)\n\t{\n\t\tUpdate(v.size());\n\t\tif(!v.empty())\n\t\t\tUpdate((const byte*)v.data(), v.size() * sizeof(T));\n\t}\n\n\ttemplate<>\n\tvoid Update(const vector& v)\n\t{\n\t\tUpdate(v.size());\n\t\tfor(const string& s : v)\n\t\t\tUpdate(s);\n\t}\n\n\tvoid Update0()\n\t{\n\t\tUpdate(0);\n\t}\n\n\tvoid Update1()\n\t{\n\t\tUpdate(1);\n\t}\n\n\tstatic uint Calculate(Cstring filename);\n\tstatic uint Calculate(FileReader& file);\n\nprivate:\n\tstatic const uint CRC32_NEGL = 0xffffffffL;\n\tstatic const uint m_tab[256];\n\tuint 
m_crc;\n};\n"},"avg_line_length":{"kind":"number","value":17.9411764706,"string":"17.941176"},"max_line_length":{"kind":"number","value":79,"string":"79"},"alphanum_fraction":{"kind":"number","value":0.6024590164,"string":"0.602459"},"score":{"kind":"number","value":3.0625,"string":"3.0625"}}},{"rowIdx":955,"cells":{"hexsha":{"kind":"string","value":"c7d423fa068508464eb1bdffe43ab9ac7402f665"},"size":{"kind":"number","value":6603,"string":"6,603"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"pyowl.py"},"max_stars_repo_name":{"kind":"string","value":"vene/pyowl"},"max_stars_repo_head_hexsha":{"kind":"string","value":"edef4cfeb31c4ea52cc67dce581372e2270fce2a"},"max_stars_repo_licenses":{"kind":"list like","value":["BSD-3-Clause"],"string":"[\n \"BSD-3-Clause\"\n]"},"max_stars_count":{"kind":"number","value":57,"string":"57"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2017-02-11T09:17:47.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2021-02-21T11:07:28.000Z"},"max_issues_repo_path":{"kind":"string","value":"pyowl.py"},"max_issues_repo_name":{"kind":"string","value":"vene/pyowl"},"max_issues_repo_head_hexsha":{"kind":"string","value":"edef4cfeb31c4ea52cc67dce581372e2270fce2a"},"max_issues_repo_licenses":{"kind":"list like","value":["BSD-3-Clause"],"string":"[\n \"BSD-3-Clause\"\n]"},"max_issues_count":{"kind":"number","value":3,"string":"3"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2019-08-07T15:09:00.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2019-08-30T11:53:17.000Z"},"max_forks_repo_path":{"kind":"string","value":"pyowl.py"},"max_forks_repo_name":{"kind":"string","value":"vene/pyowl"},"max_forks_repo_head_hexsha":{"kind":"string","value":"edef4cfeb31c4ea52cc67dce581372e2270fce2a"},"max_forks_repo_licenses":{"kind":"list like","value":["BSD-3-Clause"],"string":"[\n 
\"BSD-3-Clause\"\n]"},"max_forks_count":{"kind":"number","value":13,"string":"13"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2017-02-12T21:43:59.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2019-09-15T03:10:06.000Z"},"content":{"kind":"string","value":"# Author: Vlad Niculae \n# License: BSD 3 clause\n\nfrom __future__ import print_function\nfrom __future__ import division\n\nimport numpy as np\n\nfrom sklearn.base import BaseEstimator, ClassifierMixin, RegressorMixin\nfrom sklearn.utils.extmath import safe_sparse_dot\nfrom sklearn.isotonic import isotonic_regression\nfrom sklearn.preprocessing import LabelBinarizer\n\nfrom fista import fista\nfrom loss import get_loss\n\n\ndef prox_owl(v, w):\n \"\"\"Proximal operator of the OWL norm dot(w, reversed(sort(v)))\n\n Follows description and notation from:\n X. Zeng, M. Figueiredo,\n The ordered weighted L1 norm: Atomic formulation, dual norm,\n and projections.\n eprint http://arxiv.org/abs/1409.4271\n \"\"\"\n\n # wlog operate on absolute values\n v_abs = np.abs(v)\n ix = np.argsort(v_abs)[::-1]\n v_abs = v_abs[ix]\n # project to K+ (monotone non-negative decreasing cone)\n v_abs = isotonic_regression(v_abs - w, y_min=0, increasing=False)\n\n # undo the sorting\n inv_ix = np.zeros_like(ix)\n inv_ix[ix] = np.arange(len(v))\n v_abs = v_abs[inv_ix]\n\n return np.sign(v) * v_abs\n\n\ndef _oscar_weights(alpha, beta, size):\n w = np.arange(size - 1, -1, -1, dtype=np.double)\n w *= beta\n w += alpha\n return w\n\n\ndef _fit_owl_fista(X, y, w, loss, max_iter=500, max_linesearch=20, eta=2.0,\n tol=1e-3, verbose=0):\n\n # least squares loss\n def sfunc(coef, grad=False):\n y_scores = safe_sparse_dot(X, coef)\n if grad:\n obj, lp = loss(y, y_scores, return_derivative=True)\n grad = safe_sparse_dot(X.T, lp)\n return obj, grad\n else:\n return loss(y, y_scores)\n\n def nsfunc(coef, L):\n return prox_owl(coef, w / L)\n\n coef = np.zeros(X.shape[1])\n return fista(sfunc, 
nsfunc, coef, max_iter, max_linesearch,\n eta, tol, verbose)\n\n\nclass _BaseOwl(BaseEstimator):\n \"\"\"\n\n Solves sum loss(y_pred, y) + sum_j weights_j |coef|_(j)\n where u_(j) is the jth largest component of the vector u.\n and weights is a monotonic nonincreasing vector.\n\n OWL is also known as: sorted L1 norm, SLOPE\n\n Parameters\n ----------\n\n weights: array, shape (n_features,) or tuple, length 2\n Nonincreasing weights vector for the ordered weighted L1 penalty.\n If weights = (alpha, 0, 0, ..., 0), this amounts to a L_inf penalty.\n If weights = alpha * np.ones(n_features) it amounts to L1.\n If weights is a tuple = (alpha, beta), the OSCAR penalty is used::\n alpha ||coef||_1 + beta sum_{i 0\n return self.lb_.inverse_transform(y_pred)\n\n\nif __name__ == '__main__':\n\n from sklearn.model_selection import train_test_split\n from sklearn.datasets import load_boston, load_breast_cancer\n\n print(\"OSCAR proximal operator on toy example:\")\n v = np.array([1, 3, 2.9, 4, 0])\n w_oscar = _oscar_weights(alpha=0.01, beta=1, size=5)\n print(prox_owl(v, w_oscar))\n print()\n\n print(\"Regression\")\n X, y = load_boston(return_X_y=True)\n X = np.column_stack([X, -X[:, 0] + 0.01 * np.random.randn(X.shape[0])])\n X_tr, X_te, y_tr, y_te = train_test_split(X, y, random_state=0)\n clf = OwlRegressor(weights=(1, 100))\n clf.fit(X_tr, y_tr)\n print(\"Correlated coefs\", clf.coef_[0], clf.coef_[-1])\n print(\"Test score\", clf.score(X_te, y_te))\n print()\n\n print(\"Classification\")\n X, y = load_breast_cancer(return_X_y=True)\n X = np.column_stack([X, -X[:, 0] + 0.01 * np.random.randn(X.shape[0])])\n X_tr, X_te, y_tr, y_te = train_test_split(X, y, random_state=0)\n clf = OwlClassifier(weights=(1, 100), loss='squared-hinge')\n clf.fit(X_tr, y_tr)\n print(\"Correlated coefs\", clf.coef_[0], clf.coef_[-1])\n print(\"Test score\", clf.score(X_te, 
y_te))\n"},"avg_line_length":{"kind":"number","value":31.4428571429,"string":"31.442857"},"max_line_length":{"kind":"number","value":79,"string":"79"},"alphanum_fraction":{"kind":"number","value":0.6334999243,"string":"0.6335"},"score":{"kind":"number","value":3.09375,"string":"3.09375"}}},{"rowIdx":956,"cells":{"hexsha":{"kind":"string","value":"816541fd0ac9aa27940ad0ee8d31a1aaaa0de62e"},"size":{"kind":"number","value":2467,"string":"2,467"},"ext":{"kind":"string","value":"rs"},"lang":{"kind":"string","value":"Rust"},"max_stars_repo_path":{"kind":"string","value":"typebinder/src/pipeline/mod.rs"},"max_stars_repo_name":{"kind":"string","value":"impero-com/ts_export"},"max_stars_repo_head_hexsha":{"kind":"string","value":"850f28f2286714e47ddc9179c56718b634cef279"},"max_stars_repo_licenses":{"kind":"list like","value":["X11"],"string":"[\n \"X11\"\n]"},"max_stars_count":{"kind":"number","value":4,"string":"4"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2021-02-22T17:01:07.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2021-02-24T09:43:14.000Z"},"max_issues_repo_path":{"kind":"string","value":"typebinder/src/pipeline/mod.rs"},"max_issues_repo_name":{"kind":"string","value":"impero-com/typebinder"},"max_issues_repo_head_hexsha":{"kind":"string","value":"a5414b57ce8d6dcd8b57c9ea08c7a1ab646c5380"},"max_issues_repo_licenses":{"kind":"list like","value":["X11"],"string":"[\n 
\"X11\"\n]"},"max_issues_count":{"kind":"number","value":18,"string":"18"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2021-03-04T16:07:51.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2021-11-03T17:04:55.000Z"},"max_forks_repo_path":{"kind":"string","value":"typebinder/src/pipeline/mod.rs"},"max_forks_repo_name":{"kind":"string","value":"impero-com/ts_export"},"max_forks_repo_head_hexsha":{"kind":"string","value":"850f28f2286714e47ddc9179c56718b634cef279"},"max_forks_repo_licenses":{"kind":"list like","value":["X11"],"string":"[\n \"X11\"\n]"},"max_forks_count":{"kind":"number","value":2,"string":"2"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2021-03-08T15:16:39.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2021-03-19T15:28:24.000Z"},"content":{"kind":"string","value":"//! The core logic of `typebinder`\n\nuse crate::{\n contexts::type_solving::TypeSolvingContext, error::TsExportError, exporters::Exporter,\n macros::context::MacroSolvingContext, path_mapper::PathMapper,\n step_spawner::PipelineStepSpawner,\n};\nuse syn::{punctuated::Punctuated, Path};\n\nuse self::module_step::{ModuleStepResult, ModuleStepResultData};\n\npub mod module_step;\npub mod step_result;\n\n/// The Pipeline is the starting point of `typebinder`.\n///\n/// A Pipeline is customized with both a [PipelineStepSpawner] and an [Exporter] implementor.\n///\n/// When launched, the [Pipeline] will use its [PipelineStepSpawner] to spawn the \"default\" module, that is, the module with an empty path.\n/// Each [ModuleStep](crate::pipeline::module_step::ModuleStep) thereby generated is then launched, see [ModuleStep::launch](crate::pipeline::module_step::ModuleStep).\n///\n/// Each output is passed to the [Exporter], that is responsible for outputting the data.\npub struct Pipeline {\n pub pipeline_step_spawner: PSS,\n pub exporter: E,\n pub path_mapper: PathMapper,\n}\n\nimpl 
Pipeline\nwhere\n PSS: PipelineStepSpawner,\n E: Exporter,\n TsExportError: From + From,\n{\n pub fn launch(\n mut self,\n solving_context: &TypeSolvingContext,\n macro_context: &MacroSolvingContext,\n ) -> Result<(), TsExportError> {\n let path = Path {\n leading_colon: None,\n segments: Punctuated::default(),\n };\n\n let res = self\n .pipeline_step_spawner\n .create_process(path)?\n .ok_or(TsExportError::FailedToLaunch)?\n .launch(\n &self.pipeline_step_spawner,\n solving_context,\n macro_context,\n &self.path_mapper,\n )?;\n let mut all_results: Vec = Vec::new();\n extractor(&mut all_results, res);\n\n for result_data in all_results.into_iter() {\n if result_data.imports.is_empty() && result_data.exports.is_empty() {\n continue;\n }\n self.exporter.export_module(result_data)?;\n }\n\n self.exporter.finish();\n\n Ok(())\n }\n}\n\n/// TODO: refactor this to a closure\nfn extractor(all: &mut Vec, iter: ModuleStepResult) {\n iter.children\n .into_iter()\n .for_each(|child| extractor(all, child));\n all.push(iter.data);\n}\n"},"avg_line_length":{"kind":"number","value":31.6282051282,"string":"31.628205"},"max_line_length":{"kind":"number","value":167,"string":"167"},"alphanum_fraction":{"kind":"number","value":0.6436967977,"string":"0.643697"},"score":{"kind":"number","value":3.015625,"string":"3.015625"}}},{"rowIdx":957,"cells":{"hexsha":{"kind":"string","value":"b6d937c6bf167ae0e8646a47e76aaee72e16392d"},"size":{"kind":"number","value":1417,"string":"1,417"},"ext":{"kind":"string","value":"swift"},"lang":{"kind":"string","value":"Swift"},"max_stars_repo_path":{"kind":"string","value":"swift-sdk/CommerceItem.swift"},"max_stars_repo_name":{"kind":"string","value":"brunosylvain/swift-sdk"},"max_stars_repo_head_hexsha":{"kind":"string","value":"8c22e0390b47ff3fecf0ae6b2b71fefa180eb904"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"swift-sdk/CommerceItem.swift"},"max_issues_repo_name":{"kind":"string","value":"brunosylvain/swift-sdk"},"max_issues_repo_head_hexsha":{"kind":"string","value":"8c22e0390b47ff3fecf0ae6b2b71fefa180eb904"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"swift-sdk/CommerceItem.swift"},"max_forks_repo_name":{"kind":"string","value":"brunosylvain/swift-sdk"},"max_forks_repo_head_hexsha":{"kind":"string","value":"8c22e0390b47ff3fecf0ae6b2b71fefa180eb904"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"//\n// Created by Tapash Majumder on 6/6/18.\n// Copyright © 2018 Iterable. All rights reserved.\n//\n\nimport UIKit\n\n/**\n `CommerceItem` represents a product. 
These are used by the commerce API; see [IterableAPI trackPurchase:items:dataFields:]\n */\n@objcMembers public class CommerceItem: NSObject {\n /** id of this product */\n public var id: String\n \n /** name of this product */\n public var name: String\n \n /** price of this product */\n public var price: NSNumber\n \n /** quantity of this product */\n public var quantity: UInt\n \n /**\n Creates a `CommerceItem` with the specified properties\n \n - parameters:\n - id: id of the product\n - name: name of the product\n - price: price of the product\n - quantity: quantity of the product\n \n - returns: an instance of `CommerceItem` with the specified properties\n */\n public init(id: String, name: String, price: NSNumber, quantity: UInt) {\n self.id = id\n self.name = name\n self.price = price\n self.quantity = quantity\n }\n \n /**\n A Dictionary respresentation of this item\n \n - returns: An NSDictionary representing this item\n */\n public func toDictionary() -> [AnyHashable: Any] {\n return [\"id\": id,\n \"name\": name,\n \"price\": price,\n \"quantity\": quantity]\n }\n}\n"},"avg_line_length":{"kind":"number","value":26.2407407407,"string":"26.240741"},"max_line_length":{"kind":"number","value":123,"string":"123"},"alphanum_fraction":{"kind":"number","value":0.5878616796,"string":"0.587862"},"score":{"kind":"number","value":3,"string":"3"}}},{"rowIdx":958,"cells":{"hexsha":{"kind":"string","value":"403ed8c759891b08ee51690d9702e340ed6f4833"},"size":{"kind":"number","value":2620,"string":"2,620"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"instascrape/exceptions.py"},"max_stars_repo_name":{"kind":"string","value":"tnychn/instascrape"},"max_stars_repo_head_hexsha":{"kind":"string","value":"7aaf3c1a1786bbe80059ed6e0d93442a19a6f475"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_stars_count":{"kind":"number","value":80,"string":"80"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2020-05-28T17:22:14.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2022-03-25T07:15:51.000Z"},"max_issues_repo_path":{"kind":"string","value":"instascrape/exceptions.py"},"max_issues_repo_name":{"kind":"string","value":"AlphaXenon/InstaScrape"},"max_issues_repo_head_hexsha":{"kind":"string","value":"7aaf3c1a1786bbe80059ed6e0d93442a19a6f475"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"number","value":23,"string":"23"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2020-05-25T12:45:40.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2022-03-06T05:44:41.000Z"},"max_forks_repo_path":{"kind":"string","value":"instascrape/exceptions.py"},"max_forks_repo_name":{"kind":"string","value":"AlphaXenon/InstaScrape"},"max_forks_repo_head_hexsha":{"kind":"string","value":"7aaf3c1a1786bbe80059ed6e0d93442a19a6f475"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"number","value":14,"string":"14"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2020-06-28T05:52:28.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2022-03-28T04:27:50.000Z"},"content":{"kind":"string","value":"class InstascrapeError(Exception):\n \"\"\"Base exception class for all of the exceptions raised by Instascrape.\"\"\"\n\n\nclass ExtractionError(InstascrapeError):\n \"\"\"Raised when Instascrape fails to extract specified data from HTTP response.\"\"\"\n\n def __init__(self, message: str):\n super().__init__(\"Failed to extract data from response. 
(message: '{0}')\".format(message))\n\n\nclass PrivateAccessError(InstascrapeError):\n \"\"\"Raised when user does not have permission to access specified data, i.e. private profile which the user is not following.\"\"\"\n\n def __init__(self):\n super().__init__(\"The user profile is private and not being followed by you.\")\n\n\nclass RateLimitedError(InstascrapeError):\n \"\"\"Raised when Instascrape receives a 429 TooManyRequests from Instagram.\"\"\"\n\n def __init__(self):\n super().__init__(\"(429) Too many requests. Failed to query data. Rate limited by Instagram.\")\n\n\nclass NotFoundError(InstascrapeError):\n \"\"\"Raised when Instascrape receives a 404 Not Found from Instagram.\"\"\"\n\n def __init__(self, message: str = None):\n super().__init__(message or \"(404) Nothing found.\")\n\n\nclass ConnectionError(InstascrapeError):\n \"\"\"Raised when Instascrape fails to connect to Instagram server.\"\"\"\n\n def __init__(self, url: str):\n super().__init__(\"Failed to connect to '{0}'.\".format(url))\n\n\nclass LoginError(InstascrapeError):\n \"\"\"Raised when Instascrape fails to perform authentication, e.g. wrong credentials.\"\"\"\n\n def __init__(self, message: str):\n super().__init__(\"Failed to log into Instagram. 
(message: '{0}')\".format(message))\n\n\nclass TwoFactorAuthRequired(LoginError):\n \"\"\"Raised when Instascrape fails to perform authentication due to two-factor authenticattion.\"\"\"\n\n def __init__(self):\n super().__init__(\"two-factor authentication is required\")\n\n\nclass CheckpointChallengeRequired(LoginError):\n \"\"\"Raised when Instascrape fails to perform authentication due to checkpoint challenge.\"\"\"\n\n def __init__(self):\n super().__init__(\"checkpoint challenge solving is required\")\n\n\nclass AuthenticationRequired(InstascrapeError):\n \"\"\"Raised when anonymous/unauthenticated (guest) user tries to perform actions that require authentication.\"\"\"\n\n def __init__(self):\n super().__init__(\"Login is required in order to perform this action.\")\n\n\nclass DownloadError(InstascrapeError):\n \"\"\"Raised when Instascrape fails to download data from Instagram server.\"\"\"\n\n def __init__(self, message: str, url: str):\n super().__init__(\"Download Failed -> {0} (url: '{1}')\".format(message, url))\n"},"avg_line_length":{"kind":"number","value":35.8904109589,"string":"35.890411"},"max_line_length":{"kind":"number","value":131,"string":"131"},"alphanum_fraction":{"kind":"number","value":0.7190839695,"string":"0.719084"},"score":{"kind":"number","value":3.296875,"string":"3.296875"}}},{"rowIdx":959,"cells":{"hexsha":{"kind":"string","value":"c7c0ddf187a121fa94e350a87f3a1a34fe08c11c"},"size":{"kind":"number","value":10107,"string":"10,107"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"ball.py"},"max_stars_repo_name":{"kind":"string","value":"adata111/brick-carnival"},"max_stars_repo_head_hexsha":{"kind":"string","value":"38dcb03764e00b84010eaa61dbec79c087dc9295"},"max_stars_repo_licenses":{"kind":"list like","value":["BSD-2-Clause"],"string":"[\n 
\"BSD-2-Clause\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"ball.py"},"max_issues_repo_name":{"kind":"string","value":"adata111/brick-carnival"},"max_issues_repo_head_hexsha":{"kind":"string","value":"38dcb03764e00b84010eaa61dbec79c087dc9295"},"max_issues_repo_licenses":{"kind":"list like","value":["BSD-2-Clause"],"string":"[\n \"BSD-2-Clause\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"ball.py"},"max_forks_repo_name":{"kind":"string","value":"adata111/brick-carnival"},"max_forks_repo_head_hexsha":{"kind":"string","value":"38dcb03764e00b84010eaa61dbec79c087dc9295"},"max_forks_repo_licenses":{"kind":"list like","value":["BSD-2-Clause"],"string":"[\n \"BSD-2-Clause\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"from headers import *\nimport globalVar\nfrom globalVar import TOP, BOTTOM, LIVES, HT, WIDTH, LEFT, RIGHT, obj_bricks, paddle, ALT_LIVES\nrows = HT\ncols = WIDTH\n\nclass Ball:\n\t\"\"\"docstring for Ball\n\tcontains all methods that can modify state of the ball.\n\t\"\"\"\n\tdef __init__(self, x, y, vx, vy, m):\n\t\tsuper().__init__()\n\t\tself.width = 1\n\t\tself.height = 1\n\t\tself.x = x\n\t\tself.y = y\n\t\tself.v_x = vx\n\t\tself.v_y = -1\n\t\tself.moving = m\n\t\tself.thru = 0\n\t\tself.dead = 0\n\t\tself.fast = 0\n\t\tself.fire = 0\n\n\tdef move(self,v=1):\n\t\tpaddle=globalVar.paddle\n\t\ttemp = self.v_x\n\n\t\tif(self.moving == 0): # movement with paddle\n\t\t\tif(paddle.x+paddle.width>=RIGHT and v>0):\n\t\t\t\tv = 0\n\t\t\telif(paddle.x<=LEFT and v<0):\n\t\t\t\tv = 0\n\t\t\tself.x += 
v*paddle.v\n\t\t\treturn\n\t\tself.x += self.v_x\n\n\t\tif(self.x+self.width>=RIGHT and self.v_x>0):\n\t\t\tself.v_x = -self.v_x\n\t\telif(self.x<=LEFT and self.v_x<0):\n\t\t\tself.v_x = -self.v_x\n\t\telif(self.x+self.v_x+self.width>RIGHT):\n\t\t\tself.x = RIGHT-self.v_x-self.width\n\t\telif(self.x+self.v_x=BOTTOM and self.v_y>0):\t# v_y>0 means it will go down\n\t\t\tself.kill_ball()\n\t\t\treturn\n\n\t\telif(self.y<=TOP and self.v_y<0):\n\t\t\tself.v_y = -self.v_y\n\t\tif(self.y+self.v_y>BOTTOM):\n\t\t\tself.y = BOTTOM-self.height\n\t\t\t# self.kill_ball()\n\t\t\treturn\n\t\telif(self.y+self.v_y paddle.x and self.xpaddle.x and self.v_x>=0): # top-left collision possible\n\t\t\tif(self.y+self.heightpaddle.y):\n\t\t\t\t# top-left collision \n\t\t\t\tself.y = paddle.y-self.height - self.v_y\n\t\t\t\tcheck=1\n\t\telif(self.x>(paddle.x) and self.x+self.v_xpaddle.y):\n\t\t\t\t# top-right collision \n\t\t\t\t# self.set_vel(-self.v_y)\n\t\t\t\tself.y = paddle.y-self.height - self.v_y\n\t\t\t\tcheck=1\n\t\treturn check\n\n\n\tdef check_brick_collision(self):\n\t\tcheck = 0\n\t\tv_x=self.v_x\n\t\tv_y=self.v_y\n\t\tfor brick in reversed(globalVar.obj_bricks):\n\t\t\tif(brick.is_broken()):\n\t\t\t\tcontinue\n\t\t\tif((self.x>=brick.getx() and self.x+self.width<=brick.getx()+brick.width) and ((self.y<=brick.gety()+brick.height and self.y+self.height>=brick.gety() and self.v_y<0) or (self.y+self.height>=brick.gety() and self.y<=brick.gety()+brick.height and self.v_y>0)) ):\n\t\t\t\tif(self.v_y==0): #not possible, but okay\n\t\t\t\t\tcontinue\n\t\t\t\t# collision with top or bottom brick surface \n\t\t\t\tv_y = -self.v_y\n\t\t\t\tcheck = 1\n\t\t\telif(((self.x+self.width==brick.getx() and self.v_x>0) or (self.x==brick.getx()+brick.width and self.v_x<0)) and (self.y+self.height<=brick.gety()+brick.height and self.y>=brick.gety())):\n\t\t\t\tif(self.v_x==0):\n\t\t\t\t\tcontinue\n\t\t\t\t# collision with left or right edge of brick\n\t\t\t\tv_x = -self.v_x\n\t\t\t\tcheck = 
1\n\t\t\t\n\t\t\tif(check):\n\t\t\t\tif(self.thru==0):\n\t\t\t\t\tself.v_y = v_y\n\t\t\t\t\tself.v_x = v_x\n\t\t\t\telse:\n\t\t\t\t\tif(brick.strength == 100):\t# exploding\n\t\t\t\t\t\tbrick.reduce_strength(self.v_x, self.v_y)\n\t\t\t\t\t\tbreak\n\t\t\t\t\tif(self.fire==0):\n\t\t\t\t\t\tbrick.break_it(self.v_x, self.v_y)\n\t\t\t\t\telse:\n\t\t\t\t\t\tbrick.fire(self.v_x, self.v_y)\n\t\t\t\t\tbreak\n\t\t\t\tif(brick.strength != -1):\n\t\t\t\t\tif(self.fire):\n\t\t\t\t\t\tbrick.fire(self.v_x, self.v_y)\n\t\t\t\t\telse:\n\t\t\t\t\t\tbrick.reduce_strength(self.v_x, self.v_y)\n\t\t\t\telse:\n\t\t\t\t\tif(self.fire):\n\t\t\t\t\t\tbrick.fire(self.v_x, self.v_y)\n\t\t\t\tbreak\n\t\tif(check):\n\n\t\t\tos.system('aplay -q ./sounds/ball_brick.wav&')\n\t\t\treturn\n\t\tcheck=0\n\t\tfor brick in globalVar.obj_bricks:\n\t\t\tif(brick.is_broken()):\n\t\t\t\tcontinue\n\t\t\tif((self.x+self.width)==brick.getx() and self.v_x>0): # top-left or bottom-left collision possible\n\t\t\t\tif(self.y==brick.gety()+brick.height and self.v_y<0): \n\t\t\t\t\t# bottom-left collision\n\t\t\t\t\tv_y = -self.v_y\n\t\t\t\t\tv_x = -self.v_x\n\t\t\t\t\t# self.v_x = -self.v_x\n\t\t\t\t\tcheck = 1\n\t\t\t\telif(self.y+self.height==brick.gety() and self.v_y>0):\n\t\t\t\t\t# top-left collision \n\t\t\t\t\t# self.v_y = -self.v_y\n\t\t\t\t\tv_x = -self.v_x\n\t\t\t\t\tv_y = -self.v_y\n\t\t\t\t\tcheck = 1\n\t\t\telif(self.x==(brick.getx()+brick.width) and self.v_x<0): # top-right or bottom-right collision possible\n\t\t\t\tif(self.y==brick.gety()+brick.height and self.v_y<0): \n\t\t\t\t\t# bottom-right collision\n\t\t\t\t\tv_y = -self.v_y\n\t\t\t\t\tv_x = -self.v_x\n\t\t\t\t\t# self.v_x = -self.v_x\n\t\t\t\t\tcheck = 1\n\t\t\t\telif(self.y+self.height==brick.gety() and self.v_y>0):\n\t\t\t\t\t# top-right collision \n\t\t\t\t\t# self.v_y = -self.v_y\n\t\t\t\t\tv_x = -self.v_x\n\t\t\t\t\tv_y = -self.v_y\n\t\t\t\t\tcheck = 1\n\t\t\tif(check):\n\t\t\t\t# ball-brick collision occurred, reduce brick strength 
now\n\t\t\t\tos.system('aplay -q ./sounds/ball_brick.wav&')\n\t\t\t\tif(self.thru==0):\n\t\t\t\t\tself.v_y = v_y\n\t\t\t\t\tself.v_x = v_x\n\t\t\t\telse:\n\t\t\t\t\t# if thru ball power up is activated, destroy bricks \n\t\t\t\t\tif(brick.strength == 100): # exploding brick\n\t\t\t\t\t\tbrick.reduce_strength(self.v_x, self.v_y)\n\t\t\t\t\t\tbreak\n\t\t\t\t\tif(self.fire==0):\n\t\t\t\t\t\tbrick.break_it(self.v_x, self.v_y)\n\t\t\t\t\telse:\n\t\t\t\t\t\t# if fire ball power up is activated, destroy bricks exploding bricks style\n\t\t\t\t\t\tbrick.fire(self.v_x, self.v_y)\n\t\t\t\t\tbreak\n\t\t\t\tif(brick.strength != -1):\n\t\t\t\t\tif(self.fire):\n\t\t\t\t\t\tbrick.fire(self.v_x, self.v_y)\n\t\t\t\t\telse:\n\t\t\t\t\t\tbrick.reduce_strength(self.v_x, self.v_y)\n\t\t\t\telse:\n\t\t\t\t\t# unbreakable brick breaks only if the ball is fireball\n\t\t\t\t\tif(self.fire):\n\t\t\t\t\t\tbrick.fire(self.v_x, self.v_y)\n\t\t\t\tbreak\n\t\tif(check):\n\t\t\treturn\n\t\tcheck=0\n\t\tfor brick in globalVar.obj_bricks:\n\t\t\tif(brick.is_broken()):\n\t\t\t\tcontinue\n\n\t\t\tif(self.x+self.widthbrick.getx()+brick.width or self.y+self.heightbrick.gety()+brick.height):\n\n\t\t\t\tx = self.x+self.v_x\n\t\t\t\ty = self.y + self.v_y\n\t\t\t\tfor b_x in range(brick.getx(),brick.getx()+brick.width):\n\t\t\t\t\tfor b_y in range(brick.gety(),brick.gety()+brick.height):\n\t\t\t\t\t\tif(x==b_x and y==b_y):\n\t\t\t\t\t\t\tif(self.v_x>0 and self.v_y>0):\t# approaching from north west\n\t\t\t\t\t\t\t\tif(x-brick.getx()0):\t# approaching from north east\n\t\t\t\t\t\t\t\tif(brick.getx()+brick.width-x0 and self.v_y<0):\t# approaching from south west\n\t\t\t\t\t\t\t\tif(x-brick.getx()0):\n\t\t\tglobalVar.ALT_LIVES -= 1\n\t\t\tself.dead = 1\n\t\telse:\n\t\t\tl = globalVar.LIVES\n\t\t\tglobalVar.LIVES = l-1\n\t\t\tfor power_up in globalVar.power_ups:\n\t\t\t\tif(power_up.is_activated()):\n\t\t\t\t\tpower_up.deactivate_power_up()\n\t\t\tself.x = random.randint(p.x, 
p.x+p.width-self.width)\n\t\t\tself.y = p.y-self.height\n\t\t\tself.moving = 0\n\t\t\tself.v_y = -2\n\t\t\tself.v_x = 0\n\t\t\tself.thru = 0\n\t\t\tself.fast = 0\n\t\t\tself.fire = 0\n\t\t\n\tdef set_props(self, x, y, vx, vy):\n\t\tself.x = x\n\t\tself.y = y\n\t\tself.v_x = vx\n\t\tself.v_y = vy\n\n\tdef is_moving(self):\n\t\treturn self.moving\n\tdef set_moving(self):\n\t\tself.moving = 1\n\t\tif(not globalVar.paddle.is_sticky()):\n\t\t\tself.set_vel()\n\n\tdef set_vel(self, vy=-1):\n\t\tpaddle = globalVar.paddle\n\t\tcen = paddle.width//2\n\t\tp1 = cen//2\n\t\tp3 = cen+p1\n\t\tif(self.x - paddle.x<=p1):\n\t\t\tself.v_x = self.v_x - 2\n\t\telif(self.x - paddle.xp3):\n\t\t\tself.v_x += 2\n\t\tself.v_y = vy\n\n\tdef incr_vel(self):\n\t\tif(self.fast):\n\t\t\treturn\n\t\tself.fast = 1\n\t\tif(self.v_y > 0):\n\t\t\tself.v_y += 1\n\t\telif(self.v_y < 0 or not (self.is_moving())):\n\t\t\tself.v_y -= 1\n\n\tdef decr_vel(self):\n\t\tif(self.fast):\n\t\t\tself.fast =0\n\t\t\tif(self.v_y > 0):\n\t\t\t\tself.v_y -= 1\n\t\t\telif(self.v_y < 0):\n\t\t\t\tself.v_y += 1\n\n\tdef getArr(self, colour, symbol, arr):\n\t\ty = self.y\n\t\th = self.height\n\t\tw = self.width\n\t\tx = self.x\n\t\tfor i in range(y, y+h):\n\t\t\tfor j in range(x,x+w):\n\t\t\t\tarr[i][j] = (colour +symbol + Style.RESET_ALL)\n\t\treturn arr\n\n\tdef set_thru(self):\n\t\tself.thru = 1\n\n\tdef unset_thru(self):\n\t\tself.thru = 0\n\n\tdef set_fire(self):\n\t\tself.fire = 1\n\tdef unset_fire(self):\n\t\tself.fire = 
0"},"avg_line_length":{"kind":"number","value":27.464673913,"string":"27.464674"},"max_line_length":{"kind":"number","value":264,"string":"264"},"alphanum_fraction":{"kind":"number","value":0.6105669338,"string":"0.610567"},"score":{"kind":"number","value":3.21875,"string":"3.21875"}}},{"rowIdx":960,"cells":{"hexsha":{"kind":"string","value":"1bde984571b7f4f4e48121c1095f28ca435b7f01"},"size":{"kind":"number","value":6195,"string":"6,195"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"PyFile/pyfile.py"},"max_stars_repo_name":{"kind":"string","value":"chyka-dev/PyFile"},"max_stars_repo_head_hexsha":{"kind":"string","value":"a52e69c712c10934bc88c0b75b3f536e12303c83"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"PyFile/pyfile.py"},"max_issues_repo_name":{"kind":"string","value":"chyka-dev/PyFile"},"max_issues_repo_head_hexsha":{"kind":"string","value":"a52e69c712c10934bc88c0b75b3f536e12303c83"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"PyFile/pyfile.py"},"max_forks_repo_name":{"kind":"string","value":"chyka-dev/PyFile"},"max_forks_repo_head_hexsha":{"kind":"string","value":"a52e69c712c10934bc88c0b75b3f536e12303c83"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"# -*- coding:utf-8 
-*-\n\nimport os\n\nimport six\n\nfrom .pystring import PyString\n\n\nclass PyFile(object):\n \"\"\"More human-friendly file access interface.\n Works on Python2 and 3.\n\n Usage:\n file = File(\".bashrc\")\n file.write(\"Hello, world!!\")\n print(file.read())\n del file\n \"\"\"\n class Mode:\n r = \"rb\"\n w = \"wb+\"\n a = \"ab+\"\n\n def __init__(self, path, encoding=\"utf-8\"):\n self.path = path\n self.encoding = encoding\n self.mode = None\n self._fd = None\n\n def __del__(self):\n self.ensure_close()\n\n def __str__(self):\n return \">> file = File(\"hello.txt\")\n >>> print(file.read())\n hello, world\n >>> print(file.read())\n\n >>> print(file.top())\n >>> print(file.read())\n hello, world\n \"\"\"\n return self.seek(0)\n\n def end(self):\n \"\"\"\n Usage:\n >>> file = File(\"hello.txt\")\n >>> print(file.end())\n >>> print(file.read())\n\n \"\"\"\n return self.seek(0, 2)\n\n def seek(self, *args, **kwargs):\n \"\"\"WIP what about mode?\"\"\"\n return self._fd.seek(*args, **kwargs)\n\n def truncate(self, *args, **kwargs):\n \"\"\"WIP How should I work??\"\"\"\n return self._fd.truncate(*args, **kwargs)\n\n def read(self, *args, **kwargs):\n \"\"\"\n Usage:\n >>> file = File(\"hello.txt\")\n >>> print(file.read())\n hello, world\n \"\"\"\n self.ensure_open(self.Mode.r)\n return PyString(self._fd.read(*args, **kwargs))\n\n def readline(self, *args, **kwargs):\n self.ensure_open(self.Mode.r)\n return PyString(self._fd.readline(*args, **kwargs))\n\n def readlines(self, *args, **kwargs):\n self.ensure_open(self.Mode.r)\n return (PyString(s) for s in self._fd.readlines(*args, **kwargs))\n\n def write(self, data, *args, **kwargs):\n \"\"\"\n Usage:\n >>> file = File(\"hello.txt\")\n >>> file.write(\"hello, world\")\n >>> print(file.read())\n hello, world\n \"\"\"\n self.ensure_open(self.Mode.w)\n return self.__write(data, *args, **kwargs)\n\n def writelines(self, seq, *args, **kwargs):\n \"\"\"\n Usage:\n >>> file = File(\"hello.txt\")\n >>> 
file.writelines([\"hello\", \"world\"])\n >>> print(file.read())\n hello\n world\n \"\"\"\n self.ensure_open(self.Mode.w)\n seq = [self.__ensure_nl(line) for line in seq] \n return self.__writelines(seq, *args, **kwargs)\n\n def append(self, data, *args, **kwargs):\n \"\"\"\n Usage:\n >>> file = File(\"hello.txt\")\n >>> print(file.read())\n hello\n >>> file.append(\", world\")\n >>> print(file.read())\n hello, world\n \"\"\"\n self.ensure_open(self.Mode.a)\n return self.__write(data, *args, **kwargs)\n\n def appendlines(self, seq, *args, **kwargs):\n \"\"\"\n Usage:\n >>> file = File(\"hello.txt\")\n >>> print(file.read())\n hello\n >>> file.appendlines([\"world\", \"!!\"])\n >>> print(file.read())\n hello\n world\n !!\n \"\"\"\n self.ensure_open(self.Mode.a)\n seq = [self.__ensure_nl(\"\")] + [self.__ensure_nl(line) for line in seq] \n return self.__writelines(seq, *args, **kwargs)\n\n def open(self, mode, *args, **kwargs):\n \"\"\" An alias of ensure_open\n\n Usage:\n >>> file = File(path, encoding).open(File.Mode.R)\n \"\"\"\n return self.ensure_open(mode, *args, **kwargs)\n\n def close(self, *args, **kwargs):\n \"\"\"An alias of ensure_close\n\n Usage:\n >>> file.close()\n \"\"\"\n self.ensure_close(*args, **kwargs)\n\n def ensure_open(self, mode, *args, **kwargs):\n \"\"\" Open the file with mode `mode` if not opend.\n Usually you don't have to use this method directly.\n Use read, write, append,.. 
methods instead.\n\n Usage:\n >>> file.ensure_open(File.Mode.R)\n \"\"\"\n if self._fd and self.mode == mode:\n return self\n self.mode = mode\n self._fd = self.__open(\n self.path, mode, *args, **kwargs\n )\n return self\n\n def ensure_close(self, *args, **kwargs):\n \"\"\"\n Close the file if opened.\n Usually you don't have to use this method directly.\n\n Usage:\n >>> file.ensure_close()\n \"\"\"\n if not self._fd:\n return\n self._fd.close(*args, **kwargs)\n self._fd = None\n self.mode = None\n return\n\n def __ensure_nl(self, string):\n \"\"\"Append new line chars to the end of `string`.\n\n Usage:\n >>> assert self.__ensure_nl(\"\") == \"\\n\"\n >>> assert self.__ensure_nl(\"hello\") == \"hello\\n\"\n \"\"\"\n if not string.endswith(\"\\n\"):\n string += \"\\n\"\n return string\n\n def __write(self, data, *args, **kwargs):\n \"\"\"Use this instead of fd.write.\n \"\"\"\n data = PyString(data, self.encoding)\n self._fd.write(data.encode(self.encoding), *args, **kwargs)\n\n def __writelines(self, seq, *args, **kwargs):\n \"\"\"Use this instead of fd.writelines.\n \"\"\"\n seq = [PyString(s, self.encoding).encode(self.encoding) for s in seq]\n self._fd.writelines(seq, *args, **kwargs)\n\n def __open(self, *args, **kwargs):\n # In python2, open doesn't accept `encoding`.\n # In python3, `encoding` cannot be specified on binary mode.\n if 'encoding' in kwargs:\n del kwargs['encoding']\n return open(*args, 
**kwargs)\n\n"},"avg_line_length":{"kind":"number","value":26.9347826087,"string":"26.934783"},"max_line_length":{"kind":"number","value":80,"string":"80"},"alphanum_fraction":{"kind":"number","value":0.5012106538,"string":"0.501211"},"score":{"kind":"number","value":3.375,"string":"3.375"}}},{"rowIdx":961,"cells":{"hexsha":{"kind":"string","value":"3e727e35fdc429e6a7c905d1f46288b990a809bb"},"size":{"kind":"number","value":1038,"string":"1,038"},"ext":{"kind":"string","value":"swift"},"lang":{"kind":"string","value":"Swift"},"max_stars_repo_path":{"kind":"string","value":"Sources/ValueProvider/Miscellanious.swift"},"max_stars_repo_name":{"kind":"string","value":"KizzyCode/persistentstate-swift"},"max_stars_repo_head_hexsha":{"kind":"string","value":"22566951b25fe6dd5e0a2d109bda1492b9f6d032"},"max_stars_repo_licenses":{"kind":"list like","value":["BSD-2-Clause","MIT"],"string":"[\n \"BSD-2-Clause\",\n \"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"Sources/ValueProvider/Miscellanious.swift"},"max_issues_repo_name":{"kind":"string","value":"KizzyCode/persistentstate-swift"},"max_issues_repo_head_hexsha":{"kind":"string","value":"22566951b25fe6dd5e0a2d109bda1492b9f6d032"},"max_issues_repo_licenses":{"kind":"list like","value":["BSD-2-Clause","MIT"],"string":"[\n \"BSD-2-Clause\",\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"Sources/ValueProvider/Miscellanious.swift"},"max_forks_repo_name":{"kind":"string","value":"KizzyCode/persistentstate-swift"},"max_forks_repo_head_hexsha":{"kind":"string","value":"22566951b25fe6dd5e0a2d109bda1492b9f6d032"},"max_forks_repo_licenses":{"kind":"list like","value":["BSD-2-Clause","MIT"],"string":"[\n 
\"BSD-2-Clause\",\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"import Foundation\n\n\n/// A type that implements a default constructor\npublic protocol Default {\n /// Creates a new default instance of `Self`\n init()\n}\n\n\n/// A mapped value/dictionary ID\npublic protocol ID {\n /// The ID bytes\n var bytes: Data { get }\n}\nextension Data: ID {\n public var bytes: Data { self }\n}\nextension String: ID {\n public var bytes: Data { self.data(using: .utf8)! }\n}\n\n\n/// A type that defines methods for encoding\npublic protocol ValueEncoder {\n /// Encodes an instance of the indicated type\n ///\n /// - Parameter value: The instance to encode\n func encode(_ value: T) throws -> Data\n}\nextension JSONEncoder: ValueEncoder {}\n\n\n/// A type that defines methods for decoding\npublic protocol ValueDecoder {\n /// Decodes an instance of the indicated type\n ///\n /// - Parameters:\n /// - type: The target type\n /// - data: The data to decode\n func decode(_ type: T.Type, from data: Data) throws -> T\n}\nextension JSONDecoder: ValueDecoder 
{}\n"},"avg_line_length":{"kind":"number","value":23.5909090909,"string":"23.590909"},"max_line_length":{"kind":"number","value":74,"string":"74"},"alphanum_fraction":{"kind":"number","value":0.6589595376,"string":"0.65896"},"score":{"kind":"number","value":3.109375,"string":"3.109375"}}},{"rowIdx":962,"cells":{"hexsha":{"kind":"string","value":"84c5ee02cb1cd393b85356f2a49149b3e3d7c9cc"},"size":{"kind":"number","value":2037,"string":"2,037"},"ext":{"kind":"string","value":"h"},"lang":{"kind":"string","value":"C"},"max_stars_repo_path":{"kind":"string","value":"Project8/NodeList.h"},"max_stars_repo_name":{"kind":"string","value":"nicky189/cs202"},"max_stars_repo_head_hexsha":{"kind":"string","value":"ecfb9b92e094bfa29102e586ffd615d719b45532"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"Project8/NodeList.h"},"max_issues_repo_name":{"kind":"string","value":"nicky189/cs202"},"max_issues_repo_head_hexsha":{"kind":"string","value":"ecfb9b92e094bfa29102e586ffd615d719b45532"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"Project8/NodeList.h"},"max_forks_repo_name":{"kind":"string","value":"nicky189/cs202"},"max_forks_repo_head_hexsha":{"kind":"string","value":"ecfb9b92e094bfa29102e586ffd615d719b45532"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"#ifndef 
NODELIST_H_\n#define NODELIST_H_\n\n#include \"DataType.h\"\n\nclass Node{\n\n friend class NodeList; //allows direct accessing of link and data from class NodeList\n\n public:\n Node() :\n\t m_next( NULL )\n\t{\n\t}\n\tNode(const DataType & data, Node * next = NULL) :\n\t m_next( next ),\n\t m_data( data )\n\t{\n\t}\n\tNode(const Node & other) :\n\t m_next( other.m_next ),\n\t m_data( other.m_data )\n\t{\n\t}\n\t\n DataType & data(){ //gets non-const reference, can be used to modify value of underlying data\n //return const_cast(static_cast(*this).getData()); //an alternative implementation, just for studying reference\n\t return m_data;\n }\n const DataType & data() const{ //gets const reference, can be used to access value of underlying data\n return m_data;\n }\n\n private:\n Node * m_next; \t\t\t\t\t\t\n DataType m_data;\n};\n\nclass NodeList{\n friend std::ostream & operator<<(std::ostream & os,\t //(i)\n\t\t\t\t\t const NodeList & nodeList);\n public:\n NodeList();\t\t\t\t\t\t\t\t \t\t //(1)\n NodeList(size_t count, const DataType & value);\t\t //(2)\n NodeList(const NodeList & other);\t\t\t\t\t //(3)\n ~NodeList();\t\t\t\t\t\t\t \t\t\t //(4)\n\n NodeList & operator= (const NodeList & rhs);\t\t //(5)\n\n Node * front();\t\t\t\t\t\t\t \t\t //(6)\n Node * back();\t\t\t\t\t\t\t\t\t\t //(7)\n\t\n Node * find(const DataType & target,\t\t\t\t //(8)\n Node * & previous,\n const Node * start = NULL);\t\n\t\n Node * insertAfter(const DataType & target, \t\t //(9)\n const DataType & value);\t \n Node * insertBefore(const DataType & target, \t //(10)\n const DataType & value);\n Node * erase(const DataType & target); \t\t\t //(11)\n\n\t\n DataType & operator[] (size_t position);\t\t\t //(12a)\n const DataType & operator[] (size_t position) const; //(12b)\n \n size_t size() const;\t\t\t\t\t\t\t\t //(13)\n bool empty() const;\t\t\t\t\t\t\t\t\t //(14)\n void clear();\t\t\t\t\t\t\t\t\t\t //(15)\t\n\n private:\n Node * m_head;\n};\n\n#endif 
//NODELIST_H_\n"},"avg_line_length":{"kind":"number","value":26.8026315789,"string":"26.802632"},"max_line_length":{"kind":"number","value":144,"string":"144"},"alphanum_fraction":{"kind":"number","value":0.5522827688,"string":"0.552283"},"score":{"kind":"number","value":3.078125,"string":"3.078125"}}},{"rowIdx":963,"cells":{"hexsha":{"kind":"string","value":"0a89d3a27590be15490c3a27155eb9c2ff4bd500"},"size":{"kind":"number","value":2074,"string":"2,074"},"ext":{"kind":"string","value":"kt"},"lang":{"kind":"string","value":"Kotlin"},"max_stars_repo_path":{"kind":"string","value":"collaboration-suite-utils/src/main/java/com/kaleyra/collaboration_suite_utils/network_observer/InternetObserver.kt"},"max_stars_repo_name":{"kind":"string","value":"Bandyer/Kaleyra-Android-Collaboration-Suite-Utils"},"max_stars_repo_head_hexsha":{"kind":"string","value":"216049e9b5e234e441c234e121d689c493591373"},"max_stars_repo_licenses":{"kind":"list like","value":["Apache-2.0","MIT"],"string":"[\n \"Apache-2.0\",\n \"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"collaboration-suite-utils/src/main/java/com/kaleyra/collaboration_suite_utils/network_observer/InternetObserver.kt"},"max_issues_repo_name":{"kind":"string","value":"Bandyer/Kaleyra-Android-Collaboration-Suite-Utils"},"max_issues_repo_head_hexsha":{"kind":"string","value":"216049e9b5e234e441c234e121d689c493591373"},"max_issues_repo_licenses":{"kind":"list like","value":["Apache-2.0","MIT"],"string":"[\n \"Apache-2.0\",\n 
\"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"collaboration-suite-utils/src/main/java/com/kaleyra/collaboration_suite_utils/network_observer/InternetObserver.kt"},"max_forks_repo_name":{"kind":"string","value":"Bandyer/Kaleyra-Android-Collaboration-Suite-Utils"},"max_forks_repo_head_hexsha":{"kind":"string","value":"216049e9b5e234e441c234e121d689c493591373"},"max_forks_repo_licenses":{"kind":"list like","value":["Apache-2.0","MIT"],"string":"[\n \"Apache-2.0\",\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"/*\n * Copyright (C) 2022 Kaleyra S.p.a. All Rights Reserved.\n * See LICENSE.txt for licensing information\n */\npackage com.kaleyra.collaboration_suite_utils.network_observer\n\nimport android.Manifest\nimport androidx.annotation.RequiresPermission\nimport kotlinx.coroutines.*\nimport kotlinx.coroutines.channels.BufferOverflow\nimport kotlinx.coroutines.flow.*\nimport java.net.HttpURLConnection\nimport java.net.URL\n\n/**\n * Utility class which allows to observe the internet state. It tells if there is actually internet connection.\n */\nclass InternetObserver @RequiresPermission(Manifest.permission.INTERNET) constructor(private val intervalInMs: Long) {\n\n private val isConnectedFlow: MutableSharedFlow =\n MutableSharedFlow(onBufferOverflow = BufferOverflow.DROP_OLDEST, replay = 1)\n private var job: Job = CoroutineScope(Dispatchers.IO).launch {\n while (true) {\n isConnectedFlow.emit(isConnected())\n delay(intervalInMs)\n }\n }\n\n /**\n * Call to observe the internet state. 
It returns true if internet is reachable, false otherwise\n *\n * @return SharedFlow\n */\n fun observe(): Flow = isConnectedFlow.distinctUntilChanged()\n\n /**\n * Stop the observer\n */\n fun stop() = job.cancel()\n\n private fun isConnected(): Boolean {\n var urlConnection: HttpURLConnection? = null\n val result = kotlin.runCatching {\n urlConnection = initConnection()\n urlConnection!!.responseCode == 204\n }.getOrNull() ?: false\n urlConnection?.disconnect()\n return result\n }\n\n private fun initConnection() =\n (URL(HOST).openConnection() as HttpURLConnection).apply {\n connectTimeout = CONNECT_TIMEOUT\n readTimeout = READ_TIMEOUT\n instanceFollowRedirects = false\n useCaches = false\n }\n\n private companion object {\n const val HOST = \"https://clients3.google.com/generate_204\"\n const val CONNECT_TIMEOUT = 10000\n const val READ_TIMEOUT = 10000\n }\n}"},"avg_line_length":{"kind":"number","value":32.40625,"string":"32.40625"},"max_line_length":{"kind":"number","value":118,"string":"118"},"alphanum_fraction":{"kind":"number","value":0.6827386692,"string":"0.682739"},"score":{"kind":"number","value":3,"string":"3"}}},{"rowIdx":964,"cells":{"hexsha":{"kind":"string","value":"2f3550f45eaf39f60243ac6016b0a728b6dafd98"},"size":{"kind":"number","value":8420,"string":"8,420"},"ext":{"kind":"string","value":"php"},"lang":{"kind":"string","value":"PHP"},"max_stars_repo_path":{"kind":"string","value":"src/DbAdmin/Traits/TableSelectTrait.php"},"max_stars_repo_name":{"kind":"string","value":"lagdo/jaxon-dbadmin"},"max_stars_repo_head_hexsha":{"kind":"string","value":"fb4389223d1f7ad9aba0cd6e8256c95275942661"},"max_stars_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n 
\"Apache-2.0\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"src/DbAdmin/Traits/TableSelectTrait.php"},"max_issues_repo_name":{"kind":"string","value":"lagdo/jaxon-dbadmin"},"max_issues_repo_head_hexsha":{"kind":"string","value":"fb4389223d1f7ad9aba0cd6e8256c95275942661"},"max_issues_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"src/DbAdmin/Traits/TableSelectTrait.php"},"max_forks_repo_name":{"kind":"string","value":"lagdo/jaxon-dbadmin"},"max_forks_repo_head_hexsha":{"kind":"string","value":"fb4389223d1f7ad9aba0cd6e8256c95275942661"},"max_forks_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":" $select,\n 'values' => (array)$options[\"columns\"],\n 'columns' => $columns,\n 'functions' => $this->driver->functions(),\n 'grouping' => $this->driver->grouping(),\n ];\n }\n\n /**\n * Print search box in select\n *\n * @param array $columns Selectable columns\n * @param array $indexes\n * @param array $options\n *\n * @return array\n */\n private function getFiltersOptions(array $columns, array $indexes, array $options): array\n {\n $fulltexts = [];\n foreach ($indexes as $i => $index) {\n $fulltexts[$i] = $index->type == \"FULLTEXT\" ? 
$this->util->html($options[\"fulltext\"][$i]) : '';\n }\n return [\n // 'where' => $where,\n 'values' => (array)$options[\"where\"],\n 'columns' => $columns,\n 'indexes' => $indexes,\n 'operators' => $this->driver->operators(),\n 'fulltexts' => $fulltexts,\n ];\n }\n\n /**\n * Print order box in select\n *\n * @param array $columns Selectable columns\n * @param array $options\n *\n * @return array\n */\n private function getSortingOptions(array $columns, array $options): array\n {\n $values = [];\n $descs = (array)$options[\"desc\"];\n foreach ((array)$options[\"order\"] as $key => $value) {\n $values[] = [\n 'col' => $value,\n 'desc' => $descs[$key] ?? 0,\n ];\n }\n return [\n // 'order' => $order,\n 'values' => $values,\n 'columns' => $columns,\n ];\n }\n\n /**\n * Print limit box in select\n *\n * @param string $limit Result of processSelectLimit()\n *\n * @return array\n */\n private function getLimitOptions(string $limit): array\n {\n return ['value' => $this->util->html($limit)];\n }\n\n /**\n * Print text length box in select\n *\n * @param int $textLength Result of processSelectLength()\n *\n * @return array\n */\n private function getLengthOptions(int $textLength): array\n {\n return [\n 'value' => $textLength === 0 ? 
0 : $this->util->html($textLength),\n ];\n }\n\n /**\n * Print action box in select\n *\n * @param array $indexes\n *\n * @return array\n */\n // private function getActionOptions(array $indexes)\n // {\n // $columns = [];\n // foreach ($indexes as $index) {\n // $current_key = \\reset($index->columns);\n // if ($index->type != \"FULLTEXT\" && $current_key) {\n // $columns[$current_key] = 1;\n // }\n // }\n // $columns[\"\"] = 1;\n // return ['columns' => $columns];\n // }\n\n /**\n * Print command box in select\n *\n * @return bool whether to print default commands\n */\n // private function getCommandOptions()\n // {\n // return !$this->driver->isInformationSchema($this->driver->database());\n // }\n\n /**\n * Print import box in select\n *\n * @return bool whether to print default import\n */\n // private function getImportOptions()\n // {\n // return !$this->driver->isInformationSchema($this->driver->database());\n // }\n\n /**\n * Print extra text in the end of a select form\n *\n * @param array $emailFields Fields holding e-mails\n * @param array $columns Selectable columns\n *\n * @return array\n */\n // private function getEmailOptions(array $emailFields, array $columns)\n // {\n // }\n\n /**\n * @param array $queryOptions\n *\n * @return int\n */\n private function setDefaultOptions(array &$queryOptions): int\n {\n $defaultOptions = [\n 'columns' => [],\n 'where' => [],\n 'order' => [],\n 'desc' => [],\n 'fulltext' => [],\n 'limit' => '50',\n 'text_length' => '100',\n 'page' => '1',\n ];\n foreach ($defaultOptions as $name => $value) {\n if (!isset($queryOptions[$name])) {\n $queryOptions[$name] = $value;\n }\n }\n $page = intval($queryOptions['page']);\n if ($page > 0) {\n $page -= 1; // Page numbers start at 0 here, instead of 1.\n }\n $queryOptions['page'] = $page;\n return $page;\n }\n\n /**\n * @param array $fields\n *\n * @return array\n */\n private function getFieldsOptions(array $fields): array\n {\n $rights = []; // privilege => 0\n $columns = 
[]; // selectable columns\n $textLength = 0;\n foreach ($fields as $key => $field) {\n $name = $this->util->fieldName($field);\n if (isset($field->privileges[\"select\"]) && $name != \"\") {\n $columns[$key] = html_entity_decode(strip_tags($name), ENT_QUOTES);\n if ($this->util->isShortable($field)) {\n $textLength = $this->util->processSelectLength();\n }\n }\n $rights[] = $field->privileges;\n }\n return [$rights, $columns, $textLength];\n }\n\n /**\n * @param array $indexes\n * @param array $select\n * @param mixed $tableStatus\n *\n * @return array\n */\n private function setPrimaryKey(array &$indexes, array $select, $tableStatus): array\n {\n $primary = null;\n $unselected = [];\n foreach ($indexes as $index) {\n if ($index->type == \"PRIMARY\") {\n $primary = array_flip($index->columns);\n $unselected = ($select ? $primary : []);\n foreach ($unselected as $key => $val) {\n if (in_array($this->driver->escapeId($key), $select)) {\n unset($unselected[$key]);\n }\n }\n break;\n }\n }\n\n $oid = $tableStatus->oid;\n if ($oid && !$primary) {\n /*$primary = */$unselected = [$oid => 0];\n $indexes[] = [\"type\" => \"PRIMARY\", \"columns\" => [$oid]];\n }\n\n return $unselected;\n }\n\n /**\n * @param string $table\n * @param array $columns\n * @param array $fields\n * @param array $select\n * @param array $group\n * @param array $where\n * @param array $order\n * @param array $unselected\n * @param int $limit\n * @param int $page\n *\n * @return TableSelectEntity\n */\n private function getSelectEntity(string $table, array $columns, array $fields, array $select,\n array $group, array $where, array $order, array $unselected, int $limit, int $page): TableSelectEntity\n {\n $select2 = $select;\n $group2 = $group;\n if (empty($select2)) {\n $select2[] = \"*\";\n $convert_fields = $this->driver->convertFields($columns, $fields, $select);\n if ($convert_fields) {\n $select2[] = substr($convert_fields, 2);\n }\n }\n foreach ($select as $key => $val) {\n $field = 
$fields[$this->driver->unescapeId($val)] ?? null;\n if ($field && ($as = $this->driver->convertField($field))) {\n $select2[$key] = \"$as AS $val\";\n }\n }\n $isGroup = count($group) < count($select);\n if (!$isGroup && !empty($unselected)) {\n foreach ($unselected as $key => $val) {\n $select2[] = $this->driver->escapeId($key);\n if (!empty($group2)) {\n $group2[] = $this->driver->escapeId($key);\n }\n }\n }\n\n // From driver.inc.php\n return new TableSelectEntity($table, $select2, $where, $group2, $order, $limit, $page);\n }\n}\n"},"avg_line_length":{"kind":"number","value":28.5423728814,"string":"28.542373"},"max_line_length":{"kind":"number","value":139,"string":"139"},"alphanum_fraction":{"kind":"number","value":0.4933491686,"string":"0.493349"},"score":{"kind":"number","value":3.015625,"string":"3.015625"}}},{"rowIdx":965,"cells":{"hexsha":{"kind":"string","value":"a1e4f9de1a2c3d177810684e3270894543c2d4e4"},"size":{"kind":"number","value":1124,"string":"1,124"},"ext":{"kind":"string","value":"c"},"lang":{"kind":"string","value":"C"},"max_stars_repo_path":{"kind":"string","value":"cours9/semaphore.c"},"max_stars_repo_name":{"kind":"string","value":"Menelau/LOG645-Course-examples"},"max_stars_repo_head_hexsha":{"kind":"string","value":"ef35b7c10daa2610dd1b4da10658b4acb623d7b4"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"cours9/semaphore.c"},"max_issues_repo_name":{"kind":"string","value":"Menelau/LOG645-Course-examples"},"max_issues_repo_head_hexsha":{"kind":"string","value":"ef35b7c10daa2610dd1b4da10658b4acb623d7b4"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"cours9/semaphore.c"},"max_forks_repo_name":{"kind":"string","value":"Menelau/LOG645-Course-examples"},"max_forks_repo_head_hexsha":{"kind":"string","value":"ef35b7c10daa2610dd1b4da10658b4acb623d7b4"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"number","value":2,"string":"2"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2021-09-23T15:56:33.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2021-12-15T12:15:23.000Z"},"content":{"kind":"string","value":"#include \n#include \n#include \n#include \n#include \n#include \n\nlong thread_count;\nsem_t semaphore;\n\nvoid* Semaphore_Function(void* args) {\n long rank = (long) args;\n sem_wait(&semaphore);\n printf(\"Hello from thread %ld.\\n\", rank);\n sleep(10); // simulating intensive calculation...\n printf(\"Thread %ld returning token.\\n\", rank);\n sem_post(&semaphore);\n return NULL;\n}\n\nint main(int argc, char *argv[]) {\n\n thread_count = strtol(argv[1], NULL, 10);\n\n pthread_t *thread_handles;\n thread_handles = malloc(thread_count * sizeof(pthread_t));\n\n // initializing a semaphore with 4 \"tokens\" or threads that can run in parallel\n sem_init(&semaphore, 0, 4);\n\n for (long thread = 0; thread < thread_count; thread++) {\n pthread_create(&thread_handles[thread], NULL, &Semaphore_Function, (void*) thread);\n }\n\n for (int thread = 0; thread < thread_count; thread++) {\n pthread_join(thread_handles[thread], NULL);\n }\n\n //destroy semaphore\n sem_destroy(&semaphore);\n free(thread_handles);\n\n return 
0;\n}\n"},"avg_line_length":{"kind":"number","value":24.9777777778,"string":"24.977778"},"max_line_length":{"kind":"number","value":91,"string":"91"},"alphanum_fraction":{"kind":"number","value":0.6628113879,"string":"0.662811"},"score":{"kind":"number","value":3.203125,"string":"3.203125"}}},{"rowIdx":966,"cells":{"hexsha":{"kind":"string","value":"163b389e91b9ce381ce43b77bd922f4308b69cbd"},"size":{"kind":"number","value":913,"string":"913"},"ext":{"kind":"string","value":"ts"},"lang":{"kind":"string","value":"TypeScript"},"max_stars_repo_path":{"kind":"string","value":"app/components/menu-bar.component.ts"},"max_stars_repo_name":{"kind":"string","value":"YaelMendes/GeoMapStories"},"max_stars_repo_head_hexsha":{"kind":"string","value":"df744aa757079074ddff3af8240d2f4905a67c54"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"app/components/menu-bar.component.ts"},"max_issues_repo_name":{"kind":"string","value":"YaelMendes/GeoMapStories"},"max_issues_repo_head_hexsha":{"kind":"string","value":"df744aa757079074ddff3af8240d2f4905a67c54"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"app/components/menu-bar.component.ts"},"max_forks_repo_name":{"kind":"string","value":"YaelMendes/GeoMapStories"},"max_forks_repo_head_hexsha":{"kind":"string","value":"df744aa757079074ddff3af8240d2f4905a67c54"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"import {Component, Input, Injectable} from '@angular/core';\nimport {User} from '../objects/user';\nimport {DataService} from '../services/data.service';\nimport {VARIABLES} from '../AppSettings';\n\n@Component({\n selector: 'menu-bar',\n templateUrl: '../html/menu-bar.component.html',\n styleUrls: ['../css/menu-bar.component.css']\n})\nexport class MenuBarComponent {\n\n model: User = new User('yafou');\n connected: boolean=false;\n\n constructor(private dataService: DataService) {\n }\n\n onSubmit() {\n console.log('onSubmit is called ! model=' + this.model);\n\n this.tryConnect(this.model);\n }\n\n\n private tryConnect(model: User) {\n\n if (VARIABLES.MODE_TEST) {\n\n this.connected=true;\n this.model = this.retrieveUserInfos();\n\n } else {\n //TODO: call backend to check user/pwd and retrieve infos/stories/....\n }\n }\n\n private retrieveUserInfos(): User {\n\n return this.model;\n }\n\n}\n"},"avg_line_length":{"kind":"number","value":20.75,"string":"20.75"},"max_line_length":{"kind":"number","value":76,"string":"76"},"alphanum_fraction":{"kind":"number","value":0.6593647317,"string":"0.659365"},"score":{"kind":"number","value":3.078125,"string":"3.078125"}}},{"rowIdx":967,"cells":{"hexsha":{"kind":"string","value":"407cc58f6332cad601adf4604b758ab8ec38d70e"},"size":{"kind":"number","value":2396,"string":"2,396"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"NVLL/analysis/word_freq.py"},"max_stars_repo_name":{"kind":"string","value":"jennhu/vmf_vae_nlp"},"max_stars_repo_head_hexsha":{"kind":"string","value":"95a39fa9f7a0659e432475e8dfb9a46e305d53b7"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_stars_count":{"kind":"number","value":159,"string":"159"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2018-08-31T15:57:36.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2022-03-27T15:31:38.000Z"},"max_issues_repo_path":{"kind":"string","value":"NVLL/analysis/word_freq.py"},"max_issues_repo_name":{"kind":"string","value":"jennhu/vmf_vae_nlp"},"max_issues_repo_head_hexsha":{"kind":"string","value":"95a39fa9f7a0659e432475e8dfb9a46e305d53b7"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"number","value":9,"string":"9"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2018-10-11T15:58:50.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2019-04-16T03:13:33.000Z"},"max_forks_repo_path":{"kind":"string","value":"NVLL/analysis/word_freq.py"},"max_forks_repo_name":{"kind":"string","value":"jennhu/vmf_vae_nlp"},"max_forks_repo_head_hexsha":{"kind":"string","value":"95a39fa9f7a0659e432475e8dfb9a46e305d53b7"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"number","value":21,"string":"21"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2018-09-01T17:57:20.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2021-12-17T03:31:01.000Z"},"content":{"kind":"string","value":"import os\n\n\ndef count(dic, fname):\n with open(fname, 'r') as fd:\n lines = fd.read().splitlines()\n filtered_sents = []\n for l in lines:\n words = l.split(\" \")\n _ratio = comp_unk_ratio(words)\n if _ratio <= 0.05:\n filtered_sents.append(words)\n for w in words:\n if w in dic:\n dic[w] += 1\n else:\n dic[w] = 1\n return dic, filtered_sents\n\n\ndef read_sent():\n pass\n\n\ndef comp_unk_ratio(sent):\n total = len(sent) + 0.000001\n cnt = 0\n for w in sent:\n if w == '':\n cnt += 1\n return cnt / total\n\n\ndef 
comp_ratio():\n pass\n\n\ndef generate_based_on_word_freq():\n count_word_freq()\n\n\ndef generate_based_on_sentiment():\n pass\n\n\ndef count_word_freq():\n d = {}\n os.chdir(\"../../data/yelp\")\n d, _ = count(d, \"valid.txt\")\n d, filtered_sents_test = count(d, \"test.txt\")\n\n sorted_d = sorted(d, key=d.get, reverse=True)\n print(\"Len of trimmed vocab {}\".format(len(sorted_d)))\n print(\"Num of Test samples after trimming {}\".format(len(filtered_sents_test)))\n uncommon = sorted_d[-10000:]\n print(uncommon)\n divide = 5\n every = int(len(filtered_sents_test) / divide)\n sent_dictionary = {}\n for sent in filtered_sents_test:\n total = len(sent)\n cnt = 0.\n for w in sent:\n if w in uncommon:\n cnt += 1\n sent_dictionary[\" \".join(sent)] = cnt / total\n sorted_sents = sorted(sent_dictionary, key=sent_dictionary.get, reverse=True)\n for piece in range(divide):\n start = int(piece * every)\n end = int((piece + 1) * every)\n tmp_sents = sorted_sents[start:end]\n with open(\"test-rare-\" + str(piece) + \".txt\", 'w') as fd:\n fd.write(\"\\n\".join(tmp_sents))\n\n\nif __name__ == \"__main__\":\n bank_size = 1000\n\n # Generate 2 set of sentences.\n # Before beginning\n # if a sentence has more than 10% UNK, remove it.\n ############\n # Based on WordFreq Vocab size=15K\n # Divide\n # Top 1K sample with largest Common Word Ratio (common word= top3K freq word)\n # Top 1K sample with largest Uncommon Word Ratio (uncommon word= top3K infreq word)\n generate_based_on_word_freq()\n ############\n # Based on Sentiment (sample from 5star and 1star)\n 
#############\n"},"avg_line_length":{"kind":"number","value":26.3296703297,"string":"26.32967"},"max_line_length":{"kind":"number","value":87,"string":"87"},"alphanum_fraction":{"kind":"number","value":0.5630217028,"string":"0.563022"},"score":{"kind":"number","value":3.390625,"string":"3.390625"}}},{"rowIdx":968,"cells":{"hexsha":{"kind":"string","value":"f082f97017dd469928617efda67490dacef988de"},"size":{"kind":"number","value":1135,"string":"1,135"},"ext":{"kind":"string","value":"js"},"lang":{"kind":"string","value":"JavaScript"},"max_stars_repo_path":{"kind":"string","value":"assets/js/rotation.js"},"max_stars_repo_name":{"kind":"string","value":"boardingschool/boardingschool.github.io"},"max_stars_repo_head_hexsha":{"kind":"string","value":"7c8aee829b294bf8ab9f1f02fb7efee5859b0382"},"max_stars_repo_licenses":{"kind":"list like","value":["CC-BY-3.0"],"string":"[\n \"CC-BY-3.0\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"assets/js/rotation.js"},"max_issues_repo_name":{"kind":"string","value":"boardingschool/boardingschool.github.io"},"max_issues_repo_head_hexsha":{"kind":"string","value":"7c8aee829b294bf8ab9f1f02fb7efee5859b0382"},"max_issues_repo_licenses":{"kind":"list like","value":["CC-BY-3.0"],"string":"[\n \"CC-BY-3.0\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"assets/js/rotation.js"},"max_forks_repo_name":{"kind":"string","value":"boardingschool/boardingschool.github.io"},"max_forks_repo_head_hexsha":{"kind":"string","value":"7c8aee829b294bf8ab9f1f02fb7efee5859b0382"},"max_forks_repo_licenses":{"kind":"list like","value":["CC-BY-3.0"],"string":"[\n 
\"CC-BY-3.0\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"$(window).load(function() { //start after HTML, images have loaded\n \n var InfiniteRotator =\n {\n init: function()\n {\n //initial fade-in time (in milliseconds)\n var initialFadeIn = 0;\n \n //interval between items (in milliseconds)\n var itemInterval = 5000;\n \n //cross-fade time (in milliseconds)\n var fadeTime = 2500;\n \n //count number of items\n var numberOfItems = $('.img-item').length;\n \n //set current item\n var currentItem = 0;\n \n //show first item\n $('.img-item').eq(currentItem).fadeIn(initialFadeIn);\n \n //loop through the items\n var infiniteLoop = setInterval(function(){\n $('.img-item').eq(currentItem).fadeOut(fadeTime);\n \n if(currentItem == numberOfItems -1){\n currentItem = 0;\n }else{\n currentItem++;\n }\n $('.img-item').eq(currentItem).fadeIn(fadeTime);\n \n }, itemInterval);\n }\n };\n \n InfiniteRotator.init();\n \n});"},"avg_line_length":{"kind":"number","value":27.0238095238,"string":"27.02381"},"max_line_length":{"kind":"number","value":66,"string":"66"},"alphanum_fraction":{"kind":"number","value":0.4898678414,"string":"0.489868"},"score":{"kind":"number","value":3.265625,"string":"3.265625"}}},{"rowIdx":969,"cells":{"hexsha":{"kind":"string","value":"d272b67bfa1137eb6c3871e24aadb62b6fb72ffa"},"size":{"kind":"number","value":12498,"string":"12,498"},"ext":{"kind":"string","value":"php"},"lang":{"kind":"string","value":"PHP"},"max_stars_repo_path":{"kind":"string","value":"parser.php"},"max_stars_repo_name":{"kind":"string","value":"hylandry/opensimstuff"},"max_stars_repo_head_hexsha":{"kind":"string","value":"18f5fc5d2399b14d464066b1754b096ce8db9c2d"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"parser.php"},"max_issues_repo_name":{"kind":"string","value":"hylandry/opensimstuff"},"max_issues_repo_head_hexsha":{"kind":"string","value":"18f5fc5d2399b14d464066b1754b096ce8db9c2d"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"number","value":1,"string":"1"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2020-06-09T17:27:35.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2020-06-09T17:27:35.000Z"},"max_forks_repo_path":{"kind":"string","value":"parser.php"},"max_forks_repo_name":{"kind":"string","value":"hylandry/opensimstuff"},"max_forks_repo_head_hexsha":{"kind":"string","value":"18f5fc5d2399b14d464066b1754b096ce8db9c2d"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"\\n\";\n }\n else\n $failcounter = \"0\";\n\n //Update nextcheck to be 10 minutes from now. The current OS instance\n //won't be checked again until at least this much time has gone by.\n $next = $now + 600;\n\n mysqli_query($db, \"UPDATE hostsregister SET nextcheck = $next,\" .\n \" checked = 1, failcounter = \" . $failcounter .\n \" WHERE host = '\" . mysqli_real_escape_string($db, $hostname) . \"'\" . \n \" AND port = '\" . mysqli_real_escape_string($db, $port) . 
\"'\");\n\n if ($xml != \"\") \n parse($hostname, $port, $xml);\n}\n\nfunction parse($hostname, $port, $xml)\n{\n global $now, $db;\n \n\tif (!isset($db)) $db=mysqli_connect ($DB_HOST, $DB_USER, $DB_PASS, $DB_NAME);\n\t\n ///////////////////////////////////////////////////////////////////////\n //\n // Search engine sim scanner\n //\n\n //\n // Load XML doc from URL\n //\n $objDOM = new DOMDocument();\n $objDOM->resolveExternals = false;\n\n //Don't try and parse if XML is invalid or we got an HTML 404 error.\n if ($objDOM->loadXML($xml) == False)\n return;\n\n //\n // Get the region data to update\n //\n $regiondata = $objDOM->getElementsByTagName(\"regiondata\");\n\n //If returned length is 0, collector method may have returned an error\n if ($regiondata->length == 0)\n return;\n\n $regiondata = $regiondata->item(0);\n\n //\n // Update nextcheck so this host entry won't be checked again until after\n // the DataSnapshot module has generated a new set of data to be parsed.\n //\n $expire = $regiondata->getElementsByTagName(\"expire\")->item(0)->nodeValue;\n $next = $now + $expire;\n\n $updater = mysqli_query($db, \"UPDATE hostsregister SET nextcheck = $next \" .\n \"WHERE host = '\" . mysqli_real_escape_string($db, $hostname) . \"' AND \" .\n \"port = '\" . mysqli_real_escape_string($db, $port) . 
\"'\");\n\n //\n // Get the region data to be saved in the database\n //\n $regionlist = $regiondata->getElementsByTagName(\"region\");\n\n foreach ($regionlist as $region)\n {\n $regioncategory = $region->getAttributeNode(\"category\")->nodeValue;\n\n //\n // Start reading the Region info\n //\n $info = $region->getElementsByTagName(\"info\")->item(0);\n\n $regionuuid = $info->getElementsByTagName(\"uuid\")->item(0)->nodeValue;\n\n $regionname = $info->getElementsByTagName(\"name\")->item(0)->nodeValue;\n\n $regionhandle = $info->getElementsByTagName(\"handle\")->item(0)->nodeValue;\n\n $url = $info->getElementsByTagName(\"url\")->item(0)->nodeValue;\n\n //\n // First, check if we already have a region that is the same\n //\n $check = mysqli_query($db, \"SELECT * FROM regions WHERE regionuuid = '\" .\n mysqli_real_escape_string($db, $regionuuid) . \"'\");\n\n if (mysqli_num_rows($check) > 0)\n {\n mysqli_query($db, \"DELETE FROM regions WHERE regionuuid = '\" .\n mysqli_real_escape_string($db, $regionuuid) . \"'\");\n mysqli_query($db, \"DELETE FROM parcels WHERE regionuuid = '\" .\n mysqli_real_escape_string($db, $regionuuid) . \"'\");\n mysqli_query($db, \"DELETE FROM allparcels WHERE regionUUID = '\" .\n mysqli_real_escape_string($db, $regionuuid) . \"'\");\n mysqli_query($db, \"DELETE FROM parcelsales WHERE regionUUID = '\" .\n mysqli_real_escape_string($db, $regionuuid) . \"'\");\n mysqli_query($db, \"DELETE FROM objects WHERE regionuuid = '\" .\n mysqli_real_escape_string($db, $regionuuid) . 
\"'\");\n }\n\n $data = $region->getElementsByTagName(\"data\")->item(0);\n $estate = $data->getElementsByTagName(\"estate\")->item(0);\n\n $username = $estate->getElementsByTagName(\"name\")->item(0)->nodeValue;\n $useruuid = $estate->getElementsByTagName(\"uuid\")->item(0)->nodeValue;\n\n $estateid = $estate->getElementsByTagName(\"id\")->item(0)->nodeValue;\n\n //\n // Second, add the new info to the database\n //\n $sql = \"INSERT INTO regions VALUES('\" .\n mysqli_real_escape_string($db, $regionname) . \"','\" .\n mysqli_real_escape_string($db, $regionuuid) . \"','\" .\n mysqli_real_escape_string($db, $regionhandle) . \"','\" .\n mysqli_real_escape_string($db, $url) . \"','\" .\n mysqli_real_escape_string($db, $username) .\"','\" .\n mysqli_real_escape_string($db, $useruuid) .\"')\";\n\n mysqli_query($db, $sql);\n\n //\n // Start reading the parcel info\n //\n $parcel = $data->getElementsByTagName(\"parcel\");\n\n foreach ($parcel as $value)\n {\n $parcelname = $value->getElementsByTagName(\"name\")->item(0)->nodeValue;\n\n $parceluuid = $value->getElementsByTagName(\"uuid\")->item(0)->nodeValue;\n\n $infouuid = $value->getElementsByTagName(\"infouuid\")->item(0)->nodeValue;\n\n $parcellanding = $value->getElementsByTagName(\"location\")->item(0)->nodeValue;\n\n $parceldescription = $value->getElementsByTagName(\"description\")->item(0)->nodeValue;\n\n $parcelarea = $value->getElementsByTagName(\"area\")->item(0)->nodeValue;\n\n $parcelcategory = $value->getAttributeNode(\"category\")->nodeValue;\n\n $parcelsaleprice = $value->getAttributeNode(\"salesprice\")->nodeValue;\n\n $dwell = $value->getElementsByTagName(\"dwell\")->item(0)->nodeValue;\n\n $owner = $value->getElementsByTagName(\"owner\")->item(0);\n\n $owneruuid = $owner->getElementsByTagName(\"uuid\")->item(0)->nodeValue;\n\n // Adding support for groups\n\n $group = $value->getElementsByTagName(\"group\")->item(0);\n \n if ($group != \"\")\n {\n $groupuuid = 
$group->getElementsByTagName(\"groupuuid\")->item(0)->nodeValue;\n }\n else\n {\n $groupuuid = \"00000000-0000-0000-0000-000000000000\";\n }\n\n //\n // Check bits on Public, Build, Script\n //\n $parcelforsale = $value->getAttributeNode(\"forsale\")->nodeValue;\n $parceldirectory = $value->getAttributeNode(\"showinsearch\")->nodeValue;\n $parcelbuild = $value->getAttributeNode(\"build\")->nodeValue;\n $parcelscript = $value->getAttributeNode(\"scripts\")->nodeValue;\n $parcelpublic = $value->getAttributeNode(\"public\")->nodeValue;\n\n //\n // Save\n //\n //$db=mysqli_connect ($DB_HOST, $DB_USER, $DB_PASSWORD);\n $sql = \"INSERT INTO allparcels VALUES('\" .\n mysqli_real_escape_string($db, $regionuuid) . \"','\" .\n mysqli_real_escape_string($db, $parcelname) . \"','\" .\n mysqli_real_escape_string($db, $owneruuid) . \"','\" .\n mysqli_real_escape_string($db, $groupuuid) . \"','\" .\n mysqli_real_escape_string($db, $parcellanding) . \"','\" .\n mysqli_real_escape_string($db, $parceluuid) . \"','\" .\n mysqli_real_escape_string($db, $infouuid) . \"','\" .\n mysqli_real_escape_string($db, $parcelarea) . \"' )\";\n\n mysqli_query($db,$sql);\n\n if ($parceldirectory == \"true\")\n {\n $sql = \"INSERT INTO parcels VALUES('\" .\n mysqli_real_escape_string($db, $regionuuid) . \"','\" .\n mysqli_real_escape_string($db, $parcelname) . \"','\" .\n mysqli_real_escape_string($db, $parceluuid) . \"','\" .\n mysqli_real_escape_string($db, $parcellanding) . \"','\" .\n mysqli_real_escape_string($db, $parceldescription) . \"','\" .\n mysqli_real_escape_string($db, $parcelcategory) . \"','\" .\n mysqli_real_escape_string($db, $parcelbuild) . \"','\" .\n mysqli_real_escape_string($db, $parcelscript) . \"','\" .\n mysqli_real_escape_string($db, $parcelpublic) . \"','\".\n mysqli_real_escape_string($db, $dwell) . \"','\" .\n mysqli_real_escape_string($db, $infouuid) . \"','\" .\n mysqli_real_escape_string($db, $regioncategory) . 
\"')\";\n\n mysqli_query($db, $sql);\n }\n\n if ($parcelforsale == \"true\")\n {\n $sql = \"INSERT INTO parcelsales VALUES('\" .\n mysqli_real_escape_string($db, $regionuuid) . \"','\" .\n mysqli_real_escape_string($db, $parcelname) . \"','\" .\n mysqli_real_escape_string($db, $parceluuid) . \"','\" .\n mysqli_real_escape_string($db, $parcelarea) . \"','\" .\n mysqli_real_escape_string($db, $parcelsaleprice) . \"','\" .\n mysqli_real_escape_string($db, $parcellanding) . \"','\" .\n mysqli_real_escape_string($db, $infouuid) . \"', '\" .\n mysqli_real_escape_string($db, $dwell) . \"', '\" .\n mysqli_real_escape_string($db, $estateid) . \"', '\" .\n mysqli_real_escape_string($db, $regioncategory) . \"')\";\n\n mysqli_query($db, $sql);\n }\n }\n\n //\n // Handle objects\n //\n $objects = $data->getElementsByTagName(\"object\");\n\n foreach ($objects as $value)\n {\n $uuid = $value->getElementsByTagName(\"uuid\")->item(0)->nodeValue;\n\n $regionuuid = $value->getElementsByTagName(\"regionuuid\")->item(0)->nodeValue;\n\n $parceluuid = $value->getElementsByTagName(\"parceluuid\")->item(0)->nodeValue;\n\n $location = $value->getElementsByTagName(\"location\")->item(0)->nodeValue;\n\n $title = $value->getElementsByTagName(\"title\")->item(0)->nodeValue;\n\n $description = $value->getElementsByTagName(\"description\")->item(0)->nodeValue;\n\n $flags = $value->getElementsByTagName(\"flags\")->item(0)->nodeValue;\n\n mysqli_query($db, \"INSERT INTO objects VALUES('\" .\n mysqli_real_escape_string($db, $uuid) . \"','\" .\n mysqli_real_escape_string($db, $parceluuid) . \"','\" .\n mysqli_real_escape_string($db, $location) . \"','\" .\n mysqli_real_escape_string($db, $title) . \"','\" .\n mysqli_real_escape_string($db, $description) . \"','\" .\n mysqli_real_escape_string($db, $regionuuid) . 
\"')\");\n }\n }\n}\n\n$sql = \"SELECT host, port FROM hostsregister \" .\n \"WHERE nextcheck < $now AND checked = 0 LIMIT 0,10\";\n\n$jobsearch = mysqli_query($db,$sql);\n\n//\n// If the sql query returns no rows, all entries in the hostsregister\n// table have been checked. Reset the checked flag and re-run the\n// query to select the next set of hosts to be checked.\n//\nif (mysqli_num_rows($jobsearch) == 0)\n{\n mysqli_query($db, \"UPDATE hostsregister SET checked = 0\");\n $jobsearch = mysqli_query($db,$sql);\n}\n\nwhile ($jobs = mysqli_fetch_row($jobsearch))\n CheckHost($jobs[0], $jobs[1]);\n?>\n"},"avg_line_length":{"kind":"number","value":36.9763313609,"string":"36.976331"},"max_line_length":{"kind":"number","value":97,"string":"97"},"alphanum_fraction":{"kind":"number","value":0.5520883341,"string":"0.552088"},"score":{"kind":"number","value":3.0625,"string":"3.0625"}}},{"rowIdx":970,"cells":{"hexsha":{"kind":"string","value":"c7cfca04a6d46c8657fca251fdef016d7c180a06"},"size":{"kind":"number","value":7637,"string":"7,637"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"src/main.py"},"max_stars_repo_name":{"kind":"string","value":"FranciscoCharles/doom-fire-simulator"},"max_stars_repo_head_hexsha":{"kind":"string","value":"fccd45e5c96d37de00a6979ec00a5e13a668d4d9"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_stars_count":{"kind":"number","value":1,"string":"1"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2021-05-19T16:12:37.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2021-05-19T16:12:37.000Z"},"max_issues_repo_path":{"kind":"string","value":"src/main.py"},"max_issues_repo_name":{"kind":"string","value":"FranciscoCharles/doom-fire-simulator"},"max_issues_repo_head_hexsha":{"kind":"string","value":"fccd45e5c96d37de00a6979ec00a5e13a668d4d9"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"src/main.py"},"max_forks_repo_name":{"kind":"string","value":"FranciscoCharles/doom-fire-simulator"},"max_forks_repo_head_hexsha":{"kind":"string","value":"fccd45e5c96d37de00a6979ec00a5e13a668d4d9"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"#_*_coding:utf-8_*_\n#created by FranciscoCharles in april,2021.\nfrom os import environ\nif 'PYGAME_HIDE_SUPPORT_PROMPT' not in environ:\n environ['PYGAME_HIDE_SUPPORT_PROMPT'] = 'hidden'\ndel environ\n\nimport pygame\nimport colorsys\nimport numpy as np\n\nfrom menu import Menu, HslColor\nfrom fire import FireColorArray\nfrom random import randint\nfrom typing import Tuple, List, Optional, NewType\n\nRgbColor = Tuple[int,int,int]\n\ndef rgb_to_float(r:int, g:int, b:int) ->[HslColor]:\n return (r/255, g/255, b/255)\ndef rgb_to_int(r:int, g:int, b:int) -> RgbColor:\n return (int(r*255), int(g*255), int(b*255))\n\nclass DoomFireSimulator:\n def __init__(self) -> None:\n pygame.init()\n pygame.font.init()\n 
self.SCREEN_W = 830\n self.SCREEN_H = 480\n pygame.display.set_caption('FireDoomSimaltor v1.0.1')\n self.display = pygame.display.set_mode((self.SCREEN_W,self.SCREEN_H))\n icon = pygame.image.load('images/icon32.png')\n pygame.display.set_icon(icon)\n self.clock = pygame.time.Clock()\n\n self.FPS = 20\n self.on_fire = True\n self.fire_size = (8,8)\n self.fire_x = 40\n self.fire_y = 40\n self.decay_value = 3\n self.wind_force = 7\n self.wind_mi_force = 0\n self.colors = self.selectColorPalette()\n self.fire_array = FireColorArray(40,50)\n self.setBaseFlameValue(len(self.colors)-1)\n self.menu = Menu(len(self.colors)-1)\n\n def selectColorPalette(self, options: Optional[HslColor] = None) -> List[RgbColor]:\n self.colors = [\n (7,7,7),(31,7,7),(47,15,7),(71,15,7),(87,23,7),(103,31,7),(119,31,7),(143,39,7),(159,47,7),(175,63,7),\n (191,71,7),(199,71,7),(223,79,7),(223,87,7),(223,87,7),(215,95,7),(215,95,7),(215,103,15),(207,111,15),\n (207,119,15),(207,127,15),(207,135,23),(199,135,23),(199,143,23),(199,151,31),(191,159,31),(191,159,31),\n (191,167,39),(191,167,39),(191,175,47),(183,175,47),(183,183,47),(183,183,55),(207,207,111),(223,223,159),\n (239,239,199),(255,255,255)]\n\n if isinstance(options, tuple) and len(options)==3:\n (shift_color, decay_l,decay_s) = options\n shift_color = (shift_color%360)/360\n result = []\n for cor in self.colors:\n cor = rgb_to_float(*cor)\n (_,l,s) = colorsys.rgb_to_hls(*cor)\n cor = colorsys.hls_to_rgb(shift_color, decay_l*l, decay_s*s)\n result.append(rgb_to_int(*cor))\n self.colors = result\n return self.colors\n \n def setBaseFlameValue(self, value: int) -> None:\n row,columns = self.fire_array.shape\n row -= 1\n for column in range(columns):\n self.fire_array[row,column] = value\n \n def updatePixelFire(self, row: int, column: int) -> None:\n\n decay = randint(0, self.decay_value)\n shift = column + randint(self.wind_mi_force, self.wind_force)\n self.fire_array[row, shift] = (self.fire_array[row+1, column]-decay)\n if 
self.fire_array[row, shift]<0:\n self.fire_array[row, shift] = 0\n return shift\n \n def evaporateFire(self) -> None:\n rows,columns = self.fire_array.shape\n for row in range(rows-1):\n for column in range(columns):\n self.updatePixelFire(row, column)\n\n def drawFire(self) -> None:\n rows,columns = self.fire_array.shape\n w,h = self.fire_size\n for row in range(rows):\n for column in range(columns):\n color = self.fire_array[row,column]\n rect = (self.fire_x+column*w,self.fire_y+row*h,w,h)\n pygame.draw.rect(self.display, self.colors[color], rect)\n self.evaporateFire()\n\n @property\n def rectFire(self) -> Tuple[int,int,int,int]:\n h,w = self.fire_array.shape\n return (self.fire_x, self.fire_y, w*self.fire_size[0], h*self.fire_size[1])\n\n def run(self) -> None:\n ticks = 0\n game = True\n draw_menu = True\n valid_keys = ['q','w','a','s','z','left','right','up','down']\n key_pressed = ''\n (x,y,w,h) = self.rectFire\n fire_rect = (x-1, y-1, w+2, h+2)\n rect_menu = (399, 39, 392, 402)\n positions = self.menu.getListPositionMenu(620, 94)\n\n pygame.draw.rect(self.display, (0xaaaaaa), fire_rect, 1)\n\n while game:\n self.clock.tick(self.FPS)\n for e in pygame.event.get():\n if e.type == pygame.QUIT:\n game = False\n break\n elif e.type == pygame.KEYDOWN:\n key = pygame.key.name(e.key)\n if key == 'escape':\n game = False\n break\n elif key in valid_keys:\n ticks = pygame.time.get_ticks()\n key_pressed = key\n getattr(self, key_pressed)()\n draw_menu = True\n\n elif e.type == pygame.KEYUP:\n key_pressed = ''\n\n if key_pressed and (pygame.time.get_ticks()-ticks)>500:\n getattr(self, key_pressed)()\n draw_menu = True\n\n if game:\n\n if draw_menu:\n pygame.draw.rect(self.display, (0), rect_menu)\n self.menu.draw(self.display, positions)\n pygame.draw.rect(self.display, (0xaaaaaa), rect_menu, 1)\n pygame.display.update(rect_menu)\n draw_menu = False\n\n self.drawFire()\n pygame.display.update(fire_rect)\n\n self.stop()\n\n def changePalette(self) -> None:\n color = 
None\n if self.menu['color intensity']['value']:\n color = self.menu.currentColorValue()\n self.selectColorPalette(color)\n\n def updateSimulationValues(self) -> None:\n name = self.menu.name\n if name=='FPS':\n self.FPS = self.menu['FPS']['value']\n elif name=='decay':\n self.decay_value = self.menu['decay']['value']\n elif name=='wind direction':\n type_index = self.menu['wind direction']['value']\n type_value = self.menu['wind direction']['types'][type_index]\n wind_force = self.menu['wind force']['value']\n if type_value=='left':\n self.wind_mi_force = 0\n self.wind_force = wind_force\n elif type_value=='right':\n self.wind_mi_force = -wind_force\n self.wind_force = 0\n else:\n self.wind_force = wind_force\n self.wind_mi_force = -wind_force\n elif name=='wind force':\n self.wind_force = self.menu['wind force']['value']\n elif name in ['H','S','L','color intensity']:\n self.changePalette()\n\n def left(self) -> None:\n self.menu.decrement()\n self.updateSimulationValues()\n def right(self) -> None:\n self.menu.increment()\n self.updateSimulationValues()\n def up(self) -> None:\n self.menu.up()\n def down(self) -> None:\n self.menu.down()\n def a(self) -> None:\n self.left()\n def s(self) -> None:\n self.right()\n def w(self) -> None:\n self.up()\n def z(self) -> None:\n self.down()\n def q(self) -> None:\n self.on_fire = not self.on_fire\n self.setBaseFlameValue((len(self.colors)-1) if self.on_fire else 0)\n def stop(self) -> None:\n pygame.quit()\n\nif __name__ == '__main__':\n 
DoomFireSimulator().run()\n\n\n\n"},"avg_line_length":{"kind":"number","value":35.5209302326,"string":"35.52093"},"max_line_length":{"kind":"number","value":118,"string":"118"},"alphanum_fraction":{"kind":"number","value":0.5525729999,"string":"0.552573"},"score":{"kind":"number","value":3.125,"string":"3.125"}}},{"rowIdx":971,"cells":{"hexsha":{"kind":"string","value":"332ec3ad83ab42693d9db460bc909a8573da26d4"},"size":{"kind":"number","value":1210,"string":"1,210"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"src/model/synapses/numba_backend/VoltageJump.py"},"max_stars_repo_name":{"kind":"string","value":"Fassial/pku-intern"},"max_stars_repo_head_hexsha":{"kind":"string","value":"4463e7d5a5844c8002f7e3d01b4fadc3a20e2038"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"src/model/synapses/numba_backend/VoltageJump.py"},"max_issues_repo_name":{"kind":"string","value":"Fassial/pku-intern"},"max_issues_repo_head_hexsha":{"kind":"string","value":"4463e7d5a5844c8002f7e3d01b4fadc3a20e2038"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"src/model/synapses/numba_backend/VoltageJump.py"},"max_forks_repo_name":{"kind":"string","value":"Fassial/pku-intern"},"max_forks_repo_head_hexsha":{"kind":"string","value":"4463e7d5a5844c8002f7e3d01b4fadc3a20e2038"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"\"\"\"\nCreated on 12:39, June. 4th, 2021\nAuthor: fassial\nFilename: VoltageJump.py\n\"\"\"\nimport brainpy as bp\n\n__all__ = [\n \"VoltageJump\",\n]\n\nclass VoltageJump(bp.TwoEndConn):\n target_backend = ['numpy', 'numba', 'numba-parallel', 'numba-cuda']\n\n def __init__(self, pre, post, conn,\n weight = 1., delay = 0., **kwargs\n ):\n # init params\n self.weight = weight\n self.delay = delay\n\n # init connections\n self.conn = conn(pre.size, post.size)\n self.pre_ids, self.post_ids = self.conn.requires(\"pre_ids\", \"post_ids\")\n self.size = len(self.pre_ids)\n\n # init vars\n self.Isyn = self.register_constant_delay(\"Isyn\",\n size = self.size,\n delay_time = self.delay\n )\n\n # init super\n super(VoltageJump, self).__init__(pre = pre, post = post, **kwargs)\n\n def update(self, _t):\n # set post.V\n for i in range(self.size):\n pre_id, post_id = self.pre_ids[i], self.post_ids[i]\n self.Isyn.push(i,\n self.pre.spike[pre_id] * self.weight\n )\n if not self.post.refractory[post_id]:\n self.post.V[post_id] += 
self.Isyn.pull(i)\n\n"},"avg_line_length":{"kind":"number","value":26.3043478261,"string":"26.304348"},"max_line_length":{"kind":"number","value":79,"string":"79"},"alphanum_fraction":{"kind":"number","value":0.5661157025,"string":"0.566116"},"score":{"kind":"number","value":3.015625,"string":"3.015625"}}},{"rowIdx":972,"cells":{"hexsha":{"kind":"string","value":"fea027e63c8ec137410b07cae121edd763bbcdf7"},"size":{"kind":"number","value":1270,"string":"1,270"},"ext":{"kind":"string","value":"kt"},"lang":{"kind":"string","value":"Kotlin"},"max_stars_repo_path":{"kind":"string","value":"buildSrc/src/main/kotlin/MarkdownUtil.kt"},"max_stars_repo_name":{"kind":"string","value":"StanleyProjects/AndroidExtension.UserInterface"},"max_stars_repo_head_hexsha":{"kind":"string","value":"245a7c3c8f7518ddcf17f02629b01404800852da"},"max_stars_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"buildSrc/src/main/kotlin/MarkdownUtil.kt"},"max_issues_repo_name":{"kind":"string","value":"StanleyProjects/AndroidExtension.UserInterface"},"max_issues_repo_head_hexsha":{"kind":"string","value":"245a7c3c8f7518ddcf17f02629b01404800852da"},"max_issues_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n 
\"Apache-2.0\"\n]"},"max_issues_count":{"kind":"number","value":11,"string":"11"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2021-12-19T13:37:00.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2022-01-13T16:00:59.000Z"},"max_forks_repo_path":{"kind":"string","value":"buildSrc/src/main/kotlin/MarkdownUtil.kt"},"max_forks_repo_name":{"kind":"string","value":"StanleyProjects/AndroidExtension.UserInterface"},"max_forks_repo_head_hexsha":{"kind":"string","value":"245a7c3c8f7518ddcf17f02629b01404800852da"},"max_forks_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"object MarkdownUtil {\n fun url(\n text: String,\n value: String\n ): String {\n return \"[$text]($value)\"\n }\n\n fun image(\n text: String,\n url: String\n ): String {\n return \"!\" + url(text = text, value = url)\n }\n\n fun table(\n heads: List,\n dividers: List,\n rows: List>\n ): String {\n require(heads.size > 1) { \"Size of heads must be more than 1!\" }\n require(heads.size == dividers.size) {\n \"Size of heads and size of dividers must be equal!\"\n }\n val firstRow = rows.firstOrNull()\n requireNotNull(firstRow) { \"Rows must be exist!\" }\n for (i in 1 until rows.size) {\n require(firstRow.size == rows[i].size) {\n \"Size of columns in all rows must be equal!\"\n }\n }\n require(heads.size == firstRow.size) {\n \"Size of heads and size of rows must be equal!\"\n }\n val result = mutableListOf(\n heads.joinToString(separator = \"|\"),\n dividers.joinToString(separator = \"|\")\n )\n result.addAll(rows.map { it.joinToString(separator = \"|\") })\n return result.joinToString(separator = SystemUtil.newLine)\n 
}\n}\n"},"avg_line_length":{"kind":"number","value":29.5348837209,"string":"29.534884"},"max_line_length":{"kind":"number","value":72,"string":"72"},"alphanum_fraction":{"kind":"number","value":0.531496063,"string":"0.531496"},"score":{"kind":"number","value":3.328125,"string":"3.328125"}}},{"rowIdx":973,"cells":{"hexsha":{"kind":"string","value":"4e0cf10ba62a61bbde206ff0ae503fbef4c0c6dc"},"size":{"kind":"number","value":1953,"string":"1,953"},"ext":{"kind":"string","value":"asm"},"lang":{"kind":"string","value":"Assembly"},"max_stars_repo_path":{"kind":"string","value":"Microcontroller_Lab/Lab_6/Lab_6/Read_Code_With_Comments.asm"},"max_stars_repo_name":{"kind":"string","value":"MuhammadAlBarham/pic16f778_projects"},"max_stars_repo_head_hexsha":{"kind":"string","value":"c12e15e48a62cd16f869cbe9411728a4eea8f499"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"Microcontroller_Lab/Lab_6/Lab_6/Read_Code_With_Comments.asm"},"max_issues_repo_name":{"kind":"string","value":"MuhammadAlBarham/pic16f778_projects"},"max_issues_repo_head_hexsha":{"kind":"string","value":"c12e15e48a62cd16f869cbe9411728a4eea8f499"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"Microcontroller_Lab/Lab_6/Lab_6/Read_Code_With_Comments.asm"},"max_forks_repo_name":{"kind":"string","value":"MuhammadAlBarham/pic16f778_projects"},"max_forks_repo_head_hexsha":{"kind":"string","value":"c12e15e48a62cd16f869cbe9411728a4eea8f499"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"Include \"p16F84A.inc\"\n; ----------------------------------------------------------\n; General Purpose RAM Assignments\n; ----------------------------------------------------------\ncblock\t\t0x0C\t\nCounter\nEndc\n\n; ----------------------------------------------------------\n; Macro Definitions\n; ----------------------------------------------------------\nRead_EEPROM\tmacro\nBcf STATUS, RP0 ;Go to Bank 0 \nClrf EEADR ;Clear EEADR (EEADR=0)\nBsf STATUS, RP0 ;Go to Bank 1 \nBsf EECON1, RD ;Begin Read \nBcf STATUS, RP0 ;Go to Bank 0 \nEndm\n; ----------------------------------------------------------\n; Vector definition\n; ----------------------------------------------------------\n\t\t\torg 0x000\n\t\t\tnop\n\t\t\tgoto Main\n\t\t\nINT_Routine\torg 0x004\n\t\tgoto INT_Routine\n; ----------------------------------------------------------\n; The main Program\n; ---------------------------------------------------------- \n\tMain\nRead_EEPROM\n\tClrf Counter ;Clear the counter \n\tBsf STATUS, RP0 ;Go to Bank 1 \n\tClrf\t\tTRISB ;Make PORTB as OUTPUT\n\tBcf STATUS, RP0 ;Go to BANK 0\n\tMovlw\t\tA'H' ;Move Character to W-Reg\n\tSubwf\t\tEEDATA,w ;Check If the first char. is H \n\tBtfsc\t\tSTATUS,Z ;If Yes goto finish \n\tGoto\t\tFinish\n\tIncf\t\tCounter,f \n\tMovlw\t\tA'M'\n\tSubwf\t\tEEDATA,w\n\tBtfsc\t\tSTATUS,Z\nFinish\t\n\tIncf\tCounter,f\n\tCall\t\tLook_Up\n\tMovwf\t\tPORTB\nLoop\n\tGoto\t\tLoop\n\t\n; ----------------------------------------------------------\n; Sub Routine Definitions\n; ----------------------------------------------------------\n;This Look_Up table for 7-Seg. 
Display \nLook_Up \nMovf Counter,w \nAddwf PCL,f \nRetlw B'00111111' \t; Number 0 \nRetlw B'00000110' \t; Number 1\nRetlw B'01011011' \t; Number 2\nRetlw B'01001111' \t; Number 3\nRetlw B'01100110' \t; Number 4\nRetlw B'01101101' \t; Number 5\n\nend\t\t\t\t\n"},"avg_line_length":{"kind":"number","value":29.1492537313,"string":"29.149254"},"max_line_length":{"kind":"number","value":61,"string":"61"},"alphanum_fraction":{"kind":"number","value":0.4101382488,"string":"0.410138"},"score":{"kind":"number","value":3.046875,"string":"3.046875"}}},{"rowIdx":974,"cells":{"hexsha":{"kind":"string","value":"fdaf633a2d6691a5cac79970a134936e25b5c10c"},"size":{"kind":"number","value":3480,"string":"3,480"},"ext":{"kind":"string","value":"lua"},"lang":{"kind":"string","value":"Lua"},"max_stars_repo_path":{"kind":"string","value":"src/scripts/client/gameplay/viz_handlers/flash_units.lua"},"max_stars_repo_name":{"kind":"string","value":"Psimage/Lovely-Camera-Mod"},"max_stars_repo_head_hexsha":{"kind":"string","value":"7ecb1c354c3e00091ef6005d53eb19d0fcf7f56e"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"src/scripts/client/gameplay/viz_handlers/flash_units.lua"},"max_issues_repo_name":{"kind":"string","value":"Psimage/Lovely-Camera-Mod"},"max_issues_repo_head_hexsha":{"kind":"string","value":"7ecb1c354c3e00091ef6005d53eb19d0fcf7f56e"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"src/scripts/client/gameplay/viz_handlers/flash_units.lua"},"max_forks_repo_name":{"kind":"string","value":"Psimage/Lovely-Camera-Mod"},"max_forks_repo_head_hexsha":{"kind":"string","value":"7ecb1c354c3e00091ef6005d53eb19d0fcf7f56e"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"----------------------------------------------------------------\n-- Copyright (c) 2012 Klei Entertainment Inc.\n-- All Rights Reserved.\n-- SPY SOCIETY.\n----------------------------------------------------------------\n\nlocal viz_thread = include( \"gameplay/viz_thread\" )\nlocal array = include( \"modules/array\" )\nlocal cdefs = include( \"client_defs\" )\nlocal util = include( \"client_util\" )\nlocal simdefs = include( \"sim/simdefs\" )\nlocal simquery = include( \"sim/simquery\" )\n\n---------------------------------------------------------------\n\nlocal flash_units = class( viz_thread )\n\nfunction flash_units:init( boardrig, viz, rig, duration )\n viz_thread.init( self, viz, self.onResume )\n\tviz:registerHandler( simdefs.EV_FRAME_UPDATE, self )\n self.boardrig = boardrig\n self.rig = rig\n self.duration = duration\n\n --move rig from layers[\"main\"] to layers[\"ceiling\"]\n --increment usage_count and enable fullscreen darkening overlay if 1\n \n local rigProp = rig:getProp()\n local main = boardrig._layers[\"main\"]\n local ceiling = boardrig._layers[\"ceiling\"]\n\n main:removeProp( rigProp )\n ceiling:insertProp( rigProp )\n\n rigProp:setPriority( 110000 )\n\n if not boardrig._flashThreadCount or boardrig._flashThreadCount == 0 then\n 
boardrig._flashThreadCount = 1\n\n --print( \"inserting dimmer\" )\n\n --local bSoundPlayed = false\n local timer = MOAITimer.new()\n\t timer:setSpan( duration / (60*2) )\n\t timer:setMode( MOAITimer.PING_PONG )\n\t timer:start()\n\t local uniformDriver = function( uniforms )\n\t\t local t = timer:getTime() / (duration / (60*2) )\n\n\t\t t = math.min(0.7,t*3)\n\t\t uniforms:setUniformFloat( \"ease\", t )\n --print('dimmer ease', t )\n\t end\n\n local uniforms = KLEIShaderUniforms.new()\n uniforms:setUniformDriver( uniformDriver )\n \n local dimmerProp = KLEIFullscreenProp.new()\n dimmerProp:setShader( MOAIShaderMgr.getShader( MOAIShaderMgr.KLEI_POST_PROCESS_PASS_THROUGH_EASE ) )\n dimmerProp:setShaderUniforms( uniforms )\n dimmerProp:setTexture( \"data/images/the_darkness.png\" )\n dimmerProp:setBlendMode( MOAIProp.BLEND_NORMAL )\n dimmerProp:setPriority( 100000 )\n\n ceiling:insertProp( dimmerProp )\n\n boardrig._dimmerProp = dimmerProp\n else\n boardrig._flashThreadCount = boardrig._flashThreadCount + 1\n end\nend\n\nfunction flash_units:onStop()\n self.rig:refreshRenderFilter()\n\n --move rig from layers[\"ceiling\"] to layers[\"main\"]\n --decrement usage_count and disable fullscreen darkening overlay if 0\n\n local rigProp = self.rig:getProp()\n local main = self.boardrig._layers[\"main\"]\n local ceiling = self.boardrig._layers[\"ceiling\"]\n\n ceiling:removeProp( rigProp )\n main:insertProp( rigProp )\n\n local count = self.boardrig._flashThreadCount - 1\n self.boardrig._flashThreadCount = count\n if count <= 0 then\n --print( \"deleting dimmer\" )\n local dimmerProp = self.boardrig._dimmerProp\n ceiling:removeProp( dimmerProp )\n self.boardrig._dimmerProp = nil\n end\nend\n\nfunction flash_units:onResume( ev )\n while self.duration > 0 do\n\t if self.duration % 20 == 0 then\n\t self.rig:getProp():setRenderFilter( cdefs.RENDER_FILTERS[\"focus_highlite\"] )\n\t elseif self.duration % 10 == 0 then\n\t self.rig:refreshRenderFilter()\n\t end\n self.duration = 
self.duration - 1\n coroutine.yield()\n end\nend\n\nreturn flash_units\n"},"avg_line_length":{"kind":"number","value":31.9266055046,"string":"31.926606"},"max_line_length":{"kind":"number","value":109,"string":"109"},"alphanum_fraction":{"kind":"number","value":0.641091954,"string":"0.641092"},"score":{"kind":"number","value":3.078125,"string":"3.078125"}}},{"rowIdx":975,"cells":{"hexsha":{"kind":"string","value":"da1f4b82540846bf3b10497980830f102e02fe01"},"size":{"kind":"number","value":2018,"string":"2,018"},"ext":{"kind":"string","value":"lua"},"lang":{"kind":"string","value":"Lua"},"max_stars_repo_path":{"kind":"string","value":"resources/[scripts]/[hoppe]/[jobs]/hpp_cet/towtruck.lua"},"max_stars_repo_name":{"kind":"string","value":"HoppeDevz/bclrp"},"max_stars_repo_head_hexsha":{"kind":"string","value":"acc33ae5032fb2488dacfa49046470feb8cac32e"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":4,"string":"4"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2020-09-15T17:43:21.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2022-01-14T16:49:16.000Z"},"max_issues_repo_path":{"kind":"string","value":"resources/[scripts]/[hoppe]/[jobs]/hpp_cet/towtruck.lua"},"max_issues_repo_name":{"kind":"string","value":"kFxDaKing/bclrp"},"max_issues_repo_head_hexsha":{"kind":"string","value":"acc33ae5032fb2488dacfa49046470feb8cac32e"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"resources/[scripts]/[hoppe]/[jobs]/hpp_cet/towtruck.lua"},"max_forks_repo_name":{"kind":"string","value":"kFxDaKing/bclrp"},"max_forks_repo_head_hexsha":{"kind":"string","value":"acc33ae5032fb2488dacfa49046470feb8cac32e"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"number","value":4,"string":"4"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2020-09-14T11:47:50.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2021-02-15T20:39:45.000Z"},"content":{"kind":"string","value":"\nRegisterCommand(\"towtruck\", function()\n\tvehicle = GetVehiclePedIsUsing(GetPlayerPed(-1))\n\tvehicletow = GetDisplayNameFromVehicleModel(GetEntityModel(GetVehiclePedIsUsing(GetPlayerPed(-1))))\nend)\n\nlocal reboque = nil\nlocal rebocado = nil\nRegisterCommand(\"tow\",function(source,args)\n\t--local vehicle = GetPlayersLastVehicle()\n\t--local vehicletow = IsVehicleModel(vehicle,GetHashKey(\"flatbed\"))\n\n\tif vehicletow and not IsPedInAnyVehicle(PlayerPedId()) then\n\t\t--rebocado = getVehicleInDirection(GetEntityCoords(PlayerPedId()),GetOffsetFromEntityInWorldCoords(PlayerPedId(),0.0,5.0,0.0))\n\t\trebocado = GetPlayersLastVehicle()\n\t\tif reboque == nil then\n\t\t\tif vehicle ~= rebocado then\n\t\t\t\tlocal min,max = GetModelDimensions(GetEntityModel(rebocado))\n\t\t\t\tAttachEntityToEntity(rebocado,vehicle,GetEntityBoneIndexByName(vehicle,\"bodyshell\"),0,-2.2,0.4-min.z,0,0,0,1,1,0,1,0,1)\n\t\t\t\treboque = rebocado\n\t\t\tend\n\t\telse\n\t\t\tAttachEntityToEntity(reboque,vehicle,20,-0.5,-15.0,-0.3,0.0,0.0,0.0,false,false,true,false,20,true)\n\t\t\tDetachEntity(reboque,false,false)\n\t\t\tPlaceObjectOnGroundProperly(reboque)\n\t\t\treboque = 
nil\n\t\t\trebocado = nil\n\t\tend\n\tend\nend)\n\nCitizen.CreateThread(function()\n\twhile true do\n\t\tCitizen.Wait(0)\n\t\tif enablemechud then\n\n\t\t\tif vehicletow == nil then\n\t\t\t\tvehicletow = \"NENHUM\"\n\t\t\tend\n\t\t\t\n\t\t\trebocado = GetDisplayNameFromVehicleModel(GetEntityModel(GetPlayersLastVehicle()))\n\n\t\t\tif rebocado == nil then\n\t\t\t\trebocado = \"NENHUM\"\n\t\t\telseif rebocado == \"FLATBED\" then\n\t\t\t\trebocado = \"NENHUM\"\t\n\t\t\tend\t\n\n\t\t\t--print(vehicletow)\n\t\t\t--print(rebocado)\n\n\t\t\tif vehicletow ~= \"FLATBED\" then\n\t\t\t\tdrawTxt(\"REBOQUE:~r~\"..vehicletow,4,0.5,0.93,0.50,255,255,255,255)\n\t\t\telseif vehicletow == \"FLATBED\" then\n\t\t\t\tdrawTxt(\"REBOQUE:~g~\"..vehicletow,4,0.5,0.93,0.50,255,255,255,255)\n\t\t\tend\t\n\t\t\tdrawTxt(\"REBOCADO:~r~\"..rebocado,4,0.5,0.96,0.50,255,255,255,255)\n\t\tend\n\tend\nend)\n\nenablemechud = false\nRegisterCommand(\"towhud\", function()\n\tif not enablemechud then\n\t\tenablemechud = true\n\telseif enablemechud then\n\t\tenablemechud = false\n\tend\nend)"},"avg_line_length":{"kind":"number","value":29.2463768116,"string":"29.246377"},"max_line_length":{"kind":"number","value":128,"string":"128"},"alphanum_fraction":{"kind":"number","value":0.7304261645,"string":"0.730426"},"score":{"kind":"number","value":3.09375,"string":"3.09375"}}},{"rowIdx":976,"cells":{"hexsha":{"kind":"string","value":"b89f9543ecb27486610847a26f0dea237fce7656"},"size":{"kind":"number","value":3804,"string":"3,804"},"ext":{"kind":"string","value":"rs"},"lang":{"kind":"string","value":"Rust"},"max_stars_repo_path":{"kind":"string","value":"days/day25/src/lib.rs"},"max_stars_repo_name":{"kind":"string","value":"dfm/adventofcode"},"max_stars_repo_head_hexsha":{"kind":"string","value":"ab2c4228229988d79ba7a9034069961650830031"},"max_stars_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n 
\"Apache-2.0\"\n]"},"max_stars_count":{"kind":"number","value":2,"string":"2"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2020-12-05T23:14:48.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2021-12-27T04:39:33.000Z"},"max_issues_repo_path":{"kind":"string","value":"days/day25/src/lib.rs"},"max_issues_repo_name":{"kind":"string","value":"dfm/adventofcode"},"max_issues_repo_head_hexsha":{"kind":"string","value":"ab2c4228229988d79ba7a9034069961650830031"},"max_issues_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"days/day25/src/lib.rs"},"max_forks_repo_name":{"kind":"string","value":"dfm/adventofcode"},"max_forks_repo_head_hexsha":{"kind":"string","value":"ab2c4228229988d79ba7a9034069961650830031"},"max_forks_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_forks_count":{"kind":"number","value":1,"string":"1"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2019-12-24T04:56:27.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2019-12-24T04:56:27.000Z"},"content":{"kind":"string","value":"use aoc::solver::Solver;\n\npub struct Day25;\n\n#[derive(Copy, Clone)]\nenum Cell {\n Empty,\n East,\n South,\n}\n\nstruct Grid {\n width: usize,\n height: usize,\n value: Vec,\n}\n\nimpl Grid {\n fn new(data: &str) -> Self {\n let width = data.lines().next().unwrap().trim().len();\n let mut grid = Grid {\n width,\n height: 0,\n value: Vec::new(),\n };\n for line in data.lines() {\n for c in line.trim().chars() {\n grid.value.push(match c {\n '>' => Cell::East,\n 'v' => Cell::South,\n _ => Cell::Empty,\n });\n }\n }\n grid.height = grid.value.len() / width;\n grid\n }\n\n fn east(&self, n: usize) 
-> usize {\n if n % self.width == self.width - 1 {\n n + 1 - self.width\n } else {\n n + 1\n }\n }\n\n fn south(&self, n: usize) -> usize {\n if n >= (self.height - 1) * self.width {\n n % self.width\n } else {\n n + self.width\n }\n }\n\n fn step(&self) -> (usize, Self) {\n let mut count = 0;\n let mut grid = Grid {\n width: self.width,\n height: self.height,\n value: Vec::new(),\n };\n grid.value.resize(self.width * self.height, Cell::Empty);\n\n for (n, v) in self.value.iter().enumerate() {\n if matches!(v, Cell::East) {\n let target = self.east(n);\n if matches!(self.value[target], Cell::Empty) {\n grid.value[target] = Cell::East;\n count += 1;\n } else {\n grid.value[n] = Cell::East;\n }\n }\n }\n\n for (n, v) in self.value.iter().enumerate() {\n if matches!(v, Cell::South) {\n let target = self.south(n);\n if matches!(grid.value[target], Cell::Empty)\n && !matches!(self.value[target], Cell::South)\n {\n grid.value[target] = Cell::South;\n count += 1;\n } else {\n grid.value[n] = Cell::South;\n }\n }\n }\n\n (count, grid)\n }\n}\n\nimpl std::fmt::Display for Cell {\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n write!(\n f,\n \"{}\",\n match self {\n Cell::Empty => '.',\n Cell::East => '>',\n Cell::South => 'v',\n }\n )\n }\n}\n\nimpl std::fmt::Display for Grid {\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n let mut n = 0;\n let mut result = String::new();\n for _ in 0..self.height {\n for _ in 0..self.width {\n let line = format!(\"{}\", self.value[n]);\n n += 1;\n result.push_str(&line);\n }\n result.push('\\n');\n }\n write!(f, \"{}\", result)\n }\n}\n\nimpl Solver<&str> for Day25 {\n fn part1(data: &str) -> usize {\n let mut count = 0;\n let mut grid = Grid::new(data);\n loop {\n let result = grid.step();\n count += 1;\n if result.0 == 0 {\n break;\n }\n grid = result.1;\n }\n count\n }\n\n fn part2(_data: &str) -> usize {\n 0\n }\n}\n\n#[cfg(test)]\nmod tests {\n use super::*;\n\n const DATA: &str = 
\"v...>>.vv>\n.vv>>.vv..\n>>.>v>...v\n>>v>>.>.v.\nv>v.vv.v..\n>.>>..v...\n.vv..>.>v.\nv.v..>>v.v\n....v..v.>\n\";\n\n #[test]\n fn test_part1() {\n assert_eq!(Day25::part1(DATA), 58);\n }\n\n #[test]\n fn test_part2() {\n assert_eq!(Day25::part2(DATA), 0);\n }\n}\n"},"avg_line_length":{"kind":"number","value":22.5088757396,"string":"22.508876"},"max_line_length":{"kind":"number","value":68,"string":"68"},"alphanum_fraction":{"kind":"number","value":0.398002103,"string":"0.398002"},"score":{"kind":"number","value":3.28125,"string":"3.28125"}}},{"rowIdx":977,"cells":{"hexsha":{"kind":"string","value":"b156345f812e80c4199551982bf4bc47bc72fad8"},"size":{"kind":"number","value":1984,"string":"1,984"},"ext":{"kind":"string","value":"h"},"lang":{"kind":"string","value":"C"},"max_stars_repo_path":{"kind":"string","value":"sorce/mikey/Opera/Grass.h"},"max_stars_repo_name":{"kind":"string","value":"montoyamoraga/shbobo"},"max_stars_repo_head_hexsha":{"kind":"string","value":"3469747603dfead376111f38b455af1250365848"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":16,"string":"16"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2020-12-21T04:52:20.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2022-02-28T10:15:34.000Z"},"max_issues_repo_path":{"kind":"string","value":"sorce/mikey/Opera/Grass.h"},"max_issues_repo_name":{"kind":"string","value":"montoyamoraga/shbobo"},"max_issues_repo_head_hexsha":{"kind":"string","value":"3469747603dfead376111f38b455af1250365848"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_issues_count":{"kind":"number","value":8,"string":"8"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2021-01-02T01:01:26.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2021-12-19T01:40:34.000Z"},"max_forks_repo_path":{"kind":"string","value":"sorce/mikey/Opera/Grass.h"},"max_forks_repo_name":{"kind":"string","value":"montoyamoraga/shbobo"},"max_forks_repo_head_hexsha":{"kind":"string","value":"3469747603dfead376111f38b455af1250365848"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"number","value":4,"string":"4"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2021-01-01T15:27:43.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2021-08-10T21:14:29.000Z"},"content":{"kind":"string","value":"\nstruct BistaTranz {\n float squl, squr;\n bool state;\n BistaTranz (){squl=squr=0;}\n float calx(float fmm, float den) {\n if (state) {\n squl += fmm;\n if (squl >= den) {\n squr = -den;\n state = !state;\n }\n } else {\n squr += fmm;\n if (squr >= den) {\n squl = -den;\n state = !state;\n }\n } return squl;\n }\n};\n\nstruct Bista : Opero {\n BistaTranz bt;\n shOpr mul, add, fmo, den;\n Bista(lua_State *L): Opero() {\n broinger(L, 1, \"fre\", &fmo);\n broinger(L, 2, \"den\", &den, 1.0);\n broinger(L, 3, \"mul\", &mul,1.0);\n broinger(L, 4, \"add\", &add);\n }\n float calx(float sr) {\n mint = 0;\n float fmm = 4*fmo->calx(sr)/sr;\n float deno = fabs(den->calx(sr));\n bt.calx(fmm,deno);\n mint = bt.squr * mul->calx(sr) + add->calx(sr);\n return mint;\n }\n static const char className[];\n const char * getClassName() { return className; }\n};\nconst char Bista::className[] = \"Bista\";\n\nstruct RunglTranz {\n unsigned char pattern;\n float mint;\n float lastcar;\n RunglTranz(){pattern=rand();}\n float calx(bool car, bool mod) {\n if (car && !lastcar)\n pattern = (pattern << 1) | (mod ? 
1 : 0);\n lastcar = car;\n //printf(\"patternfloat%d\\n\",pattern);\n mint = (float)pattern / 256; \n return mint;\n }\n};\n\nstruct Grass : Opero {\n BistaTranz square[4];\n RunglTranz castle[4];\n static const char className[];\n shOpr mul, add, fmo, cha;\n Grass(lua_State *L): Opero() {\n broinger(L, 1, \"fre\", &fmo);\n broinger(L, 2, \"cha\", &cha);\n broinger(L, 3, \"mul\", &mul,1.0);\n broinger(L, 4, \"add\", &add);\n }\n float calx(float sr) {\n mint = 0;\n float chaos = cha->calx(sr);\n //printf(\"chaos%f\\n\",chaos);\n float fmm = 4*fmo->calx(sr)/sr;\n for (int i = 0; i < 4; i++) {\n square[i].calx(\n fmm*(1+chaos*castle[i].calx(\n square[(i+2)%4].state, square[(i+1)%4].state)),\n 1+(float)i/10);\n } \n float mullo = mul->calx(sr);\n mint = square[0].squl*mullo + add->calx(sr);\n return mint;\n }\n const char * getClassName() { return className; }\n};\nconst char Grass::className[] = \"Grass\";"},"avg_line_length":{"kind":"number","value":22.5454545455,"string":"22.545455"},"max_line_length":{"kind":"number","value":52,"string":"52"},"alphanum_fraction":{"kind":"number","value":0.5836693548,"string":"0.583669"},"score":{"kind":"number","value":3.0625,"string":"3.0625"}}},{"rowIdx":978,"cells":{"hexsha":{"kind":"string","value":"ef53113206ffe662ae977a2dee15a9f65785ecd4"},"size":{"kind":"number","value":1950,"string":"1,950"},"ext":{"kind":"string","value":"pls"},"lang":{"kind":"string","value":"SQL"},"max_stars_repo_path":{"kind":"string","value":"home/lib/python/exemple/plgrader/one.pls"},"max_stars_repo_name":{"kind":"string","value":"PremierLangage/premierlangage"},"max_stars_repo_head_hexsha":{"kind":"string","value":"7134a2aadffee2bf264abee6c4b23ea33f1b390b"},"max_stars_repo_licenses":{"kind":"list like","value":["CECILL-B"],"string":"[\n 
\"CECILL-B\"\n]"},"max_stars_count":{"kind":"number","value":8,"string":"8"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2019-01-30T13:51:59.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2022-01-08T03:26:53.000Z"},"max_issues_repo_path":{"kind":"string","value":"apps/misc_tests/resources/lib/python/exemple/plgrader/one.pls"},"max_issues_repo_name":{"kind":"string","value":"PremierLangage/premierlangage"},"max_issues_repo_head_hexsha":{"kind":"string","value":"7134a2aadffee2bf264abee6c4b23ea33f1b390b"},"max_issues_repo_licenses":{"kind":"list like","value":["CECILL-B"],"string":"[\n \"CECILL-B\"\n]"},"max_issues_count":{"kind":"number","value":286,"string":"286"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2019-01-18T21:35:51.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2022-03-24T18:53:59.000Z"},"max_forks_repo_path":{"kind":"string","value":"apps/misc_tests/resources/lib/python/exemple/plgrader/one.pls"},"max_forks_repo_name":{"kind":"string","value":"PremierLangage/premierlangage"},"max_forks_repo_head_hexsha":{"kind":"string","value":"7134a2aadffee2bf264abee6c4b23ea33f1b390b"},"max_forks_repo_licenses":{"kind":"list like","value":["CECILL-B"],"string":"[\n \"CECILL-B\"\n]"},"max_forks_count":{"kind":"number","value":4,"string":"4"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2019-02-11T13:38:30.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2021-03-02T20:59:00.000Z"},"content":{"kind":"string","value":"from playexo.strategy import StrategyAPI\n\ndef get_last_answered_index(request, activity):\n strat = StrategyAPI(activity)\n pls = strat.get_pl_list()\n i = 0\n for pl in pls:\n if not strat.get_last_good_answer(pl, request):\n return i\n i += 1\n return 0\n\ndef strategy(request, activity):\n \"\"\" Process request to determine what do to. Should return an HttpResponse. 
\"\"\"\n \n strat = StrategyAPI(activity)\n current = get_last_answered_index(request, activity)\n \n if request.method == 'GET': # Request changing which exercise will be loaded\n action = request.GET.get(\"action\", None)\n if action == \"pl\":\n strat.set_pl(strat.get_pl_sha1(request.GET.get(\"pl_sha1\", None)), request)\n return HttpResponseRedirect(\"/playexo/activity/\") # Remove get parameters from url\n elif action == \"pltp\":\n pl = strat.get_current_pl(request)\n if (pl):\n can_do = pl.sha1;\n strat.set_pl(None, request)\n \n dic = strat.get_pl_dic(strat.get_current_pl(request))\n if 'oneshot' in dic and dic['oneshot'] == 'True':\n seed = None\n else:\n seed = strat.get_seed_from_answer(strat.get_last_answer(strat.get_current_pl(request), request))\n exercise = strat.load_exercise(request, seed)\n \n if request.method == 'GET': # Request changing or interacting an exercise\n if action == \"reset\":\n strat.reset_pl(exercise)\n elif action == \"next\":\n pl = strat.get_next_pl(request)\n strat.set_pl(pl, request)\n return HttpResponseRedirect(\"/playexo/activity/\") # Remove get parameters from url\n \n if request.method == 'POST':\n state, feedback = strat.evaluate(exercise, request)\n return strat.send_evaluate_feedback(state, feedback)\n \n strat.add_to_context(exercise, 'current_auth', current)\n return strat.render(exercise, 
request)\n"},"avg_line_length":{"kind":"number","value":38.2352941176,"string":"38.235294"},"max_line_length":{"kind":"number","value":104,"string":"104"},"alphanum_fraction":{"kind":"number","value":0.6405128205,"string":"0.640513"},"score":{"kind":"number","value":3,"string":"3"}}},{"rowIdx":979,"cells":{"hexsha":{"kind":"string","value":"8c1cd1193490b6043dceafbc312e302e08e02328"},"size":{"kind":"number","value":1496,"string":"1,496"},"ext":{"kind":"string","value":"kt"},"lang":{"kind":"string","value":"Kotlin"},"max_stars_repo_path":{"kind":"string","value":"java-time/src/main/kotlin/com/github/debop/javatimes/PeriodExtensions.kt"},"max_stars_repo_name":{"kind":"string","value":"debop/joda-time-kotlin"},"max_stars_repo_head_hexsha":{"kind":"string","value":"cbb0efedaa53bdf9d77b230d8477cb0ae0d7abd7"},"max_stars_repo_licenses":{"kind":"list like","value":["ECL-2.0","Apache-2.0"],"string":"[\n \"ECL-2.0\",\n \"Apache-2.0\"\n]"},"max_stars_count":{"kind":"number","value":91,"string":"91"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2016-07-15T03:06:17.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2021-12-07T11:16:44.000Z"},"max_issues_repo_path":{"kind":"string","value":"java-time/src/main/kotlin/com/github/debop/javatimes/PeriodExtensions.kt"},"max_issues_repo_name":{"kind":"string","value":"debop/joda-time-kotlin"},"max_issues_repo_head_hexsha":{"kind":"string","value":"cbb0efedaa53bdf9d77b230d8477cb0ae0d7abd7"},"max_issues_repo_licenses":{"kind":"list like","value":["ECL-2.0","Apache-2.0"],"string":"[\n \"ECL-2.0\",\n 
\"Apache-2.0\"\n]"},"max_issues_count":{"kind":"number","value":11,"string":"11"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2016-11-23T11:04:30.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2021-05-18T13:07:11.000Z"},"max_forks_repo_path":{"kind":"string","value":"java-time/src/main/kotlin/com/github/debop/javatimes/PeriodExtensions.kt"},"max_forks_repo_name":{"kind":"string","value":"debop/joda-time-kotlin"},"max_forks_repo_head_hexsha":{"kind":"string","value":"cbb0efedaa53bdf9d77b230d8477cb0ae0d7abd7"},"max_forks_repo_licenses":{"kind":"list like","value":["ECL-2.0","Apache-2.0"],"string":"[\n \"ECL-2.0\",\n \"Apache-2.0\"\n]"},"max_forks_count":{"kind":"number","value":9,"string":"9"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2017-01-23T13:35:25.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2020-06-08T06:26:48.000Z"},"content":{"kind":"string","value":"package com.github.debop.javatimes\n\nimport java.time.Period\nimport java.time.temporal.Temporal\n\noperator fun Period.unaryMinus(): Period = this.negated()\n\n@Suppress(\"UNCHECKED_CAST\")\noperator fun Period.plus(instant: T): T = addTo(instant) as T\n\n@Suppress(\"UNCHECKED_CAST\")\noperator fun Period.minus(instant: T): T = subtractFrom(instant) as T\n\n@JvmOverloads\nfun periodOf(years: Int, months: Int = 0, days: Int = 0): Period = Period.of(years, months, days)\n\n/**\n * year sequence of `Period`\n */\nsuspend fun Period.yearSequence(): Sequence = sequence {\n var year = 0\n val years = this@yearSequence.years\n if(years > 0) {\n while(year < years) {\n yield(year++)\n }\n } else {\n while(year > years) {\n yield(year--)\n }\n }\n}\n\n/**\n * month sequence of `java.time.Period`\n */\nsuspend fun Period.monthSequence(): Sequence = sequence {\n var month = 0\n val months = this@monthSequence.months\n if(months > 0) {\n while(month < months) {\n yield(month++)\n }\n } else {\n while(month > 
months) {\n yield(month--)\n }\n }\n}\n\n/**\n * day sequence of `java.time.Period`\n */\nsuspend fun Period.daySequence(): Sequence = sequence {\n var day = 0\n val days = this@daySequence.days\n if(days > 0) {\n while(day < days) {\n yield(day++)\n }\n } else {\n while(day > days) {\n yield(day--)\n }\n }\n}"},"avg_line_length":{"kind":"number","value":22.6666666667,"string":"22.666667"},"max_line_length":{"kind":"number","value":97,"string":"97"},"alphanum_fraction":{"kind":"number","value":0.577540107,"string":"0.57754"},"score":{"kind":"number","value":3.25,"string":"3.25"}}},{"rowIdx":980,"cells":{"hexsha":{"kind":"string","value":"27e0e1bdc49eab4fdb1dfb4431fe18bd1c9bc125"},"size":{"kind":"number","value":6115,"string":"6,115"},"ext":{"kind":"string","value":"kt"},"lang":{"kind":"string","value":"Kotlin"},"max_stars_repo_path":{"kind":"string","value":"app/src/main/java/com/starline/hamsteradoption/HamsterDetailActivity.kt"},"max_stars_repo_name":{"kind":"string","value":"MaxNeverSleep/HamsterAdoption"},"max_stars_repo_head_hexsha":{"kind":"string","value":"38416348921b78c2f51933ddbc287f234fd0ac5c"},"max_stars_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"app/src/main/java/com/starline/hamsteradoption/HamsterDetailActivity.kt"},"max_issues_repo_name":{"kind":"string","value":"MaxNeverSleep/HamsterAdoption"},"max_issues_repo_head_hexsha":{"kind":"string","value":"38416348921b78c2f51933ddbc287f234fd0ac5c"},"max_issues_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n 
\"Apache-2.0\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"app/src/main/java/com/starline/hamsteradoption/HamsterDetailActivity.kt"},"max_forks_repo_name":{"kind":"string","value":"MaxNeverSleep/HamsterAdoption"},"max_forks_repo_head_hexsha":{"kind":"string","value":"38416348921b78c2f51933ddbc287f234fd0ac5c"},"max_forks_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"package com.starline.hamsteradoption\n\nimport android.graphics.Paint\nimport android.os.Bundle\nimport android.util.Log\nimport androidx.activity.compose.setContent\nimport androidx.appcompat.app.AppCompatActivity\nimport androidx.compose.foundation.Image\nimport androidx.compose.foundation.layout.*\nimport androidx.compose.material.*\nimport androidx.compose.material.icons.Icons\nimport androidx.compose.material.icons.filled.ArrowBack\nimport androidx.compose.runtime.Composable\nimport androidx.compose.ui.Alignment\nimport androidx.compose.ui.Modifier\nimport androidx.compose.ui.layout.ContentScale\nimport androidx.compose.ui.res.painterResource\nimport androidx.compose.ui.text.font.FontWeight\nimport androidx.compose.ui.unit.dp\nimport androidx.compose.ui.unit.sp\nimport androidx.core.graphics.scaleMatrix\nimport com.starline.hamsteradoption.ui.theme.HamsterAdoptionTheme\n\nval hamsterAae = arrayOf(\n \"3 months\",\n \"4 months\",\n \"1 year 4 months\",\n \"2 years\",\n \"3 months\",\n \"5 months\",\n \"7 months\",\n \"1 1moneths\",\n \"1 months\",\n \"3 months\",\n \"8 months\",\n \"1 months\",\n)\n\nclass HamsterDetailActivity : AppCompatActivity() {\n override fun onCreate(savedInstanceState: Bundle?) 
{\n super.onCreate(savedInstanceState)\n val name = intent.getStringExtra(\"HAMSTER_NAME\")\n val desc = intent.getStringExtra(\"HAMSTER_DESC\")\n var detailImage1: Int = 0\n var detailImage2: Int = 0\n var age: String = \"1 months\"\n when (name) {\n \"Sussy\" -> {\n detailImage1 = R.mipmap.hamster_preview_1\n detailImage2 = R.mipmap.hamster_1_1\n age = \"1 months\"\n }\n \"Jack\" -> {\n detailImage1 = R.mipmap.hamster_preview_2\n detailImage2 = R.mipmap.hamster_2_1\n age = \"3 months\"\n }\n \"David\" -> {\n detailImage1 = R.mipmap.hamster_preview_3\n detailImage2 = R.mipmap.hamster_3_1\n age = \"6 months\"\n }\n \"Stephen\" -> {\n detailImage1 = R.mipmap.hamster_preview_4\n detailImage2 = R.mipmap.hamster_4_1\n age = \"4 months\"\n }\n \"Kiro\" -> {\n detailImage1 = R.mipmap.hamster_preview_5\n detailImage2 = R.mipmap.hamster_5_1\n age = \"2 months\"\n }\n \"Warden\" -> {\n detailImage1 = R.mipmap.hamster_preview_6\n detailImage2 = R.mipmap.hamster_6_1\n age = \"4 months\"\n }\n \"Love\" -> {\n detailImage1 = R.mipmap.hamster_preview_7\n detailImage2 = R.mipmap.hamster_7_1\n age = \"2 months\"\n }\n \"Cookie\" -> {\n detailImage1 = R.mipmap.hamster_preview_8\n detailImage2 = R.mipmap.hamster_8_1\n age = \"5 months\"\n }\n \"Hamster007\" -> {\n detailImage1 = R.mipmap.hamster_preview_9\n detailImage2 = R.mipmap.hamster_5_1\n age = \"4 months\"\n }\n \"SweetHamster\" -> {\n detailImage1 = R.mipmap.hamster_preview_10\n detailImage2 = R.mipmap.hamster_4_1\n age = \"6 months\"\n }\n \"Lam\" -> {\n detailImage1 = R.mipmap.hamster_preview_11\n detailImage2 = R.mipmap.hamster_1_1\n age = \"7 months\"\n }\n \"Pinkie\" -> {\n detailImage1 = R.mipmap.hamster_preview_12\n detailImage2 = R.mipmap.hamster_6_1\n age = \"12 months\"\n }\n }\n setContent {\n HamsterAdoptionTheme {\n MyDetail(name, age, detailImage1, detailImage2, { back() })\n }\n }\n\n }\n private fun back() {\n Log.i(\"test\", \"finish activity结束\")\n super.finish()\n }\n}\n\n@Composable\nfun MyDetail(\n name: 
String?,\n age: String?,\n detailImage1: Int,\n detailImage2: Int,\n onClick: () -> Unit\n) {\n Surface(color = MaterialTheme.colors.background) {\n Column {\n TopAppBar(\n navigationIcon = {\n IconButton(onClick = { onClick }) {\n Icon(Icons.Filled.ArrowBack, \"back\")\n }\n },\n title = {\n Text(\n text = \"$name's Detail\",\n fontWeight = FontWeight.Bold\n )\n }\n )\n Row {\n Image(\n painter = painterResource(id = detailImage1),\n contentDescription = name,\n Modifier\n .width(180.dp)\n .height(180.dp)\n .padding(30.dp, 30.dp),\n contentScale = ContentScale.FillBounds\n )\n Text(\n text = \"Name : $name\\r\\n\\nAge : $age\\r\\n\\nDistance : 4.5km\",\n Modifier.padding(top = 40.dp),\n fontWeight = FontWeight.Bold,\n fontSize = 18.sp\n )\n }\n Text(\n text = \"$name ,she’s really light for a dwarf, do not mistake her weight for her looks as when she arrived, she looks a lil preggie ...Turns out after 4 weeks, she just has a food baby and big hips (yes #bodygoals) Therefore, pls give Wheelington a chance even if you’re looking for a round hammy, light hams can be chonky too!\",\n Modifier.padding(horizontal = 30.dp, vertical = 10.dp),\n style = MaterialTheme.typography.body2\n )\n Image(\n painter = painterResource(id = detailImage2),\n contentDescription = name,\n Modifier\n .width(400.dp)\n .height(280.dp)\n .padding(30.dp, 30.dp),\n contentScale = ContentScale.FillBounds\n\n )\n }\n 
}\n}\n\n\n"},"avg_line_length":{"kind":"number","value":33.5989010989,"string":"33.598901"},"max_line_length":{"kind":"number","value":345,"string":"345"},"alphanum_fraction":{"kind":"number","value":0.5216680294,"string":"0.521668"},"score":{"kind":"number","value":3,"string":"3"}}},{"rowIdx":981,"cells":{"hexsha":{"kind":"string","value":"8e447720674c91b173384fcd9c06cc17164dc87b"},"size":{"kind":"number","value":3122,"string":"3,122"},"ext":{"kind":"string","value":"rb"},"lang":{"kind":"string","value":"Ruby"},"max_stars_repo_path":{"kind":"string","value":"lib/fastly_nsq/manager.rb"},"max_stars_repo_name":{"kind":"string","value":"fastly/fastly_nsq"},"max_stars_repo_head_hexsha":{"kind":"string","value":"e05d3972ebcfb25f7a42eb0268d482e5142d4658"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":11,"string":"11"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2016-01-30T00:59:26.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2022-03-04T21:48:06.000Z"},"max_issues_repo_path":{"kind":"string","value":"lib/fastly_nsq/manager.rb"},"max_issues_repo_name":{"kind":"string","value":"fastly/fastly_nsq"},"max_issues_repo_head_hexsha":{"kind":"string","value":"e05d3972ebcfb25f7a42eb0268d482e5142d4658"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"number","value":65,"string":"65"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2016-02-02T23:27:41.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2022-02-23T14:32:01.000Z"},"max_forks_repo_path":{"kind":"string","value":"lib/fastly_nsq/manager.rb"},"max_forks_repo_name":{"kind":"string","value":"fastly/fastly_nsq"},"max_forks_repo_head_hexsha":{"kind":"string","value":"e05d3972ebcfb25f7a42eb0268d482e5142d4658"},"max_forks_repo_licenses":{"kind":"list 
like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"number","value":2,"string":"2"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2016-04-18T15:36:09.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2022-03-23T08:17:47.000Z"},"content":{"kind":"string","value":"# frozen_string_literal: true\n\n##\n# Interface for tracking listeners and managing the processing pool.\nclass FastlyNsq::Manager\n DEADLINE = 30\n\n # @return [Boolean] Set true when all listeners are stopped\n attr_reader :done\n\n # @return [FastlyNsq::PriorityThreadPool]\n attr_reader :pool\n\n # @return [Logger]\n attr_reader :logger\n\n ##\n # Create a FastlyNsq::Manager\n #\n # @param logger [Logger]\n # @param max_threads [Integer] Maxiumum number of threads to be used by {FastlyNsq::PriorityThreadPool}\n # @param pool_options [Hash] Options forwarded to {FastlyNsq::PriorityThreadPool} constructor.\n def initialize(logger: FastlyNsq.logger, max_threads: FastlyNsq.max_processing_pool_threads, **pool_options)\n @done = false\n @logger = logger\n @pool = FastlyNsq::PriorityThreadPool.new(\n { fallback_policy: :caller_runs, max_threads: max_threads }.merge(pool_options),\n )\n end\n\n ##\n # Hash of listeners. 
Keys are topics, values are {FastlyNsq::Listener} instances.\n # @return [Hash]\n def topic_listeners\n @topic_listeners ||= {}\n end\n\n ##\n # Array of listening topic names\n # @return [Array]\n def topics\n topic_listeners.keys\n end\n\n ##\n # Set of {FastlyNsq::Listener} objects\n # @return [Set]\n def listeners\n topic_listeners.values.to_set\n end\n\n ##\n # Stop the manager.\n # Terminates the listeners and stops all processing in the pool.\n # @param deadline [Integer] Number of seconds to wait for pool to stop processing\n def terminate(deadline = DEADLINE)\n return if done\n\n stop_listeners\n\n return if pool.shutdown?\n\n stop_processing(deadline)\n\n @done = true\n end\n\n ##\n # Manager state\n # @return [Boolean]\n def stopped?\n done\n end\n\n ##\n # Add a {FastlyNsq::Listener} to the @topic_listeners\n # @param listener [FastlyNsq::Listener}\n def add_listener(listener)\n logger.info { \"topic #{listener.topic}, channel #{listener.channel}: listening\" }\n\n if topic_listeners[listener.topic]\n logger.warn { \"topic #{listener.topic}: duplicate listener\" }\n end\n\n topic_listeners[listener.topic] = listener\n end\n\n ##\n # Transer listeners to a new manager and stop processing from the existing pool.\n # @param new_manager [FastlyNsq::Manager] new manager to which listeners will be added\n # @param deadline [Integer] Number of seconds to wait for exsiting pool to stop processing\n def transfer(new_manager, deadline: DEADLINE)\n new_manager.topic_listeners.merge!(topic_listeners)\n stop_processing(deadline)\n topic_listeners.clear\n @done = true\n end\n\n ##\n # Terminate all listeners\n def stop_listeners\n logger.info { 'Stopping listeners' }\n listeners.each(&:terminate)\n topic_listeners.clear\n end\n\n protected\n\n ##\n # Shutdown the pool\n # @param deadline [Integer] Number of seconds to wait for pool to stop processing\n def stop_processing(deadline)\n logger.info { 'Stopping processors' }\n pool.shutdown\n\n logger.info { 'Waiting for 
processors to finish...' }\n return if pool.wait_for_termination(deadline)\n\n logger.info { 'Killing processors...' }\n pool.kill\n end\nend\n"},"avg_line_length":{"kind":"number","value":25.3821138211,"string":"25.382114"},"max_line_length":{"kind":"number","value":110,"string":"110"},"alphanum_fraction":{"kind":"number","value":0.7049967969,"string":"0.704997"},"score":{"kind":"number","value":3,"string":"3"}}},{"rowIdx":982,"cells":{"hexsha":{"kind":"string","value":"402fab453bb9e2ac59ef1604d2cf41a8d383046e"},"size":{"kind":"number","value":1270,"string":"1,270"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"move.py"},"max_stars_repo_name":{"kind":"string","value":"Nexowned/SnakeAI"},"max_stars_repo_head_hexsha":{"kind":"string","value":"95b5d4a9d20df124040ff9335ad09409ca9ff607"},"max_stars_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"move.py"},"max_issues_repo_name":{"kind":"string","value":"Nexowned/SnakeAI"},"max_issues_repo_head_hexsha":{"kind":"string","value":"95b5d4a9d20df124040ff9335ad09409ca9ff607"},"max_issues_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"move.py"},"max_forks_repo_name":{"kind":"string","value":"Nexowned/SnakeAI"},"max_forks_repo_head_hexsha":{"kind":"string","value":"95b5d4a9d20df124040ff9335ad09409ca9ff607"},"max_forks_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n 
\"Apache-2.0\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"from enum import Enum\n\n\nclass Move(Enum):\n LEFT = -1\n STRAIGHT = 0\n RIGHT = 1\n\n\nclass Direction(Enum):\n NORTH = 0\n EAST = 1\n SOUTH = 2\n WEST = 3\n\n def get_new_direction(self, move):\n return Direction(self.value + move.value) % 4\n\n def get_xy_manipulation(self):\n m = {\n Direction.NORTH: (0, -1),\n Direction.EAST: (1, 0),\n Direction.SOUTH: (0, 1),\n Direction.WEST: (-1, 0)\n }\n\n return m[self]\n\n def get_xy_moves(self):\n m = {\n Direction.NORTH: [Direction.NORTH.get_xy_manipulation(), Direction.EAST.get_xy_manipulation(),\n Direction.WEST.get_xy_manipulation()],\n Direction.EAST: [Direction.NORTH.get_xy_manipulation(), Direction.EAST.get_xy_manipulation(),\n Direction.SOUTH.get_xy_manipulation()],\n Direction.SOUTH: [Direction.SOUTH.get_xy_manipulation(), Direction.EAST.get_xy_manipulation(),\n Direction.WEST.get_xy_manipulation()],\n Direction.WEST: [Direction.NORTH.get_xy_manipulation(), Direction.WEST.get_xy_manipulation(),\n Direction.SOUTH.get_xy_manipulation()],\n }\n\n return 
m[self]\n"},"avg_line_length":{"kind":"number","value":30.2380952381,"string":"30.238095"},"max_line_length":{"kind":"number","value":106,"string":"106"},"alphanum_fraction":{"kind":"number","value":0.5732283465,"string":"0.573228"},"score":{"kind":"number","value":3.234375,"string":"3.234375"}}},{"rowIdx":983,"cells":{"hexsha":{"kind":"string","value":"dda83b2c35d62055796e4e123db761f5ab14e3a9"},"size":{"kind":"number","value":5711,"string":"5,711"},"ext":{"kind":"string","value":"go"},"lang":{"kind":"string","value":"Go"},"max_stars_repo_path":{"kind":"string","value":"vendor/github.com/joomcode/redispipe/redis/request_writer.go"},"max_stars_repo_name":{"kind":"string","value":"anuragprafulla/components-contrib"},"max_stars_repo_head_hexsha":{"kind":"string","value":"91be9ad2a0767526049e0c95225b5afb3791e353"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":229,"string":"229"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2018-12-20T09:36:33.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2022-03-31T18:39:26.000Z"},"max_issues_repo_path":{"kind":"string","value":"vendor/github.com/joomcode/redispipe/redis/request_writer.go"},"max_issues_repo_name":{"kind":"string","value":"anuragprafulla/components-contrib"},"max_issues_repo_head_hexsha":{"kind":"string","value":"91be9ad2a0767526049e0c95225b5afb3791e353"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_issues_count":{"kind":"number","value":12,"string":"12"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2019-09-27T14:14:19.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2022-03-10T00:06:20.000Z"},"max_forks_repo_path":{"kind":"string","value":"vendor/github.com/joomcode/redispipe/redis/request_writer.go"},"max_forks_repo_name":{"kind":"string","value":"anuragprafulla/components-contrib"},"max_forks_repo_head_hexsha":{"kind":"string","value":"91be9ad2a0767526049e0c95225b5afb3791e353"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"number","value":17,"string":"17"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2018-12-21T17:34:47.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2022-02-09T19:07:44.000Z"},"content":{"kind":"string","value":"package redis\n\nimport (\n\t\"strconv\"\n\n\t\"github.com/joomcode/errorx\"\n)\n\n// AppendRequest appends request to byte slice as RESP request (ie as array of strings).\n//\n// It could fail if some request value is not nil, integer, float, string or byte slice.\n// In case of error it still returns modified buffer, but truncated to original size, it could be used save reallocation.\n//\n// Note: command could contain single space. 
In that case, it will be split and last part will be prepended to arguments.\nfunc AppendRequest(buf []byte, req Request) ([]byte, error) {\n\toldSize := len(buf)\n\tspace := -1\n\tfor i, c := range []byte(req.Cmd) {\n\t\tif c == ' ' {\n\t\t\tspace = i\n\t\t\tbreak\n\t\t}\n\t}\n\tif space == -1 {\n\t\tbuf = appendHead(buf, '*', len(req.Args)+1)\n\t\tbuf = appendHead(buf, '$', len(req.Cmd))\n\t\tbuf = append(buf, req.Cmd...)\n\t\tbuf = append(buf, '\\r', '\\n')\n\t} else {\n\t\tbuf = appendHead(buf, '*', len(req.Args)+2)\n\t\tbuf = appendHead(buf, '$', space)\n\t\tbuf = append(buf, req.Cmd[:space]...)\n\t\tbuf = append(buf, '\\r', '\\n')\n\t\tbuf = appendHead(buf, '$', len(req.Cmd)-space-1)\n\t\tbuf = append(buf, req.Cmd[space+1:]...)\n\t\tbuf = append(buf, '\\r', '\\n')\n\t}\n\tfor i, val := range req.Args {\n\t\tswitch v := val.(type) {\n\t\tcase string:\n\t\t\tbuf = appendHead(buf, '$', len(v))\n\t\t\tbuf = append(buf, v...)\n\t\tcase []byte:\n\t\t\tbuf = appendHead(buf, '$', len(v))\n\t\t\tbuf = append(buf, v...)\n\t\tcase int:\n\t\t\tbuf = appendBulkInt(buf, int64(v))\n\t\tcase uint:\n\t\t\tbuf = appendBulkUint(buf, uint64(v))\n\t\tcase int64:\n\t\t\tbuf = appendBulkInt(buf, int64(v))\n\t\tcase uint64:\n\t\t\tbuf = appendBulkUint(buf, uint64(v))\n\t\tcase int32:\n\t\t\tbuf = appendBulkInt(buf, int64(v))\n\t\tcase uint32:\n\t\t\tbuf = appendBulkUint(buf, uint64(v))\n\t\tcase int8:\n\t\t\tbuf = appendBulkInt(buf, int64(v))\n\t\tcase uint8:\n\t\t\tbuf = appendBulkUint(buf, uint64(v))\n\t\tcase int16:\n\t\t\tbuf = appendBulkInt(buf, int64(v))\n\t\tcase uint16:\n\t\t\tbuf = appendBulkUint(buf, uint64(v))\n\t\tcase bool:\n\t\t\tif v {\n\t\t\t\tbuf = append(buf, \"$1\\r\\n1\"...)\n\t\t\t} else {\n\t\t\t\tbuf = append(buf, \"$1\\r\\n0\"...)\n\t\t\t}\n\t\tcase float32:\n\t\t\tstr := strconv.FormatFloat(float64(v), 'f', -1, 32)\n\t\t\tbuf = appendHead(buf, '$', len(str))\n\t\t\tbuf = append(buf, str...)\n\t\tcase float64:\n\t\t\tstr := strconv.FormatFloat(v, 'f', -1, 
64)\n\t\t\tbuf = appendHead(buf, '$', len(str))\n\t\t\tbuf = append(buf, str...)\n\t\tcase nil:\n\t\t\tbuf = append(buf, \"$0\\r\\n\"...)\n\t\tdefault:\n\t\t\treturn buf[:oldSize], ErrArgumentType.NewWithNoMessage().\n\t\t\t\tWithProperty(EKVal, val).\n\t\t\t\tWithProperty(EKArgPos, i).\n\t\t\t\tWithProperty(EKRequest, req)\n\t\t}\n\t\tbuf = append(buf, '\\r', '\\n')\n\t}\n\treturn buf, nil\n}\n\nfunc appendInt(b []byte, i int64) []byte {\n\tvar u uint64\n\tif i >= 0 && i <= 9 {\n\t\tb = append(b, byte(i)+'0')\n\t\treturn b\n\t}\n\tif i > 0 {\n\t\tu = uint64(i)\n\t} else {\n\t\tb = append(b, '-')\n\t\tu = uint64(-i)\n\t}\n\treturn appendUint(b, u)\n}\n\nfunc appendUint(b []byte, u uint64) []byte {\n\tif u <= 9 {\n\t\tb = append(b, byte(u)+'0')\n\t\treturn b\n\t}\n\tdigits := [20]byte{}\n\tp := 20\n\tfor u > 0 {\n\t\tn := u / 10\n\t\tp--\n\t\tdigits[p] = byte(u-n*10) + '0'\n\t\tu = n\n\t}\n\treturn append(b, digits[p:]...)\n}\n\nfunc appendHead(b []byte, t byte, i int) []byte {\n\tif i < 0 {\n\t\tpanic(\"negative length header\")\n\t}\n\tb = append(b, t)\n\tb = appendUint(b, uint64(i))\n\treturn append(b, '\\r', '\\n')\n}\n\nfunc appendBulkInt(b []byte, i int64) []byte {\n\tif i >= -99999999 && i <= 999999999 {\n\t\tb = append(b, '$', '0', '\\r', '\\n')\n\t} else {\n\t\tb = append(b, '$', '0', '0', '\\r', '\\n')\n\t}\n\tl := len(b)\n\tb = appendInt(b, i)\n\tli := byte(len(b) - l)\n\tif li < 10 {\n\t\tb[l-3] = li + '0'\n\t} else {\n\t\td := li / 10\n\t\tb[l-4] = d + '0'\n\t\tb[l-3] = li - (d * 10) + '0'\n\t}\n\treturn b\n}\n\nfunc appendBulkUint(b []byte, i uint64) []byte {\n\tif i <= 999999999 {\n\t\tb = append(b, '$', '0', '\\r', '\\n')\n\t} else {\n\t\tb = append(b, '$', '0', '0', '\\r', '\\n')\n\t}\n\tl := len(b)\n\tb = appendUint(b, i)\n\tli := byte(len(b) - l)\n\tif li < 10 {\n\t\tb[l-3] = li + '0'\n\t} else {\n\t\td := li / 10\n\t\tb[l-4] = d + '0'\n\t\tb[l-3] = li - (d * 10) + '0'\n\t}\n\treturn b\n}\n\n// ArgToString returns string representataion of an 
argument.\n// Used in cluster to determine cluster slot.\n// Have to be in sync with AppendRequest\nfunc ArgToString(arg interface{}) (string, bool) {\n\tvar bufarr [20]byte\n\tvar buf []byte\n\tswitch v := arg.(type) {\n\tcase string:\n\t\treturn v, true\n\tcase []byte:\n\t\treturn string(v), true\n\tcase int:\n\t\tbuf = appendInt(bufarr[:0], int64(v))\n\tcase uint:\n\t\tbuf = appendUint(bufarr[:0], uint64(v))\n\tcase int64:\n\t\tbuf = appendInt(bufarr[:0], int64(v))\n\tcase uint64:\n\t\tbuf = appendUint(bufarr[:0], uint64(v))\n\tcase int32:\n\t\tbuf = appendInt(bufarr[:0], int64(v))\n\tcase uint32:\n\t\tbuf = appendUint(bufarr[:0], uint64(v))\n\tcase int8:\n\t\tbuf = appendInt(bufarr[:0], int64(v))\n\tcase uint8:\n\t\tbuf = appendUint(bufarr[:0], uint64(v))\n\tcase int16:\n\t\tbuf = appendInt(bufarr[:0], int64(v))\n\tcase uint16:\n\t\tbuf = appendUint(bufarr[:0], uint64(v))\n\tcase bool:\n\t\tif v {\n\t\t\treturn \"1\", true\n\t\t}\n\t\treturn \"0\", true\n\tcase float32:\n\t\treturn strconv.FormatFloat(float64(v), 'f', -1, 32), true\n\tcase float64:\n\t\treturn strconv.FormatFloat(v, 'f', -1, 64), true\n\tcase nil:\n\t\treturn \"\", true\n\tdefault:\n\t\treturn \"\", false\n\t}\n\treturn string(buf), true\n}\n\n// CheckRequest checks requests command and arguments to be compatible with connector.\nfunc CheckRequest(req Request, singleThreaded bool) error {\n\tif err := ForbiddenCommand(req.Cmd, singleThreaded); err != nil {\n\t\treturn err.(*errorx.Error).WithProperty(EKRequest, req)\n\t}\n\tfor i, arg := range req.Args {\n\t\tswitch val := arg.(type) {\n\t\tcase string, []byte, int, uint, int64, uint64, int32, uint32, int8, uint8, int16, uint16, bool, float32, float64, nil:\n\t\t\t// ok\n\t\tdefault:\n\t\t\treturn ErrArgumentType.NewWithNoMessage().\n\t\t\t\tWithProperty(EKVal, val).\n\t\t\t\tWithProperty(EKArgPos, i).\n\t\t\t\tWithProperty(EKRequest, req)\n\t\t}\n\t}\n\treturn 
nil\n}\n"},"avg_line_length":{"kind":"number","value":24.0970464135,"string":"24.097046"},"max_line_length":{"kind":"number","value":121,"string":"121"},"alphanum_fraction":{"kind":"number","value":0.5963929259,"string":"0.596393"},"score":{"kind":"number","value":3.328125,"string":"3.328125"}}},{"rowIdx":984,"cells":{"hexsha":{"kind":"string","value":"402ca108a9c3f098029d64faeab25fe9ff44caf8"},"size":{"kind":"number","value":2763,"string":"2,763"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"Mesh/System/Entity/Function/Powered.py"},"max_stars_repo_name":{"kind":"string","value":"ys-warble/Mesh"},"max_stars_repo_head_hexsha":{"kind":"string","value":"115e7391d19ea09db3c627d8b8ed90b3e3bef9b5"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"Mesh/System/Entity/Function/Powered.py"},"max_issues_repo_name":{"kind":"string","value":"ys-warble/Mesh"},"max_issues_repo_head_hexsha":{"kind":"string","value":"115e7391d19ea09db3c627d8b8ed90b3e3bef9b5"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"number","value":2,"string":"2"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2019-02-25T00:10:15.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2019-03-22T20:13:32.000Z"},"max_forks_repo_path":{"kind":"string","value":"Mesh/System/Entity/Function/Powered.py"},"max_forks_repo_name":{"kind":"string","value":"ys-warble/Mesh"},"max_forks_repo_head_hexsha":{"kind":"string","value":"115e7391d19ea09db3c627d8b8ed90b3e3bef9b5"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"from enum import Enum\n\nfrom Mesh.System.Entity.Channel.PowerWire import PowerWire\nfrom Mesh.System.Entity.Function import BaseFunction, Function\nfrom Mesh.System.Entity.Function.Tasked import TaskName, SystemTask\nfrom Mesh.util.TypeList import TypeList\n\n\nclass PowerType(Enum):\n ELECTRIC = 101\n\n\nclass Power:\n def __init__(self, power_type):\n self.power_type = power_type\n\n\nclass ElectricPower(Power):\n def __init__(self, voltage):\n super().__init__(PowerType.ELECTRIC)\n self.voltage = voltage\n\n def __eq__(self, other):\n return self.power_type == other.power_type and self.voltage == other.voltage\n\n def __str__(self):\n return '%s(voltage=%s)' % (type(self).__name__, self.voltage)\n\n\nclass PowerInput:\n identifier = 'PowerInput'\n\n def __init__(self, parent, power=ElectricPower(voltage=0)):\n self.parent = parent\n self.power = power\n self.power_wires = TypeList(PowerWire)\n\n def set_power(self, power=ElectricPower(voltage=0)):\n self.power = power\n if self.parent.has_function(Function.TASKED):\n self.parent.send_task(SystemTask(name=TaskName.SET_POWER, value={'power': power}))\n else:\n if self.power in self.parent.get_function(Function.POWERED).input_power_ratings:\n self.parent.active = True\n else:\n self.parent.active = False\n\n def get_power(self):\n if len(self.power_wires) > 0:\n return self.power_wires[0].get_power()\n else:\n return self.power\n\n\nclass PowerOutput:\n identifier = 'PowerOutput'\n\n def __init__(self, parent, power=ElectricPower(voltage=0)):\n self.parent = parent\n self.power = power\n self.power_wires = TypeList(PowerWire)\n\n def get_power(self):\n return self.power\n\n def set_power(self, power=ElectricPower(voltage=0)):\n self.power = power\n for wire in self.power_wires:\n wire.set_power(self.power)\n\n\nclass 
Powered(BaseFunction):\n tasks = [\n TaskName.SET_POWER,\n ]\n\n def __init__(self, entity):\n super().__init__(entity)\n self.power_inputs = TypeList(PowerInput)\n self.power_outputs = TypeList(PowerOutput)\n\n self.input_power_ratings = []\n self.output_power_ratings = []\n\n def eval(self):\n pass\n\n def init(self):\n pass\n\n def terminate(self):\n pass\n\n def get_power_input(self, index=0):\n if index < len(self.power_inputs):\n return self.power_inputs[index]\n else:\n raise IndexError\n\n def get_power_output(self, index=0):\n if index < len(self.power_outputs):\n return self.power_outputs[index]\n else:\n raise IndexError\n"},"avg_line_length":{"kind":"number","value":26.3142857143,"string":"26.314286"},"max_line_length":{"kind":"number","value":94,"string":"94"},"alphanum_fraction":{"kind":"number","value":0.6420557365,"string":"0.642056"},"score":{"kind":"number","value":3.28125,"string":"3.28125"}}},{"rowIdx":985,"cells":{"hexsha":{"kind":"string","value":"b0c17de3777c79065b9d41b281b353d09c785a63"},"size":{"kind":"number","value":1215,"string":"1,215"},"ext":{"kind":"string","value":"kt"},"lang":{"kind":"string","value":"Kotlin"},"max_stars_repo_path":{"kind":"string","value":"network/src/main/kotlin/rs/dusk/network/rs/codec/game/encode/message/ContainerItemsMessage.kt"},"max_stars_repo_name":{"kind":"string","value":"dusk-rs/server-old"},"max_stars_repo_head_hexsha":{"kind":"string","value":"4af70ecb731d9ce292d086c81c21eda66bfaa040"},"max_stars_repo_licenses":{"kind":"list like","value":["CC-BY-3.0"],"string":"[\n 
\"CC-BY-3.0\"\n]"},"max_stars_count":{"kind":"number","value":52,"string":"52"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2020-12-09T06:46:47.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2022-03-19T19:53:53.000Z"},"max_issues_repo_path":{"kind":"string","value":"network/src/main/kotlin/rs/dusk/network/rs/codec/game/encode/message/ContainerItemsMessage.kt"},"max_issues_repo_name":{"kind":"string","value":"dusk-rs/server-old"},"max_issues_repo_head_hexsha":{"kind":"string","value":"4af70ecb731d9ce292d086c81c21eda66bfaa040"},"max_issues_repo_licenses":{"kind":"list like","value":["CC-BY-3.0"],"string":"[\n \"CC-BY-3.0\"\n]"},"max_issues_count":{"kind":"number","value":114,"string":"114"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2020-12-10T23:02:59.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2021-06-02T03:02:00.000Z"},"max_forks_repo_path":{"kind":"string","value":"network/src/main/kotlin/rs/dusk/network/rs/codec/game/encode/message/ContainerItemsMessage.kt"},"max_forks_repo_name":{"kind":"string","value":"dusk-rs/server-old"},"max_forks_repo_head_hexsha":{"kind":"string","value":"4af70ecb731d9ce292d086c81c21eda66bfaa040"},"max_forks_repo_licenses":{"kind":"list like","value":["CC-BY-3.0"],"string":"[\n \"CC-BY-3.0\"\n]"},"max_forks_count":{"kind":"number","value":9,"string":"9"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2020-12-13T21:45:34.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2022-01-26T18:23:59.000Z"},"content":{"kind":"string","value":"package rs.dusk.network.rs.codec.game.encode.message\n\nimport rs.dusk.core.network.model.message.Message\n\n/**\n * Sends a list of items to display on a interface item group component\n * @param key The id of the container\n * @param items List of the item ids to display\n * @param amounts List of the item amounts to display\n * @param secondary Optional 
to send to the primary or secondary container\n */\ndata class ContainerItemsMessage(val key: Int, val items: IntArray, val amounts: IntArray, val secondary: Boolean) : Message {\n override fun equals(other: Any?): Boolean {\n if (this === other) return true\n if (javaClass != other?.javaClass) return false\n\n other as ContainerItemsMessage\n\n if (key != other.key) return false\n if (!items.contentEquals(other.items)) return false\n if (!amounts.contentEquals(other.amounts)) return false\n if (secondary != other.secondary) return false\n\n return true\n }\n\n override fun hashCode(): Int {\n var result = key\n result = 31 * result + items.contentHashCode()\n result = 31 * result + amounts.contentHashCode()\n result = 31 * result + secondary.hashCode()\n return result\n }\n}"},"avg_line_length":{"kind":"number","value":35.7352941176,"string":"35.735294"},"max_line_length":{"kind":"number","value":126,"string":"126"},"alphanum_fraction":{"kind":"number","value":0.6781893004,"string":"0.678189"},"score":{"kind":"number","value":3.0625,"string":"3.0625"}}},{"rowIdx":986,"cells":{"hexsha":{"kind":"string","value":"1bef23af1d1e1885b3fbe2f77c7b1a3aa023161b"},"size":{"kind":"number","value":2703,"string":"2,703"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"tests/test_subscription.py"},"max_stars_repo_name":{"kind":"string","value":"avito-tech/alert-autoconf"},"max_stars_repo_head_hexsha":{"kind":"string","value":"73d9270c6f9f0655cfc68ae3dac4e7406acf10ae"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"tests/test_subscription.py"},"max_issues_repo_name":{"kind":"string","value":"avito-tech/alert-autoconf"},"max_issues_repo_head_hexsha":{"kind":"string","value":"73d9270c6f9f0655cfc68ae3dac4e7406acf10ae"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"tests/test_subscription.py"},"max_forks_repo_name":{"kind":"string","value":"avito-tech/alert-autoconf"},"max_forks_repo_head_hexsha":{"kind":"string","value":"73d9270c6f9f0655cfc68ae3dac4e7406acf10ae"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"from unittest import TestCase\n\nfrom alert_autoconf.moira import MoiraAlert\n\n\ndef _make_sub(**kwargs):\n sub = {\n 'tags': [],\n 'contacts': [],\n 'escalations': [],\n 'sched': {'startOffset': 0, 'endOffset': 1439, 'tzOffset': 0, 'days': []},\n }\n sub.update(**kwargs)\n return sub\n\n\ndef _make_esc(offset=10, contacts=None):\n return {'contacts': contacts or [], 'offset_in_minutes': offset}\n\n\nclass SubscriptionCmpTest(TestCase):\n def test_two_empty(self):\n s1 = _make_sub()\n s2 = _make_sub()\n r = MoiraAlert._subscription_not_changed(s1, s2)\n self.assertTrue(r)\n\n def test_tags_changed(self):\n s1 = _make_sub(tags=['t1'])\n s2 = _make_sub()\n r = MoiraAlert._subscription_not_changed(s1, s2)\n self.assertFalse(r)\n\n def test_tags_equal(self):\n s1 = _make_sub(tags=['t1', 't2'])\n s2 = 
_make_sub(tags=['t1', 't2'])\n r = MoiraAlert._subscription_not_changed(s1, s2)\n self.assertTrue(r)\n\n def test_contacts_equal(self):\n s1 = _make_sub(contacts=['c1', 'c2'])\n s2 = _make_sub(contacts=['c1', 'c2'])\n r = MoiraAlert._subscription_not_changed(s1, s2)\n self.assertTrue(r)\n\n def test_tags_and_contacts_equal(self):\n s1 = _make_sub(contacts=['c1', 'c2'], tags=['t1'])\n s2 = _make_sub(contacts=['c1', 'c2'], tags=['t1'])\n r = MoiraAlert._subscription_not_changed(s1, s2)\n self.assertTrue(r)\n\n def test_tags_and_contacts_not_equal(self):\n s1 = _make_sub(contacts=['z1', 'c2'], tags=['t1'])\n s2 = _make_sub(contacts=['c1', 'c2'], tags=['t1'])\n r = MoiraAlert._subscription_not_changed(s1, s2)\n self.assertFalse(r)\n\n def test_escalations_empty(self):\n s1 = _make_sub(escalations=[_make_esc()])\n s2 = _make_sub(escalations=[_make_esc()])\n r = MoiraAlert._subscription_not_changed(s1, s2)\n self.assertTrue(r)\n\n def test_escalations_diff_offsets(self):\n s1 = _make_sub(escalations=[_make_esc(20)])\n s2 = _make_sub(escalations=[_make_esc()])\n r = MoiraAlert._subscription_not_changed(s1, s2)\n self.assertFalse(r)\n\n def test_escalations_order(self):\n s1 = _make_sub(escalations=[_make_esc(20), _make_esc(10)])\n s2 = _make_sub(escalations=[_make_esc(10), _make_esc(20)])\n r = MoiraAlert._subscription_not_changed(s1, s2)\n self.assertTrue(r)\n\n def test_escalations_contacts_order(self):\n s1 = _make_sub(escalations=[_make_esc(contacts=['1', '2'])])\n s2 = _make_sub(escalations=[_make_esc(contacts=['2', '1'])])\n r = MoiraAlert._subscription_not_changed(s1, s2)\n 
self.assertTrue(r)\n"},"avg_line_length":{"kind":"number","value":33.3703703704,"string":"33.37037"},"max_line_length":{"kind":"number","value":82,"string":"82"},"alphanum_fraction":{"kind":"number","value":0.623381428,"string":"0.623381"},"score":{"kind":"number","value":3.015625,"string":"3.015625"}}},{"rowIdx":987,"cells":{"hexsha":{"kind":"string","value":"3d238d6a6e35dd9449ab92eabfd502e8e7e9c381"},"size":{"kind":"number","value":1187,"string":"1,187"},"ext":{"kind":"string","value":"go"},"lang":{"kind":"string","value":"Go"},"max_stars_repo_path":{"kind":"string","value":"config/config.go"},"max_stars_repo_name":{"kind":"string","value":"patarra/jira-todo-sync"},"max_stars_repo_head_hexsha":{"kind":"string","value":"736a1bb8b5a76894138a8bd3b0f37a85d10871ae"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"config/config.go"},"max_issues_repo_name":{"kind":"string","value":"patarra/jira-todo-sync"},"max_issues_repo_head_hexsha":{"kind":"string","value":"736a1bb8b5a76894138a8bd3b0f37a85d10871ae"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"config/config.go"},"max_forks_repo_name":{"kind":"string","value":"patarra/jira-todo-sync"},"max_forks_repo_head_hexsha":{"kind":"string","value":"736a1bb8b5a76894138a8bd3b0f37a85d10871ae"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"package config\n\nimport (\n\t\"errors\"\n\t\"fmt\"\n\t\"github.com/spf13/viper\"\n\t\"sync\"\n)\n\ntype JiraConfig struct {\n\tServer string `mapstructure:\"server\"`\n\tUser string `mapstructure:\"user\"`\n\tPassword string `mapstructure:\"password\"`\n}\n\ntype TodoistConfig struct {\n\tToken string `mapstructure:\"token\"`\n}\n\ntype Config struct {\n\tJira JiraConfig `mapstructure:\"jira\"`\n\tTodoist TodoistConfig `mapstructure:\"todoist\"`\n}\n\nvar instance Config\nvar once sync.Once\nvar initialised = false\n\nfunc InitConfig(cfgFile string) (*Config, error) {\n\tvar onceErr error = nil\n\tonce.Do(func() {\n\t\tviper.SetConfigFile(cfgFile)\n\t\tviper.SetConfigType(\"toml\")\n\n\t\tif err := viper.ReadInConfig(); err != nil {\n\t\t\tonceErr = errors.New(fmt.Sprintf(\"couldn't load config from %s: %s\\n\", cfgFile, err))\n\t\t}\n\t\tif err := viper.Unmarshal(&instance); err != nil {\n\t\t\tonceErr = errors.New(fmt.Sprintf(\"couldn't read config: %s\\n\", err))\n\t\t}\n\t\tinitialised = true\n\t})\n\tif onceErr == nil {\n\t\treturn &instance, nil\n\t} else {\n\t\treturn nil, onceErr\n\t}\n}\n\nfunc GetConfig() (*Config, error) {\n\tif !initialised {\n\t\treturn nil, errors.New(\"config is not initialised yet, please call InitConfig(cfgFile)\")\n\t}\n\treturn &instance, 
nil\n}\n\n"},"avg_line_length":{"kind":"number","value":20.8245614035,"string":"20.824561"},"max_line_length":{"kind":"number","value":90,"string":"90"},"alphanum_fraction":{"kind":"number","value":0.6891322662,"string":"0.689132"},"score":{"kind":"number","value":3.046875,"string":"3.046875"}}},{"rowIdx":988,"cells":{"hexsha":{"kind":"string","value":"7f71cd62fefa40eebb9ea107e698b6ca746b7814"},"size":{"kind":"number","value":1427,"string":"1,427"},"ext":{"kind":"string","value":"go"},"lang":{"kind":"string","value":"Go"},"max_stars_repo_path":{"kind":"string","value":"internal/logger/logger.go"},"max_stars_repo_name":{"kind":"string","value":"vsdmars/actor"},"max_stars_repo_head_hexsha":{"kind":"string","value":"7f5a8a9ca8801684a2008213435ecff4f142506c"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"internal/logger/logger.go"},"max_issues_repo_name":{"kind":"string","value":"vsdmars/actor"},"max_issues_repo_head_hexsha":{"kind":"string","value":"7f5a8a9ca8801684a2008213435ecff4f142506c"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"number","value":4,"string":"4"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2019-03-04T19:39:25.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2019-04-03T02:28:27.000Z"},"max_forks_repo_path":{"kind":"string","value":"internal/logger/logger.go"},"max_forks_repo_name":{"kind":"string","value":"vsdmars/actor"},"max_forks_repo_head_hexsha":{"kind":"string","value":"7f5a8a9ca8801684a2008213435ecff4f142506c"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"package logger\n\nimport (\n\t\"fmt\"\n\t\"os\"\n\n\t\"go.uber.org/zap\"\n\t\"go.uber.org/zap/zapcore\"\n)\n\nvar logger serviceLogger\nvar origLogger serviceLogger\n\nfunc init() {\n\tinitLogger()\n}\n\n// LogSync sync logger output\nfunc LogSync() {\n\t// ignore logger Sync error\n\tlogger.Sync()\n}\n\n// SetLogger sets caller provided zap logger\n//\n// reset to service's default logger by passing in nil pointer\nfunc SetLogger(l *zap.Logger) {\n\tif l != nil {\n\t\tlogger.Logger = l\n\t\tlogger.provided = true\n\t\treturn\n\t}\n\n\tlogger = origLogger\n}\n\n// SetLogLevel sets the service log level\n//\n// noop if caller provides it's own zap logger\nfunc SetLogLevel(level zapcore.Level) {\n\tif logger.provided {\n\t\treturn\n\t}\n\n\tlogger.config.Level.SetLevel(level)\n}\n\nfunc initLogger() {\n\t// default log level set to 'info'\n\tatom := zap.NewAtomicLevelAt(zap.InfoLevel)\n\n\tconfig := zap.Config{\n\t\tLevel: atom,\n\t\tDevelopment: false,\n\t\tSampling: &zap.SamplingConfig{\n\t\t\tInitial: 100,\n\t\t\tThereafter: 100,\n\t\t},\n\t\tEncoding: \"json\", // console, json, toml\n\t\tEncoderConfig: zap.NewProductionEncoderConfig(),\n\t\tOutputPaths: []string{\"stderr\"},\n\t\tErrorOutputPaths: []string{\"stderr\"},\n\t}\n\n\tmylogger, err := config.Build()\n\tif err != nil {\n\t\tfmt.Printf(\"Initialize zap logger error: %v\\n\", err)\n\t\tos.Exit(1)\n\t}\n\n\tlogger = serviceLogger{mylogger, &config, false}\n\torigLogger = logger\n}\n\n// GetLog gets the current logger\nfunc GetLog() serviceLogger {\n\treturn 
logger\n}\n"},"avg_line_length":{"kind":"number","value":18.0632911392,"string":"18.063291"},"max_line_length":{"kind":"number","value":62,"string":"62"},"alphanum_fraction":{"kind":"number","value":0.6839523476,"string":"0.683952"},"score":{"kind":"number","value":3.0625,"string":"3.0625"}}},{"rowIdx":989,"cells":{"hexsha":{"kind":"string","value":"857b8c2dbc0be5ef308797b18469cd500d2836fa"},"size":{"kind":"number","value":1470,"string":"1,470"},"ext":{"kind":"string","value":"js"},"lang":{"kind":"string","value":"JavaScript"},"max_stars_repo_path":{"kind":"string","value":"lib/setup.js"},"max_stars_repo_name":{"kind":"string","value":"jkresner/meanair-scream"},"max_stars_repo_head_hexsha":{"kind":"string","value":"c5c179e83692f371c722178d4af62398d0eabe1c"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":3,"string":"3"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2015-09-22T03:20:03.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2016-03-02T12:10:44.000Z"},"max_issues_repo_path":{"kind":"string","value":"lib/setup.js"},"max_issues_repo_name":{"kind":"string","value":"jkresner/meanair-scream"},"max_issues_repo_head_hexsha":{"kind":"string","value":"c5c179e83692f371c722178d4af62398d0eabe1c"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"lib/setup.js"},"max_forks_repo_name":{"kind":"string","value":"jkresner/meanair-scream"},"max_forks_repo_head_hexsha":{"kind":"string","value":"c5c179e83692f371c722178d4af62398d0eabe1c"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"module.exports = ({log}) => ({\n\n\n data(done) {\n log.step('data:db')\n global.DB = require('./db')(() => {\n let seeder = require('./db.seed')(DB, OPTS)\n seeder.testToSeed(y => y ? seeder.restoreBSONData(done) : done())\n })\n\n let ISODate = global.ISODate = str => moment(str).toDate()\n let ID = global.ID = global.ObjectId = (DB||{}).ObjectId\n\n log.step('data:fixture')\n global.FIXTURE = require('./data.fixture')\n\n if (!DB) done()\n\n return { ISODate, ID }\n },\n\n\n runner() {\n let Mocha = require('mocha')\n log.step('tests:init')\n return new Mocha(OPTS.config.mocha)\n .addFile(join(__dirname,'runner'))\n .run(status => {\n log.info('DONE', `${status==0?'No':'With'} errors\\n`).flush()\n process.exit(status)\n })\n },\n\n\n app(done) {\n let start = new Date()\n log.step('app:init')\n\n global.APP = OPTS.App(function(e) {\n log.info('APP', `${e?'fail':'ready'} (${new Date()-start}ms)`).flush()\n log.step('tests:run')\n done(e)\n })\n },\n\n /*\n If unhandledPromiseRejection f => Error, p => Profile\n If failed test / assertion f => mocha.ctx, p => Error\n */\n fail(f, p) {\n // console.log('in fail....', f, p)\n if (f.stack)\n log.error(f)\n else if ((p instanceof Error))\n log.error(p)\n\n log.info('FAIL', `${log.step()} `.white + `${log.runner.scope.join(' > ')}`.spec)\n process.exit(1) // Exiting stops default mocha exit output\n 
}\n\n})\n"},"avg_line_length":{"kind":"number","value":23.7096774194,"string":"23.709677"},"max_line_length":{"kind":"number","value":85,"string":"85"},"alphanum_fraction":{"kind":"number","value":0.5428571429,"string":"0.542857"},"score":{"kind":"number","value":3.140625,"string":"3.140625"}}},{"rowIdx":990,"cells":{"hexsha":{"kind":"string","value":"f076aaf49a3d8fba6fb5ba17c6020bb113d2de01"},"size":{"kind":"number","value":5417,"string":"5,417"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"src/jsonengine/main.py"},"max_stars_repo_name":{"kind":"string","value":"youhengzhou/json-crud-engine"},"max_stars_repo_head_hexsha":{"kind":"string","value":"8ee614af6dddbe1236a78a7debf71048f476a3ff"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":2,"string":"2"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2021-07-02T04:33:36.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2022-01-09T23:40:30.000Z"},"max_issues_repo_path":{"kind":"string","value":"src/jsonengine/main.py"},"max_issues_repo_name":{"kind":"string","value":"youhengzhou/json-crud-engine"},"max_issues_repo_head_hexsha":{"kind":"string","value":"8ee614af6dddbe1236a78a7debf71048f476a3ff"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"src/jsonengine/main.py"},"max_forks_repo_name":{"kind":"string","value":"youhengzhou/json-crud-engine"},"max_forks_repo_head_hexsha":{"kind":"string","value":"8ee614af6dddbe1236a78a7debf71048f476a3ff"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"# JSON engine 21 9 16\n# database\n# eng.json\n# engine\n# eng.py\nimport os\nimport json\n\npath = os.getcwd() + '\\\\json_engine_database\\\\'\npath_string = ''\n\ndef set_path(string):\n global path\n path = os.getcwd() + string\n\ndef dictionary_kv(dictionary, key, value):\n dictionary[key] = value\n return dictionary\n\ndef set_path_string(args,create_flag):\n global path_string\n if (args):\n path_string = str(args[0]) + '\\\\'\n if os.path.exists(path + path_string)==False:\n if create_flag == True:\n os.makedirs(path + path_string)\n else:\n return False\n return path_string\n\ndef create(dictionary, *args):\n path_string = set_path_string(args,True)\n with open(path + path_string + 'eng.json', 'w') as outfile:\n json.dump(dictionary, outfile, indent=4)\n\ndef retrieve(*args):\n path_string = set_path_string(args,False)\n if path_string == False:\n return False\n with open(path + path_string + 'eng.json', 'r') as f:\n return(json.load(f))\n\ndef retrieve_k(key, *args):\n path_string = set_path_string(args,False)\n if path_string == False:\n return False\n with open(path + path_string + 'eng.json', 'r') as f:\n if key in json.load(f):\n with open(path + path_string + 'eng.json', 'r') as f:\n return(json.load(f)[key])\n else:\n return False\n\ndef update(dictionary, *args):\n path_string = set_path_string(args,False)\n if path_string == False:\n return False\n with open(path + path_string + 'eng.json', 'w') as outfile:\n json.dump(dictionary, outfile, indent=4)\n return True\n\ndef update_kv(key, value, *args):\n path_string = set_path_string(args,False)\n if path_string == False:\n return False\n with open(path + path_string + 'eng.json', 'w') as outfile:\n json.dump({key: value}, outfile, indent=4)\n return True\n\ndef patch(dictionary, *args):\n path_string = 
set_path_string(args,False)\n if path_string == False:\n return False\n with open(path + path_string + 'eng.json', 'r') as f:\n data=(json.load(f))\n data.update(dictionary)\n with open(path + path_string + 'eng.json', 'w') as outfile:\n json.dump(data, outfile, indent=4)\n return True\n\ndef patch_kv(key, value, *args):\n path_string = set_path_string(args,False)\n if path_string == False:\n return False\n with open(path + path_string + 'eng.json', 'r') as f:\n data=(json.load(f))\n data.update({key: value})\n with open(path + path_string + 'eng.json', 'w') as outfile:\n json.dump(data, outfile, indent=4)\n return True\n\ndef delete(*args):\n if (args):\n path_string = str(args[0]) + '\\\\'\n if os.path.exists(path + path_string + 'eng.json'):\n os.remove(path + path_string + 'eng.json')\n os.rmdir(path + path_string)\n return True\n else:\n return False\n\ndef delete_k(key, *args):\n if (args):\n path_string = str(args[0]) + '\\\\'\n if os.path.exists(path + path_string + 'eng.json'):\n with open(path + path_string + 'eng.json', 'r') as f:\n if key in json.load(f):\n data = json.load(f)\n data.pop(key)\n with open(path + path_string + 'eng.json', 'w') as outfile:\n json.dump(data, outfile, indent=4)\n return True\n else:\n return False\n else:\n return False\n\ndef display(*args):\n if (args):\n path_string = str(args[0]) + '\\\\'\n if os.path.exists(path + path_string + 'eng.json'):\n with open(path + path_string + 'eng.json', 'r') as f:\n print(json.load(f))\n return True\n else:\n print('The selected file does not exist')\n return False\n\ndef display_key(key, *args):\n if (args):\n path_string = str(args[0]) + '\\\\'\n if os.path.exists(path + path_string + 'eng.json'):\n with open(path + path_string + 'eng.json', 'r') as f:\n if key in json.load(f):\n print(key + ' ' + str(json.load(f)[key]))\n return True\n else:\n print('The selected file does not exist')\n return False\n\ndef display_nkv(key, *args):\n if (args):\n path_string = str(args[0]) + '\\\\'\n if 
os.path.exists(path + path_string + 'eng.json'):\n with open(path + path_string + 'eng.json', 'r') as f:\n if key in json.load(f):\n data = json.load(f)\n data.pop(key,'key not found')\n print(data)\n return True\n else:\n print('The selected file does not exist')\n return False\n\ndef display_ind(*args):\n if (args):\n path_string = str(args[0]) + '\\\\'\n if os.path.exists(path + path_string + 'eng.json'):\n with open(path + path_string + 'eng.json', 'r') as f:\n print(json.dumps(json.load(f), indent=4))\n else:\n print('The selected file does not exist')\n\ndef display_ind_nkv(key, *args):\n if (args):\n path_string = str(args[0]) + '\\\\'\n if os.path.exists(path + path_string + 'eng.json'):\n with open(path + path_string + 'eng.json', 'r') as f:\n data = json.load(f)\n data.pop(key,'key not found')\n print(json.dumps(data, indent=4))\n else:\n print('The selected file does not exist')\n"},"avg_line_length":{"kind":"number","value":31.132183908,"string":"31.132184"},"max_line_length":{"kind":"number","value":75,"string":"75"},"alphanum_fraction":{"kind":"number","value":0.568026583,"string":"0.568027"},"score":{"kind":"number","value":3.1875,"string":"3.1875"}}},{"rowIdx":991,"cells":{"hexsha":{"kind":"string","value":"19d428aad36966348419dd413eabc7e98a9aa51d"},"size":{"kind":"number","value":13103,"string":"13,103"},"ext":{"kind":"string","value":"lua"},"lang":{"kind":"string","value":"Lua"},"max_stars_repo_path":{"kind":"string","value":"Plugins/UnrealLua/LuaSource/luahotupdate.lua"},"max_stars_repo_name":{"kind":"string","value":"asomfai/unreal.lua"},"max_stars_repo_head_hexsha":{"kind":"string","value":"61a7f3fd2e967ffd970c9b2ac72f12aa2af34bd8"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_stars_count":{"kind":"number","value":311,"string":"311"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2017-01-31T04:24:13.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2022-03-02T10:12:58.000Z"},"max_issues_repo_path":{"kind":"string","value":"Plugins/UnrealLua/LuaSource/luahotupdate.lua"},"max_issues_repo_name":{"kind":"string","value":"asomfai/unreal.lua"},"max_issues_repo_head_hexsha":{"kind":"string","value":"61a7f3fd2e967ffd970c9b2ac72f12aa2af34bd8"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"number","value":39,"string":"39"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2017-02-14T09:33:02.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2020-02-14T07:45:33.000Z"},"max_forks_repo_path":{"kind":"string","value":"Plugins/UnrealLua/LuaSource/luahotupdate.lua"},"max_forks_repo_name":{"kind":"string","value":"asomfai/unreal.lua"},"max_forks_repo_head_hexsha":{"kind":"string","value":"61a7f3fd2e967ffd970c9b2ac72f12aa2af34bd8"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"number","value":98,"string":"98"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2017-01-30T17:49:34.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2022-03-15T08:16:34.000Z"},"content":{"kind":"string","value":"if _VERSION == \"Lua 5.3\" then\n\tfunction getfenv(f)\n\t\tif type(f) == \"function\" then\n\t\t\tlocal name, value = debug.getupvalue(f, 1)\n\t\t\tif name == \"_ENV\" then\n\t\t\t\treturn value\n\t\t\telse\n\t\t\t\treturn _ENV\n\t\t\tend\t\t\n\t\tend\n\tend\n\n\tfunction setfenv(f, Env)\n\t\tif type(f) == \"function\" then\n\t\t\tlocal name, value = debug.getupvalue(f, 1)\n\t\t\tif name == \"_ENV\" then\n\t\t\t\tdebug.setupvalue(f, 1, 
Env)\n\t\t\tend\t\t\n\t\tend\n\tend\n\tdebug = debug or {}\n\tdebug.setfenv = setfenv\n\n\tfunction loadstring( ... )\n return load(...)\n end\nend\n\nlocal HU = {}\n\nfunction HU.FailNotify(...)\n\tif HU.NotifyFunc then HU.NotifyFunc(...) end\nend\nfunction HU.DebugNofity(...)\n\tif HU.DebugNofityFunc then HU.DebugNofityFunc(...) end\nend\n\nlocal function GetWorkingDir()\n\tif HU.WorkingDir == nil then\n\t local p = io.popen(\"echo %cd%\")\n\t if p then\n\t HU.WorkingDir = p:read(\"*l\")..\"\\\\\"\n\t p:close()\n\t end\n\tend\n\treturn HU.WorkingDir\nend\n\nlocal function Normalize(path)\n\tpath = path:gsub(\"/\",\"\\\\\") \n\tif path:find(\":\") == nil then\n\t\tpath = GetWorkingDir()..path \n\tend\n\tlocal pathLen = #path \n\tif path:sub(pathLen, pathLen) == \"\\\\\" then\n\t\t path = path:sub(1, pathLen - 1)\n\tend\n\t \n local parts = { }\n for w in path:gmatch(\"[^\\\\]+\") do\n if w == \"..\" and #parts ~=0 then table.remove(parts)\n elseif w ~= \".\" then table.insert(parts, w)\n end\n end\n return table.concat(parts, \"\\\\\")\nend\n\nfunction HU.InitFileMap(RootPath)\n\tlocal TheMap = {}\n\tfor _, rootpath in pairs(RootPath) do\n\t\trootpath = Normalize(rootpath)\n\t\tlocal file = io.popen(\"dir /S/B /A:A \\\"\"..rootpath..\"\\\"\")\n\t\tio.input(file)\n\t\tfor line in io.lines() do\n\t \t\tlocal FileName = string.match(line,\".*\\\\(.*)%.lua\")\n\t \t if FileName ~= nil then\n\t if TheMap[FileName] == nil then\n\t \tTheMap[FileName] = {}\n\t \tend\n\t \tlocal luapath = string.sub(line, #rootpath+2, #line-4)\n\t\t\t\tluapath = string.gsub(luapath, \"\\\\\", \".\")\n\t\t\t\tHU.LuaPathToSysPath[luapath] = SysPath\n\t \ttable.insert(TheMap[FileName], {SysPath = line, LuaPath = luapath})\n\t \tend\n\t end\n\t file:close()\n\tend\n\treturn TheMap\nend\n\nfunction HU.InitFakeTable()\n\tlocal meta = {}\n\tHU.Meta = meta\n\tlocal function FakeT() return setmetatable({}, meta) end\n\tlocal function EmptyFunc() end\n\tlocal function pairs() return EmptyFunc end 
\n\tlocal function setmetatable(t, metaT)\n\t\tHU.MetaMap[t] = metaT \n\t\treturn t\n\tend\n\tlocal function getmetatable(t, metaT)\n\t\treturn setmetatable({}, t)\n\tend\n\tlocal function require(LuaPath)\n\t\tif not HU.RequireMap[LuaPath] then\n\t\t\tlocal FakeTable = FakeT()\n\t\t\tHU.RequireMap[LuaPath] = FakeTable\n\t\tend\n\t\treturn HU.RequireMap[LuaPath]\n\tend\n\tfunction meta.__index(t, k)\n\t\tif k == \"setmetatable\" then\n\t\t\treturn setmetatable\n\t\telseif k == \"pairs\" or k == \"ipairs\" then\n\t\t\treturn pairs\n\t\telseif k == \"next\" then\n\t\t\treturn EmptyFunc\n\t\telseif k == \"require\" then\n\t\t\treturn require\n\t\telseif HU.CallOriginFunctions and HU.CallOriginFunctions[k] then\n\t\t\treturn _G[k]\n\t\telse\n\t\t\tlocal FakeTable = FakeT()\n\t\t\trawset(t, k, FakeTable)\n\t\t\treturn FakeTable \n\t\tend\n\tend\n\tfunction meta.__newindex(t, k, v) rawset(t, k, v) end\n\tfunction meta.__call() return FakeT(), FakeT(), FakeT() end\n\tfunction meta.__add() return meta.__call() end\n\tfunction meta.__sub() return meta.__call() end\n\tfunction meta.__mul() return meta.__call() end\n\tfunction meta.__div() return meta.__call() end\n\tfunction meta.__mod() return meta.__call() end\n\tfunction meta.__pow() return meta.__call() end\n\tfunction meta.__unm() return meta.__call() end\n\tfunction meta.__concat() return meta.__call() end\n\tfunction meta.__eq() return meta.__call() end\n\tfunction meta.__lt() return meta.__call() end\n\tfunction meta.__le() return meta.__call() end\n\tfunction meta.__len() return meta.__call() end\n\treturn FakeT\nend\n\nfunction HU.InitProtection()\n\tHU.Protection = {}\n\tHU.Protection[setmetatable] = true\n\tHU.Protection[pairs] = true\n\tHU.Protection[ipairs] = true\n\tHU.Protection[next] = true\n\tHU.Protection[require] = true\n\tHU.Protection[HU] = true\n\tHU.Protection[HU.Meta] = true\n\tHU.Protection[math] = true\n\tHU.Protection[string] = true\n\tHU.Protection[table] = true\nend\n\nfunction 
HU.AddFileFromHUList()\n\tpackage.loaded[HU.UpdateListFile] = nil\n\tlocal FileList = require (HU.UpdateListFile)\n\tHU.ALL = false\n\tHU.HUMap = {}\n\tfor _, file in pairs(FileList) do\n\t\tif file == \"_ALL_\" then\n\t\t\tHU.ALL = true\n\t\t\tfor k, v in pairs(HU.FileMap) do\n\t\t\t\tfor _, path in pairs(v) do\n\t\t\t\t\tHU.HUMap[path.LuaPath] = path.SysPath \t\n\t\t\t\tend\n\t\t\tend\n\t\t\treturn\n\t\tend\n\n\t\tif not HU.FileMap[file] then\n\t\t\tif HU.TryReloadFileCount[file] == nil or HU.TryReloadFileCount[file] == 0 then\n\t\t\t\tHU.FileMap = HU.InitFileMap(HU.RootPath)\n\t\t\t\tif not HU.FileMap[file] then\n\t\t\t\t\tHU.FailNotify(\"HotUpdate can't not find \"..file)\n\t\t\t\t\tHU.TryReloadFileCount[file] = 3\n\t\t\t\tend\n\t\t\telse\n\t\t\t\tHU.TryReloadFileCount[file] = HU.TryReloadFileCount[file] - 1\n\t\t\tend\t\t\t\n\t\tend\n\n\t\tif HU.FileMap[file] then\n\t\t\tfor _, path in pairs(HU.FileMap[file]) do\n\t\t\t\tHU.HUMap[path.LuaPath] = path.SysPath \t\n\t\t\tend\n\t\tend\n\tend\nend\n\nfunction HU.ErrorHandle(e)\n\tHU.FailNotify(\"HotUpdate Error\\n\"..tostring(e))\n\tHU.ErrorHappen = true\nend\n\nfunction HU.LoadStringFunc(SysPath)\n\tio.input(SysPath)\n\tlocal CodeStr = io.read(\"*all\")\n\tio.input():close()\n\treturn CodeStr\nend\n\nfunction HU.BuildNewCode(SysPath, LuaPath)\n\tlocal NewCode = HU.LoadStringFunc(SysPath)\n\tif HU.ALL and HU.OldCode[SysPath] == nil then\n\t\tHU.OldCode[SysPath] = NewCode\n\t\treturn\n\tend\n\tif HU.OldCode[SysPath] == NewCode then\n\t\treturn false\n\tend\n\tHU.DebugNofity(SysPath)\n\tlocal chunk = \"--[[\"..LuaPath..\"]] \"\n\tchunk = chunk..NewCode\t\n\tlocal NewFunction = loadstring(chunk)\n\tif not NewFunction then \n \t\tHU.FailNotify(SysPath..\" has syntax error.\") \t\n \t\tcollectgarbage(\"collect\")\n \t\treturn false\n\telse\n\t\tHU.FakeENV = HU.FakeT()\n\t\tHU.MetaMap = {}\n\t\tHU.RequireMap = {}\n\t\tsetfenv(NewFunction, HU.FakeENV)\n\t\tlocal NewObject\n\t\tHU.ErrorHappen = false\n\t\txpcall(function 
() NewObject = NewFunction() end, HU.ErrorHandle)\n\t\tif not HU.ErrorHappen then \n\t\t\tHU.OldCode[SysPath] = NewCode\n\t\t\treturn true, NewObject\n\t\telse\n\t \t\tcollectgarbage(\"collect\")\n\t\t\treturn false\n\t\tend\n\tend\nend\n\nfunction HU.Travel_G()\n\tlocal visited = {}\n\tvisited[HU] = true\n\tlocal function f(t)\n\t\tif (type(t) ~= \"function\" and type(t) ~= \"table\") or visited[t] or HU.Protection[t] then return end\n\t\tvisited[t] = true\n\t\tif type(t) == \"function\" then\n\t\t \tfor i = 1, math.huge do\n\t\t\t\tlocal name, value = debug.getupvalue(t, i)\n\t\t\t\tif not name then break end\n\t\t\t\tif type(value) == \"function\" then\n\t\t\t\t\tfor _, funcs in ipairs(HU.ChangedFuncList) do\n\t\t\t\t\t\tif value == funcs[1] then\n\t\t\t\t\t\t\tdebug.setupvalue(t, i, funcs[2])\n\t\t\t\t\t\tend\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\t\tf(value)\n\t\t\tend\n\t\telseif type(t) == \"table\" then\n\t\t\tf(debug.getmetatable(t))\n\t\t\tlocal changeIndexs = {}\n\t\t\tfor k,v in pairs(t) do\n\t\t\t\tf(k); f(v);\n\t\t\t\tif type(v) == \"function\" then\n\t\t\t\t\tfor _, funcs in ipairs(HU.ChangedFuncList) do\n\t\t\t\t\t\tif v == funcs[1] then t[k] = funcs[2] end\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\t\tif type(k) == \"function\" then\n\t\t\t\t\tfor index, funcs in ipairs(HU.ChangedFuncList) do\n\t\t\t\t\t\tif k == funcs[1] then changeIndexs[#changeIndexs+1] = index end\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\tend\n\t\t\tfor _, index in ipairs(changeIndexs) do\n\t\t\t\tlocal funcs = HU.ChangedFuncList[index]\n\t\t\t\tt[funcs[2]] = t[funcs[1]] \n\t\t\t\tt[funcs[1]] = nil\n\t\t\tend\n\t\tend\n\tend\n\t\n\tf(_G)\n\tlocal registryTable = debug.getregistry()\n\tf(registryTable)\n\t\n\tfor _, funcs in ipairs(HU.ChangedFuncList) do\n\t\tif funcs[3] == \"HUDebug\" then funcs[4]:HUDebug() end\n\tend\nend\n\nfunction HU.ReplaceOld(OldObject, NewObject, LuaPath, From, Deepth)\n\tif type(OldObject) == type(NewObject) then\n\t\tif type(NewObject) == \"table\" 
then\n\t\t\tHU.UpdateAllFunction(OldObject, NewObject, LuaPath, From, \"\") \n\t\telseif type(NewObject) == \"function\" then\n\t\t\tHU.UpdateOneFunction(OldObject, NewObject, LuaPath, nil, From, \"\")\n\t\tend\n\tend\nend\n\nfunction HU.HotUpdateCode(LuaPath, SysPath)\n\tlocal OldObject = package.loaded[LuaPath]\n\tif OldObject ~= nil then\n\t\tHU.VisitedSig = {}\n\t\tHU.ChangedFuncList = {}\n\t\tlocal Success, NewObject = HU.BuildNewCode(SysPath, LuaPath)\n\t\tif Success then\n\t\t\tHU.ReplaceOld(OldObject, NewObject, LuaPath, \"Main\", \"\")\n\t\t\tfor LuaPath, NewObject in pairs(HU.RequireMap) do\n\t\t\t\tlocal OldObject = package.loaded[LuaPath]\n\t\t\t\tHU.ReplaceOld(OldObject, NewObject, LuaPath, \"Main_require\", \"\")\n\t\t\tend\n\t\t\tsetmetatable(HU.FakeENV, nil)\n\t\t\tHU.UpdateAllFunction(HU.ENV, HU.FakeENV, \" ENV \", \"Main\", \"\")\n\t\t\tif #HU.ChangedFuncList > 0 then\n\t\t\t\tHU.Travel_G()\n\t\t\tend\n\t\t\tcollectgarbage(\"collect\")\n\t\tend\n\telseif HU.OldCode[SysPath] == nil then \n\t\tHU.OldCode[SysPath] = HU.LoadStringFunc(SysPath)\n\tend\nend\n\nfunction HU.ResetENV(object, name, From, Deepth)\n\tlocal visited = {}\n\tlocal function f(object, name)\n\t\tif not object or visited[object] then return end\n\t\tvisited[object] = true\n\t\tif type(object) == \"function\" then\n\t\t\tHU.DebugNofity(Deepth..\"HU.ResetENV\", name, \" from:\"..From)\n\t\t\txpcall(function () setfenv(object, HU.ENV) end, HU.FailNotify)\n\t\telseif type(object) == \"table\" then\n\t\t\tHU.DebugNofity(Deepth..\"HU.ResetENV\", name, \" from:\"..From)\n\t\t\tfor k, v in pairs(object) do\n\t\t\t\tf(k, tostring(k)..\"__key\", \" HU.ResetENV \", Deepth..\" \" )\n\t\t\t\tf(v, tostring(k), \" HU.ResetENV \", Deepth..\" \")\n\t\t\tend\n\t\tend\n\tend\n\tf(object, name)\nend\n\nfunction HU.UpdateUpvalue(OldFunction, NewFunction, Name, From, Deepth)\n\tHU.DebugNofity(Deepth..\"HU.UpdateUpvalue\", Name, \" from:\"..From)\n\tlocal OldUpvalueMap = {}\n\tlocal OldExistName = 
{}\n\tfor i = 1, math.huge do\n\t\tlocal name, value = debug.getupvalue(OldFunction, i)\n\t\tif not name then break end\n\t\tOldUpvalueMap[name] = value\n\t\tOldExistName[name] = true\n\tend\n\tfor i = 1, math.huge do\n\t\tlocal name, value = debug.getupvalue(NewFunction, i)\n\t\tif not name then break end\n\t\tif OldExistName[name] then\n\t\t\tlocal OldValue = OldUpvalueMap[name]\n\t\t\tif type(OldValue) ~= type(value) then\n\t\t\t\tdebug.setupvalue(NewFunction, i, OldValue)\n\t\t\telseif type(OldValue) == \"function\" then\n\t\t\t\tHU.UpdateOneFunction(OldValue, value, name, nil, \"HU.UpdateUpvalue\", Deepth..\" \")\n\t\t\telseif type(OldValue) == \"table\" then\n\t\t\t\tHU.UpdateAllFunction(OldValue, value, name, \"HU.UpdateUpvalue\", Deepth..\" \")\n\t\t\t\tdebug.setupvalue(NewFunction, i, OldValue)\n\t\t\telse\n\t\t\t\tdebug.setupvalue(NewFunction, i, OldValue)\n\t\t\tend\n\t\telse\n\t\t\tHU.ResetENV(value, name, \"HU.UpdateUpvalue\", Deepth..\" \")\n\t\tend\n\tend\nend \n\nfunction HU.UpdateOneFunction(OldObject, NewObject, FuncName, OldTable, From, Deepth)\n\tif HU.Protection[OldObject] or HU.Protection[NewObject] then return end\n\tif OldObject == NewObject then return end\n\tlocal signature = tostring(OldObject)..tostring(NewObject)\n\tif HU.VisitedSig[signature] then return end\n\tHU.VisitedSig[signature] = true\n\tHU.DebugNofity(Deepth..\"HU.UpdateOneFunction \"..FuncName..\" from:\"..From)\n\tif pcall(debug.setfenv, NewObject, getfenv(OldObject)) then\n\t\tHU.UpdateUpvalue(OldObject, NewObject, FuncName, \"HU.UpdateOneFunction\", Deepth..\" \")\n\t\tHU.ChangedFuncList[#HU.ChangedFuncList + 1] = {OldObject, NewObject, FuncName, OldTable}\n\tend\nend\n\nfunction HU.UpdateAllFunction(OldTable, NewTable, Name, From, Deepth)\n\tif HU.Protection[OldTable] or HU.Protection[NewTable] then return end\n\tlocal IsSame = getmetatable(OldTable) == getmetatable(NewTable)\n\tlocal IsSame = IsSame and OldTable == NewTable\n\tif IsSame == true then return end\n\tlocal 
signature = tostring(OldTable)..tostring(NewTable)\n\tif HU.VisitedSig[signature] then return end\n\tHU.VisitedSig[signature] = true\n\tHU.DebugNofity(Deepth..\"HU.UpdateAllFunction \"..Name..\" from:\"..From)\n\tfor ElementName, Element in pairs(NewTable) do\n\t\tlocal OldElement = OldTable[ElementName]\n\t\tif type(Element) == type(OldElement) then\n\t\t\tif type(Element) == \"function\" then\n\t\t\t\tHU.UpdateOneFunction(OldElement, Element, ElementName, OldTable, \"HU.UpdateAllFunction\", Deepth..\" \")\n\t\t\telseif type(Element) == \"table\" then\n\t\t\t\tHU.UpdateAllFunction(OldElement, Element, ElementName, \"HU.UpdateAllFunction\", Deepth..\" \")\n\t\t\tend\n\t\telseif OldElement == nil and type(Element) == \"function\" then\n\t\t\tif pcall(setfenv, Element, HU.ENV) then\n\t\t\t\tOldTable[ElementName] = Element\n\t\t\tend\n\t\tend\n\tend\n\tlocal OldMeta = debug.getmetatable(OldTable) \n\tlocal NewMeta = HU.MetaMap[NewTable]\n\tif type(OldMeta) == \"table\" and type(NewMeta) == \"table\" then\n\t\tHU.UpdateAllFunction(OldMeta, NewMeta, Name..\"'s Meta\", \"HU.UpdateAllFunction\", Deepth..\" \")\n\tend\nend\n\nfunction HU.SetFileLoader(InitFileMapFunc, LoadStringFunc)\n\tHU.InitFileMap = InitFileMapFunc\n\tHU.LoadStringFunc = LoadStringFunc\nend\n\nfunction HU.Init(UpdateListFile, RootPath, FailNotify, ENV, CallOriginFunctions)\n\tHU.UpdateListFile = UpdateListFile\n\tHU.HUMap = {}\n\tHU.FileMap = {}\n\tHU.NotifyFunc = FailNotify\n\tHU.OldCode = {}\n\tHU.ChangedFuncList = {}\n\tHU.VisitedSig = {}\n\tHU.FakeENV = nil\n\tHU.ENV = ENV or _G\n\tHU.LuaPathToSysPath = {}\n\tHU.RootPath = RootPath\n\tHU.FileMap = HU.InitFileMap(RootPath)\n\tHU.FakeT = HU.InitFakeTable()\n\tHU.CallOriginFunctions = CallOriginFunctions\n\tHU.InitProtection()\n\tHU.ALL = false\n\tHU.TryReloadFileCount = {}\nend\n\nfunction HU.Update()\n\tHU.AddFileFromHUList()\n\tfor LuaPath, SysPath in pairs(HU.HUMap) do\n\t\tHU.HotUpdateCode(LuaPath, SysPath)\n\tend\nend\n\nreturn 
HU\n"},"avg_line_length":{"kind":"number","value":28.8612334802,"string":"28.861233"},"max_line_length":{"kind":"number","value":108,"string":"108"},"alphanum_fraction":{"kind":"number","value":0.6844997329,"string":"0.6845"},"score":{"kind":"number","value":3.140625,"string":"3.140625"}}},{"rowIdx":992,"cells":{"hexsha":{"kind":"string","value":"bc5bf135a68e1a20f8283e82b82ae3e27ca64d3a"},"size":{"kind":"number","value":4800,"string":"4,800"},"ext":{"kind":"string","value":"asm"},"lang":{"kind":"string","value":"Assembly"},"max_stars_repo_path":{"kind":"string","value":"source/tokeniser/tokenise/tokenise.asm"},"max_stars_repo_name":{"kind":"string","value":"paulscottrobson/6502-basic"},"max_stars_repo_head_hexsha":{"kind":"string","value":"d4c360041bfa49427a506465e58bb0ef94beaa44"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":3,"string":"3"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2021-09-30T19:34:11.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2021-10-31T06:55:50.000Z"},"max_issues_repo_path":{"kind":"string","value":"source/tokeniser/tokenise/tokenise.asm"},"max_issues_repo_name":{"kind":"string","value":"paulscottrobson/6502-Basic"},"max_issues_repo_head_hexsha":{"kind":"string","value":"d4c360041bfa49427a506465e58bb0ef94beaa44"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"source/tokeniser/tokenise/tokenise.asm"},"max_forks_repo_name":{"kind":"string","value":"paulscottrobson/6502-Basic"},"max_forks_repo_head_hexsha":{"kind":"string","value":"d4c360041bfa49427a506465e58bb0ef94beaa44"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_forks_count":{"kind":"number","value":1,"string":"1"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2021-12-07T21:58:44.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2021-12-07T21:58:44.000Z"},"content":{"kind":"string","value":"; ************************************************************************************************\n; ************************************************************************************************\n;\n;\t\tName:\t\ttokenise.asm\n;\t\tPurpose:\tTokenise a string\n;\t\tCreated:\t8th March 2021\n;\t\tReviewed: \t16th March 2021\n;\t\tAuthor:\t\tPaul Robson (paul@robsons.org.uk)\n;\n; ************************************************************************************************\n; ************************************************************************************************\n\n\t\t.section storage\ntokenHeader: \t\t\t\t\t\t\t\t; bytes (all zero) to create a fake 'program line'\n\t\t.fill \t3\ntokenBuffer:\t\t\t\t\t\t\t\t; token buffer.\n\t\t.fill \t256\ntokenBufferIndex:\t\t\t\t\t\t\t; count of characters in buffer.\n\t\t.fill \t1\n\n\t\t.send \tstorage\n\t\t\t\n\t\t.section code\n\n; ************************************************************************************************\n;\n;\t\t\t\t\tTokenise string at (codePtr) into tokenising buffer\n;\t\t\t\t\t\t\t\tA != 0 if tokenising successful.\n;\n; ************************************************************************************************\n\nTokenise: ;; \n\t\tjsr \tTokeniseMakeASCIIZ \t\t\t; convert to ASCIIZ string.\nTokeniseASCIIZ: ;; \n\t\tjsr \tTokeniseFixCase \t\t\t; remove controls and lower case outside quotes.\n\t\tlda \t#0 \t\t\t\t\t\t\t; reset the token buffer index\n\t\tsta \ttokenBufferIndex\n\t\ttay \t\t\t\t\t\t\t\t; start pointer\n\t\tlda \t#$80 \t\t\t\t\t\t; empty token buffer ($80 ends it)\n\t\tsta \ttokenBuffer\n\t\t;\n\t\t;\t\tMain tokenisation loop\n\t\t;\n_TokLoop:\n\t\tlda \t(codePtr),y 
\t\t\t\t; get next character\n\t\tbeq \t_TokExit \t\t\t\t\t; if zero, then exit.\t\n\t\tiny \t\t\t\t\t\t\t\t; skip over spaces.\n\t\tcmp \t#\" \"\n\t\tbeq \t_TokLoop\n\t\tdey \t\t\t\t\t\t\t\t; point back to character.\n\t\tcmp \t#\"&\"\t\t\t\t\t\t; Hexadecimal constant.\n\t\tbeq \t_TokHexConst\n\t\tcmp \t#'\"'\t\t\t\t\t\t; Quoted String\n\t\tbeq \t_TokQString\n\t\tcmp \t#\"Z\"+1 \t\t\t\t\t\t; > 'Z' is punctuation\n\t\tbcs \t_TokPunctuation\n\t\tcmp \t#\"A\" \t\t\t\t\t\t; A..Z identifier\n\t\tbcs \t_TokIdentifier\n\t\tcmp \t#\"9\"+1\n\t\tbcs \t_TokPunctuation \t\t\t; between 9 and A exclusive, punctuation\n\t\tcmp \t#\"0\"\n\t\tbcc \t_TokPunctuation \t\t\t; < 0, punctuation.\n\t\tlda \t#10 \t\t\t\t\t\t; 0..9 constant in base 10.\n\t\tbne \t_TokConst\n\t\t;\n\t\t;\t\tHandle hexadecimal constant.\n\t\t;\n_TokHexConst:\t\t\n\t\tiny\t\t\t\t\t\t\t\t\t; consume token.\n\t\tlda \t#TKW_AMP \t\t\t\t\t; Write ampersand token out\n\t\tjsr \tTokenWrite\n\t\tlda \t#16 \n\t\t;\n\t\t;\t\tHandle constant in base A\n\t\t;\n_TokConst:\n\t\tjsr \tTokeniseInteger\n\t\tbcs \t_TokLoop\n\t\tbcc \t_TokFail\n\t\t;\n\t\t;\t\tQuoted string\n\t\t;\n_TokQString:\n\t\tjsr \tTokeniseString\t\t\t\t\n\t\tbcs \t_TokLoop\n\t\tbcc \t_TokFail\n\t\t;\n\t\t;\t\tPunctuation token.\n\t\t;\n_TokPunctuation:\n\t\tjsr \tTokenisePunctuation\n\t\tbcs \t_TokLoop\n\t\tbcc \t_TokFail\n\t\t;\n\t\t;\t\tIdentifier or text token\n\t\t;\n_TokIdentifier:\n\t\tjsr \tTokeniseIdentifier\n\t\tbcs \t_TokLoop\n\t\tbcc \t_TokFail\n\n_TokExit:\t\t\n\t\tlda \t#1\n\t\trts\n\n_TokFail:\n\t\tlda \t#0\n\t\trts\n\n; ************************************************************************************************\n;\n;\t\t\t\t\t\t\t\tWrite A to tokenise buffer\n;\n; ************************************************************************************************\n\nTokenWrite:\n\t\tsta \ttempShort \t\t\t\t\t; save XA\n\t\tpha\n\t\t.pshx\n\t\tlda \ttempShort\n\t\tldx \ttokenBufferIndex \t\t\t; geet index\n\t\tsta \ttokenBuffer,x 
\t\t\t\t; write byte to buffer\n\t\tlda \t#TOK_EOL \t\t\t\t\t; pre-emptively write EOL marker after\n\t\tsta \ttokenBuffer+1,x\n\t\tinc \ttokenBufferIndex \t\t\t; bump index\n\t\t.pulx\n\t\tpla\n\t\trts\n\n; ************************************************************************************************\n;\n;\t\t\t\t\t\t\tMake string at (codePtr) ASCIIZ\n;\n; ************************************************************************************************\n\nTokeniseMakeASCIIZ:\n\t\tldy \t#0\t\t\t\t\t\t\t; get length of string.\n\t\tlda \t(codePtr),y\n\t\ttay\n\t\tiny \t\t\t\t\t\t\t\t; +1, the NULL goes here.\n\t\tlda \t#0\t\t\t\t\t\t\t\n\t\tsta \t(codePtr),y \t\t\t\t; write the trailing NULL.\n\t\tinc \tcodePtr \t\t\t\t\t; bump the pointer.\n\t\tbne \t_TMKAExit\n\t\tinc \tcodePtr+1\n_TMKAExit:\t\t\n\t\trts\n\n; ************************************************************************************************\n;\n;\t\t\t\tMake upper case and remove controls for everything outside quotes.\n;\n; ************************************************************************************************\n\nTokeniseFixCase:\n\t\tldy \t#0 \t\t\t\t\t\t\t; position in buffer.\n\t\tldx \t#1 \t\t\t\t\t\t\t; bit 0 of this is 'in quotes'\n_TFCFlipQ:\t\t\n\t\ttxa\n\t\teor \t#1\n\t\ttax\n_TFCLoop:\n\t\tlda \t(codePtr),y \t\t\t\t; get character\n\t\tbeq \t_TFCExit \t\t\t\t\t; if zero exit.\n\t\tcmp \t#32 \t\t\t\t\t\t; if control\n\t\tbcc \t_TFCControl\t\t\n\t\tiny \t\t\t\t\t\t\t\t; preconsume\n\t\tcmp \t#'\"'\n\t\tbeq \t_TFCFlipQ\n\t\tcmp \t#\"a\"\t\t\t\t\t\t; check if L/C\n\t\tbcc \t_TFCLoop\n\t\tcmp \t#\"z\"+1\n\t\tbcs \t_TFCLoop\n\t\t;\n\t\tcpx \t#0 \t\t\t\t\t\t\t; in quotes, if so, leave alone.\n\t\tbne \t_TFCLoop\n\t\tdey\n\t\teor \t#\"A\"^\"a\"\t\t\t\t\t; make U/C\n_TFCWrite:\t\t\n\t\tsta \t(codePtr),y\n\t\tiny\n\t\tjmp \t_TFCLoop\n\t\t;\n_TFCControl:\n\t\tlda \t#\" \"\n\t\tbne \t_TFCWrite\n_TFCExit:\n\t\trts\n\n\t\t.send 
code\n"},"avg_line_length":{"kind":"number","value":25.1308900524,"string":"25.13089"},"max_line_length":{"kind":"number","value":98,"string":"98"},"alphanum_fraction":{"kind":"number","value":0.4625,"string":"0.4625"},"score":{"kind":"number","value":3.21875,"string":"3.21875"}}},{"rowIdx":993,"cells":{"hexsha":{"kind":"string","value":"40af8c8f3236438b38a2ec95b565b4efbf998b1b"},"size":{"kind":"number","value":4709,"string":"4,709"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"dsbox/utils/utils.py"},"max_stars_repo_name":{"kind":"string","value":"Pandinosaurus/dsbox"},"max_stars_repo_head_hexsha":{"kind":"string","value":"aea56049025ed7e6e66427f8636286f8be1b6e03"},"max_stars_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_stars_count":{"kind":"number","value":16,"string":"16"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2020-05-11T09:10:15.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2021-04-13T08:43:28.000Z"},"max_issues_repo_path":{"kind":"string","value":"dsbox/utils/utils.py"},"max_issues_repo_name":{"kind":"string","value":"Pandinosaurus/dsbox"},"max_issues_repo_head_hexsha":{"kind":"string","value":"aea56049025ed7e6e66427f8636286f8be1b6e03"},"max_issues_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_issues_count":{"kind":"number","value":1,"string":"1"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2020-12-03T20:02:32.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2020-12-03T20:02:32.000Z"},"max_forks_repo_path":{"kind":"string","value":"dsbox/utils/utils.py"},"max_forks_repo_name":{"kind":"string","value":"Pandinosaurus/dsbox"},"max_forks_repo_head_hexsha":{"kind":"string","value":"aea56049025ed7e6e66427f8636286f8be1b6e03"},"max_forks_repo_licenses":{"kind":"list 
like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_forks_count":{"kind":"number","value":1,"string":"1"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2020-05-11T17:22:20.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2020-05-11T17:22:20.000Z"},"content":{"kind":"string","value":"import gzip\nimport pickle\nimport networkx as nx\nimport matplotlib.pyplot as plt\nfrom smart_open import open\n\n\"\"\"\nSome util functions used to navigate into Airflow DAGs.\n\"\"\"\n\n\ndef breadth_first_search_task_list(task_root, task_list=[], mode='upstream'):\n sub_task_list = []\n queue = [task_root]\n\n while len(queue) > 0:\n task = queue.pop(0)\n sub_task_list.append(task)\n next_tasks = None\n if mode == 'upstream':\n next_tasks = task.upstream_list\n else:\n next_tasks = task.downstream_list\n for next_task in next_tasks:\n if next_task not in queue and next_task not in task_list and next_task not in sub_task_list:\n queue.append(next_task)\n\n return sub_task_list\n\n\ndef breadth_first_search_shell_list(task_roots):\n shell_task_list = [task_roots]\n done_tasks = set()\n queue = task_roots\n\n while len(queue) > 0:\n tasks = queue\n next_tasks = []\n for task in tasks:\n for next_task in task.downstream_list:\n if next_task not in done_tasks:\n next_tasks.append(next_task)\n done_tasks.add(next_task)\n\n if len(next_tasks) > 0:\n shell_task_list.append(next_tasks)\n queue = next_tasks\n\n return shell_task_list\n\n\ndef get_dag_roots(dag):\n roots = []\n for task in dag.tasks:\n if len(task.upstream_list) == 0:\n roots.append(task)\n\n return roots\n\n\ndef execute_dag(dag, verbose=False, mode='downstream'):\n task_list = []\n roots = dag.roots\n\n for root in roots:\n sub_task_list = breadth_first_search_task_list(root, task_list, mode=mode)\n task_list = sub_task_list + task_list\n\n for task in task_list:\n if verbose:\n print(dag.dag_id + '-' + str(task))\n if task.task_type == 'SubDagOperator':\n 
execute_dag(task.subdag, verbose=verbose)\n else:\n task.execute(dag.get_template_env())\n\n return task_list\n\n\ndef plot_dag(dag):\n fig, ax = plt.subplots(figsize=(15, 10), dpi=150)\n\n G = nx.DiGraph()\n color_list = []\n\n for task in dag.tasks:\n if len(task.downstream_list) > 0:\n for next_task in task.downstream_list:\n G.add_edge(task, next_task)\n\n for node in G.nodes():\n if len(node.ui_color) == 7:\n color_list.append(node.ui_color)\n else:\n last_code = node.ui_color[-1]\n color_list.append(str(node.ui_color).ljust(7, last_code))\n\n pos = nx.drawing.nx_agraph.graphviz_layout(G, prog='dot')\n nx.draw_networkx_nodes(G, pos, node_shape='D', node_color=color_list)\n nx.draw_networkx_edges(G, pos, edge_color='gray', alpha=0.8)\n\n nx.draw_networkx_labels(G, pos, font_size=5)\n\n ax.set_axis_off()\n plt.title(\"DAG preview\", fontsize=8)\n plt.show()\n\n\n\"\"\"\nSome utils function used to persist objects.\n\"\"\"\n\n\ndef pickle_compress(obj):\n return gzip.zlib.compress(pickle.dumps(obj))\n\n\ndef decompress_unpickle(obj_zp):\n return pickle.loads(gzip.zlib.decompress(obj_zp))\n\n\ndef write_object_file(file_path, obj):\n obj_pz = pickle_compress(obj)\n file_obj = open(file_path, 'wb')\n file_obj.write(obj_pz)\n file_obj.close()\n\n\ndef load_object_file(file_path):\n file_obj = open(file_path, 'rb')\n obj_pz = file_obj.read()\n obj = decompress_unpickle(obj_pz)\n return obj\n\n\n\"\"\"\nSome misc utils.\n\"\"\"\n\n\ndef pandas_downcast_numeric(df_to_downcast, float_type_to_downcast=(\"float64\", \"float32\"),\n int_type_to_downcast=(\"int64\", \"int32\")):\n float_cols = [c for c in df_to_downcast.columns if df_to_downcast[c].dtype == float_type_to_downcast[0]]\n int_cols = [c for c in df_to_downcast.columns if df_to_downcast[c].dtype == int_type_to_downcast[0]]\n df_to_downcast[float_cols] = df_to_downcast[float_cols].apply(lambda x: x.astype(float_type_to_downcast[1]))\n df_to_downcast[int_cols] = df_to_downcast[int_cols].apply(lambda x: 
x.astype(int_type_to_downcast[1]))\n\n\ndef format_dict_path_items(dictionary, replace_value):\n for k, v in dictionary.items():\n if isinstance(v, dict):\n dictionary[k] = format_dict_path_items(v, replace_value)\n else:\n if isinstance(v, list):\n formatted_list = []\n for list_item in v:\n if type(list_item) == str:\n list_item = list_item.format(replace_value)\n formatted_list.append(list_item)\n dictionary[k] = formatted_list\n else:\n if type(dictionary[k]) == str:\n dictionary[k] = dictionary[k].format(replace_value)\n return dictionary\n"},"avg_line_length":{"kind":"number","value":28.5393939394,"string":"28.539394"},"max_line_length":{"kind":"number","value":112,"string":"112"},"alphanum_fraction":{"kind":"number","value":0.6370779359,"string":"0.637078"},"score":{"kind":"number","value":3.140625,"string":"3.140625"}}},{"rowIdx":994,"cells":{"hexsha":{"kind":"string","value":"6191b56409da4f2d51541a3fc44a0ca10a01b642"},"size":{"kind":"number","value":1663,"string":"1,663"},"ext":{"kind":"string","value":"kt"},"lang":{"kind":"string","value":"Kotlin"},"max_stars_repo_path":{"kind":"string","value":"app/src/main/java/alektas/telecomapp/utils/L.kt"},"max_stars_repo_name":{"kind":"string","value":"Alektas/Telecom-System"},"max_stars_repo_head_hexsha":{"kind":"string","value":"1e83fbe6daa496f4c4f47d41f404d3e66fb200ff"},"max_stars_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"app/src/main/java/alektas/telecomapp/utils/L.kt"},"max_issues_repo_name":{"kind":"string","value":"Alektas/Telecom-System"},"max_issues_repo_head_hexsha":{"kind":"string","value":"1e83fbe6daa496f4c4f47d41f404d3e66fb200ff"},"max_issues_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n 
\"Apache-2.0\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"app/src/main/java/alektas/telecomapp/utils/L.kt"},"max_forks_repo_name":{"kind":"string","value":"Alektas/Telecom-System"},"max_forks_repo_head_hexsha":{"kind":"string","value":"1e83fbe6daa496f4c4f47d41f404d3e66fb200ff"},"max_forks_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"package alektas.telecomapp.utils\n\nimport alektas.telecomapp.BuildConfig\n\nclass L {\n companion object {\n private const val MEASURING_TIME_TAG = \"MEASURING_TIME\"\n private val startPoints = mutableListOf>()\n\n fun d(log: String) {\n if (BuildConfig.DEBUG) println(log)\n }\n\n fun d(where: Any, log: String) {\n if (BuildConfig.DEBUG) println(\"[${Thread.currentThread().name}][${where.javaClass.simpleName}]: $log\")\n }\n\n fun d(tag: String, log: String) {\n if (BuildConfig.DEBUG) println(\"[${Thread.currentThread().name}][$tag]: $log\")\n }\n\n /**\n * Start measuring time. Invoke this method at the moment where you want to start measuring.\n * To count time use [stop] method at right moment. It will log measured time to the Lagcat.\n */\n fun start(pointName: String = \"\") {\n d(MEASURING_TIME_TAG, \"Start measuring from |Point-$pointName|\")\n startPoints.add(pointName to System.nanoTime())\n }\n\n /**\n * Stop measuring time. 
Invoke this method at the moment where you want to stop measuring.\n * To start measuring use [start] method at right moment.\n * This method will log measured time to the Lagcat.\n */\n fun stop() {\n val endTime = System.nanoTime()\n d(MEASURING_TIME_TAG, \"*** Stop measuring ***\")\n\n startPoints.forEachIndexed { i, p ->\n d(MEASURING_TIME_TAG, \"Time from |Point-$i:${p.first}| |${(endTime - p.second) * 1.0e-9}| seconds\")\n }\n startPoints.clear()\n }\n }\n}"},"avg_line_length":{"kind":"number","value":36.152173913,"string":"36.152174"},"max_line_length":{"kind":"number","value":115,"string":"115"},"alphanum_fraction":{"kind":"number","value":0.583884546,"string":"0.583885"},"score":{"kind":"number","value":3.28125,"string":"3.28125"}}},{"rowIdx":995,"cells":{"hexsha":{"kind":"string","value":"252a7e7fbadd13a20e101809c6dfe0bf95856270"},"size":{"kind":"number","value":2514,"string":"2,514"},"ext":{"kind":"string","value":"kt"},"lang":{"kind":"string","value":"Kotlin"},"max_stars_repo_path":{"kind":"string","value":"src/main/kotlin/lain/Lexer.kt"},"max_stars_repo_name":{"kind":"string","value":"liminalitythree/bakadesu"},"max_stars_repo_head_hexsha":{"kind":"string","value":"6d5fbcc29da148fd72ecb58b164fc5c845267e5c"},"max_stars_repo_licenses":{"kind":"list like","value":["CC0-1.0"],"string":"[\n \"CC0-1.0\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"src/main/kotlin/lain/Lexer.kt"},"max_issues_repo_name":{"kind":"string","value":"liminalitythree/bakadesu"},"max_issues_repo_head_hexsha":{"kind":"string","value":"6d5fbcc29da148fd72ecb58b164fc5c845267e5c"},"max_issues_repo_licenses":{"kind":"list like","value":["CC0-1.0"],"string":"[\n 
\"CC0-1.0\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"src/main/kotlin/lain/Lexer.kt"},"max_forks_repo_name":{"kind":"string","value":"liminalitythree/bakadesu"},"max_forks_repo_head_hexsha":{"kind":"string","value":"6d5fbcc29da148fd72ecb58b164fc5c845267e5c"},"max_forks_repo_licenses":{"kind":"list like","value":["CC0-1.0"],"string":"[\n \"CC0-1.0\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"package lain\n\nimport java.time.Clock\n\nclass Lexer(val source: List) {\n private val tokens = mutableListOf()\n private var start = 0\n private var current = 0\n\n fun scanTokens(): List {\n while (!isAtEnd()) {\n // we are at the beginning of the next lexeme\n start = current\n scanToken()\n }\n\n tokens.add(Token(TokenType.EOF, \"\", null))\n\n return tokens.toList()\n }\n\n // scan one token maybe\n private fun scanToken() {\n val c:String = advance()\n when (c) {\n \"\\uD83E\\uDD1C\" -> addToken(TokenType.LEFT_PAREN)\n \"\\uD83E\\uDD1B\" -> addToken(TokenType.RIGHT_PAREN)\n \"\\uD83D\\uDC49\" -> addToken(TokenType.LEFT_BRACKET)\n \"\\uD83D\\uDC48\" -> addToken(TokenType.RIGHT_BRACKET)\n \"〰\" -> addToken(TokenType.COMMA)\n\n else -> {\n if (isDigit(c)) number()\n else identifier(c)\n }\n }\n\n }\n\n // turns [0-9]+ into a token and adds it to token list\n private fun number() {\n while (ClockNumbers.isClock(peek())) advance()\n\n addToken(TokenType.NUMBER, ClockNumbers.parseClocks(source.subList(start, current)))\n }\n\n // identifier is just 1 emoji\n private fun identifier(c: String) {\n addToken(TokenType.IDENTIFIER, c)\n }\n\n // consumes current character if matches expected, else returns false\n private fun match(expected: String): Boolean {\n if (isAtEnd()) 
return false\n if (source[current] != expected) return false\n\n current++\n return true\n }\n\n // returns character without consuming it\n private fun peek(): String {\n if (isAtEnd()) return \"\\u0000\"\n return source[current]\n }\n\n // returns true if c is [0-9]\n private fun isDigit(c: String): Boolean {\n return ClockNumbers.isClock(c)\n }\n\n // returns true if at end of string source, false if not\n private fun isAtEnd(): Boolean {\n return current >= source.size\n }\n\n // advance the pointer and return the new current char\n private fun advance(): String {\n current++\n return source[current - 1]\n }\n\n // adds a token to the list of tokens\n private fun addToken(type: TokenType, literal: Any?) {\n val text = source.subList(start, current).reduce { a, e -> a.plus(e) }\n tokens.add(Token(type, text, literal))\n }\n\n private fun addToken(type: TokenType) {\n addToken(type, null)\n }\n}"},"avg_line_length":{"kind":"number","value":27.3260869565,"string":"27.326087"},"max_line_length":{"kind":"number","value":92,"string":"92"},"alphanum_fraction":{"kind":"number","value":0.5859188544,"string":"0.585919"},"score":{"kind":"number","value":3.28125,"string":"3.28125"}}},{"rowIdx":996,"cells":{"hexsha":{"kind":"string","value":"64e001a125c823bf162f4795a42b3d093c67a517"},"size":{"kind":"number","value":1868,"string":"1,868"},"ext":{"kind":"string","value":"rs"},"lang":{"kind":"string","value":"Rust"},"max_stars_repo_path":{"kind":"string","value":"basic/src/main.rs"},"max_stars_repo_name":{"kind":"string","value":"honkkki/rust-practice"},"max_stars_repo_head_hexsha":{"kind":"string","value":"73a0715c25ffb6ae10885cde092748d0effc5457"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"basic/src/main.rs"},"max_issues_repo_name":{"kind":"string","value":"honkkki/rust-practice"},"max_issues_repo_head_hexsha":{"kind":"string","value":"73a0715c25ffb6ae10885cde092748d0effc5457"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"basic/src/main.rs"},"max_forks_repo_name":{"kind":"string","value":"honkkki/rust-practice"},"max_forks_repo_head_hexsha":{"kind":"string","value":"73a0715c25ffb6ae10885cde092748d0effc5457"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"const MAX_POINTS: u32 = 1000;\n\n\nfn main() {\n let i: i64 = 1;\n println!(\"{}\", i);\n let f = 1.1; // default f64\n let a = 1; // default i32\n let cc = '\\u{1F601}';\n println!(\"{}\", f);\n println!(\"{}\", a);\n println!(\"{}\", cc);\n println!(\"{}\", MAX_POINTS);\n\n // while\n let max = 10;\n let mut num = 0;\n\n while num * num < max {\n println!(\"{0} * {0} = {1}\", num, num * num);\n num += 1;\n }\n\n let mut x = 1;\n const MAX_NUM: i8 = 10;\n while x < MAX_NUM {\n // 允许重复定义相同变量名\n let mut y = x;\n while y < MAX_NUM {\n print!(\"{}*{}={} \", x, y, x * y);\n y += 1;\n }\n println!();\n x += 1;\n }\n\n let mut num = 0;\n // loop无限循环\n loop {\n println!(\"{0} * {0} = {1}\", num, num * num);\n num += 1;\n if num * num > max {\n break;\n }\n }\n\n let t: bool = true;\n println!(\"{}\", t);\n 
println!(\"--------------------------\");\n\n // tuple\n let tup: (i32, i64) = (500, 1000);\n println!(\"{}, {}\", tup.0, tup.1);\n let (x, y) = tup;\n println!(\"{}, {}\", x, y);\n println!(\"--------------------------\");\n\n // array\n let arr: [i32; 3] = [1, 2, 3];\n println!(\"{}\", arr[0]);\n for elem in arr {\n println!(\"arr: {}\", elem)\n }\n println!(\"--------------------------\");\n\n // 控制流\n let y = {\n let x = 1;\n x + 1\n };\n\n println!(\"{}\", y);\n let num = get_num();\n println!(\"{}\", num);\n\n let condition = true;\n let num = if condition {1} else {0};\n println!(\"{}\", num);\n println!(\"--------------------------\");\n\n range_num();\n\n let mut str = \"hello\";\n println!(\"{}\", str);\n str = \"rust\";\n println!(\"{}\", str);\n\n}\n\nfn get_num() -> i32 {\n 6\n}\n\nfn range_num() {\n for num in 1..6 { // 1-5\n println!(\"{}\", num)\n }\n}\n"},"avg_line_length":{"kind":"number","value":18.8686868687,"string":"18.868687"},"max_line_length":{"kind":"number","value":52,"string":"52"},"alphanum_fraction":{"kind":"number","value":0.3677730193,"string":"0.367773"},"score":{"kind":"number","value":3.234375,"string":"3.234375"}}},{"rowIdx":997,"cells":{"hexsha":{"kind":"string","value":"3e817e4997df88adffabb8da5e0a76af8c34804c"},"size":{"kind":"number","value":8985,"string":"8,985"},"ext":{"kind":"string","value":"h"},"lang":{"kind":"string","value":"C"},"max_stars_repo_path":{"kind":"string","value":"CommandLineParser.h"},"max_stars_repo_name":{"kind":"string","value":"malord/prime"},"max_stars_repo_head_hexsha":{"kind":"string","value":"f0e8be99b7dcd482708b9c928322bc07a3128506"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"CommandLineParser.h"},"max_issues_repo_name":{"kind":"string","value":"malord/prime"},"max_issues_repo_head_hexsha":{"kind":"string","value":"f0e8be99b7dcd482708b9c928322bc07a3128506"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"CommandLineParser.h"},"max_forks_repo_name":{"kind":"string","value":"malord/prime"},"max_forks_repo_head_hexsha":{"kind":"string","value":"f0e8be99b7dcd482708b9c928322bc07a3128506"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"// Copyright 2000-2021 Mark H. P. Lord\n\n#ifndef PRIME_COMMANDLINEPARSER_H\n#define PRIME_COMMANDLINEPARSER_H\n\n#include \"Config.h\"\n\nnamespace Prime {\n\nclass Log;\n\n/// A command line reader that supports short option (-v) and long options (--verbose), combined short options\n/// (e.g., -v -n -r can be shortened to -vnr) and -- to mark the end of the options. In option names containing\n/// a '-', the '-' is optional (e.g., --nocolour will match \"no-colour\"). A flag is an option that may be followed\n/// by a '-' to disable it, e.g., -G- or --colours-, or a + to enable it. Long name flags can also be specified\n/// with a \"no-\" or \"disable-\" prefix to negate them, e.g., --no-colours has the same result as --colours-. 
It is\n/// also possible to use long options by default, e.g., -trace instead of --trace, by using\n/// setImplicitLongOptionsEnabled(), which defaults to false. Values are options which expect one or more\n/// parameters, e.g., --dest ~/Desktop.\nclass PRIME_PUBLIC CommandLineParser {\npublic:\n CommandLineParser()\n {\n construct();\n reset();\n }\n\n explicit CommandLineParser(char** argv)\n {\n construct();\n init(argv);\n }\n\n virtual ~CommandLineParser();\n\n /// Set the arguments to be read. The last element argv must be null (i.e., like the argument array which\n /// is passed to main()). Note that returned strings are commonly pointers in to the strings in this array,\n /// so the array must remain valid until the command line has been read.\n bool init(char** argv);\n\n class PRIME_PUBLIC ResponseFileLoader {\n public:\n virtual ~ResponseFileLoader() { }\n\n /// Update ***argv to point to a new list of arguments to be parsed. The file name of the response file\n /// comes from path, which may itself come from another response file.\n virtual void loadResponseFile(const char* path, char*** argv, Log* log) = 0;\n };\n\n void setResponseFileLoader(char responseFileChar, ResponseFileLoader* responseFileLoader)\n {\n _responseFileChar = responseFileChar;\n _responseFileLoader = responseFileLoader;\n }\n\n void reset();\n\n /// If true, -trace will be considered to match --trace, rather than -t -r -a -c -e. Defaults to false. For\n /// this to work, the application must check for the long options before the short options.\n bool getImplicitLongOptionsEnabled() const { return _allowImplicitLongOptions; }\n\n void setImplicitLongOptionsEnabled(bool enabled) { _allowImplicitLongOptions = enabled; }\n\n /// Parse the next token from the argument list. 
Returns false if there are no more arguments to read.\n bool next();\n\n /// Returns true if a basic, not-an-option argument was read.\n bool isFilename() const { return !_state.opt; }\n\n /// If a file name was read, returns it.\n const char* getFilename() const { return _state.opt ? NULL : _state.filename; }\n\n /// Returns true if a \"--\" argument has been encountered, signifying that all remaining arguments are files.\n bool hasOptionTerminatorBeenRead() const { return _state.noMoreOptions; }\n\n /// Returns true if an option, value or flag was read.\n bool isOption() const { return _state.opt ? true : false; }\n\n /// Returns the option that hasn't been read (for use when reporting errors, don't compare this, use\n /// readOption(), readFlag() or readValue()).\n const char* getOption() const { return _state.opt; }\n\n /// Returns the last option that was successfully read (with readOption(), readFlag() or readValue()).\n const char* getCurrentOption() const { return _state.currentOption; }\n\n /// Return the option or filename that was parsed.\n const char* getOptionOrFilename() const { return _state.opt ? _state.opt : _state.filename; }\n\n /// Returns true if the next argument is one of the | separated words. For example, for an archive utility\n /// you might ask cl.readCommand(\"add|a\"), which would match \"add\", \"a\", \"--add\" and \"-a\" (and \"-add\" if\n /// implicit long options are enabled).\n bool readCommand(const char* words);\n\n /// Returns true if the specified option was read. e.g., readOption(\"verbose|v\")\n bool readOption(const char* option)\n {\n return readOptionOrValueOrFlag(option, NULL, false);\n }\n\n /// If the specified option was read, returns true and sets *flag to true or false depending on whether the\n /// option was followed by a + or -, respectively. 
So -f or -f+ would set *flag to true, -f- to false.\n /// If flag is NULL, the result is stored internally and can be read by calling getFlag().\n bool readFlag(const char* option, bool* flag = NULL)\n {\n return readOptionOrValueOrFlag(option, flag ? flag : &_state.flag, false);\n }\n\n /// Returns the flag read by readFlag() (or readColourFlag()) if they were called with a NULL flag pointer.\n bool getFlag() const { return _state.flag; }\n\n /// Returns true if the specified option, which should have a value, was read. After calling this you should\n /// call one of the fetch*() methods (fetchString(), fetchInt() etc.) to fetch the option's value. A value\n /// differs from a plain option in that it may be followed by an '=' sign, e.g., `--path=/bin`, which could\n /// also be supplied as `--path /bin` and `--path= /bin`. An option can have multiple values, e.g.,\n /// `--offset 160 120`, and the fetch*() methods should be called for each.\n bool readValue(const char* option)\n {\n return readOptionOrValueOrFlag(option, NULL, true);\n }\n\n /// Fetch a string from the command line. Exits if there are no more arguments.\n const char* fetchString();\n\n /// Fetch an intmax_t from the command line. Exits if there are no more arguments or the argument is invalid.\n intmax_t fetchIntmax();\n\n /// Fetch an int from the command line. Exits if there are no more arguments or the argument is invalid.\n int fetchInt();\n\n /// Fetch an intmax_t from the command line. If the next argument isn't a valid number, returns the default\n /// value and leaves the next argument to be read.\n intmax_t fetchOptionalIntmax(intmax_t defaultValue);\n\n /// Fetch an int from the command line. If the next argument isn't a valid number, returns the default\n /// value and leaves the next argument to be read.\n int fetchOptionalInt(int defaultValue);\n\n /// Fetch a float from the command line. 
Exits if there are no more arguments or the argument is invalid.\n float fetchFloat();\n\n /// Fetch a double from the command line. Exits if there are no more arguments or the argument is invalid.\n double fetchDouble();\n\n /// Fetch the next argument and convert the result to a bool. If there's no argument, or the next argument\n /// begins with the switch character (- or /) then true is assumed, but if there is an argument then yes,\n /// true, on, 1 and + are all considered true and no, false, off, 0 and - are all considered false. So,\n /// -f 1, -f+, -f and even -f YES are all considered true. -f -x will be considered true, and -x will\n /// correctly be read next.\n bool fetchBool();\n\n /// Reads the standard colour/no colour flags (colour|color|colours|colors|G).\n bool readColourFlag(bool* flag = NULL);\n\n void skipLongOption();\n\n void skipShortOption();\n\n /// Skip an option's value. If unlessOption is true, if the next argument begins with a - then treat it as\n /// an option and don't skip it.\n void skipValue(bool unlessOption = false)\n {\n (void)fetchArgument(unlessOption);\n }\n\n // You can overload exit(ExitReason) to change how these are handled.\n\n void exitDueToMissingArgument() { exit(ExitReasonMissingArgument); }\n void exitDueToInvalidArgument() { exit(ExitReasonInvalidArgument); }\n void exitDueToUnknownOption() { exit(ExitReasonUnknownOption); }\n void exitDueToUnexpectedArgument() { exit(ExitReasonUnexpectedArgument); }\n void exitDueToUnknownOptionOrUnexpectedArgument() { exit(ExitReasonUnknownOptionOrUnexpectedArgument); }\n\nprotected:\n enum ExitReason {\n ExitReasonMissingArgument,\n ExitReasonInvalidArgument,\n ExitReasonUnknownOption,\n ExitReasonUnexpectedArgument,\n ExitReasonUnknownOptionOrUnexpectedArgument,\n };\n\n virtual void exit(ExitReason reason);\n\nprivate:\n void construct();\n\n bool readOptionOrValueOrFlag(const char* option, bool* flag, bool hasParam);\n\n static bool equalLongOptionName(const char* have, 
const char* want, const char*& ptr, bool hasParam, bool hasFlag);\n\n const char* fetchArgument(bool optional);\n\n struct State {\n char** argv;\n const char* opt;\n const char* filename;\n\n bool noMoreOptions;\n bool isLongOption;\n\n bool flag;\n char currentOption[64];\n } _state;\n\n bool _allowImplicitLongOptions;\n\n int _responseFileChar;\n ResponseFileLoader* _responseFileLoader;\n\n PRIME_UNCOPYABLE(CommandLineParser);\n};\n}\n\n#endif\n"},"avg_line_length":{"kind":"number","value":41.7906976744,"string":"41.790698"},"max_line_length":{"kind":"number","value":119,"string":"119"},"alphanum_fraction":{"kind":"number","value":0.6894824708,"string":"0.689482"},"score":{"kind":"number","value":3.0625,"string":"3.0625"}}},{"rowIdx":998,"cells":{"hexsha":{"kind":"string","value":"fb2b96accbc2fe2b0ca62463a37a0b609aa455d7"},"size":{"kind":"number","value":3120,"string":"3,120"},"ext":{"kind":"string","value":"c"},"lang":{"kind":"string","value":"C"},"max_stars_repo_path":{"kind":"string","value":"LinkedListIterator/linked_list_iterator_main.c"},"max_stars_repo_name":{"kind":"string","value":"Nam-H-Nguyen/DataStructure"},"max_stars_repo_head_hexsha":{"kind":"string","value":"61c86abf47171aecc66ba39e33364d12b12f94c1"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":1,"string":"1"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2019-07-05T16:40:12.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2019-07-05T16:40:12.000Z"},"max_issues_repo_path":{"kind":"string","value":"LinkedListIterator/linked_list_iterator_main.c"},"max_issues_repo_name":{"kind":"string","value":"Nam-H-Nguyen/DataStructure"},"max_issues_repo_head_hexsha":{"kind":"string","value":"61c86abf47171aecc66ba39e33364d12b12f94c1"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"LinkedListIterator/linked_list_iterator_main.c"},"max_forks_repo_name":{"kind":"string","value":"Nam-H-Nguyen/DataStructure"},"max_forks_repo_head_hexsha":{"kind":"string","value":"61c86abf47171aecc66ba39e33364d12b12f94c1"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"/*\n * @file linked_list_iterator_main.c\n *\n * This file exercises the singly linked list and singly linked list\n * iterators functions.\n *\n * @since Oct 24, 2018\n * @author: Nam H. Nguyen\n */\n#include \n#include \n#include \n#include \"linked_list_iterator.h\"\n\n\n/**\n * Test LinkedListIterator functions\n */\nvoid testLinkedListIterator(void) {\n\tprintf(\"\\nstart testLinkedListIterator\\n\");\n\n\tprintf(\"initial list\\n\");\n\tLinkedList *list = newLinkedList(5);\n\tprintLinkedList(list);\n\tprintf(\"list size: %ld\\n\", linkedListSize(list));\n\n\t// add 5 nodes to the list\n\tprintf(\"\\nAdding 5 values to list\\n\");\n\taddLastLinkedListVal(list, \"A\");\n\taddLastLinkedListVal(list, \"B\");\n\taddLastLinkedListVal(list, \"C\");\n\taddLastLinkedListVal(list, \"D\");\n\taddLastLinkedListVal(list, \"E\");\n\tprintLinkedList(list);\n\n\tprintf(\"list size: %ld\\n\", linkedListSize(list));\n\n\tprintf(\"\\nTraversing list forward with iterator\\n\");\n\tLinkedListIterator *itr = newLinkedListIterator(list);\n\tprintf(\"iterator count: %ld\\n\", getLinkedListIteratorCount(itr));\n\tprintf(\"iterator avail: %ld\\n\", getLinkedListIteratorAvailable(itr));\n\twhile (hasNextLinkedListIteratorVal(itr)) {\n\t\tconst char *val;\n\t\tif 
(getNextLinkedListIteratorVal(itr, &val)) {\n\t\t\tprintf(\"iterator next: \\\"%s\\\"\\n\", val);\n\t\t} else {\n\t\t\tprintf(\"iterator next: unavailable\\n\");\n\t\t}\n\t}\n\tprintf(\"iterator count: %ld\\n\", getLinkedListIteratorCount(itr));\n\tprintf(\"iterator avail: %ld\\n\", getLinkedListIteratorAvailable(itr));\n\n\tprintf(\"\\nMoving back one from end with iterator\\n\");\n\tprintf(\"iterator has prev: %s\\n\", hasPrevLinkedListIteratorVal(itr) ? \"true\" : \"false\");\n\tconst char *val;\n\tif (getPrevLinkedListIteratorVal(itr, &val)) {\n\t\tprintf(\"iterator prev: \\\"%s\\\"\\n\", val);\n\t} else {\n\t\tprintf(\"iterator prev: unavailable\\n\");\n\t}\n\tprintf(\"iterator count: %ld\\n\", getLinkedListIteratorCount(itr));\n\tprintf(\"iterator avail: %ld\\n\", getLinkedListIteratorAvailable(itr));\n\n\tprintf(\"\\nMoving forward one to end with iterator\\n\");\n\tif (getNextLinkedListIteratorVal(itr, &val)) {\n\t\tprintf(\"iterator next: \\\"%s\\\"\\n\", val);\n\t} else {\n\t\tprintf(\"iterator next: unavailable\\n\");\n\t}\n\tprintf(\"iterator count: %ld\\n\", getLinkedListIteratorCount(itr));\n\tprintf(\"iterator avail: %ld\\n\", getLinkedListIteratorAvailable(itr));\n\n\tprintf(\"\\nResetting iterator\\n\");\n\tresetLinkedListIterator(itr);\n\tprintf(\"iterator has next: %s\\n\", hasNextLinkedListIteratorVal(itr) ? \"true\" : \"false\");\n\tprintf(\"iterator count: %ld\\n\", getLinkedListIteratorCount(itr));\n\tprintf(\"iterator avail: %ld\\n\", getLinkedListIteratorAvailable(itr));\n\n\tprintf(\"\\nTrying to move back one from beginning with iterator\\n\");\n\tprintf(\"iterator has prev: %s\\n\", hasPrevLinkedListIteratorVal(itr) ? 
\"true\" : \"false\");\n\tif (getPrevLinkedListIteratorVal(itr, &val)) {\n\t\tprintf(\"iterator prev: \\\"%s\\\"\\n\", val);\n\t} else {\n\t\tprintf(\"iterator prev: unavailable\\n\");\n\t}\n\n\tprintf(\"\\nDeleting iterator and linked list\\n\");\n\tdeleteLinkedListIterator(itr);\n\tdeleteLinkedList(list);\n\n\tprintf(\"end testLinkedListIterator\\n\");\n}\n\n/**\n * Test functions.\n */\nint main(void) {\n\ttestLinkedListIterator();\n\n\tprintf(\"program exiting\\n\");\n}\n"},"avg_line_length":{"kind":"number","value":30.5882352941,"string":"30.588235"},"max_line_length":{"kind":"number","value":89,"string":"89"},"alphanum_fraction":{"kind":"number","value":0.7141025641,"string":"0.714103"},"score":{"kind":"number","value":3.15625,"string":"3.15625"}}},{"rowIdx":999,"cells":{"hexsha":{"kind":"string","value":"dc12fe3a72634b5363c218ab0b3d9830282fc7ea"},"size":{"kind":"number","value":6959,"string":"6,959"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"causalinference/core/propensity.py"},"max_stars_repo_name":{"kind":"string","value":"youngminju-phd/Causalinference"},"max_stars_repo_head_hexsha":{"kind":"string","value":"630e8fb195754a720da41791b725d3dadabfb257"},"max_stars_repo_licenses":{"kind":"list like","value":["BSD-3-Clause"],"string":"[\n \"BSD-3-Clause\"\n]"},"max_stars_count":{"kind":"number","value":392,"string":"392"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2016-06-08T19:43:08.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2022-03-29T14:18:07.000Z"},"max_issues_repo_path":{"kind":"string","value":"causalinference/core/propensity.py"},"max_issues_repo_name":{"kind":"string","value":"youngminju-phd/Causalinference"},"max_issues_repo_head_hexsha":{"kind":"string","value":"630e8fb195754a720da41791b725d3dadabfb257"},"max_issues_repo_licenses":{"kind":"list like","value":["BSD-3-Clause"],"string":"[\n 
\"BSD-3-Clause\"\n]"},"max_issues_count":{"kind":"number","value":12,"string":"12"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2017-04-28T20:25:54.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2021-11-14T10:25:40.000Z"},"max_forks_repo_path":{"kind":"string","value":"causalinference/core/propensity.py"},"max_forks_repo_name":{"kind":"string","value":"youngminju-phd/Causalinference"},"max_forks_repo_head_hexsha":{"kind":"string","value":"630e8fb195754a720da41791b725d3dadabfb257"},"max_forks_repo_licenses":{"kind":"list like","value":["BSD-3-Clause"],"string":"[\n \"BSD-3-Clause\"\n]"},"max_forks_count":{"kind":"number","value":82,"string":"82"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2016-06-08T19:43:11.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2022-03-28T13:36:28.000Z"},"content":{"kind":"string","value":"from __future__ import division\nimport numpy as np\nfrom scipy.optimize import fmin_bfgs\nfrom itertools import combinations_with_replacement\n\nimport causalinference.utils.tools as tools\nfrom .data import Dict\n\n\nclass Propensity(Dict):\n\n\t\"\"\"\n\tDictionary-like class containing propensity score data.\n\t\n\tPropensity score related data includes estimated logistic regression\n\tcoefficients, maximized log-likelihood, predicted propensity scores,\n\tand lists of the linear and quadratic terms that are included in the\n\tlogistic regression.\n\t\"\"\"\n\n\tdef __init__(self, data, lin, qua):\n\n\t\tZ = form_matrix(data['X'], lin, qua)\n\t\tZ_c, Z_t = Z[data['controls']], Z[data['treated']]\n\t\tbeta = calc_coef(Z_c, Z_t)\n\n\t\tself._data = data\n\t\tself._dict = dict()\n\t\tself._dict['lin'], self._dict['qua'] = lin, qua\n\t\tself._dict['coef'] = beta\n\t\tself._dict['loglike'] = -neg_loglike(beta, Z_c, Z_t)\n\t\tself._dict['fitted'] = sigmoid(Z.dot(beta))\n\t\tself._dict['se'] = calc_se(Z, self._dict['fitted'])\n\n\n\tdef 
__str__(self):\n\n\t\ttable_width = 80\n\n\t\tcoefs = self._dict['coef']\n\t\tses = self._dict['se']\n\n\t\toutput = '\\n'\n\t\toutput += 'Estimated Parameters of Propensity Score\\n\\n'\n\n\t\tentries1 = ['', 'Coef.', 'S.e.', 'z', 'P>|z|',\n\t\t '[95% Conf. int.]']\n\t\tentry_types1 = ['string']*6\n\t\tcol_spans1 = [1]*5 + [2]\n\t\toutput += tools.add_row(entries1, entry_types1,\n\t\t col_spans1, table_width)\n\t\toutput += tools.add_line(table_width)\n\n\t\tentries2 = tools.gen_reg_entries('Intercept', coefs[0], ses[0])\n\t\tentry_types2 = ['string'] + ['float']*6\n\t\tcol_spans2 = [1]*7\n\t\toutput += tools.add_row(entries2, entry_types2,\n\t\t col_spans2, table_width)\n\n\t\tlin = self._dict['lin']\n\t\tfor (lin_term, coef, se) in zip(lin, coefs[1:], ses[1:]):\n\t\t\tentries3 = tools.gen_reg_entries('X'+str(lin_term),\n\t\t\t coef, se)\n\t\t\toutput += tools.add_row(entries3, entry_types2,\n\t\t\t col_spans2, table_width)\n\n\t\tqua = self._dict['qua']\n\t\tlin_num = len(lin)+1 # including intercept\n\t\tfor (qua_term, coef, se) in zip(qua, coefs[lin_num:],\n\t\t ses[lin_num:]):\n\t\t\tname = 'X'+str(qua_term[0])+'*X'+str(qua_term[1])\n\t\t\tentries4 = tools.gen_reg_entries(name, coef, se)\n\t\t\toutput += tools.add_row(entries4, entry_types2,\n\t\t\t col_spans2, table_width)\n\n\t\treturn output\n\n\nclass PropensitySelect(Propensity):\n\n\t\"\"\"\n\tDictionary-like class containing propensity score data.\n\t\n\tPropensity score related data includes estimated logistic regression\n\tcoefficients, maximized log-likelihood, predicted propensity scores,\n\tand lists of the linear and quadratic terms that are included in the\n\tlogistic regression.\n\t\"\"\"\n\n\tdef __init__(self, data, lin_B, C_lin, C_qua):\n\n\t\tX_c, X_t = data['X_c'], data['X_t']\n\t\tlin = select_lin_terms(X_c, X_t, lin_B, C_lin)\n\t\tqua = select_qua_terms(X_c, X_t, lin, C_qua)\n\n\t\tsuper(PropensitySelect, self).__init__(data, lin, qua)\n\n\ndef form_matrix(X, lin, qua):\n\n\tN, K = 
X.shape\n\n\tmat = np.empty((N, 1+len(lin)+len(qua)))\n\tmat[:, 0] = 1 # constant term\n\n\tcurrent_col = 1\n\tif lin:\n\t\tmat[:, current_col:current_col+len(lin)] = X[:, lin]\n\t\tcurrent_col += len(lin)\n\tfor term in qua: # qua is a list of tuples of column numbers\n\t\tmat[:, current_col] = X[:, term[0]] * X[:, term[1]]\n\t\tcurrent_col += 1\n\n\treturn mat\n\n\ndef sigmoid(x, top_threshold=100, bottom_threshold=-100):\n\n\thigh_x = (x >= top_threshold)\n\tlow_x = (x <= bottom_threshold)\n\tmid_x = ~(high_x | low_x)\n\n\tvalues = np.empty(x.shape[0])\n\tvalues[high_x] = 1.0\n\tvalues[low_x] = 0.0\n\tvalues[mid_x] = 1/(1+np.exp(-x[mid_x]))\n\n\treturn values\n\n\ndef log1exp(x, top_threshold=100, bottom_threshold=-100):\n\n\thigh_x = (x >= top_threshold)\n\tlow_x = (x <= bottom_threshold)\n\tmid_x = ~(high_x | low_x)\n\n\tvalues = np.empty(x.shape[0])\n\tvalues[high_x] = 0.0\n\tvalues[low_x] = -x[low_x]\n\tvalues[mid_x] = np.log(1 + np.exp(-x[mid_x]))\n\n\treturn values\n\n\ndef neg_loglike(beta, X_c, X_t):\n\n\treturn log1exp(X_t.dot(beta)).sum() + log1exp(-X_c.dot(beta)).sum()\n\n\ndef neg_gradient(beta, X_c, X_t):\n\n\treturn (sigmoid(X_c.dot(beta))*X_c.T).sum(1) - \\\n\t (sigmoid(-X_t.dot(beta))*X_t.T).sum(1)\n\n\ndef calc_coef(X_c, X_t):\n\n\tK = X_c.shape[1]\n\n\tneg_ll = lambda b: neg_loglike(b, X_c, X_t)\n\tneg_grad = lambda b: neg_gradient(b, X_c, X_t)\n\n\tlogit = fmin_bfgs(neg_ll, np.zeros(K), neg_grad,\n\t\t\t full_output=True, disp=False)\n\n\treturn logit[0]\n\n\ndef calc_se(X, phat):\n\n\tH = np.dot(phat*(1-phat)*X.T, X)\n\t\n\treturn np.sqrt(np.diag(np.linalg.inv(H)))\n\n\ndef get_excluded_lin(K, included):\n\n\tincluded_set = set(included)\n\n\treturn [x for x in range(K) if x not in included_set]\n\n\ndef get_excluded_qua(lin, included):\n\n\twhole_set = list(combinations_with_replacement(lin, 2))\n\tincluded_set = set(included)\n\n\treturn [x for x in whole_set if x not in included_set]\n\n\ndef calc_loglike(X_c, X_t, lin, qua):\n\n\tZ_c = 
form_matrix(X_c, lin, qua)\n\tZ_t = form_matrix(X_t, lin, qua)\n\tbeta = calc_coef(Z_c, Z_t)\n\n\treturn -neg_loglike(beta, Z_c, Z_t)\n\n\ndef select_lin(X_c, X_t, lin_B, C_lin):\n\n\t# Selects, through a sequence of likelihood ratio tests, the\n\t# variables that should be included linearly in propensity\n\t# score estimation.\n\n\tK = X_c.shape[1]\n\texcluded = get_excluded_lin(K, lin_B)\n\tif excluded == []:\n\t\treturn lin_B\n\n\tll_null = calc_loglike(X_c, X_t, lin_B, [])\n\n\tdef lr_stat_lin(lin_term):\n\t\tll_alt = calc_loglike(X_c, X_t, lin_B+[lin_term], [])\n\t\treturn 2 * (ll_alt - ll_null)\n\n\tlr_stats = np.array([lr_stat_lin(term) for term in excluded])\n\targmax_lr = lr_stats.argmax()\n\n\tif lr_stats[argmax_lr] < C_lin:\n\t\treturn lin_B\n\telse:\n\t\tnew_term = [excluded[argmax_lr]]\n\t\treturn select_lin(X_c, X_t, lin_B+new_term, C_lin)\n\n\ndef select_lin_terms(X_c, X_t, lin_B, C_lin):\n\n\t# Mostly a wrapper around function select_lin to handle cases that\n\t# require little computation.\n\n\tif C_lin <= 0:\n\t\tK = X_c.shape[1]\n\t\treturn lin_B + get_excluded_lin(K, lin_B)\n\telif C_lin == np.inf:\n\t\treturn lin_B\n\telse:\n\t\treturn select_lin(X_c, X_t, lin_B, C_lin)\n\n\ndef select_qua(X_c, X_t, lin, qua_B, C_qua):\n\n\t# Selects, through a sequence of likelihood ratio tests, the\n\t# variables that should be included quadratically in propensity\n\t# score estimation.\n\n\texcluded = get_excluded_qua(lin, qua_B)\n\tif excluded == []:\n\t\treturn qua_B\n\n\tll_null = calc_loglike(X_c, X_t, lin, qua_B)\n\n\tdef lr_stat_qua(qua_term):\n\t\tll_alt = calc_loglike(X_c, X_t, lin, qua_B+[qua_term])\n\t\treturn 2 * (ll_alt - ll_null)\n\n\tlr_stats = np.array([lr_stat_qua(term) for term in excluded])\n\targmax_lr = lr_stats.argmax()\n\n\tif lr_stats[argmax_lr] < C_qua:\n\t\treturn qua_B\n\telse:\n\t\tnew_term = [excluded[argmax_lr]]\n\t\treturn select_qua(X_c, X_t, lin, qua_B+new_term, C_qua)\n\n\ndef select_qua_terms(X_c, X_t, lin, C_qua):\n\n\t# 
Mostly a wrapper around function select_qua to handle cases that\n\t# require little computation.\n\n\tif lin == []:\n\t\treturn []\n\tif C_qua <= 0:\n\t\treturn get_excluded_qua(lin, [])\n\telif C_qua == np.inf:\n\t\treturn []\n\telse:\n\t\treturn select_qua(X_c, X_t, lin, [], C_qua)\n\n"},"avg_line_length":{"kind":"number","value":24.7651245552,"string":"24.765125"},"max_line_length":{"kind":"number","value":69,"string":"69"},"alphanum_fraction":{"kind":"number","value":0.6745222015,"string":"0.674522"},"score":{"kind":"number","value":3.203125,"string":"3.203125"}}}],"truncated":false,"partial":false},"paginationData":{"pageIndex":9,"numItemsPerPage":100,"numTotalItems":831747,"offset":900,"length":100}},"jwt":"eyJhbGciOiJFZERTQSJ9.eyJyZWFkIjp0cnVlLCJwZXJtaXNzaW9ucyI6eyJyZXBvLmNvbnRlbnQucmVhZCI6dHJ1ZX0sImlhdCI6MTc1ODM0NjgwMCwic3ViIjoiL2RhdGFzZXRzL2Rldm5naG8vdGhlLXN0YWNrLW1pbmktZWR1IiwiZXhwIjoxNzU4MzUwNDAwLCJpc3MiOiJodHRwczovL2h1Z2dpbmdmYWNlLmNvIn0.sPNUcn-ZUMhw5am1cpVdDSxvl2EURiideTJZd9NEKz0F-o6XjvtsbRFJcC62AyWXx6T7jHZKIgKmt4EG9Vb4Ag","displayUrls":true},"discussionsStats":{"closed":1,"open":0,"total":1},"fullWidth":true,"hasGatedAccess":true,"hasFullAccess":true,"isEmbedded":false,"savedQueries":{"community":[],"user":[]}}">
hexsha
stringlengths
40
40
size
int64
140
1.03M
ext
stringclasses
94 values
lang
stringclasses
21 values
max_stars_repo_path
stringlengths
3
663
max_stars_repo_name
stringlengths
4
120
max_stars_repo_head_hexsha
stringlengths
40
78
max_stars_repo_licenses
listlengths
1
10
max_stars_count
int64
1
368k
max_stars_repo_stars_event_min_datetime
stringlengths
24
24
max_stars_repo_stars_event_max_datetime
stringlengths
24
24
max_issues_repo_path
stringlengths
3
663
max_issues_repo_name
stringlengths
4
120
max_issues_repo_head_hexsha
stringlengths
40
78
max_issues_repo_licenses
listlengths
1
10
max_issues_count
int64
1
116k
max_issues_repo_issues_event_min_datetime
stringlengths
24
24
max_issues_repo_issues_event_max_datetime
stringlengths
24
24
max_forks_repo_path
stringlengths
3
663
max_forks_repo_name
stringlengths
4
135
max_forks_repo_head_hexsha
stringlengths
40
78
max_forks_repo_licenses
listlengths
1
10
max_forks_count
int64
1
105k
max_forks_repo_forks_event_min_datetime
stringlengths
24
24
max_forks_repo_forks_event_max_datetime
stringlengths
24
24
content
stringlengths
140
1.03M
avg_line_length
float64
2.32
23.1k
max_line_length
int64
11
938k
alphanum_fraction
float64
0.01
1
score
float32
3
4.25
45271a2dcda8123130d2b8a8de38e3eba2fba68c
3,173
swift
Swift
Sources/servermodel/LoginTokenDAO.swift
rc2server/appserver
1bc4b4e7868b056eef30adece3878e2b80e4511c
[ "0BSD" ]
null
null
null
Sources/servermodel/LoginTokenDAO.swift
rc2server/appserver
1bc4b4e7868b056eef30adece3878e2b80e4511c
[ "0BSD" ]
3
2016-12-06T19:24:48.000Z
2018-01-28T15:28:26.000Z
Sources/servermodel/LoginTokenDAO.swift
rc2server/appserver
1bc4b4e7868b056eef30adece3878e2b80e4511c
[ "0BSD" ]
null
null
null
// // LoginTokenDAO.swift // // Copyright ©2017 Mark Lilback. This file is licensed under the ISC license. // import Foundation import pgswift import Rc2Model import Logging import SwiftJWT /// Simple wrapper around contents stored in the authentication token public struct LoginToken: Codable, Claims { public let id: Int public let userId: Int public init?(_ dict: [String: Any]) { guard let inId = dict["token"] as? Int, let inUser = dict["user"] as? Int else { return nil } id = inId userId = inUser } public init(_ inId: Int, _ inUser: Int) { id = inId userId = inUser } public var contents: [String: Any] { return ["token": id, "user": userId] } } /// Wrapper for database actions related to login tokens public final class LoginTokenDAO { private let pgdb: Connection /// create a DAO /// /// - Parameter connection: the database to query public init(connection: Connection) { pgdb = connection } /// create a new login token for a user /// /// - Parameter user: the user to create a token for /// - Returns: a new token /// - Throws: a .dbError if the sql command fails public func createToken(user: User) throws -> LoginToken { do { let result = try pgdb.execute(query: "insert into logintoken (userId) values ($1) returning id", parameters: [QueryParameter(type: .int8, value: user.id, connection: pgdb)]) guard result.wasSuccessful, result.rowCount == 1 else { throw ModelError.dbError } guard let tokenId: Int = try result.getValue(row: 0, column: 0) else { throw ModelError.dbError } return LoginToken(tokenId, user.id) } catch { logger.error("failed to insert logintoken \(error)") throw ModelError.dbError } } /// checks the database to make sure a token is still valid /// /// - Parameter token: the token to check /// - Returns: true if the token is still valid public func validate(token: LoginToken) -> Bool { do { let params: [QueryParameter] = [ try QueryParameter(type: .int8, value: token.id, connection: pgdb), try QueryParameter(type: .int8, value: token.userId, connection: 
pgdb) ] let result = try? pgdb.execute(query: "select * from logintoken where id = $1 and userId = $2 and valid = true", parameters: params) guard let res = result, res.wasSuccessful, res.rowCount == 1 else { logger.info("failed to validate token for user \(token.userId)") return false } return true } catch { logger.warning("validateLoginToken failed: \(error)") return false } } /// invalidate a token so it can't be used again /// /// - Parameter token: the token to invalidate /// - Throws: errors from executing sql public func invalidate(token: LoginToken) throws { let query = "update logintoken set valid = false where id = $1 and userId = $2" let params: [QueryParameter] = [ try QueryParameter(type: .int8, value: token.id, connection: pgdb), try QueryParameter(type: .int8, value: token.userId, connection: pgdb) ] let results = try pgdb.execute(query: query, parameters: params) guard results.wasSuccessful else { logger.warning("failed to invalidate token: \(results.errorMessage)") throw ModelError.dbError } } }
33.755319
176
0.695556
3.140625
0ce5cb9e4bc10393a6546a397038a2d745082f63
3,752
py
Python
read_iceye_h5.py
eciraci/iceye_gamma_proc
68b04bfd55082862f419031c28e7b52f1800f3db
[ "MIT" ]
null
null
null
read_iceye_h5.py
eciraci/iceye_gamma_proc
68b04bfd55082862f419031c28e7b52f1800f3db
[ "MIT" ]
null
null
null
read_iceye_h5.py
eciraci/iceye_gamma_proc
68b04bfd55082862f419031c28e7b52f1800f3db
[ "MIT" ]
null
null
null
#!/usr/bin/env python u""" read_iceye_h5.py Written by Enrico Ciraci' (03/2022) Read ICEYE Single Look Complex and Parameter file using GAMMA's Python integration with the py_gamma module. usage: read_iceye_h5.py [-h] [--directory DIRECTORY] TEST: Read ICEye Single Look Complex and Parameter. optional arguments: -h, --help show this help message and exit --directory DIRECTORY, -D DIRECTORY Project data directory. --slc SLC, -C SLC Process and single SLC. PYTHON DEPENDENCIES: argparse: Parser for command-line options, arguments and sub-commands https://docs.python.org/3/library/argparse.html datetime: Basic date and time types https://docs.python.org/3/library/datetime.html#module-datetime tqdm: Progress Bar in Python. https://tqdm.github.io/ py_gamma: GAMMA's Python integration with the py_gamma module UPDATE HISTORY: """ # - Python Dependencies from __future__ import print_function import os import argparse import datetime from tqdm import tqdm # - GAMMA's Python integration with the py_gamma module import py_gamma as pg # - Utility Function from utils.make_dir import make_dir def main(): parser = argparse.ArgumentParser( description="""TEST: Read ICEye Single Look Complex and Parameter.""" ) # - Absolute Path to directory containing input data. 
default_dir = os.path.join(os.path.expanduser('~'), 'Desktop', 'iceye_gamma_test') parser.add_argument('--directory', '-D', type=lambda p: os.path.abspath(os.path.expanduser(p)), default=default_dir, help='Project data directory.') parser.add_argument('--slc', '-C', type=str, default=None, help='Process and single SLC.') args = parser.parse_args() # - Path to Test directory data_dir = os.path.join(args.directory, 'input') # - create output directory out_dir = make_dir(args.directory, 'output') out_dir = make_dir(out_dir, 'slc+par') # - ICEye Suffix ieye_suff = 'ICEYE_X7_SLC_SM_' if args.slc is not None: # - Process a single SLC b_input = os.path.join(data_dir, args.slc) # - Read input Binary File Name b_input_name = b_input.split('/')[-1].replace(ieye_suff, '') slc_name = os.path.join(out_dir, str(b_input_name.replace('.h5', '.slc'))) par_name = os.path.join(out_dir, str(b_input_name.replace('.h5', '.par'))) # - Extract SLC and Parameter File # - Set dtype equal to zero to save the SLC in FCOMPLEX format. pg.par_ICEYE_SLC(b_input, par_name, slc_name, 0) else: # - Process hte entire input directory content # - List Directory Content data_dir_list = [os.path.join(data_dir, x) for x in os.listdir(data_dir) if x.endswith('.h5')] for b_input in tqdm(data_dir_list, total=len(data_dir_list), ncols=60): # - Read input Binary File Name b_input_name = b_input.split('/')[-1].replace(ieye_suff, '') slc_name = os.path.join(out_dir, b_input_name.replace('.h5', '.slc')) par_name = os.path.join(out_dir, b_input_name.replace('.h5', '.par')) # - Extract SLC and Parameter File # - Set dtype equal to zero to save the SLC in FCOMPLEX format. pg.par_ICEYE_SLC(b_input, par_name, slc_name, 0) # - run main program if __name__ == '__main__': start_time = datetime.datetime.now() main() end_time = datetime.datetime.now() print(f"# - Computation Time: {end_time - start_time}")
36.076923
81
0.63033
3.390625
b82b91a8c8452384a38b315df5c298b27dc87504
1,309
rs
Rust
src/aabb/aabb.rs
ebenpack/rtiaw
088482d47afb4753f5f13a788b54703cd61407f7
[ "MIT" ]
null
null
null
src/aabb/aabb.rs
ebenpack/rtiaw
088482d47afb4753f5f13a788b54703cd61407f7
[ "MIT" ]
null
null
null
src/aabb/aabb.rs
ebenpack/rtiaw
088482d47afb4753f5f13a788b54703cd61407f7
[ "MIT" ]
null
null
null
use crate::ray::Ray; use crate::vec3::Vec3; #[derive(Clone, Copy)] pub struct AABB { pub minimum: Vec3, pub maximum: Vec3, } impl AABB { pub fn new(minimum: Vec3, maximum: Vec3) -> Self { AABB { minimum, maximum } } pub fn hit(&self, ray: &Ray, mut t_min: f64, mut t_max: f64) -> bool { for a in 0..=2 { let inv_d = 1.0 / ray.direction[a]; let mut t0 = (self.minimum[a] - ray.origin[a]) * inv_d; let mut t1 = (self.maximum[a] - ray.origin[a]) * inv_d; if inv_d < 0.0 { std::mem::swap(&mut t0, &mut t1); } t_min = if t0 > t_min { t0 } else { t_min }; t_max = if t1 < t_max { t1 } else { t_max }; if t_max <= t_min { return false; } } true } pub fn bounding_box(box0: &AABB, box1: &AABB) -> AABB { let small = Vec3::new( box0.minimum.x.min(box1.minimum.x), box0.minimum.y.min(box1.minimum.y), box0.minimum.z.min(box1.minimum.z), ); let big = Vec3::new( box0.maximum.x.max(box1.maximum.x), box0.maximum.y.max(box1.maximum.y), box0.maximum.z.max(box1.maximum.z), ); AABB::new(small, big) } }
27.851064
74
0.488159
3.015625
264ead018108520a8478776c35bf7acb0a7da8da
1,231
java
Java
ch7-hbase/src/main/java/ch7/CH711GenderCount.java
wangyaomail/my-hadoop
d51faf1bddf5d29984f1ea5cfa7bdf4a2847e692
[ "Apache-2.0" ]
3
2022-03-28T02:04:39.000Z
2022-03-30T13:43:29.000Z
ch7-hbase/src/main/java/ch7/CH711GenderCount.java
wangyaomail/my-hadoop
d51faf1bddf5d29984f1ea5cfa7bdf4a2847e692
[ "Apache-2.0" ]
null
null
null
ch7-hbase/src/main/java/ch7/CH711GenderCount.java
wangyaomail/my-hadoop
d51faf1bddf5d29984f1ea5cfa7bdf4a2847e692
[ "Apache-2.0" ]
null
null
null
package ch7; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.util.Bytes; import java.io.IOException; public class CH711GenderCount extends CH701HBaseBase { @Override public void run() throws IOException { Scan scan = new Scan(); scan.addColumn(Bytes.toBytes("data"),Bytes.toBytes("gender")); Table table = conn.getTable(TableName.valueOf("students")); ResultScanner resultScanner = table.getScanner(scan); int maleCount=0,femaleCount=0; for(Result result : resultScanner){ String gender = Bytes.toString(result.getValue(Bytes.toBytes("data"),Bytes.toBytes("gender"))); if(gender.equals("男")){ maleCount++; } else { femaleCount++; } } System.out.println("男生人数:"+maleCount); System.out.println("女生人数:"+femaleCount); conn.close(); } public static void main(String[] args) throws IOException { new CH711GenderCount().run(); } }
31.564103
107
0.65069
3.015625
0ca33e888a8c5506799931e71fe1070bf6588145
3,758
py
Python
we_sensesim.py
y95847frank/GenSense
0da122bea9b7bd51444748444700b5f788bd8a48
[ "MIT" ]
3
2018-05-31T05:52:18.000Z
2019-12-20T07:15:56.000Z
we_sensesim.py
y95847frank/GenSense
0da122bea9b7bd51444748444700b5f788bd8a48
[ "MIT" ]
null
null
null
we_sensesim.py
y95847frank/GenSense
0da122bea9b7bd51444748444700b5f788bd8a48
[ "MIT" ]
null
null
null
import numpy as np import sys import utils import os from collections import defaultdict from nltk.corpus import wordnet as wn from scipy.spatial.distance import cosine from scipy.spatial.distance import correlation from numpy.linalg import norm from scipy.stats import spearmanr, pearsonr from utils import trim import pdb """ Sense embedding format: see https://github.com/sjauhar/SenseRetrofit Use ',' to seperate Datasets """ def run(path, fname): ''' if len(sys.argv) != 3: print("Usage: python we_sensesim.py SenseEmbedding Datasets") exit(0) ''' #wvs = utils.readWordVecs(os.path.expanduser(full_path)) wvs = utils.readWordVecs(sys.argv[1]) print("Finish reading vector!") wvssen = {} s_list = defaultdict(list) for sense in wvs: wvssen[sense.split("%")[0]] = '' s_list[sense.split("%")[0]].append(sense) mean_vector = np.mean(wvs.values(), axis=0) spear_score_max = [] spear_score_avg = [] f_name = [] for name in fname: full_path = os.path.join(path, name) filenames = os.path.expanduser(full_path).split(',') pairs, scores = utils.readDataset(filenames[0], no_skip=True) #f_name.append(filenames[0]) #print("Pair number for %s: %d"%(filenames[0], len(pairs))) coefs_max = [] coefs_avg = [] missing = 0 for pair in pairs: vecs0 = [] trimed_p0 = trim(pair[0], wvssen) if trimed_p0 not in wvssen: vecs0.append(mean_vector) missing += 1 #print trimed_p0, else: for sense in s_list[trimed_p0]: vecs0.append(wvs[sense]) ''' for sense in wvs: word = sense.split("%")[0] if trimed_p0 == word: vecs0.append(wvs[sense]) ''' vecs1 = [] trimed_p1 = trim(pair[1],wvssen) if trimed_p1 not in wvssen: vecs1.append(mean_vector) missing += 1 #print trimed_p1, else: for sense in s_list[trimed_p1]: vecs1.append(wvs[sense]) ''' for sense in wvs: word = sense.split("%")[0] if trimed_p1 == word: vecs1.append(wvs[sense]) ''' ''' max_value and avg_value: see "Multi-Prototype Vector-Space Models of Word Meaning" section 3.2 Measuring Semantic Similarity 
http://www.cs.utexas.edu/~ml/papers/reisinger.naacl-2010.pdf ''' max_value = max([1-cosine(a,b) for a in vecs0 for b in vecs1]) avg_value = np.mean([1-cosine(a,b) for a in vecs0 for b in vecs1]) coefs_max.append(max_value) coefs_avg.append(avg_value) spear_max = spearmanr(scores, coefs_max) pearson_max = pearsonr(scores, coefs_max) spear_avg = spearmanr(scores, coefs_avg) pearson_avg = pearsonr(scores, coefs_avg) spear_score_max.append(spear_max[0]) spear_score_avg.append(spear_avg[0]) print 'type \t', for i in range(len(fname)): print fname[i].split('.')[0], print '\nspear max\t', for i in range(len(fname)): print '%.04f,' % (spear_score_max[i]), print '\nspear avg\t', for i in range(len(fname)): print '%.04f,' % (spear_score_avg[i]), if __name__ == "__main__": run('./eval_data', ['EN-MEN-n.txt', 'EN-MEN-l.txt', 'EN-TRUK.txt', 'EN-RW.txt', 'EN-WS353.txt', 'EN-WS353-s.txt', 'EN-WS353-r.txt'])
33.256637
140
0.549228
3.296875
de9e3cbfeb9caee6164895e5e997e0101d3ff54f
19,524
rs
Rust
frb_codegen/src/source_graph.rs
AlienKevin/flutter_rust_bridge
37ebac9f0ec26a1293374640d52a8b6f4e8bc277
[ "MIT" ]
764
2021-10-04T10:10:49.000Z
2022-03-31T16:30:50.000Z
frb_codegen/src/source_graph.rs
AlienKevin/flutter_rust_bridge
37ebac9f0ec26a1293374640d52a8b6f4e8bc277
[ "MIT" ]
199
2021-10-04T19:16:29.000Z
2022-03-31T09:48:53.000Z
frb_codegen/src/source_graph.rs
AlienKevin/flutter_rust_bridge
37ebac9f0ec26a1293374640d52a8b6f4e8bc277
[ "MIT" ]
42
2021-10-04T17:10:51.000Z
2022-03-25T23:21:29.000Z
/* Things this doesn't currently support that it might need to later:
- Import parsing is unfinished and so is currently disabled
- When import parsing is enabled:
- Import renames (use a::b as c) - these are silently ignored
- Imports that start with two colons (use ::a::b) - these are also silently ignored */

use std::{collections::HashMap, fmt::Debug, fs, path::PathBuf};

use cargo_metadata::MetadataCommand;
use log::{debug, warn};
use syn::{Attribute, Ident, ItemEnum, ItemStruct, UseTree};

use crate::markers;

/// Represents a crate, including a map of its modules, imports, structs and
/// enums.
#[derive(Debug, Clone)]
pub struct Crate {
    pub name: String,
    pub manifest_path: PathBuf,
    pub root_src_file: PathBuf,
    pub root_module: Module,
}

impl Crate {
    /// Load and parse a crate rooted at the given `Cargo.toml` path.
    ///
    /// Uses `cargo metadata` to locate the root package, then parses its
    /// `src/lib.rs` (preferred) or `src/main.rs` with `syn` and resolves the
    /// module tree.
    ///
    /// Panics if the metadata command fails, neither root source file exists,
    /// or the root file cannot be parsed.
    pub fn new(manifest_path: &str) -> Self {
        let mut cmd = MetadataCommand::new();
        cmd.manifest_path(&manifest_path);
        let metadata = cmd.exec().unwrap();
        let root_package = metadata.root_package().unwrap();
        let root_src_file = {
            let lib_file = root_package
                .manifest_path
                .parent()
                .unwrap()
                .join("src/lib.rs");
            let main_file = root_package
                .manifest_path
                .parent()
                .unwrap()
                .join("src/main.rs");
            // lib.rs wins over main.rs when both are present.
            if lib_file.exists() {
                fs::canonicalize(lib_file).unwrap()
            } else if main_file.exists() {
                fs::canonicalize(main_file).unwrap()
            } else {
                panic!("No src/lib.rs or src/main.rs found for this Cargo.toml file");
            }
        };
        let source_rust_content = fs::read_to_string(&root_src_file).unwrap();
        let file_ast = syn::parse_file(&source_rust_content).unwrap();

        let mut result = Crate {
            name: root_package.name.clone(),
            manifest_path: fs::canonicalize(manifest_path).unwrap(),
            root_src_file: root_src_file.clone(),
            root_module: Module {
                visibility: Visibility::Public,
                file_path: root_src_file,
                // Module paths are rooted at the synthetic "crate" segment.
                module_path: vec!["crate".to_string()],
                source: Some(ModuleSource::File(file_ast)),
                scope: None,
            },
        };
        result.resolve();
        result
    }

    /// Create a map of the modules for this crate
    pub fn resolve(&mut self) {
        self.root_module.resolve();
    }
}

/// Mirrors syn::Visibility, but can be created without a token
#[derive(Debug, Clone)]
pub enum Visibility {
    Public,
    Crate,
    Restricted, // Not supported
    Inherited,  // Usually means private
}

/// Convert a `syn::Visibility` into this crate's token-free [`Visibility`].
fn syn_vis_to_visibility(vis: &syn::Visibility) -> Visibility {
    match vis {
        syn::Visibility::Public(_) => Visibility::Public,
        syn::Visibility::Crate(_) => Visibility::Crate,
        syn::Visibility::Restricted(_) => Visibility::Restricted,
        syn::Visibility::Inherited => Visibility::Inherited,
    }
}

/// A single flattened `use` path (e.g. `["a", "b", "c"]` for `use a::b::c`).
#[derive(Debug, Clone)]
pub struct Import {
    pub path: Vec<String>,
    pub visibility: Visibility,
}

/// Where a module's items come from: a whole parsed file, or an inline
/// `mod { ... }` body inside a file.
#[derive(Debug, Clone)]
pub enum ModuleSource {
    File(syn::File),
    ModuleInFile(Vec<syn::Item>),
}

/// A struct discovered during module resolution, with its full module path.
/// `mirror` is true when the ident was remapped by a mirror marker attribute.
#[derive(Clone)]
pub struct Struct {
    pub ident: Ident,
    pub src: ItemStruct,
    pub visibility: Visibility,
    pub path: Vec<String>,
    pub mirror: bool,
}

// Manual Debug: the `src` AST is large and noisy, so it is elided.
impl Debug for Struct {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("Struct")
            .field("ident", &self.ident)
            .field("src", &"omitted")
            .field("visibility", &self.visibility)
            .field("path", &self.path)
            .field("mirror", &self.mirror)
            .finish()
    }
}

/// An enum discovered during module resolution; see [`Struct`] for field notes.
#[derive(Clone)]
pub struct Enum {
    pub ident: Ident,
    pub src: ItemEnum,
    pub visibility: Visibility,
    pub path: Vec<String>,
    pub mirror: bool,
}

// Manual Debug: the `src` AST is large and noisy, so it is elided.
impl Debug for Enum {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("Enum")
            .field("ident", &self.ident)
            .field("src", &"omitted")
            .field("visibility", &self.visibility)
            .field("path", &self.path)
            .field("mirror", &self.mirror)
            .finish()
    }
}

/// Everything declared directly inside one module.
#[derive(Debug, Clone)]
pub struct ModuleScope {
    pub modules: Vec<Module>,
    pub enums: Vec<Enum>,
    pub structs: Vec<Struct>,
    pub imports: Vec<Import>,
}

/// A module in the crate tree. `scope` is `None` until `resolve()` runs.
#[derive(Clone)]
pub struct Module {
    pub visibility: Visibility,
    pub file_path: PathBuf,
    pub module_path: Vec<String>,
    pub source: Option<ModuleSource>,
    pub scope: Option<ModuleScope>,
}

// Manual Debug: the parsed `source` is elided to keep output readable.
impl Debug for Module {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("Module")
            .field("visibility", &self.visibility)
            .field("module_path", &self.module_path)
            .field("file_path", &self.file_path)
            .field("source", &"omitted")
            .field("scope", &self.scope)
            .finish()
    }
}

/// Get a struct or enum ident, possibly remapped by a mirror marker
fn get_ident(ident: &Ident, attrs: &[Attribute]) -> (Ident, bool) {
    markers::extract_mirror_marker(attrs)
        .and_then(|path| path.get_ident().map(|ident| (ident.clone(), true)))
        .unwrap_or_else(|| (ident.clone(), false))
}

impl Module {
    /// Resolve this module's scope (recursively resolving child modules).
    pub fn resolve(&mut self) {
        self.resolve_modules();
        // self.resolve_imports();
    }

    /// Maps out modules, structs and enums within the scope of this module
    fn resolve_modules(&mut self) {
        let mut scope_modules = Vec::new();
        let mut scope_structs = Vec::new();
        let mut scope_enums = Vec::new();
        let items = match self.source.as_ref().unwrap() {
            ModuleSource::File(file) => &file.items,
            ModuleSource::ModuleInFile(items) => items,
        };
        for item in items.iter() {
            match item {
                syn::Item::Struct(item_struct) => {
                    let (ident, mirror) = get_ident(&item_struct.ident, &item_struct.attrs);
                    let ident_str = ident.to_string();
                    scope_structs.push(Struct {
                        ident,
                        src: item_struct.clone(),
                        visibility: syn_vis_to_visibility(&item_struct.vis),
                        path: {
                            let mut path = self.module_path.clone();
                            path.push(ident_str);
                            path
                        },
                        mirror,
                    });
                }
                syn::Item::Enum(item_enum) => {
                    let (ident, mirror) = get_ident(&item_enum.ident, &item_enum.attrs);
                    let ident_str = ident.to_string();
                    scope_enums.push(Enum {
                        ident,
                        src: item_enum.clone(),
                        visibility: syn_vis_to_visibility(&item_enum.vis),
                        path: {
                            let mut path = self.module_path.clone();
                            path.push(ident_str);
                            path
                        },
                        mirror,
                    });
                }
                syn::Item::Mod(item_mod) => {
                    let ident = item_mod.ident.clone();
                    let mut module_path = self.module_path.clone();
                    module_path.push(ident.to_string());
                    scope_modules.push(match &item_mod.content {
                        // Inline `mod foo { ... }`: items live in this same file.
                        Some(content) => {
                            let mut child_module = Module {
                                visibility: syn_vis_to_visibility(&item_mod.vis),
                                file_path: self.file_path.clone(),
                                module_path,
                                source: Some(ModuleSource::ModuleInFile(content.1.clone())),
                                scope: None,
                            };
                            child_module.resolve();
                            child_module
                        }
                        // `mod foo;`: the module lives in foo/mod.rs or foo.rs.
                        None => {
                            let folder_path =
                                self.file_path.parent().unwrap().join(ident.to_string());
                            let folder_exists = folder_path.exists();
                            let file_path = if folder_exists {
                                folder_path.join("mod.rs")
                            } else {
                                self.file_path
                                    .parent()
                                    .unwrap()
                                    .join(ident.to_string() + ".rs")
                            };
                            let file_exists = file_path.exists();
                            if !file_exists {
                                // `continue` skips the whole item, so nothing is
                                // pushed for unresolvable modules.
                                warn!(
                                    "Skipping unresolvable module {} (tried {})",
                                    &ident,
                                    file_path.to_string_lossy()
                                );
                                continue;
                            }
                            let source = if file_exists {
                                let source_rust_content = fs::read_to_string(&file_path).unwrap();
                                debug!("Trying to parse {:?}", file_path);
                                Some(ModuleSource::File(
                                    syn::parse_file(&source_rust_content).unwrap(),
                                ))
                            } else {
                                None
                            };
                            let mut child_module = Module {
                                visibility: syn_vis_to_visibility(&item_mod.vis),
                                file_path,
                                module_path,
                                source,
                                scope: None,
                            };
                            if file_exists {
                                child_module.resolve();
                            }
                            child_module
                        }
                    });
                }
                _ => {}
            }
        }
        self.scope = Some(ModuleScope {
            modules: scope_modules,
            enums: scope_enums,
            structs: scope_structs,
            imports: vec![], // Will be filled in by resolve_imports()
        });
    }

    /// Collect `use` items into the scope. Currently disabled (see the
    /// file-header comment); renames and `::`-prefixed imports are ignored.
    #[allow(dead_code)]
    fn resolve_imports(&mut self) {
        let imports = &mut self.scope.as_mut().unwrap().imports;
        let items = match self.source.as_ref().unwrap() {
            ModuleSource::File(file) => &file.items,
            ModuleSource::ModuleInFile(items) => items,
        };
        for item in items.iter() {
            if let syn::Item::Use(item_use) = item {
                let flattened_imports = flatten_use_tree(&item_use.tree);
                for import in flattened_imports {
                    imports.push(Import {
                        path: import,
                        visibility: syn_vis_to_visibility(&item_use.vis),
                    });
                }
            }
        }
    }

    /// Recursively collect all structs of this module tree, keyed by ident.
    /// Requires `resolve()` to have run. Idents colliding across modules
    /// overwrite each other (last one wins).
    pub fn collect_structs<'a>(&'a self, container: &mut HashMap<String, &'a Struct>) {
        let scope = self.scope.as_ref().unwrap();
        for scope_struct in &scope.structs {
            container.insert(scope_struct.ident.to_string(), scope_struct);
        }
        for scope_module in &scope.modules {
            scope_module.collect_structs(container);
        }
    }

    /// Convenience wrapper around [`Self::collect_structs`].
    pub fn collect_structs_to_vec(&self) -> HashMap<String, &Struct> {
        let mut ans = HashMap::new();
        self.collect_structs(&mut ans);
        ans
    }

    /// Recursively collect all enums of this module tree, keyed by ident.
    /// Same collision caveat as [`Self::collect_structs`].
    pub fn collect_enums<'a>(&'a self, container: &mut HashMap<String, &'a Enum>) {
        let scope = self.scope.as_ref().unwrap();
        for scope_enum in &scope.enums {
            container.insert(scope_enum.ident.to_string(), scope_enum);
        }
        for scope_module in &scope.modules {
            scope_module.collect_enums(container);
        }
    }

    /// Convenience wrapper around [`Self::collect_enums`].
    pub fn collect_enums_to_vec(&self) -> HashMap<String, &Enum> {
        let mut ans = HashMap::new();
        self.collect_enums(&mut ans);
        ans
    }
}

/// Log why a `use` statement with a rename was abandoned (debug level only).
fn flatten_use_tree_rename_abort_warning(use_tree: &UseTree) {
    debug!("WARNING: flatten_use_tree() found an import rename (use a::b as c). flatten_use_tree() will now abort.");
    debug!("WARNING: This happened while parsing {:?}", use_tree);
    debug!("WARNING: This use statement will be ignored.");
}

/// Takes a use tree and returns a flat list of use paths (list of string tokens)
///
/// Example:
/// use a::{b::c, d::e};
/// becomes
/// [
///     ["a", "b", "c"],
///     ["a", "d", "e"]
/// ]
///
/// Warning: As of writing, import renames (import a::b as c) are silently
/// ignored.
fn flatten_use_tree(use_tree: &UseTree) -> Vec<Vec<String>> {
    // Vec<(path, is_complete)>
    let mut result = vec![(vec![], false)];

    // Iteration guard: each pass extends every incomplete path by one
    // segment, so a huge count means the cursor walk is stuck.
    let mut counter: usize = 0;
    loop {
        counter += 1;
        if counter > 10000 {
            panic!("flatten_use_tree: Use statement complexity limit exceeded. This is probably a bug.");
        }
        // If all paths are complete, break from the loop
        if result.iter().all(|result_item| result_item.1) {
            break;
        }
        // New paths spawned by groups this pass; appended after the loop so
        // we don't mutate `result` while iterating it.
        let mut items_to_push = Vec::new();
        for path_tuple in &mut result {
            let path = &mut path_tuple.0;
            let is_complete = &mut path_tuple.1;
            if *is_complete {
                continue;
            }
            // Re-walk the tree from the root, following the segments already
            // collected for this path, to position the cursor one past them.
            let mut tree_cursor = use_tree;
            for path_item in path.iter() {
                match tree_cursor {
                    UseTree::Path(use_path) => {
                        let ident = use_path.ident.to_string();
                        if *path_item != ident {
                            panic!("This ident did not match the one we already collected. 
This is a bug.");
                        }
                        tree_cursor = use_path.tree.as_ref();
                    }
                    UseTree::Group(use_group) => {
                        // Find the group member whose ident matches the
                        // collected segment and descend into it.
                        let mut moved_tree_cursor = false;
                        for tree in use_group.items.iter() {
                            match tree {
                                UseTree::Path(use_path) => {
                                    if path_item == &use_path.ident.to_string() {
                                        tree_cursor = use_path.tree.as_ref();
                                        moved_tree_cursor = true;
                                        break;
                                    }
                                }
                                // Since we're not matching UseTree::Group here, a::b::{{c}, {d}} might
                                // break. But also why would anybody do that
                                _ => unreachable!(),
                            }
                        }
                        if !moved_tree_cursor {
                            unreachable!();
                        }
                    }
                    _ => unreachable!(),
                }
            }
            // The cursor now points at the next unconsumed node; extend the
            // path by one step (or fan out, for groups).
            match tree_cursor {
                UseTree::Name(use_name) => {
                    path.push(use_name.ident.to_string());
                    *is_complete = true;
                }
                UseTree::Path(use_path) => {
                    path.push(use_path.ident.to_string());
                }
                UseTree::Glob(_) => {
                    path.push("*".to_string());
                    *is_complete = true;
                }
                UseTree::Group(use_group) => {
                    // We'll modify the first one in-place, and make clones for
                    // all subsequent ones
                    let mut first: bool = true;
                    // Capture the path in this state, since we're about to
                    // modify it
                    let path_copy = path.clone();
                    for tree in use_group.items.iter() {
                        let mut new_path_tuple = if first {
                            None
                        } else {
                            let new_path = path_copy.clone();
                            items_to_push.push((new_path, false));
                            Some(items_to_push.iter_mut().last().unwrap())
                        };
                        match tree {
                            UseTree::Path(use_path) => {
                                let ident = use_path.ident.to_string();
                                if first {
                                    path.push(ident);
                                } else {
                                    new_path_tuple.unwrap().0.push(ident);
                                }
                            }
                            UseTree::Name(use_name) => {
                                let ident = use_name.ident.to_string();
                                if first {
                                    path.push(ident);
                                    *is_complete = true;
                                } else {
                                    let path_tuple = new_path_tuple.as_mut().unwrap();
                                    path_tuple.0.push(ident);
                                    path_tuple.1 = true;
                                }
                            }
                            UseTree::Glob(_) => {
                                if first {
                                    path.push("*".to_string());
                                    *is_complete = true;
                                } else {
                                    let path_tuple = new_path_tuple.as_mut().unwrap();
                                    path_tuple.0.push("*".to_string());
                                    path_tuple.1 = true;
                                }
                            }
                            UseTree::Group(_) => {
                                panic!(
                                    "Directly-nested use groups ({}) are not supported by flutter_rust_bridge. 
Use {} instead.",
                                    "use a::{{b}, c}", "a::{b, c}"
                                );
                            }
                            // UseTree::Group(_) => panic!(),
                            UseTree::Rename(_) => {
                                flatten_use_tree_rename_abort_warning(use_tree);
                                return vec![];
                            }
                        }
                        first = false;
                    }
                }
                UseTree::Rename(_) => {
                    flatten_use_tree_rename_abort_warning(use_tree);
                    return vec![];
                }
            }
        }
        for item in items_to_push {
            result.push(item);
        }
    }
    // Drop the is_complete flags; only the paths are returned.
    result.into_iter().map(|val| val.0).collect()
}
35.241877
128
0.444376
3.03125
dd11dbd635362054dadd3b141f67a6cc9f5b54bc
4,303
go
Go
environment/container/kubernetes/kubernetes.go
hown3d/colima
d99e306af18b4459ea1562434899756b234816d6
[ "MIT" ]
null
null
null
environment/container/kubernetes/kubernetes.go
hown3d/colima
d99e306af18b4459ea1562434899756b234816d6
[ "MIT" ]
null
null
null
environment/container/kubernetes/kubernetes.go
hown3d/colima
d99e306af18b4459ea1562434899756b234816d6
[ "MIT" ]
null
null
null
// Package kubernetes implements the k3s-based Kubernetes container runtime
// for colima, driving the guest VM through shell commands.
package kubernetes

import (
	"strings"
	"time"

	"github.com/abiosoft/colima/cli"
	"github.com/abiosoft/colima/config"
	"github.com/abiosoft/colima/environment"
	"github.com/abiosoft/colima/environment/container/containerd"
	"github.com/abiosoft/colima/environment/container/docker"
)

// Name is container runtime name
const Name = "kubernetes"

// newRuntime constructs the kubernetes runtime; registered via init below.
func newRuntime(host environment.HostActions, guest environment.GuestActions) environment.Container {
	return &kubernetesRuntime{
		host:         host,
		guest:        guest,
		CommandChain: cli.New(Name),
	}
}

func init() {
	environment.RegisterContainer(Name, newRuntime)
}

// Compile-time check that kubernetesRuntime satisfies environment.Container.
var _ environment.Container = (*kubernetesRuntime)(nil)

type kubernetesRuntime struct {
	host  environment.HostActions
	guest environment.GuestActions
	cli.CommandChain
}

func (c kubernetesRuntime) Name() string {
	return Name
}

func (c kubernetesRuntime) isInstalled() bool {
	// it is installed if uninstall script is present.
	return c.guest.RunQuiet("command", "-v", "k3s-uninstall.sh") == nil
}

// Running reports whether the k3s service is up inside the guest.
func (c kubernetesRuntime) Running() bool {
	return c.guest.RunQuiet("sudo", "service", "k3s", "status") == nil
}

// runtime returns the configured container runtime name (docker/containerd)
// stored in the guest environment.
func (c kubernetesRuntime) runtime() string {
	return c.guest.Get(environment.ContainerRuntimeKey)
}

// kubernetesVersion returns the configured Kubernetes version stored in the
// guest environment.
func (c kubernetesRuntime) kubernetesVersion() string {
	return c.guest.Get(environment.KubernetesVersionKey)
}

// Provision installs k3s on first run and (re)installs containerd
// dependencies on every startup when containerd is the runtime.
// installK3s/installContainerdDeps are defined elsewhere in this package.
func (c *kubernetesRuntime) Provision() error {
	a := c.Init()

	if !c.isInstalled() {
		// k3s
		a.Stage("downloading and installing")
		installK3s(c.host, c.guest, a, c.runtime())
	}

	// this needs to happen on each startup
	if c.runtime() == containerd.Name {
		installContainerdDeps(c.guest, a)
	}

	return a.Exec()
}

// Start brings up the k3s service and then provisions the host kubeconfig.
func (c kubernetesRuntime) Start() error {
	log := c.Logger()
	a := c.Init()
	if c.Running() {
		log.Println("already running")
		return nil
	}

	a.Stage("starting")

	a.Add(func() error {
		// Give k3s a moment to come up before kubeconfig provisioning runs.
		defer time.Sleep(time.Second * 5)
		return c.guest.Run("sudo", "service", "k3s", "start")
	})

	if err := a.Exec(); err != nil {
		return err
	}
	return c.provisionKubeconfig()
}

// Stop kills k3s and then manually stops its containers.
func (c kubernetesRuntime) Stop() error {
	a := c.Init()

	a.Stage("stopping")
	a.Add(func() error {
		return c.guest.Run("k3s-killall.sh")
	})

	// k3s is buggy with external containerd for now
	// cleanup is manual
	a.Add(c.stopAllContainers)

	return a.Exec()
}

// deleteAllContainers force-removes every running k8s container via the
// runtime-appropriate CLI. No-op for unknown runtimes or when none run.
func (c kubernetesRuntime) deleteAllContainers() error {
	ids := c.runningContainerIDs()
	if ids == "" {
		return nil
	}

	var args []string
	switch c.runtime() {
	case containerd.Name:
		args = []string{"nerdctl", "-n", "k8s.io", "rm", "-f"}
	case docker.Name:
		args = []string{"docker", "rm", "-f"}
	default:
		return nil
	}
	args = append(args, strings.Fields(ids)...)

	// Joined into one shell string so the id list expands in the guest shell.
	return c.guest.Run("sudo", "sh", "-c", strings.Join(args, " "))
}

// stopAllContainers kills (without removing) every running k8s container.
func (c kubernetesRuntime) stopAllContainers() error {
	ids := c.runningContainerIDs()
	if ids == "" {
		return nil
	}

	var args []string
	switch c.runtime() {
	case containerd.Name:
		args = []string{"nerdctl", "-n", "k8s.io", "kill"}
	case docker.Name:
		args = []string{"docker", "kill"}
	default:
		return nil
	}
	args = append(args, strings.Fields(ids)...)

	return c.guest.Run("sudo", "sh", "-c", strings.Join(args, " "))
}

// runningContainerIDs returns a space-separated list of running k8s
// container ids/names, or "" when none (errors are deliberately ignored).
func (c kubernetesRuntime) runningContainerIDs() string {
	var args []string
	switch c.runtime() {
	case containerd.Name:
		args = []string{"sudo", "nerdctl", "-n", "k8s.io", "ps", "-q"}
	case docker.Name:
		// Docker-shim containers are matched by their "k8s_" name prefix.
		args = []string{"sudo", "sh", "-c", `docker ps --format '{{.Names}}'| grep "k8s_"`}
	default:
		return ""
	}
	ids, _ := c.guest.RunOutput(args...)
	if ids == "" {
		return ""
	}
	return strings.ReplaceAll(ids, "\n", " ")
}

// Teardown uninstalls k3s, manually deletes its containers, and clears the
// stored kubeconfig. teardownKubeconfig/kubeconfigKey live elsewhere in this
// package.
func (c kubernetesRuntime) Teardown() error {
	a := c.Init()

	a.Stage("deleting")

	if c.isInstalled() {
		a.Add(func() error {
			return c.guest.Run("k3s-uninstall.sh")
		})
	}

	// k3s is buggy with external containerd for now
	// cleanup is manual
	a.Add(func() error {
		return c.deleteAllContainers()
	})

	c.teardownKubeconfig(a)
	a.Add(func() error {
		return c.guest.Set(kubeconfigKey, "")
	})

	return a.Exec()
}

// Dependencies lists host binaries required by this runtime.
func (c kubernetesRuntime) Dependencies() []string {
	return []string{"kubectl"}
}

// Version reports the kubectl-visible version for the current profile
// context; errors are swallowed and yield an empty string.
func (c kubernetesRuntime) Version() string {
	version, _ := c.host.RunOutput("kubectl", "--context", config.Profile().ID, "version", "--short")
	return version
}
20.88835
101
0.668603
3.046875
583829c19d32bffd2c4e6c97031c45f243db0563
1,668
c
C
shaders/simple.c
he110world/libmeh_working
0f324dabdf2dcfc67b18f64acb7d9629aed1c04f
[ "MIT" ]
1
2015-03-05T12:47:47.000Z
2015-03-05T12:47:47.000Z
src/shaders/simple.c
he110world/libmeh_working
0f324dabdf2dcfc67b18f64acb7d9629aed1c04f
[ "MIT" ]
null
null
null
src/shaders/simple.c
he110world/libmeh_working
0f324dabdf2dcfc67b18f64acb7d9629aed1c04f
[ "MIT" ]
null
null
null
varying vec3 L, E; varying vec2 texcoord; varying vec3 cubecoord; attribute vec3 tangent; uniform vec4 light; void main(void){ gl_Position = ftransform(); texcoord=gl_MultiTexCoord0.xy; cubecoord=gl_MultiTexCoord1.xyz; vec3 n = normalize(gl_NormalMatrix*gl_Normal); vec3 t = normalize(gl_NormalMatrix*tangent); vec3 b = cross(n,t); vec3 vert = vec3(gl_ModelViewMatrix * gl_Vertex); vec3 tmp = vec3(gl_ModelViewMatrix*light)-vert; // vec3 tmp = light-vert; // vec3 tmp = light-gl_Vertex.xyz; L.x = dot(tmp,t); L.y = dot(tmp,b); L.z = dot(tmp,n); tmp = -vert; E.x = dot(tmp,t); E.y = dot(tmp,b); E.z = dot(tmp,n); } varying vec3 L,E; varying vec2 texcoord; varying vec3 cubecoord; uniform sampler2D colortex, normaltex; uniform samplerCube stenciltex; float appr_pow(float t) { return max(0.0, 8.0*t-7.0); } void main() { float invradius=1.0/100.0; float distsqr = dot(L,L); float att = clamp(1.0 - invradius * sqrt(distsqr), 0.0, 1.0); vec3 newL = L*inversesqrt(distsqr); vec3 newE = normalize(E); vec4 color=texture2D(colortex, texcoord); float opaque=textureCube(stenciltex, cubecoord).a; vec3 N=normalize(texture2D(normaltex, texcoord).xyz*2.0 - 1.0); float diffuse = max(dot(N,newL),0.0); // float spec = pow(clamp(dot(reflect(-newL,N),newE),0.0,1.0), 16.0); // float spec = clamp(dot(reflect(-newL,N),newE),0.0,1.0); float spec = dot(reflect(-newL,N),newE); // vec3 R=normalize(2*N-newL); // float spec = appr_pow(clamp(dot(N,R),0.0,1.0)); // float spec = pow(clamp(dot(N,R),0.0,1.0),1.0); // gl_FragColor = att*(vec4(diffuse)*color+vec4(.5*spec)); gl_FragColor = att*(vec4(diffuse+spec)*color); gl_FragColor.a = opaque; }
24.895522
69
0.68765
3.328125
e723f4d5cf2200b470bfcf636bc1085e063958ba
2,594
js
JavaScript
index.js
jimmywarting/highlight-saas
93c18a3da8c0b0e280944c46a4e20928ad800e6e
[ "MIT" ]
1
2020-02-02T04:16:12.000Z
2020-02-02T04:16:12.000Z
index.js
jimmywarting/highlight-saas
93c18a3da8c0b0e280944c46a4e20928ad800e6e
[ "MIT" ]
null
null
null
index.js
jimmywarting/highlight-saas
93c18a3da8c0b0e280944c46a4e20928ad800e6e
[ "MIT" ]
null
null
null
const { highlight, highlightAuto, getLanguage } = require('highlight.js')
const compression = require('compression')
const express = require('express')
// NOTE(review): reaches into inline-css internals rather than the public
// entry point (require('inline-css')) — confirm the (html, css, options)
// signature before upgrading that dependency.
const inlineCss = require('inline-css/lib/inline-css')
const cors = require('cors')
// NOTE(review): requiring express's bundled body-parser is fragile (breaks
// when node_modules is hoisted/deduped); prefer a direct body-parser dep.
const bodyParser = require('express/node_modules/body-parser')
const config = require('./config.json')
const app = express()
const multer = require('multer')
const upload = multer({})

const urlencodedParser = bodyParser.urlencoded({extended: false})
const jsonParser = bodyParser.json()
const textParser = bodyParser.text()

app.disable('x-powered-by')
app.use(compression())
app.use(cors())

/**
 * Normalize request options and run the highlighter.
 *
 * @param {{language?: string, style?: string, content?: string}} opts
 * @returns {{content: string, style: string}} highlighted HTML and the CSS
 *          text of the resolved style from config.json
 */
function getDefault(opts) {
  let language = (opts.language || 'auto').toLowerCase()
  let style = (opts.style || '').toLowerCase()
  let content = opts.content || `I'm a teapot`

  // Unknown languages fall back to automatic detection; unknown styles fall
  // back to the default theme.
  if (!getLanguage(language)) language = 'auto'
  if (!style || !Object.keys(config.styles).includes(style)) style = 'atom-one-dark'

  style = config.styles[style]
  content = language === 'auto'
    ? highlightAuto(content).value
    : highlight(language, content).value

  return { content, style }
}

app.get('/favicon.ico', (_, res) => res.sendStatus(404))

// Catch-all highlighting endpoint: accepts JSON, urlencoded, text, or
// multipart bodies, plus ?content=&style=&language= query params.
app.all('*', jsonParser, urlencodedParser, textParser, upload.single(), async (req, res) => {
  const myOrigin = req.protocol + '://' + req.get('host')
  const url = new URL(myOrigin + req.originalUrl)

  // Bare GET / serves the landing page from config.
  if (url.pathname === '/' && url.search === '' && req.method === 'GET')
    // return res.send(require('fs').readFileSync('./index.html', { encoding: 'utf8' }).toString('ascii'))
    return res.send(config.index)

  const body = typeof req.body === 'string' ? {content: req.body} : req.body || {}
  // Query params act as fallbacks for missing body fields.
  body.content = body.content || url.searchParams.get('content')
  body.style = body.style || url.searchParams.get('style')
  body.language = body.language || url.searchParams.get('language')

  const def = getDefault(body)
  const accept = req.get('accept') || ''
  const referrer = req.get('Referrer') || ''

  // FIX: `style` and `content` were previously assigned without declaration,
  // creating implicit globals shared across concurrent requests. They are now
  // request-local bindings. (Also removed the unused `origin` and
  // `defaultStyle` bindings.)
  const style = def.style
  let content = def.content

  // Hot-linked as an image from another site: emit a full document with the
  // extra CSS; otherwise emit just the highlighted fragment.
  if (referrer && accept.includes('image') && !referrer.includes(myOrigin)) {
    content = `<body><pre class="hljs"><code>${content}</code></pre></body>`
    content += '<script></script>'
    content = inlineCss(content, style + config.extraCss, config.inlineOpts)
  } else {
    content = `<pre class="hljs"><code>${content}</code></pre>`
    content = inlineCss(content, style, config.inlineOpts)
  }
  // NOTE(review): if this inlineCss build returns a Promise, a pending
  // Promise is sent here — confirm and add `await` if so.
  res.send(content)
})

app.listen(process.env.PORT || 3000)
33.688312
106
0.672321
3.140625
5f5df4c16e3cbba91cabd10a1877c24964949db2
2,915
ts
TypeScript
src/tests/utilities.test.ts
jmgrady/TheCombine
4645d891f4825458553b94119a7b5731bed715d3
[ "MIT" ]
null
null
null
src/tests/utilities.test.ts
jmgrady/TheCombine
4645d891f4825458553b94119a7b5731bed715d3
[ "MIT" ]
null
null
null
src/tests/utilities.test.ts
jmgrady/TheCombine
4645d891f4825458553b94119a7b5731bed715d3
[ "MIT" ]
null
null
null
// Unit tests for the shared `utilities` module: quicksort, timestamp
// formatting, and the configurable-cost Levenshtein distance.
import * as utilities from "utilities";

describe("utilities", () => {
  describe("quicksort", () => {
    // Identity key function: sort the numbers by their own value.
    const compareItem = (input: number) => {
      return input;
    };
    // 25 random values in [0, 1) as the fixture.
    const numbers: number[] = [];
    for (let i = 0; i < 25; i++) numbers.push(Math.random());

    it("orders properly", () => {
      const sortedNums = utilities.quicksort<number>(numbers, compareItem);
      // Verify pairwise non-decreasing order.
      for (let i = 1; i < sortedNums.length; i++)
        expect(sortedNums[i - 1]).toBeLessThanOrEqual(sortedNums[i]);
    });
  });

  describe("getNowDateTimeString", () => {
    // These tests will fail intermittently if there is a bug with the 0-prepend
    it("returns string of correct length", () => {
      const expectedLength = "YYYY-MM-DD_hh-mm-ss".length;
      expect(utilities.getNowDateTimeString().length).toBe(expectedLength);
    });
  });

  describe("LevenshteinDistance", () => {
    let finder: utilities.LevenshteinDistance;
    // Distinct, non-default costs so each edit kind is distinguishable in
    // the expected distances below.
    const testParams: utilities.LevenshteinDistParams = {
      delCost: 3,
      insCost: 4,
      subCost: 5,
    };
    beforeEach(() => {
      finder = new utilities.LevenshteinDistance(testParams);
    });

    describe("getDistance", () => {
      const baseWord = "testing";

      test("with empty word", () => {
        expect(finder.getDistance("", "")).toEqual(0);
        // Emptying the first word costs one deletion per character...
        expect(finder.getDistance(baseWord, "")).toEqual(
          baseWord.length * testParams.delCost
        );
        // ...and building up the second word costs one insertion each.
        expect(finder.getDistance("", baseWord)).toEqual(
          baseWord.length * testParams.insCost
        );
      });

      // [description, second word, expected distance from baseWord]
      const similarCases: [string, string, number][] = [
        ["same word", baseWord, 0],
        ["1 deletion", "testin", testParams.delCost],
        ["1 insertion", "testings", testParams.insCost],
        ["1 substitution", "tasting", testParams.subCost],
        ["2 substitutions", "tossing", 2 * testParams.subCost],
        [
          "1 insertion, 1 deletion",
          "teasing",
          testParams.insCost + testParams.delCost,
        ],
        [
          "1 insertion, 1 substitution",
          "toasting",
          testParams.insCost + testParams.subCost,
        ],
      ];
      test.each(similarCases)(
        "with similar word: %p",
        (_description: string, secondWord: string, expectedDist: number) => {
          expect(finder.getDistance(baseWord, secondWord)).toEqual(
            expectedDist
          );
        }
      );

      test("with much different words", () => {
        // Every character of the short word is substituted; the length
        // difference is covered by insertions (or deletions, flipped).
        const diffWord = "QQQ";
        expect(finder.getDistance(diffWord, baseWord)).toEqual(
          diffWord.length * testParams.subCost +
            (baseWord.length - diffWord.length) * testParams.insCost
        );
        expect(finder.getDistance(baseWord, diffWord)).toEqual(
          diffWord.length * testParams.subCost +
            (baseWord.length - diffWord.length) * testParams.delCost
        );
      });
    });
  });
});
31.684783
79
0.578045
3.359375
dfd7c4d2364f15422ade24e6579947af63f87ec7
1,585
ts
TypeScript
backend/src/connect.database.ts
ExiledNarwal28/gif-3112-project
806c697705c13d813a85dd643bd733dbd39af96d
[ "MIT" ]
1
2021-11-12T06:58:38.000Z
2021-11-12T06:58:38.000Z
backend/src/connect.database.ts
ExiledNarwal28/gif-3112-project
806c697705c13d813a85dd643bd733dbd39af96d
[ "MIT" ]
null
null
null
backend/src/connect.database.ts
ExiledNarwal28/gif-3112-project
806c697705c13d813a85dd643bd733dbd39af96d
[ "MIT" ]
null
null
null
import mongoose from 'mongoose'; import { logger } from './middlewares/logger'; const mongoURL = process.env.MONGO_URL || ''; const mongoOptions = { useFindAndModify: false, useNewUrlParser: true, useUnifiedTopology: true, user: process.env.MONGO_USERNAME, pass: process.env.MONGO_PASSWORD, }; const db = mongoose.connection; db.on('connecting', () => logger.info('Connecting to MongoDB...')); db.on('error', (error) => logger.error(`Error in MongoDB connection : ${error}`), ); db.on('connected', () => logger.info('MongoDB connected!')); db.once('open', async () => { logger.info('MongoDB connection opened!'); }); db.on('reconnected', () => logger.info('MongoDB reconnected!')); db.on('disconnected', () => { logger.info('MongoDB disconnected!'); retryConnectionAfterTimeout(); }); const MAX_ATTEMPTS = 10; const FACTOR = 1.5; const DEFAULT_RETRY_TIMEOUT = 5000; const DEFAULT_ATTEMPTS = 0; let retryTimeout = DEFAULT_RETRY_TIMEOUT; let attempts = DEFAULT_ATTEMPTS; const retryConnectionAfterTimeout = () => { if (attempts < MAX_ATTEMPTS) { logger.info(`Retrying connection in ${retryTimeout / 1000} seconds`); setTimeout(connectDatabase, retryTimeout); retryTimeout *= FACTOR; attempts++; } else { logger.info(`Max connection attempts (${MAX_ATTEMPTS}) reached!`); } }; export function connectDatabase() { mongoose .connect(mongoURL, mongoOptions) .then(() => { retryTimeout = DEFAULT_RETRY_TIMEOUT; attempts = DEFAULT_ATTEMPTS; }) .catch(() => { retryConnectionAfterTimeout(); }); }
24.384615
73
0.680126
3.015625
c36bc166ac8c5e32810efdfb68416109d184994a
1,233
go
Go
st600/fuzz.go
larixsource/stgps
03af5b868f238dd04a10c161038759036daee6ac
[ "MIT" ]
7
2017-05-17T20:12:49.000Z
2021-07-12T21:41:48.000Z
st600/fuzz.go
larixsource/stgps
03af5b868f238dd04a10c161038759036daee6ac
[ "MIT" ]
null
null
null
st600/fuzz.go
larixsource/stgps
03af5b868f238dd04a10c161038759036daee6ac
[ "MIT" ]
3
2016-11-29T23:30:08.000Z
2018-12-04T13:19:58.000Z
package st600 func Fuzz(data []byte) int { p := ParseBytes(data, ParserOpts{}) var results []int for p.Next() { frame := p.Msg() if frame == nil { panic("nil frame") } if len(frame.Frame) == 0 { panic("empty raw frame") } if frame.ParsingError != nil { results = append(results, 0) continue } switch frame.Type { case STTReport: if frame.STT == nil { panic("nil STT") } case EMGReport: if frame.EMG == nil { panic("nil EMG") } case EVTReport: if frame.EVT == nil { panic("nil EVT") } case ALTReport: if frame.ALT == nil { panic("nil ALT") } case ALVReport: if frame.ALV == nil { panic("nil ALV") } case UEXReport: if frame.UEX == nil { panic("nil UEX") } case UnknownMsg: default: panic("invalid Type") } // good frame results = append(results, 1) } // count results (zeroes and ones) zeroCount := 0 oneCount := 0 for _, r := range results { switch r { case 0: zeroCount++ case 1: oneCount++ default: panic("fuzz programming error") } } switch { case oneCount == 0: return 0 case zeroCount == 0 || zeroCount == 1: // at most one error permitted return 1 default: return 0 } }
16.223684
70
0.583131
3.03125
a1a6cf56c78ce242de3be9a964be2f1ff6b74f8a
2,736
go
Go
cmd/ports.go
rmohr/cli
a95a4b96a0c93899e2e6c3f1ef0ce911b118a382
[ "Apache-2.0" ]
null
null
null
cmd/ports.go
rmohr/cli
a95a4b96a0c93899e2e6c3f1ef0ce911b118a382
[ "Apache-2.0" ]
null
null
null
cmd/ports.go
rmohr/cli
a95a4b96a0c93899e2e6c3f1ef0ce911b118a382
[ "Apache-2.0" ]
null
null
null
// Package cmd: the `ports` subcommand, which prints the host-side port
// mappings of the cluster's dnsmasq container.
package cmd

import (
	"fmt"
	"github.com/docker/docker/api/types"
	"github.com/docker/docker/client"
	"github.com/docker/go-connections/nat"
	"github.com/rmohr/cli/docker"
	"github.com/spf13/cobra"
	"strconv"
)

// Well-known private (container-side) ports and their CLI names.
const (
	PORT_SSH      = 2201
	PORT_REGISTRY = 5000
	PORT_OCP      = 8443
	PORT_K8S      = 6443
	PORT_VNC      = 5901

	PORT_NAME_SSH      = "ssh"
	PORT_NAME_OCP      = "ocp"
	PORT_NAME_REGISTRY = "registry"
	PORT_NAME_K8S      = "k8s"
	PORT_NAME_VNC      = "vnc"
)

// NewPortCommand builds the cobra `ports` command. Args validation accepts
// at most one port name, restricted to the known names above.
// NOTE(review): help text omits 'vnc' even though it is accepted — confirm
// whether that is intentional.
func NewPortCommand() *cobra.Command {

	port := &cobra.Command{
		Use:   "ports",
		Short: "ports shows exposed ports of the cluster",
		Long: `ports shows exposed ports of the cluster

If no port name is specified, all exposed ports are printed.
If an extra port name is specified, only the exposed port is printed.

Known port names are 'ssh', 'registry', 'ocp' and 'k8s'.
`,
		RunE: ports,
		Args: func(cmd *cobra.Command, args []string) error {
			if len(args) > 1 {
				return fmt.Errorf("only one port name can be specified at once")
			}

			if len(args) == 1 {
				switch args[0] {
				case PORT_NAME_SSH, PORT_NAME_K8S, PORT_NAME_OCP, PORT_NAME_REGISTRY, PORT_NAME_VNC:
					return nil
				default:
					return fmt.Errorf("unknown port name %s", args[0])
				}
			}
			return nil
		},
	}
	return port
}

// ports implements RunE: looks up the dnsmasq container for the configured
// prefix and prints either one named mapping or all mappings.
func ports(cmd *cobra.Command, args []string) error {
	prefix, err := cmd.Flags().GetString("prefix")
	if err != nil {
		return err
	}
	cli, err := client.NewEnvClient()
	if err != nil {
		return err
	}
	container, err := docker.GetDDNSMasqContainer(cli, prefix)
	if err != nil {
		return err
	}

	portName := ""
	if len(args) > 0 {
		portName = args[0]
	}

	if portName != "" {
		// Named lookup: print just the public port for the requested service.
		err = nil
		switch portName {
		case PORT_NAME_SSH:
			err = printPort(PORT_SSH, container.Ports)
		case PORT_NAME_K8S:
			err = printPort(PORT_K8S, container.Ports)
		case PORT_NAME_REGISTRY:
			err = printPort(PORT_REGISTRY, container.Ports)
		case PORT_NAME_OCP:
			err = printPort(PORT_OCP, container.Ports)
		case PORT_NAME_VNC:
			err = printPort(PORT_VNC, container.Ports)
		}
		if err != nil {
			return err
		}
	} else {
		// No name given: dump every mapping in "private/proto -> ip:public" form.
		for _, p := range container.Ports {
			fmt.Printf("%d/%s -> %s:%d\n", p.PrivatePort, p.Type, p.IP, p.PublicPort)
		}
	}
	return nil
}

// getPort finds the public port mapped to the given private port, or errors
// if the port is not exposed.
func getPort(port uint16, ports []types.Port) (uint16, error) {
	for _, p := range ports {
		if p.PrivatePort == port {
			return p.PublicPort, nil
		}
	}
	return 0, fmt.Errorf("port is not exposed")
}

// printPort prints the public port for a private port to stdout.
func printPort(port uint16, ports []types.Port) error {
	p, err := getPort(port, ports)
	if err != nil {
		return err
	}
	fmt.Println(p)
	return nil
}

// tcpPortOrDie converts an int to a nat.Port ("N/tcp"), panicking on
// invalid input (only called with the trusted constants above).
func tcpPortOrDie(port int) nat.Port {
	p, err := nat.NewPort("tcp", strconv.Itoa(port))
	if err != nil {
		panic(err)
	}
	return p
}
20.571429
88
0.654971
3.125
bd3a662fb349989868d51eb2243cb72e1eac5b28
2,124
rs
Rust
web-server/sgx-wallet-impl/src/schema/types.rs
ntls-io/nautilus-wallet
31a6a534c920d58548a8ac5869b3cb918d3b7c11
[ "Apache-2.0" ]
1
2022-01-30T03:54:55.000Z
2022-01-30T03:54:55.000Z
web-server/sgx-wallet-impl/src/schema/types.rs
ntls-io/nautilus-wallet
31a6a534c920d58548a8ac5869b3cb918d3b7c11
[ "Apache-2.0" ]
32
2021-11-15T08:43:10.000Z
2022-03-20T22:35:56.000Z
web-server/sgx-wallet-impl/src/schema/types.rs
ntls-io/nautilus-wallet
31a6a534c920d58548a8ac5869b3cb918d3b7c11
[ "Apache-2.0" ]
2
2021-11-21T19:19:08.000Z
2021-11-22T09:17:01.000Z
//! Supporting data types. use std::boxed::Box; use std::prelude::v1::String; use ripple_keypairs::{Algorithm, EntropyArray}; use serde::{Deserialize, Serialize}; pub type Bytes = Box<[u8]>; /// Nautilus Wallet ID. pub type WalletId = String; /// A wallet owner's authenticating PIN. pub type WalletPin = String; /// Algorand account seed, as bytes. pub type AlgorandAccountSeedBytes = [u8; 32]; /// Algorand account address, as bytes. pub type AlgorandAddressBytes = [u8; 32]; /// Algorand account address, as base32 with checksum. pub type AlgorandAddressBase32 = String; /// XRPL key type (signing algorithm). /// /// Docs: <https://xrpl.org/cryptographic-keys.html#signing-algorithms> #[derive(Copy, Clone, Eq, PartialEq, Debug)] // core #[derive(Deserialize, Serialize)] // serde #[serde(rename_all = "lowercase")] pub enum XrplKeyType { Secp256k1, Ed25519, } /// Default to `secp256k1`, like the XRP Ledger. impl Default for XrplKeyType { fn default() -> Self { Self::Secp256k1 } } // Convert between our representation and ripple-keypairs. /// Convert from `&Algorithm`, as used by ripple-keypairs. impl From<&Algorithm> for XrplKeyType { fn from(algorithm: &Algorithm) -> Self { match algorithm { Algorithm::Secp256k1 => Self::Secp256k1, Algorithm::Ed25519 => Self::Ed25519, } } } /// Convert to `&'static Algorithm`, as expected by ripple-keypairs. impl From<XrplKeyType> for &'static Algorithm { fn from(key_type: XrplKeyType) -> Self { match key_type { XrplKeyType::Secp256k1 => &Algorithm::Secp256k1, XrplKeyType::Ed25519 => &Algorithm::Ed25519, } } } /// XRP account seed, as bytes. pub type XrplAccountSeedBytes = EntropyArray; /// XRP account address, as base58 with checksum ("Base58Check"). /// /// Docs: <https://xrpl.org/base58-encodings.html> pub type XrplAddressBase58 = String; /// XRP public key, as a hexadecimal string. Used to prepare unsigned transactions. /// /// Docs: <https://xrpl.org/cryptographic-keys.html#public-key> pub type XrplPublicKeyHex = String;
27.230769
83
0.682203
3.09375
653e59fcbdd6ab10a8f9cfbeb5d59f5f53315a37
6,114
py
Python
src/trainer/transformations.py
tiborkubik/Robust-Teeth-Detection-in-3D-Dental-Scans-by-Automated-Multi-View-Landmarking
c7d9fa29b3b94ea786da5f4ec11a11520c1b882a
[ "MIT" ]
2
2022-02-20T23:45:47.000Z
2022-03-14T07:36:53.000Z
src/trainer/transformations.py
tiborkubik/Robust-Teeth-Detection-in-3D-Dental-Scans-by-Automated-Multi-View-Landmarking
c7d9fa29b3b94ea786da5f4ec11a11520c1b882a
[ "MIT" ]
null
null
null
src/trainer/transformations.py
tiborkubik/Robust-Teeth-Detection-in-3D-Dental-Scans-by-Automated-Multi-View-Landmarking
c7d9fa29b3b94ea786da5f4ec11a11520c1b882a
[ "MIT" ]
null
null
null
""" :filename transformations.py :author Tibor Kubik :email [email protected] from Classes of custom transformations that are applied during the training as additional augmentation of the depth maps. """ import torch import random import numpy as np import torch.nn.functional as F from random import randrange from skimage.transform import resize, warp, AffineTransform class Normalize(object): """Normalization of a depth map in the value of [0, 1] for each pixel.""" def __init__(self, input_type): self.input_type = input_type def __call__(self, sample): if self.input_type == 'geom': image, landmarks, label = sample['image'], sample['landmarks'], sample['label'] mean, std = image.mean([1, 2]), image.std([1, 2]) # TODO? return {'image': image, 'landmarks': landmarks, 'label': label} class ToTensor(object): """Transformation of a training sample into a torch tensor instance.""" def __init__(self, input_type): self.input_type = input_type def __call__(self, sample): image, landmarks, label = sample['image'], sample['landmarks'], sample['label'] image = torch.from_numpy(image.copy()) if self.input_type != 'depth+geom': image = image.unsqueeze(1) image = image.permute(1, 0, 2) else: image = image.permute(2, 0, 1) landmarks = np.asarray(landmarks) landmarks = torch.from_numpy(landmarks.copy()) return {'image': image, 'landmarks': landmarks, 'label': label} class Resize(object): """Resizing of the input sample into provided dimensions.""" def __init__(self, width, height, input_type='image'): assert isinstance(width, int) assert isinstance(height, int) self.width = width self.height = height self.type = input_type def __call__(self, sample): image, landmarks, label = sample['image'], sample['landmarks'], sample['label'] resized_landmarks = landmarks.copy() if self.type == 'image': image = resize(image, (self.height, self.width), anti_aliasing=True) if self.type == 'landmarks': resized_landmarks = [] for landmark in landmarks: landmark_resized = resize(landmark, 
(self.height, self.width), anti_aliasing=True) resized_landmarks.append(landmark_resized) return {'image': image, 'landmarks': resized_landmarks, 'label': label} class RandomTranslating(object): """Randomly translate the input sample from range [-10 px, 10 px] with provided probability.""" def __init__(self, p=0.5): assert isinstance(p, float) self.p = p def __call__(self, sample): image, landmarks, label = sample['image'], sample['landmarks'], sample['label'] translated_landmarks = landmarks.copy() if np.random.rand(1) < self.p: n1 = randrange(-10, 10) n2 = randrange(-10, 10) t = AffineTransform(translation=(n1, n2)) image = warp(image, t.inverse) translated_landmarks = [] for landmark in landmarks: translated_landmarks.append(warp(landmark, t.inverse)) return {'image': image, 'landmarks': translated_landmarks, 'label': label} class RandomScaling(object): """Randomly scales the input sample with scale index from range [0.90, 1.10] with provided probability.""" def __init__(self, p=0.5): assert isinstance(p, float) self.p = p def __call__(self, sample): image, landmarks, label = sample['image'], sample['landmarks'], sample['label'] scaled_landmarks = landmarks.copy() if np.random.rand(1) < self.p: n = random.uniform(0.90, 1.10) t = AffineTransform(scale=(n, n)) image = warp(image, t.inverse) scaled_landmarks = [] for landmark in landmarks: scaled_landmarks.append(warp(landmark, t.inverse)) return {'image': image, 'landmarks': scaled_landmarks, 'label': label} class RandomRotation(object): """Randomly rotates the input sample from range [−11.25 deg, 11.25 deg] with provided probability.""" def __init__(self, p=0.5): assert isinstance(p, float) self.p = p def __call__(self, sample): image, landmarks, label = sample['image'], sample['landmarks'], sample['label'] rnd_num1 = randrange(-32, -6) rnd_num2 = randrange(6, 32) rnd_num = random.choice([rnd_num1, rnd_num2]) if np.random.rand(1) < self.p: rotated_image = self.rotate(x=image.unsqueeze(0).type(torch.FloatTensor), 
theta=np.pi/rnd_num) rotated_landmarks = [] for _, landmark in enumerate(landmarks): rotated_landmark = self.rotate(x=landmark.unsqueeze(0).unsqueeze(0).type(torch.FloatTensor), theta=np.pi/rnd_num) rotated_landmarks.append(rotated_landmark.squeeze(0)) result = torch.cat(rotated_landmarks, dim=0) return {'image': rotated_image.squeeze(0), 'landmarks': result, 'label': label} return {'image': image, 'landmarks': landmarks, 'label': label} @staticmethod def get_rotation_matrix(theta): """Returns a tensor rotation matrix with given theta value.""" theta = torch.tensor(theta) return torch.tensor([[torch.cos(theta), -torch.sin(theta), 0], [torch.sin(theta), torch.cos(theta), 0]]) def rotate(self, x, theta): rot_mat = self.get_rotation_matrix(theta)[None, ...].repeat(x.shape[0], 1, 1) grid = F.affine_grid(rot_mat, x.size(), align_corners=False) x = F.grid_sample(x, grid, align_corners=False) return x
31.678756
129
0.596663
3.21875
b2c14d3bb32a9d0a97a9d773d034e8784a7e69a4
5,641
py
Python
lljs.py
Peter9192/wind_analytics
604136be1c2ef1155bdb7579c7d123525dbe10d8
[ "Apache-2.0" ]
null
null
null
lljs.py
Peter9192/wind_analytics
604136be1c2ef1155bdb7579c7d123525dbe10d8
[ "Apache-2.0" ]
null
null
null
lljs.py
Peter9192/wind_analytics
604136be1c2ef1155bdb7579c7d123525dbe10d8
[ "Apache-2.0" ]
null
null
null
""" Identify low-level jets in wind profile data. Peter Kalverla December 2020 """ import numpy as np import xarray as xr def detect_llj(x, axis=None, falloff=0, output='strength', inverse=False): """ Identify maxima in wind profiles. args: - x : ndarray with wind profile data - axis : specifies the vertical dimension is internally used with np.apply_along_axis - falloff : threshold for labeling as low-level jet default 0; can be masked later, e.g. llj[falloff>2.0] - output : specifiy return type: 'strength' or 'index' returns (depending on <output> argument): - strength : 0 if no maximum identified, otherwise falloff strength - index : nan if no maximum identified, otherwise index along <axis>, to get the height of the jet etc. """ def inner(x, output): if inverse: x = x[::-1, ...] # Identify local maxima x = x[~np.isnan(x)] dx = x[1:] - x[:-1] ind = np.where((np.hstack((dx, 0)) < 0) & (np.hstack((0, dx)) >= 0))[0] # Last value of x cannot be llj if ind.size and ind[-1] == x.size - 1: ind = ind[:-1] # Compute the falloff strength for each local maxima if ind.size: # this assumes height increases along axis!!! strength = np.array([x[i] - min(x[i:]) for i in ind]) imax = np.argmax(strength) # Return jet_strength and index of maximum: if output == 'strength': r = max(strength) if ind.size else 0 elif output == 'index': r = ind[imax] if ind.size else 0 return r # Wrapper interface to apply 1d function to ndarray return np.apply_along_axis(inner, axis, x, output=output) def detect_llj_vectorized(xs, axis=-1, output='falloff', mask_inv=False, inverse=False): """ Identify local maxima in wind profiles. 
args: - x : ndarray with wind profile data - axis : specifies the vertical dimension - output : specifiy return type: 'falloff', 'strength' or 'index' - mask_inv : use np.ma to mask nan values returns (depending on <output> argument and whether llj is identified): - falloff : 0 or largest difference between local max and subseq min - strength : 0 or wind speed at jet height - index : -1 or index along <axis> """ # Move <axis> to first dimension, to easily index and iterate over it. xv = np.rollaxis(xs, axis) if inverse: xv = xv[::-1, ...] if mask_inv: xv = np.ma.masked_invalid(xv) # Set initial arrays min_elem = xv[-1].copy() max_elem = np.zeros(min_elem.shape) max_diff = np.zeros(min_elem.shape) max_idx = np.ones(min_elem.shape, dtype=int) * (-1) # Start at end of array and search backwards for larger differences. for i, elem in reversed(list(enumerate(xv))): min_elem = np.minimum(elem, min_elem) new_max_identified = elem - min_elem > max_diff max_diff = np.where(new_max_identified, elem - min_elem, max_diff) max_elem = np.where(new_max_identified, elem, max_elem) max_idx = np.where(new_max_identified, i, max_idx) if output == 'falloff': r = max_diff elif output == 'strength': r = max_elem elif output == 'index': r = max_idx else: raise ValueError('Invalid argument for <output>: %s' % output) return r def detect_llj_xarray(da, inverse=False): """ Identify local maxima in wind profiles. args: - da : xarray.DataArray with wind profile data - inverse : to flip the array if the data is stored upside down returns: : xarray.Dataset with vertical dimension removed containing: - falloff : 0 or largest difference between local max and subseq min - strength : 0 or wind speed at jet height - index : -1 or index along <axis> Note: vertical dimension should be labeled 'level' and axis=1 """ # Move <axis> to first dimension, to easily index and iterate over it. xv = np.rollaxis(da.values, 1) if inverse: xv = xv[::-1, ...] 
# Set initial arrays min_elem = xv[-1].copy() max_elem = np.zeros(min_elem.shape) max_diff = np.zeros(min_elem.shape) max_idx = np.ones(min_elem.shape, dtype=int) * (-1) # Start at end of array and search backwards for larger differences. for i, elem in reversed(list(enumerate(xv))): min_elem = np.minimum(elem, min_elem) new_max_identified = elem - min_elem > max_diff max_diff = np.where(new_max_identified, elem - min_elem, max_diff) max_elem = np.where(new_max_identified, elem, max_elem) max_idx = np.where(new_max_identified, i, max_idx) # Combine the results in a dataframe get_height = lambda i: np.where(i > 0, da.level.values[i], da.level.values[ -1]) dims = da.isel(level=0).drop('level').dims coords = da.isel(level=0).drop('level').coords lljs = xr.Dataset( { 'falloff': (dims, max_diff), 'strength': (dims, max_elem), 'level': (dims, get_height(max_idx)), }, coords=coords) print( 'Beware! Level is also filled if no jet is detected! ' 'Use ds.sel(level=lljs.level).where(lljs.falloff>0) to get rid of them' ) return lljs
34.820988
80
0.591916
3.21875
08963ea78b7a7c8e6fdbd6c088271a84b9c0754d
5,743
go
Go
pkg/langserver/diagnostics.go
lizelive/yodk
dc37d204598e1ff39ccbbd3bec5ba897486c2df6
[ "MIT" ]
59
2019-11-19T08:58:08.000Z
2021-10-02T20:23:48.000Z
pkg/langserver/diagnostics.go
lizelive/yodk
dc37d204598e1ff39ccbbd3bec5ba897486c2df6
[ "MIT" ]
114
2019-11-05T08:15:53.000Z
2021-12-27T21:20:52.000Z
pkg/langserver/diagnostics.go
lizelive/yodk
dc37d204598e1ff39ccbbd3bec5ba897486c2df6
[ "MIT" ]
14
2020-08-01T17:42:41.000Z
2021-10-21T04:24:45.000Z
package langserver

import (
	"context"
	"log"
	"net/url"
	"path/filepath"
	"strings"

	"github.com/dbaumgarten/yodk/pkg/lsp"
	"github.com/dbaumgarten/yodk/pkg/nolol"
	"github.com/dbaumgarten/yodk/pkg/nolol/nast"
	"github.com/dbaumgarten/yodk/pkg/optimizers"
	"github.com/dbaumgarten/yodk/pkg/parser"
	"github.com/dbaumgarten/yodk/pkg/parser/ast"
	"github.com/dbaumgarten/yodk/pkg/validators"
)

// fs is a special filesystem that retrieves the main file from the cache and all
// other files from the filesystem. It is used when compiling a nolol file, as nolol files may
// depend on files from the file-system using includes
type fs struct {
	*nolol.DiskFileSystem
	ls       *LangServer
	Mainfile string
}

// getFilePath converts a file:// document URI into a native filesystem path.
// NOTE(review): on Windows the parsed URI path carries a leading backslash
// (e.g. \C:\...) which is trimmed here; the HasSuffix("\\\\") guard looks like
// it is meant to spare UNC-style paths, but HasPrefix would seem more natural
// — confirm intent before changing.
func getFilePath(u lsp.DocumentURI) string {
	ur, _ := url.Parse(string(u))
	s := filepath.FromSlash(ur.Path)
	if !strings.HasSuffix(s, "\\\\") {
		s = strings.TrimPrefix(s, "\\")
	}
	return s
}

// newfs builds an fs rooted at the directory of mainfile, so that includes are
// resolved relative to the file being compiled.
func newfs(ls *LangServer, mainfile lsp.DocumentURI) *fs {
	return &fs{
		ls: ls,
		DiskFileSystem: &nolol.DiskFileSystem{
			Dir: filepath.Dir(getFilePath(mainfile)),
		},
		Mainfile: string(mainfile),
	}
}

// Get returns the content of name: the main file comes from the editor cache
// (it may have unsaved changes), everything else from disk.
func (f fs) Get(name string) (string, error) {
	if name == f.Mainfile {
		return f.ls.cache.Get(lsp.DocumentURI(name))
	}
	return f.DiskFileSystem.Get(name)
}

// convertToErrorlist normalizes the different error shapes the parser can
// return (nil, a list, or a single error) into a parser.Errors slice.
// Unknown error types are logged and yield nil.
func convertToErrorlist(errs error) parser.Errors {
	if errs == nil {
		return make(parser.Errors, 0)
	}
	switch e := errs.(type) {
	case parser.Errors:
		return e
	case *parser.Error:
		// if it is a single error, convert it to a one-element list
		errlist := make(parser.Errors, 1)
		errlist[0] = e
		return errlist
	default:
		log.Printf("Unknown error type: %T\n (%s)", errs, errs.Error())
		return nil
	}
}

// convertErrorsToDiagnostics maps every parser error to an LSP diagnostic with
// the given source label and severity.
func convertErrorsToDiagnostics(errs parser.Errors, source string,
	severity lsp.DiagnosticSeverity) []lsp.Diagnostic {
	diags := make([]lsp.Diagnostic, 0)
	for _, err := range errs {
		diag := convertErrorToDiagnostic(err, source, severity)
		diags = append(diags, diag)
	}
	return diags
}

// convertErrorToDiagnostic converts a single parser error to an LSP
// diagnostic. Parser positions are 1-based; LSP positions are 0-based, hence
// the -1 adjustments.
func convertErrorToDiagnostic(err *parser.Error, source string,
	severity lsp.DiagnosticSeverity) lsp.Diagnostic {
	return lsp.Diagnostic{
		Source:   source,
		Message:  err.Message,
		Severity: severity,
		Range: lsp.Range{
			Start: lsp.Position{
				Line:      float64(err.StartPosition.Line) - 1,
				Character: float64(err.StartPosition.Coloumn) - 1,
			},
			End: lsp.Position{
				Line:      float64(err.EndPosition.Line) - 1,
				Character: float64(err.EndPosition.Coloumn) - 1,
			},
		},
	}
}

// validateCodeLength checks whether the yolol code fits the configured length
// limit. In "optimize" mode an over-long script is only reported if it is
// still too long after running the optimizer on (a copy of the behavior of)
// the parsed AST. Returns at most one warning diagnostic.
func (s *LangServer) validateCodeLength(uri lsp.DocumentURI, text string, parsed *ast.Program) []lsp.Diagnostic {
	// check if the code-length of yolol-code is OK
	if s.settings.Yolol.LengthChecking.Mode != LengthCheckModeOff {
		lengtherror := validators.ValidateCodeLength(text)
		// check if the code is small enough after optimizing it
		if lengtherror != nil && s.settings.Yolol.LengthChecking.Mode == LengthCheckModeOptimize && parsed != nil {
			opt := optimizers.NewCompoundOptimizer()
			err := opt.Optimize(parsed)
			if err == nil {
				printer := parser.Printer{}
				optimized, err := printer.Print(parsed)
				if err == nil {
					lengtherror = validators.ValidateCodeLength(optimized)
				}
			}
		}
		if lengtherror != nil {
			err := lengtherror.(*parser.Error)
			diag := convertErrorToDiagnostic(err, "validator", lsp.SeverityWarning)
			return []lsp.Diagnostic{diag}
		}
	}
	return []lsp.Diagnostic{}
}

// validateAvailableOperations reports operations that are not supported by
// the chip type chosen for this file (auto-detected from settings and uri).
func (s *LangServer) validateAvailableOperations(uri lsp.DocumentURI, parsed ast.Node) []lsp.Diagnostic {
	chipType, _ := validators.AutoChooseChipType(s.settings.Yolol.ChipType, string(uri))
	err := validators.ValidateAvailableOperations(parsed, chipType)
	if err != nil {
		errors := convertToErrorlist(err)
		return convertErrorsToDiagnostics(errors, "validator", lsp.SeverityError)
	}
	return []lsp.Diagnostic{}
}

// Diagnose asynchronously parses and validates the document at uri and
// publishes the resulting diagnostics to the client. Yolol files are parsed
// and validated directly; nolol files go through the converter pipeline
// (includes, code expansion, line numbering). Files with any other extension
// are ignored.
func (s *LangServer) Diagnose(ctx context.Context, uri lsp.DocumentURI) {
	go func() {
		var parserError error
		var validationDiagnostics []lsp.Diagnostic
		var diagRes DiagnosticResults
		text, _ := s.cache.Get(uri)
		// Start from the previous diagnostic results (if any) so fields we do
		// not recompute are preserved.
		prevDiag, err := s.cache.GetDiagnostics(uri)
		if err == nil {
			diagRes = *prevDiag
		}
		if strings.HasSuffix(string(uri), ".yolol") {
			p := parser.NewParser()
			var parsed *ast.Program
			parsed, parserError = p.Parse(text)
			if parsed != nil {
				diagRes.Variables = findUsedVariables(parsed)
			}
			if parserError == nil {
				validationDiagnostics = s.validateAvailableOperations(uri, parsed)
				validationDiagnostics = append(validationDiagnostics, s.validateCodeLength(uri, text, parsed)...)
			}
		} else if strings.HasSuffix(string(uri), ".nolol") {
			mainfile := string(uri)
			converter := nolol.NewConverter()
			converter.SetChipType(s.settings.Yolol.ChipType)
			included := converter.LoadFileEx(mainfile, newfs(s, uri)).ProcessIncludes()
			parserError = included.Error()
			if parserError == nil {
				intermediate := included.GetIntermediateProgram()
				// Analyze() will mutate the ast, so we create a copy of it
				analyse := nast.CopyAst(intermediate).(*nast.Program)
				analysis, err := nolol.Analyse(analyse)
				if err == nil {
					diagRes.AnalysisReport = analysis
				}
				parserError = included.ProcessCodeExpansion().ProcessNodes().ProcessLineNumbers().ProcessFinalize().Error()
			}
		} else {
			return
		}
		s.cache.SetDiagnostics(uri, diagRes)
		parserErrors := convertToErrorlist(parserError)
		if parserErrors == nil {
			// Unknown error type: already logged; publish nothing.
			return
		}
		diags := convertErrorsToDiagnostics(parserErrors, "parser", lsp.SeverityError)
		if validationDiagnostics != nil {
			diags = append(diags, validationDiagnostics...)
		}
		s.client.PublishDiagnostics(ctx, &lsp.PublishDiagnosticsParams{
			URI:         uri,
			Diagnostics: diags,
		})
	}()
}
27.218009
118
0.705032
3.046875
62725e36e9c4383385bbd866c1009e735beeb419
2,979
rs
Rust
lexical-core/src/itoa/naive.rs
ignatenkobrain/rust-lexical
fefe81850e5678450ec0f001f562b182694caadf
[ "Apache-2.0", "MIT" ]
null
null
null
lexical-core/src/itoa/naive.rs
ignatenkobrain/rust-lexical
fefe81850e5678450ec0f001f562b182694caadf
[ "Apache-2.0", "MIT" ]
null
null
null
lexical-core/src/itoa/naive.rs
ignatenkobrain/rust-lexical
fefe81850e5678450ec0f001f562b182694caadf
[ "Apache-2.0", "MIT" ]
null
null
null
//! Slow, simple lexical integer-to-string conversion routine.

use util::*;

// Naive itoa algorithm.
//
// Writes the digits of `$value` (in base `$radix`) into `$buffer`,
// moving `$index` backwards from its current position; on exit `$index`
// points at the most-significant digit written.
macro_rules! naive_algorithm {
    ($value:ident, $radix:ident, $buffer:ident, $index:ident) => ({
        while $value >= $radix {
            let r = ($value % $radix).as_usize();
            $value /= $radix;

            // This is always safe, since r must be [0, radix).
            $index -= 1;
            unchecked_index_mut!($buffer[$index] = digit_to_char(r));
        }

        // Decode last digit.
        let r = ($value % $radix).as_usize();

        // This is always safe, since r must be [0, radix).
        $index -= 1;
        unchecked_index_mut!($buffer[$index] = digit_to_char(r));
    });
}

// Naive implementation for radix-N numbers.
// Precondition: `value` must be non-negative and mutable.
// Returns the buffer index of the first (most-significant) digit;
// the digits occupy buffer[index..].
perftools_inline!{
fn naive<T>(mut value: T, radix: u32, buffer: &mut [u8])
    -> usize
    where T: UnsignedInteger
{
    // Decode all but last digit, 1 at a time.
    let mut index = buffer.len();
    let radix: T = as_cast(radix);
    naive_algorithm!(value, radix, buffer, index);
    index
}}

// Trait exposing the naive itoa algorithm for unsigned integer types.
pub(crate) trait Naive {
    // Export integer to string.
    fn naive(self, radix: u32, buffer: &mut [u8]) -> usize;
}

// Implement naive for type.
macro_rules! naive_impl {
    ($($t:ty)*) => ($(
        impl Naive for $t {
            perftools_inline_always!{
            fn naive(self, radix: u32, buffer: &mut [u8]) -> usize {
                naive(self, radix, buffer)
            }}
        }
    )*);
}

naive_impl! { u8 u16 u32 u64 usize }

// Naive implementation for 128-bit radix-N numbers.
// Precondition: `value` must be non-negative and mutable.
// The u128 is split into up to three u64-sized chunks via u128_divrem,
// each rendered with the same digit loop as the smaller types.
perftools_inline!{
#[cfg(has_i128)]
fn naive_u128(value: u128, radix: u32, buffer: &mut [u8])
    -> usize
{
    // Decode all but last digit, 1 at a time.
    let (divisor, digits_per_iter, d_cltz) = u128_divisor(radix);
    let radix: u64 = as_cast(radix);

    // To deal with internal 0 values or values with internal 0 digits set,
    // we store the starting index, and if not all digits are written,
    // we just skip down `digits` digits for the next value.
    let mut index = buffer.len();
    let mut start_index = index;
    let (value, mut low) = u128_divrem(value, divisor, d_cltz);
    naive_algorithm!(low, radix, buffer, index);
    if value != 0 {
        // A higher chunk exists: pad the lower chunk with leading zeros
        // by clamping index to the fixed chunk width.
        start_index -= digits_per_iter;
        index = index.min(start_index);
        let (value, mut mid) = u128_divrem(value, divisor, d_cltz);
        naive_algorithm!(mid, radix, buffer, index);
        if value != 0 {
            start_index -= digits_per_iter;
            index = index.min(start_index);
            let mut high = value as u64;
            naive_algorithm!(high, radix, buffer, index);
        }
    }
    index
}}

#[cfg(has_i128)]
impl Naive for u128 {
    perftools_inline_always!{
    fn naive(self, radix: u32, buffer: &mut [u8]) -> usize {
        naive_u128(self, radix, buffer)
    }}
}
30.090909
75
0.598187
3.28125
330e71b3b4bc8bea5b484ce2931ef4411a75120b
2,311
py
Python
2d-lin_sep.py
rzepinskip/optimization-svm
9682980e19d5fc9f09353aa1284e86874e954aec
[ "MIT" ]
null
null
null
2d-lin_sep.py
rzepinskip/optimization-svm
9682980e19d5fc9f09353aa1284e86874e954aec
[ "MIT" ]
2
2020-01-16T21:35:43.000Z
2020-03-24T18:02:41.000Z
2d-lin_sep.py
rzepinskip/optimization-svm
9682980e19d5fc9f09353aa1284e86874e954aec
[ "MIT" ]
null
null
null
import numpy as np
from matplotlib import pyplot as plt

from optsvm.svm import SVM

# Toy 2-D linearly separable data: three negative and three positive points.
x_neg = np.array([[3, 4], [1, 4], [2, 3]])
y_neg = np.array([-1, -1, -1])
x_pos = np.array([[6, -1], [7, -1], [5, -3]])
y_pos = np.array([1, 1, 1])
x1 = np.linspace(-10, 10)
x = np.vstack((np.linspace(-10, 10), np.linspace(-10, 10)))

# Data for the next section
X = np.vstack((x_neg, x_pos))
y = np.concatenate((y_neg, y_pos))

# Plot
fig = plt.figure(figsize=(10, 10))
plt.scatter(x_neg[:, 0], x_neg[:, 1], marker="x", color="r", label="Negative -1")
plt.scatter(x_pos[:, 0], x_pos[:, 1], marker="o", color="b", label="Positive +1")
plt.plot(x1, x1 - 3, color="darkblue")
plt.plot(x1, x1 - 7, linestyle="--", alpha=0.3, color="b")
plt.plot(x1, x1 + 1, linestyle="--", alpha=0.3, color="r")
plt.xlim(-2, 12)
plt.ylim(-7, 7)
plt.xticks(np.arange(0, 10, step=1))
plt.yticks(np.arange(-5, 5, step=1))

# Lines
plt.axvline(0, color="black", alpha=0.5)
plt.axhline(0, color="black", alpha=0.5)
plt.plot([2, 6], [3, -1], linestyle="-", color="darkblue", alpha=0.5)
plt.plot([4, 6], [1, 1], [6, 6], [1, -1], linestyle=":", color="darkblue", alpha=0.5)
plt.plot(
    [0, 1.5], [0, -1.5], [6, 6], [1, -1], linestyle=":", color="darkblue", alpha=0.5
)

# Annotations
# Fix: the `s=` keyword of plt.annotate was renamed to `text` in
# Matplotlib 3.3 and subsequently removed, so `s=` crashes on modern
# Matplotlib. Passing the text positionally works on all versions.
# LaTeX strings are raw literals to silence invalid-escape warnings;
# the byte values of the strings are unchanged.
plt.annotate(r"$A \ (6,-1)$", xy=(5, -1), xytext=(6, -1.5))
plt.annotate(
    r"$B \ (2,3)$", xy=(2, 3), xytext=(2, 3.5)
)  # , arrowprops = {'width':.2, 'headwidth':8})
plt.annotate(r"$2$", xy=(5, 1.2), xytext=(5, 1.2))
plt.annotate(r"$2$", xy=(6.2, 0.5), xytext=(6.2, 0.5))
plt.annotate(r"$2\sqrt{2}$", xy=(4.5, -0.5), xytext=(4.5, -0.5))
plt.annotate(r"$2\sqrt{2}$", xy=(2.5, 1.5), xytext=(2.5, 1.5))
plt.annotate(r"$w^Tx + b = 0$", xy=(8, 4.5), xytext=(8, 4.5))
plt.annotate(
    r"$(\frac{1}{4},-\frac{1}{4}) \binom{x_1}{x_2}- \frac{3}{4} = 0$",
    xy=(7.5, 4),
    xytext=(7.5, 4),
)
plt.annotate(r"$\frac{3}{\sqrt{2}}$", xy=(0.5, -1), xytext=(0.5, -1))

# Labels and show
plt.xlabel("$x_1$")
plt.ylabel("$x_2$")
plt.legend(loc="lower right")
plt.show()

# Fit our SVM implementation on the toy data.
svm = SVM(C=10)
svm.fit(X, y)

# Display results
print("---Our results")
print("w = ", svm.w_.flatten())
print("b = ", svm.b_)

# Compare against scikit-learn's reference implementation.
from sklearn.svm import SVC

clf = SVC(C=10, kernel="linear")
clf.fit(X, y.ravel())

print("---SVM library")
print("w = ", clf.coef_)
print("b = ", clf.intercept_)
30.012987
85
0.568585
3.0625
aef7e7c2a975620456846d7b4b268a42a96b390e
1,501
sql
SQL
sql/error_info.sql
pllua/pllua-deprecated
942388ac518b6890063e8158c7f5eb52712a67f2
[ "PostgreSQL", "Unlicense", "MIT" ]
null
null
null
sql/error_info.sql
pllua/pllua-deprecated
942388ac518b6890063e8158c7f5eb52712a67f2
[ "PostgreSQL", "Unlicense", "MIT" ]
null
null
null
sql/error_info.sql
pllua/pllua-deprecated
942388ac518b6890063e8158c7f5eb52712a67f2
[ "PostgreSQL", "Unlicense", "MIT" ]
1
2021-06-24T02:03:18.000Z
2021-06-24T02:03:18.000Z
do $$ local testfunc = function () error("my error") end local f = function() local status, err = pcall(testfunc) if (err) then error(err) end end f() $$language pllua; create or replace function pg_temp.function_with_error() returns integer as $$ local testfunc = function () error("my error") end local f = function() local status, err = pcall(testfunc) if (err) then error(err) end end f() $$language plluau; create or replace function pg_temp.second_function() returns void as $$ local k = server.execute('select pg_temp.function_with_error()') [0] $$language plluau; do $$ server.execute('select pg_temp.second_function()') $$language pllua; do $$ local status, err = subtransaction(function() assert(1==2) end) if (err) then error(err) end $$language pllua; do $$ info({message="info message", hint="info hint", detail="info detail"}) $$language pllua; do $$ info("info message") $$language pllua; do $$ warning({message="warning message", hint="warning hint", detail="warning detail"}) $$language pllua; do $$ warning("warning message") $$language pllua; do $$ error({message="error message", hint="error hint", detail="error detail"}) $$language pllua; do $$ error("error message") $$language pllua; do $$ info() $$language pllua; do $$ warning() $$language pllua; do $$ error() $$language pllua; do $$ local status, err = subtransaction(function() local _ = fromstring('no_type_text','qwerty') end) if (err) then print(err) end $$ language pllua
18.530864
96
0.690207
3.328125
6364a84db8d9c16cc598090ab621288ec02090e2
8,303
kt
Kotlin
snakebroadcast/src/main/java/com/uiza/sdkbroadcast/helpers/ICameraHelper.kt
uizaio/snake.android.sdk
681adca006f61e38151cf73cf711a951f05c57dc
[ "BSD-2-Clause" ]
null
null
null
snakebroadcast/src/main/java/com/uiza/sdkbroadcast/helpers/ICameraHelper.kt
uizaio/snake.android.sdk
681adca006f61e38151cf73cf711a951f05c57dc
[ "BSD-2-Clause" ]
null
null
null
snakebroadcast/src/main/java/com/uiza/sdkbroadcast/helpers/ICameraHelper.kt
uizaio/snake.android.sdk
681adca006f61e38151cf73cf711a951f05c57dc
[ "BSD-2-Clause" ]
null
null
null
package com.uiza.sdkbroadcast.helpers import android.content.Context import android.view.MotionEvent import com.pedro.encoder.input.gl.render.filters.BaseFilterRender import com.pedro.encoder.input.video.CameraHelper.Facing import com.pedro.rtplibrary.view.OpenGlView import com.uiza.sdkbroadcast.interfaces.UZCameraChangeListener import com.uiza.sdkbroadcast.interfaces.UZCameraOpenException import com.uiza.sdkbroadcast.interfaces.UZRecordListener import com.uiza.sdkbroadcast.interfaces.UZTakePhotoCallback import com.uiza.sdkbroadcast.profile.AudioAttributes import com.uiza.sdkbroadcast.profile.VideoAttributes import com.uiza.sdkbroadcast.profile.VideoSize import java.io.IOException interface ICameraHelper { val mOpenGlView: OpenGlView? /** * @param reTries retry connect reTries times */ fun setConnectReTries(reTries: Int) fun setUZCameraChangeListener(uzCameraChangeListener: UZCameraChangeListener?) fun setUZRecordListener(uzRecordListener: UZRecordListener?) fun replaceView(openGlView: OpenGlView?) fun replaceView(context: Context?) fun setVideoAttributes(attributes: VideoAttributes?) fun setAudioAttributes(attributes: AudioAttributes?) fun setLandscape(landscape: Boolean) /** * Set filter in position 0. * * @param filterReader filter to set. You can modify parameters to filter after set it to stream. */ fun setFilter(filterReader: BaseFilterRender?) /** * @param filterPosition position of filter * @param filterReader filter to set. You can modify parameters to filter after set it to stream. */ fun setFilter(filterPosition: Int, filterReader: BaseFilterRender?) /** * Get Anti alias is enabled. * * @return true is enabled, false is disabled. */ val isAAEnabled: Boolean /** * Enable or disable Anti aliasing (This method use FXAA). * * @param aAEnabled true is AA enabled, false is AA disabled. False by default. 
*/ fun enableAA(aAEnabled: Boolean) /** * get Stream Width */ val streamWidth: Int /** * get Stream Height */ val streamHeight: Int /** * Enable a muted microphone, can be called before, while and after broadcast. */ fun enableAudio() /** * Mute microphone, can be called before, while and after broadcast. */ fun disableAudio() /** * Get mute state of microphone. * * @return true if muted, false if enabled */ val isAudioMuted: Boolean /** * You will do a portrait broadcast * * @return true if success, false if you get a error (Normally because the encoder selected * doesn't support any configuration seated or your device hasn't a H264 encoder). */ fun prepareBroadCast(): Boolean /** * @param isLandscape boolean * @return true if success, false if you get a error (Normally because the encoder selected * doesn't support any configuration seated or your device hasn't a H264 encoder). */ fun prepareBroadCast(isLandscape: Boolean): Boolean /** * Call this method before use [.startBroadCast]. * * @param audioAttributes [AudioAttributes] If null you will do a broadcast without audio. * @param videoAttributes [VideoAttributes] * @param isLandscape boolean you will broadcast is landscape * @return true if success, false if you get a error (Normally because the encoder selected * doesn't support any configuration seated or your device hasn't a AAC encoder). */ fun prepareBroadCast( audioAttributes: AudioAttributes?, videoAttributes: VideoAttributes, isLandscape: Boolean ): Boolean /** * Get video camera state * * @return true if disabled, false if enabled */ val isVideoEnabled: Boolean /** * Need be called after [.prepareBroadCast] or/and [.prepareBroadCast]. * * @param broadCastUrl of the broadcast like: rtmp://ip:port/application/stream_name * * * RTMP: rtmp://192.168.1.1:1935/fmp4/live_stream_name * [.startPreview] to resolution seated in * [.prepareBroadCast]. * If you never startPreview this method [.startPreview] for you to resolution seated in * [.prepareBroadCast]. 
*/ fun startBroadCast(broadCastUrl: String?) /** * Stop BroadCast started with [.startBroadCast] */ fun stopBroadCast() /** * Get broadcast state. * * @return true if broadcasting, false if not broadcasting. */ val isBroadCasting: Boolean /** * @return list of [VideoSize] */ val supportedResolutions: List<VideoSize?>? /** * Switch camera used. Can be called on preview or while stream, ignored with preview off. * * @throws UZCameraOpenException If the other camera doesn't support same resolution. */ @Throws(UZCameraOpenException::class) fun switchCamera() /** * Start camera preview. Ignored, if stream or preview is started. * resolution of preview 640x480 * * @param cameraFacing front or back camera. Like: [com.pedro.encoder.input.video.CameraHelper.Facing.BACK] * [com.pedro.encoder.input.video.CameraHelper.Facing.FRONT] */ fun startPreview(cameraFacing: Facing?) /** * Start camera preview. Ignored, if stream or preview is started. * * @param cameraFacing front or back camera. Like: [com.pedro.encoder.input.video.CameraHelper.Facing.BACK] * [com.pedro.encoder.input.video.CameraHelper.Facing.FRONT] * @param width of preview in px. * @param height of preview in px. */ fun startPreview(cameraFacing: Facing?, width: Int, height: Int) /** * is Front Camera */ val isFrontCamera: Boolean /** * check is on preview * * @return true if onpreview, false if not preview. */ val isOnPreview: Boolean /** * Stop camera preview. Ignored if streaming or already stopped. You need call it after * * stopStream to release camera properly if you will close activity. */ fun stopPreview() /** * Get record state. * * @return true if recording, false if not recoding. */ val isRecording: Boolean /** * Start record a MP4 video. Need be called while stream. * * @param savePath where file will be saved. * @throws IOException If you init it before start stream. */ @Throws(IOException::class) fun startRecord(savePath: String?) /** * Stop record MP4 video started with @startRecord. 
If you don't call it file will be unreadable. */ fun stopRecord() /** * take a photo * * @param callback [UZTakePhotoCallback] */ fun takePhoto(callback: UZTakePhotoCallback?) /** * Set video bitrate of H264 in kb while stream. * * @param bitrate H264 in kb. */ fun setVideoBitrateOnFly(bitrate: Int) /** * @return bitrate in kps */ val bitrate: Int fun reTry(delay: Long, reason: String?): Boolean /** * Check support Flashlight * if use Camera1 always return false * * @return true if support, false if not support. */ val isLanternSupported: Boolean /** * required: <uses-permission android:name="android.permission.FLASHLIGHT"></uses-permission> */ @Throws(Exception::class) fun enableLantern() /** * required: <uses-permission android:name="android.permission.FLASHLIGHT"></uses-permission> */ fun disableLantern() val isLanternEnabled: Boolean /** * Return max zoom level * * @return max zoom level */ val maxZoom: Float /** * Return current zoom level * * @return current zoom level */ /** * Set zoomIn or zoomOut to camera. * Use this method if you use a zoom slider. * * @param level Expected to be >= 1 and <= max zoom level * @see Camera2Base.getMaxZoom */ var zoom: Float /** * Set zoomIn or zoomOut to camera. * * @param event motion event. Expected to get event.getPointerCount() > 1 */ fun setZoom(event: MotionEvent?) }
28.829861
111
0.657473
3.046875
f14ffd020b17c8f1a5a89a1bc7cc4ba4f22a988b
38,965
rb
Ruby
lib/orm_asciidoctor.rb
gvaish/orm_asciidoctor
f9b2fc999d4c620007462ee7b4bbe71beaf45de6
[ "MIT" ]
1
2017-01-16T06:06:53.000Z
2017-01-16T06:06:53.000Z
lib/orm_asciidoctor.rb
gvaish/orm_asciidoctor
f9b2fc999d4c620007462ee7b4bbe71beaf45de6
[ "MIT" ]
null
null
null
lib/orm_asciidoctor.rb
gvaish/orm_asciidoctor
f9b2fc999d4c620007462ee7b4bbe71beaf45de6
[ "MIT" ]
null
null
null
RUBY_ENGINE = 'unknown' unless defined? RUBY_ENGINE require 'strscan' require 'set' $:.unshift(File.dirname(__FILE__)) # Public: Methods for parsing Asciidoc input files and rendering documents # using eRuby templates. # # Asciidoc documents comprise a header followed by zero or more sections. # Sections are composed of blocks of content. For example: # # = Doc Title # # == Section 1 # # This is a paragraph block in the first section. # # == Section 2 # # This section has a paragraph block and an olist block. # # . Item 1 # . Item 2 # # Examples: # # Use built-in templates: # # lines = File.readlines("your_file.asc") # doc = Asciidoctor::Document.new(lines) # html = doc.render # File.open("your_file.html", "w+") do |file| # file.puts html # end # # Use custom (Tilt-supported) templates: # # lines = File.readlines("your_file.asc") # doc = Asciidoctor::Document.new(lines, :template_dir => 'templates') # html = doc.render # File.open("your_file.html", "w+") do |file| # file.puts html # end module Asciidoctor module SafeMode # A safe mode level that disables any of the security features enforced # by Asciidoctor (Ruby is still subject to its own restrictions). UNSAFE = 0; # A safe mode level that closely parallels safe mode in AsciiDoc. This value # prevents access to files which reside outside of the parent directory of # the source file and disables any macro other than the include::[] macro. SAFE = 1; # A safe mode level that disallows the document from setting attributes # that would affect the rendering of the document, in addition to all the # security features of SafeMode::SAFE. For instance, this level disallows # changing the backend or the source-highlighter using an attribute defined # in the source document. This is the most fundamental level of security # for server-side deployments (hence the name). 
SERVER = 10; # A safe mode level that disallows the document from attempting to read # files from the file system and including the contents of them into the # document, in additional to all the security features of SafeMode::SERVER. # For instance, this level disallows use of the include::[] macro and the # embedding of binary content (data uri), stylesheets and JavaScripts # referenced by the document.(Asciidoctor and trusted extensions may still # be allowed to embed trusted content into the document). # # Since Asciidoctor is aiming for wide adoption, this level is the default # and is recommended for server-side deployments. SECURE = 20; # A planned safe mode level that disallows the use of passthrough macros and # prevents the document from setting any known attributes, in addition to all # the security features of SafeMode::SECURE. # # Please note that this level is not currently implemented (and therefore not # enforced)! #PARANOID = 100; end # Flags to control compliance with the behavior of AsciiDoc module Compliance # AsciiDoc supports both single-line and underlined # section titles. # This option disables the underlined variant. # Compliance value: true @underline_style_section_titles = true class << self attr_accessor :underline_style_section_titles end # Asciidoctor will recognize commonly-used Markdown syntax # to the degree it does not interfere with existing # AsciiDoc syntax and behavior. 
# Compliance value: false @markdown_syntax = true class << self attr_accessor :markdown_syntax end end # The root path of the Asciidoctor gem ROOT_PATH = File.expand_path(File.join(File.dirname(__FILE__), '..')) # Flag to indicate whether encoding of external strings needs to be forced to UTF-8 # _All_ input data must be force encoded to UTF-8 if Encoding.default_external is *not* UTF-8 # Address failures performing string operations that are reported as "invalid byte sequence in US-ASCII" # Ruby 1.8 doesn't seem to experience this problem (perhaps because it isn't validating the encodings) FORCE_ENCODING = RUBY_VERSION > '1.9' && Encoding.default_external != Encoding::UTF_8 # Flag to indicate that line length should be calculated using a unicode mode hint FORCE_UNICODE_LINE_LENGTH = RUBY_VERSION < '1.9' # The endline character to use when rendering output EOL = "\n" # The default document type # Can influence markup generated by render templates DEFAULT_DOCTYPE = 'article' # The backend determines the format of the rendered output, default to htmlbook (for ORM) DEFAULT_BACKEND = 'htmlbook' DEFAULT_STYLESHEET_KEYS = ['', 'DEFAULT'].to_set DEFAULT_STYLESHEET_NAME = 'asciidoctor.css' # Pointers to the preferred version for a given backend. 
BACKEND_ALIASES = { 'html' => 'html5', 'docbook' => 'docbook45', 'htmlbook' => 'htmlbook' } # Default page widths for calculating absolute widths DEFAULT_PAGE_WIDTHS = { 'docbook' => 425 } # Default extensions for the respective base backends DEFAULT_EXTENSIONS = { 'html' => '.html', 'htmlbook' => '.html', 'docbook' => '.xml', 'asciidoc' => '.ad', 'markdown' => '.md' } # Set of file extensions recognized as AsciiDoc documents (stored as a truth hash) ASCIIDOC_EXTENSIONS = { '.asciidoc' => true, '.adoc' => true, '.ad' => true, '.asc' => true, '.txt' => true } SECTION_LEVELS = { '=' => 0, '-' => 1, '~' => 2, '^' => 3, '+' => 4 } ADMONITION_STYLES = ['NOTE', 'TIP', 'IMPORTANT', 'WARNING', 'CAUTION'].to_set PARAGRAPH_STYLES = ['comment', 'example', 'literal', 'listing', 'normal', 'pass', 'quote', 'sidebar', 'source', 'verse', 'abstract', 'partintro'].to_set VERBATIM_STYLES = ['literal', 'listing', 'source', 'verse'].to_set DELIMITED_BLOCKS = { '--' => [:open, ['comment', 'example', 'literal', 'listing', 'pass', 'quote', 'sidebar', 'source', 'verse', 'admonition', 'abstract', 'partintro'].to_set], '----' => [:listing, ['literal', 'source'].to_set], '....' 
=> [:literal, ['listing', 'source'].to_set], '====' => [:example, ['admonition'].to_set], '****' => [:sidebar, Set.new], '____' => [:quote, ['verse'].to_set], '""' => [:quote, ['verse'].to_set], '++++' => [:pass, Set.new], '|===' => [:table, Set.new], ',===' => [:table, Set.new], ':===' => [:table, Set.new], '!===' => [:table, Set.new], '////' => [:comment, Set.new], '```' => [:fenced_code, Set.new], '~~~' => [:fenced_code, Set.new] } DELIMITED_BLOCK_LEADERS = DELIMITED_BLOCKS.keys.map {|key| key[0..1] }.to_set BREAK_LINES = { '\'' => :ruler, '-' => :ruler, '*' => :ruler, '_' => :ruler, '<' => :page_break } #LIST_CONTEXTS = [:ulist, :olist, :dlist, :colist] NESTABLE_LIST_CONTEXTS = [:ulist, :olist, :dlist] # TODO validate use of explicit style name above ordered list (this list is for selecting an implicit style) ORDERED_LIST_STYLES = [:arabic, :loweralpha, :lowerroman, :upperalpha, :upperroman] #, :lowergreek] ORDERED_LIST_MARKER_PATTERNS = { :arabic => /\d+[.>]/, :loweralpha => /[a-z]\./, :lowerroman => /[ivx]+\)/, :upperalpha => /[A-Z]\./, :upperroman => /[IVX]+\)/ #:lowergreek => /[a-z]\]/ } ORDERED_LIST_KEYWORDS = { 'loweralpha' => 'a', 'lowerroman' => 'i', 'upperalpha' => 'A', 'upperroman' => 'I' #'lowergreek' => 'a' #'arabic' => '1' #'decimal' => '1' } LIST_CONTINUATION = '+' LINE_BREAK = ' +' # attributes which be changed within the content of the document (but not # header) because it has semantic meaning; ex. numbered FLEXIBLE_ATTRIBUTES = %w(numbered) # NOTE allows for empty space in line as it could be left by the template engine BLANK_LINE_PATTERN = /^[[:blank:]]*\n/ LINE_FEED_ENTITY = '&#10;' # or &#x0A; # Flags to control compliance with the behavior of AsciiDoc COMPLIANCE = { # AsciiDoc terminates paragraphs adjacent to # block content (delimiter or block attribute list) # Compliance value: true # TODO what about literal paragraph? 
:block_terminates_paragraph => true, # AsciiDoc does not treat paragraphs labeled with a # verbatim style (literal, listing, source, verse) # as verbatim; override this behavior # Compliance value: false :strict_verbatim_paragraphs => true, # AsciiDoc allows start and end delimiters around # a block to be different lengths # this option requires that they be the same # Compliance value: false :congruent_block_delimiters => true, # AsciiDoc drops lines that contain references to missing attributes. # This behavior is not intuitive to most writers # Compliance value: 'drop-line' :attribute_missing => 'skip', # AsciiDoc drops lines that contain an attribute unassignemnt. # This behavior may need to be tuned depending on the circumstances. # Compliance value: 'drop-line' :attribute_undefined => 'drop-line', } # The following pattern, which appears frequently, captures the contents between square brackets, # ignoring escaped closing brackets (closing brackets prefixed with a backslash '\' character) # # Pattern: # (?:\[((?:\\\]|[^\]])*?)\]) # Matches: # [enclosed text here] or [enclosed [text\] here] REGEXP = { # NOTE: this is a inline admonition note :admonition_inline => /^(#{ADMONITION_STYLES.to_a * '|'}):\s/, # [[Foo]] :anchor => /^\[\[([^\s\[\]]+)\]\]$/, # Foowhatevs [[Bar]] :anchor_embedded => /^(.*?)\s*\[\[([^\[\]]+)\]\]$/, # [[ref]] (anywhere inline) :anchor_macro => /\\?\[\[([\w":].*?)\]\]/, # matches any unbounded block delimiter: # listing, literal, example, sidebar, quote, passthrough, table, fenced code # does not include open block or air quotes # TIP position the most common blocks towards the front of the pattern :any_blk => %r{^(?:(?:-|\.|=|\*|_|\+|/){4,}|[\|,;!]={3,}|(?:`|~){3,}.*)$}, # detect a list item of any sort # [[:graph:]] is a non-blank character :any_list => /^(?: <?\d+>[[:blank:]]+[[:graph:]]| [[:blank:]]*(?:-|(?:\*|\.){1,5}|\d+\.|[A-Za-z]\.|[IVXivx]+\))[[:blank:]]+[[:graph:]]| [[:blank:]]*.*?(?::{2,4}|;;)(?:[[:blank:]]+[[:graph:]]|$) )/x, # 
:foo: bar # :Author: Dan # :numbered!: # :long-entry: Attribute value lines ending in ' +' # are joined together as a single value, # collapsing the line breaks and indentation to # a single space. :attr_entry => /^:(!?\w.*?):(?:[[:blank:]]+(.*))?$/, # An attribute list above a block element # # Can be strictly positional: # [quote, Adam Smith, Wealth of Nations] # Or can have name/value pairs # [NOTE, caption="Good to know"] # Can be defined by an attribute # [{lead}] :blk_attr_list => /^\[(|[[:blank:]]*[\w\{,.#"'%].*)\]$/, # block attribute list or block id (bulk query) :attr_line => /^\[(|[[:blank:]]*[\w\{,.#"'%].*|\[[^\[\]]*\])\]$/, # attribute reference # {foo} # {counter:pcount:1} # {set:foo:bar} # {set:name!} :attr_ref => /(\\)?\{((set|counter2?):.+?|\w+(?:[\-]\w+)*)(\\)?\}/, # The author info line the appears immediately following the document title # John Doe <[email protected]> :author_info => /^(\w[\w\-'.]*)(?: +(\w[\w\-'.]*))?(?: +(\w[\w\-'.]*))?(?: +<([^>]+)>)?$/, # [[[Foo]]] (anywhere inline) :biblio_macro => /\\?\[\[\[([\w:][\w:.-]*?)\]\]\]/, # callout reference inside literal text # <1> (optionally prefixed by //, # or ;; line comment chars) # <1> <2> (multiple callouts on one line) # <!--1--> (for XML-based languages) # special characters are already be replaced at this point during render :callout_render => /(?:(?:\/\/|#|;;) ?)?(\\)?&lt;!?(--|)(\d+)\2&gt;(?=(?: ?\\?&lt;!?\2\d+\2&gt;)*$)/, # ...but not while scanning :callout_quick_scan => /\\?<!?(--|)(\d+)\1>(?=(?: ?\\?<!?\1\d+\1>)*$)/, :callout_scan => /(?:(?:\/\/|#|;;) ?)?(\\)?<!?(--|)(\d+)\2>(?=(?: ?\\?<!?\2\d+\2>)*$)/, # <1> Foo :colist => /^<?(\d+)>[[:blank:]]+(.*)/, # //// # comment block # //// :comment_blk => %r{^/{4,}$}, # // (and then whatever) :comment => %r{^//(?:[^/]|$)}, # one,two;three;four :ssv_or_csv_delim => /,|;/, # one two three :space_delim => /([^\\])[[:blank:]]+/, # Ctrl + Alt+T # Ctrl,T :kbd_delim => /(?:\+|,)(?=[[:blank:]]*[^\1])/, # one\ two\ three :escaped_space => 
/\\([[:blank:]])/, # 29 :digits => /^\d+$/, # foo:: || foo::: || foo:::: || foo;; # Should be followed by a definition, on the same line... # foo:: That which precedes 'bar' (see also, <<bar>>) # ...or on a separate line # foo:: # That which precedes 'bar' (see also, <<bar>>) # The term may be an attribute reference # {term_foo}:: {def_foo} # NOTE negative match for comment line is intentional since that isn't handled when looking for next list item # QUESTION should we check for line comment in regex or when scanning the lines? :dlist => /^(?!\/\/)[[:blank:]]*(.*?)(:{2,4}|;;)(?:[[:blank:]]+(.*))?$/, :dlist_siblings => { # (?:.*?[^:])? - a non-capturing group which grabs longest sequence of characters that doesn't end w/ colon '::' => /^(?!\/\/)[[:blank:]]*((?:.*[^:])?)(::)(?:[[:blank:]]+(.*))?$/, ':::' => /^(?!\/\/)[[:blank:]]*((?:.*[^:])?)(:::)(?:[[:blank:]]+(.*))?$/, '::::' => /^(?!\/\/)[[:blank:]]*((?:.*[^:])?)(::::)(?:[[:blank:]]+(.*))?$/, ';;' => /^(?!\/\/)[[:blank:]]*(.*)(;;)(?:[[:blank:]]+(.*))?$/ }, :illegal_sectid_chars => /&(?:[[:alpha:]]+|#[[:digit:]]+|#x[[:alnum:]]+);|\W+?/, # footnote:[text] # footnoteref:[id,text] # footnoteref:[id] :footnote_macro => /\\?(footnote|footnoteref):\[((?:\\\]|[^\]])*?)\]/, # gist::123456[] :generic_blk_macro => /^(\w[\w\-]*)::(\S+?)\[((?:\\\]|[^\]])*?)\]$/, # kbd:[F3] # kbd:[Ctrl+Shift+T] # kbd:[Ctrl+\]] # kbd:[Ctrl,T] # btn:[Save] :kbd_btn_macro => /\\?(?:kbd|btn):\[((?:\\\]|[^\]])+?)\]/, # menu:File[New...] # menu:View[Page Style > No Style] # menu:View[Page Style, No Style] :menu_macro => /\\?menu:(\w|\w.*?\S)\[[[:blank:]]*(.+?)?\]/, # "File > New..." 
:menu_inline_macro => /\\?"(\w[^"]*?[[:blank:]]*&gt;[[:blank:]]*[^"[:blank:]][^"]*)"/, # image::filename.png[Caption] # video::http://youtube.com/12345[Cats vs Dogs] :media_blk_macro => /^(image|video|audio)::(\S+?)\[((?:\\\]|[^\]])*?)\]$/, # image:filename.png[Alt Text] # image:http://example.com/images/filename.png[Alt Text] # image:filename.png[More [Alt\] Text] (alt text becomes "More [Alt] Text") # icon:github[large] :image_macro => /\\?(?:image|icon):([^:\[][^\[]*)\[((?:\\\]|[^\]])*?)\]/, # indexterm:[Tigers,Big cats] # (((Tigers,Big cats))) :indexterm_macro => /\\?(?:indexterm:(?:\[((?:\\\]|[^\]])*?)\])|\(\(\((.*?)\)\)\)(?!\)))/m, # indexterm2:[Tigers] # ((Tigers)) :indexterm2_macro => /\\?(?:indexterm2:(?:\[((?:\\\]|[^\]])*?)\])|\(\((.*?)\)\)(?!\)))/m, # whitespace at the beginning of the line :leading_blanks => /^([[:blank:]]*)/, # leading parent directory references in path :leading_parent_dirs => /^(?:\.\.\/)*/, # + From the Asciidoc User Guide: "A plus character preceded by at # least one space character at the end of a non-blank line forces # a line break. It generates a line break (br) tag for HTML outputs. # # + (would not match because there's no space before +) # + (would match and capture '') # Foo + (would and capture 'Foo') :line_break => /^(.*)[[:blank:]]\+$/, # inline link and some inline link macro # FIXME revisit! :link_inline => %r{(^|link:|\s|>|&lt;|[\(\)\[\]])(\\?(?:https?|ftp|irc)://[^\s\[\]<]*[^\s.,\[\]<])(?:\[((?:\\\]|[^\]])*?)\])?}, # inline link macro # link:path[label] :link_macro => /\\?(?:link|mailto):([^\s\[]+)(?:\[((?:\\\]|[^\]])*?)\])/, # inline email address # [email protected] :email_inline => /[\\>:]?\w[\w.%+-]*@[[:alnum:]][[:alnum:].-]*\.[[:alpha:]]{2,4}\b/, # <TAB>Foo or one-or-more-spaces-or-tabs then whatever :lit_par => /^([[:blank:]]+.*)$/, # . Foo (up to 5 consecutive dots) # 1. Foo (arabic, default) # a. Foo (loweralpha) # A. Foo (upperalpha) # i. Foo (lowerroman) # I. 
Foo (upperroman) # REVIEW leading space has already been stripped, so may not need in regex :olist => /^[[:blank:]]*(\.{1,5}|\d+\.|[A-Za-z]\.|[IVXivx]+\))[[:blank:]]+(.*)$/, # ''' (ruler) # <<< (pagebreak) :break_line => /^('|<){3,}$/, # ''' or ' ' ' (ruler) # --- or - - - (ruler) # *** or * * * (ruler) # <<< (pagebreak) :break_line_plus => /^(?:'|<){3,}$|^ {0,3}([-\*_])( *)\1\2\1$/, # inline passthrough macros # +++text+++ # $$text$$ # pass:quotes[text] :pass_macro => /\\?(?:(\+{3}|\${2})(.*?)\1|pass:([a-z,]*)\[((?:\\\]|[^\]])*?)\])/m, # passthrough macro allowed in value of attribute assignment # pass:[text] :pass_macro_basic => /^pass:([a-z,]*)\[(.*)\]$/, # inline literal passthrough macro # `text` :pass_lit => /(^|[^`\w])(?:\[([^\]]+?)\])?(\\?`([^`\s]|[^`\s].*?\S)`)(?![`\w])/m, # placeholder for extracted passthrough text :pass_placeholder => /\e(\d+)\e/, # The document revision info line the appears immediately following the # document title author info line, if present # v1.0, 2013-01-01: Ring in the new year release :revision_info => /^(?:\D*(.*?),)?(?:\s*(?!:)(.*?))(?:\s*(?!^):\s*(.*))?$/, # \' within a word :single_quote_esc => /(\w)\\'(\w)/, # an alternative if our backend generated single-quoted html/xml attributes #:single_quote_esc => /(\w|=)\\'(\w)/, # used for sanitizing attribute names :illegal_attr_name_chars => /[^\w\-]/, # 1*h,2*,^3e :table_colspec => /^(?:(\d+)\*)?([<^>](?:\.[<^>]?)?|(?:[<^>]?\.)?[<^>])?(\d+%?)?([a-z])?$/, # 2.3+<.>m # TODO might want to use step-wise scan rather than this mega-regexp :table_cellspec => { :start => /^[[:blank:]]*(?:(\d+(?:\.\d*)?|(?:\d*\.)?\d+)([*+]))?([<^>](?:\.[<^>]?)?|(?:[<^>]?\.)?[<^>])?([a-z])?\|/, :end => /[[:blank:]]+(?:(\d+(?:\.\d*)?|(?:\d*\.)?\d+)([*+]))?([<^>](?:\.[<^>]?)?|(?:[<^>]?\.)?[<^>])?([a-z])?$/ }, # docbook45 # html5 :trailing_digit => /[[:digit:]]+$/, # .Foo but not . 
Foo or ..Foo :blk_title => /^\.([^\s.].*)$/, # matches double quoted text, capturing quote char and text (single-line) :dbl_quoted => /^("|)(.*)\1$/, # matches double quoted text, capturing quote char and text (multi-line) :m_dbl_quoted => /^("|)(.*)\1$/m, # == Foo # ^ yields a level 2 title # # == Foo == # ^ also yields a level 2 title # # both equivalent to this two-line version: # Foo # ~~~ # # match[1] is the delimiter, whose length determines the level # match[2] is the title itself # match[3] is an inline anchor, which becomes the section id :section_title => /^((?:=|#){1,6})\s+(\S.*?)(?:\s*\[\[([^\[]+)\]\])?(?:\s+\1)?$/, # does not begin with a dot and has at least one alphanumeric character :section_name => /^((?=.*\w+.*)[^.].*?)$/, # ====== || ------ || ~~~~~~ || ^^^^^^ || ++++++ # TODO build from SECTION_LEVELS keys :section_underline => /^(?:=|-|~|\^|\+)+$/, # toc::[] # toc::[levels=2] :toc => /^toc::\[(.*?)\]$/, # * Foo (up to 5 consecutive asterisks) # - Foo # REVIEW leading space has already been stripped, so may not need in regex :ulist => /^[[:blank:]]*(-|\*{1,5})[[:blank:]]+(.*)$/, # inline xref macro # <<id,reftext>> (special characters have already been escaped, hence the entity references) # xref:id[reftext] :xref_macro => /\\?(?:&lt;&lt;([\w":].*?)&gt;&gt;|xref:([\w":].*?)\[(.*?)\])/m, # ifdef::basebackend-html[] # ifndef::theme[] # ifeval::["{asciidoctor-version}" >= "0.1.0"] # ifdef::asciidoctor[Asciidoctor!] 
# endif::theme[] # endif::basebackend-html[] # endif::[] :ifdef_macro => /^[\\]?(ifdef|ifndef|ifeval|endif)::(\S*?(?:([,\+])\S+?)?)\[(.+)?\]$/, # "{asciidoctor-version}" >= "0.1.0" :eval_expr => /^(\S.*?)[[:blank:]]*(==|!=|<=|>=|<|>)[[:blank:]]*(\S.*)$/, # ...or if we want to be more strict up front about what's on each side #:eval_expr => /^(true|false|("|'|)\{\w+(?:\-\w+)*\}\2|("|')[^\3]*\3|\-?\d+(?:\.\d+)*)[[:blank:]]*(==|!=|<=|>=|<|>)[[:blank:]]*(true|false|("|'|)\{\w+(?:\-\w+)*\}\6|("|')[^\7]*\7|\-?\d+(?:\.\d+)*)$/, # include::chapter1.ad[] # include::example.txt[lines=1;2;5..10] :include_macro => /^\\?include::([^\[]+)\[(.*?)\]$/, # http://domain # https://domain # data:info :uri_sniff => %r{\A[[:alpha:]][[:alnum:].+-]*:/*}, :uri_encode_chars => /[^\w\-.!~*';:@=+$,()\[\]]/, :mantitle_manvolnum => /^(.*)\((.*)\)$/, :manname_manpurpose => /^(.*?)[[:blank:]]+-[[:blank:]]+(.*)$/ } INTRINSICS = Hash.new{|h,k| STDERR.puts "Missing intrinsic: #{k.inspect}"; "{#{k}}"}.merge( { 'startsb' => '[', 'endsb' => ']', 'brvbar' => '|', 'caret' => '^', 'asterisk' => '*', 'tilde' => '~', 'plus' => '&#43;', 'apostrophe' => '\'', 'backslash' => '\\', 'backtick' => '`', 'empty' => '', 'sp' => ' ', 'space' => ' ', 'two-colons' => '::', 'two-semicolons' => ';;', 'nbsp' => '&#160;', 'deg' => '&#176;', 'zwsp' => '&#8203;', 'quot' => '&#34;', 'apos' => '&#39;', 'lsquo' => '&#8216;', 'rsquo' => '&#8217;', 'ldquo' => '&#8220;', 'rdquo' => '&#8221;', 'wj' => '&#8288;', 'amp' => '&', 'lt' => '<', 'gt' => '>' } ) SPECIAL_CHARS = { '<' => '&lt;', '>' => '&gt;', '&' => '&amp;' } SPECIAL_CHARS_PATTERN = /[#{SPECIAL_CHARS.keys.join}]/ #SPECIAL_CHARS_PATTERN = /(?:<|>|&(?![[:alpha:]]{2,};|#[[:digit:]]{2,}+;|#x[[:alnum:]]{2,}+;))/ # unconstrained quotes:: can appear anywhere # constrained quotes:: must be bordered by non-word characters # NOTE these substituions are processed in the order they appear here and # the order in which they are replaced is important QUOTE_SUBS = [ # **strong** 
[:strong, :unconstrained, /\\?(?:\[([^\]]+?)\])?\*\*(.+?)\*\*/m], # *strong* [:strong, :constrained, /(^|[^\w;:}])(?:\[([^\]]+?)\])?\*(\S|\S.*?\S)\*(?=\W|$)/m], # ``double-quoted'' [:double, :constrained, /(^|[^\w;:}])(?:\[([^\]]+?)\])?``(\S|\S.*?\S)''(?=\W|$)/m], # 'emphasis' [:emphasis, :constrained, /(^|[^\w;:}])(?:\[([^\]]+?)\])?'(\S|\S.*?\S)'(?=\W|$)/m], # `single-quoted' [:single, :constrained, /(^|[^\w;:}])(?:\[([^\]]+?)\])?`(\S|\S.*?\S)'(?=\W|$)/m], # ++monospaced++ [:monospaced, :unconstrained, /\\?(?:\[([^\]]+?)\])?\+\+(.+?)\+\+/m], # +monospaced+ [:monospaced, :constrained, /(^|[^\w;:}])(?:\[([^\]]+?)\])?\+(\S|\S.*?\S)\+(?=\W|$)/m], # __emphasis__ [:emphasis, :unconstrained, /\\?(?:\[([^\]]+?)\])?\_\_(.+?)\_\_/m], # _emphasis_ [:emphasis, :constrained, /(^|[^\w;:}])(?:\[([^\]]+?)\])?_(\S|\S.*?\S)_(?=\W|$)/m], # ##unquoted## [:none, :unconstrained, /\\?(?:\[([^\]]+?)\])?##(.+?)##/m], # #unquoted# [:none, :constrained, /(^|[^\w;:}])(?:\[([^\]]+?)\])?#(\S|\S.*?\S)#(?=\W|$)/m], # ^superscript^ [:superscript, :unconstrained, /\\?(?:\[([^\]]+?)\])?\^(.+?)\^/m], # ~subscript~ [:subscript, :unconstrained, /\\?(?:\[([^\]]+?)\])?\~(.+?)\~/m] ] # NOTE in Ruby 1.8.7, [^\\] does not match start of line, # so we need to match it explicitly # order is significant REPLACEMENTS = [ # (C) [/\\?\(C\)/, '&#169;', :none], # (R) [/\\?\(R\)/, '&#174;', :none], # (TM) [/\\?\(TM\)/, '&#8482;', :none], # foo -- bar [/(^|\n| |\\)--( |\n|$)/, '&#8201;&#8212;&#8201;', :none], # foo--bar [/(\w)\\?--(?=\w)/, '&#8212;', :leading], # ellipsis [/\\?\.\.\./, '&#8230;', :leading], # single quotes [/(\w)\\?'(\w)/, '&#8217;', :bounding], # right arrow -> [/\\?-&gt;/, '&#8594;', :none], # right double arrow => [/\\?=&gt;/, '&#8658;', :none], # left arrow <- [/\\?&lt;-/, '&#8592;', :none], # right left arrow <= [/\\?&lt;=/, '&#8656;', :none], # restore entities [/\\?(&)amp;((?:[[:alpha:]]+|#[[:digit:]]+|#x[[:alnum:]]+);)/, '', :bounding] ] # Public: Parse the AsciiDoc source input into an 
Asciidoctor::Document # # Accepts input as an IO (or StringIO), String or String Array object. If the # input is a File, information about the file is stored in attributes on the # Document object. # # input - the AsciiDoc source as a IO, String or Array. # options - a String, Array or Hash of options to control processing (default: {}) # String and Array values are converted into a Hash. # See Asciidoctor::Document#initialize for details about options. # # returns the Asciidoctor::Document def self.load(input, options = {}) if (monitor = options.fetch(:monitor, false)) start = Time.now end attrs = (options[:attributes] ||= {}) if attrs.is_a?(Hash) || (RUBY_ENGINE == 'jruby' && attrs.is_a?(Java::JavaUtil::Map)) # all good; placed here as optimization elsif attrs.is_a? Array attrs = options[:attributes] = attrs.inject({}) do |accum, entry| k, v = entry.split '=', 2 accum[k] = v || '' accum end elsif attrs.is_a? String # convert non-escaped spaces into null character, so we split on the # correct spaces chars, and restore escaped spaces attrs = attrs.gsub(REGEXP[:space_delim], "\\1\0").gsub(REGEXP[:escaped_space], '\1') attrs = options[:attributes] = attrs.split("\0").inject({}) do |accum, entry| k, v = entry.split '=', 2 accum[k] = v || '' accum end elsif attrs.respond_to?('keys') && attrs.respond_to?('[]') # convert it to a Hash as we know it original_attrs = attrs attrs = options[:attributes] = {} original_attrs.keys.each do |key| attrs[key] = original_attrs[key] end else raise ArgumentError, "illegal type for attributes option: #{attrs.class.ancestors}" end lines = nil if input.is_a? 
File lines = input.readlines input_mtime = input.mtime input_path = File.expand_path(input.path) # hold off on setting infile and indir until we get a better sense of their purpose attrs['docfile'] = input_path attrs['docdir'] = File.dirname(input_path) attrs['docname'] = File.basename(input_path, File.extname(input_path)) attrs['docdate'] = docdate = input_mtime.strftime('%Y-%m-%d') attrs['doctime'] = doctime = input_mtime.strftime('%H:%M:%S %Z') attrs['docdatetime'] = %(#{docdate} #{doctime}) elsif input.respond_to?(:readlines) input.rewind rescue nil lines = input.readlines elsif input.is_a?(String) lines = input.lines.entries elsif input.is_a?(Array) lines = input.dup else raise "Unsupported input type: #{input.class}" end if monitor read_time = Time.now - start start = Time.now end doc = Document.new(lines, options) if monitor parse_time = Time.now - start monitor[:read] = read_time monitor[:parse] = parse_time monitor[:load] = read_time + parse_time end doc end # Public: Parse the contents of the AsciiDoc source file into an Asciidoctor::Document # # Accepts input as an IO, String or String Array object. If the # input is a File, information about the file is stored in # attributes on the Document. # # input - the String AsciiDoc source filename # options - a String, Array or Hash of options to control processing (default: {}) # String and Array values are converted into a Hash. # See Asciidoctor::Document#initialize for details about options. # # returns the Asciidoctor::Document def self.load_file(filename, options = {}) Asciidoctor.load(File.new(filename), options) end # Public: Parse the AsciiDoc source input into an Asciidoctor::Document and render it # to the specified backend format # # Accepts input as an IO, String or String Array object. If the # input is a File, information about the file is stored in # attributes on the Document. 
# # If the :in_place option is true, and the input is a File, the output is # written to a file adjacent to the input file, having an extension that # corresponds to the backend format. Otherwise, if the :to_file option is # specified, the file is written to that file. If :to_file is not an absolute # path, it is resolved relative to :to_dir, if given, otherwise the # Document#base_dir. If the target directory does not exist, it will not be # created unless the :mkdirs option is set to true. If the file cannot be # written because the target directory does not exist, or because it falls # outside of the Document#base_dir in safe mode, an IOError is raised. # # If the output is going to be written to a file, the header and footer are # rendered unless specified otherwise (writing to a file implies creating a # standalone document). Otherwise, the header and footer are not rendered by # default and the rendered output is returned. # # input - the String AsciiDoc source filename # options - a String, Array or Hash of options to control processing (default: {}) # String and Array values are converted into a Hash. # See Asciidoctor::Document#initialize for details about options. # # returns the Document object if the rendered result String is written to a # file, otherwise the rendered result String def self.render(input, options = {}) in_place = options.delete(:in_place) || false to_file = options.delete(:to_file) to_dir = options.delete(:to_dir) mkdirs = options.delete(:mkdirs) || false monitor = options.fetch(:monitor, false) write_in_place = in_place && input.is_a?(File) write_to_target = to_file || to_dir stream_output = !to_file.nil? 
&& to_file.respond_to?(:write) if write_in_place && write_to_target raise ArgumentError, 'the option :in_place cannot be used with either the :to_dir or :to_file option' end if !options.has_key?(:header_footer) && (write_in_place || write_to_target) options[:header_footer] = true end doc = Asciidoctor.load(input, options) if to_file == '/dev/null' return doc elsif write_in_place to_file = File.join(File.dirname(input.path), "#{doc.attributes['docname']}#{doc.attributes['outfilesuffix']}") elsif !stream_output && write_to_target working_dir = options.has_key?(:base_dir) ? File.expand_path(options[:base_dir]) : File.expand_path(Dir.pwd) # QUESTION should the jail be the working_dir or doc.base_dir??? jail = doc.safe >= SafeMode::SAFE ? working_dir : nil if to_dir to_dir = doc.normalize_system_path(to_dir, working_dir, jail, :target_name => 'to_dir', :recover => false) if to_file to_file = doc.normalize_system_path(to_file, to_dir, nil, :target_name => 'to_dir', :recover => false) # reestablish to_dir as the final target directory (in the case to_file had directory segments) to_dir = File.dirname(to_file) else to_file = File.join(to_dir, "#{doc.attributes['docname']}#{doc.attributes['outfilesuffix']}") end elsif to_file to_file = doc.normalize_system_path(to_file, working_dir, jail, :target_name => 'to_dir', :recover => false) # establish to_dir as the final target directory (in the case to_file had directory segments) to_dir = File.dirname(to_file) end if !File.directory? 
to_dir if mkdirs Helpers.require_library 'fileutils' FileUtils.mkdir_p to_dir else raise IOError, "target directory does not exist: #{to_dir}" end end end start = Time.now if monitor output = doc.render if monitor render_time = Time.now - start monitor[:render] = render_time monitor[:load_render] = monitor[:load] + render_time end if to_file start = Time.now if monitor if stream_output to_file.write output.rstrip # ensure there's a trailing endline to_file.write EOL else File.open(to_file, 'w') {|file| file.write output } # these assignments primarily for testing, diagnostics or reporting doc.attributes['outfile'] = outfile = File.expand_path(to_file) doc.attributes['outdir'] = File.dirname(outfile) end if monitor write_time = Time.now - start monitor[:write] = write_time monitor[:total] = monitor[:load_render] + write_time end # NOTE document cannot control this behavior if safe >= SafeMode::SERVER if !stream_output && doc.safe < SafeMode::SECURE && (doc.attr? 'basebackend-html') && (doc.attr? 'linkcss') && (doc.attr? 'copycss') copy_asciidoctor_stylesheet = DEFAULT_STYLESHEET_KEYS.include?(stylesheet = (doc.attr 'stylesheet')) #copy_user_stylesheet = !copy_asciidoctor_stylesheet && (doc.attr? 'copycss') copy_coderay_stylesheet = (doc.attr? 'source-highlighter', 'coderay') && (doc.attr 'coderay-css', 'class') == 'class' copy_pygments_stylesheet = (doc.attr? 'source-highlighter', 'pygments') && (doc.attr 'pygments-css', 'class') == 'class' if copy_asciidoctor_stylesheet || copy_coderay_stylesheet || copy_pygments_stylesheet Helpers.require_library 'fileutils' outdir = doc.attr('outdir') stylesdir = doc.normalize_system_path(doc.attr('stylesdir'), outdir, doc.safe >= SafeMode::SAFE ? 
outdir : nil) Helpers.mkdir_p stylesdir if mkdirs if copy_asciidoctor_stylesheet File.open(File.join(stylesdir, DEFAULT_STYLESHEET_NAME), 'w') {|f| f.write Asciidoctor::HTML5.default_asciidoctor_stylesheet } end #if copy_user_stylesheet #end if copy_coderay_stylesheet File.open(File.join(stylesdir, 'asciidoctor-coderay.css'), 'w') {|f| f.write Asciidoctor::HTML5.default_coderay_stylesheet } end if copy_pygments_stylesheet File.open(File.join(stylesdir, 'asciidoctor-pygments.css'), 'w') {|f| f.write Asciidoctor::HTML5.pygments_stylesheet(doc.attr 'pygments-style') } end end end if !stream_output && doc.safe < SafeMode::SECURE && (doc.attr? 'basebackend-htmlbook') && (doc.attr? 'linkcss') && (doc.attr? 'copycss') copy_asciidoctor_stylesheet = DEFAULT_STYLESHEET_KEYS.include?(stylesheet = (doc.attr 'stylesheet')) #copy_user_stylesheet = !copy_asciidoctor_stylesheet && (doc.attr? 'copycss') copy_coderay_stylesheet = (doc.attr? 'source-highlighter', 'coderay') && (doc.attr 'coderay-css', 'class') == 'class' copy_pygments_stylesheet = (doc.attr? 'source-highlighter', 'pygments') && (doc.attr 'pygments-css', 'class') == 'class' if copy_asciidoctor_stylesheet || copy_coderay_stylesheet || copy_pygments_stylesheet Helpers.require_library 'fileutils' outdir = doc.attr('outdir') stylesdir = doc.normalize_system_path(doc.attr('stylesdir'), outdir, doc.safe >= SafeMode::SAFE ? 
outdir : nil) Helpers.mkdir_p stylesdir if mkdirs if copy_asciidoctor_stylesheet File.open(File.join(stylesdir, DEFAULT_STYLESHEET_NAME), 'w') {|f| f.write Asciidoctor::HTMLBook.default_asciidoctor_stylesheet } end #if copy_user_stylesheet #end if copy_coderay_stylesheet File.open(File.join(stylesdir, 'asciidoctor-coderay.css'), 'w') {|f| f.write Asciidoctor::HTMLBook.default_coderay_stylesheet } end if copy_pygments_stylesheet File.open(File.join(stylesdir, 'asciidoctor-pygments.css'), 'w') {|f| f.write Asciidoctor::HTMLBook.pygments_stylesheet(doc.attr 'pygments-style') } end end end doc else output end end # Public: Parse the contents of the AsciiDoc source file into an Asciidoctor::Document # and render it to the specified backend format # # input - the String AsciiDoc source filename # options - a String, Array or Hash of options to control processing (default: {}) # String and Array values are converted into a Hash. # See Asciidoctor::Document#initialize for details about options. # # returns the Document object if the rendered result String is written to a # file, otherwise the rendered result String def self.render_file(filename, options = {}) Asciidoctor.render(File.new(filename), options) end # modules require 'orm_asciidoctor/debug' require 'orm_asciidoctor/substituters' require 'orm_asciidoctor/helpers' # abstract classes require 'orm_asciidoctor/abstract_node' require 'orm_asciidoctor/abstract_block' # concrete classes require 'orm_asciidoctor/attribute_list' require 'orm_asciidoctor/backends/base_template' require 'orm_asciidoctor/block' require 'orm_asciidoctor/callouts' require 'orm_asciidoctor/document' require 'orm_asciidoctor/inline' require 'orm_asciidoctor/lexer' require 'orm_asciidoctor/list' require 'orm_asciidoctor/path_resolver' require 'orm_asciidoctor/reader' require 'orm_asciidoctor/renderer' require 'orm_asciidoctor/section' require 'orm_asciidoctor/table' # info require 'orm_asciidoctor/version' end
36.759434
210
0.564943
3.015625
fb8ef83325d631383360c1adf5cff235fb28d90c
2,710
c
C
src/pal/tests/palsuite/threading/SwitchToThread/test1/test1.c
CyberSys/coreclr-mono
83b2cb83b32faa45b4f790237b5c5e259692294a
[ "MIT" ]
277
2015-01-04T20:42:36.000Z
2022-03-21T06:52:03.000Z
src/pal/tests/palsuite/threading/SwitchToThread/test1/test1.c
CyberSys/coreclr-mono
83b2cb83b32faa45b4f790237b5c5e259692294a
[ "MIT" ]
31
2015-01-05T08:00:38.000Z
2016-01-05T01:18:59.000Z
src/pal/tests/palsuite/threading/SwitchToThread/test1/test1.c
CyberSys/coreclr-mono
83b2cb83b32faa45b4f790237b5c5e259692294a
[ "MIT" ]
46
2015-01-21T00:41:59.000Z
2021-03-23T07:00:01.000Z
// // Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. // /*============================================================================= ** ** Source: test1.c ** ** Purpose: Test to ensure SwitchToThread works, without ** causing test to hang ** ** Dependencies: PAL_Initialize ** Fail ** SwitchToThread ** WaitForMultipleObject ** CreateThread ** GetLastError ** ** **===========================================================================*/ #include <palsuite.h> #define THREAD_COUNT 10 #define REPEAT_COUNT 1000 #define TIMEOUT 60000 void PALAPI Run_Thread(LPVOID lpParam); /** * main * * executable entry point */ INT __cdecl main( INT argc, CHAR **argv ) { DWORD dwParam; HANDLE hThread[THREAD_COUNT]; DWORD threadId[THREAD_COUNT]; int i = 0; int returnCode = 0; /*PAL initialization */ if( (PAL_Initialize(argc, argv)) != 0 ) { return FAIL; } for( i = 0; i < THREAD_COUNT; i++ ) { dwParam = (int) i; //Create thread hThread[i] = CreateThread( NULL, /* no security attributes */ 0, /* use default stack size */ (LPTHREAD_START_ROUTINE)Run_Thread,/* thread function */ (LPVOID)dwParam, /* argument to thread function */ 0, /* use default creation flags */ &threadId[i] /* returns the thread identifier*/ ); if(hThread[i] == NULL) { Fail("Create Thread failed for iteration %d GetLastError value is %d\n", i, GetLastError()); } } returnCode = WaitForMultipleObjects(THREAD_COUNT, hThread, TRUE, TIMEOUT); if( WAIT_OBJECT_0 != returnCode ) { Trace("Wait for Object(s) returned %d, expected value is %d, and GetLastError value is %d\n", returnCode, WAIT_OBJECT_0, GetLastError()); } PAL_Terminate(); return PASS; } void PALAPI Run_Thread (LPVOID lpParam) { int i = 0; int Id=(int)lpParam; for(i=0; i < REPEAT_COUNT; i++ ) { // No Last Error is set.. 
if(!SwitchToThread()) { Trace( "The operating system did not switch execution to another thread," "for thread id[%d], iteration [%d]\n", Id, i ); } } }
27.373737
146
0.482657
3.171875
15b1bd569f5f103bf58c3e319ea2f77c1372ccb0
2,783
kt
Kotlin
lib/src/main/kotlin/com/amarland/iconvector/lib/Utils.kt
amarland/iconvector
e05c63280dc922ce6bd3ea961c53ce1f9a415213
[ "Apache-2.0" ]
null
null
null
lib/src/main/kotlin/com/amarland/iconvector/lib/Utils.kt
amarland/iconvector
e05c63280dc922ce6bd3ea961c53ce1f9a415213
[ "Apache-2.0" ]
null
null
null
lib/src/main/kotlin/com/amarland/iconvector/lib/Utils.kt
amarland/iconvector
e05c63280dc922ce6bd3ea961c53ce1f9a415213
[ "Apache-2.0" ]
null
null
null
/* * Copyright 2021 Anthony Marland * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.amarland.iconvector.lib import kotlin.math.roundToInt import com.amarland.iconvector.lib.IconVGIntermediateRepresentation as IR internal fun IntArray.insert(index: Int, value: Int): IntArray { require(index in 0..size) return IntArray(size + 1).apply { val source = this@insert val destination = this destination[index] = value if (index == 0) { System.arraycopy(source, 0, destination, 1, source.size) } else { System.arraycopy(source, 0, destination, 0, index) if (index < source.size - 1) { System.arraycopy(source, index, destination, index + 1, source.size - index) } } } } internal fun FloatArray.insert(index: Int, value: Float): FloatArray { require(index in 0..size) return FloatArray(size + 1).apply { val source = this@insert val destination = this destination[index] = value if (index == 0) { System.arraycopy(source, 0, destination, 1, source.size) } else { System.arraycopy(source, 0, destination, 0, index) if (index < source.size - 1) { System.arraycopy(source, index, destination, index + 1, source.size - index) } } } } fun argbColorToHexString(argb: UInt) = '#' + ((((argb shr 24) / 255U) * (argb and 0x00FFFFFFU)).toString(16) .padStart(length = 6, padChar = '0')) fun Iterable<IR.Path.Segment>.toSvgPathDataString(decimalPlaces: Int = Int.MAX_VALUE) = buildString { joinTo(this, separator = " ") { segment -> var index = 0 segment.arguments.joinToString( separator = " ", prefix = 
"${segment.command.value} " ) { value -> if (segment.command == IR.Path.Command.ARC_TO && (index == 3 || index == 4)) { value.roundToInt().toString() } else if (decimalPlaces in 0..5) { "%.${decimalPlaces}f".format(value) } else { value.toString() }.also { index++ } } } }
34.7875
94
0.588933
3.078125
8588ad3fb52fe92ae1e82e6a5e5390afc41987b0
3,088
js
JavaScript
rollup.config.js
icesjs/theme
cac4488165bc1379d26d948cc8b448d6ef03c53c
[ "MIT" ]
null
null
null
rollup.config.js
icesjs/theme
cac4488165bc1379d26d948cc8b448d6ef03c53c
[ "MIT" ]
null
null
null
rollup.config.js
icesjs/theme
cac4488165bc1379d26d948cc8b448d6ef03c53c
[ "MIT" ]
null
null
null
import * as path from 'path' import * as fs from 'fs' import cp from 'child_process' import typescript from '@rollup/plugin-typescript' import externals from 'rollup-plugin-node-externals' import pkg from './package.json' const isEnvDevelopment = process.env.NODE_ENV === 'development' const input = 'src/index.ts' const webpackPlugin = `@ices/theme-webpack-plugin` const sourcemap = !isEnvDevelopment || 'inline' function writeFileSync(filePath, content) { const unExistsDirs = [] let file = filePath while (!fs.existsSync((file = path.dirname(file)))) { unExistsDirs.unshift(file) } for (const dir of unExistsDirs) { fs.mkdirSync(dir) } fs.writeFileSync(filePath, content) } function makeFakeThemeFile() { writeFileSync( path.join(path.resolve(path.dirname(pkg.main)), 'theme.js'), `// Auto generated code throw new Error( \`Please add ThemeWebpackPlugin from '${webpackPlugin}' to your config of webpack first: // webpack.config.js const ThemeWebpackPlugin = require('${webpackPlugin}') module.exports = { plugins: [new ThemeWebpackPlugin()] }\` ) ` ) } function makeTypesFile() { cp.execSync('yarn types', { stdio: 'ignore' }) const paths = ['types/react', 'types/vue'] for (const p of paths) { const dir = path.resolve(p) if (fs.existsSync(dir)) { for (const dts of fs.readdirSync(dir)) { fs.renameSync(path.join(dir, dts), path.join(path.resolve(p.replace(/^types\//, '')), dts)) } } } } function getPlugins(format, makeTypes) { return [ externals({ builtins: true, deps: true, peerDeps: true, exclude: 'tslib', }), typescript({ removeComments: true, noUnusedLocals: !isEnvDevelopment, target: 'es5', }), makeTypes && { name: 'make-types', generateBundle: makeTypesFile, }, ].filter(Boolean) } makeFakeThemeFile() export default [ { input, external: ['./theme'], output: { file: pkg.module, format: 'es', sourcemap, }, plugins: getPlugins('es'), }, { input, external: ['./theme'], output: { file: pkg.main, exports: 'auto', format: 'cjs', sourcemap, }, plugins: getPlugins('cjs'), }, { input: 
'src/react/index.tsx', external: ['../index'], output: { file: 'react/index.js', paths: (id) => { if (id === path.resolve('src/index')) { return path.relative(path.resolve('react'), path.resolve(pkg.module)).replace(/\\/g, '/') } return id }, format: 'es', sourcemap, }, plugins: getPlugins('es'), }, { input: 'src/react/index.tsx', external: ['../index'], output: { file: 'react/index.cjs.js', paths: (id) => { if (id === path.resolve('src/index')) { return path.relative(path.resolve('react'), path.resolve(pkg.main)).replace(/\\/g, '/') } return id }, exports: 'auto', format: 'cjs', sourcemap, }, plugins: getPlugins('cjs', true), }, ]
23.393939
99
0.591321
3.1875
a101f6805c4eee7efe596729930038f01c347ba2
2,914
asm
Assembly
Mips/fizzbuzz.asm
Mystic-Developer/FizzBuzz-EVERYTHING
2daed09d9d7b5e25e1027b7a740c179e653f74af
[ "MIT" ]
null
null
null
Mips/fizzbuzz.asm
Mystic-Developer/FizzBuzz-EVERYTHING
2daed09d9d7b5e25e1027b7a740c179e653f74af
[ "MIT" ]
null
null
null
Mips/fizzbuzz.asm
Mystic-Developer/FizzBuzz-EVERYTHING
2daed09d9d7b5e25e1027b7a740c179e653f74af
[ "MIT" ]
null
null
null
.data fizz: .asciiz "Fizz" buzz: .asciiz "Buzz" fizzbuzz: .asciiz "FizzBuzz" new_line: .asciiz "\n" prompt: .asciiz "Please enter the highest number you want to go for FizzBuzz: " .text addi $s0, $zero, 5 # s0 = 5; addi $s1, $zero, 3 # s1 = 3; addi $t0, $zero, 1 # t0 = 1; This will be the while loop counter! # sends user prompt message li $v0, 4 la $a0, prompt syscall # gets number from user and stores it in $v0 li $v0, 5 syscall # moves users input from $v0 to $s4 move $s4, $v0 while: bgt $t0, $s4, exit # while t0 < 100 div $t0, $s1 # divide $t0 by 3 mfhi $s2 # store the remainder of $t0 / 3 in $s2 div $t0, $s0 # divide $t0 by 5 mfhi $s3 # store the remainder of $t0 / 5 in $s3 beq $s2, $zero, three_is_good_check_five_for_fizzbuzz # if $s2 is equal to 0, then go check $s3 and see if remainder of 5 checks out for fizzbuzz five_wasnt_good_continue_looping: # if s3 didn't check out for fizzbuzz return to this point of while loop div $t0, $s1 # divide $t0 by 3 mfhi $s2 # store the remainder of $t0 / 3 in $s2 beq $s2, $zero, print_fizz #if $s2 is equal to 0, then print fizz div $t0, $s0 # divide $t0 by 5 mfhi $s2 # store the remainder of $t0 / 5 in $s2 beq $s2, $zero, print_buzz # if $s2 is equal to 0, then print buzz # prints $t0 current number li $v0, 1 add $a0, $t0, $zero syscall # prints a newline li $v0, 4 la $a0, new_line syscall addi $t0, $t0, 1 # increases t0 counter by 1 j while # jumps back to top of while loop three_is_good_check_five_for_fizzbuzz: beq $s3, $zero, print_fizzbuzz # checks to see if the remainder of t0/5, stored in $s3 is 0. If so jumps to print fizzbuzz! 
j five_wasnt_good_continue_looping # if it wasnt 0, then you return to the while loop print_fizzbuzz: # prints fizzbuzz li $v0, 4 la $a0, fizzbuzz syscall # prints a newline li $v0, 4 la $a0, new_line syscall addi $t0, $t0, 1 # increases t0 counter by 1 j while # jumps back to top of while loop print_fizz: # prints fizz li $v0, 4 la $a0, fizz syscall # prints a newline li $v0, 4 la $a0, new_line syscall addi $t0, $t0, 1 # increases t0 counter by 1 j while # jumps back to top of while loop print_buzz: #prints buzz li $v0, 4 la $a0, buzz syscall # prints a newline li $v0, 4 la $a0, new_line syscall addi $t0, $t0, 1 # increases t0 counter by 1 j while # jumps back to top of while loop exit: # ends program li $v0, 10 syscall
23.691057
151
0.56383
3.171875
0b6ffbf766a563164a019a52f34be9e1263ae173
4,197
py
Python
core/env.py
ayyuriss/EigenFunctions
8cb6c22871fcddb633392c0a12691e960dad5143
[ "MIT" ]
null
null
null
core/env.py
ayyuriss/EigenFunctions
8cb6c22871fcddb633392c0a12691e960dad5143
[ "MIT" ]
null
null
null
core/env.py
ayyuriss/EigenFunctions
8cb6c22871fcddb633392c0a12691e960dad5143
[ "MIT" ]
null
null
null
import xxhash import numpy as np from base.grid import SimpleGRID import scipy.sparse as SP h = xxhash.xxh64() s_to_i = lambda x,size : size*x[0]+x[1] i_to_s = lambda x,size : (x%size,x//size) def hash(x): h.reset() h.update(x) return h.digest() class Indexer(object): def __init__(self): self.total = 0 self.dict = {} def get(self,hs): val = self.dict.get(hs,-1) if val == -1: val = self.total self.dict[hs] = val self.total += 1 return val def reset(self): self.__init__() class HashIndexer(object): def __init__(self): self.total = 0 self.dict = {} def get(self,state): hs=hash(state) val = self.dict.get(hs,-1) if val == -1: val = self.total self.dict[hs] = val self.total += 1 return val def reset(self): self.__init__() def get_graph(size): env = SimpleGRID(grid_size=size,max_time=5000) input_shape = env.observation_space.shape min_batch = size**2-size indexer = Indexer() W = np.zeros((min_batch,min_batch)) states = np.zeros(min_batch).astype(int) data = np.zeros((min_batch,)+input_shape) while indexer.total<min_batch: done = False s = env.reset() #s = s.transpose(2,0,1)#np.expand_dims(s,axis=0) i = indexer.get(s_to_i(env.get_cat(),size)) states[i] = s_to_i(env.get_cat(),size) data[states[i]] = s while not done: s,r,done = env.step(np.random.randint(4)) #s = np.expand_dims(s,axis=0) #s = s.transpose(-1,0,1) j = indexer.get(s_to_i(env.get_cat(),size)) states[j] = s_to_i(env.get_cat(),size) data[states[j]] = s W[states[i],states[j]] = W[states[j],states[i]] = 1 if r==1: print(s_to_i(env.get_cat(),size),indexer.total) i = j return data, W class GraphBuilder(object): def __init__(self, env, action_set, batch_size): self.env = env self.action_set = action_set self.h = xxhash.xxh64() self.max_size = batch_size self.indices = set() self._total = 0 self.dict = {} self.states = [] self.prev = 0 self.roll = self.roller() def submit(self,state, new=False): hs = self.hash(state) val = self.dict.get(hs,-1) if val == -1: self.states.append(state) val = self._total self.dict[hs] = 
self._total self._total += 1 if not new: self.indices.add((self.prev,val)) self.prev = val def reset(self): self.indices = set() self._total = 0 self.dict = {} self.states = [] self.prev = 0 def roller(self): done = True while True: self.reset() while not self.full: if done: s = self.env.reset() self.submit(s.copy(), new=done) done = False while not done and not self.full: s,_,done,_ = self.env.step(np.random.choice(self.action_set)) self.submit(s.copy()) S,W = self.get_graph() W = W.toarray() #W = (W+W.T)/2 W = np.maximum(W,W.T) #np.fill_diagonal(W, 1) yield S, W def get(self): return self.roll.__next__() def hash(self,x): self.h.reset() self.h.update(x) return self.h.digest() def get_graph(self): if not self.full: raise "Graph not full Yet" indices = np.array(list(self.indices)) rows = indices[:,0] cols = indices[:,1] data = np.ones(len(rows)) return np.array(self.states),SP.coo_matrix((data, (rows, cols)),shape=(self.max_size, self.max_size)) @property def size(self): return self._total @property def full(self): return self.size == self.max_size
26.732484
109
0.510841
3.640625
653e20753803cf3d8c774a1a90f5c5407a146bd4
3,463
py
Python
pnet/measure.py
changshuowang/PersistenceNetwork
519aa3b4a123091ae6cc3cf619182b5be54fcac3
[ "ISC" ]
1
2020-01-20T06:44:14.000Z
2020-01-20T06:44:14.000Z
pnet/measure.py
changshuowang/PersistenceNetwork
519aa3b4a123091ae6cc3cf619182b5be54fcac3
[ "ISC" ]
null
null
null
pnet/measure.py
changshuowang/PersistenceNetwork
519aa3b4a123091ae6cc3cf619182b5be54fcac3
[ "ISC" ]
null
null
null
import numpy as np from sklearn.metrics import average_precision_score as ap from sklearn.metrics import roc_auc_score """ each row is an instance each column is the prediction of a class """ def _score_to_rank(score_list): rank_array = np.zeros([len(score_list)]) score_array = np.array(score_list) idx_sorted = (-score_array).argsort() rank_array[idx_sorted] = np.arange(len(score_list))+1 rank_list = rank_array.tolist() return rank_list # For clip evaluation def auc_y_classwise(Y_target, Y_score): """ Y_target: list of lists. {0, 1} real labels Y_score: list of lists. real values prediction values """ # Y_target = np.squeeze(np.array(Y_target)) # Y_score = np.squeeze(np.array(Y_score)) Y_target = np.array(Y_target) Y_score = np.array(Y_score) auc_list = roc_auc_score(Y_target, Y_score, average=None) return auc_list def ap_y_classwise(Y_target, Y_score): """ Y_target: list of lists. {0, 1} real labels Y_score: list of lists. real values prediction values """ # Y_target = np.squeeze(np.array(Y_target)) # Y_score = np.squeeze(np.array(Y_score)) Y_target = np.array(Y_target) Y_score = np.array(Y_score) ap_list = ap(Y_target, Y_score, average=None) return ap_list def auc(Y_target, Y_score): """ Y_target: list of lists. {0, 1} real labels Y_score: list of lists. real values prediction values """ Y_target = np.array(Y_target) Y_score = np.array(Y_score) auc_list = [] for i in range(Y_score.shape[1]): try: auc = roc_auc_score(Y_target[:, i], Y_score[:, i]) except: continue auc_list.append(auc) return auc_list def mean_auc(Y_target, Y_score): auc_list = auc(Y_target, Y_score) mean_auc = np.mean(auc_list) return mean_auc def mean_auc_y(Y_target, Y_score): ''' along y-axis ''' return mean_auc(Y_target, Y_score) def mean_auc_x(Y_target, Y_score): ''' along x-axis ''' return mean_auc(np.array(Y_target).T, np.array(Y_score).T) def mean_average_precision(Y_target, Y_score): """ mean average precision raw-based operation Y_target: list of lists. 
{0, 1} real labels Y_score: list of lists. real values prediction values """ p = float(len(Y_target)) temp_sum = 0 for y_target, y_score in zip(Y_target, Y_score): y_target = np.array(y_target) y_score = np.array(y_score) if (y_target == 0).all() or (y_target == 1).all(): p -= 1 continue idx_target = np.nonzero(y_target > 0)[0] n_target = float(len(idx_target)) rank_list = np.array(_score_to_rank(y_score)) target_rank_list = rank_list[idx_target] temp_sum_2 = 0 for target_rank in target_rank_list: mm = sum([1 for ii in idx_target if rank_list[ii] <= target_rank])/float(target_rank) temp_sum_2 += mm temp_sum += temp_sum_2/n_target measure = temp_sum/p return measure def map(Y_target, Y_score): return mean_average_precision(Y_target, Y_score) def map_x(Y_target, Y_score): return mean_average_precision(Y_target, Y_score) def map_y(Y_target, Y_score): return mean_average_precision(np.array(Y_target).T, np.array(Y_score).T)
24.913669
74
0.636442
3.59375
dfe7872eca28b5a45f2dcfe0d8b6eff301974230
4,858
ts
TypeScript
packages/@glimmer/syntax/lib/traversal/traverse.ts
cyk/glimmer-vm
e30b3d64232a0972365c8cbb1dd8e236a0f33dcf
[ "MIT" ]
null
null
null
packages/@glimmer/syntax/lib/traversal/traverse.ts
cyk/glimmer-vm
e30b3d64232a0972365c8cbb1dd8e236a0f33dcf
[ "MIT" ]
null
null
null
packages/@glimmer/syntax/lib/traversal/traverse.ts
cyk/glimmer-vm
e30b3d64232a0972365c8cbb1dd8e236a0f33dcf
[ "MIT" ]
null
null
null
import visitorKeys from '../types/visitor-keys'; import { cannotRemoveNode, cannotReplaceNode, cannotReplaceOrRemoveInKeyHandlerYet, } from './errors'; import { Node, NodeType, ParentNode, ChildKey } from '../types/nodes'; import { NodeVisitor, NodeFunction, NodeHandler, KeyFunction, KeyHandler } from '../types/visitor'; function getEnterFunction(handler: KeyHandler): KeyFunction | undefined; function getEnterFunction(handler: NodeHandler): NodeFunction | undefined; function getEnterFunction( handler: NodeHandler | KeyHandler ): NodeFunction | KeyFunction | undefined { return typeof handler === 'function' ? handler : handler.enter; } function getExitFunction(handler: KeyHandler): KeyFunction | undefined; function getExitFunction(handler: NodeHandler): NodeFunction | undefined; function getExitFunction( handler: NodeHandler | KeyHandler ): NodeFunction | KeyFunction | undefined { return typeof handler !== 'function' ? handler.exit : undefined; } function getKeyHandler(handler: NodeHandler, key: ChildKey): KeyHandler | undefined { let keyVisitor = typeof handler !== 'function' ? 
handler.keys : undefined; if (keyVisitor === undefined) return; let keyHandler = keyVisitor[key]; if (keyHandler !== undefined) { // widen specific key to all keys return keyHandler as KeyHandler; } return keyVisitor.All; } function getNodeHandler(visitor: NodeVisitor, nodeType: NodeType): NodeHandler | undefined { let handler = visitor[nodeType]; if (handler !== undefined) { // widen specific Node to all nodes return handler as NodeHandler; } return visitor.All; } function visitNode(visitor: NodeVisitor, node: Node): Node | Node[] | undefined | null | void { let handler = getNodeHandler(visitor, node.type); let enter: NodeFunction | undefined; let exit: NodeFunction | undefined; if (handler !== undefined) { enter = getEnterFunction(handler); exit = getExitFunction(handler); } let result: Node | Node[] | undefined | null | void; if (enter !== undefined) { result = enter(node); } if (result !== undefined && result !== null) { if (JSON.stringify(node) === JSON.stringify(result)) { result = undefined; } else if (Array.isArray(result)) { return visitArray(visitor, result) || result; } else { return visitNode(visitor, result) || result; } } if (result === undefined) { let keys = visitorKeys[node.type]; for (let i = 0; i < keys.length; i++) { // we know if it has child keys we can widen to a ParentNode visitKey(visitor, handler, node as ParentNode, keys[i]); } if (exit !== undefined) { result = exit(node); } } return result; } function visitKey( visitor: NodeVisitor, handler: NodeHandler | undefined, node: ParentNode, key: ChildKey ) { let value = node[key] as Node | Node[] | null | undefined; if (!value) { return; } let keyEnter: KeyFunction | undefined; let keyExit: KeyFunction | undefined; if (handler !== undefined) { let keyHandler = getKeyHandler(handler, key); if (keyHandler !== undefined) { keyEnter = getEnterFunction(keyHandler); keyExit = getExitFunction(keyHandler); } } if (keyEnter !== undefined) { if (keyEnter(node, key) !== undefined) { throw 
cannotReplaceOrRemoveInKeyHandlerYet(node, key); } } if (Array.isArray(value)) { visitArray(visitor, value); } else { let result = visitNode(visitor, value); if (result !== undefined) { assignKey(node, key, result); } } if (keyExit !== undefined) { if (keyExit(node, key) !== undefined) { throw cannotReplaceOrRemoveInKeyHandlerYet(node, key); } } } function visitArray(visitor: NodeVisitor, array: Node[]) { for (let i = 0; i < array.length; i++) { let result = visitNode(visitor, array[i]); if (result !== undefined) { i += spliceArray(array, i, result) - 1; } } } function assignKey(node: Node, key: ChildKey, result: Node | Node[] | null) { if (result === null) { throw cannotRemoveNode(node[key], node, key); } else if (Array.isArray(result)) { if (result.length === 1) { node[key] = result[0]; } else { if (result.length === 0) { throw cannotRemoveNode(node[key], node, key); } else { throw cannotReplaceNode(node[key], node, key); } } } else { node[key] = result; } } function spliceArray(array: Node[], index: number, result: Node | Node[] | null) { if (result === null) { array.splice(index, 1); return 0; } else if (Array.isArray(result)) { array.splice(index, 1, ...result); return result.length; } else { array.splice(index, 1, result); return 1; } } export default function traverse(node: Node, visitor: NodeVisitor) { visitNode(visitor, node); }
27.91954
99
0.656649
3.09375
16b7512b11bf197d8a97456fb0175e50565b85ad
2,218
ts
TypeScript
src/core/virtual-node.ts
xyzingh/EvNet
fbc14dd4ef10b237a322dd54763d25f160f4aa82
[ "MIT" ]
1
2020-09-10T07:23:30.000Z
2020-09-10T07:23:30.000Z
src/core/virtual-node.ts
xyzingh/EvNet
fbc14dd4ef10b237a322dd54763d25f160f4aa82
[ "MIT" ]
null
null
null
src/core/virtual-node.ts
xyzingh/EvNet
fbc14dd4ef10b237a322dd54763d25f160f4aa82
[ "MIT" ]
null
null
null
import { VirtualNodeAction, VirtualNode, Node, VirtualNodeActionTypes, Port, ElementType, } from 'core/types'; import { isVirtualPort } from 'core/utilities'; import { PortSet } from 'core/portset'; import { VirtualPort } from 'core/virtual-port'; /** * Record the actions taking place on VirtualNode. * @internal */ export class VirtualNodeActionQueue { public queue: VirtualNodeAction[] = []; public add(action: VirtualNodeAction): void { this.queue.push(action); } public shift(): VirtualNodeAction | undefined { return this.queue.shift(); } public clear(): VirtualNodeAction[] { const result = this.queue; this.queue = []; return result; } public replaceVirtualNodeWithRealNode( virtualNode: VirtualNode, realNode: Node, doActionsIfPossible = true, ): void { const len = this.queue.length; for (let i = 0; i < len; ++i) { const action = this.queue[i]; switch (action.type) { case VirtualNodeActionTypes.PipeAction: if (action.from.node === virtualNode) { action.from = realNode.ports.get(action.from.name); } if (action.to.node === virtualNode) { action.to = realNode.ports.get(action.to.name); } if ( doActionsIfPossible && !isVirtualPort(action.from) && !isVirtualPort(action.to) ) { this.doAction(action); this.queue.splice(i, 1); } break; } } } public doAction(action: VirtualNodeAction): void { switch (action.type) { case VirtualNodeActionTypes.PipeAction: (action.from as Port).pipe(action.to as Port); break; } } } export const virtualNodeActionQueue = new VirtualNodeActionQueue(); export class NextNode implements VirtualNode { public readonly type = ElementType.VirtualNode; public readonly brand = 'NextNode'; public readonly ports: PortSet<VirtualPort> = new PortSet<VirtualPort>( VirtualPort, false, this, ); }
27.382716
74
0.582507
3.09375
b2de089e75f188f3482c29fc33bcbb7a91997599
27,975
py
Python
src/app.py
chunyuyuan/NEWS_2019_network-master
0eec84b383156c82fbd64d900dce578700575d99
[ "MIT" ]
null
null
null
src/app.py
chunyuyuan/NEWS_2019_network-master
0eec84b383156c82fbd64d900dce578700575d99
[ "MIT" ]
null
null
null
src/app.py
chunyuyuan/NEWS_2019_network-master
0eec84b383156c82fbd64d900dce578700575d99
[ "MIT" ]
null
null
null
from flask import Flask, request, render_template, send_file, Response import io import base64 import csv import json import time from collections import OrderedDict import numpy import pandas as pd from numpy import genfromtxt from flask import jsonify from flask_cors import CORS from LoadingNetwork import EchoWebSocket import shutil import gc from tornado.wsgi import WSGIContainer from tornado.web import Application, FallbackHandler from tornado.websocket import WebSocketHandler from tornado.ioloop import IOLoop app = Flask('flasknado') #app = Flask(__name__) app.debug = True CORS(app) ##initial netwrok csv data############################ rawdata = open('NetworkWithDistance.txt') with open('NetworkWithDistance.txt') as f: rawdata = f.readlines() # you may also want to remove whitespace characters like `\n` at the end # of each line rawdata = [x.strip() for x in rawdata] my_data = genfromtxt('networkwithdist.csv', delimiter=',') # my_data=numpy.delete(my_data,(0),axis=0) header = ['id', 'id_to', 'lon', 'lat', 'basinid'] frame = pd.DataFrame(my_data, columns=header) data = [] MY_GLOBAL = [] with open('tempcsv.csv') as f: for line in f: temp = line.strip().split(',') data.append(temp) ############################# data1 = [] with open('MyFile1.txt') as f: r = 0 for line in f: if(r > 0): data2 = [] # print(line) temp = line.split("\",") data2.append(temp[0][1:]) temp1 = temp[1].split(",[") data2.append(temp1[0]) data2.append(temp1[1][:-2]) data1.append(data2) r += 1 header = ['celllist', 'cellid', 'cellto'] frame_celllist = pd.DataFrame(data1, columns=header) frame_celllist = frame_celllist.drop_duplicates() del data1[:] ################## data_c = [] with open('powerplant_cell_loc.csv') as f: r = 0 for line in f: if(r > 0): data_cc = line.split(",") data_c.append(data_cc) # print(line) r += 1 header = ['cellid', 'loc'] frame_cell = pd.DataFrame(data_c, columns=header) frame_cell = frame_cell.drop_duplicates() del data_c[:] 
######################################################## import os import sys from SimpleHTTPServer import SimpleHTTPRequestHandler import BaseHTTPServer # class MyHTTPRequestHandler(SimpleHTTPRequestHandler): # def translate_path(self,path): # path = SimpleHTTPRequestHandler.translate_path(self,path) # if os.path.isdir(path): # for base in "index", "default": # for ext in ".html", ".htm", ".txt": # index = path + "/" + base + ext # if os.path.exists(index): # return index # return path # def test(HandlerClass = MyHTTPRequestHandler, # ServerClass = BaseHTTPServer.HTTPServer): # BaseHTTPServer.test(HandlerClass, ServerClass) ##################travesal network upstream############ '''def find_upstream(value): gc.collect() ii=0 li = [] temp=[] a=frame.ix[int(value)] temp.append(a) #print(MY_GLOBAL) MY_GLOBAL[:]=[] #x=data[int(value)] #x=frame[frame['id']==a['id_to']] #print x i=0 z=0 zz=0 while zz<len(temp): item=temp[zz] zz+=1 ##print(z,len(temp)) ## item=temp.pop() ## print item #x=frame[frame['id_to']==item['id']] x=data[int(float(item['id']))] #print x i=1 while i<len(x) : # d = OrderedDict() # xx=x.loc[x.index[i]] xx=frame.ix[int(float(x[i]))] # d['type'] = 'Feature' # d['geometry'] = { # 'type': 'MultiLineString', # 'coordinates': [[[float(xx['lon']),float(xx['lat'])],[float(item['lon']), float(item['lat'])]]] # } # d['properties'] = { "id":int(xx['id']),"id_to":int(xx['id_to']),"lon": float(xx['lon']),"lat": float(xx['lat']) # } # li.append(d) i+=1 # ii+=1 ##if ii%1000==0: ## print ii temp.append(xx) print(len(temp)) while z<len(temp): item=temp[z] z+=1 ##print(z,len(temp)) ## item=temp.pop() ## print item #x=frame[frame['id_to']==item['id']] x=data[int(float(item['id']))] #print x i=1 while i<len(x) : d = OrderedDict() #xx=x.loc[x.index[i]] xx=frame.ix[int(float(x[i]))] d['type'] = 'Feature' d['geometry'] = { 'type': 'MultiLineString', 'coordinates': [[[float(xx['lon']),float(xx['lat'])],[float(item['lon']), float(item['lat'])]]] } d['properties'] = { 
"id":int(xx['id']),"id_to":int(xx['id_to']),"lon": float(xx['lon']),"lat": float(xx['lat']) } li.append(d) d = OrderedDict() #xx=x.loc[x.index[i]] # xx=frame.ix[int(float(x[i]))] i+=1 ii+=1 if ii%1000==0 or (ii+1)/len(temp)==1: MY_GLOBAL.append((int)((ii+1)/(len(temp)* 1.0)*100)) ## print(checkInt,ii,len(temp)) ## print ii # temp.append(xx) #d = OrderedDict() #d['type'] = 'FeatureCollection' #d['features'] = li #print li print(ii) return li,200''' def find_upstream(value): gc.collect() ii = 0 li = [] temp = [] a = frame.ix[int(value)] temp.append(int(value)) MY_GLOBAL[:] = [] i = 0 z = 0 zz = 0 jstring = '' while z < len(temp): item = frame.ix[temp[z]] z += 1 x = data[int(float(item['id']))] #print x i = 1 while i < len(x): xx = frame.ix[int(float(x[i]))] jstring += '{"type": "Feature","geometry": { "type": "MultiLineString", "coordinates": [[[' + str(float(xx['lon'])) + ',' + str(float(xx['lat'])) + '],[' + str(float(item['lon'])) + ',' + str( float(item['lat'])) + ']]]},"properties": {"id_to": ' + str(int(xx['id_to'])) + ',"id":' + str(int(xx['id'])) + ',"lat":' + str(float(xx['lat'])) + ',"lon": ' + str(float(xx['lon'])) + '}},' ii += 1 temp.append(int(float(x[i]))) i += 1 if ii % 1000 == 0: # print(ii) MY_GLOBAL.append((int)((ii + 1) / (200000 * 1.0) * 100)) # print(checkInt,ii,len(temp)) ## print ii # temp.append(xx) #d = OrderedDict() #d['type'] = 'FeatureCollection' #d['features'] = li #print li # print(jstring) MY_GLOBAL.append(100) return jstring[:-1], 200 ##################travesal network downstream############ def find_downstream(value, sourceid): #print value,sourceid ii = 0 li = [] temp = [] jstring = '' # MY_GLOBAL[:]=[] a = frame.ix[int(value)] temp.append(a) check = True z = 0 while z < len(temp) and check: item = temp[z] z += 1 if(item['id_to'] == sourceid): check = False # break ## print item # if(item['id']==sourceid): # check=False x = frame.ix[frame['id'] == item['id_to']] #print x i = 0 while i < len(x): # d = OrderedDict() xx = 
x.ix[x.index[i]] jstring += '{"type": "Feature","geometry": { "type": "MultiLineString", "coordinates": [[[' + str(float(xx['lon'])) + ',' + str(float(xx['lat'])) + '],[' + str(float(item['lon'])) + ',' + str( float(item['lat'])) + ']]]},"properties": {"id_to": ' + str(int(xx['id_to'])) + ',"id":' + str(int(xx['id'])) + ',"lat":' + str(float(xx['lat'])) + ',"lon": ' + str(float(xx['lon'])) + '}},' # d['type'] = 'Feature' # d['geometry'] = { # 'type': 'MultiLineString', # 'coordinates': [[[float(xx['lon']),float(xx['lat'])],[float(item['lon']), float(item['lat'])]]] # } # d['properties'] = { "id":int(xx['id']),"id_to":int(xx['id_to']),"lon": float(xx['lon']),"lat": float(xx['lat']) # } # li.append(d) # d=OrderedDict() i += 1 ii += 1 temp.append(xx) # if(item['id']==sourceid): # check=False # MY_GLOBAL.append(100) # d = OrderedDict() # d['type'] = 'FeatureCollection' # d['features'] = li # print li # if (check==False): return jstring[:-1], 200 ##################travesal network downstream############ def find_downstream1(value): #print value,sourceid ii = 0 li = [] temp = [] jstring = '' # MY_GLOBAL[:]=[] a = frame.ix[int(value)] temp.append(a) check = True z = 0 while z < len(temp) and check: item = temp[z] z += 1 ## print item # if(item['id']==sourceid): # check=False x = frame.ix[frame['id'] == item['id_to']] #print x i = 0 while i < len(x): # d = OrderedDict() xx = x.ix[x.index[i]] jstring += '{"type": "Feature","geometry": { "type": "MultiLineString", "coordinates": [[[' + str(float(xx['lon'])) + ',' + str(float(xx['lat'])) + '],[' + str(float(item['lon'])) + ',' + str( float(item['lat'])) + ']]]},"properties": {"id_to": ' + str(int(xx['id_to'])) + ',"id":' + str(int(xx['id'])) + ',"lat":' + str(float(xx['lat'])) + ',"lon": ' + str(float(xx['lon'])) + '}},' # d['type'] = 'Feature' # d['geometry'] = { # 'type': 'MultiLineString', # 'coordinates': [[[float(xx['lon']),float(xx['lat'])],[float(item['lon']), float(item['lat'])]]] # } # d['properties'] = { 
"id":int(xx['id']),"id_to":int(xx['id_to']),"lon": float(xx['lon']),"lat": float(xx['lat']) # } # li.append(d) # d=OrderedDict() i += 1 ii += 1 temp.append(xx) # if(item['id']==sourceid): # check=False # MY_GLOBAL.append(100) # d = OrderedDict() # d['type'] = 'FeatureCollection' # d['features'] = li # print li # if (check==False): return jstring[:-1], 200 #######################pp upstream####################### def find_upstream_pp(cellid): gc.collect() # header=['celllist','cellid','cellto'] # header=['cellid','loc'] templi = frame_celllist[frame_celllist['cellid'] == cellid]['celllist'].tolist() templist = templi[0][1:-1].split(",") z = 0 jstring = '' while z < len(templist): curid = templist[z].strip() # print(curid,templist) curidloc = frame_cell[frame_cell['cellid'] == curid]['loc'].tolist() curidloc1 = curidloc[0].split("_") # print(curidloc1[0],curidloc1[1][:-1],curidloc[0]) z += 1 temp = frame_celllist[frame_celllist['cellid'] == curid]['cellto'].tolist() print(temp) temp = temp[0].split(",") if len(temp) == 1 and temp[0][:-1] == "none": # print(temp[0]) continue else: zz = 0 while zz < len(temp): # print(temp[zz],temp) x = temp[zz] zz += 1 if zz == len(temp): nextloc = frame_cell[frame_cell['cellid'] == x[:-1]]['loc'].tolist() else: nextloc = frame_cell[frame_cell['cellid'] == x]['loc'].tolist() nextloc1 = nextloc[0].split("_") # print(nextloc1[0],nextloc1[1][:-1],nextloc1) jstring += '{"type": "Feature","geometry": { "type": "MultiLineString", "coordinates": [[[' + str(curidloc1[0]) + ',' + str(curidloc1[1][:-1]) + '],[' + str( nextloc1[0]) + ',' + str(nextloc1[1][:-1]) + ']]]},"properties": {"lat":' + str(curidloc1[1][:-1]) + ',"lon": ' + str(curidloc1[0]) + '}},' # jstring+='{"type": "Feature","geometry": { "type": "MultiLineString", "coordinates": [[['+str(float(xx['lon']))+','+str(float(xx['lat']))+'],['+str(float(item['lon']))+','+str(float(item['lat']))+']]]},"properties": {"id_to": 
'+str(int(xx['id_to']))+',"id":'+str(int(xx['id']))+',"lat":'+str(float(xx['lat']))+',"lon": '+str(float(xx['lon']))+'}},'; return jstring[:-1], 200 #######################pp downstream####################### def find_downstream_pp(cellid, dcellid): gc.collect() # header=['celllist','cellid','cellto'] # header=['cellid','loc'] print(cellid, dcellid) templi = frame_celllist[frame_celllist['cellid'] == cellid]['celllist'].tolist() templist = templi[0][1:-1].split(",") z = len(templist) - 1 jstring = '' while z > 0: print(templist[z].strip()) curid = templist[z].strip() if curid != str(dcellid): z -= 1 else: print(z) break while z > 0: curid = templist[z].strip() # print(curid,templist) curidloc = frame_cell[frame_cell['cellid'] == curid]['loc'].tolist() curidloc1 = curidloc[0].split("_") # print(curidloc1[0],curidloc1[1][:-1],curidloc[0]) temp = frame_celllist[frame_celllist['cellid'] == templist[z].strip()]['cellto'].tolist() z -= 1 print(temp) temp = temp[0].split(",") if len(temp) == 1 and temp[0][:-1] == "none": # print(temp[0]) z -= 1 continue else: zz = 0 aaaa = 'false' while zz < len(temp): # print(temp[zz],temp) x = temp[zz] zz += 1 if zz == len(temp): if x[:-1] == curid: aaaa = 'true' nextloc = frame_cell[frame_cell['cellid'] == x[:-1]]['loc'].tolist() else: if x == curid: aaaa = 'true' nextloc = frame_cell[frame_cell['cellid'] == x]['loc'].tolist() if aaaa == 'true': nextloc1 = nextloc[0].split("_") # print(nextloc1[0],nextloc1[1][:-1],nextloc1) jstring += '{"type": "Feature","geometry": { "type": "MultiLineString", "coordinates": [[[' + str(curidloc1[0]) + ',' + str(curidloc1[1][:-1]) + '],[' + str( nextloc1[0]) + ',' + str(nextloc1[1][:-1]) + ']]]},"properties": {"lat":' + str(curidloc1[1][:-1]) + ',"lon": ' + str(curidloc1[0]) + '}},' # jstring+='{"type": "Feature","geometry": { "type": "MultiLineString", "coordinates": [[['+str(float(xx['lon']))+','+str(float(xx['lat']))+'],['+str(float(item['lon']))+','+str(float(item['lat']))+']]]},"properties": 
{"id_to": '+str(int(xx['id_to']))+',"id":'+str(int(xx['id']))+',"lat":'+str(float(xx['lat']))+',"lon": '+str(float(xx['lon']))+'}},'; print(jstring) if len(jstring) > 0: return jstring[:-1], 200 else: return jstring, 200 @app.route("/", methods=['GET', 'POST']) def index(): print(request) return render_template('test1.html') @app.route("/api/", methods=['GET', 'POST']) def update(): print(request.method) if request.method == "POST": source = request.form["source"] dist = request.form["dist"] pic = request.form["pic"] downfirst = request.form["downfirst"] pp = request.form["pp"] print(pp, source, dist, downfirst, pic) if(pp == 'yes'): upstream = request.form["upstream"] if(upstream == 'yes'): ucellid = request.form["ucellid"] re, ii = find_upstream_pp(ucellid) # print(re) return json.dumps(re), ii # if(upstream=='no'): ### ucellid = request.form["ucellid"] # dcellid = request.form["dcellid"] # re,ii=find_downstream_pp(ucellid,dcellid) # print(re) # if(pp=='no'): source = request.form["source"] dist = request.form["dist"] pic = request.form["pic"] downfirst = request.form["downfirst"] #print dist if(downfirst == 'no'): if(source == 'yes'): sourceid = request.form["sourceid"] #print sourceid import time start = time. time() re, ii = find_upstream(sourceid) end = time. 
time() #print ii,(end-start) # print(re) # print(MY_GLOBAL) return json.dumps(re), ii if(dist == 'yes'): distid = request.form["distid"] sourceid = request.form["sourceid"] MY_GLOBAL[:] = [] #print distid,sourceid re, ii = find_downstream(int(distid), int(sourceid)) print (re) gc.collect() MY_GLOBAL.append(100) return json.dumps(re, sort_keys=False, indent=4), ii if(downfirst == 'yes'): if(dist == 'yes'): distid = request.form["distid"] sourceid = request.form["sourceid"] MY_GLOBAL[:] = [] #print distid,sourceid re, ii = find_downstream1(int(distid)) print (re) gc.collect() MY_GLOBAL.append(100) return json.dumps(re, sort_keys=False, indent=4), ii if(pic == 'yes'): #print request.form MY_GLOBAL[:] = [] start1 = request.form["dist_lat"] start2 = request.form["dist_lon"] goal1 = request.form["source_lat"] goal2 = request.form["source_lon"] fromdate = request.form["from"] todate = request.form["to"] import time before = time.time() output, str1, str2, str3 = LoadingNetwork.main( [start1, start2], [goal1, goal2], fromdate, todate, rawdata) #print str1,str2,str3 after = time.time() print ("time,", after - before) if(isinstance(output, str)): return output, 201 else: # gc.collect() #print base64.b64encode(output.getvalue()) return base64.b64encode( output.getvalue()) + "***" + str1 + "***" + str2 + "***" + str3, 200 class WebSocket(WebSocketHandler): def on_message(self, message): # self.write_message("Received: " + message) # self.write_message("Received2: " + message) # m=message.split("&") print("Received message: " + m[0]) print("Received message: " + m[1]) print("Received message: " + m[2]) print("Received message: " + m[3]) print("Received message: " + m[4]) print("Received message: " + m[5]) print("Received message: " + m[6]) m=message[1:-1].split("&") source = m[0].split("=")[1] value = m[1].split("=")[1] dist = m[2].split("=")[1] value1 = m[3].split("=")[1] pic = m[4].split("=")[1] downfirst = m[5].split("=")[1] pp = m[6].split("=") print(pp, source, dist, 
downfirst, pic,value,value1) ###################################upstram##########################3 if(downfirst == 'no'): if(source == 'yes'): ################## gc.collect() ii = 0 li = [] temp = [] a = frame.ix[int(value)] temp.append(int(value)) i = 0 z = 0 zz = 0 jstring = '' while z < len(temp): item = frame.ix[temp[z]] z += 1 x = data[int(float(item['id']))] #print x i = 1 while i < len(x): xx = frame.ix[int(float(x[i]))] jstring += '{"type": "Feature","geometry": { "type": "MultiLineString", "coordinates": [[[' + str(float(xx['lon'])) + ',' + str(float(xx['lat'])) + '],[' + str(float(item['lon'])) + ',' + str( float(item['lat'])) + ']]]},"properties": {"id_to": ' + str(int(xx['id_to'])) + ',"id":' + str(int(xx['id'])) + ',"lat":' + str(float(xx['lat'])) + ',"lon": ' + str(float(xx['lon'])) + '}},' ii += 1 temp.append(int(float(x[i]))) i += 1 if(len(jstring)>1500000): zz+=5 self.write_message( jstring[:-1]) self.write_message( '~'+str(zz*1.0/100)) jstring = '' self.write_message( jstring[:-1]) self.write_message( '~1') ############################downstream######################### if(dist == 'yes'): ######################################################################## ii = 0 li = [] temp = [] jstring = '' # MY_GLOBAL[:]=[] a = frame.ix[int(value1)] temp.append(a) check = True z = 0 zz=0 while z < len(temp) and check: item = temp[z] z += 1 if(item['id_to'] == int(value)): check = False x = frame.ix[frame['id'] == item['id_to']] #print x i = 0 while i < len(x): # d = OrderedDict() xx = x.ix[x.index[i]] jstring += '{"type": "Feature","geometry": { "type": "MultiLineString", "coordinates": [[[' + str(float(xx['lon'])) + ',' + str(float(xx['lat'])) + '],[' + str(float(item['lon'])) + ',' + str( float(item['lat'])) + ']]]},"properties": {"id_to": ' + str(int(xx['id_to'])) + ',"id":' + str(int(xx['id'])) + ',"lat":' + str(float(xx['lat'])) + ',"lon": ' + str(float(xx['lon'])) + '}},' i += 1 ii += 1 temp.append(xx) if(len(jstring)>150000): zz+=5 
self.write_message( jstring[:-1]) self.write_message( '~'+str(zz*1.0/100)) jstring = '' self.write_message( jstring[:-1]) self.write_message( '~1') ##########################downfirst############################################## if(downfirst == 'yes'): if(dist == 'yes'): ii = 0 li = [] temp = [] jstring = '' # MY_GLOBAL[:]=[] a = frame.ix[int(value1)] temp.append(a) z = 0 zz=0 while z < len(temp) : item = temp[z] z += 1 # break ## print item # if(item['id']==sourceid): # check=False x = frame.ix[frame['id'] == item['id_to']] #print x i = 0 while i < len(x): # d = OrderedDict() xx = x.ix[x.index[i]] jstring += '{"type": "Feature","geometry": { "type": "MultiLineString", "coordinates": [[[' + str(float(xx['lon'])) + ',' + str(float(xx['lat'])) + '],[' + str(float(item['lon'])) + ',' + str( float(item['lat'])) + ']]]},"properties": {"id_to": ' + str(int(xx['id_to'])) + ',"id":' + str(int(xx['id'])) + ',"lat":' + str(float(xx['lat'])) + ',"lon": ' + str(float(xx['lon'])) + '}},' # d['type'] = 'Feature' # d['geometry'] = { # 'type': 'MultiLineString', # 'coordinates': [[[float(xx['lon']),float(xx['lat'])],[float(item['lon']), float(item['lat'])]]] # } # d['properties'] = { "id":int(xx['id']),"id_to":int(xx['id_to']),"lon": float(xx['lon']),"lat": float(xx['lat']) # } # li.append(d) # d=OrderedDict() i += 1 ii += 1 temp.append(xx) # if(item['id']==sourceid): # check=False # MY_GLOBAL.append(100) # d = OrderedDict() # d['type'] = 'FeatureCollection' # d['features'] = li # print li # if (check==False): if(len(jstring)>150000): zz+=5 self.write_message( jstring[:-1]) self.write_message( '~'+str(zz*1.0/100)) jstring = '' self.write_message( jstring[:-1]) self.write_message( '~1') # if(downfirst == 'yes'): if(pic == 'yes'): #print request.form #"&dist_lat="+dist_lat+"&dist_lon="+dist_lon+"&source_lat="+source_lat+"&source_lon="+source_lon+"&from="+value3.value+"&to="+value4.value); #m[6].split("=") # start1 = request.form["dist_lat"] # start2 = request.form["dist_lon"] # 
goal1 = request.form["source_lat"] # goal2 = request.form["source_lon"] # fromdate = request.form["from"] # todate = request.form["to"] start1 = m[7].split("=")[1] start2 = m[8].split("=")[1] goal1 =m[9].split("=")[1] goal2 = m[10].split("=")[1] fromdate = m[11].split("=")[1] todate = m[12].split("=")[1] print(start1,start2,goal1,goal2,fromdate,todate) import time before = time.time() output, str1, str2, str3 = LoadingNetwork.main( [start1, start2], [goal1, goal2], fromdate, todate, rawdata) #print str1,str2,str3 # print(output) after = time.time() print ("time,", after - before) # if(isinstance(output, str)): # return output, 201 # else: # gc.collect() #print base64.b64encode(output.getvalue()) # return base64.b64encode( # output.getvalue()) + "***" + str1 + "***" + str2 + "***" + str3, 200 # if __name__ == "__main__": container = WSGIContainer(app) server = Application([ (r'/websocket/', WebSocket), (r'/we/', EchoWebSocket), (r'.*', FallbackHandler, dict(fallback=container)) ]) server.listen(5000) IOLoop.instance().start() # test()
16.913543
360
0.43378
3.484375
437bedebb256fe41023d70a8b4d2313d4a24d111
11,383
go
Go
service/endpoint.go
jirenius/rest2res
9a64fa84fea1eb741368be5875aea096f59ce32a
[ "MIT" ]
12
2019-05-07T01:30:30.000Z
2021-12-12T14:57:06.000Z
service/endpoint.go
jirenius/rest2res
9a64fa84fea1eb741368be5875aea096f59ce32a
[ "MIT" ]
null
null
null
service/endpoint.go
jirenius/rest2res
9a64fa84fea1eb741368be5875aea096f59ce32a
[ "MIT" ]
null
null
null
package service import ( "encoding/json" "errors" "fmt" "io/ioutil" "net/http" "reflect" "strconv" "strings" "sync" "time" res "github.com/jirenius/go-res" "github.com/jirenius/timerqueue" ) type endpoint struct { s *Service url string urlParams []string refreshCount int cachedURLs map[string]*cachedResponse access res.AccessHandler timeout time.Duration group string resetPatterns []string tq *timerqueue.Queue mu sync.RWMutex node } type cachedResponse struct { reloads int reqParams map[string]string crs map[string]cachedResource rerr *res.Error } type cachedResource struct { typ resourceType model map[string]interface{} collection []interface{} } type resourceType byte const defaultRefreshDuration = time.Second * 3 const ( resourceTypeUnset resourceType = iota resourceTypeModel resourceTypeCollection ) func newEndpoint(s *Service, cep *EndpointCfg) (*endpoint, error) { if cep.URL == "" { return nil, errors.New("missing url") } if cep.Pattern == "" { return nil, errors.New("missing pattern") } urlParams, err := urlParams(cep.URL) if err != nil { return nil, err } ep := &endpoint{ s: s, url: cep.URL, urlParams: urlParams, refreshCount: cep.RefreshCount, cachedURLs: make(map[string]*cachedResponse), access: cep.Access, timeout: time.Millisecond * time.Duration(cep.Timeout), } ep.tq = timerqueue.New(ep.handleRefresh, time.Millisecond*time.Duration(cep.RefreshTime)) return ep, nil } func (ep *endpoint) handler() res.Handler { return res.Handler{ Access: ep.access, GetResource: ep.getResource, Group: ep.url, } } func (ep *endpoint) handleRefresh(i interface{}) { ep.s.Debugf("Refreshing %s", i) url := i.(string) // Check if url is cached ep.mu.RLock() cresp, ok := ep.cachedURLs[url] ep.mu.RUnlock() if !ok { ep.s.Logf("Url %s not found in cache on refresh", url) return } params := cresp.reqParams ep.s.res.WithGroup(url, func(s *res.Service) { cresp.reloads++ if cresp.rerr != nil || cresp.reloads > ep.refreshCount { // Reset resources ep.mu.Lock() delete(ep.cachedURLs, 
url) ep.mu.Unlock() resetResources := make([]string, len(ep.resetPatterns)) for i, rp := range ep.resetPatterns { for _, param := range ep.urlParams { rp = strings.Replace(rp, "${"+param+"}", params[param], 1) } resetResources[i] = rp } ep.s.res.Reset(resetResources, nil) return } defer ep.tq.Add(i) ncresp := ep.getURL(url, params) if ncresp.rerr != nil { ep.s.Logf("Error refreshing url %s:\n\t%s", url, ncresp.rerr.Message) return } for rid, nv := range ncresp.crs { v, ok := cresp.crs[rid] if ok { r, err := ep.s.res.Resource(rid) if err != nil { // This shouldn't be possible. Let's panic. panic(fmt.Sprintf("error getting res resource %s:\n\t%s", rid, err)) } updateResource(v, nv, r) delete(cresp.crs, rid) } } // for rid := range cresp.crs { // r, err := ep.s.res.Resource(rid) // r.DeleteEvent() // } // Replacing the old cachedResources with the new ones cresp.crs = ncresp.crs }) } func updateResource(v, nv cachedResource, r res.Resource) { switch v.typ { case resourceTypeModel: updateModel(v.model, nv.model, r) case resourceTypeCollection: updateCollection(v.collection, nv.collection, r) } } func updateModel(a, b map[string]interface{}, r res.Resource) { ch := make(map[string]interface{}) for k := range a { if _, ok := b[k]; !ok { ch[k] = res.DeleteAction } } for k, v := range b { ov, ok := a[k] if !(ok && reflect.DeepEqual(v, ov)) { ch[k] = v } } r.ChangeEvent(ch) } func updateCollection(a, b []interface{}, r res.Resource) { var i, j int // Do a LCS matric calculation // https://en.wikipedia.org/wiki/Longest_common_subsequence_problem s := 0 m := len(a) n := len(b) // Trim of matches at the start and end for s < m && s < n && reflect.DeepEqual(a[s], b[s]) { s++ } if s == m && s == n { return } for s < m && s < n && reflect.DeepEqual(a[m-1], b[n-1]) { m-- n-- } var aa, bb []interface{} if s > 0 || m < len(a) { aa = a[s:m] m = m - s } else { aa = a } if s > 0 || n < len(b) { bb = b[s:n] n = n - s } else { bb = b } // Create matrix and initialize it w := m + 1 c := 
make([]int, w*(n+1)) for i = 0; i < m; i++ { for j = 0; j < n; j++ { if reflect.DeepEqual(aa[i], bb[j]) { c[(i+1)+w*(j+1)] = c[i+w*j] + 1 } else { v1 := c[(i+1)+w*j] v2 := c[i+w*(j+1)] if v2 > v1 { c[(i+1)+w*(j+1)] = v2 } else { c[(i+1)+w*(j+1)] = v1 } } } } idx := m + s i = m j = n rm := 0 var adds [][3]int addCount := n - c[w*(n+1)-1] if addCount > 0 { adds = make([][3]int, 0, addCount) } Loop: for { m = i - 1 n = j - 1 switch { case i > 0 && j > 0 && reflect.DeepEqual(aa[m], bb[n]): idx-- i-- j-- case j > 0 && (i == 0 || c[i+w*n] >= c[m+w*j]): adds = append(adds, [3]int{n, idx, rm}) j-- case i > 0 && (j == 0 || c[i+w*n] < c[m+w*j]): idx-- r.RemoveEvent(idx) rm++ i-- default: break Loop } } // Do the adds l := len(adds) - 1 for i := l; i >= 0; i-- { add := adds[i] r.AddEvent(bb[add[0]], add[1]-rm+add[2]+l-i) } } func (ep *endpoint) getResource(r res.GetRequest) { // Replace param placeholders url := ep.url for _, param := range ep.urlParams { url = strings.Replace(url, "${"+param+"}", r.PathParam(param), 1) } // Check if url is cached ep.mu.RLock() cresp, ok := ep.cachedURLs[url] ep.mu.RUnlock() if !ok { if ep.timeout > 0 { r.Timeout(ep.timeout) } cresp = ep.cacheURL(url, r.PathParams()) } // Return any encountered error when getting the endpoint if cresp.rerr != nil { r.Error(cresp.rerr) return } // Check if resource exists cr, ok := cresp.crs[r.ResourceName()] if !ok { r.NotFound() return } switch cr.typ { case resourceTypeModel: r.Model(cr.model) case resourceTypeCollection: r.Collection(cr.collection) } } func (ep *endpoint) cacheURL(url string, reqParams map[string]string) *cachedResponse { cresp := ep.getURL(url, reqParams) ep.mu.Lock() ep.cachedURLs[url] = cresp ep.mu.Unlock() ep.tq.Add(url) return cresp } func (ep *endpoint) getURL(url string, reqParams map[string]string) *cachedResponse { cr := cachedResponse{reqParams: reqParams} // Make HTTP request resp, err := http.Get(url) if err != nil { ep.s.Debugf("Error fetching endpoint: %s\n\t%s", url, err) 
cr.rerr = res.InternalError(err) return &cr } defer resp.Body.Close() // Handle non-2XX status codes if resp.StatusCode == 404 { cr.rerr = res.ErrNotFound return &cr } if resp.StatusCode < 200 || resp.StatusCode >= 300 { cr.rerr = res.InternalError(fmt.Errorf("unexpected response code: %d", resp.StatusCode)) return &cr } // Read body body, err := ioutil.ReadAll(resp.Body) if err != nil { cr.rerr = res.InternalError(err) return &cr } // Unmarshal body var v value if err = json.Unmarshal(body, &v); err != nil { cr.rerr = res.InternalError(err) return &cr } // Traverse the data crs := make(map[string]cachedResource) err = ep.traverse(crs, v, nil, reqParams) if err != nil { cr.rerr = res.InternalError(fmt.Errorf("invalid data structure for %s: %s", url, err)) return &cr } cr.crs = crs return &cr } func (ep *endpoint) traverse(crs map[string]cachedResource, v value, path []string, reqParams map[string]string) error { var err error switch v.typ { case valueTypeObject: _, err = traverseModel(crs, v, path, &ep.node, reqParams, "") case valueTypeArray: _, err = traverseCollection(crs, v, path, &ep.node, reqParams, "") default: return errors.New("endpoint didn't respond with a json object or array") } if err != nil { return err } return nil } func traverseModel(crs map[string]cachedResource, v value, path []string, n *node, reqParams map[string]string, pathPart string) (res.Ref, error) { if n.typ != resourceTypeModel { return "", fmt.Errorf("expected a model at %s", pathStr(path)) } // Append path part switch n.ptyp { case pathTypeDefault: path = append(path, pathPart) case pathTypeProperty: idv, ok := v.obj[n.idProp] if !ok { return "", fmt.Errorf("missing id property %s at:\n\t%s", n.idProp, pathStr(path)) } switch idv.typ { case valueTypeString: var idstr string err := json.Unmarshal(idv.raw, &idstr) if err != nil { return "", err } path = append(path, idstr) case valueTypeNumber: path = append(path, string(idv.raw)) default: return "", fmt.Errorf("invalid id value for 
property %s at:\n\t%s", n.idProp, pathStr(path)) } path = append(path) } model := make(map[string]interface{}) for k, kv := range v.obj { // Get next node next := n.nodes[k] if next == nil { next = n.param } switch kv.typ { case valueTypeObject: if next != nil { ref, err := traverseModel(crs, kv, path, next, reqParams, k) if err != nil { return "", err } model[k] = ref } case valueTypeArray: if next != nil { ref, err := traverseCollection(crs, kv, path, next, reqParams, k) if err != nil { return "", err } model[k] = ref } default: if next != nil { return "", fmt.Errorf("unexpected primitive value for property %s at %s", k, pathStr(path)) } model[k] = kv } } // Create rid p := make([]interface{}, len(n.params)) for j, pp := range n.params { switch pp.typ { case paramTypeURL: p[j] = reqParams[pp.name] case paramTypePath: p[j] = path[pp.idx] } } rid := fmt.Sprintf(n.pattern, p...) crs[rid] = cachedResource{ typ: resourceTypeModel, model: model, } return res.Ref(rid), nil } func traverseCollection(crs map[string]cachedResource, v value, path []string, n *node, reqParams map[string]string, pathPart string) (res.Ref, error) { if n.typ != resourceTypeCollection { return "", fmt.Errorf("expected a collection at %s", pathStr(path)) } if n.ptyp != pathTypeRoot { // Append path part path = append(path, pathPart) } collection := make([]interface{}, len(v.arr)) for j, kv := range v.arr { next := n.param switch kv.typ { case valueTypeObject: if next != nil { ref, err := traverseModel(crs, kv, path, next, reqParams, strconv.Itoa(j)) if err != nil { return "", err } collection[j] = ref } case valueTypeArray: if next != nil { ref, err := traverseCollection(crs, kv, path, next, reqParams, strconv.Itoa(j)) if err != nil { return "", err } collection[j] = ref } default: if next != nil { return "", fmt.Errorf("unexpected primitive value for element %d at %s", j, pathStr(path)) } collection[j] = kv } } // Create rid p := make([]interface{}, len(n.params)) for k, pp := range n.params { 
switch pp.typ { case paramTypeURL: p[k] = reqParams[pp.name] case paramTypePath: p[k] = path[pp.idx] } } rid := fmt.Sprintf(n.pattern, p...) crs[rid] = cachedResource{ typ: resourceTypeCollection, collection: collection, } return res.Ref(rid), nil } func pathStr(path []string) string { if len(path) == 0 { return "endpoint root" } return strings.Join(path, ".") }
21.118738
152
0.612844
3.125
77d7b261ad1febba3308206e114ebff010e09836
6,061
rs
Rust
src/lib.rs
y-fujii/nanore
e9ee360de11c49109a45829cb85d4c21db270e22
[ "MIT" ]
null
null
null
src/lib.rs
y-fujii/nanore
e9ee360de11c49109a45829cb85d4c21db270e22
[ "MIT" ]
null
null
null
src/lib.rs
y-fujii/nanore
e9ee360de11c49109a45829cb85d4c21db270e22
[ "MIT" ]
null
null
null
// (c) Yasuhiro Fujii <http://mimosa-pudica.net>, under MIT License. use std::*; pub enum RegEx<'a, T, U: Copy = ()> { Eps, Atom( Box<dyn 'a + Fn( usize, &T ) -> bool> ), Alt( Box<RegEx<'a, T, U>>, Box<RegEx<'a, T, U>> ), Seq( Box<RegEx<'a, T, U>>, Box<RegEx<'a, T, U>>, usize ), Repeat( Box<RegEx<'a, T, U>>, usize ), Weight( isize ), Mark( U ), } impl<'a, T, U: Copy> ops::Add for Box<RegEx<'a, T, U>> { type Output = Box<RegEx<'a, T, U>>; fn add( self, other: Self ) -> Self::Output { Box::new( RegEx::Alt( self, other ) ) } } impl<'a, T, U: Copy> ops::Mul for Box<RegEx<'a, T, U>> { type Output = Box<RegEx<'a, T, U>>; fn mul( self, other: Self ) -> Self::Output { Box::new( RegEx::Seq( self, other, usize::MAX ) ) } } pub fn eps<'a, T, U: Copy>() -> Box<RegEx<'a, T, U>> { Box::new( RegEx::Eps ) } pub fn atom<'a, T, U: Copy, F: 'a + Fn( usize, &T ) -> bool>( f: F ) -> Box<RegEx<'a, T, U>> { Box::new( RegEx::Atom( Box::new( f ) ) ) } pub fn rep<'a, T, U: Copy>( e0: Box<RegEx<'a, T, U>> ) -> Box<RegEx<'a, T, U>> { Box::new( RegEx::Repeat( e0, usize::MAX ) ) } pub fn weight<'a, T, U: Copy>( w: isize ) -> Box<RegEx<'a, T, U>> { Box::new( RegEx::Weight( w ) ) } pub fn mark<'a, T, U: Copy>( m: U ) -> Box<RegEx<'a, T, U>> { Box::new( RegEx::Mark( m ) ) } pub fn opt<'a, T, U: Copy>( e0: Box<RegEx<'a, T, U>> ) -> Box<RegEx<'a, T, U>> { eps() + e0 } pub fn any<'a, T, U: Copy>() -> Box<RegEx<'a, T, U>> { atom( move |_, _| true ) } pub fn val<'a, T: 'a + PartialEq, U: Copy>( v0: T ) -> Box<RegEx<'a, T, U>> { atom( move |_, v| *v == v0 ) } pub struct RegExRoot<'a, T, U: Copy = ()> { regex: Box<RegEx<'a, T, U>>, nstate: usize, } impl<'a, T, U: Copy> RegExRoot<'a, T, U> { pub fn new( mut e: Box<RegEx<'a, T, U>> ) -> RegExRoot<'a, T, U> { let n = Self::renumber( &mut e, 0 ); RegExRoot{ regex: e, nstate: n, } } fn renumber( e: &mut RegEx<'a, T, U>, i: usize ) -> usize { match *e { RegEx::Eps => i, RegEx::Atom( _ ) => i, RegEx::Alt( ref mut e0, ref mut e1 ) => { Self::renumber( e1, 
Self::renumber( e0, i ) ) } RegEx::Seq( ref mut e0, ref mut e1, ref mut s ) => { *s = Self::renumber( e0, i ); Self::renumber( e1, *s + 1 ) } RegEx::Repeat( ref mut e0, ref mut s ) => { *s = i; Self::renumber( e0, i + 1 ) } RegEx::Weight( _ ) => i, RegEx::Mark( _ ) => i, } } } struct Path<T>( usize, T, Option<rc::Rc<Path<T>>> ); #[derive( Clone )] struct State<T>( isize, Option<rc::Rc<Path<T>>> ); #[derive( Clone )] pub struct Matcher<'a, T, U: Copy = ()> { root: &'a RegExRoot<'a, T, U>, index: usize, s0: State<U>, states: Vec<State<U>>, s1: State<U>, } impl<'a, T, U: Copy> Matcher<'a, T, U> { pub fn new( root: &'a RegExRoot<'a, T, U> ) -> Matcher<'a, T, U> { let mut this = Matcher{ root: root, index: 0, s0: State( 0, None ), states: vec![ State( isize::MAX, None ); root.nstate ], s1: State( isize::MAX, None ), }; this.s1 = this.propagate( &root.regex, State( 0, None ) ); this } pub fn feed( &mut self, v: &T ) { let s0 = mem::replace( &mut self.s0, State( isize::MAX, None ) ); let s1 = self.shift( &self.root.regex, v, s0 ); self.index += 1; let s2 = self.propagate( &self.root.regex, State( isize::MAX, None ) ); self.s1 = Self::choice( s1, s2 ); } pub fn feed_iter<'b, Iter: IntoIterator<Item = &'b T>>( &mut self, iter: Iter ) where 'a: 'b { for v in iter { self.feed( v ); } } pub fn is_match( &self ) -> bool { self.s1.0 != isize::MAX } pub fn is_alive( &self ) -> bool { self.s0.0 != isize::MAX || self.s1.0 != isize::MAX || self.states.iter().any( |s| s.0 != isize::MAX ) } pub fn path( &self ) -> Vec<(usize, U)> { let mut result = Vec::new(); let mut it = self.s1.1.clone(); while let Some( e ) = it { result.push( (e.0, e.1) ); it = e.2.clone(); } result.reverse(); result } fn choice( s0: State<U>, s1: State<U> ) -> State<U> { if s1.0 < s0.0 { s1 } else { s0 } } fn choice_inplace( s0: &mut State<U>, s1: State<U> ) { if s1.0 < s0.0 { *s0 = s1; } } // handle epsilon transition. 
fn propagate( &mut self, e: &RegEx<'a, T, U>, s0: State<U> ) -> State<U> { match *e { RegEx::Eps => s0, RegEx::Atom( _ ) => State( isize::MAX, None ), RegEx::Alt( ref e0, ref e1 ) => { let s1 = self.propagate( e0, s0.clone() ); let s2 = self.propagate( e1, s0 ); Self::choice( s1, s2 ) } RegEx::Seq( ref e0, ref e1, s ) => { let s1 = self.propagate( e0, s0 ); Self::choice_inplace( &mut self.states[s], s1 ); let s2 = self.states[s].clone(); self.propagate( e1, s2 ) } RegEx::Repeat( ref e0, s ) => { Self::choice_inplace( &mut self.states[s], s0 ); let s1 = self.states[s].clone(); let s2 = self.propagate( e0, s1 ); Self::choice_inplace( &mut self.states[s], s2 ); self.states[s].clone() } RegEx::Weight( w ) => { let dw = if s0.0 != isize::MAX { w } else { 0 }; State( s0.0 + dw, s0.1 ) } RegEx::Mark( m ) => { State( s0.0, Some( rc::Rc::new( Path( self.index, m, s0.1 ) ) ) ) } } } // handle normal transition. fn shift( &mut self, e: &RegEx<'a, T, U>, v: &T, s0: State<U> ) -> State<U> { match *e { RegEx::Eps => State( isize::MAX, None ), RegEx::Atom( ref f ) => { if s0.0 != isize::MAX && f( self.index, v ) { s0 } else { State( isize::MAX, None ) } } RegEx::Alt( ref e0, ref e1 ) => { let s1 = self.shift( e0, v, s0.clone() ); let s2 = self.shift( e1, v, s0 ); Self::choice( s1, s2 ) } RegEx::Seq( ref e0, ref e1, s ) => { let s1 = self.shift( e0, v, s0 ); let s2 = mem::replace( &mut self.states[s], s1 ); self.shift( e1, v, s2 ) } RegEx::Repeat( ref e0, s ) => { let s1 = mem::replace( &mut self.states[s], State( isize::MAX, None ) ); self.states[s] = self.shift( e0, v, s1 ); State( isize::MAX, None ) } RegEx::Weight( _ ) => State( isize::MAX, None ), RegEx::Mark( _ ) => State( isize::MAX, None ), } } }
25.791489
95
0.519881
3.125
e5243bcaf71ccb5aaa825244f3d85a3ec0bf22aa
1,540
ts
TypeScript
src/services/input.ts
petli-full/awk-vscode
849f40f8b6ad81a5472817688b702d15f916c477
[ "MIT" ]
2
2021-03-26T15:36:14.000Z
2021-07-28T05:48:07.000Z
src/services/input.ts
petli-full/awk-vscode
849f40f8b6ad81a5472817688b702d15f916c477
[ "MIT" ]
1
2021-07-28T05:50:13.000Z
2021-08-09T02:03:48.000Z
src/services/input.ts
petli-full/awk-vscode
849f40f8b6ad81a5472817688b702d15f916c477
[ "MIT" ]
1
2021-03-26T15:36:17.000Z
2021-03-26T15:36:17.000Z
import * as vscode from 'vscode'; interface Input { load: () => void; reset: () => void; ready: (text: string) => Thenable<vscode.TextEditor>; get: () => string; getFilename: () => string; }; let _input = ''; let _filename = ''; const load = () => { const doc = vscode.window.activeTextEditor?.document; _input = (doc ? doc.getText() : '').trim(); _filename = (doc ? doc.fileName : '').trim(); }; let editor$: null | Thenable<vscode.TextEditor> = null; let _editor: null | vscode.TextEditor = null; const reset = () => { _input = ''; editor$ = null; _editor = null; }; const ready = (text: string): Thenable<vscode.TextEditor> => { if (editor$ === null || (_editor !== null && _editor.document.isClosed)) { _input = text; editor$ = vscode.workspace.openTextDocument({ language: 'plaintext', content: '' }).then(doc => { return vscode.window.showTextDocument(doc).then(editor => { editor.edit(builder => { builder.insert(doc.positionAt(0), text); }).then(() => editor); _editor = editor; return editor; }); }); return editor$; } else if (_editor === null) { return editor$.then(() => ready(text)); } return editor$; }; const get = (): string => { return _input; }; const getFilename = (): string => { return _filename; }; export const input: Input = { load, reset, ready, get, getFilename, };
23.333333
105
0.54026
3.015625
9bd21b0a0645626b2270758effb332d3ec926934
2,395
js
JavaScript
api/controllers/expenseController.js
SebastianDochia/budget-app
7b03c3bf03866f30756eaa0dbf7669fc74767aac
[ "MIT" ]
null
null
null
api/controllers/expenseController.js
SebastianDochia/budget-app
7b03c3bf03866f30756eaa0dbf7669fc74767aac
[ "MIT" ]
null
null
null
api/controllers/expenseController.js
SebastianDochia/budget-app
7b03c3bf03866f30756eaa0dbf7669fc74767aac
[ "MIT" ]
null
null
null
'use strict'; const firebase = require('../db'); const Expense = require('../models/expense'); const firestore = firebase.firestore(); const addExpense = async (req, res, next) => { try { const data = req.body; await firestore.collection('expenses').doc().set(data); res.send('Expense saved successfuly'); } catch (error) { res.status(400).send(error.message); } } const getAllExpenses = async (req, res, next) => { try { const expenses = await firestore.collection('expenses'); const data = await expenses.get(); const expensesArray = []; if(data.empty) { res.status(404).send('No expense found'); }else { data.forEach(doc => { const expense = new Expense( doc.id, doc.data().body.name, doc.data().body.value, doc.data().body.date, doc.data().body.category, ); expensesArray.push(expense); }); res.send(expensesArray); } } catch (error) { res.status(400).send(error.message); } } const getExpense = async (req, res, next) => { try { const id = req.params.id; const expense = await firestore.collection('expense').doc(id); const data = await expense.get(); if(!data.exists) { res.status(404).send('Expense with the given ID not found'); }else { res.send(data.data()); } } catch (error) { res.status(400).send(error.message); } } const updateExpense = async (req, res, next) => { try { const id = req.params.id; const data = req.body; const expense = await firestore.collection('expenses').doc(id); await expense.update(data); res.send('Expense updated successfuly'); } catch (error) { res.status(400).send(error.message); } } const deleteExpense = async (req, res, next) => { try { const id = req.params.id; await firestore.collection('expenses').doc(id).delete(); res.send('Expense deleted successfuly'); } catch (error) { res.status(400).send(error.message); } } module.exports = { addExpense, getAllExpenses, getExpense, updateExpense, deleteExpense, }
28.176471
72
0.54238
3.03125
369fa64ebc77cae1df42bea6460e705c18ab2ee7
2,988
rs
Rust
src/chess/moves/rook.rs
bloatoo/zchess
e7cacb4e36d4646b6ef086c1e634a8c314223744
[ "MIT" ]
2
2021-12-28T22:41:51.000Z
2021-12-29T10:02:33.000Z
src/chess/moves/rook.rs
bloatoo/zchess
e7cacb4e36d4646b6ef086c1e634a8c314223744
[ "MIT" ]
null
null
null
src/chess/moves/rook.rs
bloatoo/zchess
e7cacb4e36d4646b6ef086c1e634a8c314223744
[ "MIT" ]
null
null
null
use crate::chess::utils::calculate_squares_to_edge; use crate::chess::{board::Edge, Board, Move, Piece}; pub const ROOK_MOVES: &[Move] = &[ Move { x: 8, y: 0, constraints: &[], }, Move { x: 0, y: 8, constraints: &[], }, ]; pub fn generate_rook_moves(board: &Board, sq: usize, piece: &Piece) -> Vec<usize> { let mut moves = vec![]; for mv in ROOK_MOVES.iter() { if mv.x == 0 { let top_edge = calculate_squares_to_edge(Edge::Top, sq); let mut valid = true; for i in 1..=top_edge { if !valid { continue; } let final_sq = sq + i as usize * 8; match board.piece_at(final_sq) { Some(p) => { if p.side() != piece.side() { moves.push(final_sq); } valid = false; } None => moves.push(final_sq), }; } let bottom_edge = calculate_squares_to_edge(Edge::Bottom, sq); let mut valid = true; for i in 1..=bottom_edge { if !valid { continue; } let final_sq = sq - i * 8; match board.piece_at(final_sq) { Some(p) => { if p.side() != piece.side() { moves.push(final_sq); } valid = false; } None => moves.push(final_sq), } } } else { let right_edge = calculate_squares_to_edge(Edge::Right, sq); let mut valid = true; for i in 1..=right_edge { if !valid { continue; } let final_sq = sq + i; match board.piece_at(final_sq) { Some(p) => { if p.side() != piece.side() { moves.push(final_sq); } valid = false; } None => moves.push(final_sq), } } let left_edge = calculate_squares_to_edge(Edge::Left, sq); let mut valid = true; for i in 1..=left_edge { if !valid { continue; } let final_sq = sq - i; match board.piece_at(final_sq) { Some(p) => { if p.side() != piece.side() { moves.push(final_sq); } valid = false; } None => moves.push(final_sq), } } } } moves }
27.925234
83
0.358434
3
404cb8362bcfdd803cb1b26c1c5bdcf7eb8586c2
1,977
kt
Kotlin
kotlin/src/test/kotlin/nonamedb/test/specs/unit/storage/engines/MemoryEngineSpec.kt
sndnv/nonamedb
e229ea19df94a89ddd135cf9e2b5bce5f1aa0f6f
[ "Apache-2.0" ]
1
2019-04-21T05:29:54.000Z
2019-04-21T05:29:54.000Z
kotlin/src/test/kotlin/nonamedb/test/specs/unit/storage/engines/MemoryEngineSpec.kt
sndnv/nonamedb
e229ea19df94a89ddd135cf9e2b5bce5f1aa0f6f
[ "Apache-2.0" ]
null
null
null
kotlin/src/test/kotlin/nonamedb/test/specs/unit/storage/engines/MemoryEngineSpec.kt
sndnv/nonamedb
e229ea19df94a89ddd135cf9e2b5bce5f1aa0f6f
[ "Apache-2.0" ]
null
null
null
package nonamedb.test.specs.unit.storage.engines import io.kotlintest.* import io.kotlintest.specs.StringSpec import nonamedb.storage.Done import nonamedb.storage.engines.MemoryEngine class MemoryEngineSpec : StringSpec(){ init { val timeout = 5.seconds val testKey = "some key" val testValue = "some value".toByteArray() val updatedTestValue = "some updated value".toByteArray() val testEngine = MemoryEngine() "should fail to retrieve missing data" { val result = testEngine.get(testKey) eventually(timeout) { result.getCompleted() shouldBe null } } "should successfully add data" { val result = testEngine.put(testKey, testValue) eventually(timeout) { result.getCompleted() shouldBe Done } } "should successfully retrieve data" { val result = testEngine.get(testKey) eventually(timeout) { result.getCompleted() shouldBe testValue } } "should successfully update data" { val result = testEngine.put(testKey, updatedTestValue) eventually(timeout) { result.getCompleted() shouldBe Done } } "should successfully retrieve updated data" { val result = testEngine.get(testKey) eventually(timeout) { result.getCompleted() shouldBe updatedTestValue } } "should successfully remove data" { val result = testEngine.put(testKey, "".toByteArray()) eventually(timeout) { result.getCompleted() shouldBe Done } } "should fail to retrieve removed data" { val result = testEngine.get(testKey) eventually(timeout) { result.getCompleted() shouldBe null } } } }
29.954545
66
0.56955
3.171875
7f1610e61fe4d126e2a9d2e38f0b4661c65c9e03
993
go
Go
cmd/util/pool_test.go
pop/wash
16e54fa159f28802fadef1d0d5632d32b07c6f86
[ "Apache-2.0" ]
180
2019-03-19T16:54:48.000Z
2022-03-21T01:20:21.000Z
cmd/util/pool_test.go
pop/wash
16e54fa159f28802fadef1d0d5632d32b07c6f86
[ "Apache-2.0" ]
413
2019-03-19T17:05:50.000Z
2021-07-01T16:45:26.000Z
cmd/util/pool_test.go
pop/wash
16e54fa159f28802fadef1d0d5632d32b07c6f86
[ "Apache-2.0" ]
39
2019-03-19T16:55:47.000Z
2022-01-28T10:57:23.000Z
package cmdutil import ( "sync" "testing" "github.com/stretchr/testify/assert" ) // Test that a pool with a single worker finishes. func TestPool1(t *testing.T) { p := NewPool(1) val := 0 p.Submit(func() { val++ p.Done() }) p.Finish() assert.Equal(t, 1, val) } // Test that a pool with two workers executes them concurrently and finishes. func TestPool2(t *testing.T) { p := NewPool(2) var mux1, mux2 sync.Mutex val := 0 // Start with both mutexes locked. In sequence wait on one and unlock the other so that both // functions must run concurrently to correctly unlock them. mux1.Lock() mux2.Lock() p.Submit(func() { // Wait on 1. mux1.Lock() val++ // Signal 2. mux2.Unlock() p.Done() }) p.Submit(func() { // Signal 1. mux1.Unlock() // Wait on 2. mux2.Lock() val++ p.Done() }) // At the end both mutexes are again locked. // Wait for completion and ensure both functions have updated the value. p.Finish() assert.Equal(t, 2, val) }
17.421053
93
0.650554
3.3125
9671d678b496b5bd1714a0e06a8fe25b3fbd9372
2,769
php
PHP
app/Http/Controllers/LabelsResourceController.php
austin-dudzik/spark
4f2014121473ffe1a1f5929636b85272ff2ef421
[ "MIT" ]
null
null
null
app/Http/Controllers/LabelsResourceController.php
austin-dudzik/spark
4f2014121473ffe1a1f5929636b85272ff2ef421
[ "MIT" ]
null
null
null
app/Http/Controllers/LabelsResourceController.php
austin-dudzik/spark
4f2014121473ffe1a1f5929636b85272ff2ef421
[ "MIT" ]
null
null
null
<?php namespace App\Http\Controllers; use App\Traits\ViewSorter; use App\Models\Label; use App\Models\Task; use Illuminate\Http\Request; use Illuminate\Support\Facades\Auth; class LabelsResourceController extends Controller { use ViewSorter; /** * Display a list of all labels * */ public function index() { // Return labels view return view('labels', [ 'labels' => Label::query()-> with(['tasks'])-> where('user_id', '=', Auth::id())-> get() ]); } /** * Store a new label in the database * * @param Request $request */ public function store(Request $request) { // Validate the request $fields = $request->validateWithBag('new_label', [ 'name' => 'required', 'color' => 'required', ]); // Assign the user ID to the request $fields['user_id'] = Auth::id(); // Create the label Label::query()->create($fields); // Redirect back return redirect()->back(); } /** * Display the specified label * * @param Label $label */ public function show(Label $label) { // Return the single label view return view('label-single', [ 'label' => Label::query()-> where('user_id', '=', Auth::id())-> where('id', '=', $label->id)-> first(), 'tasks' => Task::query()-> where('user_id', '=', Auth::id())-> where('label_id', '=', $label->id)-> whereNull('completed')-> orderBy($this->getSorters()->sort_by, $this->getSorters()->order_by)-> get(), ]); } /** * Update the specified resource in storage. * * @param Request $request * @param Label $label */ public function update(Request $request, Label $label) { // Validate the request $fields = $request->validateWithBag('edit_label_' . $label->id, [ 'name' => 'required', 'color' => 'required', ]); // Update the label Label::query()->find($label->id)->update($fields); // Redirect to index with success return redirect()->back(); } /** * Remove the specified label from storage. 
* * @param Label $label */ public function destroy(Label $label) { // Find and delete existing label Label::query()->find($label->id)->delete(); // Remove label from tasks Task::query()-> where('label_id', '=', $label->id)-> update(['label_id' => null]); // Redirect to index return redirect()->back(); } }
23.87069
82
0.506681
3.15625
70dc725ccf32e9d60106d386ebd957e8de6b19cd
2,342
c
C
Array/wuchao/other/7-16 c实现string函数/StringUtils.c
JessonYue/LeetCodeLearning
3c22a4fcdfe8b47f9f64b939c8b27742c4e30b79
[ "MIT" ]
39
2020-05-31T06:14:39.000Z
2021-01-09T11:06:39.000Z
Array/wuchao/other/7-16 c实现string函数/StringUtils.c
JessonYue/LeetCodeLearning
3c22a4fcdfe8b47f9f64b939c8b27742c4e30b79
[ "MIT" ]
7
2020-06-02T11:04:14.000Z
2020-06-11T14:11:58.000Z
Array/wuchao/other/7-16 c实现string函数/StringUtils.c
JessonYue/LeetCodeLearning
3c22a4fcdfe8b47f9f64b939c8b27742c4e30b79
[ "MIT" ]
20
2020-05-31T06:21:57.000Z
2020-10-01T04:48:38.000Z
// // Created by 吴超 on 2020/7/16. // #include <stdlib.h> #include <stdio.h> /** * C语言实现strcpy(字符串复制)、strcat(字符串链接)、strstr(字符串包含)、strchr(字符出现位置)、memcpy(拷贝) */ char *strcpy(char *source); char *strcat(char *dest, char *source); int strstr(char *dest, char *source); int strchr(char *source, char target); void* memcpy(char *dest, char* source, size_t size); size_t stringLen(char *source); char *strcpy(char *source) { char *dest = malloc(stringLen(source) * sizeof(char)); int i = 0; while (source[i] != '\0') { dest[i] = source[i]; i++; } return dest; } char *strcat(char *dest, char *source) { int destLen = stringLen(dest); int sourceLen = stringLen(source); char *result = malloc((destLen + sourceLen) * sizeof(char)); int i = 0; while (i < destLen) { result[i] = dest[i]; i++; } int j = 0; while (j < sourceLen) { result[i] = source[j]; i++; j++; } return result; } int strstr(char *dest, char *source){ int i = 0; int j = 0; int destLen = stringLen(dest); int sourceLen = stringLen(source); if(sourceLen>destLen) return -1; while(i<destLen){ if(dest[i]==source[j]){ i++; j++; if(j==sourceLen){ return i-sourceLen; } continue; } else { i++; j=0; } } return -1; } int strchr(char *source, char target){ int i = 0; while(source[i]!='\0'){ if(source[i]==target){ return i; } i++; } } void* memcpy(char *dest, char* source, size_t size){ int i =0; int destLen = stringLen(dest); int sourceLen = stringLen(source); while(i<size&&i<sourceLen){ dest[i] = source[i]; i++; } if(i<destLen){ dest[i] = '\0'; } return dest; } size_t stringLen(char *source) { int i = 0; while (source[i++] != '\0') { } return i-1; } int main() { char *a = "asdfwer"; char *b = strcpy(a); printf("copy:%s\n", b); printf("strcat:%s\n", strcat(a, "hello")); printf("strstr:%d\n", strstr("helloword","helloworld")); printf("strchr:%d\n", strchr("helloword",'l')); printf("memcpy:%s\n", memcpy(malloc(sizeof(char)),"helloworld",5)); }
20.54386
75
0.521349
3.1875
df947e31cb421d616ea0a0a99bb610e36a876113
2,148
ts
TypeScript
wallet-server-ui-proxy/middleware/backup.ts
user411/bitcoin-s-ts
4f5bfb586a47103e606ef8d1d4eb913f51103a29
[ "MIT" ]
11
2021-09-14T20:59:22.000Z
2022-03-09T09:01:49.000Z
wallet-server-ui-proxy/middleware/backup.ts
user411/bitcoin-s-ts
4f5bfb586a47103e606ef8d1d4eb913f51103a29
[ "MIT" ]
64
2021-08-30T15:24:52.000Z
2022-03-23T14:21:21.000Z
wallet-server-ui-proxy/middleware/backup.ts
user411/bitcoin-s-ts
4f5bfb586a47103e606ef8d1d4eb913f51103a29
[ "MIT" ]
4
2021-09-10T13:30:18.000Z
2022-03-28T12:38:57.000Z
import fs from 'fs' import path from 'path' import { Request, Response } from 'express' import * as WalletServer from 'wallet-ts/lib/index' import { RunConfig } from '../type/run-config' const Config = <RunConfig>require('../type/run-config') const logger = require('../middleware/logger') WalletServer.ConfigureServerURL(Config.walletServerUrl) WalletServer.ConfigureAuthorizationHeader(Config.serverAuthHeader) const filename = 'bitcoin-s-backup.zip' exports.downloadBackup = (req: Request, res: Response) => { // const r = req.body // don't currently care about request logger.info('downloadBackup ' + Config.backupDirectory) const fullPath = path.join(Config.backupDirectory, filename) logger.info('fullPath: ' + fullPath + ' walletServerUrl: ' + Config.walletServerUrl) // logger.info('auth header: ' + res.getHeader('Authorization')) // Sanity check try { fs.accessSync(Config.backupDirectory) // Will throw error if directory does not exist } catch (err) { logger.error('downloadBackup backupDirectory is not accessible ' + Config.backupDirectory) res.end() // Blob size 0 returned } // Use wallet-ts to create backup WalletServer.ZipDataDir(fullPath).then(result => { logger.info('ZipDataDir() complete') if (result.result === null) { // success case // Sanity check try { fs.accessSync(fullPath) // Will throw error if file does not exist } catch (err) { logger.error('downloadBackup fullPath is not accessible ' + fullPath) res.end() // Blob size 0 returned } const readStream = fs.createReadStream(fullPath) readStream.on('open', () => res.setHeader('Content-Type', 'application/zip; charset=utf-8')) readStream.on('error', (err) => { logger.error('readStream error ' + err) }) readStream.on('end', () => { // Always delete backup zip after sending fs.unlink(fullPath, function() { // Nothing to do }) }) readStream.pipe(res) } else { logger.error('downloadBackup ZipDataDir failed') res.end() // Blob size 0 returned } }) }
32.059701
94
0.664339
3.25
c6d25c4f72141f03ee058e94885ee553877d91e6
930
rb
Ruby
spec/controllers/admin/choices_controller_spec.rb
ianfleeton/zmey
d533ea22a6bbc051d6743aafb63beb3d69d8825c
[ "MIT" ]
null
null
null
spec/controllers/admin/choices_controller_spec.rb
ianfleeton/zmey
d533ea22a6bbc051d6743aafb63beb3d69d8825c
[ "MIT" ]
8
2015-03-19T13:05:58.000Z
2021-08-10T18:34:30.000Z
spec/controllers/admin/choices_controller_spec.rb
ianfleeton/zmey
d533ea22a6bbc051d6743aafb63beb3d69d8825c
[ "MIT" ]
null
null
null
require "rails_helper"

module Admin
  # Controller specs for Admin::ChoicesController's "new" action.
  RSpec.describe ChoicesController, type: :controller do
    before do
      # Admin controller actions require an authenticated admin user.
      logged_in_as_admin
    end

    describe "GET new" do
      it "instantiates a new Choice" do
        # Stub feature validation so the action proceeds without a real Feature.
        allow(controller).to receive(:feature_valid?)
        # A null-object double absorbs any attribute assignment in the action.
        expect(Choice).to receive(:new).and_return(double(Choice).as_null_object)
        get "new"
      end

      it "sets @choice.feature_id to the feature_id supplied as a parameter" do
        # Use a real Choice so the feature_id assignment can be observed.
        choice = Choice.new
        allow(Choice).to receive(:new).and_return(choice)
        get "new", params: {feature_id: 123}
        expect(choice.feature_id).to eq 123
      end

      context "when the feature is invalid" do
        it "redirects to the products page" do
          allow(controller).to receive(:feature_valid?).and_return(false)
          get "new"
          expect(response).to redirect_to(admin_products_path)
        end
      end
    end
  end
end
28.181818
81
0.649462
3.046875
4009b06c7f2a79a55036a8541e8d8b3d6f9a817d
19,880
py
Python
ost/s1/grd_batch.py
KBodolai/OpenSarToolkit
29af1df36f10f28a17b56f39ad67f0c7f530b93a
[ "MIT" ]
null
null
null
ost/s1/grd_batch.py
KBodolai/OpenSarToolkit
29af1df36f10f28a17b56f39ad67f0c7f530b93a
[ "MIT" ]
null
null
null
ost/s1/grd_batch.py
KBodolai/OpenSarToolkit
29af1df36f10f28a17b56f39ad67f0c7f530b93a
[ "MIT" ]
null
null
null
#! /usr/bin/env python
# -*- coding: utf-8 -*-

"""Batch processing for GRD products"""

import os
import json
import itertools
import logging

import pandas as pd
from pathlib import Path

from godale._concurrent import Executor

from ost import Sentinel1Scene
from ost.s1 import grd_to_ard
from ost.helpers import raster as ras
from ost.generic import ts_extent
from ost.generic import ts_ls_mask
from ost.generic import ard_to_ts
from ost.generic import timescan
from ost.generic import mosaic

logger = logging.getLogger(__name__)


def _create_processing_dict(inventory_df):
    """Function that creates a dictionary to handle GRD batch processing

    This helper function takes the inventory dataframe and creates a
    dictionary keyed by "<track>_<n>" (one entry per acquisition date within
    a relative orbit), whose values are the lists of scene identifiers to
    process together.

    :param inventory_df: inventory with relativeorbit/acquisitiondate/identifier columns
    :return: dict mapping "<track>_<n>" -> list of scene identifiers
    """
    # initialize empty dictionary
    dict_scenes = {}

    # get relative orbits and loop through each
    track_list = inventory_df["relativeorbit"].unique()
    for track in track_list:

        # get acquisition dates and loop through each
        acquisition_dates = inventory_df["acquisitiondate"][
            inventory_df["relativeorbit"] == track
        ].unique()

        # loop through dates
        for i, acquisition_date in enumerate(acquisition_dates):

            # get the scene ids per acquisition_date and write into a list
            single_id = inventory_df["identifier"][
                (inventory_df["relativeorbit"] == track)
                & (inventory_df["acquisitiondate"] == acquisition_date)
            ].tolist()

            # add this list to the dictionary and associate the track number
            # as dict key
            dict_scenes[f"{track}_{i+1}"] = single_id

    return dict_scenes


def create_processed_df(inventory_df, list_of_scenes, outfile, out_ls, error):
    """Build a per-scene processing-result dataframe.

    One row per scene in list_of_scenes, each carrying the shared
    outfile/out_ls/error values of the processing task.
    """
    df = pd.DataFrame(columns=["identifier", "outfile", "out_ls", "error"])

    for scene in list_of_scenes:

        temp_df = pd.DataFrame()
        # get scene_id
        temp_df["identifier"] = inventory_df.identifier[
            inventory_df.identifier == scene
        ].values

        # fill outfiles/error
        temp_df["outfile"] = outfile
        temp_df["out_ls"] = out_ls
        temp_df["error"] = error

        # append to final df and delete temp_df for next loop
        df = pd.concat([df, temp_df])
        del temp_df

    return df


def grd_to_ard_batch(inventory_df, config_file):
    """Convert all GRD acquisitions in the inventory to ARD products.

    Groups frames into acquisitions, resolves their file paths and runs
    grd_to_ard in parallel via godale.

    :param inventory_df: scene inventory
    :param config_file: path to the project configuration JSON
    :return: dataframe of per-scene processing results
    """
    # load relevant config parameters
    with open(config_file, "r") as file:
        config_dict = json.load(file)
        download_dir = Path(config_dict["download_dir"])
        data_mount = Path(config_dict["data_mount"])

    # where all frames are grouped into acquisitions
    processing_dict = _create_processing_dict(inventory_df)
    processing_df = pd.DataFrame(columns=["identifier", "outfile", "out_ls", "error"])

    iter_list = []
    for _, list_of_scenes in processing_dict.items():

        # get the paths to the file
        scene_paths = [
            Sentinel1Scene(scene).get_path(download_dir, data_mount)
            for scene in list_of_scenes
        ]

        iter_list.append(scene_paths)

    # now we run with godale, which works also with 1 worker
    executor = Executor(
        executor=config_dict["executor_type"], max_workers=config_dict["max_workers"]
    )

    for task in executor.as_completed(
        func=grd_to_ard.grd_to_ard,
        iterable=iter_list,
        fargs=(
            [
                str(config_file),
            ]
        ),
    ):
        list_of_scenes, outfile, out_ls, error = task.result()

        # return the info of processing as dataframe
        temp_df = create_processed_df(
            inventory_df, list_of_scenes, outfile, out_ls, error
        )

        processing_df = pd.concat([processing_df, temp_df])

    return processing_df


def ards_to_timeseries(inventory_df, config_file):
    """Create time-series stacks from all ARD products of the project.

    Runs extent creation, optional layover/shadow mask merging and the
    time-series processing itself.
    """
    with open(config_file) as file:
        config_dict = json.load(file)
        ard = config_dict["processing"]["single_ARD"]
        ard_mt = config_dict["processing"]["time-series_ARD"]

    # create all extents
    _create_extents(inventory_df, config_file)

    # update extents in case of ls_mask
    if ard["create_ls_mask"] or ard_mt["apply_ls_mask"]:
        _create_mt_ls_mask(inventory_df, config_file)

    # finally create time-series
    _create_timeseries(inventory_df, config_file)


def _create_extents(inventory_df, config_file):
    """Create the minimum-bounds extent file for each track (godale-parallel)."""
    with open(config_file, "r") as file:
        config_dict = json.load(file)
        processing_dir = Path(config_dict["processing_dir"])

    iter_list = []
    for track in inventory_df.relativeorbit.unique():

        # get the burst directory
        track_dir = processing_dir / track
        list_of_extents = list(track_dir.glob("*/*/*bounds.json"))

        # if extent does not already exist, add to iterable
        if not (track_dir / f"{track}.min_bounds.json").exists():
            iter_list.append(list_of_extents)

    # now we run with godale, which works also with 1 worker
    # NOTE(review): uses os.cpu_count() rather than the configured
    # max_workers, unlike most other helpers here — confirm this is intended.
    executor = Executor(
        executor=config_dict["executor_type"], max_workers=os.cpu_count()
    )

    out_dict = {"track": [], "list_of_scenes": [], "extent": []}
    for task in executor.as_completed(
        func=ts_extent.mt_extent,
        iterable=iter_list,
        fargs=(
            [
                str(config_file),
            ]
        ),
    ):
        track, list_of_scenes, extent = task.result()
        out_dict["track"].append(track)
        out_dict["list_of_scenes"].append(list_of_scenes)
        out_dict["extent"].append(extent)

    return pd.DataFrame.from_dict(out_dict)


def _create_extents_old(inventory_df, config_file):
    """Legacy variant of _create_extents based on *.img files and .extent.gpkg."""
    with open(config_file, "r") as file:
        config_dict = json.load(file)
        processing_dir = Path(config_dict["processing_dir"])

    iter_list = []
    for track in inventory_df.relativeorbit.unique():

        # get the burst directory
        track_dir = processing_dir / track

        # get common burst extent
        list_of_scenes = list(track_dir.glob("**/*img"))
        list_of_scenes = [str(x) for x in list_of_scenes if "layover" not in str(x)]

        # if extent does not already exist, add to iterable
        if not (track_dir / f"{track}.extent.gpkg").exists():
            iter_list.append(list_of_scenes)

    # now we run with godale, which works also with 1 worker
    executor = Executor(
        executor=config_dict["executor_type"], max_workers=config_dict["max_workers"]
    )

    out_dict = {"track": [], "list_of_scenes": [], "extent": []}
    for task in executor.as_completed(
        func=ts_extent.mt_extent,
        iterable=iter_list,
        fargs=(
            [
                str(config_file),
            ]
        ),
    ):
        track, list_of_scenes, extent = task.result()
        out_dict["track"].append(track)
        out_dict["list_of_scenes"].append(list_of_scenes)
        out_dict["extent"].append(extent)

    return pd.DataFrame.from_dict(out_dict)


def _create_mt_ls_mask(inventory_df, config_file):
    """Helper function to union the Layover/Shadow masks of a Time-series

    Collects the per-scene *_ls_mask.json files of each track and merges them
    into a single per-track mask via ts_ls_mask.mt_layover.

    :param inventory_df:
    :param config_file:
    :return:
    """
    with open(config_file, "r") as file:
        config_dict = json.load(file)
        processing_dir = Path(config_dict["processing_dir"])

    iter_list = []
    for track in inventory_df.relativeorbit.unique():

        # get the burst directory
        track_dir = processing_dir / track

        # get common burst extent
        list_of_masks = list(track_dir.glob("*/*/*_ls_mask.json"))

        # if extent does not already exist, add to iterable
        if not (track_dir / f"{track}.ls_mask.json").exists():
            iter_list.append(list_of_masks)

    # now we run with godale, which works also with 1 worker
    # NOTE(review): no config fargs are passed to mt_layover here, unlike the
    # legacy variant below — confirm mt_layover's defaults cover this call.
    executor = Executor(
        executor=config_dict["executor_type"], max_workers=os.cpu_count()
    )

    for task in executor.as_completed(func=ts_ls_mask.mt_layover, iterable=iter_list):
        task.result()


def _create_mt_ls_mask_old(inventory_df, config_file):
    """Legacy variant of _create_mt_ls_mask based on layover *.img files."""
    with open(config_file, "r") as file:
        config_dict = json.load(file)
        processing_dir = Path(config_dict["processing_dir"])

    iter_list = []
    for track in inventory_df.relativeorbit.unique():

        # get the burst directory
        track_dir = processing_dir / track

        # get common burst extent
        list_of_scenes = list(track_dir.glob("**/*img"))
        list_of_layover = [str(x) for x in list_of_scenes if "layover" in str(x)]
        iter_list.append(list_of_layover)

    # now we run with godale, which works also with 1 worker
    executor = Executor(
        executor=config_dict["executor_type"], max_workers=config_dict["max_workers"]
    )

    out_dict = {"track": [], "list_of_layover": [], "ls_mask": [], "ls_extent": []}
    for task in executor.as_completed(
        func=ts_ls_mask.mt_layover,
        iterable=iter_list,
        fargs=(
            [
                str(config_file),
            ]
        ),
    ):
        track, list_of_layover, ls_mask, ls_extent = task.result()
        out_dict["track"].append(track)
        out_dict["list_of_layover"].append(list_of_layover)
        # BUGFIX: previously appended list_of_layover here, so the returned
        # dataframe's ls_mask column never contained the actual mask.
        out_dict["ls_mask"].append(ls_mask)
        out_dict["ls_extent"].append(ls_extent)

    return pd.DataFrame.from_dict(out_dict)


def _create_timeseries(inventory_df, config_file):
    """Helper function to create Timeseries out of OST ARD products

    Based on the inventory GeoDataFrame and the configuration file, this
    function triggers the time-series processing for all bursts/tracks within
    the respective project. Each product/polarisation is treated singularly.

    Based on the ARD type/configuration settings, the function uses SNAP's
    Create-Stack function to unify the grid of each scene and applies a
    multi-temporal speckle filter if selected.

    The output are single GeoTiff files, whereas there is the possibility to
    reduce the data by converting the data format into uint8 or uint16. This
    is done by linearly stretching the data between -30 and +5 for
    backscatter, 0 and 1 for coherence, polarimetric anisotropy and entropy,
    as well 0 and 90 for polarimetric alpha channel.

    All the data is cropped to the same extent based on the minimum bounds
    layer.

    This function executes the underlying functions using the godale framework
    for parallel execution. Executor type and number of parallel processes is
    defined within the configuration file.

    :param inventory_df:
    :type GeoDataFrame
    :param config_file:
    :type str/Path
    :return:
    """
    with open(config_file, "r") as file:
        config_dict = json.load(file)
        processing_dir = Path(config_dict["processing_dir"])

    iter_list = []
    for track in inventory_df.relativeorbit.unique():

        # get the burst directory
        track_dir = processing_dir / track

        for pol in ["VV", "VH", "HH", "HV"]:

            # see if there is actually any imagery in this polarisation
            list_of_files = sorted(
                str(file)
                for file in list(track_dir.glob(f"20*/*data*/*ma0*{pol}*img"))
            )

            if len(list_of_files) <= 1:
                continue

            # create list of dims if polarisation is present
            list_of_dims = sorted(
                str(dim) for dim in list(track_dir.glob("20*/*bs*dim"))
            )

            iter_list.append([list_of_dims, track, "bs", pol])

    executor = Executor(
        executor=config_dict["executor_type"], max_workers=config_dict["max_workers"]
    )

    out_dict = {
        "track": [],
        "list_of_dims": [],
        "out_files": [],
        "out_vrt": [],
        "product": [],
        "error": [],
    }
    for task in executor.as_completed(
        func=ard_to_ts.gd_ard_to_ts,
        iterable=iter_list,
        fargs=(
            [
                str(config_file),
            ]
        ),
    ):
        track, list_of_dims, out_files, out_vrt, product, error = task.result()
        out_dict["track"].append(track)
        out_dict["list_of_dims"].append(list_of_dims)
        out_dict["out_files"].append(out_files)
        out_dict["out_vrt"].append(out_vrt)
        out_dict["product"].append(product)
        out_dict["error"].append(error)

    return pd.DataFrame.from_dict(out_dict)


def timeseries_to_timescan(inventory_df, config_file):
    """Create timescan (multi-temporal metrics) products for each track.

    :param inventory_df: scene inventory
    :param config_file: path to the project configuration JSON
    :return: dataframe of timescan processing results
    """
    # load ard parameters
    with open(config_file, "r") as file:
        config_dict = json.load(file)
        processing_dir = Path(config_dict["processing_dir"])
        ard = config_dict["processing"]["single_ARD"]
        ard_mt = config_dict["processing"]["time-series_ARD"]
        ard_tscan = config_dict["processing"]["time-scan_ARD"]

    # get the db scaling right
    to_db = ard["to_db"]
    if ard["to_db"] or ard_mt["to_db"]:
        to_db = True

    dtype_conversion = True if ard_mt["dtype_output"] != "float32" else False

    iter_list, vrt_iter_list = [], []
    for track in inventory_df.relativeorbit.unique():

        # get track directory
        track_dir = processing_dir / track
        # define and create Timescan directory
        timescan_dir = track_dir / "Timescan"
        timescan_dir.mkdir(parents=True, exist_ok=True)

        # loop through each polarization
        for polar in ["VV", "VH", "HH", "HV"]:

            if (timescan_dir / f".bs.{polar}.processed").exists():
                logger.info(f"Timescans for track {track} already processed.")
                continue

            # get timeseries vrt
            time_series = track_dir / "Timeseries" / f"Timeseries.bs.{polar}.vrt"

            if not time_series.exists():
                continue

            # create a datelist for harmonics calculation
            scene_list = list(track_dir.glob(f"Timeseries/*bs.{polar}.tif"))
            datelist = []
            for file in sorted(scene_list):
                datelist.append(file.name.split(".")[1])

            # define timescan prefix
            timescan_prefix = timescan_dir / f"bs.{polar}"

            iter_list.append(
                [
                    time_series,
                    timescan_prefix,
                    ard_tscan["metrics"],
                    dtype_conversion,
                    to_db,
                    ard_tscan["remove_outliers"],
                    datelist,
                ]
            )
            vrt_iter_list.append(timescan_dir)

    # now we run with godale, which works also with 1 worker
    executor = Executor(
        executor=config_dict["executor_type"], max_workers=config_dict["max_workers"]
    )

    # run timescan creation
    out_dict = {"track": [], "prefix": [], "metrics": [], "error": []}
    for task in executor.as_completed(func=timescan.gd_mt_metrics, iterable=iter_list):
        burst, prefix, metrics, error = task.result()
        out_dict["track"].append(burst)
        out_dict["prefix"].append(prefix)
        out_dict["metrics"].append(metrics)
        out_dict["error"].append(error)

    timescan_df = pd.DataFrame.from_dict(out_dict)

    # run vrt creation
    for task in executor.as_completed(
        func=ras.create_tscan_vrt,
        iterable=vrt_iter_list,
        fargs=(
            [
                str(config_file),
            ]
        ),
    ):
        task.result()

    return timescan_df


def mosaic_timeseries(inventory_df, config_file):
    """Mosaic the per-track time-series layers across all tracks."""
    print(" -----------------------------------")
    logger.info("Mosaicking Time-series layers")
    print(" -----------------------------------")

    # -------------------------------------
    # 1 load project config
    with open(config_file, "r") as ard_file:
        config_dict = json.load(ard_file)
        processing_dir = Path(config_dict["processing_dir"])

    # create output folder
    ts_dir = processing_dir / "Mosaic" / "Timeseries"
    ts_dir.mkdir(parents=True, exist_ok=True)

    # loop through polarisations
    iter_list, vrt_iter_list = [], []
    for p in ["VV", "VH", "HH", "HV"]:

        tracks = inventory_df.relativeorbit.unique()
        nr_of_ts = len(
            list((processing_dir / f"{tracks[0]}" / "Timeseries").glob(f"*.{p}.tif"))
        )

        if not nr_of_ts >= 1:
            continue

        outfiles = []
        for i in range(1, nr_of_ts + 1):

            filelist = list(processing_dir.glob(f"*/Timeseries/{i:02d}.*.{p}.tif"))
            filelist = [str(file) for file in filelist if "Mosaic" not in str(file)]

            # create a date range for the output filename
            datelist = []
            for file in filelist:
                datelist.append(Path(file).name.split(".")[1])

            filelist = " ".join(filelist)
            start, end = sorted(datelist)[0], sorted(datelist)[-1]

            if start == end:
                outfile = ts_dir / f"{i:02d}.{start}.bs.{p}.tif"
            else:
                outfile = ts_dir / f"{i:02d}.{start}-{end}.bs.{p}.tif"

            check_file = outfile.parent / f".{outfile.stem}.processed"
            outfiles.append(outfile)

            if check_file.exists():
                logger.info(f"Mosaic layer {outfile.name} already processed.")
                continue

            logger.info(f"Mosaicking layer {outfile.name}.")
            iter_list.append([filelist, outfile, config_file])

        vrt_iter_list.append([ts_dir, p, outfiles])

    # now we run with godale, which works also with 1 worker
    executor = Executor(
        executor=config_dict["executor_type"], max_workers=config_dict["max_workers"]
    )

    # run mosaicking
    for task in executor.as_completed(func=mosaic.gd_mosaic, iterable=iter_list):
        task.result()

    # run mosaicking vrts
    for task in executor.as_completed(
        func=mosaic.create_timeseries_mosaic_vrt, iterable=vrt_iter_list
    ):
        task.result()


def mosaic_timescan(config_file):
    """Mosaic the per-track timescan metric layers across all tracks."""
    # load ard parameters
    with open(config_file, "r") as ard_file:
        config_dict = json.load(ard_file)
        processing_dir = Path(config_dict["processing_dir"])
        metrics = config_dict["processing"]["time-scan_ARD"]["metrics"]

    # expand the composite metric names into their actual output layers
    if "harmonics" in metrics:
        metrics.remove("harmonics")
        metrics.extend(["amplitude", "phase", "residuals"])

    if "percentiles" in metrics:
        metrics.remove("percentiles")
        metrics.extend(["p95", "p5"])

    # create out directory if not existent
    tscan_dir = processing_dir / "Mosaic" / "Timescan"
    tscan_dir.mkdir(parents=True, exist_ok=True)

    # loop through all potential products
    iter_list = []
    for polar, metric in itertools.product(["VV", "HH", "VH", "HV"], metrics):

        # create a list of files based on polarisation and metric
        filelist = list(processing_dir.glob(f"*/Timescan/*bs.{polar}.{metric}.tif"))

        # skip if there is nothing to mosaic
        if not len(filelist) >= 2:
            continue

        filelist = " ".join([str(file) for file in filelist])
        outfile = tscan_dir / f"bs.{polar}.{metric}.tif"
        check_file = outfile.parent / f".{outfile.stem}.processed"

        if check_file.exists():
            logger.info(f"Mosaic layer {outfile.name} already processed.")
            continue

        iter_list.append([filelist, outfile, config_file])

    # now we run with godale, which works also with 1 worker
    executor = Executor(
        executor=config_dict["executor_type"], max_workers=config_dict["max_workers"]
    )

    # run mosaicking
    for task in executor.as_completed(func=mosaic.gd_mosaic, iterable=iter_list):
        task.result()

    ras.create_tscan_vrt(tscan_dir, config_file)
31.307087
88
0.624748
3.359375
9bc1651f5e75769617029e484b804f33c2d105f0
4,748
js
JavaScript
web/app/sw.js
nicolasparada/nakama
8d5aa79aa5ab30d405354408eb60399a345a20e1
[ "0BSD" ]
183
2017-09-16T12:59:48.000Z
2022-03-21T02:19:19.000Z
web/app/sw.js
arjundoye/nakama
31403b79476bf5176b4f18374cf87dbd67c85008
[ "ISC" ]
50
2019-02-01T06:16:40.000Z
2021-12-26T18:30:04.000Z
web/app/sw.js
nakamauwu/nakama
a0b481441d3400602b83c9f6681e6fb3f871ef05
[ "0BSD" ]
28
2018-01-15T16:44:09.000Z
2022-03-10T21:09:51.000Z
const OFFLINE_VERSION = 1
const CACHE_NAME = "offline"
const OFFLINE_URL = "/offline.html"

// Pre-cache the offline fallback page and activate the new worker immediately.
self.addEventListener("install", ev => {
    ev.waitUntil(cacheOfflinePage())
    self.skipWaiting()
})

async function cacheOfflinePage() {
    const cache = await caches.open(CACHE_NAME)
    // cache: "reload" bypasses the HTTP cache so the fallback page is fresh.
    await cache.add(new Request(OFFLINE_URL, { cache: "reload" }))
}

self.addEventListener("activate", ev => {
    ev.waitUntil(enableNavigationPreload())
    self.clients.claim()
})

async function enableNavigationPreload() {
    if ("navigationPreload" in self.registration) {
        await self.registration.navigationPreload.enable()
    }
}

// Only navigations get the offline fallback; other requests pass through.
self.addEventListener("fetch", ev => {
    if (ev.request.mode === "navigate") {
        ev.respondWith(networkWithOfflineNavigationFallback(ev))
    }
})

self.addEventListener("push", ev => {
    if (!ev.data) {
        return
    }
    const n = ev.data.json()
    if (!n) {
        return
    }
    ev.waitUntil(showNotification(n))
})

self.addEventListener("notificationclick", ev => {
    ev.notification.close()
    ev.waitUntil(openNotificationsPage(ev.notification.data))
})

// Show a system notification for the pushed payload and set the app badge
// where supported.
async function showNotification(n) {
    const title = notificationTitle(n)
    const body = notificationBody(n)
    return self.registration.showNotification(title, {
        body,
        tag: n.id,
        timestamp: n.issuedAt,
        data: n,
        icon: location.origin + "/icons/logo-circle-512.png",
    }).then(() => {
        if ("setAppBadge" in navigator) {
            return navigator.setAppBadge()
        }
    })
}

// Focus (or open) the most appropriate window for the notification, deliver
// a "notificationclick" message to it and clear the app badge.
async function openNotificationsPage(n) {
    return clients.matchAll({ type: "window" }).then(clientList => {
        const pathname = notificationPathname(n)
        // Prefer a window already at the target path...
        for (const client of clientList) {
            if (client.url === pathname && "focus" in client) {
                return client.focus()
            }
        }
        // ...then one at the notifications page...
        for (const client of clientList) {
            if (client.url === "/notifications" && "focus" in client) {
                return client.focus()
            }
        }
        // ...then navigate a focused/visible window there...
        for (const client of clientList) {
            if ("focused" in client && client.focused) {
                return client.navigate(pathname).then(client => "focus" in client ? client.focus() : client)
            }
            if ("visibilityState" in client && client.visibilityState === "visible") {
                return client.navigate(pathname).then(client => "focus" in client ? client.focus() : client)
            }
        }
        // ...and as a last resort open a new window.
        if ("openWindow" in clients) {
            return clients.openWindow(pathname)
        }
    }).then(client => {
        // BUGFIX: Client.postMessage() returns undefined, so the original
        // `client.postMessage(...).then(...)` threw a TypeError and the app
        // badge was never cleared. Also guard against a missing client (no
        // matching window and openWindow unsupported).
        if (client) {
            client.postMessage({
                type: "notificationclick",
                detail: n,
            })
        }
        if ("clearAppBadge" in navigator) {
            return navigator.clearAppBadge()
        }
    })
}

// Maps a notification payload to the in-app path it should open.
function notificationPathname(n) {
    if (typeof n.postID === "string" && n.postID !== "") {
        return "/posts/" + encodeURIComponent(n.postID)
    }
    if (n.type === "follow") {
        return "/@" + encodeURIComponent(n.actors[0])
    }
    return "/notifications"
}

// Network-first navigation handler with the pre-cached offline page as
// fallback; the navigation-preload response is used when available.
async function networkWithOfflineNavigationFallback(ev) {
    try {
        const preloadResponse = await ev.preloadResponse
        if (preloadResponse) {
            return preloadResponse
        }
        const networkResponse = await fetch(ev.request)
        return networkResponse
    } catch (error) {
        // Deliberate best-effort: any network failure serves the offline page.
        const cache = await caches.open(CACHE_NAME)
        const cachedResponse = await cache.match(OFFLINE_URL)
        return cachedResponse
    }
}

function notificationTitle(n) {
    switch (n.type) {
        case "follow":
            return "New follow"
        case "comment":
            // NOTE(review): "New commented" looks like a typo for
            // "New comment" — confirm before changing user-facing text.
            return "New commented"
        case "post_mention":
            return "New post mention"
        case "comment_mention":
            return "New comment mention"
    }
    return "New notification"
}

// Builds "<actors> <action>" body text from the notification payload.
function notificationBody(n) {
    const getActors = () => {
        const aa = n.actors
        switch (aa.length) {
            case 0:
                return "Someone"
            case 1:
                return aa[0]
            case 2:
                return `${aa[0]} and ${aa[1]}`
        }
        return `${aa[0]} and ${aa.length - 1} others`
    }
    const getAction = () => {
        switch (n.type) {
            case "follow":
                return "followed you"
            case "comment":
                return "commented in a post"
            case "post_mention":
                return "mentioned you in a post"
            case "comment_mention":
                return "mentioned you in a comment"
        }
        return "did something"
    }
    return getActors() + " " + getAction()
}
26.674157
108
0.567186
3.046875
5b1d05ff04a50d724df5ca369fe808fe7708e5a2
1,293
kt
Kotlin
klay-demo/src/main/kotlin/klay/tests/core/CanvasStressTest.kt
cdietze/klay
72031aa267cd304a0612b31c871e2f5cf73d2c4c
[ "Apache-2.0" ]
4
2018-09-30T06:36:13.000Z
2019-03-26T02:37:25.000Z
klay-demo/src/main/kotlin/klay/tests/core/CanvasStressTest.kt
cdietze/klay
72031aa267cd304a0612b31c871e2f5cf73d2c4c
[ "Apache-2.0" ]
null
null
null
klay-demo/src/main/kotlin/klay/tests/core/CanvasStressTest.kt
cdietze/klay
72031aa267cd304a0612b31c871e2f5cf73d2c4c
[ "Apache-2.0" ]
null
null
null
package klay.tests.core

import euklid.f.MathUtil
import klay.core.Clock
import klay.scene.CanvasLayer
import kotlin.math.cos
import kotlin.math.sin

/**
 * Stress test: redraws a full-screen-sized canvas on every frame, which
 * forces the full image data to be re-uploaded to the GPU each update.
 */
class CanvasStressTest(game: TestsGame) : Test(game, "Canvas Stress",
        "Animates a full-screen sized canvas, forcing a massive reupload of image data to " + "the GPU on every frame.") {

    override fun init() {
        val canvasLayer = CanvasLayer(game.graphics, game.graphics.viewSize)
        game.rootLayer.add(canvasLayer)

        // Circle count oscillates between 20 and 50 to vary the drawing load.
        var segmentCount = 30
        var step = 1

        conns.add(game.update.connect { _: Clock ->
            val canvas = canvasLayer.begin()
            canvas.clear()
            canvas.setStrokeWidth(3f)
            canvas.setStrokeColor(0x88ff0000.toInt())

            segmentCount += step
            if (segmentCount > 50) step = -1
            if (segmentCount < 20) step = 1

            val radius = 100f
            for (index in 0 until segmentCount) {
                val angle = 2 * MathUtil.PI * index / segmentCount
                val viewSize = game.plat.graphics.viewSize
                val cx = radius * cos(angle) + viewSize.width / 2
                val cy = radius * sin(angle) + viewSize.height / 2
                canvas.strokeCircle(cx, cy, 100f)
            }

            canvasLayer.end() // triggers the image-data re-upload
        })
    }
}
32.325
184
0.576953
3.234375
709bddbffae043056480c6a72ff06e06383b5294
1,671
go
Go
src/snippetgen/common/metadata/metadata_test.go
cclauss/discovery-artifact-manager
9eb6bcef290ef87006758349c725d440fbfc85d6
[ "Apache-2.0" ]
38
2017-07-20T17:54:08.000Z
2022-02-20T02:16:31.000Z
src/snippetgen/common/metadata/metadata_test.go
cclauss/discovery-artifact-manager
9eb6bcef290ef87006758349c725d440fbfc85d6
[ "Apache-2.0" ]
183
2017-03-23T17:17:24.000Z
2022-02-09T00:07:17.000Z
src/snippetgen/common/metadata/metadata_test.go
cclauss/discovery-artifact-manager
9eb6bcef290ef87006758349c725d440fbfc85d6
[ "Apache-2.0" ]
42
2017-03-23T19:20:20.000Z
2022-02-20T02:17:09.000Z
package metadata

import "testing"

// TestGetLanguage checks that every allowed language is retrievable by name
// and that the lookup yields the exact same definition.
func TestGetLanguage(t *testing.T) {
	for _, want := range AllowedLanguages {
		got, ok := GetLanguage(want.Name)
		if !ok {
			t.Errorf("language defined but not found: %s", want.Name)
			continue
		}
		if got != want {
			t.Errorf("found wrong language, expected %v, found %v", want, got)
		}
	}
}

// TestNoLanguage checks that lookups of unknown names report absence.
func TestNoLanguage(t *testing.T) {
	for _, name := range [...]string{"foobar language"} {
		if _, exist := GetLanguage(name); exist {
			t.Errorf("language found but should not exist: %s", name)
		}
	}
}

// TestRequiredLanguages checks the consistency of RequiredLanguages with the
// Required flag and with the AllowedLanguages definitions.
func TestRequiredLanguages(t *testing.T) {
	for _, req := range RequiredLanguages {
		if !req.Required {
			t.Errorf("language is required but not marked required: %s", req.Name)
		}
		got, ok := GetLanguage(req.Name)
		if !ok {
			t.Errorf("language required but not defined: %s", req.Name)
		} else if req != got {
			t.Errorf("required language different from the definition: %s", req.Name)
		}
	}

	// Every allowed language marked Required must appear in RequiredLanguages.
	for _, lang := range AllowedLanguages {
		inRequired := false
		for _, req := range RequiredLanguages {
			if lang.Name == req.Name {
				inRequired = true
				break
			}
		}
		if lang.Required && !inRequired {
			t.Errorf("language marked required but not in RequiredLanguages: %s", lang.Name)
		}
	}
}

// TestGetLanguageFromExt checks the extension-based lookup against the
// definitions.
func TestGetLanguageFromExt(t *testing.T) {
	for _, want := range AllowedLanguages {
		got, ok := GetLanguageFromExt(want.Ext)
		if !ok {
			t.Errorf("cannot look up extension: %s", want.Ext)
			continue
		}
		if want != got {
			t.Errorf("language different from definition: %s", want.Name)
		}
	}
}

// TestNoLanguageFromExt checks that unknown extensions report absence.
func TestNoLanguageFromExt(t *testing.T) {
	for _, ext := range [...]string{"foo", "bar"} {
		if _, exist := GetLanguageFromExt(ext); exist {
			t.Errorf("language found but should not exist: %s", ext)
		}
	}
}
24.573529
80
0.630162
3.03125
a16097ae40d34fe1e6000e4d3e46862218e86dca
1,405
go
Go
2020/6/main.go
bobcatfish/adventofcode2016
20f781f256f4d649544c1f6e28da0d73e3bbc9e3
[ "Apache-2.0" ]
null
null
null
2020/6/main.go
bobcatfish/adventofcode2016
20f781f256f4d649544c1f6e28da0d73e3bbc9e3
[ "Apache-2.0" ]
3
2017-12-02T17:58:41.000Z
2020-12-11T04:33:02.000Z
2020/6/main.go
bobcatfish/adventofcode2016
20f781f256f4d649544c1f6e28da0d73e3bbc9e3
[ "Apache-2.0" ]
null
null
null
package main

import (
	"fmt"
	"io/ioutil"
	"log"
	"strings"
)

// load reads input.txt and splits it into groups: one slice of per-person
// answer strings per blank-line-separated block.
func load() ([][]string, error) {
	data, err := ioutil.ReadFile("input.txt")
	if err != nil {
		return nil, fmt.Errorf("couldn't read file: %v", err)
	}
	pp := [][]string{}
	for _, v := range strings.Split(string(data), "\n\n") {
		pp = append(pp, strings.Fields(v))
	}
	return pp, nil
}

// getCounts1 returns, per group, the set of questions answered by anyone
// (the union of each person's answers).
func getCounts1(a [][]string) []map[rune]struct{} {
	mm := []map[rune]struct{}{}
	for _, ss := range a {
		m := map[rune]struct{}{}
		for _, s := range ss {
			for _, r := range s {
				m[r] = struct{}{}
			}
		}
		mm = append(mm, m)
	}
	return mm
}

// getCounts2 returns, per group, the set of questions answered by everyone:
// the first person's answers intersected with each subsequent person's.
func getCounts2(a [][]string) []map[rune]struct{} {
	mm := []map[rune]struct{}{}
	for _, ss := range a {
		m := map[rune]struct{}{}
		for i, s := range ss {
			if i == 0 {
				for _, r := range s {
					m[r] = struct{}{}
				}
				continue
			}
			// Drop answers this person didn't give. Deleting during a range
			// over a map is well-defined in Go; strings.ContainsRune
			// replaces the original hand-rolled membership scan.
			for r := range m {
				if !strings.ContainsRune(s, r) {
					delete(m, r)
				}
			}
		}
		mm = append(mm, m)
	}
	return mm
}

// sum returns the total size of all answer sets.
func sum(counts []map[rune]struct{}) int {
	c := 0
	for _, m := range counts {
		c += len(m)
	}
	return c
}

func main() {
	a, err := load()
	if err != nil {
		log.Fatalf("Couldn't load nums from file: %v", err)
	}
	counts1 := getCounts1(a)
	fmt.Println(sum(counts1))
	counts2 := getCounts2(a)
	fmt.Println(sum(counts2))
}
16.529412
56
0.518149
3.296875
70adac2679061c3253e42a94836a83bce765d64c
1,486
go
Go
pkg/storage/marker.go
asecurityteam/vpcflow-digesterd
7143c659e26b26e88fd04b04e03250b783d5c9ef
[ "Apache-2.0" ]
null
null
null
pkg/storage/marker.go
asecurityteam/vpcflow-digesterd
7143c659e26b26e88fd04b04e03250b783d5c9ef
[ "Apache-2.0" ]
6
2019-05-17T20:31:27.000Z
2021-02-01T22:21:42.000Z
pkg/storage/marker.go
asecurityteam/vpcflow-digesterd
7143c659e26b26e88fd04b04e03250b783d5c9ef
[ "Apache-2.0" ]
null
null
null
package storage

import (
	"bytes"
	"context"
	"sync"
	"time"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/service/s3"
	"github.com/aws/aws-sdk-go/service/s3/s3iface"
	"github.com/aws/aws-sdk-go/service/s3/s3manager"
	"github.com/aws/aws-sdk-go/service/s3/s3manager/s3manageriface"
)

// ProgressMarker is an implementation of Marker which allows for marking/unmarking of digests in progress
type ProgressMarker struct {
	Bucket   string
	Client   s3iface.S3API
	uploader s3manageriface.UploaderAPI
	lock     sync.Mutex
	now      func() time.Time
}

// Mark flags the digest identified by key as being "in progress"
func (m *ProgressMarker) Mark(ctx context.Context, key string) error {
	m.initUploader()

	// Fall back to the wall clock when no clock override is configured
	// (the override exists for tests).
	clock := m.now
	if clock == nil {
		clock = time.Now
	}
	timestamp := []byte(clock().Format(time.RFC3339Nano))

	input := &s3manager.UploadInput{
		Bucket: aws.String(m.Bucket),
		Key:    aws.String(key + inProgressSuffix),
		Body:   bytes.NewReader(timestamp),
	}
	_, err := m.uploader.UploadWithContext(ctx, input)
	return err
}

// Unmark flags the digest identified by key as not being "in progress"
func (m *ProgressMarker) Unmark(ctx context.Context, key string) error {
	input := &s3.DeleteObjectInput{
		Bucket: aws.String(m.Bucket),
		Key:    aws.String(key + inProgressSuffix),
	}
	_, err := m.Client.DeleteObjectWithContext(ctx, input)
	return err
}

// initUploader lazily creates the S3 upload manager exactly once, guarded by
// the marker's mutex.
func (m *ProgressMarker) initUploader() {
	m.lock.Lock()
	defer m.lock.Unlock()
	if m.uploader != nil {
		return
	}
	m.uploader = s3manager.NewUploaderWithClient(m.Client)
}
26.535714
106
0.720054
3
b2ffff75cff848e9cc4d8a6143bf4d9bf43e64d3
5,702
py
Python
sapy_script/SAP.py
fkfouri/sapy_script
476041288367e2098b955bc2377f442ce503e822
[ "MIT" ]
3
2018-12-03T15:51:54.000Z
2020-11-20T01:05:39.000Z
sapy_script/SAP.py
whrocha/sapy_script
476041288367e2098b955bc2377f442ce503e822
[ "MIT" ]
null
null
null
sapy_script/SAP.py
whrocha/sapy_script
476041288367e2098b955bc2377f442ce503e822
[ "MIT" ]
3
2018-07-28T21:53:32.000Z
2018-08-22T13:51:17.000Z
from multiprocessing import Pool, Manager from time import sleep from wmi import WMI from win32com.client import GetObject from subprocess import Popen from collections import Iterable from tqdm import tqdm from os import getpid from sapy_script.Session import Session session_process = None all_processes_id = [] def _on_init(sid, p_ids): p_ids.append(getpid()) global session_process app = SAP.app() i = 0 while True: con = app.Children(i) if con.Children(0).Info.systemsessionid == sid: session = con.Children(p_ids.index(getpid())) session_process = Session(session) break i = i + 1 def _task_executor(task): task['func'](task['data']) class SAP: def __init__(self, max_sessions=16): self._con = None self._tasks = [] self.max_sessions = max_sessions self.session = lambda i=0: Session(self._con.Children(i)) @staticmethod def app(): """Open SAPGui""" wmi_obj = WMI() sap_exists = len(wmi_obj.Win32_Process(name='saplgpad.exe')) > 0 if not sap_exists: Popen(['C:\Program Files (x86)\SAP\FrontEnd\SAPgui\saplgpad.exe']) while True: try: #temp = GetObject("SAPGUI").GetScriptingEngine #temp.Change("teste 456", "", "", "", "", ".\LocalSystem", "") #objService.Change(,, , , , , ".\LocalSystem", "") return GetObject("SAPGUI").GetScriptingEngine except: sleep(1) pass def connect(self, environment, client=None, user=None, password=None, lang=None, force=False): con = SAP.app().OpenConnection(environment, True) session = Session(con.Children(0)) if client is not None: session.findById("wnd[0]/usr/txtRSYST-MANDT").Text = client if user is not None: session.findById("wnd[0]/usr/txtRSYST-BNAME").Text = user if password is not None: session.findById("wnd[0]/usr/pwdRSYST-BCODE").Text = password if lang is not None: session.findById("wnd[0]/usr/txtRSYST-LANGU").Text = lang session.findById("wnd[0]").sendVKey(0) # Eventual tela de mudanca de senha change_pwd = False try: session.findById("wnd[1]/usr/pwdRSYST-NCODE").text = '' session.findById("wnd[1]/usr/pwdRSYST-NCOD2").text = '' change_pwd 
= True except: pass if change_pwd: raise ValueError('Please, set a new Password') # Derruba conexão SAP if force: try: session.findById("wnd[1]/usr/radMULTI_LOGON_OPT1").select() session.findById("wnd[1]/tbar[0]/btn[0]").press() except: pass else: try: session.findById("wnd[1]/usr/radMULTI_LOGON_OPT1").select() session.findById("wnd[1]").sendVKey(12) return False except: pass # Teste da Conexao if session.is_connected(): self._con = con return True self._con = None return False @property def connected(self): return self.session().is_connected() @staticmethod def session(): global session_process return session_process def sid(self): return self.session().Info.systemsessionid def logout(self): session = self.session() session.findById("wnd[0]/tbar[0]/okcd").text = "/nex" session.findById("wnd[0]").sendVKey(0) del session self._con = None @property def number_of_sessions(self): return 0 if self._con is None else len(self._con.Children) @number_of_sessions.setter def number_of_sessions(self, value): size = self.number_of_sessions if size == 0: return value = min(max(int(value), 1), self.max_sessions) minus = value < size arr = list(range(size, value)) arr.extend(reversed(range(value, size))) for i in arr: if minus: session = self.session(i) session.findById("wnd[0]/tbar[0]/okcd").text = "/i" session.findById("wnd[0]").sendVKey(0) else: self.session().createSession() sleep(0.5) def clear_tasks(self): self._tasks = [] def add_task(self, func, data): for dt in data: self._tasks.append({'func': func, 'data': dt}) def execute_tasks(self, resize_sessions=False): total = len(self._tasks) if total == 0: return if resize_sessions: self.number_of_sessions = total size = self.number_of_sessions if size == 0: return sess_manager = Manager().list([]) pool = Pool(processes=self.number_of_sessions, initializer=_on_init, initargs=(self.sid(), sess_manager)) response = list(tqdm(pool.imap_unordered(_task_executor, self._tasks))) pool.close() pool.join() return list(response) def 
execute_function(self, func, data, resize_sessions=False): if not isinstance(data, Iterable): data = [data] self.clear_tasks() self.add_task(func=func, data=data) response = self.execute_tasks(resize_sessions=resize_sessions) self.clear_tasks() return response @staticmethod def multi_arguments(func): def convert_args(pr): return func(**pr) return convert_args
28.368159
113
0.573834
3.3125
bcbdeed076feb6d35baafa8510638704f681698f
1,748
js
JavaScript
lib/stateTransition/StateTransitionFactory.js
jawid-h/js-dpp
47ae40331054a80569a4fe50ce7821f9636c2377
[ "MIT" ]
null
null
null
lib/stateTransition/StateTransitionFactory.js
jawid-h/js-dpp
47ae40331054a80569a4fe50ce7821f9636c2377
[ "MIT" ]
null
null
null
lib/stateTransition/StateTransitionFactory.js
jawid-h/js-dpp
47ae40331054a80569a4fe50ce7821f9636c2377
[ "MIT" ]
null
null
null
const { decode } = require('../util/serializer'); const InvalidStateTransitionError = require('./errors/InvalidStateTransitionError'); class StateTransitionFactory { /** * @param {validateStateTransitionStructure} validateStateTransitionStructure * @param {createStateTransition} createStateTransition */ constructor(validateStateTransitionStructure, createStateTransition) { this.validateStateTransitionStructure = validateStateTransitionStructure; this.createStateTransition = createStateTransition; } /** * Create State Transition from plain object * * @param {RawDataContractStateTransition|RawDocumentsStateTransition} rawStateTransition * @param {Object} options * @param {boolean} [options.skipValidation=false] * @return {DataContractStateTransition|DocumentsStateTransition} */ async createFromObject(rawStateTransition, options = {}) { const opts = Object.assign({ skipValidation: false }, options); if (!opts.skipValidation) { const result = await this.validateStateTransitionStructure(rawStateTransition); if (!result.isValid()) { throw new InvalidStateTransitionError(result.getErrors(), rawStateTransition); } } return this.createStateTransition(rawStateTransition); } /** * Create State Transition from string/buffer * * @param {Buffer|string} payload * @param {Object} options * @param {boolean} [options.skipValidation=false] * @return {DataContractStateTransition|DocumentsStateTransition} */ async createFromSerialized(payload, options = { }) { const rawStateTransition = decode(payload); return this.createFromObject(rawStateTransition, options); } } module.exports = StateTransitionFactory;
32.981132
91
0.745995
3.28125
37fc6856bfcabca29c5192ab7cc313e4a14ce07e
5,872
swift
Swift
VGLabel/Classes/VGLabelComponent.swift
JJson/VGLabel
4511f8f01189c4ad96f0b9423a15845f49e849d0
[ "MIT" ]
1
2017-11-15T07:04:54.000Z
2017-11-15T07:04:54.000Z
VGLabel/Classes/VGLabelComponent.swift
JJson/VGLabel
4511f8f01189c4ad96f0b9423a15845f49e849d0
[ "MIT" ]
null
null
null
VGLabel/Classes/VGLabelComponent.swift
JJson/VGLabel
4511f8f01189c4ad96f0b9423a15845f49e849d0
[ "MIT" ]
1
2022-03-24T08:38:55.000Z
2022-03-24T08:38:55.000Z
// // VGLabelComponent.swift // VGLabel // // Created by Vein on 2017/11/7. // Copyright © 2017年 Vein. All rights reserved. // import Foundation public class VGLabelComponent: NSObject { internal var componentIndex: Int = 0 internal var text: String = "" internal var tagLabel: String? internal var attributes: [String: String]? internal var position: Int = 0 class func compomemt(_ text: String, tag: String, attributes: [String: String]) -> VGLabelComponent { return VGLabelComponent(text, tag: tag, attributes: attributes) } init(_ text: String, tag: String, attributes: [String: String]) { self.text = text self.attributes = attributes tagLabel = tag } class func compomemt(_ tag: String, position: Int, attributes: [String: String]) -> VGLabelComponent { return VGLabelComponent(tag, position: position, attributes: attributes) } init(_ tag: String, position: Int, attributes: [String: String]) { self.attributes = attributes self.position = position tagLabel = tag } override public var description : String { var desc = "text: \(text), position: \(position)" if let tag = tagLabel { desc += ", tag: \(tag)" } if let att = attributes { desc += ", attributes: \(att)" } return desc } } public class VGLabelExtractedComponent: NSObject { internal var textComponents: [VGLabelComponent]? internal var plainText: String? class func labelExtractedComponent(_ textComponents: [VGLabelComponent], plainText: String) -> VGLabelExtractedComponent { let extractedComponent = VGLabelExtractedComponent() extractedComponent.textComponents = textComponents extractedComponent.plainText = plainText return extractedComponent } class func extractTextStyle(_ data: String, paragraphReplacement: String) -> VGLabelExtractedComponent { var text: NSString? = nil var tag: String? 
= nil var styleData = data var components = [VGLabelComponent]() var lastPosition = 0 let scanner = Scanner(string: data) while !scanner.isAtEnd { scanner.scanUpTo("<", into: nil) scanner.scanUpTo(">", into: &text) let delimiter = String(format: "%@>", text!) let position = (styleData as NSString).range(of: delimiter).location if position != NSNotFound { if delimiter.range(of: "<p")?.lowerBound.encodedOffset == 0 { let nsRange = NSRange(location: lastPosition, length: position + delimiter.count - lastPosition) styleData = (styleData as NSString).replacingOccurrences(of: delimiter, with: paragraphReplacement, options: .caseInsensitive, range: nsRange) } else { let nsRange = NSRange(location: lastPosition, length: position + delimiter.count - lastPosition) styleData = (styleData as NSString).replacingOccurrences(of: delimiter, with: "", options: .caseInsensitive, range: nsRange) } styleData = styleData.replacingOccurrences(of: "&lt;", with: "<") styleData = styleData.replacingOccurrences(of: "&gt;", with: ">") } if text?.range(of: "</").location == 0 { // end of tag tag = text?.substring(from: 2) if position != NSNotFound { for component in components.reversed() { if component.text.count == 0, component.tagLabel == tag { let text = (styleData as NSString).substring(with: NSRange(location: component.position, length: position - component.position)) component.text = text break } } } } else { // start of tag if let textComponents = text?.substring(from: 1).components(separatedBy: " ") { tag = textComponents[0] var attributes = [String: String]() for (index, textComponent) in textComponents.enumerated() { if index == 0 { continue } let pair = textComponent.components(separatedBy: "=") if pair.count > 0 { let key = pair[0].lowercased() if pair.count >= 2 { // Trim " charactere var value = (pair[1...pair.count - 1]).joined(separator: "=") value = (value as NSString).replacingOccurrences(of: "\"", with: "", options: .literal, range: NSRange(location: 0, length: 1)) value = 
(value as NSString).replacingOccurrences(of: "\"", with: "", options: .literal, range: NSRange(location: value.count-1, length: 1)) attributes[key] = value } else if pair.count == 1 { attributes[key] = key } } } let component = VGLabelComponent.compomemt("", tag: tag!, attributes: attributes) component.position = position components.append(component) } lastPosition = position } } return VGLabelExtractedComponent.labelExtractedComponent(components, plainText: styleData) } }
42.861314
171
0.535763
3.03125
f2cbc37b94c15a4b34d5f11c14e4a7235926cef3
2,355
lua
Lua
src/cards/theft.lua
Barocena/unotfm
cf0e59eb08a04818a8bd3d838b04b06378aa6fb6
[ "MIT" ]
4
2020-07-08T20:04:29.000Z
2022-02-13T14:19:14.000Z
src/cards/theft.lua
Barocena/unotfm
cf0e59eb08a04818a8bd3d838b04b06378aa6fb6
[ "MIT" ]
null
null
null
src/cards/theft.lua
Barocena/unotfm
cf0e59eb08a04818a8bd3d838b04b06378aa6fb6
[ "MIT" ]
4
2021-07-22T05:04:43.000Z
2022-01-21T20:13:16.000Z
function theft(n) if ROUND.gameMode.hell and ROUND.accumulated then ROUND.accumulated = nil addBlockPoint(ROUND.chair[n].owner) end local image = {tfm.exec.addImage(IMG.misc.genericLayer, "!1000", 0, 0, ROUND.chair[n].owner)} local option = {} for i, v in pairs(ROUND.chair) do if (v.mode ~= "DELETED") and i ~= n then table.insert(image, tfm.exec.addImage(IMG.misc.target, "!1000", v.x-50, 30)) option[i] = true end end ROUND.chair[n].action = { name = "THEFT", img = image, op = option, func = makeTheft } resetTimer() updateTurnTimer() end function makeTheft(n, target) if target then local temp = {} if n ~= target and ROUND.chair[target].mode ~= "DELETED" then if ROUND.chair[n].action then for i, v in pairs(ROUND.chair[n].action.img) do tfm.exec.removeImage(v) end end ROUND.chair[n].action = false -- local found = {} for i, v in pairs(ROUND.chair[target].hand) do if v[1] == "black" then table.insert(found, i) end end if #found > 0 then local rand = found[math.random(#found)] temp = table.remove(ROUND.chair[target].hand, rand) discardEffect(target, rand) table.insert(ROUND.chair[n].hand, temp) missCard(n, temp, 2000) explosion(5, ROUND.chair[n].x, 100, 5, 10) sortHand(ROUND.chair[n].hand) ROUND.chair[n].uno = nil updateHand(n) updateHand(target) if #ROUND.chair[target].hand == 1 then ROUND.chair[target].uno = "uno" end if isIlluminati(n) then unlockChair(ROUND.chair[n].owner, "illuminati") end if temp[2] == "draw4" then if ROUND.chair[n].theft then unlockChair(ROUND.chair[n].owner, "gaga") else ROUND.chair[n].theft = true end end updateScore(n) updateScore(target) showCardsGainned(n, 1) showCardsGainned(target, -1) end -- if #ROUND.chair[target].hand == 0 then local name = ROUND.chair[target].owner endGame(ROUND.chair[target].owner) unlockChair(target, "gift") else passTurn() ROUND.chair[n].confuse = false batataTimer(n) updateShadow(n) end end else if ROUND.chair[n].action then for i, v in pairs(ROUND.chair[n].action.img) do tfm.exec.removeImage(v) end 
ROUND.chair[n].action = false end passTurn() batataTimer(n) updateShadow(n) end end
24.53125
94
0.638641
3.109375
3ab55339a3a98a636c9974ba92fd90088eaec34e
1,852
sql
SQL
oracle/ora/awr/awrload.sql
hyee/dbcli
a39fdffdc5a15b9a6e17dc8e6e852003a8dedf0d
[ "MIT" ]
44
2015-05-07T16:11:03.000Z
2021-09-19T08:59:20.000Z
oracle/ora/awr/awrload.sql
hyee/dbcli
a39fdffdc5a15b9a6e17dc8e6e852003a8dedf0d
[ "MIT" ]
8
2015-05-08T03:38:03.000Z
2020-05-22T11:00:47.000Z
oracle/ora/awr/awrload.sql
hyee/dbcli
a39fdffdc5a15b9a6e17dc8e6e852003a8dedf0d
[ "MIT" ]
24
2015-05-07T16:17:41.000Z
2022-01-02T13:10:29.000Z
/*[[Import AWR repository dump. Usage: @@NAME <directory_name> <file_name> [<new_dbid>] --[[ @ARGS: 2 --]] ]]*/ SET SQLTIMEOUT 7200 DECLARE dir VARCHAR2(128) := :V1; file VARCHAR2(512) := :V2; did INT := :V3; root VARCHAR2(512); dump BFILE; len NUMBER; stage VARCHAR2(30) := 'DBCLI_AWR'; BEGIN SELECT MAX(directory_name), MAX(directory_path) INTO dir, root FROM ALL_DIRECTORIES WHERE upper(directory_name) = upper(dir); IF dir IS NULL THEN raise_application_error(-20001, 'Cannot access directory: ' || :V1); END IF; IF NOT regexp_like(root, '[\\/]$') THEN root := root || CASE WHEN root LIKE '%/%' THEN '/' ELSE '\' END; END IF; dump := bfilename(dir, file||'.dmp'); BEGIN dbms_lob.fileopen(dump); len := dbms_lob.getlength(dump); dbms_lob.fileclose(dump); EXCEPTION WHEN OTHERS THEN raise_application_error(-20001, 'Cannot access file: ' || root || file || '.dmp'); END; $IF DBMS_DB_VERSION.VERSION>18 $THEN sys.dbms_workload_repository.awr_imp(dmpfile => file, dmpdir => dir, new_dbid => did); $ELSE BEGIN stage := CASE sys_context('userenv', 'con_name') WHEN 'CDB$ROOT' THEN 'C##' END || stage; EXCEPTION WHEN OTHERS NULL; END; $IF DBMS_DB_VERSION.VERSION>17 $THEN sys.dbms_workload_repository.load(schname => stage, dmpfile => file, dmpdir => dir, new_dbid => did); $ELSE dbms_swrf_internal.awr_load(schname => stage,dmpfile => file, dmpdir => dir); dbms_swrf_internal.move_to_awr(schname => stage, new_dbid => did); dbms_swrf_internal.clear_awr_dbid; $END $END dbms_output.put_line('AWR repository is imported from ' || root || file || '.dmp'); END; /
33.672727
113
0.599892
3.015625
28606435e1e5e3f888988d0dc49fb37281a76692
9,578
lua
Lua
modules/libraries/backend/pulseaudio/init.lua
SkyyySi/norsome2
74e5977868230ddc9185f1d27a3d849440e9fb15
[ "Unlicense" ]
null
null
null
modules/libraries/backend/pulseaudio/init.lua
SkyyySi/norsome2
74e5977868230ddc9185f1d27a3d849440e9fb15
[ "Unlicense" ]
null
null
null
modules/libraries/backend/pulseaudio/init.lua
SkyyySi/norsome2
74e5977868230ddc9185f1d27a3d849440e9fb15
[ "Unlicense" ]
null
null
null
#!/usr/bin/env lua5.3 local awful = require("awful") local wibox = require("wibox") local gears = require("gears") local naughty = require("naughty") -- Define the base module local pulseaudio = { -- Create a new instance new = function(self, selected_cli) local object = { -- There are multiple pulseaudio clis available, see below. selected_cli = selected_cli or "pamixer" } setmetatable(object, self) self.__index = self return object end } -- Signals used by this module: -- > "pulseaudio::get_volume": used for widgets to be notified when -- | the volume was changed by another part of the code. -- Commands used to call the specific cli tools used to mamange pulseaudio. -- By default, this is configured for pamixer, but on Debian, you may want to -- override this with pulsemixer. pulseaudio.pamixer = {} pulseaudio.pulsemixer = {} -------------------------------------------------- --- Get volume --- -------------------------------------------------- -- Get the current playback volume. Callback must be nil or a function accepting -- a number value (the volume in percent). Note that this function is not -- intended to be called directly. Use the "pulseaudio::get_volume"-signal instead. function pulseaudio.pamixer.get_volume(callback) awful.spawn.easy_async({"pamixer", "--get-volume"}, function(volume) volume = tonumber(volume) if callback then callback(volume) end awesome.emit_signal("pulseaudio::get_volume", volume) end) end -- Run it automatically in a loop. pulseaudio.pamixer.volume_updater = gears.timer { timeout = 0.3, call_now = true, autostart = true, callback = function() pulseaudio.pamixer.get_volume() end, } -- Signal that the timer is alreay running. awesome.emit_signal("pulseaudio::volume_updater_running", true) -- Prevent multiple instances to needlessly run at the same time. 
awesome.connect_signal("pulseaudio::volume_updater_running", function(status) if status then notify("Stopped already running update timer!") pulseaudio.pamixer.volume_updater:stop() end end) -------------------------------------------------- --- Set volume --- -------------------------------------------------- -- Set the current playback volume. function pulseaudio.pamixer.set_volume(volume) awful.spawn({"pamixer", "--set-volume", tostring(volume)}) end -- Connect it to a signal. awesome.connect_signal("pulseaudio::set_volume", function(volume) pulseaudio.pamixer.set_volume(volume) end) -------------------------------------------------- --- Increment volume --- -------------------------------------------------- -- Increment the volume by n or 1 function pulseaudio.pamixer.increase_volume(volume) if not volume then volume = 1 end awful.spawn({"pamixer", "--increase", tostring(volume)}) end -- Connect it to a signal. awesome.connect_signal("pulseaudio::increase_volume", function(volume) pulseaudio.pamixer.increase_volume(volume) end) -------------------------------------------------- --- Decrement volume --- -------------------------------------------------- -- Decrement the volume by n or 1 function pulseaudio.pamixer.decrease_volume(volume) if not volume then volume = 1 end awful.spawn({"pamixer", "--decrease", tostring(volume)}) end -- Connect it to a signal. awesome.connect_signal("pulseaudio::decrease_volume", function(volume) pulseaudio.pamixer.decrease_volume(volume) end) -------------------------------------------------- --- Mute status --- -------------------------------------------------- -- Get the current mute status. true = muted, false = unmuted. -- Callback must be nil or a function accepting a boolean value. -- Note that this function is not intended to be called directly. -- Use the "pulseaudio::get_mute"-signal instead. 
function pulseaudio.pamixer.get_mute(callback) awful.spawn.easy_async({"pamixer", "--get-mute"}, function(status) -- cli commands always return a string and lua does not have a -- `tobool()`-builtin. if status == "true" then status = true else status = false end if callback then callback(tonumber(status)) end awesome.emit_signal("pulseaudio::get_mute", status) end) end -------------------------------------------------- --- Mute --- -------------------------------------------------- -- Mute the volume function pulseaudio.pamixer.mute() awful.spawn({"pamixer", "--mute"}) end -- Connect it to a signal. awesome.connect_signal("pulseaudio::mute", function() pulseaudio.pamixer.mute() end) -------------------------------------------------- --- Unmute --- -------------------------------------------------- -- Unmute the volume function pulseaudio.pamixer.unmute() awful.spawn({"pamixer", "--mute"}) end -- Connect it to a signal. awesome.connect_signal("pulseaudio::unmute", function() pulseaudio.pamixer.mute() end) -------------------------------------------------- --- Toggle mute --- -------------------------------------------------- -- Unmute the volume function pulseaudio.pamixer.unmute() awful.spawn({"pamixer", "--toggle-mute"}) end -- Connect it to a signal. awesome.connect_signal("pulseaudio::toggle_mute", function() pulseaudio.pamixer.mute() end) -------------------------------------------------- --- Widgets --- -------------------------------------------------- -- A few basic widgets for managing volume. Can be used as templates for -- creating your own, more advanced (or nicer looking) ones. pulseaudio.widget = {} -------------------------------------------------- --- Slider --- -------------------------------------------------- -- A basic volume slider based on wibox.widget.slider -- The first argument must be arguments for wibox.widget.slider -- as a table or nil. 
function pulseaudio.widget.volume_slider(args) --> table -- Set a few basic default options if none are given. if not args then args = { minimum = 0, value = 50, maximum = 100, forced_width = 200, } end -- Create a new slider widget. local slider = wibox.widget.slider(args) -- Connect the widget to the volume signal. awesome.connect_signal("pulseaudio::get_volume", function(volume) slider:set_value(volume) end) -- Allow to set the volume using the mouse wheel. slider:connect_signal("button::press", function(_,_,_,button) if button == 4 then awesome.emit_signal("pulseaudio::increase_volume", 5) elseif button == 5 then awesome.emit_signal("pulseaudio::decrease_volume", 5) end end) -- Change the system volume when the slider value changes. slider:connect_signal("property::value", function() awesome.emit_signal("pulseaudio::set_volume", slider.value) end) return slider end -------------------------------------------------- --- Text label --- -------------------------------------------------- function pulseaudio.widget.volume_label(args) --> table -- Create a new slider widget. local label = wibox.widget.textbox(args) -- Connect the widget to the volume signal. awesome.connect_signal("pulseaudio::get_volume", function(volume) label:set_text(volume) end) -- Allow to (un-)mute by left clicking the label and to -- set the volume using the mouse wheel. 
label:connect_signal("button::press", function(_,_,_,button) if button == 1 then awesome.emit_signal("pulseaudio::toggle_mute") elseif button == 4 then awesome.emit_signal("pulseaudio::increase_volume", 5) elseif button == 5 then awesome.emit_signal("pulseaudio::decrease_volume", 5) end end) return label end -------------------------------------------------- --- Status notifications --- -------------------------------------------------- pulseaudio.notification = { volume = { enabled = false }, mute = { enabled = false }, } function pulseaudio.notification.volume:enable() self.enabled = true awesome.connect_signal("pulseaudio::get_volume", function(volume) naughty.notification { title = "Volume change", message = "Current volume: " .. tostring(volume), category = "pulseaudio.volume", app_name = "pulseaudio_cli_awesome_bindings", timeout = 1, } end) end function pulseaudio.notification.volume:disable() self.enabled = false awesome.disconnect_signal("pulseaudio::get_volume") end function pulseaudio.notification.volume:toggle() if self.enabled then self:disable() else self:enable() end end function pulseaudio.notification.mute:enable() self.enabled = true awesome.connect_signal("pulseaudio::get_mute", function(muted) local title = "Unmuted" local text = "System unmuted" if muted then title = "Muted" text = "System muted" end naughty.notification { title = title, message = text, category = "pulseaudio.mute", app_name = "pulseaudio_cli_awesome_bindings", timeout = 1, } end) end function pulseaudio.notification.mute:disable() self.enabled = false awesome.disconnect_signal("pulseaudio::get_mute") end function pulseaudio.notification.mute:toggle() if self.enabled then self:disable() else self:enable() end end -- Select the correct cli tool. if pulseaudio.current_cli == "pamixer" then pulseaudio.cli = pulseaudio.pamixer elseif pulseaudio.current_cli == "pulsemixer" then pulseaudio.cli = pulseaudio.pulsemixer end return pulseaudio:new()
29.29052
83
0.609313
3.140625
99412084ad8354eeac28b286ec51d9199233a7ad
1,220
h
C
include/Crc.h
Tomash667/carpglib
c8701b170e4e2cbdb4d52fe3b7c8529afb3e97ed
[ "MIT" ]
4
2019-08-18T19:33:04.000Z
2021-08-07T02:12:54.000Z
include/Crc.h
Tomash667/carpglib
c8701b170e4e2cbdb4d52fe3b7c8529afb3e97ed
[ "MIT" ]
5
2019-08-14T05:45:56.000Z
2021-03-15T07:47:24.000Z
include/Crc.h
Tomash667/carpglib
c8701b170e4e2cbdb4d52fe3b7c8529afb3e97ed
[ "MIT" ]
2
2019-10-05T02:36:35.000Z
2021-02-15T20:20:09.000Z
#pragma once //----------------------------------------------------------------------------- class Crc { public: Crc() : m_crc(CRC32_NEGL) {} void Update(const byte *input, size_t length); uint Get() const { return ~m_crc; } operator uint() const { return Get(); } // Don't use for types with padding! template<typename T> void Update(const T& item) { Update((const byte*)&item, sizeof(item)); } template<> void Update(const string& str) { if(!str.empty()) Update((const byte*)str.c_str(), str.length()); } template<> void Update(const cstring& str) { assert(str); Update((const byte*)str, strlen(str)); } // Don't use for types with padding! template<typename T> void Update(const vector<T>& v) { Update(v.size()); if(!v.empty()) Update((const byte*)v.data(), v.size() * sizeof(T)); } template<> void Update(const vector<string>& v) { Update(v.size()); for(const string& s : v) Update(s); } void Update0() { Update<byte>(0); } void Update1() { Update<byte>(1); } static uint Calculate(Cstring filename); static uint Calculate(FileReader& file); private: static const uint CRC32_NEGL = 0xffffffffL; static const uint m_tab[256]; uint m_crc; };
17.941176
79
0.602459
3.0625
c7d423fa068508464eb1bdffe43ab9ac7402f665
6,603
py
Python
pyowl.py
vene/pyowl
edef4cfeb31c4ea52cc67dce581372e2270fce2a
[ "BSD-3-Clause" ]
57
2017-02-11T09:17:47.000Z
2021-02-21T11:07:28.000Z
pyowl.py
vene/pyowl
edef4cfeb31c4ea52cc67dce581372e2270fce2a
[ "BSD-3-Clause" ]
3
2019-08-07T15:09:00.000Z
2019-08-30T11:53:17.000Z
pyowl.py
vene/pyowl
edef4cfeb31c4ea52cc67dce581372e2270fce2a
[ "BSD-3-Clause" ]
13
2017-02-12T21:43:59.000Z
2019-09-15T03:10:06.000Z
# Author: Vlad Niculae <[email protected]> # License: BSD 3 clause from __future__ import print_function from __future__ import division import numpy as np from sklearn.base import BaseEstimator, ClassifierMixin, RegressorMixin from sklearn.utils.extmath import safe_sparse_dot from sklearn.isotonic import isotonic_regression from sklearn.preprocessing import LabelBinarizer from fista import fista from loss import get_loss def prox_owl(v, w): """Proximal operator of the OWL norm dot(w, reversed(sort(v))) Follows description and notation from: X. Zeng, M. Figueiredo, The ordered weighted L1 norm: Atomic formulation, dual norm, and projections. eprint http://arxiv.org/abs/1409.4271 """ # wlog operate on absolute values v_abs = np.abs(v) ix = np.argsort(v_abs)[::-1] v_abs = v_abs[ix] # project to K+ (monotone non-negative decreasing cone) v_abs = isotonic_regression(v_abs - w, y_min=0, increasing=False) # undo the sorting inv_ix = np.zeros_like(ix) inv_ix[ix] = np.arange(len(v)) v_abs = v_abs[inv_ix] return np.sign(v) * v_abs def _oscar_weights(alpha, beta, size): w = np.arange(size - 1, -1, -1, dtype=np.double) w *= beta w += alpha return w def _fit_owl_fista(X, y, w, loss, max_iter=500, max_linesearch=20, eta=2.0, tol=1e-3, verbose=0): # least squares loss def sfunc(coef, grad=False): y_scores = safe_sparse_dot(X, coef) if grad: obj, lp = loss(y, y_scores, return_derivative=True) grad = safe_sparse_dot(X.T, lp) return obj, grad else: return loss(y, y_scores) def nsfunc(coef, L): return prox_owl(coef, w / L) coef = np.zeros(X.shape[1]) return fista(sfunc, nsfunc, coef, max_iter, max_linesearch, eta, tol, verbose) class _BaseOwl(BaseEstimator): """ Solves sum loss(y_pred, y) + sum_j weights_j |coef|_(j) where u_(j) is the jth largest component of the vector u. and weights is a monotonic nonincreasing vector. 
OWL is also known as: sorted L1 norm, SLOPE Parameters ---------- weights: array, shape (n_features,) or tuple, length 2 Nonincreasing weights vector for the ordered weighted L1 penalty. If weights = (alpha, 0, 0, ..., 0), this amounts to a L_inf penalty. If weights = alpha * np.ones(n_features) it amounts to L1. If weights is a tuple = (alpha, beta), the OSCAR penalty is used:: alpha ||coef||_1 + beta sum_{i<j} max{|x_i|, |x_j|) by computing the corresponding `weights` vector as:: weights_i = alpha + beta(n_features - i - 1) loss: string, default: "squared" Loss function to use, see loss.py to add your own. max_iter: int, default: 500 Maximum FISTA iterations. max_linesearch: int, default: 20 Maximum number of FISTA backtracking line search steps. eta: float, default: 2 Amount by which to increase step size in FISTA bactracking line search. tol: float, default: 1e-3 Tolerance for the convergence criterion. verbose: int, default 0: Degree of verbosity to print from the solver. References ---------- X. Zeng, M. Figueiredo, The ordered weighted L1 norm: Atomic formulation, dual norm, and projections. 
eprint http://arxiv.org/abs/1409.4271 """ def __init__(self, weights, loss='squared', max_iter=500, max_linesearch=20, eta=2.0, tol=1e-3, verbose=0): self.weights = weights self.loss = loss self.max_iter = max_iter self.max_linesearch = max_linesearch self.eta = eta self.tol = tol self.verbose = verbose def fit(self, X, y): n_features = X.shape[1] loss = self.get_loss() weights = self.weights if isinstance(weights, tuple) and len(weights) == 2: alpha, beta = self.weights weights = _oscar_weights(alpha, beta, n_features) self.coef_ = _fit_owl_fista(X, y, weights, loss, self.max_iter, self.max_linesearch, self.eta, self.tol, self.verbose) return self def _decision_function(self, X): return safe_sparse_dot(X, self.coef_) class OwlRegressor(_BaseOwl, RegressorMixin): """Ordered Weighted L1--penalized (OWL) regression solved by FISTA""" __doc__ += _BaseOwl.__doc__ def get_loss(self): if self.loss != 'squared': raise NotImplementedError('Only regression loss implemented ' 'at the moment is squared.') return get_loss(self.loss) def predict(self, X): return self._decision_function(X) class OwlClassifier(_BaseOwl, ClassifierMixin): """Ordered Weighted L1--penalized (OWL) classification solved by FISTA""" __doc__ += _BaseOwl.__doc__ def get_loss(self): return get_loss(self.loss) def fit(self, X, y): self.lb_ = LabelBinarizer(neg_label=-1) y_ = self.lb_.fit_transform(y).ravel() return super(OwlClassifier, self).fit(X, y_) def decision_function(self, X): return self._decision_function(X) def predict(self, X): y_pred = self.decision_function(X) > 0 return self.lb_.inverse_transform(y_pred) if __name__ == '__main__': from sklearn.model_selection import train_test_split from sklearn.datasets import load_boston, load_breast_cancer print("OSCAR proximal operator on toy example:") v = np.array([1, 3, 2.9, 4, 0]) w_oscar = _oscar_weights(alpha=0.01, beta=1, size=5) print(prox_owl(v, w_oscar)) print() print("Regression") X, y = load_boston(return_X_y=True) X = np.column_stack([X, 
-X[:, 0] + 0.01 * np.random.randn(X.shape[0])]) X_tr, X_te, y_tr, y_te = train_test_split(X, y, random_state=0) clf = OwlRegressor(weights=(1, 100)) clf.fit(X_tr, y_tr) print("Correlated coefs", clf.coef_[0], clf.coef_[-1]) print("Test score", clf.score(X_te, y_te)) print() print("Classification") X, y = load_breast_cancer(return_X_y=True) X = np.column_stack([X, -X[:, 0] + 0.01 * np.random.randn(X.shape[0])]) X_tr, X_te, y_tr, y_te = train_test_split(X, y, random_state=0) clf = OwlClassifier(weights=(1, 100), loss='squared-hinge') clf.fit(X_tr, y_tr) print("Correlated coefs", clf.coef_[0], clf.coef_[-1]) print("Test score", clf.score(X_te, y_te))
31.442857
79
0.6335
3.09375
816541fd0ac9aa27940ad0ee8d31a1aaaa0de62e
2,467
rs
Rust
typebinder/src/pipeline/mod.rs
impero-com/ts_export
850f28f2286714e47ddc9179c56718b634cef279
[ "X11" ]
4
2021-02-22T17:01:07.000Z
2021-02-24T09:43:14.000Z
typebinder/src/pipeline/mod.rs
impero-com/typebinder
a5414b57ce8d6dcd8b57c9ea08c7a1ab646c5380
[ "X11" ]
18
2021-03-04T16:07:51.000Z
2021-11-03T17:04:55.000Z
typebinder/src/pipeline/mod.rs
impero-com/ts_export
850f28f2286714e47ddc9179c56718b634cef279
[ "X11" ]
2
2021-03-08T15:16:39.000Z
2021-03-19T15:28:24.000Z
//! The core logic of `typebinder` use crate::{ contexts::type_solving::TypeSolvingContext, error::TsExportError, exporters::Exporter, macros::context::MacroSolvingContext, path_mapper::PathMapper, step_spawner::PipelineStepSpawner, }; use syn::{punctuated::Punctuated, Path}; use self::module_step::{ModuleStepResult, ModuleStepResultData}; pub mod module_step; pub mod step_result; /// The Pipeline is the starting point of `typebinder`. /// /// A Pipeline is customized with both a [PipelineStepSpawner] and an [Exporter] implementor. /// /// When launched, the [Pipeline] will use its [PipelineStepSpawner] to spawn the "default" module, that is, the module with an empty path. /// Each [ModuleStep](crate::pipeline::module_step::ModuleStep) thereby generated is then launched, see [ModuleStep::launch](crate::pipeline::module_step::ModuleStep). /// /// Each output is passed to the [Exporter], that is responsible for outputting the data. pub struct Pipeline<PSS, E> { pub pipeline_step_spawner: PSS, pub exporter: E, pub path_mapper: PathMapper, } impl<PSS, E> Pipeline<PSS, E> where PSS: PipelineStepSpawner, E: Exporter, TsExportError: From<PSS::Error> + From<E::Error>, { pub fn launch( mut self, solving_context: &TypeSolvingContext, macro_context: &MacroSolvingContext, ) -> Result<(), TsExportError> { let path = Path { leading_colon: None, segments: Punctuated::default(), }; let res = self .pipeline_step_spawner .create_process(path)? .ok_or(TsExportError::FailedToLaunch)? 
.launch( &self.pipeline_step_spawner, solving_context, macro_context, &self.path_mapper, )?; let mut all_results: Vec<ModuleStepResultData> = Vec::new(); extractor(&mut all_results, res); for result_data in all_results.into_iter() { if result_data.imports.is_empty() && result_data.exports.is_empty() { continue; } self.exporter.export_module(result_data)?; } self.exporter.finish(); Ok(()) } } /// TODO: refactor this to a closure fn extractor(all: &mut Vec<ModuleStepResultData>, iter: ModuleStepResult) { iter.children .into_iter() .for_each(|child| extractor(all, child)); all.push(iter.data); }
31.628205
167
0.643697
3.015625
b6d937c6bf167ae0e8646a47e76aaee72e16392d
1,417
swift
Swift
swift-sdk/CommerceItem.swift
brunosylvain/swift-sdk
8c22e0390b47ff3fecf0ae6b2b71fefa180eb904
[ "MIT" ]
null
null
null
swift-sdk/CommerceItem.swift
brunosylvain/swift-sdk
8c22e0390b47ff3fecf0ae6b2b71fefa180eb904
[ "MIT" ]
null
null
null
swift-sdk/CommerceItem.swift
brunosylvain/swift-sdk
8c22e0390b47ff3fecf0ae6b2b71fefa180eb904
[ "MIT" ]
null
null
null
// // Created by Tapash Majumder on 6/6/18. // Copyright © 2018 Iterable. All rights reserved. // import UIKit /** `CommerceItem` represents a product. These are used by the commerce API; see [IterableAPI trackPurchase:items:dataFields:] */ @objcMembers public class CommerceItem: NSObject { /** id of this product */ public var id: String /** name of this product */ public var name: String /** price of this product */ public var price: NSNumber /** quantity of this product */ public var quantity: UInt /** Creates a `CommerceItem` with the specified properties - parameters: - id: id of the product - name: name of the product - price: price of the product - quantity: quantity of the product - returns: an instance of `CommerceItem` with the specified properties */ public init(id: String, name: String, price: NSNumber, quantity: UInt) { self.id = id self.name = name self.price = price self.quantity = quantity } /** A Dictionary respresentation of this item - returns: An NSDictionary representing this item */ public func toDictionary() -> [AnyHashable: Any] { return ["id": id, "name": name, "price": price, "quantity": quantity] } }
26.240741
123
0.587862
3
403ed8c759891b08ee51690d9702e340ed6f4833
2,620
py
Python
instascrape/exceptions.py
tnychn/instascrape
7aaf3c1a1786bbe80059ed6e0d93442a19a6f475
[ "MIT" ]
80
2020-05-28T17:22:14.000Z
2022-03-25T07:15:51.000Z
instascrape/exceptions.py
AlphaXenon/InstaScrape
7aaf3c1a1786bbe80059ed6e0d93442a19a6f475
[ "MIT" ]
23
2020-05-25T12:45:40.000Z
2022-03-06T05:44:41.000Z
instascrape/exceptions.py
AlphaXenon/InstaScrape
7aaf3c1a1786bbe80059ed6e0d93442a19a6f475
[ "MIT" ]
14
2020-06-28T05:52:28.000Z
2022-03-28T04:27:50.000Z
class InstascrapeError(Exception): """Base exception class for all of the exceptions raised by Instascrape.""" class ExtractionError(InstascrapeError): """Raised when Instascrape fails to extract specified data from HTTP response.""" def __init__(self, message: str): super().__init__("Failed to extract data from response. (message: '{0}')".format(message)) class PrivateAccessError(InstascrapeError): """Raised when user does not have permission to access specified data, i.e. private profile which the user is not following.""" def __init__(self): super().__init__("The user profile is private and not being followed by you.") class RateLimitedError(InstascrapeError): """Raised when Instascrape receives a 429 TooManyRequests from Instagram.""" def __init__(self): super().__init__("(429) Too many requests. Failed to query data. Rate limited by Instagram.") class NotFoundError(InstascrapeError): """Raised when Instascrape receives a 404 Not Found from Instagram.""" def __init__(self, message: str = None): super().__init__(message or "(404) Nothing found.") class ConnectionError(InstascrapeError): """Raised when Instascrape fails to connect to Instagram server.""" def __init__(self, url: str): super().__init__("Failed to connect to '{0}'.".format(url)) class LoginError(InstascrapeError): """Raised when Instascrape fails to perform authentication, e.g. wrong credentials.""" def __init__(self, message: str): super().__init__("Failed to log into Instagram. 
(message: '{0}')".format(message)) class TwoFactorAuthRequired(LoginError): """Raised when Instascrape fails to perform authentication due to two-factor authenticattion.""" def __init__(self): super().__init__("two-factor authentication is required") class CheckpointChallengeRequired(LoginError): """Raised when Instascrape fails to perform authentication due to checkpoint challenge.""" def __init__(self): super().__init__("checkpoint challenge solving is required") class AuthenticationRequired(InstascrapeError): """Raised when anonymous/unauthenticated (guest) user tries to perform actions that require authentication.""" def __init__(self): super().__init__("Login is required in order to perform this action.") class DownloadError(InstascrapeError): """Raised when Instascrape fails to download data from Instagram server.""" def __init__(self, message: str, url: str): super().__init__("Download Failed -> {0} (url: '{1}')".format(message, url))
35.890411
131
0.719084
3.296875
c7c0ddf187a121fa94e350a87f3a1a34fe08c11c
10,107
py
Python
ball.py
adata111/brick-carnival
38dcb03764e00b84010eaa61dbec79c087dc9295
[ "BSD-2-Clause" ]
null
null
null
ball.py
adata111/brick-carnival
38dcb03764e00b84010eaa61dbec79c087dc9295
[ "BSD-2-Clause" ]
null
null
null
ball.py
adata111/brick-carnival
38dcb03764e00b84010eaa61dbec79c087dc9295
[ "BSD-2-Clause" ]
null
null
null
from headers import * import globalVar from globalVar import TOP, BOTTOM, LIVES, HT, WIDTH, LEFT, RIGHT, obj_bricks, paddle, ALT_LIVES rows = HT cols = WIDTH class Ball: """docstring for Ball contains all methods that can modify state of the ball. """ def __init__(self, x, y, vx, vy, m): super().__init__() self.width = 1 self.height = 1 self.x = x self.y = y self.v_x = vx self.v_y = -1 self.moving = m self.thru = 0 self.dead = 0 self.fast = 0 self.fire = 0 def move(self,v=1): paddle=globalVar.paddle temp = self.v_x if(self.moving == 0): # movement with paddle if(paddle.x+paddle.width>=RIGHT and v>0): v = 0 elif(paddle.x<=LEFT and v<0): v = 0 self.x += v*paddle.v return self.x += self.v_x if(self.x+self.width>=RIGHT and self.v_x>0): self.v_x = -self.v_x elif(self.x<=LEFT and self.v_x<0): self.v_x = -self.v_x elif(self.x+self.v_x+self.width>RIGHT): self.x = RIGHT-self.v_x-self.width elif(self.x+self.v_x<LEFT): self.x = LEFT-self.v_x if(self.v_x!=temp): os.system('aplay -q ./sounds/ball_wall.wav&') self.y += self.v_y temp = self.v_y if(self.y+self.height>=BOTTOM and self.v_y>0): # v_y>0 means it will go down self.kill_ball() return elif(self.y<=TOP and self.v_y<0): self.v_y = -self.v_y if(self.y+self.v_y>BOTTOM): self.y = BOTTOM-self.height # self.kill_ball() return elif(self.y+self.v_y<TOP): # self.v_y = -self.v_y self.y = TOP-self.v_y if(self.v_y!=temp): os.system('aplay -q ./sounds/ball_wall.wav&') def check_paddle_collision(self): paddle = globalVar.paddle check =0 if(self.x+self.width > paddle.x and self.x<paddle.x+paddle.width): # ball is within x coordinates of paddle if(self.y+self.height==paddle.y): self.set_vel(- self.v_y) if(paddle.is_sticky()): self.moving = 0 os.system('aplay -q ./sounds/ball_paddle.wav&') return 1 if(self.x+self.width<paddle.x+paddle.width and self.x+self.v_x+self.width>paddle.x and self.v_x>=0): # top-left collision possible if(self.y+self.height<paddle.y and self.y+self.v_y+self.height>paddle.y): # top-left collision self.y = 
paddle.y-self.height - self.v_y check=1 elif(self.x>(paddle.x) and self.x+self.v_x<paddle.width+paddle.x and self.v_x<=0): # top-right collision possible if(self.y+self.height<paddle.y and self.v_y+self.y+self.height>paddle.y): # top-right collision # self.set_vel(-self.v_y) self.y = paddle.y-self.height - self.v_y check=1 return check def check_brick_collision(self): check = 0 v_x=self.v_x v_y=self.v_y for brick in reversed(globalVar.obj_bricks): if(brick.is_broken()): continue if((self.x>=brick.getx() and self.x+self.width<=brick.getx()+brick.width) and ((self.y<=brick.gety()+brick.height and self.y+self.height>=brick.gety() and self.v_y<0) or (self.y+self.height>=brick.gety() and self.y<=brick.gety()+brick.height and self.v_y>0)) ): if(self.v_y==0): #not possible, but okay continue # collision with top or bottom brick surface v_y = -self.v_y check = 1 elif(((self.x+self.width==brick.getx() and self.v_x>0) or (self.x==brick.getx()+brick.width and self.v_x<0)) and (self.y+self.height<=brick.gety()+brick.height and self.y>=brick.gety())): if(self.v_x==0): continue # collision with left or right edge of brick v_x = -self.v_x check = 1 if(check): if(self.thru==0): self.v_y = v_y self.v_x = v_x else: if(brick.strength == 100): # exploding brick.reduce_strength(self.v_x, self.v_y) break if(self.fire==0): brick.break_it(self.v_x, self.v_y) else: brick.fire(self.v_x, self.v_y) break if(brick.strength != -1): if(self.fire): brick.fire(self.v_x, self.v_y) else: brick.reduce_strength(self.v_x, self.v_y) else: if(self.fire): brick.fire(self.v_x, self.v_y) break if(check): os.system('aplay -q ./sounds/ball_brick.wav&') return check=0 for brick in globalVar.obj_bricks: if(brick.is_broken()): continue if((self.x+self.width)==brick.getx() and self.v_x>0): # top-left or bottom-left collision possible if(self.y==brick.gety()+brick.height and self.v_y<0): # bottom-left collision v_y = -self.v_y v_x = -self.v_x # self.v_x = -self.v_x check = 1 elif(self.y+self.height==brick.gety() 
and self.v_y>0): # top-left collision # self.v_y = -self.v_y v_x = -self.v_x v_y = -self.v_y check = 1 elif(self.x==(brick.getx()+brick.width) and self.v_x<0): # top-right or bottom-right collision possible if(self.y==brick.gety()+brick.height and self.v_y<0): # bottom-right collision v_y = -self.v_y v_x = -self.v_x # self.v_x = -self.v_x check = 1 elif(self.y+self.height==brick.gety() and self.v_y>0): # top-right collision # self.v_y = -self.v_y v_x = -self.v_x v_y = -self.v_y check = 1 if(check): # ball-brick collision occurred, reduce brick strength now os.system('aplay -q ./sounds/ball_brick.wav&') if(self.thru==0): self.v_y = v_y self.v_x = v_x else: # if thru ball power up is activated, destroy bricks if(brick.strength == 100): # exploding brick brick.reduce_strength(self.v_x, self.v_y) break if(self.fire==0): brick.break_it(self.v_x, self.v_y) else: # if fire ball power up is activated, destroy bricks exploding bricks style brick.fire(self.v_x, self.v_y) break if(brick.strength != -1): if(self.fire): brick.fire(self.v_x, self.v_y) else: brick.reduce_strength(self.v_x, self.v_y) else: # unbreakable brick breaks only if the ball is fireball if(self.fire): brick.fire(self.v_x, self.v_y) break if(check): return check=0 for brick in globalVar.obj_bricks: if(brick.is_broken()): continue if(self.x+self.width<brick.getx() or self.x>brick.getx()+brick.width or self.y+self.height<brick.gety() or self.y>brick.gety()+brick.height): x = self.x+self.v_x y = self.y + self.v_y for b_x in range(brick.getx(),brick.getx()+brick.width): for b_y in range(brick.gety(),brick.gety()+brick.height): if(x==b_x and y==b_y): if(self.v_x>0 and self.v_y>0): # approaching from north west if(x-brick.getx()<y-brick.gety()): v_x = -self.v_x elif(x-brick.getx()==y-brick.gety()): v_x = -self.v_x v_y = -self.v_y else: v_y = -self.v_y elif(self.v_x<0 and self.v_y>0): # approaching from north east if(brick.getx()+brick.width-x<y-brick.gety()): v_x = -self.v_x 
elif(brick.getx()+brick.width-x==y-brick.gety()): v_x = -self.v_x v_y = -self.v_y else: v_y = -self.v_y elif(self.v_x>0 and self.v_y<0): # approaching from south west if(x-brick.getx()<brick.gety()+brick.height-y): v_x = -self.v_x elif(x-brick.getx()==brick.gety()+brick.height-y): v_x = -self.v_x v_y = -self.v_y else: v_y = -self.v_y elif(self.v_x<0 and self.v_y<0): # approaching from south east if(brick.getx()+brick.width-x<brick.gety()+brick.height-y): v_x = -self.v_x elif(brick.getx()+brick.width-x==brick.gety()+brick.height-y): v_x = -self.v_x v_y = -self.v_y else: v_y = -self.v_y check = 1 break if(check==1): break if(check): os.system('aplay -q ./sounds/ball_brick.wav&') if(self.thru==0): self.x += self.v_x self.y += self.v_y self.v_y = v_y self.v_x = v_x else: if(brick.strength == 100): brick.reduce_strength(self.v_x, self.v_y) break if(self.fire==0): brick.break_it(self.v_x, self.v_y) else: brick.fire(self.v_x, self.v_y) break if(brick.strength != -1): if(self.fire): brick.fire(self.v_x, self.v_y) else: brick.reduce_strength(self.v_x, self.v_y) else: if(self.fire): brick.fire(self.v_x, self.v_y) break def kill_ball(self): p = globalVar.paddle if(p==None): return os.system('aplay -q ./sounds/lose_life.wav&') if(globalVar.ALT_LIVES>0): globalVar.ALT_LIVES -= 1 self.dead = 1 else: l = globalVar.LIVES globalVar.LIVES = l-1 for power_up in globalVar.power_ups: if(power_up.is_activated()): power_up.deactivate_power_up() self.x = random.randint(p.x, p.x+p.width-self.width) self.y = p.y-self.height self.moving = 0 self.v_y = -2 self.v_x = 0 self.thru = 0 self.fast = 0 self.fire = 0 def set_props(self, x, y, vx, vy): self.x = x self.y = y self.v_x = vx self.v_y = vy def is_moving(self): return self.moving def set_moving(self): self.moving = 1 if(not globalVar.paddle.is_sticky()): self.set_vel() def set_vel(self, vy=-1): paddle = globalVar.paddle cen = paddle.width//2 p1 = cen//2 p3 = cen+p1 if(self.x - paddle.x<=p1): self.v_x = self.v_x - 2 elif(self.x - 
paddle.x<cen): self.v_x = self.v_x - 1 elif(self.x - paddle.x==cen): self.v_x = self.v_x elif(self.x - paddle.x<=p3): self.v_x += 1 elif(self.x - paddle.x>p3): self.v_x += 2 self.v_y = vy def incr_vel(self): if(self.fast): return self.fast = 1 if(self.v_y > 0): self.v_y += 1 elif(self.v_y < 0 or not (self.is_moving())): self.v_y -= 1 def decr_vel(self): if(self.fast): self.fast =0 if(self.v_y > 0): self.v_y -= 1 elif(self.v_y < 0): self.v_y += 1 def getArr(self, colour, symbol, arr): y = self.y h = self.height w = self.width x = self.x for i in range(y, y+h): for j in range(x,x+w): arr[i][j] = (colour +symbol + Style.RESET_ALL) return arr def set_thru(self): self.thru = 1 def unset_thru(self): self.thru = 0 def set_fire(self): self.fire = 1 def unset_fire(self): self.fire = 0
27.464674
264
0.610567
3.21875
1bde984571b7f4f4e48121c1095f28ca435b7f01
6,195
py
Python
PyFile/pyfile.py
chyka-dev/PyFile
a52e69c712c10934bc88c0b75b3f536e12303c83
[ "MIT" ]
null
null
null
PyFile/pyfile.py
chyka-dev/PyFile
a52e69c712c10934bc88c0b75b3f536e12303c83
[ "MIT" ]
null
null
null
PyFile/pyfile.py
chyka-dev/PyFile
a52e69c712c10934bc88c0b75b3f536e12303c83
[ "MIT" ]
null
null
null
# -*- coding:utf-8 -*- import os import six from .pystring import PyString class PyFile(object): """More human-friendly file access interface. Works on Python2 and 3. Usage: file = File(".bashrc") file.write("Hello, world!!") print(file.read()) del file """ class Mode: r = "rb" w = "wb+" a = "ab+" def __init__(self, path, encoding="utf-8"): self.path = path self.encoding = encoding self.mode = None self._fd = None def __del__(self): self.ensure_close() def __str__(self): return "<File object: path={}, encoding={} mode={}".format( self.path, self.encoding, self.mode ) def __iter__(self): self.ensure_open(self.Mode.r) for l in self._fd: yield PyString(l) def statinfo(self): return os.stat(self.path) def size(self): return os.stat(self.path).st_size def top(self): """ Usage: >>> file = File("hello.txt") >>> print(file.read()) hello, world >>> print(file.read()) >>> print(file.top()) >>> print(file.read()) hello, world """ return self.seek(0) def end(self): """ Usage: >>> file = File("hello.txt") >>> print(file.end()) >>> print(file.read()) """ return self.seek(0, 2) def seek(self, *args, **kwargs): """WIP what about mode?""" return self._fd.seek(*args, **kwargs) def truncate(self, *args, **kwargs): """WIP How should I work??""" return self._fd.truncate(*args, **kwargs) def read(self, *args, **kwargs): """ Usage: >>> file = File("hello.txt") >>> print(file.read()) hello, world """ self.ensure_open(self.Mode.r) return PyString(self._fd.read(*args, **kwargs)) def readline(self, *args, **kwargs): self.ensure_open(self.Mode.r) return PyString(self._fd.readline(*args, **kwargs)) def readlines(self, *args, **kwargs): self.ensure_open(self.Mode.r) return (PyString(s) for s in self._fd.readlines(*args, **kwargs)) def write(self, data, *args, **kwargs): """ Usage: >>> file = File("hello.txt") >>> file.write("hello, world") >>> print(file.read()) hello, world """ self.ensure_open(self.Mode.w) return self.__write(data, *args, **kwargs) def writelines(self, seq, *args, **kwargs): 
""" Usage: >>> file = File("hello.txt") >>> file.writelines(["hello", "world"]) >>> print(file.read()) hello world """ self.ensure_open(self.Mode.w) seq = [self.__ensure_nl(line) for line in seq] return self.__writelines(seq, *args, **kwargs) def append(self, data, *args, **kwargs): """ Usage: >>> file = File("hello.txt") >>> print(file.read()) hello >>> file.append(", world") >>> print(file.read()) hello, world """ self.ensure_open(self.Mode.a) return self.__write(data, *args, **kwargs) def appendlines(self, seq, *args, **kwargs): """ Usage: >>> file = File("hello.txt") >>> print(file.read()) hello >>> file.appendlines(["world", "!!"]) >>> print(file.read()) hello world !! """ self.ensure_open(self.Mode.a) seq = [self.__ensure_nl("")] + [self.__ensure_nl(line) for line in seq] return self.__writelines(seq, *args, **kwargs) def open(self, mode, *args, **kwargs): """ An alias of ensure_open Usage: >>> file = File(path, encoding).open(File.Mode.R) """ return self.ensure_open(mode, *args, **kwargs) def close(self, *args, **kwargs): """An alias of ensure_close Usage: >>> file.close() """ self.ensure_close(*args, **kwargs) def ensure_open(self, mode, *args, **kwargs): """ Open the file with mode `mode` if not opend. Usually you don't have to use this method directly. Use read, write, append,.. methods instead. Usage: >>> file.ensure_open(File.Mode.R) """ if self._fd and self.mode == mode: return self self.mode = mode self._fd = self.__open( self.path, mode, *args, **kwargs ) return self def ensure_close(self, *args, **kwargs): """ Close the file if opened. Usually you don't have to use this method directly. Usage: >>> file.ensure_close() """ if not self._fd: return self._fd.close(*args, **kwargs) self._fd = None self.mode = None return def __ensure_nl(self, string): """Append new line chars to the end of `string`. 
Usage: >>> assert self.__ensure_nl("") == "\n" >>> assert self.__ensure_nl("hello") == "hello\n" """ if not string.endswith("\n"): string += "\n" return string def __write(self, data, *args, **kwargs): """Use this instead of fd.write. """ data = PyString(data, self.encoding) self._fd.write(data.encode(self.encoding), *args, **kwargs) def __writelines(self, seq, *args, **kwargs): """Use this instead of fd.writelines. """ seq = [PyString(s, self.encoding).encode(self.encoding) for s in seq] self._fd.writelines(seq, *args, **kwargs) def __open(self, *args, **kwargs): # In python2, open doesn't accept `encoding`. # In python3, `encoding` cannot be specified on binary mode. if 'encoding' in kwargs: del kwargs['encoding'] return open(*args, **kwargs)
26.934783
80
0.501211
3.375
3e727e35fdc429e6a7c905d1f46288b990a809bb
1,038
swift
Swift
Sources/ValueProvider/Miscellanious.swift
KizzyCode/persistentstate-swift
22566951b25fe6dd5e0a2d109bda1492b9f6d032
[ "BSD-2-Clause", "MIT" ]
null
null
null
Sources/ValueProvider/Miscellanious.swift
KizzyCode/persistentstate-swift
22566951b25fe6dd5e0a2d109bda1492b9f6d032
[ "BSD-2-Clause", "MIT" ]
null
null
null
Sources/ValueProvider/Miscellanious.swift
KizzyCode/persistentstate-swift
22566951b25fe6dd5e0a2d109bda1492b9f6d032
[ "BSD-2-Clause", "MIT" ]
null
null
null
import Foundation /// A type that implements a default constructor public protocol Default { /// Creates a new default instance of `Self` init() } /// A mapped value/dictionary ID public protocol ID { /// The ID bytes var bytes: Data { get } } extension Data: ID { public var bytes: Data { self } } extension String: ID { public var bytes: Data { self.data(using: .utf8)! } } /// A type that defines methods for encoding public protocol ValueEncoder { /// Encodes an instance of the indicated type /// /// - Parameter value: The instance to encode func encode<T: Encodable>(_ value: T) throws -> Data } extension JSONEncoder: ValueEncoder {} /// A type that defines methods for decoding public protocol ValueDecoder { /// Decodes an instance of the indicated type /// /// - Parameters: /// - type: The target type /// - data: The data to decode func decode<T: Decodable>(_ type: T.Type, from data: Data) throws -> T } extension JSONDecoder: ValueDecoder {}
23.590909
74
0.65896
3.109375
84c5ee02cb1cd393b85356f2a49149b3e3d7c9cc
2,037
h
C
Project8/NodeList.h
nicky189/cs202
ecfb9b92e094bfa29102e586ffd615d719b45532
[ "MIT" ]
null
null
null
Project8/NodeList.h
nicky189/cs202
ecfb9b92e094bfa29102e586ffd615d719b45532
[ "MIT" ]
null
null
null
Project8/NodeList.h
nicky189/cs202
ecfb9b92e094bfa29102e586ffd615d719b45532
[ "MIT" ]
null
null
null
#ifndef NODELIST_H_ #define NODELIST_H_ #include "DataType.h" class Node{ friend class NodeList; //allows direct accessing of link and data from class NodeList public: Node() : m_next( NULL ) { } Node(const DataType & data, Node * next = NULL) : m_next( next ), m_data( data ) { } Node(const Node & other) : m_next( other.m_next ), m_data( other.m_data ) { } DataType & data(){ //gets non-const reference, can be used to modify value of underlying data //return const_cast<DataType &>(static_cast<const Node &>(*this).getData()); //an alternative implementation, just for studying reference return m_data; } const DataType & data() const{ //gets const reference, can be used to access value of underlying data return m_data; } private: Node * m_next; DataType m_data; }; class NodeList{ friend std::ostream & operator<<(std::ostream & os, //(i) const NodeList & nodeList); public: NodeList(); //(1) NodeList(size_t count, const DataType & value); //(2) NodeList(const NodeList & other); //(3) ~NodeList(); //(4) NodeList & operator= (const NodeList & rhs); //(5) Node * front(); //(6) Node * back(); //(7) Node * find(const DataType & target, //(8) Node * & previous, const Node * start = NULL); Node * insertAfter(const DataType & target, //(9) const DataType & value); Node * insertBefore(const DataType & target, //(10) const DataType & value); Node * erase(const DataType & target); //(11) DataType & operator[] (size_t position); //(12a) const DataType & operator[] (size_t position) const; //(12b) size_t size() const; //(13) bool empty() const; //(14) void clear(); //(15) private: Node * m_head; }; #endif //NODELIST_H_
26.802632
144
0.552283
3.078125
0a89d3a27590be15490c3a27155eb9c2ff4bd500
2,074
kt
Kotlin
collaboration-suite-utils/src/main/java/com/kaleyra/collaboration_suite_utils/network_observer/InternetObserver.kt
Bandyer/Kaleyra-Android-Collaboration-Suite-Utils
216049e9b5e234e441c234e121d689c493591373
[ "Apache-2.0", "MIT" ]
null
null
null
collaboration-suite-utils/src/main/java/com/kaleyra/collaboration_suite_utils/network_observer/InternetObserver.kt
Bandyer/Kaleyra-Android-Collaboration-Suite-Utils
216049e9b5e234e441c234e121d689c493591373
[ "Apache-2.0", "MIT" ]
null
null
null
collaboration-suite-utils/src/main/java/com/kaleyra/collaboration_suite_utils/network_observer/InternetObserver.kt
Bandyer/Kaleyra-Android-Collaboration-Suite-Utils
216049e9b5e234e441c234e121d689c493591373
[ "Apache-2.0", "MIT" ]
null
null
null
/* * Copyright (C) 2022 Kaleyra S.p.a. All Rights Reserved. * See LICENSE.txt for licensing information */ package com.kaleyra.collaboration_suite_utils.network_observer import android.Manifest import androidx.annotation.RequiresPermission import kotlinx.coroutines.* import kotlinx.coroutines.channels.BufferOverflow import kotlinx.coroutines.flow.* import java.net.HttpURLConnection import java.net.URL /** * Utility class which allows to observe the internet state. It tells if there is actually internet connection. */ class InternetObserver @RequiresPermission(Manifest.permission.INTERNET) constructor(private val intervalInMs: Long) { private val isConnectedFlow: MutableSharedFlow<Boolean> = MutableSharedFlow(onBufferOverflow = BufferOverflow.DROP_OLDEST, replay = 1) private var job: Job = CoroutineScope(Dispatchers.IO).launch { while (true) { isConnectedFlow.emit(isConnected()) delay(intervalInMs) } } /** * Call to observe the internet state. It returns true if internet is reachable, false otherwise * * @return SharedFlow<Boolean> */ fun observe(): Flow<Boolean> = isConnectedFlow.distinctUntilChanged() /** * Stop the observer */ fun stop() = job.cancel() private fun isConnected(): Boolean { var urlConnection: HttpURLConnection? = null val result = kotlin.runCatching { urlConnection = initConnection() urlConnection!!.responseCode == 204 }.getOrNull() ?: false urlConnection?.disconnect() return result } private fun initConnection() = (URL(HOST).openConnection() as HttpURLConnection).apply { connectTimeout = CONNECT_TIMEOUT readTimeout = READ_TIMEOUT instanceFollowRedirects = false useCaches = false } private companion object { const val HOST = "https://clients3.google.com/generate_204" const val CONNECT_TIMEOUT = 10000 const val READ_TIMEOUT = 10000 } }
32.40625
118
0.682739
3
2f3550f45eaf39f60243ac6016b0a728b6dafd98
8,420
php
PHP
src/DbAdmin/Traits/TableSelectTrait.php
lagdo/jaxon-dbadmin
fb4389223d1f7ad9aba0cd6e8256c95275942661
[ "Apache-2.0" ]
null
null
null
src/DbAdmin/Traits/TableSelectTrait.php
lagdo/jaxon-dbadmin
fb4389223d1f7ad9aba0cd6e8256c95275942661
[ "Apache-2.0" ]
null
null
null
src/DbAdmin/Traits/TableSelectTrait.php
lagdo/jaxon-dbadmin
fb4389223d1f7ad9aba0cd6e8256c95275942661
[ "Apache-2.0" ]
null
null
null
<?php namespace Lagdo\DbAdmin\DbAdmin\Traits; use Lagdo\DbAdmin\Driver\Entity\TableSelectEntity; use function intval; use function count; use function html_entity_decode; use function strip_tags; use function array_flip; use function in_array; use function substr; trait TableSelectTrait { /** * Print columns box in select * @param array $select Result of processSelectColumns()[0] * @param array $columns Selectable columns * @param array $options * @return array */ private function getColumnsOptions(array $select, array $columns, array $options): array { return [ 'select' => $select, 'values' => (array)$options["columns"], 'columns' => $columns, 'functions' => $this->driver->functions(), 'grouping' => $this->driver->grouping(), ]; } /** * Print search box in select * * @param array $columns Selectable columns * @param array $indexes * @param array $options * * @return array */ private function getFiltersOptions(array $columns, array $indexes, array $options): array { $fulltexts = []; foreach ($indexes as $i => $index) { $fulltexts[$i] = $index->type == "FULLTEXT" ? $this->util->html($options["fulltext"][$i]) : ''; } return [ // 'where' => $where, 'values' => (array)$options["where"], 'columns' => $columns, 'indexes' => $indexes, 'operators' => $this->driver->operators(), 'fulltexts' => $fulltexts, ]; } /** * Print order box in select * * @param array $columns Selectable columns * @param array $options * * @return array */ private function getSortingOptions(array $columns, array $options): array { $values = []; $descs = (array)$options["desc"]; foreach ((array)$options["order"] as $key => $value) { $values[] = [ 'col' => $value, 'desc' => $descs[$key] ?? 
0, ]; } return [ // 'order' => $order, 'values' => $values, 'columns' => $columns, ]; } /** * Print limit box in select * * @param string $limit Result of processSelectLimit() * * @return array */ private function getLimitOptions(string $limit): array { return ['value' => $this->util->html($limit)]; } /** * Print text length box in select * * @param int $textLength Result of processSelectLength() * * @return array */ private function getLengthOptions(int $textLength): array { return [ 'value' => $textLength === 0 ? 0 : $this->util->html($textLength), ]; } /** * Print action box in select * * @param array $indexes * * @return array */ // private function getActionOptions(array $indexes) // { // $columns = []; // foreach ($indexes as $index) { // $current_key = \reset($index->columns); // if ($index->type != "FULLTEXT" && $current_key) { // $columns[$current_key] = 1; // } // } // $columns[""] = 1; // return ['columns' => $columns]; // } /** * Print command box in select * * @return bool whether to print default commands */ // private function getCommandOptions() // { // return !$this->driver->isInformationSchema($this->driver->database()); // } /** * Print import box in select * * @return bool whether to print default import */ // private function getImportOptions() // { // return !$this->driver->isInformationSchema($this->driver->database()); // } /** * Print extra text in the end of a select form * * @param array $emailFields Fields holding e-mails * @param array $columns Selectable columns * * @return array */ // private function getEmailOptions(array $emailFields, array $columns) // { // } /** * @param array $queryOptions * * @return int */ private function setDefaultOptions(array &$queryOptions): int { $defaultOptions = [ 'columns' => [], 'where' => [], 'order' => [], 'desc' => [], 'fulltext' => [], 'limit' => '50', 'text_length' => '100', 'page' => '1', ]; foreach ($defaultOptions as $name => $value) { if (!isset($queryOptions[$name])) { $queryOptions[$name] = 
$value; } } $page = intval($queryOptions['page']); if ($page > 0) { $page -= 1; // Page numbers start at 0 here, instead of 1. } $queryOptions['page'] = $page; return $page; } /** * @param array $fields * * @return array */ private function getFieldsOptions(array $fields): array { $rights = []; // privilege => 0 $columns = []; // selectable columns $textLength = 0; foreach ($fields as $key => $field) { $name = $this->util->fieldName($field); if (isset($field->privileges["select"]) && $name != "") { $columns[$key] = html_entity_decode(strip_tags($name), ENT_QUOTES); if ($this->util->isShortable($field)) { $textLength = $this->util->processSelectLength(); } } $rights[] = $field->privileges; } return [$rights, $columns, $textLength]; } /** * @param array $indexes * @param array $select * @param mixed $tableStatus * * @return array */ private function setPrimaryKey(array &$indexes, array $select, $tableStatus): array { $primary = null; $unselected = []; foreach ($indexes as $index) { if ($index->type == "PRIMARY") { $primary = array_flip($index->columns); $unselected = ($select ? 
$primary : []); foreach ($unselected as $key => $val) { if (in_array($this->driver->escapeId($key), $select)) { unset($unselected[$key]); } } break; } } $oid = $tableStatus->oid; if ($oid && !$primary) { /*$primary = */$unselected = [$oid => 0]; $indexes[] = ["type" => "PRIMARY", "columns" => [$oid]]; } return $unselected; } /** * @param string $table * @param array $columns * @param array $fields * @param array $select * @param array $group * @param array $where * @param array $order * @param array $unselected * @param int $limit * @param int $page * * @return TableSelectEntity */ private function getSelectEntity(string $table, array $columns, array $fields, array $select, array $group, array $where, array $order, array $unselected, int $limit, int $page): TableSelectEntity { $select2 = $select; $group2 = $group; if (empty($select2)) { $select2[] = "*"; $convert_fields = $this->driver->convertFields($columns, $fields, $select); if ($convert_fields) { $select2[] = substr($convert_fields, 2); } } foreach ($select as $key => $val) { $field = $fields[$this->driver->unescapeId($val)] ?? null; if ($field && ($as = $this->driver->convertField($field))) { $select2[$key] = "$as AS $val"; } } $isGroup = count($group) < count($select); if (!$isGroup && !empty($unselected)) { foreach ($unselected as $key => $val) { $select2[] = $this->driver->escapeId($key); if (!empty($group2)) { $group2[] = $this->driver->escapeId($key); } } } // From driver.inc.php return new TableSelectEntity($table, $select2, $where, $group2, $order, $limit, $page); } }
28.542373
139
0.493349
3.015625
a1e4f9de1a2c3d177810684e3270894543c2d4e4
1,124
c
C
cours9/semaphore.c
Menelau/LOG645-Course-examples
ef35b7c10daa2610dd1b4da10658b4acb623d7b4
[ "MIT" ]
null
null
null
cours9/semaphore.c
Menelau/LOG645-Course-examples
ef35b7c10daa2610dd1b4da10658b4acb623d7b4
[ "MIT" ]
null
null
null
cours9/semaphore.c
Menelau/LOG645-Course-examples
ef35b7c10daa2610dd1b4da10658b4acb623d7b4
[ "MIT" ]
2
2021-09-23T15:56:33.000Z
2021-12-15T12:15:23.000Z
#include <pthread.h>
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <string.h>
#include <semaphore.h>

long thread_count;
sem_t semaphore;

/*
 * Thread entry point: acquires one semaphore token, simulates a long
 * computation, then releases the token so another waiting thread may run.
 * The semaphore limits parallelism to 4 concurrently running threads.
 */
void* Semaphore_Function(void* args) {
    long rank = (long) args;

    sem_wait(&semaphore);
    printf("Hello from thread %ld.\n", rank);
    sleep(10); // simulating intensive calculation...
    printf("Thread %ld returning token.\n", rank);
    sem_post(&semaphore);

    return NULL;
}

int main(int argc, char *argv[]) {
    /* BUGFIX: the original dereferenced argv[1] without checking argc,
     * crashing (or reading garbage) when no thread count was given. */
    if (argc < 2) {
        fprintf(stderr, "Usage: %s <thread_count>\n", argv[0]);
        return EXIT_FAILURE;
    }
    thread_count = strtol(argv[1], NULL, 10);
    if (thread_count <= 0) {
        fprintf(stderr, "thread_count must be a positive integer.\n");
        return EXIT_FAILURE;
    }

    /* BUGFIX: malloc result was previously used unchecked. */
    pthread_t *thread_handles = malloc(thread_count * sizeof(pthread_t));
    if (thread_handles == NULL) {
        fprintf(stderr, "Failed to allocate thread handles.\n");
        return EXIT_FAILURE;
    }

    // initializing a semaphore with 4 "tokens" or threads that can run in parallel
    sem_init(&semaphore, 0, 4);

    for (long thread = 0; thread < thread_count; thread++) {
        pthread_create(&thread_handles[thread], NULL, &Semaphore_Function,
                       (void*) thread);
    }
    /* BUGFIX: join loop used `int` while the create loop used `long`;
     * made both `long` for consistency with thread_count's type. */
    for (long thread = 0; thread < thread_count; thread++) {
        pthread_join(thread_handles[thread], NULL);
    }

    // destroy semaphore
    sem_destroy(&semaphore);
    free(thread_handles);
    return 0;
}
24.977778
91
0.662811
3.203125
163b389e91b9ce381ce43b77bd922f4308b69cbd
913
ts
TypeScript
app/components/menu-bar.component.ts
YaelMendes/GeoMapStories
df744aa757079074ddff3af8240d2f4905a67c54
[ "MIT" ]
null
null
null
app/components/menu-bar.component.ts
YaelMendes/GeoMapStories
df744aa757079074ddff3af8240d2f4905a67c54
[ "MIT" ]
null
null
null
app/components/menu-bar.component.ts
YaelMendes/GeoMapStories
df744aa757079074ddff3af8240d2f4905a67c54
[ "MIT" ]
null
null
null
import {Component, Input, Injectable} from '@angular/core'; import {User} from '../objects/user'; import {DataService} from '../services/data.service'; import {VARIABLES} from '../AppSettings'; @Component({ selector: 'menu-bar', templateUrl: '../html/menu-bar.component.html', styleUrls: ['../css/menu-bar.component.css'] }) export class MenuBarComponent { model: User = new User('yafou'); connected: boolean=false; constructor(private dataService: DataService) { } onSubmit() { console.log('onSubmit is called ! model=' + this.model); this.tryConnect(this.model); } private tryConnect(model: User) { if (VARIABLES.MODE_TEST) { this.connected=true; this.model = this.retrieveUserInfos(); } else { //TODO: call backend to check user/pwd and retrieve infos/stories/.... } } private retrieveUserInfos(): User { return this.model; } }
20.75
76
0.659365
3.078125
407cc58f6332cad601adf4604b758ab8ec38d70e
2,396
py
Python
NVLL/analysis/word_freq.py
jennhu/vmf_vae_nlp
95a39fa9f7a0659e432475e8dfb9a46e305d53b7
[ "MIT" ]
159
2018-08-31T15:57:36.000Z
2022-03-27T15:31:38.000Z
NVLL/analysis/word_freq.py
jennhu/vmf_vae_nlp
95a39fa9f7a0659e432475e8dfb9a46e305d53b7
[ "MIT" ]
9
2018-10-11T15:58:50.000Z
2019-04-16T03:13:33.000Z
NVLL/analysis/word_freq.py
jennhu/vmf_vae_nlp
95a39fa9f7a0659e432475e8dfb9a46e305d53b7
[ "MIT" ]
21
2018-09-01T17:57:20.000Z
2021-12-17T03:31:01.000Z
import os


def count(dic, fname):
    """Accumulate word frequencies from *fname* into *dic* (mutated in place).

    Each line is split on single spaces; sentences whose ``<unk>`` ratio
    exceeds 5% are discarded. Returns ``(dic, filtered_sents)`` where
    ``filtered_sents`` is the list of kept sentences as token lists.
    """
    with open(fname, 'r') as fd:
        lines = fd.read().splitlines()
    filtered_sents = []
    for l in lines:
        words = l.split(" ")
        _ratio = comp_unk_ratio(words)
        if _ratio <= 0.05:
            filtered_sents.append(words)
            for w in words:
                if w in dic:
                    dic[w] += 1
                else:
                    dic[w] = 1
    return dic, filtered_sents


def read_sent():
    pass


def comp_unk_ratio(sent):
    """Return the fraction of ``<unk>`` tokens in a tokenized sentence.

    A tiny epsilon keeps the division safe for an empty token list.
    """
    total = len(sent) + 0.000001
    cnt = 0
    for w in sent:
        if w == '<unk>':
            cnt += 1
    return cnt / total


def comp_ratio():
    pass


def generate_based_on_word_freq():
    count_word_freq()


def generate_based_on_sentiment():
    pass


def count_word_freq():
    """Rank test sentences by rare-word ratio and write them in 5 buckets.

    Builds a vocabulary from valid.txt + test.txt, takes the 10k least
    frequent words as "uncommon", scores every kept test sentence by its
    uncommon-word ratio, and writes the sorted sentences into
    ``test-rare-<piece>.txt`` files.
    """
    d = {}
    os.chdir("../../data/yelp")
    d, _ = count(d, "valid.txt")
    d, filtered_sents_test = count(d, "test.txt")
    sorted_d = sorted(d, key=d.get, reverse=True)
    print("Len of trimmed vocab {}".format(len(sorted_d)))
    print("Num of Test samples after trimming {}".format(len(filtered_sents_test)))
    uncommon = sorted_d[-10000:]
    print(uncommon)
    # PERF FIX: membership tests against a 10k-element *list* inside the
    # per-word loop were O(n) each (accidental O(n^2) overall); a set makes
    # them O(1) without changing results.
    uncommon_set = set(uncommon)
    divide = 5
    every = int(len(filtered_sents_test) / divide)
    sent_dictionary = {}
    for sent in filtered_sents_test:
        total = len(sent)
        cnt = 0.
        for w in sent:
            if w in uncommon_set:
                cnt += 1
        sent_dictionary[" ".join(sent)] = cnt / total
    sorted_sents = sorted(sent_dictionary, key=sent_dictionary.get, reverse=True)
    for piece in range(divide):
        start = int(piece * every)
        end = int((piece + 1) * every)
        tmp_sents = sorted_sents[start:end]
        with open("test-rare-" + str(piece) + ".txt", 'w') as fd:
            fd.write("\n".join(tmp_sents))


if __name__ == "__main__":
    bank_size = 1000
    # Generate 2 set of sentences.
    # Before beginning
    # if a sentence has more than 10% UNK, remove it.
    ############
    # Based on WordFreq   Vocab size=15K
    # Divide
    # Top 1K sample with largest Common Word Ratio (common word= top3K freq word)
    # Top 1K sample with largest Uncommon Word Ratio (uncommon word= top3K infreq word)
    generate_based_on_word_freq()
    ############
    # Based on Sentiment (sample from 5star and 1star)
    #############
26.32967
87
0.563022
3.390625
f082f97017dd469928617efda67490dacef988de
1,135
js
JavaScript
assets/js/rotation.js
boardingschool/boardingschool.github.io
7c8aee829b294bf8ab9f1f02fb7efee5859b0382
[ "CC-BY-3.0" ]
null
null
null
assets/js/rotation.js
boardingschool/boardingschool.github.io
7c8aee829b294bf8ab9f1f02fb7efee5859b0382
[ "CC-BY-3.0" ]
null
null
null
assets/js/rotation.js
boardingschool/boardingschool.github.io
7c8aee829b294bf8ab9f1f02fb7efee5859b0382
[ "CC-BY-3.0" ]
null
null
null
// Cross-fading image rotator for elements with class .img-item.
$(window).on('load', function() { // start after HTML, images have loaded
  // FIX: $(window).load(fn) was removed in jQuery 3; .on('load', fn) is the
  // supported equivalent and behaves the same on jQuery 1.x/2.x.
  var InfiniteRotator = {
    init: function() {
      //initial fade-in time (in milliseconds)
      var initialFadeIn = 0;
      //interval between items (in milliseconds)
      var itemInterval = 5000;
      //cross-fade time (in milliseconds)
      var fadeTime = 2500;
      //count number of items
      var numberOfItems = $('.img-item').length;
      //set current item
      var currentItem = 0;

      // ROBUSTNESS: with no items there is nothing to rotate; bail out
      // instead of starting a do-nothing interval.
      if (numberOfItems === 0) {
        return;
      }

      //show first item
      $('.img-item').eq(currentItem).fadeIn(initialFadeIn);

      //loop through the items
      var infiniteLoop = setInterval(function() {
        $('.img-item').eq(currentItem).fadeOut(fadeTime);
        // Advance with wrap-around (replaces the manual if/else reset).
        currentItem = (currentItem + 1) % numberOfItems;
        $('.img-item').eq(currentItem).fadeIn(fadeTime);
      }, itemInterval);
    }
  };
  InfiniteRotator.init();
});
27.02381
66
0.489868
3.265625
d272b67bfa1137eb6c3871e24aadb62b6fb72ffa
12,498
php
PHP
parser.php
hylandry/opensimstuff
18f5fc5d2399b14d464066b1754b096ce8db9c2d
[ "MIT" ]
null
null
null
parser.php
hylandry/opensimstuff
18f5fc5d2399b14d464066b1754b096ce8db9c2d
[ "MIT" ]
1
2020-06-09T17:27:35.000Z
2020-06-09T17:27:35.000Z
parser.php
hylandry/opensimstuff
18f5fc5d2399b14d464066b1754b096ce8db9c2d
[ "MIT" ]
null
null
null
<?php
//
// Modified for PHP7 and MySQLi
//
// This needs php-curl installed
//
include("databaseinfo.php");

//Supress all Warnings/Errors
//error_reporting(0);

$now = time();

//
// Search DB
//
//mysqli_select_db ($db, $DB_NAME);
if (!isset($db))
    $db = mysqli_connect($DB_HOST, $DB_USER, $DB_PASS, $DB_NAME);
if (mysqli_connect_errno())
{
    printf("Connect failed: %s\n", mysqli_connect_error());
    exit();
}

//
// HTTP GET $url from $host:$port via curl.
// Returns the response body, or "" on any curl error (incl. timeout).
//
function GetURL($host, $port, $url)
{
    $url = "http://$host:$port/$url";
    $ch = curl_init();
    curl_setopt($ch, CURLOPT_URL, $url);
    curl_setopt($ch, CURLOPT_RETURNTRANSFER, 1);
    curl_setopt($ch, CURLOPT_CONNECTTIMEOUT, 10);
    curl_setopt($ch, CURLOPT_TIMEOUT, 30);
    $data = curl_exec($ch);
    if (curl_errno($ch) == 0)
    {
        curl_close($ch);
        return $data;
    }
    curl_close($ch);
    return "";
}

//
// Poll one registered host's collector endpoint, update its bookkeeping row,
// and hand any returned XML to parse().
//
function CheckHost($hostname, $port)
{
    // BUGFIX: $db and the $DB_* settings were used here without being
    // declared global, so $db was always undefined in this scope and the
    // fallback connect below could never see the credentials.
    global $now, $db, $DB_HOST, $DB_USER, $DB_PASS, $DB_NAME;

    if (!isset($db))
        $db = mysqli_connect($DB_HOST, $DB_USER, $DB_PASS, $DB_NAME);

    $xml = GetURL($hostname, $port, "?method=collector");
    if ($xml == "") //No data was retrieved? (CURL may have timed out)
    {
        // NOTE: this is deliberately a SQL *expression* string, so the UPDATE
        // below increments the column server-side.
        $failcounter = "failcounter + 1";
        echo "fail: ".$failcounter."<br>\n";
    }
    else
        $failcounter = "0";

    //Update nextcheck to be 10 minutes from now. The current OS instance
    //won't be checked again until at least this much time has gone by.
    $next = $now + 600;

    mysqli_query($db, "UPDATE hostsregister SET nextcheck = $next," .
        " checked = 1, failcounter = " . $failcounter .
        " WHERE host = '" . mysqli_real_escape_string($db, $hostname) . "'" .
        " AND port = '" . mysqli_real_escape_string($db, $port) . "'");

    if ($xml != "")
        parse($hostname, $port, $xml);
}

//
// Parse one collector XML document: refresh the host's nextcheck, then
// replace the host's regions/parcels/objects rows with the reported data.
//
function parse($hostname, $port, $xml)
{
    // BUGFIX: the $DB_* settings were missing from the global list, making
    // the fallback connect below ineffective.
    global $now, $db, $DB_HOST, $DB_USER, $DB_PASS, $DB_NAME;

    if (!isset($db))
        $db = mysqli_connect($DB_HOST, $DB_USER, $DB_PASS, $DB_NAME);

    ///////////////////////////////////////////////////////////////////////
    //
    // Search engine sim scanner
    //

    //
    // Load XML doc from URL
    //
    $objDOM = new DOMDocument();
    $objDOM->resolveExternals = false;

    //Don't try and parse if XML is invalid or we got an HTML 404 error.
    if ($objDOM->loadXML($xml) == False)
        return;

    //
    // Get the region data to update
    //
    $regiondata = $objDOM->getElementsByTagName("regiondata");

    //If returned length is 0, collector method may have returned an error
    if ($regiondata->length == 0)
        return;

    $regiondata = $regiondata->item(0);

    //
    // Update nextcheck so this host entry won't be checked again until after
    // the DataSnapshot module has generated a new set of data to be parsed.
    //
    $expire = $regiondata->getElementsByTagName("expire")->item(0)->nodeValue;
    $next = $now + $expire;
    $updater = mysqli_query($db, "UPDATE hostsregister SET nextcheck = $next " .
        "WHERE host = '" . mysqli_real_escape_string($db, $hostname) . "' AND " .
        "port = '" . mysqli_real_escape_string($db, $port) . "'");

    //
    // Get the region data to be saved in the database
    //
    $regionlist = $regiondata->getElementsByTagName("region");

    foreach ($regionlist as $region)
    {
        $regioncategory = $region->getAttributeNode("category")->nodeValue;

        //
        // Start reading the Region info
        //
        $info = $region->getElementsByTagName("info")->item(0);
        $regionuuid = $info->getElementsByTagName("uuid")->item(0)->nodeValue;
        $regionname = $info->getElementsByTagName("name")->item(0)->nodeValue;
        $regionhandle = $info->getElementsByTagName("handle")->item(0)->nodeValue;
        $url = $info->getElementsByTagName("url")->item(0)->nodeValue;

        //
        // First, check if we already have a region that is the same
        //
        $check = mysqli_query($db, "SELECT * FROM regions WHERE regionuuid = '" .
            mysqli_real_escape_string($db, $regionuuid) . "'");
        if (mysqli_num_rows($check) > 0)
        {
            // Purge every table keyed on this region before re-inserting.
            mysqli_query($db, "DELETE FROM regions WHERE regionuuid = '" .
                mysqli_real_escape_string($db, $regionuuid) . "'");
            mysqli_query($db, "DELETE FROM parcels WHERE regionuuid = '" .
                mysqli_real_escape_string($db, $regionuuid) . "'");
            mysqli_query($db, "DELETE FROM allparcels WHERE regionUUID = '" .
                mysqli_real_escape_string($db, $regionuuid) . "'");
            mysqli_query($db, "DELETE FROM parcelsales WHERE regionUUID = '" .
                mysqli_real_escape_string($db, $regionuuid) . "'");
            mysqli_query($db, "DELETE FROM objects WHERE regionuuid = '" .
                mysqli_real_escape_string($db, $regionuuid) . "'");
        }

        $data = $region->getElementsByTagName("data")->item(0);
        $estate = $data->getElementsByTagName("estate")->item(0);
        $username = $estate->getElementsByTagName("name")->item(0)->nodeValue;
        $useruuid = $estate->getElementsByTagName("uuid")->item(0)->nodeValue;
        $estateid = $estate->getElementsByTagName("id")->item(0)->nodeValue;

        //
        // Second, add the new info to the database
        //
        $sql = "INSERT INTO regions VALUES('" .
            mysqli_real_escape_string($db, $regionname) . "','" .
            mysqli_real_escape_string($db, $regionuuid) . "','" .
            mysqli_real_escape_string($db, $regionhandle) . "','" .
            mysqli_real_escape_string($db, $url) . "','" .
            mysqli_real_escape_string($db, $username) . "','" .
            mysqli_real_escape_string($db, $useruuid) . "')";
        mysqli_query($db, $sql);

        //
        // Start reading the parcel info
        //
        $parcel = $data->getElementsByTagName("parcel");
        foreach ($parcel as $value)
        {
            $parcelname = $value->getElementsByTagName("name")->item(0)->nodeValue;
            $parceluuid = $value->getElementsByTagName("uuid")->item(0)->nodeValue;
            $infouuid = $value->getElementsByTagName("infouuid")->item(0)->nodeValue;
            $parcellanding = $value->getElementsByTagName("location")->item(0)->nodeValue;
            $parceldescription = $value->getElementsByTagName("description")->item(0)->nodeValue;
            $parcelarea = $value->getElementsByTagName("area")->item(0)->nodeValue;
            $parcelcategory = $value->getAttributeNode("category")->nodeValue;
            $parcelsaleprice = $value->getAttributeNode("salesprice")->nodeValue;
            $dwell = $value->getElementsByTagName("dwell")->item(0)->nodeValue;
            $owner = $value->getElementsByTagName("owner")->item(0);
            $owneruuid = $owner->getElementsByTagName("uuid")->item(0)->nodeValue;

            // Adding support for groups
            $group = $value->getElementsByTagName("group")->item(0);
            if ($group != "")
            {
                $groupuuid = $group->getElementsByTagName("groupuuid")->item(0)->nodeValue;
            }
            else
            {
                $groupuuid = "00000000-0000-0000-0000-000000000000";
            }

            //
            // Check bits on Public, Build, Script
            //
            $parcelforsale = $value->getAttributeNode("forsale")->nodeValue;
            $parceldirectory = $value->getAttributeNode("showinsearch")->nodeValue;
            $parcelbuild = $value->getAttributeNode("build")->nodeValue;
            $parcelscript = $value->getAttributeNode("scripts")->nodeValue;
            $parcelpublic = $value->getAttributeNode("public")->nodeValue;

            //
            // Save
            //
            //$db=mysqli_connect ($DB_HOST, $DB_USER, $DB_PASSWORD);
            $sql = "INSERT INTO allparcels VALUES('" .
                mysqli_real_escape_string($db, $regionuuid) . "','" .
                mysqli_real_escape_string($db, $parcelname) . "','" .
                mysqli_real_escape_string($db, $owneruuid) . "','" .
                mysqli_real_escape_string($db, $groupuuid) . "','" .
                mysqli_real_escape_string($db, $parcellanding) . "','" .
                mysqli_real_escape_string($db, $parceluuid) . "','" .
                mysqli_real_escape_string($db, $infouuid) . "','" .
                mysqli_real_escape_string($db, $parcelarea) . "' )";
            mysqli_query($db, $sql);

            if ($parceldirectory == "true")
            {
                $sql = "INSERT INTO parcels VALUES('" .
                    mysqli_real_escape_string($db, $regionuuid) . "','" .
                    mysqli_real_escape_string($db, $parcelname) . "','" .
                    mysqli_real_escape_string($db, $parceluuid) . "','" .
                    mysqli_real_escape_string($db, $parcellanding) . "','" .
                    mysqli_real_escape_string($db, $parceldescription) . "','" .
                    mysqli_real_escape_string($db, $parcelcategory) . "','" .
                    mysqli_real_escape_string($db, $parcelbuild) . "','" .
                    mysqli_real_escape_string($db, $parcelscript) . "','" .
                    mysqli_real_escape_string($db, $parcelpublic) . "','" .
                    mysqli_real_escape_string($db, $dwell) . "','" .
                    mysqli_real_escape_string($db, $infouuid) . "','" .
                    mysqli_real_escape_string($db, $regioncategory) . "')";
                mysqli_query($db, $sql);
            }

            if ($parcelforsale == "true")
            {
                $sql = "INSERT INTO parcelsales VALUES('" .
                    mysqli_real_escape_string($db, $regionuuid) . "','" .
                    mysqli_real_escape_string($db, $parcelname) . "','" .
                    mysqli_real_escape_string($db, $parceluuid) . "','" .
                    mysqli_real_escape_string($db, $parcelarea) . "','" .
                    mysqli_real_escape_string($db, $parcelsaleprice) . "','" .
                    mysqli_real_escape_string($db, $parcellanding) . "','" .
                    mysqli_real_escape_string($db, $infouuid) . "', '" .
                    mysqli_real_escape_string($db, $dwell) . "', '" .
                    mysqli_real_escape_string($db, $estateid) . "', '" .
                    mysqli_real_escape_string($db, $regioncategory) . "')";
                mysqli_query($db, $sql);
            }
        }

        //
        // Handle objects
        //
        $objects = $data->getElementsByTagName("object");
        foreach ($objects as $value)
        {
            $uuid = $value->getElementsByTagName("uuid")->item(0)->nodeValue;
            $regionuuid = $value->getElementsByTagName("regionuuid")->item(0)->nodeValue;
            $parceluuid = $value->getElementsByTagName("parceluuid")->item(0)->nodeValue;
            $location = $value->getElementsByTagName("location")->item(0)->nodeValue;
            $title = $value->getElementsByTagName("title")->item(0)->nodeValue;
            $description = $value->getElementsByTagName("description")->item(0)->nodeValue;
            // NOTE(review): <flags> is read but never stored — confirm whether
            // the objects table is meant to have a flags column.
            $flags = $value->getElementsByTagName("flags")->item(0)->nodeValue;

            mysqli_query($db, "INSERT INTO objects VALUES('" .
                mysqli_real_escape_string($db, $uuid) . "','" .
                mysqli_real_escape_string($db, $parceluuid) . "','" .
                mysqli_real_escape_string($db, $location) . "','" .
                mysqli_real_escape_string($db, $title) . "','" .
                mysqli_real_escape_string($db, $description) . "','" .
                mysqli_real_escape_string($db, $regionuuid) . "')");
        }
    }
}

// Main driver: check up to 10 not-yet-checked hosts that are due.
$sql = "SELECT host, port FROM hostsregister " .
    "WHERE nextcheck < $now AND checked = 0 LIMIT 0,10";
$jobsearch = mysqli_query($db, $sql);

//
// If the sql query returns no rows, all entries in the hostsregister
// table have been checked. Reset the checked flag and re-run the
// query to select the next set of hosts to be checked.
//
if (mysqli_num_rows($jobsearch) == 0)
{
    mysqli_query($db, "UPDATE hostsregister SET checked = 0");
    $jobsearch = mysqli_query($db, $sql);
}

while ($jobs = mysqli_fetch_row($jobsearch))
    CheckHost($jobs[0], $jobs[1]);
?>
36.976331
97
0.552088
3.0625
c7cfca04a6d46c8657fca251fdef016d7c180a06
7,637
py
Python
src/main.py
FranciscoCharles/doom-fire-simulator
fccd45e5c96d37de00a6979ec00a5e13a668d4d9
[ "MIT" ]
1
2021-05-19T16:12:37.000Z
2021-05-19T16:12:37.000Z
src/main.py
FranciscoCharles/doom-fire-simulator
fccd45e5c96d37de00a6979ec00a5e13a668d4d9
[ "MIT" ]
null
null
null
src/main.py
FranciscoCharles/doom-fire-simulator
fccd45e5c96d37de00a6979ec00a5e13a668d4d9
[ "MIT" ]
null
null
null
#_*_coding:utf-8_*_
#created by FranciscoCharles in april,2021.
from os import environ
if 'PYGAME_HIDE_SUPPORT_PROMPT' not in environ:
    environ['PYGAME_HIDE_SUPPORT_PROMPT'] = 'hidden'
del environ
import pygame
import colorsys
import numpy as np
from menu import Menu, HslColor
from fire import FireColorArray
from random import randint
from typing import Tuple, List, Optional, NewType

RgbColor = Tuple[int, int, int]


def rgb_to_float(r: int, g: int, b: int) -> HslColor:
    # FIX: the annotation was `->[HslColor]` (a list-literal expression),
    # which is not a meaningful return type; the function returns one tuple.
    return (r / 255, g / 255, b / 255)


def rgb_to_int(r: int, g: int, b: int) -> RgbColor:
    """Scale a float (0..1) color triple back to 8-bit integer channels."""
    return (int(r * 255), int(g * 255), int(b * 255))


class DoomFireSimulator:
    """Pygame port of the classic Doom fire effect with an interactive menu."""

    def __init__(self) -> None:
        pygame.init()
        pygame.font.init()
        self.SCREEN_W = 830
        self.SCREEN_H = 480
        pygame.display.set_caption('FireDoomSimaltor v1.0.1')
        self.display = pygame.display.set_mode((self.SCREEN_W, self.SCREEN_H))
        icon = pygame.image.load('images/icon32.png')
        pygame.display.set_icon(icon)
        self.clock = pygame.time.Clock()
        self.FPS = 20
        self.on_fire = True
        self.fire_size = (8, 8)       # pixel cell size (w, h) of one fire sample
        self.fire_x = 40              # top-left corner of the fire rectangle
        self.fire_y = 40
        self.decay_value = 3          # max random intensity decay per step
        self.wind_force = 7           # max rightward drift (cells)
        self.wind_mi_force = 0        # min drift (negative = leftward)
        self.colors = self.selectColorPalette()
        self.fire_array = FireColorArray(40, 50)
        self.setBaseFlameValue(len(self.colors) - 1)
        self.menu = Menu(len(self.colors) - 1)

    def selectColorPalette(self, options: Optional[HslColor] = None) -> List[RgbColor]:
        """Reset the palette; optionally hue-shift/attenuate it via (h, l, s)."""
        self.colors = [
            (7,7,7),(31,7,7),(47,15,7),(71,15,7),(87,23,7),(103,31,7),(119,31,7),(143,39,7),(159,47,7),(175,63,7),
            (191,71,7),(199,71,7),(223,79,7),(223,87,7),(223,87,7),(215,95,7),(215,95,7),(215,103,15),(207,111,15),
            (207,119,15),(207,127,15),(207,135,23),(199,135,23),(199,143,23),(199,151,31),(191,159,31),(191,159,31),
            (191,167,39),(191,167,39),(191,175,47),(183,175,47),(183,183,47),(183,183,55),(207,207,111),(223,223,159),
            (239,239,199),(255,255,255)]
        if isinstance(options, tuple) and len(options) == 3:
            (shift_color, decay_l, decay_s) = options
            shift_color = (shift_color % 360) / 360
            result = []
            for cor in self.colors:
                cor = rgb_to_float(*cor)
                (_, l, s) = colorsys.rgb_to_hls(*cor)
                cor = colorsys.hls_to_rgb(shift_color, decay_l * l, decay_s * s)
                result.append(rgb_to_int(*cor))
            self.colors = result
        return self.colors

    def setBaseFlameValue(self, value: int) -> None:
        """Fill the bottom row (the flame source) with a uniform intensity."""
        row, columns = self.fire_array.shape
        row -= 1
        for column in range(columns):
            self.fire_array[row, column] = value

    def updatePixelFire(self, row: int, column: int) -> int:
        # FIX: was annotated `-> None` although the function returns `shift`.
        decay = randint(0, self.decay_value)
        shift = column + randint(self.wind_mi_force, self.wind_force)
        self.fire_array[row, shift] = (self.fire_array[row + 1, column] - decay)
        if self.fire_array[row, shift] < 0:
            self.fire_array[row, shift] = 0
        return shift

    def evaporateFire(self) -> None:
        """Propagate each cell upward with random decay and wind drift."""
        rows, columns = self.fire_array.shape
        for row in range(rows - 1):
            for column in range(columns):
                self.updatePixelFire(row, column)

    def drawFire(self) -> None:
        """Render every fire cell as a filled rect, then advance the simulation."""
        rows, columns = self.fire_array.shape
        w, h = self.fire_size
        for row in range(rows):
            for column in range(columns):
                color = self.fire_array[row, column]
                rect = (self.fire_x + column * w, self.fire_y + row * h, w, h)
                pygame.draw.rect(self.display, self.colors[color], rect)
        self.evaporateFire()

    @property
    def rectFire(self) -> Tuple[int, int, int, int]:
        """(x, y, w, h) of the fire drawing area in screen pixels."""
        h, w = self.fire_array.shape
        return (self.fire_x, self.fire_y, w * self.fire_size[0], h * self.fire_size[1])

    def run(self) -> None:
        """Main loop: dispatch key commands to methods and redraw fire/menu."""
        ticks = 0
        game = True
        draw_menu = True
        valid_keys = ['q', 'w', 'a', 's', 'z', 'left', 'right', 'up', 'down']
        key_pressed = ''
        (x, y, w, h) = self.rectFire
        fire_rect = (x - 1, y - 1, w + 2, h + 2)
        rect_menu = (399, 39, 392, 402)
        positions = self.menu.getListPositionMenu(620, 94)
        pygame.draw.rect(self.display, (0xaaaaaa), fire_rect, 1)
        while game:
            self.clock.tick(self.FPS)
            for e in pygame.event.get():
                if e.type == pygame.QUIT:
                    game = False
                    break
                elif e.type == pygame.KEYDOWN:
                    key = pygame.key.name(e.key)
                    if key == 'escape':
                        game = False
                        break
                    elif key in valid_keys:
                        ticks = pygame.time.get_ticks()
                        key_pressed = key
                        # Each valid key name matches a method on this class.
                        getattr(self, key_pressed)()
                        draw_menu = True
                elif e.type == pygame.KEYUP:
                    key_pressed = ''
            # Key auto-repeat after 500 ms of holding.
            if key_pressed and (pygame.time.get_ticks() - ticks) > 500:
                getattr(self, key_pressed)()
                draw_menu = True
            if game:
                if draw_menu:
                    pygame.draw.rect(self.display, (0), rect_menu)
                    self.menu.draw(self.display, positions)
                    pygame.draw.rect(self.display, (0xaaaaaa), rect_menu, 1)
                    pygame.display.update(rect_menu)
                    draw_menu = False
                self.drawFire()
                pygame.display.update(fire_rect)
        self.stop()

    def changePalette(self) -> None:
        """Re-derive the palette from the menu's HSL settings (if enabled)."""
        color = None
        if self.menu['color intensity']['value']:
            color = self.menu.currentColorValue()
        self.selectColorPalette(color)

    def updateSimulationValues(self) -> None:
        """Apply the currently selected menu entry to the simulation state."""
        name = self.menu.name
        if name == 'FPS':
            self.FPS = self.menu['FPS']['value']
        elif name == 'decay':
            self.decay_value = self.menu['decay']['value']
        elif name == 'wind direction':
            type_index = self.menu['wind direction']['value']
            type_value = self.menu['wind direction']['types'][type_index]
            wind_force = self.menu['wind force']['value']
            if type_value == 'left':
                self.wind_mi_force = 0
                self.wind_force = wind_force
            elif type_value == 'right':
                self.wind_mi_force = -wind_force
                self.wind_force = 0
            else:
                self.wind_force = wind_force
                self.wind_mi_force = -wind_force
        elif name == 'wind force':
            self.wind_force = self.menu['wind force']['value']
        elif name in ['H', 'S', 'L', 'color intensity']:
            self.changePalette()

    def left(self) -> None:
        self.menu.decrement()
        self.updateSimulationValues()

    def right(self) -> None:
        self.menu.increment()
        self.updateSimulationValues()

    def up(self) -> None:
        self.menu.up()

    def down(self) -> None:
        self.menu.down()

    def a(self) -> None:
        self.left()

    def s(self) -> None:
        self.right()

    def w(self) -> None:
        self.up()

    def z(self) -> None:
        self.down()

    def q(self) -> None:
        # Toggle the flame source on/off.
        self.on_fire = not self.on_fire
        self.setBaseFlameValue((len(self.colors) - 1) if self.on_fire else 0)

    def stop(self) -> None:
        pygame.quit()


if __name__ == '__main__':
    DoomFireSimulator().run()
35.52093
118
0.552573
3.125
332ec3ad83ab42693d9db460bc909a8573da26d4
1,210
py
Python
src/model/synapses/numba_backend/VoltageJump.py
Fassial/pku-intern
4463e7d5a5844c8002f7e3d01b4fadc3a20e2038
[ "MIT" ]
null
null
null
src/model/synapses/numba_backend/VoltageJump.py
Fassial/pku-intern
4463e7d5a5844c8002f7e3d01b4fadc3a20e2038
[ "MIT" ]
null
null
null
src/model/synapses/numba_backend/VoltageJump.py
Fassial/pku-intern
4463e7d5a5844c8002f7e3d01b4fadc3a20e2038
[ "MIT" ]
null
null
null
""" Created on 12:39, June. 4th, 2021 Author: fassial Filename: VoltageJump.py """ import brainpy as bp __all__ = [ "VoltageJump", ] class VoltageJump(bp.TwoEndConn): target_backend = ['numpy', 'numba', 'numba-parallel', 'numba-cuda'] def __init__(self, pre, post, conn, weight = 1., delay = 0., **kwargs ): # init params self.weight = weight self.delay = delay # init connections self.conn = conn(pre.size, post.size) self.pre_ids, self.post_ids = self.conn.requires("pre_ids", "post_ids") self.size = len(self.pre_ids) # init vars self.Isyn = self.register_constant_delay("Isyn", size = self.size, delay_time = self.delay ) # init super super(VoltageJump, self).__init__(pre = pre, post = post, **kwargs) def update(self, _t): # set post.V for i in range(self.size): pre_id, post_id = self.pre_ids[i], self.post_ids[i] self.Isyn.push(i, self.pre.spike[pre_id] * self.weight ) if not self.post.refractory[post_id]: self.post.V[post_id] += self.Isyn.pull(i)
26.304348
79
0.566116
3.015625
fea027e63c8ec137410b07cae121edd763bbcdf7
1,270
kt
Kotlin
buildSrc/src/main/kotlin/MarkdownUtil.kt
StanleyProjects/AndroidExtension.UserInterface
245a7c3c8f7518ddcf17f02629b01404800852da
[ "Apache-2.0" ]
null
null
null
buildSrc/src/main/kotlin/MarkdownUtil.kt
StanleyProjects/AndroidExtension.UserInterface
245a7c3c8f7518ddcf17f02629b01404800852da
[ "Apache-2.0" ]
11
2021-12-19T13:37:00.000Z
2022-01-13T16:00:59.000Z
buildSrc/src/main/kotlin/MarkdownUtil.kt
StanleyProjects/AndroidExtension.UserInterface
245a7c3c8f7518ddcf17f02629b01404800852da
[ "Apache-2.0" ]
null
null
null
object MarkdownUtil {
    /** Renders an inline Markdown link: `[text](value)`. */
    fun url(
        text: String,
        value: String
    ): String = "[$text]($value)"

    /** Renders an inline Markdown image — a link prefixed with `!`. */
    fun image(
        text: String,
        url: String
    ): String = "!" + url(text = text, value = url)

    /**
     * Renders a Markdown table from a header row, a divider row and data rows,
     * joining cells with `|` and rows with the platform newline.
     *
     * Validation (in order): at least two head columns, dividers matching the
     * heads, at least one row, all rows equal in width, and rows matching the
     * heads in width.
     */
    fun table(
        heads: List<String>,
        dividers: List<String>,
        rows: List<List<String>>
    ): String {
        require(heads.size > 1) { "Size of heads must be more than 1!" }
        require(heads.size == dividers.size) { "Size of heads and size of dividers must be equal!" }
        val firstRow = rows.firstOrNull()
        requireNotNull(firstRow) { "Rows must be exist!" }
        for (i in 1 until rows.size) {
            require(firstRow.size == rows[i].size) { "Size of columns in all rows must be equal!" }
        }
        require(heads.size == firstRow.size) { "Size of heads and size of rows must be equal!" }
        val lines = mutableListOf<String>()
        lines += heads.joinToString(separator = "|")
        lines += dividers.joinToString(separator = "|")
        rows.mapTo(lines) { row -> row.joinToString(separator = "|") }
        return lines.joinToString(separator = SystemUtil.newLine)
    }
}
29.534884
72
0.531496
3.328125
4e0cf10ba62a61bbde206ff0ae503fbef4c0c6dc
1,953
asm
Assembly
Microcontroller_Lab/Lab_6/Lab_6/Read_Code_With_Comments.asm
MuhammadAlBarham/pic16f778_projects
c12e15e48a62cd16f869cbe9411728a4eea8f499
[ "MIT" ]
null
null
null
Microcontroller_Lab/Lab_6/Lab_6/Read_Code_With_Comments.asm
MuhammadAlBarham/pic16f778_projects
c12e15e48a62cd16f869cbe9411728a4eea8f499
[ "MIT" ]
null
null
null
Microcontroller_Lab/Lab_6/Lab_6/Read_Code_With_Comments.asm
MuhammadAlBarham/pic16f778_projects
c12e15e48a62cd16f869cbe9411728a4eea8f499
[ "MIT" ]
null
null
null
Include "p16F84A.inc" ; ---------------------------------------------------------- ; General Purpose RAM Assignments ; ---------------------------------------------------------- cblock 0x0C Counter Endc ; ---------------------------------------------------------- ; Macro Definitions ; ---------------------------------------------------------- Read_EEPROM macro Bcf STATUS, RP0 ;Go to Bank 0 Clrf EEADR ;Clear EEADR (EEADR=0) Bsf STATUS, RP0 ;Go to Bank 1 Bsf EECON1, RD ;Begin Read Bcf STATUS, RP0 ;Go to Bank 0 Endm ; ---------------------------------------------------------- ; Vector definition ; ---------------------------------------------------------- org 0x000 nop goto Main INT_Routine org 0x004 goto INT_Routine ; ---------------------------------------------------------- ; The main Program ; ---------------------------------------------------------- Main Read_EEPROM Clrf Counter ;Clear the counter Bsf STATUS, RP0 ;Go to Bank 1 Clrf TRISB ;Make PORTB as OUTPUT Bcf STATUS, RP0 ;Go to BANK 0 Movlw A'H' ;Move Character to W-Reg Subwf EEDATA,w ;Check If the first char. is H Btfsc STATUS,Z ;If Yes goto finish Goto Finish Incf Counter,f Movlw A'M' Subwf EEDATA,w Btfsc STATUS,Z Finish Incf Counter,f Call Look_Up Movwf PORTB Loop Goto Loop ; ---------------------------------------------------------- ; Sub Routine Definitions ; ---------------------------------------------------------- ;This Look_Up table for 7-Seg. Display Look_Up Movf Counter,w Addwf PCL,f Retlw B'00111111' ; Number 0 Retlw B'00000110' ; Number 1 Retlw B'01011011' ; Number 2 Retlw B'01001111' ; Number 3 Retlw B'01100110' ; Number 4 Retlw B'01101101' ; Number 5 end
29.149254
61
0.410138
3.046875
fdaf633a2d6691a5cac79970a134936e25b5c10c
3,480
lua
Lua
src/scripts/client/gameplay/viz_handlers/flash_units.lua
Psimage/Lovely-Camera-Mod
7ecb1c354c3e00091ef6005d53eb19d0fcf7f56e
[ "MIT" ]
null
null
null
src/scripts/client/gameplay/viz_handlers/flash_units.lua
Psimage/Lovely-Camera-Mod
7ecb1c354c3e00091ef6005d53eb19d0fcf7f56e
[ "MIT" ]
null
null
null
src/scripts/client/gameplay/viz_handlers/flash_units.lua
Psimage/Lovely-Camera-Mod
7ecb1c354c3e00091ef6005d53eb19d0fcf7f56e
[ "MIT" ]
null
null
null
----------------------------------------------------------------
-- Copyright (c) 2012 Klei Entertainment Inc.
-- All Rights Reserved.
-- SPY SOCIETY.
----------------------------------------------------------------

local viz_thread = include( "gameplay/viz_thread" )
local array = include( "modules/array" )
local cdefs = include( "client_defs" )
local util = include( "client_util" )
local simdefs = include( "sim/simdefs" )
local simquery = include( "sim/simquery" )

---------------------------------------------------------------
-- Viz handler that temporarily "spotlights" a unit rig: the rig is lifted to
-- the ceiling render layer above a fullscreen darkening overlay, and its
-- render filter is pulsed for `duration` frames. A shared counter on the
-- boardrig ensures one overlay is created for any number of flashed units.

local flash_units = class( viz_thread )

-- Starts a flash: re-parents the rig's prop and (for the first concurrent
-- flash only) creates the dimmer overlay driven by a ping-pong timer.
function flash_units:init( boardrig, viz, rig, duration )
	viz_thread.init( self, viz, self.onResume )
	viz:registerHandler( simdefs.EV_FRAME_UPDATE, self )
	self.boardrig = boardrig
	self.rig = rig
	self.duration = duration -- remaining frames; counted down in onResume

	--move rig from layers["main"] to layers["ceiling"]
	--increment usage_count and enable fullscreen darkening overlay if 1
	local rigProp = rig:getProp()
	local main = boardrig._layers["main"]
	local ceiling = boardrig._layers["ceiling"]
	main:removeProp( rigProp )
	ceiling:insertProp( rigProp )
	rigProp:setPriority( 110000 ) -- draw above the dimmer (priority 100000)

	if not boardrig._flashThreadCount or boardrig._flashThreadCount == 0 then
		-- First concurrent flash: create the shared dimmer overlay.
		boardrig._flashThreadCount = 1
		--print( "inserting dimmer" )
		--local bSoundPlayed = false
		local timer = MOAITimer.new()
		timer:setSpan( duration / (60*2) )
		timer:setMode( MOAITimer.PING_PONG )
		timer:start()
		-- Shader uniform driver: ramps the "ease" value with the timer,
		-- clamped at 0.7 so the overlay never goes fully dark.
		local uniformDriver = function( uniforms )
			local t = timer:getTime() / (duration / (60*2) )
			t = math.min(0.7,t*3)
			uniforms:setUniformFloat( "ease", t )
			--print('dimmer ease', t )
		end
		local uniforms = KLEIShaderUniforms.new()
		uniforms:setUniformDriver( uniformDriver )
		local dimmerProp = KLEIFullscreenProp.new()
		dimmerProp:setShader( MOAIShaderMgr.getShader( MOAIShaderMgr.KLEI_POST_PROCESS_PASS_THROUGH_EASE ) )
		dimmerProp:setShaderUniforms( uniforms )
		dimmerProp:setTexture( "data/images/the_darkness.png" )
		dimmerProp:setBlendMode( MOAIProp.BLEND_NORMAL )
		dimmerProp:setPriority( 100000 )
		ceiling:insertProp( dimmerProp )
		boardrig._dimmerProp = dimmerProp
	else
		-- Overlay already active: just track one more flashing unit.
		boardrig._flashThreadCount = boardrig._flashThreadCount + 1
	end
end

-- Undoes init: restores the rig to the main layer and removes the shared
-- dimmer once the last concurrent flash stops.
function flash_units:onStop()
	self.rig:refreshRenderFilter()
	--move rig from layers["ceiling"] to layers["main"]
	--decrement usage_count and disable fullscreen darkening overlay if 0
	local rigProp = self.rig:getProp()
	local main = self.boardrig._layers["main"]
	local ceiling = self.boardrig._layers["ceiling"]
	ceiling:removeProp( rigProp )
	main:insertProp( rigProp )

	local count = self.boardrig._flashThreadCount - 1
	self.boardrig._flashThreadCount = count
	if count <= 0 then
		--print( "deleting dimmer" )
		local dimmerProp = self.boardrig._dimmerProp
		ceiling:removeProp( dimmerProp )
		self.boardrig._dimmerProp = nil
	end
end

-- Per-frame tick: every 20 frames highlight the rig, 10 frames later reset
-- it, producing a blink; yields between frames until duration runs out.
function flash_units:onResume( ev )
	while self.duration > 0 do
		if self.duration % 20 == 0 then
			self.rig:getProp():setRenderFilter( cdefs.RENDER_FILTERS["focus_highlite"] )
		elseif self.duration % 10 == 0 then
			self.rig:refreshRenderFilter()
		end
		self.duration = self.duration - 1
		coroutine.yield()
	end
end

return flash_units
31.926606
109
0.641092
3.078125
da1f4b82540846bf3b10497980830f102e02fe01
2,018
lua
Lua
resources/[scripts]/[hoppe]/[jobs]/hpp_cet/towtruck.lua
HoppeDevz/bclrp
acc33ae5032fb2488dacfa49046470feb8cac32e
[ "MIT" ]
4
2020-09-15T17:43:21.000Z
2022-01-14T16:49:16.000Z
resources/[scripts]/[hoppe]/[jobs]/hpp_cet/towtruck.lua
kFxDaKing/bclrp
acc33ae5032fb2488dacfa49046470feb8cac32e
[ "MIT" ]
null
null
null
resources/[scripts]/[hoppe]/[jobs]/hpp_cet/towtruck.lua
kFxDaKing/bclrp
acc33ae5032fb2488dacfa49046470feb8cac32e
[ "MIT" ]
4
2020-09-14T11:47:50.000Z
2021-02-15T20:39:45.000Z
RegisterCommand("towtruck", function() vehicle = GetVehiclePedIsUsing(GetPlayerPed(-1)) vehicletow = GetDisplayNameFromVehicleModel(GetEntityModel(GetVehiclePedIsUsing(GetPlayerPed(-1)))) end) local reboque = nil local rebocado = nil RegisterCommand("tow",function(source,args) --local vehicle = GetPlayersLastVehicle() --local vehicletow = IsVehicleModel(vehicle,GetHashKey("flatbed")) if vehicletow and not IsPedInAnyVehicle(PlayerPedId()) then --rebocado = getVehicleInDirection(GetEntityCoords(PlayerPedId()),GetOffsetFromEntityInWorldCoords(PlayerPedId(),0.0,5.0,0.0)) rebocado = GetPlayersLastVehicle() if reboque == nil then if vehicle ~= rebocado then local min,max = GetModelDimensions(GetEntityModel(rebocado)) AttachEntityToEntity(rebocado,vehicle,GetEntityBoneIndexByName(vehicle,"bodyshell"),0,-2.2,0.4-min.z,0,0,0,1,1,0,1,0,1) reboque = rebocado end else AttachEntityToEntity(reboque,vehicle,20,-0.5,-15.0,-0.3,0.0,0.0,0.0,false,false,true,false,20,true) DetachEntity(reboque,false,false) PlaceObjectOnGroundProperly(reboque) reboque = nil rebocado = nil end end end) Citizen.CreateThread(function() while true do Citizen.Wait(0) if enablemechud then if vehicletow == nil then vehicletow = "NENHUM" end rebocado = GetDisplayNameFromVehicleModel(GetEntityModel(GetPlayersLastVehicle())) if rebocado == nil then rebocado = "NENHUM" elseif rebocado == "FLATBED" then rebocado = "NENHUM" end --print(vehicletow) --print(rebocado) if vehicletow ~= "FLATBED" then drawTxt("REBOQUE:~r~"..vehicletow,4,0.5,0.93,0.50,255,255,255,255) elseif vehicletow == "FLATBED" then drawTxt("REBOQUE:~g~"..vehicletow,4,0.5,0.93,0.50,255,255,255,255) end drawTxt("REBOCADO:~r~"..rebocado,4,0.5,0.96,0.50,255,255,255,255) end end end) enablemechud = false RegisterCommand("towhud", function() if not enablemechud then enablemechud = true elseif enablemechud then enablemechud = false end end)
29.246377
128
0.730426
3.09375
b89f9543ecb27486610847a26f0dea237fce7656
3,804
rs
Rust
days/day25/src/lib.rs
dfm/adventofcode
ab2c4228229988d79ba7a9034069961650830031
[ "Apache-2.0" ]
2
2020-12-05T23:14:48.000Z
2021-12-27T04:39:33.000Z
days/day25/src/lib.rs
dfm/adventofcode
ab2c4228229988d79ba7a9034069961650830031
[ "Apache-2.0" ]
null
null
null
days/day25/src/lib.rs
dfm/adventofcode
ab2c4228229988d79ba7a9034069961650830031
[ "Apache-2.0" ]
1
2019-12-24T04:56:27.000Z
2019-12-24T04:56:27.000Z
use aoc::solver::Solver; pub struct Day25; #[derive(Copy, Clone)] enum Cell { Empty, East, South, } struct Grid { width: usize, height: usize, value: Vec<Cell>, } impl Grid { fn new(data: &str) -> Self { let width = data.lines().next().unwrap().trim().len(); let mut grid = Grid { width, height: 0, value: Vec::new(), }; for line in data.lines() { for c in line.trim().chars() { grid.value.push(match c { '>' => Cell::East, 'v' => Cell::South, _ => Cell::Empty, }); } } grid.height = grid.value.len() / width; grid } fn east(&self, n: usize) -> usize { if n % self.width == self.width - 1 { n + 1 - self.width } else { n + 1 } } fn south(&self, n: usize) -> usize { if n >= (self.height - 1) * self.width { n % self.width } else { n + self.width } } fn step(&self) -> (usize, Self) { let mut count = 0; let mut grid = Grid { width: self.width, height: self.height, value: Vec::new(), }; grid.value.resize(self.width * self.height, Cell::Empty); for (n, v) in self.value.iter().enumerate() { if matches!(v, Cell::East) { let target = self.east(n); if matches!(self.value[target], Cell::Empty) { grid.value[target] = Cell::East; count += 1; } else { grid.value[n] = Cell::East; } } } for (n, v) in self.value.iter().enumerate() { if matches!(v, Cell::South) { let target = self.south(n); if matches!(grid.value[target], Cell::Empty) && !matches!(self.value[target], Cell::South) { grid.value[target] = Cell::South; count += 1; } else { grid.value[n] = Cell::South; } } } (count, grid) } } impl std::fmt::Display for Cell { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!( f, "{}", match self { Cell::Empty => '.', Cell::East => '>', Cell::South => 'v', } ) } } impl std::fmt::Display for Grid { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { let mut n = 0; let mut result = String::new(); for _ in 0..self.height { for _ in 0..self.width { let line = format!("{}", self.value[n]); n += 1; result.push_str(&line); } result.push('\n'); } write!(f, "{}", result) 
} } impl Solver<&str> for Day25 { fn part1(data: &str) -> usize { let mut count = 0; let mut grid = Grid::new(data); loop { let result = grid.step(); count += 1; if result.0 == 0 { break; } grid = result.1; } count } fn part2(_data: &str) -> usize { 0 } } #[cfg(test)] mod tests { use super::*; const DATA: &str = "v...>>.vv> .vv>>.vv.. >>.>v>...v >>v>>.>.v. v>v.vv.v.. >.>>..v... .vv..>.>v. v.v..>>v.v ....v..v.> "; #[test] fn test_part1() { assert_eq!(Day25::part1(DATA), 58); } #[test] fn test_part2() { assert_eq!(Day25::part2(DATA), 0); } }
22.508876
68
0.398002
3.28125
b156345f812e80c4199551982bf4bc47bc72fad8
1,984
h
C
sorce/mikey/Opera/Grass.h
montoyamoraga/shbobo
3469747603dfead376111f38b455af1250365848
[ "MIT" ]
16
2020-12-21T04:52:20.000Z
2022-02-28T10:15:34.000Z
sorce/mikey/Opera/Grass.h
montoyamoraga/shbobo
3469747603dfead376111f38b455af1250365848
[ "MIT" ]
8
2021-01-02T01:01:26.000Z
2021-12-19T01:40:34.000Z
sorce/mikey/Opera/Grass.h
montoyamoraga/shbobo
3469747603dfead376111f38b455af1250365848
[ "MIT" ]
4
2021-01-01T15:27:43.000Z
2021-08-10T21:14:29.000Z
struct BistaTranz { float squl, squr; bool state; BistaTranz (){squl=squr=0;} float calx(float fmm, float den) { if (state) { squl += fmm; if (squl >= den) { squr = -den; state = !state; } } else { squr += fmm; if (squr >= den) { squl = -den; state = !state; } } return squl; } }; struct Bista : Opero { BistaTranz bt; shOpr mul, add, fmo, den; Bista(lua_State *L): Opero() { broinger(L, 1, "fre", &fmo); broinger(L, 2, "den", &den, 1.0); broinger(L, 3, "mul", &mul,1.0); broinger(L, 4, "add", &add); } float calx(float sr) { mint = 0; float fmm = 4*fmo->calx(sr)/sr; float deno = fabs(den->calx(sr)); bt.calx(fmm,deno); mint = bt.squr * mul->calx(sr) + add->calx(sr); return mint; } static const char className[]; const char * getClassName() { return className; } }; const char Bista::className[] = "Bista"; struct RunglTranz { unsigned char pattern; float mint; float lastcar; RunglTranz(){pattern=rand();} float calx(bool car, bool mod) { if (car && !lastcar) pattern = (pattern << 1) | (mod ? 1 : 0); lastcar = car; //printf("patternfloat%d\n",pattern); mint = (float)pattern / 256; return mint; } }; struct Grass : Opero { BistaTranz square[4]; RunglTranz castle[4]; static const char className[]; shOpr mul, add, fmo, cha; Grass(lua_State *L): Opero() { broinger(L, 1, "fre", &fmo); broinger(L, 2, "cha", &cha); broinger(L, 3, "mul", &mul,1.0); broinger(L, 4, "add", &add); } float calx(float sr) { mint = 0; float chaos = cha->calx(sr); //printf("chaos%f\n",chaos); float fmm = 4*fmo->calx(sr)/sr; for (int i = 0; i < 4; i++) { square[i].calx( fmm*(1+chaos*castle[i].calx( square[(i+2)%4].state, square[(i+1)%4].state)), 1+(float)i/10); } float mullo = mul->calx(sr); mint = square[0].squl*mullo + add->calx(sr); return mint; } const char * getClassName() { return className; } }; const char Grass::className[] = "Grass";
22.545455
52
0.583669
3.0625
ef53113206ffe662ae977a2dee15a9f65785ecd4
1,950
pls
SQL
home/lib/python/exemple/plgrader/one.pls
PremierLangage/premierlangage
7134a2aadffee2bf264abee6c4b23ea33f1b390b
[ "CECILL-B" ]
8
2019-01-30T13:51:59.000Z
2022-01-08T03:26:53.000Z
apps/misc_tests/resources/lib/python/exemple/plgrader/one.pls
PremierLangage/premierlangage
7134a2aadffee2bf264abee6c4b23ea33f1b390b
[ "CECILL-B" ]
286
2019-01-18T21:35:51.000Z
2022-03-24T18:53:59.000Z
apps/misc_tests/resources/lib/python/exemple/plgrader/one.pls
PremierLangage/premierlangage
7134a2aadffee2bf264abee6c4b23ea33f1b390b
[ "CECILL-B" ]
4
2019-02-11T13:38:30.000Z
2021-03-02T20:59:00.000Z
from playexo.strategy import StrategyAPI def get_last_answered_index(request, activity): strat = StrategyAPI(activity) pls = strat.get_pl_list() i = 0 for pl in pls: if not strat.get_last_good_answer(pl, request): return i i += 1 return 0 def strategy(request, activity): """ Process request to determine what do to. Should return an HttpResponse. """ strat = StrategyAPI(activity) current = get_last_answered_index(request, activity) if request.method == 'GET': # Request changing which exercise will be loaded action = request.GET.get("action", None) if action == "pl": strat.set_pl(strat.get_pl_sha1(request.GET.get("pl_sha1", None)), request) return HttpResponseRedirect("/playexo/activity/") # Remove get parameters from url elif action == "pltp": pl = strat.get_current_pl(request) if (pl): can_do = pl.sha1; strat.set_pl(None, request) dic = strat.get_pl_dic(strat.get_current_pl(request)) if 'oneshot' in dic and dic['oneshot'] == 'True': seed = None else: seed = strat.get_seed_from_answer(strat.get_last_answer(strat.get_current_pl(request), request)) exercise = strat.load_exercise(request, seed) if request.method == 'GET': # Request changing or interacting an exercise if action == "reset": strat.reset_pl(exercise) elif action == "next": pl = strat.get_next_pl(request) strat.set_pl(pl, request) return HttpResponseRedirect("/playexo/activity/") # Remove get parameters from url if request.method == 'POST': state, feedback = strat.evaluate(exercise, request) return strat.send_evaluate_feedback(state, feedback) strat.add_to_context(exercise, 'current_auth', current) return strat.render(exercise, request)
38.235294
104
0.640513
3
8c1cd1193490b6043dceafbc312e302e08e02328
1,496
kt
Kotlin
java-time/src/main/kotlin/com/github/debop/javatimes/PeriodExtensions.kt
debop/joda-time-kotlin
cbb0efedaa53bdf9d77b230d8477cb0ae0d7abd7
[ "ECL-2.0", "Apache-2.0" ]
91
2016-07-15T03:06:17.000Z
2021-12-07T11:16:44.000Z
java-time/src/main/kotlin/com/github/debop/javatimes/PeriodExtensions.kt
debop/joda-time-kotlin
cbb0efedaa53bdf9d77b230d8477cb0ae0d7abd7
[ "ECL-2.0", "Apache-2.0" ]
11
2016-11-23T11:04:30.000Z
2021-05-18T13:07:11.000Z
java-time/src/main/kotlin/com/github/debop/javatimes/PeriodExtensions.kt
debop/joda-time-kotlin
cbb0efedaa53bdf9d77b230d8477cb0ae0d7abd7
[ "ECL-2.0", "Apache-2.0" ]
9
2017-01-23T13:35:25.000Z
2020-06-08T06:26:48.000Z
package com.github.debop.javatimes import java.time.Period import java.time.temporal.Temporal operator fun Period.unaryMinus(): Period = this.negated() @Suppress("UNCHECKED_CAST") operator fun <T : Temporal> Period.plus(instant: T): T = addTo(instant) as T @Suppress("UNCHECKED_CAST") operator fun <T : Temporal> Period.minus(instant: T): T = subtractFrom(instant) as T @JvmOverloads fun periodOf(years: Int, months: Int = 0, days: Int = 0): Period = Period.of(years, months, days) /** * year sequence of `Period` */ suspend fun Period.yearSequence(): Sequence<Int> = sequence { var year = 0 val years = [email protected] if(years > 0) { while(year < years) { yield(year++) } } else { while(year > years) { yield(year--) } } } /** * month sequence of `java.time.Period` */ suspend fun Period.monthSequence(): Sequence<Int> = sequence { var month = 0 val months = [email protected] if(months > 0) { while(month < months) { yield(month++) } } else { while(month > months) { yield(month--) } } } /** * day sequence of `java.time.Period` */ suspend fun Period.daySequence(): Sequence<Int> = sequence { var day = 0 val days = [email protected] if(days > 0) { while(day < days) { yield(day++) } } else { while(day > days) { yield(day--) } } }
22.666667
97
0.57754
3.25
27e0e1bdc49eab4fdb1dfb4431fe18bd1c9bc125
6,115
kt
Kotlin
app/src/main/java/com/starline/hamsteradoption/HamsterDetailActivity.kt
MaxNeverSleep/HamsterAdoption
38416348921b78c2f51933ddbc287f234fd0ac5c
[ "Apache-2.0" ]
null
null
null
app/src/main/java/com/starline/hamsteradoption/HamsterDetailActivity.kt
MaxNeverSleep/HamsterAdoption
38416348921b78c2f51933ddbc287f234fd0ac5c
[ "Apache-2.0" ]
null
null
null
app/src/main/java/com/starline/hamsteradoption/HamsterDetailActivity.kt
MaxNeverSleep/HamsterAdoption
38416348921b78c2f51933ddbc287f234fd0ac5c
[ "Apache-2.0" ]
null
null
null
package com.starline.hamsteradoption import android.graphics.Paint import android.os.Bundle import android.util.Log import androidx.activity.compose.setContent import androidx.appcompat.app.AppCompatActivity import androidx.compose.foundation.Image import androidx.compose.foundation.layout.* import androidx.compose.material.* import androidx.compose.material.icons.Icons import androidx.compose.material.icons.filled.ArrowBack import androidx.compose.runtime.Composable import androidx.compose.ui.Alignment import androidx.compose.ui.Modifier import androidx.compose.ui.layout.ContentScale import androidx.compose.ui.res.painterResource import androidx.compose.ui.text.font.FontWeight import androidx.compose.ui.unit.dp import androidx.compose.ui.unit.sp import androidx.core.graphics.scaleMatrix import com.starline.hamsteradoption.ui.theme.HamsterAdoptionTheme val hamsterAae = arrayOf( "3 months", "4 months", "1 year 4 months", "2 years", "3 months", "5 months", "7 months", "1 1moneths", "1 months", "3 months", "8 months", "1 months", ) class HamsterDetailActivity : AppCompatActivity() { override fun onCreate(savedInstanceState: Bundle?) 
{ super.onCreate(savedInstanceState) val name = intent.getStringExtra("HAMSTER_NAME") val desc = intent.getStringExtra("HAMSTER_DESC") var detailImage1: Int = 0 var detailImage2: Int = 0 var age: String = "1 months" when (name) { "Sussy" -> { detailImage1 = R.mipmap.hamster_preview_1 detailImage2 = R.mipmap.hamster_1_1 age = "1 months" } "Jack" -> { detailImage1 = R.mipmap.hamster_preview_2 detailImage2 = R.mipmap.hamster_2_1 age = "3 months" } "David" -> { detailImage1 = R.mipmap.hamster_preview_3 detailImage2 = R.mipmap.hamster_3_1 age = "6 months" } "Stephen" -> { detailImage1 = R.mipmap.hamster_preview_4 detailImage2 = R.mipmap.hamster_4_1 age = "4 months" } "Kiro" -> { detailImage1 = R.mipmap.hamster_preview_5 detailImage2 = R.mipmap.hamster_5_1 age = "2 months" } "Warden" -> { detailImage1 = R.mipmap.hamster_preview_6 detailImage2 = R.mipmap.hamster_6_1 age = "4 months" } "Love" -> { detailImage1 = R.mipmap.hamster_preview_7 detailImage2 = R.mipmap.hamster_7_1 age = "2 months" } "Cookie" -> { detailImage1 = R.mipmap.hamster_preview_8 detailImage2 = R.mipmap.hamster_8_1 age = "5 months" } "Hamster007" -> { detailImage1 = R.mipmap.hamster_preview_9 detailImage2 = R.mipmap.hamster_5_1 age = "4 months" } "SweetHamster" -> { detailImage1 = R.mipmap.hamster_preview_10 detailImage2 = R.mipmap.hamster_4_1 age = "6 months" } "Lam" -> { detailImage1 = R.mipmap.hamster_preview_11 detailImage2 = R.mipmap.hamster_1_1 age = "7 months" } "Pinkie" -> { detailImage1 = R.mipmap.hamster_preview_12 detailImage2 = R.mipmap.hamster_6_1 age = "12 months" } } setContent { HamsterAdoptionTheme { MyDetail(name, age, detailImage1, detailImage2, { back() }) } } } private fun back() { Log.i("test", "finish activity结束") super.finish() } } @Composable fun MyDetail( name: String?, age: String?, detailImage1: Int, detailImage2: Int, onClick: () -> Unit ) { Surface(color = MaterialTheme.colors.background) { Column { TopAppBar( navigationIcon = { IconButton(onClick = { onClick }) { 
Icon(Icons.Filled.ArrowBack, "back") } }, title = { Text( text = "$name's Detail", fontWeight = FontWeight.Bold ) } ) Row { Image( painter = painterResource(id = detailImage1), contentDescription = name, Modifier .width(180.dp) .height(180.dp) .padding(30.dp, 30.dp), contentScale = ContentScale.FillBounds ) Text( text = "Name : $name\r\n\nAge : $age\r\n\nDistance : 4.5km", Modifier.padding(top = 40.dp), fontWeight = FontWeight.Bold, fontSize = 18.sp ) } Text( text = "$name ,she’s really light for a dwarf, do not mistake her weight for her looks as when she arrived, she looks a lil preggie ...Turns out after 4 weeks, she just has a food baby and big hips (yes #bodygoals) Therefore, pls give Wheelington a chance even if you’re looking for a round hammy, light hams can be chonky too!", Modifier.padding(horizontal = 30.dp, vertical = 10.dp), style = MaterialTheme.typography.body2 ) Image( painter = painterResource(id = detailImage2), contentDescription = name, Modifier .width(400.dp) .height(280.dp) .padding(30.dp, 30.dp), contentScale = ContentScale.FillBounds ) } } }
33.598901
345
0.521668
3
8e447720674c91b173384fcd9c06cc17164dc87b
3,122
rb
Ruby
lib/fastly_nsq/manager.rb
fastly/fastly_nsq
e05d3972ebcfb25f7a42eb0268d482e5142d4658
[ "MIT" ]
11
2016-01-30T00:59:26.000Z
2022-03-04T21:48:06.000Z
lib/fastly_nsq/manager.rb
fastly/fastly_nsq
e05d3972ebcfb25f7a42eb0268d482e5142d4658
[ "MIT" ]
65
2016-02-02T23:27:41.000Z
2022-02-23T14:32:01.000Z
lib/fastly_nsq/manager.rb
fastly/fastly_nsq
e05d3972ebcfb25f7a42eb0268d482e5142d4658
[ "MIT" ]
2
2016-04-18T15:36:09.000Z
2022-03-23T08:17:47.000Z
# frozen_string_literal: true ## # Interface for tracking listeners and managing the processing pool. class FastlyNsq::Manager DEADLINE = 30 # @return [Boolean] Set true when all listeners are stopped attr_reader :done # @return [FastlyNsq::PriorityThreadPool] attr_reader :pool # @return [Logger] attr_reader :logger ## # Create a FastlyNsq::Manager # # @param logger [Logger] # @param max_threads [Integer] Maxiumum number of threads to be used by {FastlyNsq::PriorityThreadPool} # @param pool_options [Hash] Options forwarded to {FastlyNsq::PriorityThreadPool} constructor. def initialize(logger: FastlyNsq.logger, max_threads: FastlyNsq.max_processing_pool_threads, **pool_options) @done = false @logger = logger @pool = FastlyNsq::PriorityThreadPool.new( { fallback_policy: :caller_runs, max_threads: max_threads }.merge(pool_options), ) end ## # Hash of listeners. Keys are topics, values are {FastlyNsq::Listener} instances. # @return [Hash] def topic_listeners @topic_listeners ||= {} end ## # Array of listening topic names # @return [Array] def topics topic_listeners.keys end ## # Set of {FastlyNsq::Listener} objects # @return [Set] def listeners topic_listeners.values.to_set end ## # Stop the manager. # Terminates the listeners and stops all processing in the pool. # @param deadline [Integer] Number of seconds to wait for pool to stop processing def terminate(deadline = DEADLINE) return if done stop_listeners return if pool.shutdown? stop_processing(deadline) @done = true end ## # Manager state # @return [Boolean] def stopped? done end ## # Add a {FastlyNsq::Listener} to the @topic_listeners # @param listener [FastlyNsq::Listener} def add_listener(listener) logger.info { "topic #{listener.topic}, channel #{listener.channel}: listening" } if topic_listeners[listener.topic] logger.warn { "topic #{listener.topic}: duplicate listener" } end topic_listeners[listener.topic] = listener end ## # Transer listeners to a new manager and stop processing from the existing pool. 
# @param new_manager [FastlyNsq::Manager] new manager to which listeners will be added # @param deadline [Integer] Number of seconds to wait for exsiting pool to stop processing def transfer(new_manager, deadline: DEADLINE) new_manager.topic_listeners.merge!(topic_listeners) stop_processing(deadline) topic_listeners.clear @done = true end ## # Terminate all listeners def stop_listeners logger.info { 'Stopping listeners' } listeners.each(&:terminate) topic_listeners.clear end protected ## # Shutdown the pool # @param deadline [Integer] Number of seconds to wait for pool to stop processing def stop_processing(deadline) logger.info { 'Stopping processors' } pool.shutdown logger.info { 'Waiting for processors to finish...' } return if pool.wait_for_termination(deadline) logger.info { 'Killing processors...' } pool.kill end end
25.382114
110
0.704997
3
402fab453bb9e2ac59ef1604d2cf41a8d383046e
1,270
py
Python
move.py
Nexowned/SnakeAI
95b5d4a9d20df124040ff9335ad09409ca9ff607
[ "Apache-2.0" ]
null
null
null
move.py
Nexowned/SnakeAI
95b5d4a9d20df124040ff9335ad09409ca9ff607
[ "Apache-2.0" ]
null
null
null
move.py
Nexowned/SnakeAI
95b5d4a9d20df124040ff9335ad09409ca9ff607
[ "Apache-2.0" ]
null
null
null
from enum import Enum class Move(Enum): LEFT = -1 STRAIGHT = 0 RIGHT = 1 class Direction(Enum): NORTH = 0 EAST = 1 SOUTH = 2 WEST = 3 def get_new_direction(self, move): return Direction(self.value + move.value) % 4 def get_xy_manipulation(self): m = { Direction.NORTH: (0, -1), Direction.EAST: (1, 0), Direction.SOUTH: (0, 1), Direction.WEST: (-1, 0) } return m[self] def get_xy_moves(self): m = { Direction.NORTH: [Direction.NORTH.get_xy_manipulation(), Direction.EAST.get_xy_manipulation(), Direction.WEST.get_xy_manipulation()], Direction.EAST: [Direction.NORTH.get_xy_manipulation(), Direction.EAST.get_xy_manipulation(), Direction.SOUTH.get_xy_manipulation()], Direction.SOUTH: [Direction.SOUTH.get_xy_manipulation(), Direction.EAST.get_xy_manipulation(), Direction.WEST.get_xy_manipulation()], Direction.WEST: [Direction.NORTH.get_xy_manipulation(), Direction.WEST.get_xy_manipulation(), Direction.SOUTH.get_xy_manipulation()], } return m[self]
30.238095
106
0.573228
3.234375
dda83b2c35d62055796e4e123db761f5ab14e3a9
5,711
go
Go
vendor/github.com/joomcode/redispipe/redis/request_writer.go
anuragprafulla/components-contrib
91be9ad2a0767526049e0c95225b5afb3791e353
[ "MIT" ]
229
2018-12-20T09:36:33.000Z
2022-03-31T18:39:26.000Z
vendor/github.com/joomcode/redispipe/redis/request_writer.go
anuragprafulla/components-contrib
91be9ad2a0767526049e0c95225b5afb3791e353
[ "MIT" ]
12
2019-09-27T14:14:19.000Z
2022-03-10T00:06:20.000Z
vendor/github.com/joomcode/redispipe/redis/request_writer.go
anuragprafulla/components-contrib
91be9ad2a0767526049e0c95225b5afb3791e353
[ "MIT" ]
17
2018-12-21T17:34:47.000Z
2022-02-09T19:07:44.000Z
package redis import ( "strconv" "github.com/joomcode/errorx" ) // AppendRequest appends request to byte slice as RESP request (ie as array of strings). // // It could fail if some request value is not nil, integer, float, string or byte slice. // In case of error it still returns modified buffer, but truncated to original size, it could be used save reallocation. // // Note: command could contain single space. In that case, it will be split and last part will be prepended to arguments. func AppendRequest(buf []byte, req Request) ([]byte, error) { oldSize := len(buf) space := -1 for i, c := range []byte(req.Cmd) { if c == ' ' { space = i break } } if space == -1 { buf = appendHead(buf, '*', len(req.Args)+1) buf = appendHead(buf, '$', len(req.Cmd)) buf = append(buf, req.Cmd...) buf = append(buf, '\r', '\n') } else { buf = appendHead(buf, '*', len(req.Args)+2) buf = appendHead(buf, '$', space) buf = append(buf, req.Cmd[:space]...) buf = append(buf, '\r', '\n') buf = appendHead(buf, '$', len(req.Cmd)-space-1) buf = append(buf, req.Cmd[space+1:]...) buf = append(buf, '\r', '\n') } for i, val := range req.Args { switch v := val.(type) { case string: buf = appendHead(buf, '$', len(v)) buf = append(buf, v...) case []byte: buf = appendHead(buf, '$', len(v)) buf = append(buf, v...) case int: buf = appendBulkInt(buf, int64(v)) case uint: buf = appendBulkUint(buf, uint64(v)) case int64: buf = appendBulkInt(buf, int64(v)) case uint64: buf = appendBulkUint(buf, uint64(v)) case int32: buf = appendBulkInt(buf, int64(v)) case uint32: buf = appendBulkUint(buf, uint64(v)) case int8: buf = appendBulkInt(buf, int64(v)) case uint8: buf = appendBulkUint(buf, uint64(v)) case int16: buf = appendBulkInt(buf, int64(v)) case uint16: buf = appendBulkUint(buf, uint64(v)) case bool: if v { buf = append(buf, "$1\r\n1"...) } else { buf = append(buf, "$1\r\n0"...) } case float32: str := strconv.FormatFloat(float64(v), 'f', -1, 32) buf = appendHead(buf, '$', len(str)) buf = append(buf, str...) 
case float64: str := strconv.FormatFloat(v, 'f', -1, 64) buf = appendHead(buf, '$', len(str)) buf = append(buf, str...) case nil: buf = append(buf, "$0\r\n"...) default: return buf[:oldSize], ErrArgumentType.NewWithNoMessage(). WithProperty(EKVal, val). WithProperty(EKArgPos, i). WithProperty(EKRequest, req) } buf = append(buf, '\r', '\n') } return buf, nil } func appendInt(b []byte, i int64) []byte { var u uint64 if i >= 0 && i <= 9 { b = append(b, byte(i)+'0') return b } if i > 0 { u = uint64(i) } else { b = append(b, '-') u = uint64(-i) } return appendUint(b, u) } func appendUint(b []byte, u uint64) []byte { if u <= 9 { b = append(b, byte(u)+'0') return b } digits := [20]byte{} p := 20 for u > 0 { n := u / 10 p-- digits[p] = byte(u-n*10) + '0' u = n } return append(b, digits[p:]...) } func appendHead(b []byte, t byte, i int) []byte { if i < 0 { panic("negative length header") } b = append(b, t) b = appendUint(b, uint64(i)) return append(b, '\r', '\n') } func appendBulkInt(b []byte, i int64) []byte { if i >= -99999999 && i <= 999999999 { b = append(b, '$', '0', '\r', '\n') } else { b = append(b, '$', '0', '0', '\r', '\n') } l := len(b) b = appendInt(b, i) li := byte(len(b) - l) if li < 10 { b[l-3] = li + '0' } else { d := li / 10 b[l-4] = d + '0' b[l-3] = li - (d * 10) + '0' } return b } func appendBulkUint(b []byte, i uint64) []byte { if i <= 999999999 { b = append(b, '$', '0', '\r', '\n') } else { b = append(b, '$', '0', '0', '\r', '\n') } l := len(b) b = appendUint(b, i) li := byte(len(b) - l) if li < 10 { b[l-3] = li + '0' } else { d := li / 10 b[l-4] = d + '0' b[l-3] = li - (d * 10) + '0' } return b } // ArgToString returns string representataion of an argument. // Used in cluster to determine cluster slot. 
// Have to be in sync with AppendRequest func ArgToString(arg interface{}) (string, bool) { var bufarr [20]byte var buf []byte switch v := arg.(type) { case string: return v, true case []byte: return string(v), true case int: buf = appendInt(bufarr[:0], int64(v)) case uint: buf = appendUint(bufarr[:0], uint64(v)) case int64: buf = appendInt(bufarr[:0], int64(v)) case uint64: buf = appendUint(bufarr[:0], uint64(v)) case int32: buf = appendInt(bufarr[:0], int64(v)) case uint32: buf = appendUint(bufarr[:0], uint64(v)) case int8: buf = appendInt(bufarr[:0], int64(v)) case uint8: buf = appendUint(bufarr[:0], uint64(v)) case int16: buf = appendInt(bufarr[:0], int64(v)) case uint16: buf = appendUint(bufarr[:0], uint64(v)) case bool: if v { return "1", true } return "0", true case float32: return strconv.FormatFloat(float64(v), 'f', -1, 32), true case float64: return strconv.FormatFloat(v, 'f', -1, 64), true case nil: return "", true default: return "", false } return string(buf), true } // CheckRequest checks requests command and arguments to be compatible with connector. func CheckRequest(req Request, singleThreaded bool) error { if err := ForbiddenCommand(req.Cmd, singleThreaded); err != nil { return err.(*errorx.Error).WithProperty(EKRequest, req) } for i, arg := range req.Args { switch val := arg.(type) { case string, []byte, int, uint, int64, uint64, int32, uint32, int8, uint8, int16, uint16, bool, float32, float64, nil: // ok default: return ErrArgumentType.NewWithNoMessage(). WithProperty(EKVal, val). WithProperty(EKArgPos, i). WithProperty(EKRequest, req) } } return nil }
24.097046
121
0.596393
3.328125
402ca108a9c3f098029d64faeab25fe9ff44caf8
2,763
py
Python
Mesh/System/Entity/Function/Powered.py
ys-warble/Mesh
115e7391d19ea09db3c627d8b8ed90b3e3bef9b5
[ "MIT" ]
null
null
null
Mesh/System/Entity/Function/Powered.py
ys-warble/Mesh
115e7391d19ea09db3c627d8b8ed90b3e3bef9b5
[ "MIT" ]
2
2019-02-25T00:10:15.000Z
2019-03-22T20:13:32.000Z
Mesh/System/Entity/Function/Powered.py
ys-warble/Mesh
115e7391d19ea09db3c627d8b8ed90b3e3bef9b5
[ "MIT" ]
null
null
null
from enum import Enum from Mesh.System.Entity.Channel.PowerWire import PowerWire from Mesh.System.Entity.Function import BaseFunction, Function from Mesh.System.Entity.Function.Tasked import TaskName, SystemTask from Mesh.util.TypeList import TypeList class PowerType(Enum): ELECTRIC = 101 class Power: def __init__(self, power_type): self.power_type = power_type class ElectricPower(Power): def __init__(self, voltage): super().__init__(PowerType.ELECTRIC) self.voltage = voltage def __eq__(self, other): return self.power_type == other.power_type and self.voltage == other.voltage def __str__(self): return '%s(voltage=%s)' % (type(self).__name__, self.voltage) class PowerInput: identifier = 'PowerInput' def __init__(self, parent, power=ElectricPower(voltage=0)): self.parent = parent self.power = power self.power_wires = TypeList(PowerWire) def set_power(self, power=ElectricPower(voltage=0)): self.power = power if self.parent.has_function(Function.TASKED): self.parent.send_task(SystemTask(name=TaskName.SET_POWER, value={'power': power})) else: if self.power in self.parent.get_function(Function.POWERED).input_power_ratings: self.parent.active = True else: self.parent.active = False def get_power(self): if len(self.power_wires) > 0: return self.power_wires[0].get_power() else: return self.power class PowerOutput: identifier = 'PowerOutput' def __init__(self, parent, power=ElectricPower(voltage=0)): self.parent = parent self.power = power self.power_wires = TypeList(PowerWire) def get_power(self): return self.power def set_power(self, power=ElectricPower(voltage=0)): self.power = power for wire in self.power_wires: wire.set_power(self.power) class Powered(BaseFunction): tasks = [ TaskName.SET_POWER, ] def __init__(self, entity): super().__init__(entity) self.power_inputs = TypeList(PowerInput) self.power_outputs = TypeList(PowerOutput) self.input_power_ratings = [] self.output_power_ratings = [] def eval(self): pass def init(self): pass def terminate(self): pass def 
get_power_input(self, index=0): if index < len(self.power_inputs): return self.power_inputs[index] else: raise IndexError def get_power_output(self, index=0): if index < len(self.power_outputs): return self.power_outputs[index] else: raise IndexError
26.314286
94
0.642056
3.28125
b0c17de3777c79065b9d41b281b353d09c785a63
1,215
kt
Kotlin
network/src/main/kotlin/rs/dusk/network/rs/codec/game/encode/message/ContainerItemsMessage.kt
dusk-rs/server-old
4af70ecb731d9ce292d086c81c21eda66bfaa040
[ "CC-BY-3.0" ]
52
2020-12-09T06:46:47.000Z
2022-03-19T19:53:53.000Z
network/src/main/kotlin/rs/dusk/network/rs/codec/game/encode/message/ContainerItemsMessage.kt
dusk-rs/server-old
4af70ecb731d9ce292d086c81c21eda66bfaa040
[ "CC-BY-3.0" ]
114
2020-12-10T23:02:59.000Z
2021-06-02T03:02:00.000Z
network/src/main/kotlin/rs/dusk/network/rs/codec/game/encode/message/ContainerItemsMessage.kt
dusk-rs/server-old
4af70ecb731d9ce292d086c81c21eda66bfaa040
[ "CC-BY-3.0" ]
9
2020-12-13T21:45:34.000Z
2022-01-26T18:23:59.000Z
package rs.dusk.network.rs.codec.game.encode.message import rs.dusk.core.network.model.message.Message /** * Sends a list of items to display on a interface item group component * @param key The id of the container * @param items List of the item ids to display * @param amounts List of the item amounts to display * @param secondary Optional to send to the primary or secondary container */ data class ContainerItemsMessage(val key: Int, val items: IntArray, val amounts: IntArray, val secondary: Boolean) : Message { override fun equals(other: Any?): Boolean { if (this === other) return true if (javaClass != other?.javaClass) return false other as ContainerItemsMessage if (key != other.key) return false if (!items.contentEquals(other.items)) return false if (!amounts.contentEquals(other.amounts)) return false if (secondary != other.secondary) return false return true } override fun hashCode(): Int { var result = key result = 31 * result + items.contentHashCode() result = 31 * result + amounts.contentHashCode() result = 31 * result + secondary.hashCode() return result } }
35.735294
126
0.678189
3.0625
1bef23af1d1e1885b3fbe2f77c7b1a3aa023161b
2,703
py
Python
tests/test_subscription.py
avito-tech/alert-autoconf
73d9270c6f9f0655cfc68ae3dac4e7406acf10ae
[ "MIT" ]
null
null
null
tests/test_subscription.py
avito-tech/alert-autoconf
73d9270c6f9f0655cfc68ae3dac4e7406acf10ae
[ "MIT" ]
null
null
null
tests/test_subscription.py
avito-tech/alert-autoconf
73d9270c6f9f0655cfc68ae3dac4e7406acf10ae
[ "MIT" ]
null
null
null
from unittest import TestCase

from alert_autoconf.moira import MoiraAlert


def _make_sub(**overrides):
    """Build a subscription dict with sane defaults, overridden by kwargs."""
    defaults = {
        'tags': [],
        'contacts': [],
        'escalations': [],
        'sched': {'startOffset': 0, 'endOffset': 1439, 'tzOffset': 0, 'days': []},
    }
    return {**defaults, **overrides}


def _make_esc(offset=10, contacts=None):
    """Build an escalation dict for the given offset and contact list."""
    return {'contacts': contacts or [], 'offset_in_minutes': offset}


class SubscriptionCmpTest(TestCase):
    """Behaviour of MoiraAlert._subscription_not_changed under various diffs."""

    def _not_changed(self, left, right):
        # Thin wrapper so every test reads as a single assertion.
        return MoiraAlert._subscription_not_changed(left, right)

    def test_two_empty(self):
        self.assertTrue(self._not_changed(_make_sub(), _make_sub()))

    def test_tags_changed(self):
        self.assertFalse(self._not_changed(_make_sub(tags=['t1']), _make_sub()))

    def test_tags_equal(self):
        self.assertTrue(self._not_changed(
            _make_sub(tags=['t1', 't2']),
            _make_sub(tags=['t1', 't2']),
        ))

    def test_contacts_equal(self):
        self.assertTrue(self._not_changed(
            _make_sub(contacts=['c1', 'c2']),
            _make_sub(contacts=['c1', 'c2']),
        ))

    def test_tags_and_contacts_equal(self):
        self.assertTrue(self._not_changed(
            _make_sub(contacts=['c1', 'c2'], tags=['t1']),
            _make_sub(contacts=['c1', 'c2'], tags=['t1']),
        ))

    def test_tags_and_contacts_not_equal(self):
        self.assertFalse(self._not_changed(
            _make_sub(contacts=['z1', 'c2'], tags=['t1']),
            _make_sub(contacts=['c1', 'c2'], tags=['t1']),
        ))

    def test_escalations_empty(self):
        self.assertTrue(self._not_changed(
            _make_sub(escalations=[_make_esc()]),
            _make_sub(escalations=[_make_esc()]),
        ))

    def test_escalations_diff_offsets(self):
        self.assertFalse(self._not_changed(
            _make_sub(escalations=[_make_esc(20)]),
            _make_sub(escalations=[_make_esc()]),
        ))

    def test_escalations_order(self):
        # Escalation ordering must not matter.
        self.assertTrue(self._not_changed(
            _make_sub(escalations=[_make_esc(20), _make_esc(10)]),
            _make_sub(escalations=[_make_esc(10), _make_esc(20)]),
        ))

    def test_escalations_contacts_order(self):
        # Contact ordering inside an escalation must not matter.
        self.assertTrue(self._not_changed(
            _make_sub(escalations=[_make_esc(contacts=['1', '2'])]),
            _make_sub(escalations=[_make_esc(contacts=['2', '1'])]),
        ))
33.37037
82
0.623381
3.015625
3d238d6a6e35dd9449ab92eabfd502e8e7e9c381
1,187
go
Go
config/config.go
patarra/jira-todo-sync
736a1bb8b5a76894138a8bd3b0f37a85d10871ae
[ "MIT" ]
null
null
null
config/config.go
patarra/jira-todo-sync
736a1bb8b5a76894138a8bd3b0f37a85d10871ae
[ "MIT" ]
null
null
null
config/config.go
patarra/jira-todo-sync
736a1bb8b5a76894138a8bd3b0f37a85d10871ae
[ "MIT" ]
null
null
null
package config import ( "errors" "fmt" "github.com/spf13/viper" "sync" ) type JiraConfig struct { Server string `mapstructure:"server"` User string `mapstructure:"user"` Password string `mapstructure:"password"` } type TodoistConfig struct { Token string `mapstructure:"token"` } type Config struct { Jira JiraConfig `mapstructure:"jira"` Todoist TodoistConfig `mapstructure:"todoist"` } var instance Config var once sync.Once var initialised = false func InitConfig(cfgFile string) (*Config, error) { var onceErr error = nil once.Do(func() { viper.SetConfigFile(cfgFile) viper.SetConfigType("toml") if err := viper.ReadInConfig(); err != nil { onceErr = errors.New(fmt.Sprintf("couldn't load config from %s: %s\n", cfgFile, err)) } if err := viper.Unmarshal(&instance); err != nil { onceErr = errors.New(fmt.Sprintf("couldn't read config: %s\n", err)) } initialised = true }) if onceErr == nil { return &instance, nil } else { return nil, onceErr } } func GetConfig() (*Config, error) { if !initialised { return nil, errors.New("config is not initialised yet, please call InitConfig(cfgFile)") } return &instance, nil }
20.824561
90
0.689132
3.046875
7f71cd62fefa40eebb9ea107e698b6ca746b7814
1,427
go
Go
internal/logger/logger.go
vsdmars/actor
7f5a8a9ca8801684a2008213435ecff4f142506c
[ "MIT" ]
null
null
null
internal/logger/logger.go
vsdmars/actor
7f5a8a9ca8801684a2008213435ecff4f142506c
[ "MIT" ]
4
2019-03-04T19:39:25.000Z
2019-04-03T02:28:27.000Z
internal/logger/logger.go
vsdmars/actor
7f5a8a9ca8801684a2008213435ecff4f142506c
[ "MIT" ]
null
null
null
package logger import ( "fmt" "os" "go.uber.org/zap" "go.uber.org/zap/zapcore" ) var logger serviceLogger var origLogger serviceLogger func init() { initLogger() } // LogSync sync logger output func LogSync() { // ignore logger Sync error logger.Sync() } // SetLogger sets caller provided zap logger // // reset to service's default logger by passing in nil pointer func SetLogger(l *zap.Logger) { if l != nil { logger.Logger = l logger.provided = true return } logger = origLogger } // SetLogLevel sets the service log level // // noop if caller provides it's own zap logger func SetLogLevel(level zapcore.Level) { if logger.provided { return } logger.config.Level.SetLevel(level) } func initLogger() { // default log level set to 'info' atom := zap.NewAtomicLevelAt(zap.InfoLevel) config := zap.Config{ Level: atom, Development: false, Sampling: &zap.SamplingConfig{ Initial: 100, Thereafter: 100, }, Encoding: "json", // console, json, toml EncoderConfig: zap.NewProductionEncoderConfig(), OutputPaths: []string{"stderr"}, ErrorOutputPaths: []string{"stderr"}, } mylogger, err := config.Build() if err != nil { fmt.Printf("Initialize zap logger error: %v\n", err) os.Exit(1) } logger = serviceLogger{mylogger, &config, false} origLogger = logger } // GetLog gets the current logger func GetLog() serviceLogger { return logger }
18.063291
62
0.683952
3.0625
857b8c2dbc0be5ef308797b18469cd500d2836fa
1,470
js
JavaScript
lib/setup.js
jkresner/meanair-scream
c5c179e83692f371c722178d4af62398d0eabe1c
[ "MIT" ]
3
2015-09-22T03:20:03.000Z
2016-03-02T12:10:44.000Z
lib/setup.js
jkresner/meanair-scream
c5c179e83692f371c722178d4af62398d0eabe1c
[ "MIT" ]
null
null
null
lib/setup.js
jkresner/meanair-scream
c5c179e83692f371c722178d4af62398d0eabe1c
[ "MIT" ]
null
null
null
module.exports = ({log}) => ({ data(done) { log.step('data:db') global.DB = require('./db')(() => { let seeder = require('./db.seed')(DB, OPTS) seeder.testToSeed(y => y ? seeder.restoreBSONData(done) : done()) }) let ISODate = global.ISODate = str => moment(str).toDate() let ID = global.ID = global.ObjectId = (DB||{}).ObjectId log.step('data:fixture') global.FIXTURE = require('./data.fixture') if (!DB) done() return { ISODate, ID } }, runner() { let Mocha = require('mocha') log.step('tests:init') return new Mocha(OPTS.config.mocha) .addFile(join(__dirname,'runner')) .run(status => { log.info('DONE', `${status==0?'No':'With'} errors\n`).flush() process.exit(status) }) }, app(done) { let start = new Date() log.step('app:init') global.APP = OPTS.App(function(e) { log.info('APP', `${e?'fail':'ready'} (${new Date()-start}ms)`).flush() log.step('tests:run') done(e) }) }, /* If unhandledPromiseRejection f => Error, p => Profile If failed test / assertion f => mocha.ctx, p => Error */ fail(f, p) { // console.log('in fail....', f, p) if (f.stack) log.error(f) else if ((p instanceof Error)) log.error(p) log.info('FAIL', `${log.step()} `.white + `${log.runner.scope.join(' > ')}`.spec) process.exit(1) // Exiting stops default mocha exit output } })
23.709677
85
0.542857
3.140625
f076aaf49a3d8fba6fb5ba17c6020bb113d2de01
5,417
py
Python
src/jsonengine/main.py
youhengzhou/json-crud-engine
8ee614af6dddbe1236a78a7debf71048f476a3ff
[ "MIT" ]
2
2021-07-02T04:33:36.000Z
2022-01-09T23:40:30.000Z
src/jsonengine/main.py
youhengzhou/json-crud-engine
8ee614af6dddbe1236a78a7debf71048f476a3ff
[ "MIT" ]
null
null
null
src/jsonengine/main.py
youhengzhou/json-crud-engine
8ee614af6dddbe1236a78a7debf71048f476a3ff
[ "MIT" ]
null
null
null
# JSON engine 21 9 16
# database
#   eng.json
# engine
#   eng.py
#
# Simple file-backed JSON key/value store: each "database" is a directory
# holding one eng.json document. NOTE: the path scheme uses literal
# backslashes, so on-disk layout is Windows-oriented.

import os
import json

# Root directory of all databases, fixed at import time from the cwd.
path = os.getcwd() + '\\json_engine_database\\'
# Last sub-database suffix selected via set_path_string (module state).
path_string = ''


def set_path(string):
    """Re-root the engine at cwd + `string` (e.g. '\\\\my_db\\\\')."""
    global path
    path = os.getcwd() + string


def dictionary_kv(dictionary, key, value):
    """Set `key` to `value` in `dictionary` and return it (in place)."""
    dictionary[key] = value
    return dictionary


def set_path_string(args, create_flag):
    """Resolve the optional sub-database name in `args` to a path suffix.

    Returns the suffix, or False when the sub-database does not exist and
    create_flag is False. Creates the directory when create_flag is True.
    """
    global path_string
    if args:
        path_string = str(args[0]) + '\\'
    if not os.path.exists(path + path_string):
        if create_flag:
            os.makedirs(path + path_string)
        else:
            return False
    return path_string


def _resolve(args):
    """Suffix for delete/display helpers without touching module state.

    Fixed: the originals left `path_string` unbound (NameError) when
    called with no positional args; default to the root database instead.
    """
    return str(args[0]) + '\\' if args else ''


def create(dictionary, *args):
    """Write `dictionary` as the whole document, creating the db if needed."""
    suffix = set_path_string(args, True)
    with open(path + suffix + 'eng.json', 'w') as outfile:
        json.dump(dictionary, outfile, indent=4)


def retrieve(*args):
    """Return the whole document, or False when the db does not exist."""
    suffix = set_path_string(args, False)
    if suffix == False:
        return False
    with open(path + suffix + 'eng.json', 'r') as f:
        return json.load(f)


def retrieve_k(key, *args):
    """Return document[key], or False when the db or key is missing."""
    suffix = set_path_string(args, False)
    if suffix == False:
        return False
    # Fixed: load once instead of re-reading the exhausted file handle.
    with open(path + suffix + 'eng.json', 'r') as f:
        data = json.load(f)
    if key in data:
        return data[key]
    return False


def update(dictionary, *args):
    """Replace the whole document; False when the db does not exist."""
    suffix = set_path_string(args, False)
    if suffix == False:
        return False
    with open(path + suffix + 'eng.json', 'w') as outfile:
        json.dump(dictionary, outfile, indent=4)
    return True


def update_kv(key, value, *args):
    """Replace the document with the single pair {key: value}."""
    suffix = set_path_string(args, False)
    if suffix == False:
        return False
    with open(path + suffix + 'eng.json', 'w') as outfile:
        json.dump({key: value}, outfile, indent=4)
    return True


def patch(dictionary, *args):
    """Merge `dictionary` into the document (existing keys overwritten)."""
    suffix = set_path_string(args, False)
    if suffix == False:
        return False
    target = path + suffix + 'eng.json'
    with open(target, 'r') as f:
        data = json.load(f)
    data.update(dictionary)
    with open(target, 'w') as outfile:
        json.dump(data, outfile, indent=4)
    return True


def patch_kv(key, value, *args):
    """Merge the single pair {key: value} into the document."""
    suffix = set_path_string(args, False)
    if suffix == False:
        return False
    target = path + suffix + 'eng.json'
    with open(target, 'r') as f:
        data = json.load(f)
    data.update({key: value})
    with open(target, 'w') as outfile:
        json.dump(data, outfile, indent=4)
    return True


def delete(*args):
    """Remove the db file and its directory; False when absent."""
    suffix = _resolve(args)
    target = path + suffix + 'eng.json'
    if not os.path.exists(target):
        return False
    os.remove(target)
    os.rmdir(path + suffix)
    return True


def delete_k(key, *args):
    """Remove `key` from the document; False when db or key is missing."""
    suffix = _resolve(args)
    target = path + suffix + 'eng.json'
    if not os.path.exists(target):
        return False
    # Fixed: the original called json.load twice on the same handle; the
    # second call hit EOF and raised JSONDecodeError.
    with open(target, 'r') as f:
        data = json.load(f)
    if key not in data:
        return False
    data.pop(key)
    with open(target, 'w') as outfile:
        json.dump(data, outfile, indent=4)
    return True


def display(*args):
    """Print the raw document; False when the db file is missing."""
    suffix = _resolve(args)
    target = path + suffix + 'eng.json'
    if os.path.exists(target):
        with open(target, 'r') as f:
            print(json.load(f))
        return True
    print('The selected file does not exist')
    return False


def display_key(key, *args):
    """Print one key/value pair.

    Returns True on success, False when the file is missing, and (as in
    the original) None when the file exists but lacks the key.
    """
    suffix = _resolve(args)
    target = path + suffix + 'eng.json'
    if not os.path.exists(target):
        print('The selected file does not exist')
        return False
    # Fixed: single load (was a double json.load on one handle).
    with open(target, 'r') as f:
        data = json.load(f)
    if key in data:
        print(key + ' ' + str(data[key]))
        return True


def display_nkv(key, *args):
    """Print the document with `key` removed (file left unchanged)."""
    suffix = _resolve(args)
    target = path + suffix + 'eng.json'
    if not os.path.exists(target):
        print('The selected file does not exist')
        return False
    with open(target, 'r') as f:
        data = json.load(f)
    if key in data:
        data.pop(key, 'key not found')
        print(data)
        return True


def display_ind(*args):
    """Pretty-print the document with 4-space indentation."""
    suffix = _resolve(args)
    target = path + suffix + 'eng.json'
    if os.path.exists(target):
        with open(target, 'r') as f:
            print(json.dumps(json.load(f), indent=4))
    else:
        print('The selected file does not exist')


def display_ind_nkv(key, *args):
    """Pretty-print the document with `key` removed (file unchanged)."""
    suffix = _resolve(args)
    target = path + suffix + 'eng.json'
    if os.path.exists(target):
        with open(target, 'r') as f:
            data = json.load(f)
        data.pop(key, 'key not found')
        print(json.dumps(data, indent=4))
    else:
        print('The selected file does not exist')
31.132184
75
0.568027
3.1875
19d428aad36966348419dd413eabc7e98a9aa51d
13,103
lua
Lua
Plugins/UnrealLua/LuaSource/luahotupdate.lua
asomfai/unreal.lua
61a7f3fd2e967ffd970c9b2ac72f12aa2af34bd8
[ "MIT" ]
311
2017-01-31T04:24:13.000Z
2022-03-02T10:12:58.000Z
Plugins/UnrealLua/LuaSource/luahotupdate.lua
asomfai/unreal.lua
61a7f3fd2e967ffd970c9b2ac72f12aa2af34bd8
[ "MIT" ]
39
2017-02-14T09:33:02.000Z
2020-02-14T07:45:33.000Z
Plugins/UnrealLua/LuaSource/luahotupdate.lua
asomfai/unreal.lua
61a7f3fd2e967ffd970c9b2ac72f12aa2af34bd8
[ "MIT" ]
98
2017-01-30T17:49:34.000Z
2022-03-15T08:16:34.000Z
-- Lua hot-update engine: reloads changed .lua files in a sandboxed fake
-- environment, then patches live functions/upvalues in place.

-- Lua 5.3 removed getfenv/setfenv/loadstring; emulate them via the _ENV
-- upvalue so the 5.1-style code below keeps working.
if _VERSION == "Lua 5.3" then
    function getfenv(f)
        if type(f) == "function" then
            local name, value = debug.getupvalue(f, 1)
            if name == "_ENV" then
                return value
            else
                return _ENV
            end
        end
    end
    function setfenv(f, Env)
        if type(f) == "function" then
            local name, value = debug.getupvalue(f, 1)
            if name == "_ENV" then
                debug.setupvalue(f, 1, Env)
            end
        end
    end
    debug = debug or {}
    debug.setfenv = setfenv
    function loadstring( ... )
        return load(...)
    end
end

local HU = {}

-- Forward failure / debug notifications to caller-installed hooks.
function HU.FailNotify(...)
    if HU.NotifyFunc then HU.NotifyFunc(...) end
end

function HU.DebugNofity(...)
    if HU.DebugNofityFunc then HU.DebugNofityFunc(...) end
end

-- Windows-only: resolve the working directory via `echo %cd%`.
local function GetWorkingDir()
    if HU.WorkingDir == nil then
        local p = io.popen("echo %cd%")
        if p then
            HU.WorkingDir = p:read("*l").."\\"
            p:close()
        end
    end
    return HU.WorkingDir
end

-- Normalize a path to an absolute, backslash-separated form with '.' and
-- '..' segments collapsed.
local function Normalize(path)
    path = path:gsub("/","\\")
    if path:find(":") == nil then
        path = GetWorkingDir()..path
    end
    local pathLen = #path
    if path:sub(pathLen, pathLen) == "\\" then
        path = path:sub(1, pathLen - 1)
    end
    local parts = { }
    for w in path:gmatch("[^\\]+") do
        if w == ".." and #parts ~=0 then
            table.remove(parts)
        elseif w ~= "." then
            table.insert(parts, w)
        end
    end
    return table.concat(parts, "\\")
end

-- Scan RootPath directories (via Windows `dir`) and build a map of
-- file name -> { {SysPath=<absolute path>, LuaPath=<dotted module path>} }.
function HU.InitFileMap(RootPath)
    local TheMap = {}
    for _, rootpath in pairs(RootPath) do
        rootpath = Normalize(rootpath)
        local file = io.popen("dir /S/B /A:A \""..rootpath.."\"")
        io.input(file)
        for line in io.lines() do
            local FileName = string.match(line,".*\\(.*)%.lua")
            if FileName ~= nil then
                if TheMap[FileName] == nil then
                    TheMap[FileName] = {}
                end
                local luapath = string.sub(line, #rootpath+2, #line-4)
                luapath = string.gsub(luapath, "\\", ".")
                -- NOTE(review): SysPath is an undefined global (nil) here, so
                -- this mapping is always cleared; `line` was likely intended.
                HU.LuaPathToSysPath[luapath] = SysPath
                table.insert(TheMap[FileName], {SysPath = line, LuaPath = luapath})
            end
        end
        file:close()
    end
    return TheMap
end

-- Build the sandbox environment: every unknown global read yields a fresh
-- fake table (recorded via HU.MetaMap / HU.RequireMap) so reloaded chunks
-- can run without touching real state.
function HU.InitFakeTable()
    local meta = {}
    HU.Meta = meta
    -- Defined before the local setmetatable below, so this closes over the
    -- REAL global setmetatable on purpose.
    local function FakeT() return setmetatable({}, meta) end
    local function EmptyFunc() end
    local function pairs() return EmptyFunc end
    local function setmetatable(t, metaT)
        HU.MetaMap[t] = metaT
        return t
    end
    local function getmetatable(t, metaT)
        return setmetatable({}, t)
    end
    local function require(LuaPath)
        if not HU.RequireMap[LuaPath] then
            local FakeTable = FakeT()
            HU.RequireMap[LuaPath] = FakeTable
        end
        return HU.RequireMap[LuaPath]
    end
    function meta.__index(t, k)
        if k == "setmetatable" then
            return setmetatable
        elseif k == "pairs" or k == "ipairs" then
            return pairs
        elseif k == "next" then
            return EmptyFunc
        elseif k == "require" then
            return require
        elseif HU.CallOriginFunctions and HU.CallOriginFunctions[k] then
            return _G[k]
        else
            local FakeTable = FakeT()
            rawset(t, k, FakeTable)
            return FakeTable
        end
    end
    function meta.__newindex(t, k, v) rawset(t, k, v) end
    function meta.__call() return FakeT(), FakeT(), FakeT() end
    -- Every operator on a fake table yields more fake tables.
    function meta.__add() return meta.__call() end
    function meta.__sub() return meta.__call() end
    function meta.__mul() return meta.__call() end
    function meta.__div() return meta.__call() end
    function meta.__mod() return meta.__call() end
    function meta.__pow() return meta.__call() end
    function meta.__unm() return meta.__call() end
    function meta.__concat() return meta.__call() end
    function meta.__eq() return meta.__call() end
    function meta.__lt() return meta.__call() end
    function meta.__le() return meta.__call() end
    function meta.__len() return meta.__call() end
    return FakeT
end

-- Objects that must never be patched or traversed during hot update.
function HU.InitProtection()
    HU.Protection = {}
    HU.Protection[setmetatable] = true
    HU.Protection[pairs] = true
    HU.Protection[ipairs] = true
    HU.Protection[next] = true
    HU.Protection[require] = true
    HU.Protection[HU] = true
    HU.Protection[HU.Meta] = true
    HU.Protection[math] = true
    HU.Protection[string] = true
    HU.Protection[table] = true
end

-- Re-read the update-list module and build HU.HUMap (LuaPath -> SysPath).
-- "_ALL_" selects every known file; unknown names trigger a rescan with a
-- 3-attempt backoff per file.
function HU.AddFileFromHUList()
    package.loaded[HU.UpdateListFile] = nil
    local FileList = require (HU.UpdateListFile)
    HU.ALL = false
    HU.HUMap = {}
    for _, file in pairs(FileList) do
        if file == "_ALL_" then
            HU.ALL = true
            for k, v in pairs(HU.FileMap) do
                for _, path in pairs(v) do
                    HU.HUMap[path.LuaPath] = path.SysPath
                end
            end
            return
        end
        if not HU.FileMap[file] then
            if HU.TryReloadFileCount[file] == nil or HU.TryReloadFileCount[file] == 0 then
                HU.FileMap = HU.InitFileMap(HU.RootPath)
                if not HU.FileMap[file] then
                    HU.FailNotify("HotUpdate can't not find "..file)
                    HU.TryReloadFileCount[file] = 3
                end
            else
                HU.TryReloadFileCount[file] = HU.TryReloadFileCount[file] - 1
            end
        end
        if HU.FileMap[file] then
            for _, path in pairs(HU.FileMap[file]) do
                HU.HUMap[path.LuaPath] = path.SysPath
            end
        end
    end
end

function HU.ErrorHandle(e)
    HU.FailNotify("HotUpdate Error\n"..tostring(e))
    HU.ErrorHappen = true
end

-- Default file loader (replaceable via SetFileLoader).
function HU.LoadStringFunc(SysPath)
    io.input(SysPath)
    local CodeStr = io.read("*all")
    io.input():close()
    return CodeStr
end

-- Compile and run the changed file inside the fake environment.
-- Returns true, <chunk result> when the file changed and ran cleanly;
-- false/nil otherwise.
function HU.BuildNewCode(SysPath, LuaPath)
    local NewCode = HU.LoadStringFunc(SysPath)
    if HU.ALL and HU.OldCode[SysPath] == nil then
        HU.OldCode[SysPath] = NewCode
        return
    end
    if HU.OldCode[SysPath] == NewCode then
        return false
    end
    HU.DebugNofity(SysPath)
    local chunk = "--[["..LuaPath.."]] "
    chunk = chunk..NewCode
    local NewFunction = loadstring(chunk)
    if not NewFunction then
        HU.FailNotify(SysPath.." has syntax error.")
        collectgarbage("collect")
        return false
    else
        HU.FakeENV = HU.FakeT()
        HU.MetaMap = {}
        HU.RequireMap = {}
        setfenv(NewFunction, HU.FakeENV)
        local NewObject
        HU.ErrorHappen = false
        xpcall(function () NewObject = NewFunction() end, HU.ErrorHandle)
        if not HU.ErrorHappen then
            HU.OldCode[SysPath] = NewCode
            return true, NewObject
        else
            collectgarbage("collect")
            return false
        end
    end
end

-- Walk _G and the debug registry, swapping every reference (values, keys,
-- upvalues) from an old function to its replacement in HU.ChangedFuncList.
function HU.Travel_G()
    local visited = {}
    visited[HU] = true
    local function f(t)
        if (type(t) ~= "function" and type(t) ~= "table") or visited[t] or HU.Protection[t] then return end
        visited[t] = true
        if type(t) == "function" then
            for i = 1, math.huge do
                local name, value = debug.getupvalue(t, i)
                if not name then break end
                if type(value) == "function" then
                    for _, funcs in ipairs(HU.ChangedFuncList) do
                        if value == funcs[1] then
                            debug.setupvalue(t, i, funcs[2])
                        end
                    end
                end
                f(value)
            end
        elseif type(t) == "table" then
            f(debug.getmetatable(t))
            local changeIndexs = {}
            for k,v in pairs(t) do
                f(k); f(v);
                if type(v) == "function" then
                    for _, funcs in ipairs(HU.ChangedFuncList) do
                        if v == funcs[1] then t[k] = funcs[2] end
                    end
                end
                if type(k) == "function" then
                    for index, funcs in ipairs(HU.ChangedFuncList) do
                        if k == funcs[1] then
                            changeIndexs[#changeIndexs+1] = index
                        end
                    end
                end
            end
            -- Keys can't be replaced during iteration; do them afterwards.
            for _, index in ipairs(changeIndexs) do
                local funcs = HU.ChangedFuncList[index]
                t[funcs[2]] = t[funcs[1]]
                t[funcs[1]] = nil
            end
        end
    end
    f(_G)
    local registryTable = debug.getregistry()
    f(registryTable)
    for _, funcs in ipairs(HU.ChangedFuncList) do
        if funcs[3] == "HUDebug" then funcs[4]:HUDebug() end
    end
end

-- Dispatch old/new module values to the table or function updater.
function HU.ReplaceOld(OldObject, NewObject, LuaPath, From, Deepth)
    if type(OldObject) == type(NewObject) then
        if type(NewObject) == "table" then
            HU.UpdateAllFunction(OldObject, NewObject, LuaPath, From, "")
        elseif type(NewObject) == "function" then
            HU.UpdateOneFunction(OldObject, NewObject, LuaPath, nil, From, "")
        end
    end
end

-- Hot-update a single loaded module; unloaded files only get their code
-- snapshot recorded for future diffs.
function HU.HotUpdateCode(LuaPath, SysPath)
    local OldObject = package.loaded[LuaPath]
    if OldObject ~= nil then
        HU.VisitedSig = {}
        HU.ChangedFuncList = {}
        local Success, NewObject = HU.BuildNewCode(SysPath, LuaPath)
        if Success then
            HU.ReplaceOld(OldObject, NewObject, LuaPath, "Main", "")
            for LuaPath, NewObject in pairs(HU.RequireMap) do
                local OldObject = package.loaded[LuaPath]
                HU.ReplaceOld(OldObject, NewObject, LuaPath, "Main_require", "")
            end
            setmetatable(HU.FakeENV, nil)
            HU.UpdateAllFunction(HU.ENV, HU.FakeENV, " ENV ", "Main", "")
            if #HU.ChangedFuncList > 0 then
                HU.Travel_G()
            end
            collectgarbage("collect")
        end
    elseif HU.OldCode[SysPath] == nil then
        HU.OldCode[SysPath] = HU.LoadStringFunc(SysPath)
    end
end

-- Recursively re-point environments of new functions/tables at HU.ENV.
function HU.ResetENV(object, name, From, Deepth)
    local visited = {}
    local function f(object, name)
        if not object or visited[object] then return end
        visited[object] = true
        if type(object) == "function" then
            HU.DebugNofity(Deepth.."HU.ResetENV", name, " from:"..From)
            xpcall(function () setfenv(object, HU.ENV) end, HU.FailNotify)
        elseif type(object) == "table" then
            HU.DebugNofity(Deepth.."HU.ResetENV", name, " from:"..From)
            for k, v in pairs(object) do
                f(k, tostring(k).."__key", " HU.ResetENV ", Deepth.." " )
                f(v, tostring(k), " HU.ResetENV ", Deepth.." ")
            end
        end
    end
    f(object, name)
end

-- Carry old upvalue state over into the replacement function; upvalues
-- only present in the new function get their environment reset instead.
function HU.UpdateUpvalue(OldFunction, NewFunction, Name, From, Deepth)
    HU.DebugNofity(Deepth.."HU.UpdateUpvalue", Name, " from:"..From)
    local OldUpvalueMap = {}
    local OldExistName = {}
    for i = 1, math.huge do
        local name, value = debug.getupvalue(OldFunction, i)
        if not name then break end
        OldUpvalueMap[name] = value
        OldExistName[name] = true
    end
    for i = 1, math.huge do
        local name, value = debug.getupvalue(NewFunction, i)
        if not name then break end
        if OldExistName[name] then
            local OldValue = OldUpvalueMap[name]
            if type(OldValue) ~= type(value) then
                debug.setupvalue(NewFunction, i, OldValue)
            elseif type(OldValue) == "function" then
                HU.UpdateOneFunction(OldValue, value, name, nil, "HU.UpdateUpvalue", Deepth.." ")
            elseif type(OldValue) == "table" then
                HU.UpdateAllFunction(OldValue, value, name, "HU.UpdateUpvalue", Deepth.." ")
                debug.setupvalue(NewFunction, i, OldValue)
            else
                debug.setupvalue(NewFunction, i, OldValue)
            end
        else
            HU.ResetENV(value, name, "HU.UpdateUpvalue", Deepth.." ")
        end
    end
end

-- Prepare one replacement function (env + upvalues) and queue the swap
-- in HU.ChangedFuncList for Travel_G to apply.
function HU.UpdateOneFunction(OldObject, NewObject, FuncName, OldTable, From, Deepth)
    if HU.Protection[OldObject] or HU.Protection[NewObject] then return end
    if OldObject == NewObject then return end
    local signature = tostring(OldObject)..tostring(NewObject)
    if HU.VisitedSig[signature] then return end
    HU.VisitedSig[signature] = true
    HU.DebugNofity(Deepth.."HU.UpdateOneFunction "..FuncName.." from:"..From)
    if pcall(debug.setfenv, NewObject, getfenv(OldObject)) then
        HU.UpdateUpvalue(OldObject, NewObject, FuncName, "HU.UpdateOneFunction", Deepth.." ")
        HU.ChangedFuncList[#HU.ChangedFuncList + 1] = {OldObject, NewObject, FuncName, OldTable}
    end
end

-- Recursively merge a reloaded table into its live counterpart: matching
-- functions are hot-swapped, nested tables recursed, brand-new functions
-- adopted; metatables recorded in the sandbox are merged too.
function HU.UpdateAllFunction(OldTable, NewTable, Name, From, Deepth)
    if HU.Protection[OldTable] or HU.Protection[NewTable] then return end
    local IsSame = getmetatable(OldTable) == getmetatable(NewTable)
    local IsSame = IsSame and OldTable == NewTable
    if IsSame == true then return end
    local signature = tostring(OldTable)..tostring(NewTable)
    if HU.VisitedSig[signature] then return end
    HU.VisitedSig[signature] = true
    HU.DebugNofity(Deepth.."HU.UpdateAllFunction "..Name.." from:"..From)
    for ElementName, Element in pairs(NewTable) do
        local OldElement = OldTable[ElementName]
        if type(Element) == type(OldElement) then
            if type(Element) == "function" then
                HU.UpdateOneFunction(OldElement, Element, ElementName, OldTable, "HU.UpdateAllFunction", Deepth.." ")
            elseif type(Element) == "table" then
                HU.UpdateAllFunction(OldElement, Element, ElementName, "HU.UpdateAllFunction", Deepth.." ")
            end
        elseif OldElement == nil and type(Element) == "function" then
            if pcall(setfenv, Element, HU.ENV) then
                OldTable[ElementName] = Element
            end
        end
    end
    local OldMeta = debug.getmetatable(OldTable)
    local NewMeta = HU.MetaMap[NewTable]
    if type(OldMeta) == "table" and type(NewMeta) == "table" then
        HU.UpdateAllFunction(OldMeta, NewMeta, Name.."'s Meta", "HU.UpdateAllFunction", Deepth.." ")
    end
end

-- Override directory scanning / file reading (e.g. for non-Windows hosts).
function HU.SetFileLoader(InitFileMapFunc, LoadStringFunc)
    HU.InitFileMap = InitFileMapFunc
    HU.LoadStringFunc = LoadStringFunc
end

-- One-time setup: record configuration, scan root paths, build the fake
-- environment factory and protection set.
function HU.Init(UpdateListFile, RootPath, FailNotify, ENV, CallOriginFunctions)
    HU.UpdateListFile = UpdateListFile
    HU.HUMap = {}
    HU.FileMap = {}
    HU.NotifyFunc = FailNotify
    HU.OldCode = {}
    HU.ChangedFuncList = {}
    HU.VisitedSig = {}
    HU.FakeENV = nil
    HU.ENV = ENV or _G
    HU.LuaPathToSysPath = {}
    HU.RootPath = RootPath
    HU.FileMap = HU.InitFileMap(RootPath)
    HU.FakeT = HU.InitFakeTable()
    HU.CallOriginFunctions = CallOriginFunctions
    HU.InitProtection()
    HU.ALL = false
    HU.TryReloadFileCount = {}
end

-- Entry point: refresh the update list and hot-swap every listed file.
function HU.Update()
    HU.AddFileFromHUList()
    for LuaPath, SysPath in pairs(HU.HUMap) do
        HU.HotUpdateCode(LuaPath, SysPath)
    end
end

return HU
28.861233
108
0.6845
3.140625
bc5bf135a68e1a20f8283e82b82ae3e27ca64d3a
4,800
asm
Assembly
source/tokeniser/tokenise/tokenise.asm
paulscottrobson/6502-basic
d4c360041bfa49427a506465e58bb0ef94beaa44
[ "MIT" ]
3
2021-09-30T19:34:11.000Z
2021-10-31T06:55:50.000Z
source/tokeniser/tokenise/tokenise.asm
paulscottrobson/6502-Basic
d4c360041bfa49427a506465e58bb0ef94beaa44
[ "MIT" ]
null
null
null
source/tokeniser/tokenise/tokenise.asm
paulscottrobson/6502-Basic
d4c360041bfa49427a506465e58bb0ef94beaa44
[ "MIT" ]
1
2021-12-07T21:58:44.000Z
2021-12-07T21:58:44.000Z
; ************************************************************************************************
; ************************************************************************************************
;
;		Name:		tokenise.asm
;		Purpose:	Tokenise a string
;		Created:	8th March 2021
;		Reviewed: 	16th March 2021
;		Author:		Paul Robson ([email protected])
;
; ************************************************************************************************
; ************************************************************************************************

		.section storage
tokenHeader:								; bytes (all zero) to create a fake 'program line'
		.fill 	3
tokenBuffer: 								; token buffer.
		.fill 	256
tokenBufferIndex:							; count of characters in buffer.
		.fill 	1
		.send storage

		.section code

; ************************************************************************************************
;
;		Tokenise string at (codePtr) into tokenising buffer
;		A != 0 if tokenising successful.
;
; ************************************************************************************************

Tokenise:	;; <tokenise>
		jsr 	TokeniseMakeASCIIZ			; convert to ASCIIZ string.
TokeniseASCIIZ:	;; <tokenisez>
		jsr 	TokeniseFixCase				; remove controls and lower case outside quotes.
		lda 	#0							; reset the token buffer index
		sta 	tokenBufferIndex
		tay									; start pointer (Y = 0)
		lda 	#$80						; empty token buffer ($80 ends it)
		sta 	tokenBuffer
		;
		;		Main tokenisation loop: classify the next character and
		;		dispatch to the matching sub-tokeniser.
		;
_TokLoop:
		lda 	(codePtr),y					; get next character
		beq 	_TokExit					; if zero, then exit.
		iny									; skip over spaces.
		cmp 	#" "
		beq 	_TokLoop
		dey									; point back to character.
		cmp 	#"&"						; Hexadecimal constant.
		beq 	_TokHexConst
		cmp 	#'"'						; Quoted String
		beq 	_TokQString
		cmp 	#"Z"+1						; > 'Z' is punctuation
		bcs 	_TokPunctuation
		cmp 	#"A"						; A..Z identifier
		bcs 	_TokIdentifier
		cmp 	#"9"+1
		bcs 	_TokPunctuation				; between 9 and A exclusive, punctuation
		cmp 	#"0"
		bcc 	_TokPunctuation				; < 0, punctuation.
		lda 	#10							; 0..9 constant in base 10.
		bne 	_TokConst					; (A is never 0 here, so always taken)
		;
		;		Handle hexadecimal constant.
		;
_TokHexConst:
		iny									; consume token.
		lda 	#TKW_AMP					; Write ampersand token out
		jsr 	TokenWrite
		lda 	#16							; fall through: constant in base 16.
		;
		;		Handle constant in base A
		;
_TokConst:
		jsr 	TokeniseInteger				; sub-tokenisers return carry set on success
		bcs 	_TokLoop
		bcc 	_TokFail
		;
		;		Quoted string
		;
_TokQString:
		jsr 	TokeniseString
		bcs 	_TokLoop
		bcc 	_TokFail
		;
		;		Punctuation token.
		;
_TokPunctuation:
		jsr 	TokenisePunctuation
		bcs 	_TokLoop
		bcc 	_TokFail
		;
		;		Identifier or text token
		;
_TokIdentifier:
		jsr 	TokeniseIdentifier
		bcs 	_TokLoop
		bcc 	_TokFail
_TokExit:
		lda 	#1							; A != 0 : success
		rts
_TokFail:
		lda 	#0							; A == 0 : failure
		rts

; ************************************************************************************************
;
;		Write A to tokenise buffer
;
; ************************************************************************************************

TokenWrite:
		sta 	tempShort					; save XA
		pha
		.pshx
		lda 	tempShort
		ldx 	tokenBufferIndex			; get index
		sta 	tokenBuffer,x				; write byte to buffer
		lda 	#TOK_EOL					; pre-emptively write EOL marker after
		sta 	tokenBuffer+1,x
		inc 	tokenBufferIndex			; bump index
		.pulx
		pla
		rts

; ************************************************************************************************
;
;		Make string at (codePtr) ASCIIZ
;
; ************************************************************************************************

TokeniseMakeASCIIZ:
		ldy 	#0							; get length of string.
		lda 	(codePtr),y
		tay
		iny									; +1, the NULL goes here.
		lda 	#0
		sta 	(codePtr),y					; write the trailing NULL.
		inc 	codePtr						; bump the pointer (past the length byte).
		bne 	_TMKAExit
		inc 	codePtr+1
_TMKAExit:
		rts

; ************************************************************************************************
;
;		Make upper case and remove controls for everything outside quotes.
;
; ************************************************************************************************

TokeniseFixCase:
		ldy 	#0							; position in buffer.
		ldx 	#1							; bit 0 of this is 'in quotes'
_TFCFlipQ:									; (first pass falls through, flipping X to 0)
		txa
		eor 	#1
		tax
_TFCLoop:
		lda 	(codePtr),y					; get character
		beq 	_TFCExit					; if zero exit.
		cmp 	#32							; if control
		bcc 	_TFCControl
		iny									; preconsume
		cmp 	#'"'
		beq 	_TFCFlipQ
		cmp 	#"a"						; check if L/C
		bcc 	_TFCLoop
		cmp 	#"z"+1
		bcs 	_TFCLoop
		;
		cpx 	#0							; in quotes, if so, leave alone.
		bne 	_TFCLoop
		dey
		eor 	#"A"^"a"					; make U/C
_TFCWrite:
		sta 	(codePtr),y
		iny
		jmp 	_TFCLoop
		;
_TFCControl:
		lda 	#" "						; controls become spaces.
		bne 	_TFCWrite					; (A is never 0 here, so always taken)
_TFCExit:
		rts

		.send code
25.13089
98
0.4625
3.21875
40af8c8f3236438b38a2ec95b565b4efbf998b1b
4,709
py
Python
dsbox/utils/utils.py
Pandinosaurus/dsbox
aea56049025ed7e6e66427f8636286f8be1b6e03
[ "Apache-2.0" ]
16
2020-05-11T09:10:15.000Z
2021-04-13T08:43:28.000Z
dsbox/utils/utils.py
Pandinosaurus/dsbox
aea56049025ed7e6e66427f8636286f8be1b6e03
[ "Apache-2.0" ]
1
2020-12-03T20:02:32.000Z
2020-12-03T20:02:32.000Z
dsbox/utils/utils.py
Pandinosaurus/dsbox
aea56049025ed7e6e66427f8636286f8be1b6e03
[ "Apache-2.0" ]
1
2020-05-11T17:22:20.000Z
2020-05-11T17:22:20.000Z
import gzip
import pickle
import zlib

import networkx as nx
import matplotlib.pyplot as plt
from smart_open import open

"""
Some util functions used to navigate into Airflow DAGs.
"""


def breadth_first_search_task_list(task_root, task_list=None, mode='upstream'):
    """Collect the tasks reachable from ``task_root`` in breadth-first order.

    :param task_root: Airflow task used as the BFS starting point.
    :param task_list: tasks already visited; neighbours found in it are not
        re-queued. Defaults to an empty list. (``None`` default replaces the
        previous mutable-default ``task_list=[]``, which was shared between
        calls.)
    :param mode: 'upstream' to walk ``task.upstream_list``, anything else to
        walk ``task.downstream_list``.
    :return: list of newly visited tasks, in discovery order.
    """
    if task_list is None:
        task_list = []
    sub_task_list = []
    queue = [task_root]
    while len(queue) > 0:
        task = queue.pop(0)
        sub_task_list.append(task)
        if mode == 'upstream':
            next_tasks = task.upstream_list
        else:
            next_tasks = task.downstream_list
        for next_task in next_tasks:
            if next_task not in queue and next_task not in task_list and next_task not in sub_task_list:
                queue.append(next_task)
    return sub_task_list


def breadth_first_search_shell_list(task_roots):
    """Group tasks into BFS 'shells': the roots, then each successive layer
    of downstream tasks.

    :param task_roots: list of root tasks (first shell).
    :return: list of shells, each a list of tasks.
    """
    shell_task_list = [task_roots]
    done_tasks = set()
    queue = task_roots
    while len(queue) > 0:
        tasks = queue
        next_tasks = []
        for task in tasks:
            for next_task in task.downstream_list:
                if next_task not in done_tasks:
                    next_tasks.append(next_task)
                    done_tasks.add(next_task)
        if len(next_tasks) > 0:
            shell_task_list.append(next_tasks)
        queue = next_tasks
    return shell_task_list


def get_dag_roots(dag):
    """Return the tasks of ``dag`` that have no upstream dependency."""
    return [task for task in dag.tasks if len(task.upstream_list) == 0]


def execute_dag(dag, verbose=False, mode='downstream'):
    """Execute every task of ``dag`` (recursing into SubDagOperators).

    :param dag: Airflow DAG whose tasks are executed in BFS order from its roots.
    :param verbose: when True, print each task before executing it.
    :param mode: direction passed to :func:`breadth_first_search_task_list`.
    :return: the ordered list of tasks that were executed.
    """
    task_list = []
    roots = dag.roots
    for root in roots:
        sub_task_list = breadth_first_search_task_list(root, task_list, mode=mode)
        task_list = sub_task_list + task_list
    for task in task_list:
        if verbose:
            print(dag.dag_id + '-' + str(task))
        if task.task_type == 'SubDagOperator':
            execute_dag(task.subdag, verbose=verbose)
        else:
            task.execute(dag.get_template_env())
    return task_list


def plot_dag(dag):
    """Render a quick preview of ``dag`` with networkx/graphviz/matplotlib."""
    fig, ax = plt.subplots(figsize=(15, 10), dpi=150)
    G = nx.DiGraph()
    color_list = []
    for task in dag.tasks:
        if len(task.downstream_list) > 0:
            for next_task in task.downstream_list:
                G.add_edge(task, next_task)
    for node in G.nodes():
        if len(node.ui_color) == 7:
            color_list.append(node.ui_color)
        else:
            # Pad short colour codes by repeating the last character so that
            # matplotlib receives a full 7-character '#rrggbb' string.
            last_code = node.ui_color[-1]
            color_list.append(str(node.ui_color).ljust(7, last_code))
    pos = nx.drawing.nx_agraph.graphviz_layout(G, prog='dot')
    nx.draw_networkx_nodes(G, pos, node_shape='D', node_color=color_list)
    nx.draw_networkx_edges(G, pos, edge_color='gray', alpha=0.8)
    nx.draw_networkx_labels(G, pos, font_size=5)
    ax.set_axis_off()
    plt.title("DAG preview", fontsize=8)
    plt.show()


"""
Some utils function used to persist objects.
"""


def pickle_compress(obj):
    """Pickle ``obj`` and deflate the bytes with zlib."""
    # zlib.compress replaces the previous gzip.zlib.compress indirection
    # (gzip.zlib *is* the zlib module, so the output is identical).
    return zlib.compress(pickle.dumps(obj))


def decompress_unpickle(obj_zp):
    """Inverse of :func:`pickle_compress`."""
    return pickle.loads(zlib.decompress(obj_zp))


def write_object_file(file_path, obj):
    """Persist ``obj`` (pickled + compressed) to ``file_path``.

    Uses smart_open's ``open``, so remote paths (s3://, ...) also work.
    """
    obj_pz = pickle_compress(obj)
    # Context manager guarantees the handle is closed even if write() raises.
    with open(file_path, 'wb') as file_obj:
        file_obj.write(obj_pz)


def load_object_file(file_path):
    """Load an object previously stored with :func:`write_object_file`."""
    with open(file_path, 'rb') as file_obj:
        obj_pz = file_obj.read()
    return decompress_unpickle(obj_pz)


"""
Some misc utils.
"""


def pandas_downcast_numeric(df_to_downcast,
                            float_type_to_downcast=("float64", "float32"),
                            int_type_to_downcast=("int64", "int32")):
    """Downcast, in place, the numeric columns of ``df_to_downcast``.

    :param df_to_downcast: pandas DataFrame, modified in place.
    :param float_type_to_downcast: (source dtype, target dtype) for floats.
    :param int_type_to_downcast: (source dtype, target dtype) for ints.
    """
    float_cols = [c for c in df_to_downcast.columns
                  if df_to_downcast[c].dtype == float_type_to_downcast[0]]
    int_cols = [c for c in df_to_downcast.columns
                if df_to_downcast[c].dtype == int_type_to_downcast[0]]
    df_to_downcast[float_cols] = df_to_downcast[float_cols].apply(
        lambda x: x.astype(float_type_to_downcast[1]))
    df_to_downcast[int_cols] = df_to_downcast[int_cols].apply(
        lambda x: x.astype(int_type_to_downcast[1]))


def format_dict_path_items(dictionary, replace_value):
    """Recursively apply ``str.format(replace_value)`` to every string value
    (including strings inside list values) of ``dictionary``.

    :param dictionary: dict, modified in place and also returned.
    :param replace_value: positional value substituted into '{}' placeholders.
    :return: the (mutated) dictionary.
    """
    for k, v in dictionary.items():
        if isinstance(v, dict):
            dictionary[k] = format_dict_path_items(v, replace_value)
        elif isinstance(v, list):
            formatted_list = []
            for list_item in v:
                if isinstance(list_item, str):
                    list_item = list_item.format(replace_value)
                formatted_list.append(list_item)
            dictionary[k] = formatted_list
        elif isinstance(v, str):
            dictionary[k] = v.format(replace_value)
    return dictionary
28.539394
112
0.637078
3.140625
6191b56409da4f2d51541a3fc44a0ca10a01b642
1,663
kt
Kotlin
app/src/main/java/alektas/telecomapp/utils/L.kt
Alektas/Telecom-System
1e83fbe6daa496f4c4f47d41f404d3e66fb200ff
[ "Apache-2.0" ]
null
null
null
app/src/main/java/alektas/telecomapp/utils/L.kt
Alektas/Telecom-System
1e83fbe6daa496f4c4f47d41f404d3e66fb200ff
[ "Apache-2.0" ]
null
null
null
app/src/main/java/alektas/telecomapp/utils/L.kt
Alektas/Telecom-System
1e83fbe6daa496f4c4f47d41f404d3e66fb200ff
[ "Apache-2.0" ]
null
null
null
package alektas.telecomapp.utils

import alektas.telecomapp.BuildConfig

/**
 * Debug-only logging helper. Output goes through [println] and is emitted only
 * when [BuildConfig.DEBUG] is true, so release builds stay silent.
 */
class L {
    companion object {
        private const val MEASURING_TIME_TAG = "MEASURING_TIME"

        // Named starting points recorded by [start] and consumed by [stop].
        private val startPoints = mutableListOf<Pair<String, Long>>()

        /** Print [log] as-is (debug builds only). */
        fun d(log: String) {
            if (BuildConfig.DEBUG) println(log)
        }

        /** Print [log] prefixed with the current thread and [where]'s simple class name. */
        fun d(where: Any, log: String) {
            if (BuildConfig.DEBUG) println("[${Thread.currentThread().name}][${where.javaClass.simpleName}]: $log")
        }

        /** Print [log] prefixed with the current thread and a custom [tag]. */
        fun d(tag: String, log: String) {
            if (BuildConfig.DEBUG) println("[${Thread.currentThread().name}][$tag]: $log")
        }

        /**
         * Start measuring time. Invoke this method at the moment where you want to
         * start measuring. To count time use [stop] at the right moment; it logs
         * the measured time to the Logcat.
         */
        fun start(pointName: String = "") {
            d(MEASURING_TIME_TAG, "Start measuring from |Point-$pointName|")
            startPoints.add(pointName to System.nanoTime())
        }

        /**
         * Stop measuring time. Logs, for every point recorded with [start], the
         * elapsed time in seconds to the Logcat, then clears the recorded points.
         */
        fun stop() {
            val endTime = System.nanoTime()
            d(MEASURING_TIME_TAG, "*** Stop measuring ***")
            startPoints.forEachIndexed { index, point ->
                d(MEASURING_TIME_TAG, "Time from |Point-$index:${point.first}| |${(endTime - point.second) * 1.0e-9}| seconds")
            }
            startPoints.clear()
        }
    }
}
36.152174
115
0.583885
3.28125
252a7e7fbadd13a20e101809c6dfe0bf95856270
2,514
kt
Kotlin
src/main/kotlin/lain/Lexer.kt
liminalitythree/bakadesu
6d5fbcc29da148fd72ecb58b164fc5c845267e5c
[ "CC0-1.0" ]
null
null
null
src/main/kotlin/lain/Lexer.kt
liminalitythree/bakadesu
6d5fbcc29da148fd72ecb58b164fc5c845267e5c
[ "CC0-1.0" ]
null
null
null
src/main/kotlin/lain/Lexer.kt
liminalitythree/bakadesu
6d5fbcc29da148fd72ecb58b164fc5c845267e5c
[ "CC0-1.0" ]
null
null
null
package lain

import java.time.Clock

/**
 * Scanner for the emoji language. [source] is a list of "characters": each
 * element is a String because an emoji may span two UTF-16 code units.
 */
class Lexer(val source: List<String>) {
    private val tokens = mutableListOf<Token>()
    private var start = 0
    private var current = 0

    /** Scan the whole source and return the tokens, terminated by an EOF token. */
    fun scanTokens(): List<Token> {
        while (!isAtEnd()) {
            // We are at the beginning of the next lexeme.
            start = current
            scanToken()
        }
        tokens.add(Token(TokenType.EOF, "", null))
        return tokens.toList()
    }

    // Scan a single token.
    private fun scanToken() {
        val ch: String = advance()
        when (ch) {
            "\uD83E\uDD1C" -> addToken(TokenType.LEFT_PAREN)
            "\uD83E\uDD1B" -> addToken(TokenType.RIGHT_PAREN)
            "\uD83D\uDC49" -> addToken(TokenType.LEFT_BRACKET)
            "\uD83D\uDC48" -> addToken(TokenType.RIGHT_BRACKET)
            "〰" -> addToken(TokenType.COMMA)
            else -> if (isDigit(ch)) number() else identifier(ch)
        }
    }

    // Consume a run of clock-emoji digits and emit a NUMBER token.
    private fun number() {
        while (ClockNumbers.isClock(peek())) advance()
        addToken(TokenType.NUMBER, ClockNumbers.parseClocks(source.subList(start, current)))
    }

    // An identifier is just one emoji.
    private fun identifier(c: String) {
        addToken(TokenType.IDENTIFIER, c)
    }

    // Consume the current character only if it matches [expected].
    private fun match(expected: String): Boolean {
        if (isAtEnd()) return false
        if (source[current] != expected) return false
        current++
        return true
    }

    // Look at the current character without consuming it.
    private fun peek(): String = if (isAtEnd()) "\u0000" else source[current]

    // True if [c] is a clock-emoji digit.
    private fun isDigit(c: String): Boolean = ClockNumbers.isClock(c)

    // True once the whole source has been consumed.
    private fun isAtEnd(): Boolean = current >= source.size

    // Consume and return the current character.
    private fun advance(): String {
        current++
        return source[current - 1]
    }

    // Append a token whose text spans [start, current).
    private fun addToken(type: TokenType, literal: Any?) {
        val text = source.subList(start, current).reduce { acc, part -> acc.plus(part) }
        tokens.add(Token(type, text, literal))
    }

    private fun addToken(type: TokenType) = addToken(type, null)
}
27.326087
92
0.585919
3.28125
64e001a125c823bf162f4795a42b3d093c67a517
1,868
rs
Rust
basic/src/main.rs
honkkki/rust-practice
73a0715c25ffb6ae10885cde092748d0effc5457
[ "MIT" ]
null
null
null
basic/src/main.rs
honkkki/rust-practice
73a0715c25ffb6ae10885cde092748d0effc5457
[ "MIT" ]
null
null
null
basic/src/main.rs
honkkki/rust-practice
73a0715c25ffb6ae10885cde092748d0effc5457
[ "MIT" ]
null
null
null
const MAX_POINTS: u32 = 1000;

fn main() {
    // Scalar types and defaults.
    let i: i64 = 1;
    println!("{}", i);
    let f = 1.1; // f64 by default
    let a = 1; // i32 by default
    let cc = '\u{1F601}';
    println!("{}", f);
    println!("{}", a);
    println!("{}", cc);
    println!("{}", MAX_POINTS);

    // `while` loop.
    let max = 10;
    let mut num = 0;
    while num * num < max {
        println!("{0} * {0} = {1}", num, num * num);
        num += 1;
    }

    let mut x = 1;
    const MAX_NUM: i8 = 10;
    while x < MAX_NUM {
        // Shadowing: re-declaring an existing name is allowed.
        let mut y = x;
        while y < MAX_NUM {
            print!("{}*{}={} ", x, y, x * y);
            y += 1;
        }
        println!();
        x += 1;
    }

    // `loop` runs forever until an explicit `break`.
    let mut num = 0;
    loop {
        println!("{0} * {0} = {1}", num, num * num);
        num += 1;
        if num * num > max {
            break;
        }
    }

    let t: bool = true;
    println!("{}", t);
    println!("--------------------------");

    // Tuples.
    let tup: (i32, i64) = (500, 1000);
    println!("{}, {}", tup.0, tup.1);
    let (x, y) = tup;
    println!("{}, {}", x, y);
    println!("--------------------------");

    // Arrays.
    let arr: [i32; 3] = [1, 2, 3];
    println!("{}", arr[0]);
    for elem in arr {
        println!("arr: {}", elem)
    }
    println!("--------------------------");

    // Control flow: blocks and `if` are expressions.
    let y = {
        let x = 1;
        x + 1
    };
    println!("{}", y);
    let num = get_num();
    println!("{}", num);
    let condition = true;
    let num = if condition { 1 } else { 0 };
    println!("{}", num);
    println!("--------------------------");

    range_num();

    let mut str = "hello";
    println!("{}", str);
    str = "rust";
    println!("{}", str);
}

fn get_num() -> i32 {
    6
}

fn range_num() {
    // Prints 1 through 5 (upper bound exclusive).
    for num in 1..6 {
        println!("{}", num)
    }
}
18.868687
52
0.367773
3.234375
3e817e4997df88adffabb8da5e0a76af8c34804c
8,985
h
C
CommandLineParser.h
malord/prime
f0e8be99b7dcd482708b9c928322bc07a3128506
[ "MIT" ]
null
null
null
CommandLineParser.h
malord/prime
f0e8be99b7dcd482708b9c928322bc07a3128506
[ "MIT" ]
null
null
null
CommandLineParser.h
malord/prime
f0e8be99b7dcd482708b9c928322bc07a3128506
[ "MIT" ]
null
null
null
// Copyright 2000-2021 Mark H. P. Lord #ifndef PRIME_COMMANDLINEPARSER_H #define PRIME_COMMANDLINEPARSER_H #include "Config.h" namespace Prime { class Log; /// A command line reader that supports short option (-v) and long options (--verbose), combined short options /// (e.g., -v -n -r can be shortened to -vnr) and -- to mark the end of the options. In option names containing /// a '-', the '-' is optional (e.g., --nocolour will match "no-colour"). A flag is an option that may be followed /// by a '-' to disable it, e.g., -G- or --colours-, or a + to enable it. Long name flags can also be specified /// with a "no-" or "disable-" prefix to negate them, e.g., --no-colours has the same result as --colours-. It is /// also possible to use long options by default, e.g., -trace instead of --trace, by using /// setImplicitLongOptionsEnabled(), which defaults to false. Values are options which expect one or more /// parameters, e.g., --dest ~/Desktop. class PRIME_PUBLIC CommandLineParser { public: CommandLineParser() { construct(); reset(); } explicit CommandLineParser(char** argv) { construct(); init(argv); } virtual ~CommandLineParser(); /// Set the arguments to be read. The last element argv must be null (i.e., like the argument array which /// is passed to main()). Note that returned strings are commonly pointers in to the strings in this array, /// so the array must remain valid until the command line has been read. bool init(char** argv); class PRIME_PUBLIC ResponseFileLoader { public: virtual ~ResponseFileLoader() { } /// Update ***argv to point to a new list of arguments to be parsed. The file name of the response file /// comes from path, which may itself come from another response file. 
virtual void loadResponseFile(const char* path, char*** argv, Log* log) = 0; }; void setResponseFileLoader(char responseFileChar, ResponseFileLoader* responseFileLoader) { _responseFileChar = responseFileChar; _responseFileLoader = responseFileLoader; } void reset(); /// If true, -trace will be considered to match --trace, rather than -t -r -a -c -e. Defaults to false. For /// this to work, the application must check for the long options before the short options. bool getImplicitLongOptionsEnabled() const { return _allowImplicitLongOptions; } void setImplicitLongOptionsEnabled(bool enabled) { _allowImplicitLongOptions = enabled; } /// Parse the next token from the argument list. Returns false if there are no more arguments to read. bool next(); /// Returns true if a basic, not-an-option argument was read. bool isFilename() const { return !_state.opt; } /// If a file name was read, returns it. const char* getFilename() const { return _state.opt ? NULL : _state.filename; } /// Returns true if a "--" argument has been encountered, signifying that all remaining arguments are files. bool hasOptionTerminatorBeenRead() const { return _state.noMoreOptions; } /// Returns true if an option, value or flag was read. bool isOption() const { return _state.opt ? true : false; } /// Returns the option that hasn't been read (for use when reporting errors, don't compare this, use /// readOption(), readFlag() or readValue()). const char* getOption() const { return _state.opt; } /// Returns the last option that was successfully read (with readOption(), readFlag() or readValue()). const char* getCurrentOption() const { return _state.currentOption; } /// Return the option or filename that was parsed. const char* getOptionOrFilename() const { return _state.opt ? _state.opt : _state.filename; } /// Returns true if the next argument is one of the | separated words. 
For example, for an archive utility /// you might ask cl.readCommand("add|a"), which would match "add", "a", "--add" and "-a" (and "-add" if /// implicit long options are enabled). bool readCommand(const char* words); /// Returns true if the specified option was read. e.g., readOption("verbose|v") bool readOption(const char* option) { return readOptionOrValueOrFlag(option, NULL, false); } /// If the specified option was read, returns true and sets *flag to true or false depending on whether the /// option was followed by a + or -, respectively. So -f or -f+ would set *flag to true, -f- to false. /// If flag is NULL, the result is stored internally and can be read by calling getFlag(). bool readFlag(const char* option, bool* flag = NULL) { return readOptionOrValueOrFlag(option, flag ? flag : &_state.flag, false); } /// Returns the flag read by readFlag() (or readColourFlag()) if they were called with a NULL flag pointer. bool getFlag() const { return _state.flag; } /// Returns true if the specified option, which should have a value, was read. After calling this you should /// call one of the fetch*() methods (fetchString(), fetchInt() etc.) to fetch the option's value. A value /// differs from a plain option in that it may be followed by an '=' sign, e.g., `--path=/bin`, which could /// also be supplied as `--path /bin` and `--path= /bin`. An option can have multiple values, e.g., /// `--offset 160 120`, and the fetch*() methods should be called for each. bool readValue(const char* option) { return readOptionOrValueOrFlag(option, NULL, true); } /// Fetch a string from the command line. Exits if there are no more arguments. const char* fetchString(); /// Fetch an intmax_t from the command line. Exits if there are no more arguments or the argument is invalid. intmax_t fetchIntmax(); /// Fetch an int from the command line. Exits if there are no more arguments or the argument is invalid. int fetchInt(); /// Fetch an intmax_t from the command line. 
If the next argument isn't a valid number, returns the default /// value and leaves the next argument to be read. intmax_t fetchOptionalIntmax(intmax_t defaultValue); /// Fetch an int from the command line. If the next argument isn't a valid number, returns the default /// value and leaves the next argument to be read. int fetchOptionalInt(int defaultValue); /// Fetch a float from the command line. Exits if there are no more arguments or the argument is invalid. float fetchFloat(); /// Fetch a double from the command line. Exits if there are no more arguments or the argument is invalid. double fetchDouble(); /// Fetch the next argument and convert the result to a bool. If there's no argument, or the next argument /// begins with the switch character (- or /) then true is assumed, but if there is an argument then yes, /// true, on, 1 and + are all considered true and no, false, off, 0 and - are all considered false. So, /// -f 1, -f+, -f and even -f YES are all considered true. -f -x will be considered true, and -x will /// correctly be read next. bool fetchBool(); /// Reads the standard colour/no colour flags (colour|color|colours|colors|G). bool readColourFlag(bool* flag = NULL); void skipLongOption(); void skipShortOption(); /// Skip an option's value. If unlessOption is true, if the next argument begins with a - then treat it as /// an option and don't skip it. void skipValue(bool unlessOption = false) { (void)fetchArgument(unlessOption); } // You can overload exit(ExitReason) to change how these are handled. 
void exitDueToMissingArgument() { exit(ExitReasonMissingArgument); } void exitDueToInvalidArgument() { exit(ExitReasonInvalidArgument); } void exitDueToUnknownOption() { exit(ExitReasonUnknownOption); } void exitDueToUnexpectedArgument() { exit(ExitReasonUnexpectedArgument); } void exitDueToUnknownOptionOrUnexpectedArgument() { exit(ExitReasonUnknownOptionOrUnexpectedArgument); } protected: enum ExitReason { ExitReasonMissingArgument, ExitReasonInvalidArgument, ExitReasonUnknownOption, ExitReasonUnexpectedArgument, ExitReasonUnknownOptionOrUnexpectedArgument, }; virtual void exit(ExitReason reason); private: void construct(); bool readOptionOrValueOrFlag(const char* option, bool* flag, bool hasParam); static bool equalLongOptionName(const char* have, const char* want, const char*& ptr, bool hasParam, bool hasFlag); const char* fetchArgument(bool optional); struct State { char** argv; const char* opt; const char* filename; bool noMoreOptions; bool isLongOption; bool flag; char currentOption[64]; } _state; bool _allowImplicitLongOptions; int _responseFileChar; ResponseFileLoader* _responseFileLoader; PRIME_UNCOPYABLE(CommandLineParser); }; } #endif
41.790698
119
0.689482
3.0625
fb2b96accbc2fe2b0ca62463a37a0b609aa455d7
3,120
c
C
LinkedListIterator/linked_list_iterator_main.c
Nam-H-Nguyen/DataStructure
61c86abf47171aecc66ba39e33364d12b12f94c1
[ "MIT" ]
1
2019-07-05T16:40:12.000Z
2019-07-05T16:40:12.000Z
LinkedListIterator/linked_list_iterator_main.c
Nam-H-Nguyen/DataStructure
61c86abf47171aecc66ba39e33364d12b12f94c1
[ "MIT" ]
null
null
null
LinkedListIterator/linked_list_iterator_main.c
Nam-H-Nguyen/DataStructure
61c86abf47171aecc66ba39e33364d12b12f94c1
[ "MIT" ]
null
null
null
/* * @file linked_list_iterator_main.c * * This file exercises the singly linked list and singly linked list * iterators functions. * * @since Oct 24, 2018 * @author: Nam H. Nguyen */ #include <stdlib.h> #include <stdio.h> #include <stdbool.h> #include "linked_list_iterator.h" /** * Test LinkedListIterator functions */ void testLinkedListIterator(void) { printf("\nstart testLinkedListIterator\n"); printf("initial list\n"); LinkedList *list = newLinkedList(5); printLinkedList(list); printf("list size: %ld\n", linkedListSize(list)); // add 5 nodes to the list printf("\nAdding 5 values to list\n"); addLastLinkedListVal(list, "A"); addLastLinkedListVal(list, "B"); addLastLinkedListVal(list, "C"); addLastLinkedListVal(list, "D"); addLastLinkedListVal(list, "E"); printLinkedList(list); printf("list size: %ld\n", linkedListSize(list)); printf("\nTraversing list forward with iterator\n"); LinkedListIterator *itr = newLinkedListIterator(list); printf("iterator count: %ld\n", getLinkedListIteratorCount(itr)); printf("iterator avail: %ld\n", getLinkedListIteratorAvailable(itr)); while (hasNextLinkedListIteratorVal(itr)) { const char *val; if (getNextLinkedListIteratorVal(itr, &val)) { printf("iterator next: \"%s\"\n", val); } else { printf("iterator next: unavailable\n"); } } printf("iterator count: %ld\n", getLinkedListIteratorCount(itr)); printf("iterator avail: %ld\n", getLinkedListIteratorAvailable(itr)); printf("\nMoving back one from end with iterator\n"); printf("iterator has prev: %s\n", hasPrevLinkedListIteratorVal(itr) ? 
"true" : "false"); const char *val; if (getPrevLinkedListIteratorVal(itr, &val)) { printf("iterator prev: \"%s\"\n", val); } else { printf("iterator prev: unavailable\n"); } printf("iterator count: %ld\n", getLinkedListIteratorCount(itr)); printf("iterator avail: %ld\n", getLinkedListIteratorAvailable(itr)); printf("\nMoving forward one to end with iterator\n"); if (getNextLinkedListIteratorVal(itr, &val)) { printf("iterator next: \"%s\"\n", val); } else { printf("iterator next: unavailable\n"); } printf("iterator count: %ld\n", getLinkedListIteratorCount(itr)); printf("iterator avail: %ld\n", getLinkedListIteratorAvailable(itr)); printf("\nResetting iterator\n"); resetLinkedListIterator(itr); printf("iterator has next: %s\n", hasNextLinkedListIteratorVal(itr) ? "true" : "false"); printf("iterator count: %ld\n", getLinkedListIteratorCount(itr)); printf("iterator avail: %ld\n", getLinkedListIteratorAvailable(itr)); printf("\nTrying to move back one from beginning with iterator\n"); printf("iterator has prev: %s\n", hasPrevLinkedListIteratorVal(itr) ? "true" : "false"); if (getPrevLinkedListIteratorVal(itr, &val)) { printf("iterator prev: \"%s\"\n", val); } else { printf("iterator prev: unavailable\n"); } printf("\nDeleting iterator and linked list\n"); deleteLinkedListIterator(itr); deleteLinkedList(list); printf("end testLinkedListIterator\n"); } /** * Test functions. */ int main(void) { testLinkedListIterator(); printf("program exiting\n"); }
30.588235
89
0.714103
3.15625
dc12fe3a72634b5363c218ab0b3d9830282fc7ea
6,959
py
Python
causalinference/core/propensity.py
youngminju-phd/Causalinference
630e8fb195754a720da41791b725d3dadabfb257
[ "BSD-3-Clause" ]
392
2016-06-08T19:43:08.000Z
2022-03-29T14:18:07.000Z
causalinference/core/propensity.py
youngminju-phd/Causalinference
630e8fb195754a720da41791b725d3dadabfb257
[ "BSD-3-Clause" ]
12
2017-04-28T20:25:54.000Z
2021-11-14T10:25:40.000Z
causalinference/core/propensity.py
youngminju-phd/Causalinference
630e8fb195754a720da41791b725d3dadabfb257
[ "BSD-3-Clause" ]
82
2016-06-08T19:43:11.000Z
2022-03-28T13:36:28.000Z
from __future__ import division import numpy as np from scipy.optimize import fmin_bfgs from itertools import combinations_with_replacement import causalinference.utils.tools as tools from .data import Dict class Propensity(Dict): """ Dictionary-like class containing propensity score data. Propensity score related data includes estimated logistic regression coefficients, maximized log-likelihood, predicted propensity scores, and lists of the linear and quadratic terms that are included in the logistic regression. """ def __init__(self, data, lin, qua): Z = form_matrix(data['X'], lin, qua) Z_c, Z_t = Z[data['controls']], Z[data['treated']] beta = calc_coef(Z_c, Z_t) self._data = data self._dict = dict() self._dict['lin'], self._dict['qua'] = lin, qua self._dict['coef'] = beta self._dict['loglike'] = -neg_loglike(beta, Z_c, Z_t) self._dict['fitted'] = sigmoid(Z.dot(beta)) self._dict['se'] = calc_se(Z, self._dict['fitted']) def __str__(self): table_width = 80 coefs = self._dict['coef'] ses = self._dict['se'] output = '\n' output += 'Estimated Parameters of Propensity Score\n\n' entries1 = ['', 'Coef.', 'S.e.', 'z', 'P>|z|', '[95% Conf. 
int.]'] entry_types1 = ['string']*6 col_spans1 = [1]*5 + [2] output += tools.add_row(entries1, entry_types1, col_spans1, table_width) output += tools.add_line(table_width) entries2 = tools.gen_reg_entries('Intercept', coefs[0], ses[0]) entry_types2 = ['string'] + ['float']*6 col_spans2 = [1]*7 output += tools.add_row(entries2, entry_types2, col_spans2, table_width) lin = self._dict['lin'] for (lin_term, coef, se) in zip(lin, coefs[1:], ses[1:]): entries3 = tools.gen_reg_entries('X'+str(lin_term), coef, se) output += tools.add_row(entries3, entry_types2, col_spans2, table_width) qua = self._dict['qua'] lin_num = len(lin)+1 # including intercept for (qua_term, coef, se) in zip(qua, coefs[lin_num:], ses[lin_num:]): name = 'X'+str(qua_term[0])+'*X'+str(qua_term[1]) entries4 = tools.gen_reg_entries(name, coef, se) output += tools.add_row(entries4, entry_types2, col_spans2, table_width) return output class PropensitySelect(Propensity): """ Dictionary-like class containing propensity score data. Propensity score related data includes estimated logistic regression coefficients, maximized log-likelihood, predicted propensity scores, and lists of the linear and quadratic terms that are included in the logistic regression. 
""" def __init__(self, data, lin_B, C_lin, C_qua): X_c, X_t = data['X_c'], data['X_t'] lin = select_lin_terms(X_c, X_t, lin_B, C_lin) qua = select_qua_terms(X_c, X_t, lin, C_qua) super(PropensitySelect, self).__init__(data, lin, qua) def form_matrix(X, lin, qua): N, K = X.shape mat = np.empty((N, 1+len(lin)+len(qua))) mat[:, 0] = 1 # constant term current_col = 1 if lin: mat[:, current_col:current_col+len(lin)] = X[:, lin] current_col += len(lin) for term in qua: # qua is a list of tuples of column numbers mat[:, current_col] = X[:, term[0]] * X[:, term[1]] current_col += 1 return mat def sigmoid(x, top_threshold=100, bottom_threshold=-100): high_x = (x >= top_threshold) low_x = (x <= bottom_threshold) mid_x = ~(high_x | low_x) values = np.empty(x.shape[0]) values[high_x] = 1.0 values[low_x] = 0.0 values[mid_x] = 1/(1+np.exp(-x[mid_x])) return values def log1exp(x, top_threshold=100, bottom_threshold=-100): high_x = (x >= top_threshold) low_x = (x <= bottom_threshold) mid_x = ~(high_x | low_x) values = np.empty(x.shape[0]) values[high_x] = 0.0 values[low_x] = -x[low_x] values[mid_x] = np.log(1 + np.exp(-x[mid_x])) return values def neg_loglike(beta, X_c, X_t): return log1exp(X_t.dot(beta)).sum() + log1exp(-X_c.dot(beta)).sum() def neg_gradient(beta, X_c, X_t): return (sigmoid(X_c.dot(beta))*X_c.T).sum(1) - \ (sigmoid(-X_t.dot(beta))*X_t.T).sum(1) def calc_coef(X_c, X_t): K = X_c.shape[1] neg_ll = lambda b: neg_loglike(b, X_c, X_t) neg_grad = lambda b: neg_gradient(b, X_c, X_t) logit = fmin_bfgs(neg_ll, np.zeros(K), neg_grad, full_output=True, disp=False) return logit[0] def calc_se(X, phat): H = np.dot(phat*(1-phat)*X.T, X) return np.sqrt(np.diag(np.linalg.inv(H))) def get_excluded_lin(K, included): included_set = set(included) return [x for x in range(K) if x not in included_set] def get_excluded_qua(lin, included): whole_set = list(combinations_with_replacement(lin, 2)) included_set = set(included) return [x for x in whole_set if x not in included_set] def 
calc_loglike(X_c, X_t, lin, qua): Z_c = form_matrix(X_c, lin, qua) Z_t = form_matrix(X_t, lin, qua) beta = calc_coef(Z_c, Z_t) return -neg_loglike(beta, Z_c, Z_t) def select_lin(X_c, X_t, lin_B, C_lin): # Selects, through a sequence of likelihood ratio tests, the # variables that should be included linearly in propensity # score estimation. K = X_c.shape[1] excluded = get_excluded_lin(K, lin_B) if excluded == []: return lin_B ll_null = calc_loglike(X_c, X_t, lin_B, []) def lr_stat_lin(lin_term): ll_alt = calc_loglike(X_c, X_t, lin_B+[lin_term], []) return 2 * (ll_alt - ll_null) lr_stats = np.array([lr_stat_lin(term) for term in excluded]) argmax_lr = lr_stats.argmax() if lr_stats[argmax_lr] < C_lin: return lin_B else: new_term = [excluded[argmax_lr]] return select_lin(X_c, X_t, lin_B+new_term, C_lin) def select_lin_terms(X_c, X_t, lin_B, C_lin): # Mostly a wrapper around function select_lin to handle cases that # require little computation. if C_lin <= 0: K = X_c.shape[1] return lin_B + get_excluded_lin(K, lin_B) elif C_lin == np.inf: return lin_B else: return select_lin(X_c, X_t, lin_B, C_lin) def select_qua(X_c, X_t, lin, qua_B, C_qua): # Selects, through a sequence of likelihood ratio tests, the # variables that should be included quadratically in propensity # score estimation. excluded = get_excluded_qua(lin, qua_B) if excluded == []: return qua_B ll_null = calc_loglike(X_c, X_t, lin, qua_B) def lr_stat_qua(qua_term): ll_alt = calc_loglike(X_c, X_t, lin, qua_B+[qua_term]) return 2 * (ll_alt - ll_null) lr_stats = np.array([lr_stat_qua(term) for term in excluded]) argmax_lr = lr_stats.argmax() if lr_stats[argmax_lr] < C_qua: return qua_B else: new_term = [excluded[argmax_lr]] return select_qua(X_c, X_t, lin, qua_B+new_term, C_qua) def select_qua_terms(X_c, X_t, lin, C_qua): # Mostly a wrapper around function select_qua to handle cases that # require little computation. 
if lin == []: return [] if C_qua <= 0: return get_excluded_qua(lin, []) elif C_qua == np.inf: return [] else: return select_qua(X_c, X_t, lin, [], C_qua)
24.765125
69
0.674522
3.203125