==== repo: N00bface/Real-Dolmen-Stage-Opdrachten | path: stageopdracht/src/main/resources/static/vendors/gentelella/vendors/echarts/test/polarScatter.html | language: HTML | license: mit | size: 2536 ====

// NOTE: the opening lines of this file are missing from the dump. The IIFE
// wrapper and the head of replaceHeaderBranding() below are minimal
// reconstructions (assumptions) so that the surviving fragment parses; the
// selector 'header a, nav a' is likewise assumed.
(function() {

function replaceHeaderBranding() {
  document.querySelectorAll('header a, nav a').forEach(link => {
    // Get the span elements that contain the "Hugging Face" text
    const spans = link.querySelectorAll('span.whitespace-nowrap, span.hidden.whitespace-nowrap');
    spans.forEach(span => {
      if (span.textContent && span.textContent.trim().match(/Hugging\s*Face/i)) {
        span.textContent = 'AI快站';
      }
    });
  });

  // Replace the alt attribute of logo images
  document.querySelectorAll('img[alt*="Hugging"], img[alt*="Face"]').forEach(img => {
    if (img.alt.match(/Hugging\s*Face/i)) {
      img.alt = 'AI快站 logo';
    }
  });
}

// Replace the links in the navigation bar
function replaceNavigationLinks() {
  // "Already replaced" flag, to prevent repeated runs
  if (window._navLinksReplaced) {
    return;
  }

  // Set of links that have already been replaced, to avoid double replacement
  const replacedLinks = new Set();

  // Only search and replace links inside the navigation area
  const headerArea = document.querySelector('header') || document.querySelector('nav');
  if (!headerArea) {
    return;
  }

  // Find the links within the navigation area
  const navLinks = headerArea.querySelectorAll('a');
  navLinks.forEach(link => {
    // Skip links that were already replaced
    if (replacedLinks.has(link)) return;

    const linkText = link.textContent.trim();
    const linkHref = link.getAttribute('href') || '';

    // Replace the Spaces link - only once
    if (
      (linkHref.includes('/spaces') || linkHref === '/spaces' ||
       linkText === 'Spaces' || linkText.match(/^\s*Spaces\s*$/i)) &&
      linkText !== 'OCR模型免费转Markdown'
    ) {
      link.textContent = 'OCR模型免费转Markdown';
      link.href = 'https://fast360.xyz';
      link.setAttribute('target', '_blank');
      link.setAttribute('rel', 'noopener noreferrer');
      replacedLinks.add(link);
    }
    // Remove the Posts link
    else if (
      linkHref.includes('/posts') || linkHref === '/posts' ||
      linkText === 'Posts' || linkText.match(/^\s*Posts\s*$/i)
    ) {
      if (link.parentNode) {
        link.parentNode.removeChild(link);
      }
      replacedLinks.add(link);
    }
    // Replace the Docs link - only once
    else if (
      (linkHref.includes('/docs') || linkHref === '/docs' ||
       linkText === 'Docs' || linkText.match(/^\s*Docs\s*$/i)) &&
      linkText !== '模型下载攻略'
    ) {
      link.textContent = '模型下载攻略';
      link.href = '/';
      replacedLinks.add(link);
    }
    // Remove the Enterprise link
    else if (
      linkHref.includes('/enterprise') || linkHref === '/enterprise' ||
      linkText === 'Enterprise' || linkText.match(/^\s*Enterprise\s*$/i)
    ) {
      if (link.parentNode) {
        link.parentNode.removeChild(link);
      }
      replacedLinks.add(link);
    }
  });

  // Look for possibly nested Spaces/Posts text
  const textNodes = [];
  function findTextNodes(element) {
    if (element.nodeType === Node.TEXT_NODE) {
      const text = element.textContent.trim();
      if (text === 'Spaces' || text === 'Posts' || text === 'Enterprise') {
        textNodes.push(element);
      }
    } else {
      for (const child of element.childNodes) {
        findTextNodes(child);
      }
    }
  }

  // Only search for text nodes inside the navigation area
  findTextNodes(headerArea);

  // Replace the text nodes that were found
  textNodes.forEach(node => {
    const text = node.textContent.trim();
    if (text === 'Spaces') {
      node.textContent = node.textContent.replace(/Spaces/g, 'OCR模型免费转Markdown');
    } else if (text === 'Posts') {
      // Remove the Posts text node
      if (node.parentNode) {
        node.parentNode.removeChild(node);
      }
    } else if (text === 'Enterprise') {
      // Remove the Enterprise text node
      if (node.parentNode) {
        node.parentNode.removeChild(node);
      }
    }
  });

  // Mark the replacement as done
  window._navLinksReplaced = true;
}

// Replace domains inside code areas
function replaceCodeDomains() {
  // Handle span.hljs-string and span.njs-string elements specifically
  document.querySelectorAll('span.hljs-string, span.njs-string, span[class*="hljs-string"], span[class*="njs-string"]').forEach(span => {
    if (span.textContent && span.textContent.includes('huggingface.co')) {
      span.textContent = span.textContent.replace(/huggingface\.co/g, 'aifasthub.com');
    }
  });

  // Replace domains in pre and code tags that contain a git clone command
  document.querySelectorAll('pre, code').forEach(element => {
    if (element.textContent && element.textContent.includes('git clone')) {
      const text = element.innerHTML;
      if (text.includes('huggingface.co')) {
        element.innerHTML = text.replace(/huggingface\.co/g, 'aifasthub.com');
      }
    }
  });

  // Handle specific command-line examples
  document.querySelectorAll('pre, code').forEach(element => {
    const text = element.innerHTML;
    if (text.includes('huggingface.co')) {
      // Dedicated handling for git clone commands
      if (text.includes('git clone') || text.includes('GIT_LFS_SKIP_SMUDGE=1')) {
        element.innerHTML = text.replace(/huggingface\.co/g, 'aifasthub.com');
      }
    }
  });

  // Handle the code snippets on the model download page specifically
  document.querySelectorAll('.flex.border-t, .svelte_hydrator, .inline-block').forEach(container => {
    const content = container.innerHTML;
    if (content && content.includes('huggingface.co')) {
      container.innerHTML = content.replace(/huggingface\.co/g, 'aifasthub.com');
    }
  });

  // Handle the code snippets in the model repository clone dialog specifically
  try {
    // Find the dialog containing the "Clone this model repository" title
    const cloneDialog = document.querySelector('.svelte_hydration_boundary, [data-target="MainHeader"]');
    if (cloneDialog) {
      // Find all code snippets and command examples in the dialog
      const codeElements = cloneDialog.querySelectorAll('pre, code, span');
      codeElements.forEach(element => {
        if (element.textContent && element.textContent.includes('huggingface.co')) {
          if (element.innerHTML.includes('huggingface.co')) {
            element.innerHTML = element.innerHTML.replace(/huggingface\.co/g, 'aifasthub.com');
          } else {
            element.textContent = element.textContent.replace(/huggingface\.co/g, 'aifasthub.com');
          }
        }
      });
    }

    // Target domains in clone commands more precisely
    document.querySelectorAll('[data-target]').forEach(container => {
      const codeBlocks = container.querySelectorAll('pre, code, span.hljs-string');
      codeBlocks.forEach(block => {
        if (block.textContent && block.textContent.includes('huggingface.co')) {
          if (block.innerHTML.includes('huggingface.co')) {
            block.innerHTML = block.innerHTML.replace(/huggingface\.co/g, 'aifasthub.com');
          } else {
            block.textContent = block.textContent.replace(/huggingface\.co/g, 'aifasthub.com');
          }
        }
      });
    });
  } catch (e) {
    // Swallow errors without logging
  }
}

// Run the replacements once the DOM has loaded
if (document.readyState === 'loading') {
  document.addEventListener('DOMContentLoaded', () => {
    replaceHeaderBranding();
    replaceNavigationLinks();
    replaceCodeDomains();
    // Only re-run when necessary - check again after 3 seconds
    setTimeout(() => {
      if (!window._navLinksReplaced) {
        console.log('[Client] 3秒后重新检查导航链接');
        replaceNavigationLinks();
      }
    }, 3000);
  });
} else {
  replaceHeaderBranding();
  replaceNavigationLinks();
  replaceCodeDomains();
  // Only re-run when necessary - check again after 3 seconds
  setTimeout(() => {
    if (!window._navLinksReplaced) {
      console.log('[Client] 3秒后重新检查导航链接');
      replaceNavigationLinks();
    }
  }, 3000);
}

// Add a MutationObserver to handle elements that may be loaded dynamically
const observer = new MutationObserver(mutations => {
  // Check whether the navigation area changed
  const hasNavChanges = mutations.some(mutation => {
    // Check whether a header or nav element changed
    return Array.from(mutation.addedNodes).some(node => {
      if (node.nodeType === Node.ELEMENT_NODE) {
        // Is this a navigation element, or does it contain one?
        if (node.tagName === 'HEADER' || node.tagName === 'NAV' || node.querySelector('header, nav')) {
          return true;
        }
        // Is it inside a navigation element?
        let parent = node.parentElement;
        while (parent) {
          if (parent.tagName === 'HEADER' || parent.tagName === 'NAV') {
            return true;
          }
          parent = parent.parentElement;
        }
      }
      return false;
    });
  });

  // Only run the replacements when the navigation area changed
  if (hasNavChanges) {
    // Reset the flag to allow replacing again
    window._navLinksReplaced = false;
    replaceHeaderBranding();
    replaceNavigationLinks();
  }
});

// Start observing changes to document.body, including child nodes
if (document.body) {
  observer.observe(document.body, { childList: true, subtree: true });
} else {
  document.addEventListener('DOMContentLoaded', () => {
    observer.observe(document.body, { childList: true, subtree: true });
  });
}
})();
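To make the script's effect concrete, here is an illustrative before/after for one rewrite that replaceCodeDomains() performs (the snippet markup is an assumed example, not taken from the live site):

    // before: <code><span class="hljs-string">git clone https://huggingface.co/gpt2</span></code>
    // after:  <code><span class="hljs-string">git clone https://aifasthub.com/gpt2</span></code>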
==== repo: Dbz/Algorithms | path: CONTRIBUTING.md | language: Markdown | license: mit | size: 1821 ====

# Contributing

I explicitly welcome contributions from people who have never contributed to open-source before: we were all beginners once!
I can help build on a partially working pull request with the aim of getting it merged.
I am also actively seeking to diversify our contributors and especially welcome contributions from women from all backgrounds and people of color. [1](#attribution)

If you're interested in contributing, fork this repo and create a pull request.
Please include a short descriptive link to your code in the readme, and order the links alphabetically by file name.
Include a description of each data structure or algorithm at the top of the file, and if you feel that your code needs further explanation,
you can include a more detailed summary in the Data Structures or Algorithms subfolder's readme.
Please follow the [Ruby](https://github.com/bbatsov/ruby-style-guide) and [JavaScript](https://github.com/airbnb/javascript) Style Guides.
Tests are recommended, but optional.

If you're looking for inspiration, I'd love to have a:

 + [Priority Queue](https://en.wikipedia.org/wiki/Priority_queue)
 + [Valid Sudoku Board](https://en.wikipedia.org/wiki/Sudoku_solving_algorithms)
 + [Sorting Algorithms](https://en.wikipedia.org/wiki/Sorting_algorithm#Popular_sorting_algorithms)
 + [A* Search Algorithm](https://en.wikipedia.org/wiki/A*_search_algorithm)
 + [Knuth-Morris-Pratt Algorithm](https://en.wikipedia.org/wiki/Knuth%E2%80%93Morris%E2%80%93Pratt_algorithm)
 + [Heap](https://en.wikipedia.org/wiki/Heap_\(data_structure\))
 + [Bloom Filter](https://en.wikipedia.org/wiki/Bloom_filter)
 + [Or refactor one of these files!](/REFACTOR.md)


## Attribution

 1. I used and modified [Homebrew's](https://github.com/Homebrew/brew#contributing) welcoming contributing section.
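To make those guidelines concrete, here is a minimal sketch of what a new contribution file could look like under them; the file name, algorithm choice, and wording are illustrative, not from this repo:

    // Binary Search: returns the index of `target` in the sorted array `list`,
    // or -1 if it is absent. Runs in O(log n) time and O(1) space.
    function binarySearch(list, target) {
      let low = 0;
      let high = list.length - 1;
      while (low <= high) {
        const mid = Math.floor((low + high) / 2);
        if (list[mid] === target) return mid;
        if (list[mid] < target) low = mid + 1;
        else high = mid - 1;
      }
      return -1;
    }

    module.exports = binarySearch;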
==== repo: crakama/bc_7_twitment | path: keys.py | language: Python | license: mit | size: 156 ====

# Alchemy sentiment analysis: fb12d2c55fff36e1e268584e261b6b010b37279f

# Africa Is Talking: 676dbd926bbb04fa69ce90ee81d3f5ffee2692aaf80eb5793bd70fe93e77dc2e

==== repo: PearlVentures/Crux | path: boilerplate/server/config.js | language: JavaScript | license: mit | size: 699 ====

module.exports = {
  project: {
    server: {
      basePath: '',
      ip: '0.0.0.0',
      request: {
        sesskey: 'sid',
        limit: 5000,
        parameters: 60
      },
      render: 'swig',
      path: {
        routes: 'app/routes',
        views: 'app/views',
        public: 'public/',
        docs: false
      },
      views: {
        extension: 'swig',
        errors: 'errors/'
      }
    }
  },
  environment: {
    server: {
      debug: true,
      host: 'localhost',
      port: 3000,
      request: {
        secret: new Date().getTime() + '' + Math.random(),
        cors: true,
        geolocation: false
      },
      views: {
        cache: false
      }
    }
  }
};

==== repo: dysan1376/hospi | path: src/RedMedica/ConsultasBundle/Entity/Category.php | language: PHP | license: mit | size: 1682 ====

<?php
// NOTE: the top of this file (namespace, use statements, class declaration and
// property definitions) is missing from the dump; the head below is a minimal
// reconstruction inferred from the file path and the accessors, not the
// original source.
namespace RedMedica\ConsultasBundle\Entity;

use Doctrine\Common\Collections\ArrayCollection;

class Category
{
    private $id;
    private $label;
    private $articles;

    public function __construct()
    {
        $this->articles = new ArrayCollection();
    }

    public function __toString()
    {
        return $this->label;
    }

    public function getId()
    {
        return $this->id;
    }

    public function setLabel($label)
    {
        $this->label = $label;
        return $this;
    }

    public function getLabel()
    {
        return $this->label;
    }

    public function addArticle(Article $article)
    {
        $this->articles->add($article);
        return $this;
    }

    public function setArticles($articles)
    {
        $this->articles = $articles;
        return $this;
    }

    public function getArticles()
    {
        return $this->articles;
    }
}

==== repo: redfieldstefan/kibaktile.com | path: src/components/next-prev-pattern.js | language: JavaScript | license: mit | size: 1640 ====

import React from "react";
import styled from 'styled-components'
import Link from './link';

const nextArrow = "/icons/next-arrow.png";
const prevArrow = "/icons/prev-arrow.png";

const PatternLink = styled.span`
  width: 100%;
  display: flex;
  flex-direction: column;
  padding: 1em;
  float: ${props => props.previous ? 'left' : 'right'}

  @media(min-width: $width-tablet) {
    width: auto;
  }
`;

const ImageContainer = styled.span`
  height: 50px;
`;

const Image = styled.img`
  height: 100%;
  background-color: white;
  float: ${props => props.previous ? 'right' : 'left'}
`;

const ArrowContainer = styled.div`
  display: flex;
  flex-direction: ${props => props.previous ? 'row-reverse' : 'row'};
  align-items: center;
`;

const Name = styled.p`
  padding: 10px 0;
`;

const Arrow = styled.img`
  height: 10px;
  flex-direction: row-reverse;
  padding: ${props => props.previous ? '0 10px 0 0' : '0 0 0 10px'};
`;

const NextPrevPattern = ({pattern, direction}) => {
  const previous = direction === "previous"

  // NOTE: the JSX tags in the original return block were stripped by the
  // extraction (only {pattern.name} and the two conditionals survived); the
  // markup below is reconstructed from the styled-components above and the
  // surviving expressions, and is an assumption rather than the original.
  return (
    <Link href={pattern.url}>
      <PatternLink previous={previous}>
        <ArrowContainer previous={previous}>
          <ImageContainer>
            <Image src={pattern.image} previous={previous} />
          </ImageContainer>
          <Name>{pattern.name}</Name>
          {
            (direction === "next") &&
            <Arrow src={nextArrow} />
          }
          {
            (direction === "previous") &&
            <Arrow src={prevArrow} previous />
          }
        </ArrowContainer>
      </PatternLink>
    </Link>
  )
};

export default NextPrevPattern;
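A quick usage sketch for the component above; the prop shapes are assumptions inferred from the reconstructed markup, not documented by the repo:

    // <NextPrevPattern
    //   pattern={{ name: 'Herringbone', image: '/patterns/herringbone.png', url: '/patterns/herringbone' }}
    //   direction="next"
    // />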
==== repo: DigiByte-Team/digibyte | path: src/net_processing.h | language: C | license: mit | size: 2633 ====

// Copyright (c) 2009-2010 Satoshi Nakamoto
// Copyright (c) 2009-2016 The Bitcoin Core developers
// Distributed under the MIT software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.

#ifndef DIGIBYTE_NET_PROCESSING_H
#define DIGIBYTE_NET_PROCESSING_H

#include "net.h"
#include "validationinterface.h"

/** Default for -maxorphantx, maximum number of orphan transactions kept in memory */
static const unsigned int DEFAULT_MAX_ORPHAN_TRANSACTIONS = 100;
/** Expiration time for orphan transactions in seconds */
static const int64_t ORPHAN_TX_EXPIRE_TIME = 20 * 60;
/** Minimum time between orphan transactions expire time checks in seconds */
static const int64_t ORPHAN_TX_EXPIRE_INTERVAL = 5 * 60;
/** Default number of orphan+recently-replaced txn to keep around for block reconstruction */
static const unsigned int DEFAULT_BLOCK_RECONSTRUCTION_EXTRA_TXN = 100;

/** Register with a network node to receive its signals */
void RegisterNodeSignals(CNodeSignals& nodeSignals);
/** Unregister a network node */
void UnregisterNodeSignals(CNodeSignals& nodeSignals);

class PeerLogicValidation : public CValidationInterface {
private:
    CConnman* connman;

public:
    PeerLogicValidation(CConnman* connmanIn);

    virtual void SyncTransaction(const CTransaction& tx, const CBlockIndex* pindex, int nPosInBlock);
    virtual void UpdatedBlockTip(const CBlockIndex *pindexNew, const CBlockIndex *pindexFork, bool fInitialDownload);
    virtual void BlockChecked(const CBlock& block, const CValidationState& state);
    // NOTE: the template arguments in this header were stripped by the
    // extraction; they are restored here from the upstream Bitcoin Core header
    // this file derives from.
    virtual void NewPoWValidBlock(const CBlockIndex *pindex, const std::shared_ptr<const CBlock>& pblock);
};

struct CNodeStateStats {
    int nMisbehavior;
    int nSyncHeight;
    int nCommonHeight;
    std::vector<int> vHeightInFlight;
};

/** Get statistics from node state */
bool GetNodeStateStats(NodeId nodeid, CNodeStateStats &stats);
/** Increase a node's misbehavior score. */
void Misbehaving(NodeId nodeid, int howmuch);

/** Process protocol messages received from a given node */
bool ProcessMessages(CNode* pfrom, CConnman& connman, std::atomic<bool>& interrupt);
/**
 * Send queued protocol messages to be sent to a given node.
 *
 * @param[in]   pto             The node which we are sending messages to.
 * @param[in]   connman         The connection manager for that node.
 * @param[in]   interrupt       Interrupt condition for processing threads
 * @return                      True if there is more work to be done
 */
bool SendMessages(CNode* pto, CConnman& connman, std::atomic<bool>& interrupt);

#endif // DIGIBYTE_NET_PROCESSING_H
==== repo: facascante/slimerp | path: fifs/web/Reports/ReportAdvanced_TXSold.pm | language: Perl | license: mit | size: 5822 ====

#
# $Header: svn://svn/SWM/trunk/web/Reports/ReportAdvanced_TXSold.pm 8251 2013-04-08 09:00:53Z rlee $
#

package Reports::ReportAdvanced_TXSold;

use strict;
use lib ".";
use ReportAdvanced_Common;
use Reports::ReportAdvanced;
our @ISA = qw(Reports::ReportAdvanced);

use strict;

sub _getConfiguration {
	my $self = shift;

	my $currentLevel = $self->{'EntityTypeID'} || 0;
	my $Data = $self->{'Data'};
	my $SystemConfig = $self->{'SystemConfig'};
	my $clientValues = $Data->{'clientValues'};
	my $CommonVals = getCommonValues(
		$Data,
		{
			MYOB => 1,
		},
	);

	my $txt_Clr = $Data->{'SystemConfig'}{'txtCLR'} || 'Clearance';

	my %config = (
		Name => 'Transactions Sold Report',

		StatsReport => 0,
		MemberTeam => 0,
		ReportEntity => 3,
		ReportLevel => 0,
		Template => 'default_adv',
		TemplateEmail => 'default_adv_CSV',
		DistinctValues => 1,
		SQLBuilder => \&SQLBuilder,
		DefaultPermType => 'NONE',

		Fields => {
			intPaymentType => [
				'Payment Type',
				{
					active => 1,
					displaytype => 'lookup',
					fieldtype => 'dropdown',
					dropdownoptions => \%Defs::paymentTypes,
					allowsort => 1,
					dbfield => 'TL.intPaymentType'
				}
			],
			strTXN => [
				'PayPal Reference Number',
				{
					displaytype => 'text',
					fieldtype => 'text',
					dbfield => 'TL.strTXN',
					active => 1
				}
			],
			intLogID => [
				'Payment Log ID',
				{
					displaytype => 'text',
					fieldtype => 'text',
					dbfield => 'TL.intLogID',
					allowgrouping => 1,
					active => 1
				}
			],
			dtSettlement => [
				'Settlement Date',
				{
					active => 1,
					displaytype => 'date',
					fieldtype => 'datetime',
					allowsort => 1,
					dbformat => ' DATE_FORMAT(dtSettlement,"%d/%m/%Y %H:%i")'
				}
			],
			intAmount => [
				'Total Amount Paid',
				{
					displaytype => 'currency',
					fieldtype => 'text',
					allowsort => 1,
					dbfield => 'TL.intAmount',
					active => 1
				}
			],
			SplitAmount => [
				'Split Amount',
				{
					displaytype => 'currency',
					fieldtype => 'text',
					allowsort => 1,
					total => 1,
					active => 1
				}
			],
			SplitLevel => [
				'Split Level',
				{
					displaytype => 'text',
					fieldtype => 'text',
					allowsort => 1,
					active => 1
				}
			],
			PaymentFor => [
				'Payment For',
				{
					active => 1,
					displaytype => 'text',
					fieldtype => 'text',
					allowsort => 1
				}
			],
			intExportBankFileID => [
				'PayPal Distribution ID',
				{
					displaytype => 'text',
					fieldtype => 'text',
					dbfield => 'intExportAssocBankFileID'
				}
			],
			intMyobExportID => [
				'SP Invoice Run',
				{
					displaytype => 'lookup',
					fieldtype => 'dropdown',
					dropdownoptions => $CommonVals->{'MYOB'}{'Values'},
					active => 1,
					dbfield => 'intMyobExportID'
				}
			],
			dtRun => [
				'Date Funds Received',
				{
					displaytype => 'date',
					fieldtype => 'date',
					allowsort => 1,
					dbformat => ' DATE_FORMAT(dtRun,"%d/%m/%Y")',
					allowgrouping => 1,
					sortfield => 'TL.dtSettlement'
				}
			],
		},

		Order => [qw(
			intLogID
			intPaymentType
			strTXN
			intAmount
			dtSettlement
			PaymentFor
			SplitLevel
			SplitAmount
			intMyobExportID
		)],
		OptionGroups => {
			default => ['Details', {}],
		},

		Config => {
			FormFieldPrefix => 'c',
			FormName => 'txnform_',
			EmailExport => 1,
			limitView => 5000,
			EmailSenderAddress => $Defs::admin_email,
			SecondarySort => 1,
			RunButtonLabel => 'Run Report',
		},
	);
	$self->{'Config'} = \%config;
}

sub SQLBuilder {
	my ($self, $OptVals, $ActiveFields) = @_;
	my $currentLevel = $self->{'EntityTypeID'} || 0;
	my $intID = $self->{'EntityID'} || 0;
	my $Data = $self->{'Data'};
	my $clientValues = $Data->{'clientValues'};
	my $SystemConfig = $Data->{'SystemConfig'};

	my $from_levels = $OptVals->{'FROM_LEVELS'};
	my $from_list = $OptVals->{'FROM_LIST'};
	my $where_levels = $OptVals->{'WHERE_LEVELS'};
	my $where_list = $OptVals->{'WHERE_LIST'};
	my $current_from = $OptVals->{'CURRENT_FROM'};
	my $current_where = $OptVals->{'CURRENT_WHERE'};
	my $select_levels = $OptVals->{'SELECT_LEVELS'};

	my $sql = '';
	{ # Work out SQL
		my $clubWHERE = $currentLevel == $Defs::LEVEL_CLUB
			? qq[ AND ML.intClubID = $intID ]
			: '';
		$sql = qq[
			SELECT DISTINCT
				TL.intLogID,
				TL.intAmount,
				TL.strTXN,
				TL.intPaymentType,
				ML.intLogType,
				ML.intEntityType,
				ML.intMyobExportID,
				dtSettlement,
				IF(T.intTableType=$Defs::LEVEL_PERSON, CONCAT(M.strLocalSurname, ", ", M.strLocalFirstname), Entity.strLocalName) as PaymentFor,
				SUM(ML.curMoney) as SplitAmount,
				IF(ML.intEntityType = $Defs::LEVEL_NATIONAL, 'National Split',
						IF(ML.intEntityType = $Defs::LEVEL_STATE, 'State Split',
								IF(ML.intEntityType = $Defs::LEVEL_REGION, 'Region Split',
										IF(ML.intEntityType = $Defs::LEVEL_ZONE, 'Zone Split',
										   IF(ML.intEntityType = $Defs::LEVEL_CLUB, 'Club Split',
										    IF((ML.intEntityType = 0 AND intLogType IN (2,3)), 'Fees', '')
										   )
										)
								)
						)
				) as SplitLevel
			FROM
				tblTransLog as TL
				INNER JOIN tblMoneyLog as ML ON (
						ML.intTransLogID = TL.intLogID
						AND ML.intLogType IN ($Defs::ML_TYPE_SPMAX, $Defs::ML_TYPE_LPF, $Defs::ML_TYPE_SPLIT)
				)
				LEFT JOIN tblTransactions as T ON (
						T.intTransactionID = ML.intTransactionID
				)
				LEFT JOIN tblPerson as M ON (
						M.intPersonID = T.intID
						AND T.intTableType = $Defs::LEVEL_PERSON
				)
				LEFT JOIN tblEntity as Entity ON (
						Entity.intEntityID = T.intID
						AND T.intTableType = $Defs::LEVEL_PERSON
				)
				LEFT JOIN tblRegoForm as RF ON (
						RF.intRegoFormID = TL.intRegoFormID
				)
			WHERE TL.intRealmID = $Data->{'Realm'}
				$clubWHERE
				$where_list
			GROUP BY TL.intLogID
		];
		return ($sql, '');
	}
}

1;
==== repo: zwaldowski/homebridge-satellite-fan | path: test/poc.js | language: JavaScript | license: mit | size: 5557 ====

#!/usr/bin/node --harmony

'use strict'

// NOTE: the angle-bracket value placeholders in the option/command strings
// below (e.g. <n>, <uuid>, <state>) were stripped by the extraction; the
// placeholder names used here are reconstructions, not necessarily the
// original ones.
const noble = require('noble'),
      program = require('commander')

program
  .version('0.0.1')
  .option('-p, --prefix <n>', 'Manufacturer identifier prefixed to all fan commands', parseInt)
  .option('-t, --target [mac]', 'MAC address of devices to target', function(val){ return val.toLowerCase() })
  .option('-s, --service <uuid>', 'UUID of fan controller BLE service')
  .option('-w, --write <uuid>', 'UUID of fan controller BLE write characteristic')
  .option('-n, --notify <uuid>', 'UUID of fan controller BLE notify characteristic')

class FanRequest {

  writeInto(buffer) {
    throw new TypeError('Must override method')
  }

  toBuffer() {
    var buffer
    if (program.prefix > 0) {
      buffer = new Buffer(13)
      buffer.writeUInt8(program.prefix)
      this.writeInto(buffer.slice(1))
    } else {
      buffer = new Buffer(12)
      this.writeInto(buffer)
    }

    // Final byte is a modulo-256 checksum of everything before it
    const checksum = buffer.slice(0, buffer.length - 1).reduce(function(a, b){
      return a + b
    }, 0) & 255

    buffer.writeUInt8(checksum, buffer.length - 1)
    return buffer
  }
}

class FanGetStateRequest extends FanRequest {
  writeInto(buffer) {
    buffer.fill(0)
    buffer.writeUInt8(160)
  }
}

Math.clamp = function(number, min, max) {
  return Math.max(min, Math.min(number, max))
}

class FanUpdateLightRequest extends FanRequest {

  constructor(isOn, level) {
    super()
    this.on = isOn ? 1 : 0
    this.level = Math.clamp(level, 0, 100)
  }

  writeInto(buffer) {
    buffer.fill(0)
    buffer.writeUInt8(161)
    buffer.writeUInt8(255, 4)
    buffer.writeUInt8(100, 5)
    buffer.writeUInt8((this.on << 7) | this.level, 6)
    buffer.fill(255, 7, 10)
  }

}

class FanUpdateLevelRequest extends FanRequest {

  constructor(level) {
    super()
    this.level = Math.clamp(level, 0, 3)
  }

  writeInto(buffer) {
    buffer.fill(0)
    buffer.writeUInt8(161)
    buffer.writeUInt8(this.level, 4)
    buffer.fill(255, 5, 10)
  }

}

class FanResponse {

  static fromBuffer(buffer) {
    if (program.prefix > 0) {
      buffer = buffer.slice(1)
    }

    if (buffer.readUInt8(0) != 176) { return null }
    const response = new FanResponse()

    const windVelocity = buffer.readUInt8(2)
    response.supportsFanReversal = (windVelocity & 0b00100000) != 0
    response.maximumFanLevel = windVelocity & 0b00011111

    const currentWindVelocity = buffer.readUInt8(4)
    response.isFanReversed = (currentWindVelocity & 0b10000000) != 0
    response.fanLevel = currentWindVelocity & 0b00011111

    const currentBrightness = buffer.readUInt8(6)
    response.lightIsOn = (currentBrightness & 0b10000000) != 0
    response.lightBrightness = (currentBrightness & 0b01111111)

    return response
  }

}

// MARK: -

var command

program
  .command('current')
  .description('print current state')
  .action(function(env, options) {
    command = new FanGetStateRequest()
  })

program
  .command('fan')
  .description('adjusts the fan')
  .option('-l --level <level>', 'Fan speed', /^(off|low|medium|high)$/i, 'high')
  .action(function(env, options) {
    var level
    switch (env.level) {
      case 'low':
        level = 1
        break
      case 'medium':
        level = 2
        break
      case 'high':
        level = 3
        break
      default:
        level = 0
        break
    }
    command = new FanUpdateLevelRequest(level)
  })

program
  .command('light <state>')
  .description('adjusts the light')
  .option('-l, --level <brightness>', 'Light brightness', parseInt, 100)
  .action(function(env, options) {
    command = new FanUpdateLightRequest(env !== 'off', options.level)
  })

program.parse(process.argv);

if (!command) {
  program.help();
}

if (!program.target) {
  throw new Error('MAC address required')
}

const serviceUUID = program.service || '539c681361a021374f79bf1a11984790'
const writeUUID = program.write || '539c681361a121374f79bf1a11984790'
const notifyUUID = program.notify || '539c681361a221374f79bf1a11984790'

noble.on('stateChange', function(state) {
  if (state === 'poweredOn') {
    console.log('scanning.')
    noble.startScanning([ serviceUUID ], false)
  } else {
    noble.stopScanning()
  }
})

noble.on('discover', function(peripheral) {
  console.log('found ' + peripheral.address)
  if (peripheral.address !== program.target) { return }
  noble.stopScanning()
  explore(peripheral)
});

function bail(error) {
  console.log('failed: ' + error);
  process.exit(1)
}

function explore(peripheral) {
  console.log('connecting.')

  peripheral.once('disconnect', function() {
    peripheral.removeAllListeners()
    explore(peripheral)
  })

  peripheral.connect(function(error) {
    if (error) { bail(error); }

    peripheral.discoverSomeServicesAndCharacteristics([ serviceUUID ], [ writeUUID, notifyUUID ], function(error, services, characteristics) {
      if (error) { bail(error); }
      var service = services[0]
      var write = characteristics[0], notify = characteristics[1]

      notify.on('data', function(data, isNotification) {
        const response = FanResponse.fromBuffer(data)
        if (response) {
          console.log(response)
        } else {
          console.log('sent')
        }
        process.exit()
      })

      notify.subscribe(function(error) {
        if (error) { bail(error); }

        console.log('sending')

        const buffer = command.toBuffer()
        write.write(buffer, false, function(error){
          if (error) { bail(error); }
        })
      })
    })
  })
}
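A sketch of the wire format the classes above produce, assuming no --prefix byte is configured (the packet is then 12 bytes and the arithmetic below follows directly from writeInto/toBuffer):

    const req = new FanUpdateLevelRequest(2)   // medium speed
    const bytes = req.toBuffer()
    // bytes[0]  === 161   (0xA1 command byte)
    // bytes[4]  === 2     (fan level, clamped to 0..3)
    // bytes[5..9] === 255 (padding)
    // bytes[11] === 158   (checksum: (161 + 2 + 5 * 255) % 256)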
==== repo: algking/algking.github.com | path: _posts/2014-10-19-从oh-my-zsh到oh-my-fish.md | language: Markdown | license: mit | size: 2439 ====

---
layout: post
category: "Tools"
tags: [zsh,fish,linux]
---

[TOC]

### A zsh bug with Yosemite? ###
After updating to Mac Yosemite I ran into all kinds of problems, first with php and macport,
and then with zsh. For some reason `zsh` kept using nearly 100% of the CPU, which drained my
MacBook's battery fast and was very frustrating. At first I suspected the plugins, but even
with every plugin disabled it behaved the same.
I had used fish before and found it a pretty good shell: well designed, very convenient, and
quite capable. So I decided to just switch to fish.


![image](/public/img/zsh-yosemite-bug.png)
> After I emailed the zsh list,
> reply: Any chance that it's this issue with zsh-autosuggestions?

[Problem solved](https://github.com/tarruda/zsh-autosuggestions/issues/24)
It turned out the culprit was zsh-autosuggestions.


### Advantages of fish ###
- Autosuggestions: automatic history hints and command completion are very convenient
- Command completions even include hints drawn from man pages
- Some of the `zsh` plugins I like exist for fish too, e.g. `autojump`, which is easy to install via [oh-my-fish](https://github.com/bpinto/oh-my-fish)
- The `fish_config` command lets you edit fish's configuration in the browser

Admittedly `zsh` can do some of the above as well, but personally I think its completion is not
as good as `fish`'s; `zsh` has simply had the stronger community all along, while `fish` has
fewer plugins. In everyday use you rarely need many plugins anyway, and `zsh` becomes very slow
once you enable a few more of them.
I had always used [oh-my-zsh](https://github.com/robbyrussell/oh-my-zsh), but after discovering
[oh-my-fish](https://github.com/bpinto/oh-my-fish) I wanted to switch, because until then I had
assumed fish had no plugin support.


### Installing [oh-my-fish](https://github.com/bpinto/oh-my-fish) ###
    brew install fish
    sudo vi /etc/shells   # add /usr/local/bin/fish, otherwise the next command fails
    chsh -s /usr/local/bin/fish

    git clone git://github.com/bpinto/oh-my-fish.git ~/.oh-my-fish
    # copy the config file
    cp ~/.oh-my-fish/templates/config.fish ~/.config/fish/config.fish

### Configuring fish ###

- Edit ~/.config/fish/config.fish

```
 set fish_plugins autojump bundler brew
 set -xu PATH /usr/local/bin:$PATH

 The one annoyance is that export does not work here; use `set -x` instead:

 `set -x PATH /usr/local/bin $PATH`

 -x : -export
 -u : means it applies to all fish sessions
```
### Writing fish plugins ###

fish plugins look very easy to understand; they are based on functions

```ruby
function rg
  rails generate $argv
end
```
==== repo: tsuyoshiwada/color-classifier | path: src/utils/color.js | language: JavaScript | license: mit | size: 3342 ====

const HEX_SHORT = /^#([a-fA-F0-9]{3})$/;
const HEX = /^#([a-fA-F0-9]{6})$/;

function roundColors(obj, round) {
  if (!round) return obj;

  const o = {};
  for (let k in obj) {
    o[k] = Math.round(obj[k]);
  }
  return o;
}

function hasProp(obj, key) {
  return obj.hasOwnProperty(key);
}

function isRgb(obj) {
  return hasProp(obj, "r") && hasProp(obj, "g") && hasProp(obj, "b");
}


export default class Color {
  static normalizeHex(hex) {
    if (HEX.test(hex)) {
      return hex;

    } else if (HEX_SHORT.test(hex)) {
      const r = hex.slice(1, 2);
      const g = hex.slice(2, 3);
      const b = hex.slice(3, 4);
      return `#${r + r}${g + g}${b + b}`;
    }

    return null;
  }

  static hexToRgb(hex) {
    const normalizedHex = this.normalizeHex(hex);

    if (normalizedHex == null) {
      return null;
    }

    const m = normalizedHex.match(HEX);
    const i = parseInt(m[1], 16);
    const r = (i >> 16) & 0xFF;
    const g = (i >> 8) & 0xFF;
    const b = i & 0xFF;

    return { r, g, b };
  }

  static rgbToHex(rgb) {
    const { r, g, b } = rgb;
    const i = ((Math.round(r) & 0xFF) << 16) + ((Math.round(g) & 0xFF) << 8) + (Math.round(b) & 0xFF);
    const s = i.toString(16).toLowerCase();
    return `#${"000000".substring(s.length) + s}`;
  }

  static rgbToHsv(rgb, round = true) {
    const { r, g, b } = rgb;
    const min = Math.min(r, g, b);
    const max = Math.max(r, g, b);
    const delta = max - min;
    const hsv = {};

    if (max === 0) {
      hsv.s = 0;
    } else {
      hsv.s = (delta / max * 1000) / 10;
    }

    if (max === min) {
      hsv.h = 0;
    } else if (r === max) {
      hsv.h = (g - b) / delta;
    } else if (g === max) {
      hsv.h = 2 + (b - r) / delta;
    } else {
      hsv.h = 4 + (r - g) / delta;
    }

    hsv.h = Math.min(hsv.h * 60, 360);
    hsv.h = hsv.h < 0 ? hsv.h + 360 : hsv.h;

    hsv.v = ((max / 255) * 1000) / 10;

    return roundColors(hsv, round);
  }

  static rgbToXyz(rgb, round = true) {
    const r = rgb.r / 255;
    const g = rgb.g / 255;
    const b = rgb.b / 255;

    const rr = r > 0.04045 ? Math.pow(((r + 0.055) / 1.055), 2.4) : r / 12.92;
    const gg = g > 0.04045 ? Math.pow(((g + 0.055) / 1.055), 2.4) : g / 12.92;
    const bb = b > 0.04045 ? Math.pow(((b + 0.055) / 1.055), 2.4) : b / 12.92;

    const x = (rr * 0.4124 + gg * 0.3576 + bb * 0.1805) * 100;
    const y = (rr * 0.2126 + gg * 0.7152 + bb * 0.0722) * 100;
    const z = (rr * 0.0193 + gg * 0.1192 + bb * 0.9505) * 100;

    return roundColors({ x, y, z }, round);
  }

  static rgbToLab(rgb, round = true) {
    const xyz = Color.rgbToXyz(rgb, false);
    let { x, y, z } = xyz;

    x /= 95.047;
    y /= 100;
    z /= 108.883;

    x = x > 0.008856 ? Math.pow(x, 1 / 3) : 7.787 * x + 16 / 116;
    y = y > 0.008856 ? Math.pow(y, 1 / 3) : 7.787 * y + 16 / 116;
    z = z > 0.008856 ? Math.pow(z, 1 / 3) : 7.787 * z + 16 / 116;

    const l = (116 * y) - 16;
    const a = 500 * (x - y);
    const b = 200 * (y - z);

    return roundColors({ l, a, b }, round);
  }

  constructor(value) {
    this.original = value;

    if (isRgb(value)) {
      this.rgb = value;
      this.hex = Color.rgbToHex(value);

    } else {
      this.hex = Color.normalizeHex(value);
      this.rgb = Color.hexToRgb(this.hex);
    }

    this.hsv = Color.rgbToHsv(this.rgb);
  }
}
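A quick usage sketch for the class above; the expected values follow from the conversion code itself (default rounding applied):

    const c = new Color('#19f');   // short hex is normalized to '#1199ff'
    c.rgb;                         // -> { r: 17, g: 153, b: 255 }
    c.hsv;                         // -> { h: 206, s: 93, v: 100 }
    Color.rgbToHex({ r: 17, g: 153, b: 255 });  // -> '#1199ff'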
==== repo: ajile/ember-validation | path: app/components/ember-validation-error-list.js | language: JavaScript | license: mit | size: 83 ====

export { default } from 'ember-validation/components/ember-validation-error-list';

==== repo: atSistemas/react-base | path: src/app/containers/WeatherStations/styles.css | language: CSS | license: mit | size: 77 ====

.WeatherStations {
  margin: 30px 30px 30px 30px;
}

.clear {
  clear: both;
}
==== (last row of the dump; its trailing metadata is cut off, and the file itself ends mid-comment below) ====

/*global window */
/**
 * @license countdown.js v2.5.2 http://countdownjs.org
 * Copyright (c)2006-2014 Stephen M. McKamey.
 * Licensed under The MIT License.
 */
/*jshint bitwise:false */

/**
 * @public
 * @type {Object|null}
 */
var module;

/**
 * API entry
 * @public
 * @param {function(Object)|Date|number} start the starting date
 * @param {function(Object)|Date|number} end the ending date
 * @param {number} units the units to populate
 * @return {Object|number}
 */
var countdown = (

/**
 * @param {Object} module CommonJS Module
 */
function(module) {
	/*jshint smarttabs:true */

	'use strict';

	/**
	 * @private
	 * @const
	 * @type {number}
	 */
	var MILLISECONDS	= 0x001;

	/**
	 * @private
	 * @const
	 * @type {number}
	 */
	var SECONDS			= 0x002;

	/**
	 * @private
	 * @const
	 * @type {number}
	 */
	var MINUTES			= 0x004;

	/**
	 * @private
	 * @const
	 * @type {number}
	 */
	var HOURS			= 0x008;

	/**
	 * @private
	 * @const
	 * @type {number}
	 */
	var DAYS			= 0x010;

	/**
	 * @private
	 * @const
	 * @type {number}
	 */
	var WEEKS			= 0x020;

	/**
	 * @private
	 * @const
	 * @type {number}
	 */
	var MONTHS			= 0x040;

	/**
	 * @private
	 * @const
	 * @type {number}
	 */
	var YEARS			= 0x080;

	/**
	 * @private
	 * @const
	 * @type {number}
	 */
	var DECADES			= 0x100;

	/**
	 * @private
	 * @const
	 * @type {number}
	 */
	var CENTURIES		= 0x200;

	/**
	 * @private
	 * @const
	 * @type {number}
	 */
	var MILLENNIA		= 0x400;

	/**
	 * @private
	 * @const
	 * @type {number}
	 */
	var DEFAULTS		= YEARS|MONTHS|DAYS|HOURS|MINUTES|SECONDS;

	/**
	 * @private
	 * @const
	 * @type {number}
	 */
	var MILLISECONDS_PER_SECOND = 1000;

	/**
	 * @private
	 * @const
	 * @type {number}
	 */
	var SECONDS_PER_MINUTE = 60;

	/**
	 * @private
	 * @const
	 * @type {number}
	 */
	var MINUTES_PER_HOUR = 60;

	/**
	 * @private
	 * @const
	 * @type {number}
	 */
	var HOURS_PER_DAY = 24;

	/**
	 * @private
	 * @const
	 * @type {number}
	 */
	var MILLISECONDS_PER_DAY = HOURS_PER_DAY * MINUTES_PER_HOUR * SECONDS_PER_MINUTE * MILLISECONDS_PER_SECOND;

	/**
	 * @private
	 * @const
	 * @type {number}
	 */
	var DAYS_PER_WEEK = 7;

	/**
	 * @private
	 * @const
	 * @type {number}
	 */
	var MONTHS_PER_YEAR = 12;

	/**
	 * @private
	 * @const
	 * @type {number}
	 */
	var YEARS_PER_DECADE = 10;

	/**
	 * @private
	 * @const
	 * @type {number}
	 */
	var DECADES_PER_CENTURY = 10;

	/**
	 * @private
	 * @const
	 * @type {number}
	 */
	var CENTURIES_PER_MILLENNIUM = 10;

	/**
	 * @private
	 * @param {number} x number
	 * @return {number}
	 */
	var ceil = Math.ceil;

	/**
	 * @private
	 * @param {number} x number
	 * @return {number}
	 */
	var floor = Math.floor;

	/**
	 * @private
	 * @param {Date} ref reference date
	 * @param {number} shift number of months to shift
	 * @return {number} number of days shifted
	 */
	function borrowMonths(ref, shift) {
		var prevTime = ref.getTime();

		// increment month by shift
		ref.setMonth( ref.getMonth() + shift );

		// this is the trickiest since months vary in length
		return Math.round( (ref.getTime() - prevTime) / MILLISECONDS_PER_DAY );
	}

	/**
	 * @private
	 * @param {Date} ref reference date
	 * @return {number} number of days
	 */
	function daysPerMonth(ref) {
		var a = ref.getTime();

		// increment month by 1
		var b = new Date(a);
		b.setMonth( ref.getMonth() + 1 );

		// this is the trickiest since months vary in length
		return Math.round( (b.getTime() - a) / MILLISECONDS_PER_DAY );
	}

	/**
	 * @private
	 * @param {Date} ref reference date
	 * @return {number} number of days
	 */
	function daysPerYear(ref) {
		var a = ref.getTime();

		// increment year by 1
		var b = new Date(a);
		b.setFullYear( ref.getFullYear() + 1 );

		// this is the trickiest since years (periodically) vary in length
		return Math.round( (b.getTime() - a) / MILLISECONDS_PER_DAY );
	}

	/**
	 * Applies the Timespan to the given date.
	 * 
	 * @private
	 * @param {Timespan} ts
	 * @param {Date=} date
	 * @return {Date}
	 */
	function addToDate(ts, date) {
		date = (date instanceof Date) || ((date !== null) && isFinite(date)) ? new Date(+date) : new Date();
		if (!ts) {
			return date;
		}

		// if there is a value field, use it directly
		var value = +ts.value || 0;
		if (value) {
			date.setTime(date.getTime() + value);
			return date;
		}

		value = +ts.milliseconds || 0;
		if (value) {
			date.setMilliseconds(date.getMilliseconds() + value);
		}

		value = +ts.seconds || 0;
		// if (value) {
			date.setSeconds(date.getSeconds() + value);
		// }

		value = +ts.minutes || 0;
		if (value) {
			date.setMinutes(date.getMinutes() + value);
		}

		value = +ts.hours || 0;
		if (value) {
			date.setHours(date.getHours() + value);
		}

		value = +ts.weeks || 0;
		if (value) {
			value *= DAYS_PER_WEEK;
		}

		value += +ts.days || 0;
		if (value) {
			date.setDate(date.getDate() + value);
		}

		value = +ts.months || 0;
		if (value) {
			date.setMonth(date.getMonth() + value);
		}

		value = +ts.millennia || 0;
		if (value) {
			value *= CENTURIES_PER_MILLENNIUM;
		}

		value += +ts.centuries || 0;
		if (value) {
			value *= DECADES_PER_CENTURY;
		}

		value += +ts.decades || 0;
		if (value) {
			value *= YEARS_PER_DECADE;
		}

		value += +ts.years || 0;
		if (value) {
			date.setFullYear(date.getFullYear() + value);
		}

		return date;
	}

	/**
	 * @private
	 * @const
	 * @type {number}
	 */
	var LABEL_MILLISECONDS	= 0;

	/**
	 * @private
	 * @const
	 * @type {number}
	 */
	var LABEL_SECONDS		= 1;

	/**
	 * @private
	 * @const
	 * @type {number}
	 */
	var LABEL_MINUTES		= 2;

	/**
	 * @private
	 * @const
	 * @type {number}
	 */
	var LABEL_HOURS			= 3;

	/**
	 * @private
	 * @const
	 * @type {number}
	 */
	var LABEL_DAYS			= 4;

	/**
	 * @private
	 * @const
	 * @type {number}
	 */
	var LABEL_WEEKS			= 5;

	/**
	 * @private
	 * @const
	 * @type {number}
	 */
	var LABEL_MONTHS		= 6;

	/**
	 * @private
	 * @const
	 * @type {number}
	 */
	var LABEL_YEARS			= 7;

	/**
	 * @private
	 * @const
	 * @type {number}
	 */
	var LABEL_DECADES		= 8;

	/**
	 * @private
	 * @const
	 * @type {number}
	 */
	var LABEL_CENTURIES		= 9;

	/**
	 * @private
	 * @const
	 * @type {number}
	 */
	var LABEL_MILLENNIA		= 10;

	/**
	 * @private
	 * @type {Array.<string>}
	 */
	var LABELS_SINGLUAR;

	/**
	 * @private
	 * @type {Array.<string>}
	 */
	var LABELS_PLURAL;

	/**
	 * @private
	 * @type {string}
	 */
	var LABEL_LAST;

	/**
	 * @private
	 * @type {string}
	 */
	var LABEL_DELIM;

	/**
	 * @private
	 * @type {string}
	 */
	var LABEL_NOW;

	/**
	 * Formats a number as a string
	 * 
	 * @private
	 * @param {number} value
	 * @return {string}
	 */
	var formatNumber;

	/**
	 * @private
	 * @param {number} value
	 * @param {number} unit unit index into label list
	 * @return {string}
	 */
	function plurality(value, unit) {
		return formatNumber(value)+((value === 1) ? LABELS_SINGLUAR[unit] : LABELS_PLURAL[unit]);
	}

	/**
	 * Formats the entries with singular or plural labels
	 * 
	 * @private
	 * @param {Timespan} ts
	 * @return {Array.<string>}
	 */
	var formatList;

	/**
	 * Timespan representation of a duration of time
	 * 
	 * @private
	 * @this {Timespan}
	 * @constructor
	 */
	function Timespan() {}

	/**
	 * Formats the Timespan as a sentence
	 * 
	 * @param {string=} emptyLabel the string to use when no values returned
	 * @return {string}
	 */
	Timespan.prototype.toString = function(emptyLabel) {
		var label = formatList(this);

		var count = label.length;
		if (!count) {
			return emptyLabel ? ''+emptyLabel : LABEL_NOW;
		}
		if (count === 1) {
			return label[0];
		}

		var last = LABEL_LAST+label.pop();
		return label.join(LABEL_DELIM)+last;
	};

	/**
	 * Formats the Timespan as a sentence in HTML
	 * 
	 * NOTE: the tag-wrapping expressions in this method were partially stripped
	 * by the extraction; they are restored here from the upstream countdown.js
	 * v2.5.2 source.
	 * 
	 * @param {string=} tag HTML tag name to wrap each value
	 * @param {string=} emptyLabel the string to use when no values returned
	 * @return {string}
	 */
	Timespan.prototype.toHTML = function(tag, emptyLabel) {
		tag = tag || 'span';
		var label = formatList(this);

		var count = label.length;
		if (!count) {
			emptyLabel = emptyLabel || LABEL_NOW;
			return emptyLabel ? '<'+tag+'>'+emptyLabel+'</'+tag+'>' : emptyLabel;
		}
		for (var i=0; i<count; i++) {
			// wrap each unit in tag
			label[i] = '<'+tag+'>'+label[i]+'</'+tag+'>';
		}
		if (count === 1) {
			return label[0];
		}

		var last = LABEL_LAST+label.pop();
		return label.join(LABEL_DELIM)+last;
	};

	/**
	 * Applies the Timespan to the given date
	 * 
	 * @param {Date=} date the date to which the timespan is added.
	 * @return {Date}
	 */
	Timespan.prototype.addTo = function(date) {
		return addToDate(this, date);
	};

	/**
	 * Formats the entries as English labels
	 * 
	 * @private
	 * @param {Timespan} ts
	 * @return {Array.<string>}
	 */
	formatList = function(ts) {
		var list = [];

		var value = ts.millennia;
		if (value) {
			list.push(plurality(value, LABEL_MILLENNIA));
		}

		value = ts.centuries;
		if (value) {
			list.push(plurality(value, LABEL_CENTURIES));
		}

		value = ts.decades;
		if (value) {
			list.push(plurality(value, LABEL_DECADES));
		}

		value = ts.years;
		if (value) {
			list.push(plurality(value, LABEL_YEARS));
		}

		value = ts.months;
		if (value) {
			list.push(plurality(value, LABEL_MONTHS));
		}

		value = ts.weeks;
		if (value) {
			list.push(plurality(value, LABEL_WEEKS));
		}

		value = ts.days;
		if (value) {
			list.push(plurality(value, LABEL_DAYS));
		}

		value = ts.hours;
		if (value) {
			list.push(plurality(value, LABEL_HOURS));
		}

		value = ts.minutes;
		if (value) {
			list.push(plurality(value, LABEL_MINUTES));
		}

		value = ts.seconds;
		// if (value) {
			list.push(plurality(value, LABEL_SECONDS));
		// }

		value = ts.milliseconds;
		if (value) {
			list.push(plurality(value, LABEL_MILLISECONDS));
		}

		return list;
	};

	/**
	 * Borrow any underflow units, carry any overflow units
	 * 
	 * @private
	 * @param {Timespan} ts
	 * @param {string} toUnit
	 */
	function rippleRounded(ts, toUnit) {
		switch (toUnit) {
			case 'seconds':
				if (ts.seconds !== SECONDS_PER_MINUTE || isNaN(ts.minutes)) {
					return;
				}
				// ripple seconds up to minutes
				ts.minutes++;
				ts.seconds = 0;

				/* falls through */
			case 'minutes':
				if (ts.minutes !== MINUTES_PER_HOUR || isNaN(ts.hours)) {
					return;
				}
				// ripple minutes up to hours
				ts.hours++;
				ts.minutes = 0;

				/* falls through */
			case 'hours':
				if (ts.hours !== HOURS_PER_DAY || isNaN(ts.days)) {
					return;
				}
				// ripple hours up to days
				ts.days++;
				ts.hours = 0;

				/* falls through */
			case 'days':
				if (ts.days !== DAYS_PER_WEEK || isNaN(ts.weeks)) {
					return;
				}
				// ripple days up to weeks
				ts.weeks++;
				ts.days = 0;

				/* falls through */
			case 'weeks':
				if (ts.weeks !== daysPerMonth(ts.refMonth)/DAYS_PER_WEEK || isNaN(ts.months)) {
					return;
				}
				// ripple weeks up to months
				ts.months++;
				ts.weeks = 0;

				/* falls through */
			case 'months':
				if (ts.months !== MONTHS_PER_YEAR || isNaN(ts.years)) {
					return;
				}
				// ripple months up to years
				ts.years++;
				ts.months = 0;

				/* falls through */
			case 'years':
				if (ts.years !== YEARS_PER_DECADE || isNaN(ts.decades)) {
					return;
				}
				// ripple years up to decades
				ts.decades++;
				ts.years = 0;

				/* falls through */
			case 'decades':
				if (ts.decades !== DECADES_PER_CENTURY || isNaN(ts.centuries)) {
					return;
				}
				// ripple decades up to centuries
				ts.centuries++;
				ts.decades = 0;

				/* falls through */
			case 'centuries':
				if (ts.centuries !== CENTURIES_PER_MILLENNIUM || isNaN(ts.millennia)) {
					return;
				}
				// ripple centuries up to millennia
				ts.millennia++;
				ts.centuries = 0;
				/* falls through */
			}
	}

	/**
	 * Ripple up partial units one place
	 * 
	 * @private
	 * @param {Timespan} ts timespan
	 * @param {number} frac accumulated fractional value
	 * @param {string} fromUnit source unit name
	 * @param {string} toUnit target unit name
	 * @param {number} conversion multiplier between units
	 * @param {number} digits max number of decimal digits to output
	 * @return {number} new fractional value
	 */
	function fraction(ts, frac, fromUnit, toUnit, conversion, digits) {
		if (ts[fromUnit] >= 0) {
			frac += ts[fromUnit];
			delete ts[fromUnit];
		}

		frac /= conversion;
		if (frac + 1 <= 1) {
			// drop if below machine epsilon
			return 0;
		}

		if (ts[toUnit] >= 0) {
			// ensure does not have more than specified number of digits
			ts[toUnit] = +(ts[toUnit] + frac).toFixed(digits);
			rippleRounded(ts, toUnit);
			return 0;
		}

		return frac;
	}

	/**
	 * Ripple up partial units to next existing
	 * 
	 * @private
	 * @param {Timespan} ts
	 * @param {number} digits max number of decimal digits to output
	 */
	function fractional(ts, digits) {
		var frac = fraction(ts, 0, 'milliseconds', 'seconds', MILLISECONDS_PER_SECOND, digits);
		if (!frac) { return; }

		frac = fraction(ts, frac, 'seconds', 'minutes', SECONDS_PER_MINUTE, digits);
		if (!frac) { return; }

		frac = fraction(ts, frac, 'minutes', 'hours', MINUTES_PER_HOUR, digits);
		if (!frac) { return; }

		frac = fraction(ts, frac, 'hours', 'days', HOURS_PER_DAY, digits);
		if (!frac) { return; }

		frac = fraction(ts, frac, 'days', 'weeks', DAYS_PER_WEEK, digits);
		if (!frac) { return; }

		frac = fraction(ts, frac, 'weeks', 'months', daysPerMonth(ts.refMonth)/DAYS_PER_WEEK, digits);
		if (!frac) { return; }

		frac = fraction(ts, frac, 'months', 'years', daysPerYear(ts.refMonth)/daysPerMonth(ts.refMonth), digits);
		if (!frac) { return; }

		frac = fraction(ts, frac, 'years', 'decades', YEARS_PER_DECADE, digits);
		if (!frac) { return; }

		frac = fraction(ts, frac, 'decades', 'centuries', DECADES_PER_CENTURY, digits);
		if (!frac) { return; }

		frac = fraction(ts, frac, 'centuries', 'millennia', CENTURIES_PER_MILLENNIUM, digits);

		// should never reach this with remaining fractional value
		if (frac) { throw new Error('Fractional unit overflow'); }
	}

	/**
	 * Borrow any underflow units, carry any overflow units
	 * 
	 * @private
	 * @param {Timespan} ts
	 */
	function ripple(ts) {
		var x;

		if (ts.milliseconds < 0) {
			// ripple seconds down to milliseconds
			x = ceil(-ts.milliseconds / MILLISECONDS_PER_SECOND);
			ts.seconds -= x;
			ts.milliseconds += x * MILLISECONDS_PER_SECOND;

		} else if (ts.milliseconds >= MILLISECONDS_PER_SECOND) {
			// ripple milliseconds up to seconds
			ts.seconds += floor(ts.milliseconds / MILLISECONDS_PER_SECOND);
			ts.milliseconds %= MILLISECONDS_PER_SECOND;
		}

		if (ts.seconds < 0) {
			// ripple minutes down to seconds
			x = ceil(-ts.seconds / SECONDS_PER_MINUTE);
			ts.minutes -= x;
			ts.seconds += x * SECONDS_PER_MINUTE;

		} else if (ts.seconds >= SECONDS_PER_MINUTE) {
			// ripple seconds up to minutes
			ts.minutes += floor(ts.seconds / SECONDS_PER_MINUTE);
			ts.seconds %= SECONDS_PER_MINUTE;
		}

		if (ts.minutes < 0) {
			// ripple hours down to minutes
			x = ceil(-ts.minutes / MINUTES_PER_HOUR);
			ts.hours -= x;
			ts.minutes += x * MINUTES_PER_HOUR;

		} else if (ts.minutes >= MINUTES_PER_HOUR) {
			// ripple minutes up to hours
			ts.hours += floor(ts.minutes / MINUTES_PER_HOUR);
			ts.minutes %= MINUTES_PER_HOUR;
		}

		if (ts.hours < 0) {
			// ripple days down to hours
			x = ceil(-ts.hours / HOURS_PER_DAY);
			ts.days -= x;
			ts.hours += x * HOURS_PER_DAY;

		} else if (ts.hours >= HOURS_PER_DAY) {
			// ripple hours up to days
			ts.days += floor(ts.hours / HOURS_PER_DAY);
			ts.hours %= HOURS_PER_DAY;
		}

		while (ts.days < 0) {
			// NOTE: never actually seen this loop more than once

			// ripple months down to days
			ts.months--;
			ts.days += borrowMonths(ts.refMonth, 1);
		}

		// weeks is always zero here

		if (ts.days >= DAYS_PER_WEEK) {
			// ripple days up to weeks
			ts.weeks += floor(ts.days / DAYS_PER_WEEK);
			ts.days %= DAYS_PER_WEEK;
		}

		if (ts.months < 0) {
			// ripple years down to months
			x = ceil(-ts.months / MONTHS_PER_YEAR);
			ts.years -= x;
			ts.months += x * MONTHS_PER_YEAR;

		} else if (ts.months >= MONTHS_PER_YEAR) {
			// ripple months up to years
			ts.years += floor(ts.months / MONTHS_PER_YEAR);
			ts.months %= MONTHS_PER_YEAR;
		}

		// years is always non-negative here
		// decades, centuries and millennia are always zero here

		if (ts.years >= YEARS_PER_DECADE) {
			// ripple years up to decades
			ts.decades += floor(ts.years / YEARS_PER_DECADE);
			ts.years %= YEARS_PER_DECADE;

			if (ts.decades >= DECADES_PER_CENTURY) {
				// ripple decades up to centuries
				ts.centuries += floor(ts.decades / DECADES_PER_CENTURY);
				ts.decades %= DECADES_PER_CENTURY;

				if (ts.centuries >= CENTURIES_PER_MILLENNIUM) {
					// ripple centuries up to millennia
					ts.millennia += floor(ts.centuries / CENTURIES_PER_MILLENNIUM);
					ts.centuries %= CENTURIES_PER_MILLENNIUM;
				}
			}
		}
	}

	/**
	 * Remove any units not requested
	 * 
	 * @private
	 * @param {Timespan} ts
	 * @param {number} units the units to populate
	 * @param {number} max number of labels to output
	 * @param {number} digits max number of decimal digits to output
	 */
	function pruneUnits(ts, units, max, digits) {
		var count = 0;

		// Calc from largest unit to smallest to prevent underflow
		if (!(units & MILLENNIA) || (count >= max)) {
			// ripple millennia down to centuries
			ts.centuries += ts.millennia * CENTURIES_PER_MILLENNIUM;
			delete ts.millennia;

		} else if (ts.millennia) {
			count++;
		}

		if (!(units & CENTURIES) || (count >= max)) {
			// ripple centuries down to decades
			ts.decades += ts.centuries * DECADES_PER_CENTURY;
			delete ts.centuries;

		} else if (ts.centuries) {
			count++;
		}

		if (!(units & DECADES) || (count >= max)) {
			// ripple decades down to years
			ts.years += ts.decades * YEARS_PER_DECADE;
			delete ts.decades;

		} else if (ts.decades) {
			count++;
		}

		if (!(units & YEARS) || (count >= max)) {
			// ripple years down to months
			ts.months += ts.years * MONTHS_PER_YEAR;
			delete ts.years;

		} else if (ts.years) {
			count++;
		}

		if (!(units & MONTHS) || (count >= max)) {
			// ripple months down to days
			if (ts.months) {
				ts.days += borrowMonths(ts.refMonth, ts.months);
			}
			delete ts.months;

			if (ts.days >= DAYS_PER_WEEK) {
				// ripple day overflow back up to weeks
				ts.weeks += floor(ts.days / DAYS_PER_WEEK);
				ts.days %= DAYS_PER_WEEK;
			}

		} else if (ts.months) {
			count++;
		}

		if (!(units & WEEKS) || (count >= max)) {
			// ripple weeks down to days
			ts.days += ts.weeks * DAYS_PER_WEEK;
			delete ts.weeks;

		} else if (ts.weeks) {
			count++;
		}

		if (!(units & DAYS) || (count >= max)) {
			//ripple days down to hours
			ts.hours += ts.days * HOURS_PER_DAY;
			delete ts.days;

		} else if (ts.days) {
			count++;
		}

		if (!(units & HOURS) || (count >= max)) {
			// ripple hours down to minutes
			ts.minutes += ts.hours * MINUTES_PER_HOUR;
			delete ts.hours;

		} else if (ts.hours) {
			count++;
		}

		if (!(units & MINUTES) || (count >= max)) {
			// ripple minutes down to seconds
			ts.seconds += ts.minutes * SECONDS_PER_MINUTE;
			delete ts.minutes;

		} else if (ts.minutes) {
			count++;
		}

		if (!(units & SECONDS) || (count >= max)) {
			// ripple seconds down to milliseconds
			ts.milliseconds += ts.seconds * MILLISECONDS_PER_SECOND;
			delete ts.seconds;

		} else if (ts.seconds) {
			count++;
		}

		// nothing to ripple milliseconds down to
		// so ripple back up to smallest existing unit as a fractional value
		if (!(units & MILLISECONDS) || (count >= max)) {
			fractional(ts, digits);
		}
	}

	/**
	 * Populates the Timespan object
	 * 
	 * @private
	 * @param {Timespan} ts
	 * @param {?Date} start the starting date
	 * @param {?Date} end the ending date
	 * @param {number} units the units to populate
	 * @param {number} max number of labels to output
	 * @param {number} digits max number of decimal digits to output
	 */
	function populate(ts, start, end, units, max, digits) {
		var now = new Date();

		ts.start = start = start || now;
		ts.end = end = end || now;
		ts.units = units;

		ts.value = end.getTime() - start.getTime();
		if (ts.value < 0) {
			// swap if reversed
			var tmp = end;
			end = start;
			start = tmp;
		}

		// reference month for determining days in month
		ts.refMonth = new Date(start.getFullYear(), start.getMonth(), 15, 12, 0, 0);
		try {
			// reset to initial deltas
			ts.millennia = 0;
			ts.centuries = 0;
			ts.decades = 0;
			ts.years = end.getFullYear() - start.getFullYear();
			ts.months = end.getMonth() - start.getMonth();
			ts.weeks = 0;
			ts.days = end.getDate() - start.getDate();
			ts.hours = end.getHours() - start.getHours();
			ts.minutes = end.getMinutes() - start.getMinutes();
			ts.seconds = end.getSeconds() - start.getSeconds();
			ts.milliseconds = end.getMilliseconds() - start.getMilliseconds();

			ripple(ts);
			pruneUnits(ts, units, max, digits);

		} finally {
			delete ts.refMonth;
		}

		return ts;
	}

	/**
	 * Determine an appropriate refresh rate based upon units
	 * 
	 * @private
	 * @param {number} units the units to populate
	 * @return {number} milliseconds to delay
	 */
	function getDelay(units) {
		if (units & MILLISECONDS) {
			// refresh very quickly
			return MILLISECONDS_PER_SECOND / 30; //30Hz
		}

		if (units & SECONDS) {
			// refresh every second
			return MILLISECONDS_PER_SECOND; //1Hz
		}

		if (units & MINUTES) {
			// refresh every minute
			return MILLISECONDS_PER_SECOND * SECONDS_PER_MINUTE;
		}

		if (units & HOURS) {
			// refresh hourly
			return MILLISECONDS_PER_SECOND * SECONDS_PER_MINUTE * MINUTES_PER_HOUR;
		}
		
		if (units & DAYS) {
			// refresh daily
			return MILLISECONDS_PER_SECOND * SECONDS_PER_MINUTE * MINUTES_PER_HOUR * HOURS_PER_DAY;
		}

		// refresh the rest weekly
		return MILLISECONDS_PER_SECOND * SECONDS_PER_MINUTE * MINUTES_PER_HOUR * HOURS_PER_DAY * DAYS_PER_WEEK;
	}

	/**
	 * API entry point
	 * 
	 * @public
	 * @param {Date|number|Timespan|null|function(Timespan,number)} start the starting date
	 * @param {Date|number|Timespan|null|function(Timespan,number)} end the ending date
	 * @param {number=} units the units to populate
	 * @param {number=} max number of labels to output
	 * @param {number=} digits max number of decimal digits to output
	 * @return {Timespan|number}
	 */
	function countdown(start, end, units, max, digits) {
		var callback;

		// ensure some units or use defaults
		units = +units || DEFAULTS;
		// max must be positive
		max = (max > 0) ? max : NaN;
		// clamp digits to an integer between [0, 20]
		digits = (digits > 0) ? (digits < 20) ? Math.round(digits) : 20 : 0;

		// ensure start date
		var startTS = null;
		if ('function' === typeof start) {
			callback = start;
			start = null;

		} else if (!(start instanceof Date)) {
			if ((start !== null) && isFinite(start)) {
				start = new Date(+start);
			} else {
				if ('object' === typeof start) {
					startTS = /** @type{Timespan} */(start);
				}
				start = null;
			}
		}

		// ensure end date
		var endTS = null;
		if ('function' === typeof end) {
			callback = end;
			end = null;

		} else if (!(end instanceof Date)) {
			if ((end !== null) && isFinite(end)) {
				end = new Date(+end);
			} else {
				if ('object' === typeof end) {
					endTS = /** @type{Timespan} */(end);
				}
				end = null;
			}
		}

		// must wait to interpret timespans until after resolving dates
		if (startTS) {
			start = addToDate(startTS, end);
		}
		if (endTS) {
			end = addToDate(endTS, start);
		}

		if (!start && !end) {
			// used for unit testing
			return new Timespan();
		}

		if (!callback) {
			return populate(new Timespan(), /** @type{Date} */(start), /** @type{Date} */(end), /** @type{number} */(units), /** @type{number} */(max), /** @type{number} */(digits));
		}

		// base delay off units
		var delay = getDelay(units),
			timerId,
			fn = function() {
				callback(
					populate(new Timespan(), /** @type{Date} */(start), /** @type{Date} */(end), /** @type{number} */(units), /** @type{number} */(max), /** @type{number} */(digits)),
					timerId
				);
			};

		fn();
		return (timerId = setInterval(fn, delay));
	}
Math.round(digits) : 20 : 0;\n\n\t\t// ensure start date\n\t\tvar startTS = null;\n\t\tif ('function' === typeof start) {\n\t\t\tcallback = start;\n\t\t\tstart = null;\n\n\t\t} else if (!(start instanceof Date)) {\n\t\t\tif ((start !== null) && isFinite(start)) {\n\t\t\t\tstart = new Date(+start);\n\t\t\t} else {\n\t\t\t\tif ('object' === typeof start) {\n\t\t\t\t\tstartTS = /** @type{Timespan} */(start);\n\t\t\t\t}\n\t\t\t\tstart = null;\n\t\t\t}\n\t\t}\n\n\t\t// ensure end date\n\t\tvar endTS = null;\n\t\tif ('function' === typeof end) {\n\t\t\tcallback = end;\n\t\t\tend = null;\n\n\t\t} else if (!(end instanceof Date)) {\n\t\t\tif ((end !== null) && isFinite(end)) {\n\t\t\t\tend = new Date(+end);\n\t\t\t} else {\n\t\t\t\tif ('object' === typeof end) {\n\t\t\t\t\tendTS = /** @type{Timespan} */(end);\n\t\t\t\t}\n\t\t\t\tend = null;\n\t\t\t}\n\t\t}\n\n\t\t// must wait to interpret timespans until after resolving dates\n\t\tif (startTS) {\n\t\t\tstart = addToDate(startTS, end);\n\t\t}\n\t\tif (endTS) {\n\t\t\tend = addToDate(endTS, start);\n\t\t}\n\n\t\tif (!start && !end) {\n\t\t\t// used for unit testing\n\t\t\treturn new Timespan();\n\t\t}\n\n\t\tif (!callback) {\n\t\t\treturn populate(new Timespan(), /** @type{Date} */(start), /** @type{Date} */(end), /** @type{number} */(units), /** @type{number} */(max), /** @type{number} */(digits));\n\t\t}\n\n\t\t// base delay off units\n\t\tvar delay = getDelay(units),\n\t\t\ttimerId,\n\t\t\tfn = function() {\n\t\t\t\tcallback(\n\t\t\t\t\tpopulate(new Timespan(), /** @type{Date} */(start), /** @type{Date} */(end), /** @type{number} */(units), /** @type{number} */(max), /** @type{number} */(digits)),\n\t\t\t\t\ttimerId\n\t\t\t\t);\n\t\t\t};\n\n\t\tfn();\n\t\treturn (timerId = setInterval(fn, delay));\n\t}\n\n\t/**\n\t * @public\n\t * @const\n\t * @type {number}\n\t */\n\tcountdown.MILLISECONDS = MILLISECONDS;\n\n\t/**\n\t * @public\n\t * @const\n\t * @type {number}\n\t */\n\tcountdown.SECONDS = SECONDS;\n\n\t/**\n\t * @public\n\t * @const\n\t * @type {number}\n\t */\n\tcountdown.MINUTES = MINUTES;\n\n\t/**\n\t * @public\n\t * @const\n\t * @type {number}\n\t */\n\tcountdown.HOURS = HOURS;\n\n\t/**\n\t * @public\n\t * @const\n\t * @type {number}\n\t */\n\tcountdown.DAYS = DAYS;\n\n\t/**\n\t * @public\n\t * @const\n\t * @type {number}\n\t */\n\tcountdown.WEEKS = WEEKS;\n\n\t/**\n\t * @public\n\t * @const\n\t * @type {number}\n\t */\n\tcountdown.MONTHS = MONTHS;\n\n\t/**\n\t * @public\n\t * @const\n\t * @type {number}\n\t */\n\tcountdown.YEARS = YEARS;\n\n\t/**\n\t * @public\n\t * @const\n\t * @type {number}\n\t */\n\tcountdown.DECADES = DECADES;\n\n\t/**\n\t * @public\n\t * @const\n\t * @type {number}\n\t */\n\tcountdown.CENTURIES = CENTURIES;\n\n\t/**\n\t * @public\n\t * @const\n\t * @type {number}\n\t */\n\tcountdown.MILLENNIA = MILLENNIA;\n\n\t/**\n\t * @public\n\t * @const\n\t * @type {number}\n\t */\n\tcountdown.DEFAULTS = DEFAULTS;\n\n\t/**\n\t * @public\n\t * @const\n\t * @type {number}\n\t */\n\tcountdown.ALL = MILLENNIA|CENTURIES|DECADES|YEARS|MONTHS|WEEKS|DAYS|HOURS|MINUTES|SECONDS|MILLISECONDS;\n\n\t/**\n\t * Override the unit labels\n\t * @public\n\t * @param {string|Array=} singular a pipe ('|') delimited list of singular unit name overrides\n\t * @param {string|Array=} plural a pipe ('|') delimited list of plural unit name overrides\n\t * @param {string=} last a delimiter before the last unit (default: ' and ')\n\t * @param {string=} delim a delimiter to use between all other units (default: ', ')\n\t * @param {string=} empty a label to 
use when all units are zero (default: '')\n\t * @param {function(number):string=} formatter a function which formats numbers as a string\n\t */\n\tcountdown.setLabels = function(singular, plural, last, delim, empty, formatter) {\n\t\tsingular = singular || [];\n\t\tif (singular.split) {\n\t\t\tsingular = singular.split('|');\n\t\t}\n\t\tplural = plural || [];\n\t\tif (plural.split) {\n\t\t\tplural = plural.split('|');\n\t\t}\n\n\t\tfor (var i=LABEL_MILLISECONDS; i<=LABEL_MILLENNIA; i++) {\n\t\t\t// override any specified units\n\t\t\tLABELS_SINGLUAR[i] = singular[i] || LABELS_SINGLUAR[i];\n\t\t\tLABELS_PLURAL[i] = plural[i] || LABELS_PLURAL[i];\n\t\t}\n\n\t\tLABEL_LAST = ('string' === typeof last) ? last : LABEL_LAST;\n\t\tLABEL_DELIM = ('string' === typeof delim) ? delim : LABEL_DELIM;\n\t\tLABEL_NOW = ('string' === typeof empty) ? empty : LABEL_NOW;\n\t\tformatNumber = ('function' === typeof formatter) ? formatter : formatNumber;\n\t};\n\n\t/**\n\t * Revert to the default unit labels\n\t * @public\n\t */\n\tvar resetLabels = countdown.resetLabels = function() {\n\t\tLABELS_SINGLUAR = ' millisecond| second| minute| hour| day| week| month| year| decade| century| millennium'.split('|');\n\t\tLABELS_PLURAL = ' milliseconds| seconds| minutes| hours| days| weeks| months| years| decades| centuries| millennia'.split('|');\n\t\tLABEL_LAST = ' and ';\n\t\tLABEL_DELIM = ', ';\n\t\tLABEL_NOW = '';\n\n\t\tformatNumber = function(value) { return '' + value + \"\"; };\n\t};\n\n\tresetLabels();\n\n\tif (module && module.exports) {\n\t\tmodule.exports = countdown;\n\n\t} else if (typeof window.define === 'function' && typeof window.define.amd !== 'undefined') {\n\t\twindow.define('countdown', [], function() {\n\t\t\treturn countdown;\n\t\t});\n\t}\n\n\treturn countdown;\n\n})(module);\n"},"repo_name":{"kind":"string","value":"entpy/beauty-and-pics"},"path":{"kind":"string","value":"beauty_and_pics/website/static/website/js/vendor/countdown.js"},"language":{"kind":"string","value":"JavaScript"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":27520,"string":"27,520"}}},{"rowIdx":1714,"cells":{"code":{"kind":"string","value":"---\nlayout: page\ntitle: Avila - Wu Wedding\ndate: 2016-05-24\nauthor: Abigail Heath\ntags: weekly links, java\nstatus: published\nsummary: Vestibulum enim odio, dapibus non turpis.\nbanner: images/banner/people.jpg\nbooking:\n startDate: 05/10/2016\n endDate: 05/13/2016\n ctyhocn: HTSBVHX\n groupCode: AWW\npublished: true\n---\nNam luctus finibus nisi vel accumsan. Nunc luctus diam orci, sed sodales mi luctus quis. Donec eget aliquet augue. Nunc eleifend, nisi id vulputate vehicula, eros dui iaculis velit, ac feugiat lectus diam quis tortor. Nam vitae elementum nisi. Suspendisse sed blandit diam. Cras id sodales magna. Integer quam neque, feugiat in venenatis eget, convallis id velit. Donec posuere lectus tincidunt, malesuada sapien ac, lacinia ante. Pellentesque ex risus, volutpat id augue ac, scelerisque ullamcorper mauris. Nam ac metus mauris. Etiam leo mauris, auctor eget pellentesque eu, aliquam sit amet neque. Quisque eget eleifend dolor. Aenean venenatis odio a est egestas commodo quis quis nulla. Duis luctus velit vitae pulvinar elementum. Curabitur quis tincidunt ex.\n\n* Nam imperdiet purus at ante efficitur, ut elementum lectus facilisis\n* Cras non elit at mauris lacinia eleifend id a orci\n* Nulla pretium odio non varius cursus.\n\nAliquam erat volutpat. Mauris aliquet nisi et metus porta pulvinar. Curabitur ornare eros eu posuere lacinia. 
Mauris et tortor gravida, ultrices massa ut, auctor ex. Donec non pharetra nisl. Maecenas augue nibh, hendrerit sed lobortis nec, malesuada eu metus. Proin sollicitudin fermentum tortor et tincidunt. Cras quis tristique odio. Aenean molestie iaculis ornare. Quisque ac nunc arcu. Suspendisse quis mollis est. Maecenas feugiat sit amet nulla vitae condimentum.\nVivamus dictum mi sit amet ultrices tristique. Quisque sit amet venenatis est. Donec vulputate malesuada purus sed finibus. Nunc id justo quis odio vulputate pellentesque a nec arcu. Etiam felis eros, placerat eget odio in, lobortis congue massa. Suspendisse elementum fermentum consectetur. Aliquam diam sapien, mattis sit amet volutpat id, gravida ac lorem. Vestibulum dignissim nibh eu porta sagittis. Aliquam facilisis rhoncus egestas. Sed semper vel eros at lobortis. Quisque non mi massa. Vestibulum feugiat diam ex, eu aliquam mi pharetra id. Nam faucibus sollicitudin nibh, et ultricies ligula porttitor ullamcorper.\n"},"repo_name":{"kind":"string","value":"KlishGroup/prose-pogs"},"path":{"kind":"string","value":"pogs/H/HTSBVHX/AWW/index.md"},"language":{"kind":"string","value":"Markdown"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":2344,"string":"2,344"}}},{"rowIdx":1715,"cells":{"code":{"kind":"string","value":"file(REMOVE_RECURSE\n \"CMakeFiles/coverage_polymorphic.dir/polymorphic.cpp.o\"\n \"../../../coverage/coverage_polymorphic.pdb\"\n \"../../../coverage/coverage_polymorphic\"\n)\n\n# Per-language clean rules from dependency scanning.\nforeach(lang CXX)\n include(CMakeFiles/coverage_polymorphic.dir/cmake_clean_${lang}.cmake OPTIONAL)\nendforeach()\n"},"repo_name":{"kind":"string","value":"noahhsmith/starid"},"path":{"kind":"string","value":"libstarid/cereal-1.2.2/unittests/CMakeFiles/coverage_polymorphic.dir/cmake_clean.cmake"},"language":{"kind":"string","value":"CMake"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":337,"string":"337"}}},{"rowIdx":1716,"cells":{"code":{"kind":"string","value":"var formMode=\"detail\";\t\t/*formMode\tpage mode; the page has three modes: detail, add, modify*/\nvar panelType=\"form\";\t\t/*panelType\tpanel type: form = edit form, search = query, child = detail-table object*/\nvar editIndex = undefined;\t/*row index of the datagrid row being edited*/\nvar dg1EditIndex = undefined;\n\nvar objName=label.objName;\t\t\t/*name of the object managed by this page*/\nvar lblDetailStr=label.detailStr;\t/*should differ per language*/\nvar lblAddStr=label.addStr;\t\t/*should differ per language*/\nvar lblEditStr=label.editStr;\t\t/*should differ per language*/\nvar pageName=null;\t\t\t/*button definitions can be looked up via pageName*/\n\nvar pageHeight=0;\t\t\t/*pageHeight\tpage height*/\nvar topHeight=366;\t\t\t/*datagrid height*/\nvar dgHeadHeight=28;\t\t/*datagrid height when collapsed*/\nvar downHeight=30;\t\t\t/*footer height*/\nvar paddingHeight=11;\t\t/*in-page padding height\tpaddingTop+paddingBottom*/\n\nvar gridToolbar = null;\t\t/*button definitions */\nvar dgConf=null;\t\t\t/*dgConf configuration*/\nvar dg1Conf=null;\n\nfunction initConf(){}\t/*initialize all configuration for this page here*/\n\nfunction initButton(){\n\tfor(var i=0;i=currRows)\tp--;\n\t\tif(p>=0)\t$('#dg').datagrid('selectRow', p);/*if already at the end, select p-1 */\n\n\t}\n\tvar row=$('#dg').datagrid('getSelected');\n\tif(row)\t$.messager.confirm('Confirm', 'Are you sure you want to delete this record?', confirmBack);\n\telse\t$.messager.alert('Notice', 'Please select the data you want to delete!',\"info\");\n}\n\nfunction dg_refresh(){/*list refresh button event*/\n}\n\nfunction dg_search(){/*list search event; search mode no longer disables the other panels*/\n\tpanelType=\"search\";\n\t$('#tab').tabs(\"select\",1);\n}\nfunction dg_click(index){\n\t/*switch back to detail mode; first check the tab's current selection*/\n\tif(panelType==\"search\"){\n\t\t$('#tab').tabs(\"select\",0);\n\t}\n}\n\nfunction 
dg_dbl(){/*list double-click event\tdouble-click enters edit mode*/\n\tdocument.getElementById(\"btn_edit\").click();/*double-click is equivalent to clicking the edit button*/\n}\nfunction tab_select(title,index){/*tab switch; button visibility must be updated*/\n\t$('#down a').css(\"display\",\"none\");\n\n\tif(index==0){/*generate buttons according to the grid state: add, edit*/\n\t\t$('#btn2_addItem').css(\"display\",\"inline-block\");/*add-row button*/\n\t\t$('#btn2_editItem').css(\"display\",\"inline-block\");/*edit-row button*/\n\t\t$('#btn2_rmItem').css(\"display\",\"inline-block\");/*remove-row button*/\n\t\t$('#btn2_ok').css(\"display\",\"inline-block\");/*commit button*/\n\t}\n\telse if(index==1){/*search tab; switching to the search tab is equivalent to clicking the search button*/\n\t\tpanelType=\"search\";\n\t\t$('#btn2_search').css(\"display\",\"inline-block\");/*search button*/\n\t}\n}\nfunction useDetailMode(row){\n\t//formMode=\"detail\";\n\t\t\n\t//$('#ff2').css(\"display\",\"none\");\n\t//$('#ff1').css(\"display\",\"block\");\n\t\n\t\n\t//if(panelType==\"search\")\t$('#tab').tabs(\"select\",0);\n\t//else \ttab_select();\n}\n\nfunction btn2_addItem(){\n\tif(dg1_endEditing()){/*editing state ended successfully*/\n\t\tvar p=$('#dg1').datagrid('getRowIndex',$('#dg1').datagrid('getSelected'));\n\t\t/*issue the server request to delete the data on the server side, then remove it on the front end*/\n\t\tif (p == undefined){return}\n\t\t$('#dg1').datagrid('unselectAll');\n\t\t\t\t\t\t\n\t\t$('#dg1').datagrid('insertRow',{index:p+1,row:{}})\n\t\t\t\t .datagrid('beginEdit', p+1)\n\t\t\t\t .datagrid('selectRow', p+1);\n\t\n\t\tdg1EditIndex=p+1;\n\t}\n\telse{\n\t\t$('#dg1').datagrid('selectRow', dg1EditIndex);\n\t}\n}\n\nfunction btn2_editItem(){\t\n\tvar index=$('#dg1').datagrid('getRowIndex', $('#dg1').datagrid('getSelected'));\n\tif (dg1EditIndex != index){\n\t\tif (dg1_endEditing()){\n\t\t\t$('#dg1').datagrid('selectRow', index)\n\t\t\t\t\t.datagrid('beginEdit', index);\n\t\t\tdg1EditIndex = index;\n\t\t} else {\n\t\t\t$('#dg1').datagrid('selectRow', dg1EditIndex);\n\t\t}\n\t}\n}\n\nfunction btn2_rmItem(){\t\n\tvar confirmBack=function(r){\n\t\tif(!r)\treturn;\n\t\tvar p=$('#dg1').datagrid('getRowIndex',$('#dg1').datagrid('getSelected'));\n\t\n\t\tif (p == undefined){return}\n\t\t$('#dg1').datagrid('cancelEdit', p)\n\t\t\t\t .datagrid('deleteRow', p);\n\t\n\t\tvar currRows=$('#dg1').datagrid('getRows').length;\n\t\tif(p>=currRows)\tp--;\n\t\tif(p>=0)\t$('#dg1').datagrid('selectRow', p);/*if already at the end, select p-1 */\n\t}\n\tvar row=$('#dg1').datagrid('getSelected');\n\tif(row)\t$.messager.confirm('Confirm', 'Are you sure you want to delete this record?', confirmBack);\n\telse\t$.messager.alert('Notice', 'Please select the data you want to delete!',\"info\");\n}\n\n\nfunction dg1_endEditing(){\n\tif (dg1EditIndex == undefined){return true}\n\tvar flag=$('#dg1').datagrid('validateRow',dg1EditIndex);\n\tif(flag){/*if validation passes, allow the editing state to end*/\n\t\t$('#dg1').datagrid('endEdit', dg1EditIndex);\n\t\tdg1EditIndex = undefined;\n\t\treturn true;\n\t}\n\treturn false;\n\t\n}\n\nfunction dg1_click(index){/*detail-table click event; opens editing while in edit mode*/\n\tif (dg1EditIndex != index){\n\t\tdg1_endEditing();\n\t}\n}\nfunction dg1_dbl(index){/*detail-table double-click event\tdouble-click enters edit mode*/\n\tdocument.getElementById(\"btn2_editItem\").click();/*double-click is equivalent to clicking the edit button*/\n}\n\nfunction useAddMode(){};\nfunction useEditMode(){};\nfunction form_change(type){}/*type= add|edit*/\nfunction removeValidate(){}/*type= enable|remove*/\nfunction btn2_save(){}\nfunction btn2_update(){}\nfunction btn2_search(){}\nfunction btn2_ok(){}\nfunction 
lov_init(){}/*bind the list of values (LOV)*/\n"},"repo_name":{"kind":"string","value":"ldjking/wbscreen"},"path":{"kind":"string","value":"web/wb/2tp/template/js/common/copy/a3.js"},"language":{"kind":"string","value":"JavaScript"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":9914,"string":"9,914"}}},{"rowIdx":1717,"cells":{"code":{"kind":"string","value":"FROM ruby:2.3.3\nRUN apt-get update && apt-get install -y \\\n #Packages\n net-tools \\\n nodejs \n\n#Install phantomjs\nRUN apt-get update \\\n && apt-get install -y --no-install-recommends \\\n ca-certificates \\\n bzip2 \\\n libfontconfig \\\n && apt-get clean \\\n && rm -rf /var/lib/apt/lists/*\n\nRUN apt-get update \\\n && apt-get install -y --no-install-recommends \\\n curl \\\n && mkdir /tmp/phantomjs \\\n && curl -L https://bitbucket.org/ariya/phantomjs/downloads/phantomjs-2.1.1-linux-x86_64.tar.bz2 \\\n | tar -xj --strip-components=1 -C /tmp/phantomjs \\\n && cd /tmp/phantomjs \\\n && mv bin/phantomjs /usr/local/bin \\\n && cd \\\n && apt-get purge --auto-remove -y \\\n curl \\\n && apt-get clean \\\n && rm -rf /tmp/* /var/lib/apt/lists/*\n\n\n\n#Install gems\nRUN mkdir /app\nWORKDIR /app\nCOPY Gemfile* /app/\nRUN bundle install\nRUN apt-get clean\n\n#Upload source\nCOPY . /app\nRUN useradd ruby\nRUN chown -R ruby /app\nUSER ruby\n\n# Database defaults\nENV DATABASE_NAME bookIT\nENV DATABASE_HOST db\nENV DATABASE_USER bookIT\nENV DATABASE_PASSWORD password\nENV DATABASE_ADAPTER mysql2\n\nENV ACCOUNT_ADDRESS https://gamma.chalmers.it\n\n\n#In production, Host is set to naboo.chalmers.it\n# Start server\nENV RAILS_ENV production\nENV RACK_ENV production\nENV SECRET_KEY_BASE secret\nENV PORT 3000\nEXPOSE 3000\n\nRUN rake assets:precompile\n\nCMD [\"sh\", \"start.sh\"]\n"},"repo_name":{"kind":"string","value":"cthit/bookIT"},"path":{"kind":"string","value":"Dockerfile"},"language":{"kind":"string","value":"Dockerfile"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":1372,"string":"1,372"}}},{"rowIdx":1718,"cells":{"code":{"kind":"string","value":"## Capistrano\n\n[![Build\nStatus](https://secure.travis-ci.org/capistrano/capistrano.png)](http://travis-ci.org/capistrano/capistrano)[![Code Climate](https://codeclimate.com/badge.png)](https://codeclimate.com/github/capistrano/capistrano)\n\n\nCapistrano is a utility and framework for executing commands in parallel on\nmultiple remote machines, via SSH. It uses a simple DSL (borrowed in part from\n[Rake](http://rake.rubyforge.org/)) that allows you to define _tasks_, which may\nbe applied to machines in certain roles. 
It also supports tunneling connections\nvia some gateway machine to allow operations to be performed behind VPN's and\nfirewalls.\n\nCapistrano was originally designed to simplify and automate deployment of web\napplications to distributed environments, and originally came bundled with a set\nof tasks designed for deploying Rails applications.\n\n## Documentation\n\n* [https://github.com/capistrano/capistrano/wiki](https://github.com/capistrano/capistrano/wiki)\n\n## DEPENDENCIES\n\n* [Net::SSH](http://net-ssh.rubyforge.org)\n* [Net::SFTP](http://net-ssh.rubyforge.org)\n* [Net::SCP](http://net-ssh.rubyforge.org)\n* [Net::SSH::Gateway](http://net-ssh.rubyforge.org)\n* [HighLine](http://highline.rubyforge.org)\n* [Ruby](http://www.ruby-lang.org/en/) &#x2265; 1.8.7\n\nIf you want to run the tests, you'll also need to install the dependencies with\nBundler, see the `Gemfile` within .\n\n## ASSUMPTIONS\n\nCapistrano is \"opinionated software\", which means it has very firm ideas about\nhow things ought to be done, and tries to force those ideas on you. Some of the\nassumptions behind these opinions are:\n\n* You are using SSH to access the remote servers.\n* You either have the same password to all target machines, or you have public\n keys in place to allow passwordless access to them.\n\nDo not expect these assumptions to change.\n\n## USAGE\n\nIn general, you'll use Capistrano as follows:\n\n* Create a recipe file (\"capfile\" or \"Capfile\").\n* Use the `cap` script to execute your recipe.\n\nUse the `cap` script as follows:\n\n cap sometask\n\nBy default, the script will look for a file called one of `capfile` or\n`Capfile`. The `sometask` text indicates which task to execute. You can do\n\"cap -h\" to see all the available options and \"cap -T\" to see all the available\ntasks.\n\n## CONTRIBUTING:\n\n* Fork Capistrano\n* Create a topic branch - `git checkout -b my_branch`\n* Rebase your branch so that all your changes are reflected in one\n commit\n* Push to your branch - `git push origin my_branch`\n* Create a Pull Request from your branch, include as much documentation\n as you can in the commit message/pull request, following these\n[guidelines on writing a good commit message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html)\n* That's it!\n\n\n## LICENSE:\n\nPermission is hereby granted, free of charge, to any person obtaining\na copy of this software and associated documentation files (the\n'Software'), to deal in the Software without restriction, including\nwithout limitation the rights to use, copy, modify, merge, publish,\ndistribute, sublicense, and/or sell copies of the Software, and to\npermit persons to whom the Software is furnished to do so, subject to\nthe following conditions:\n\nThe above copyright notice and this permission notice shall be\nincluded in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.\nIN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\nCLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,\nTORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\nSOFTWARE OR THE USE OR OTHER DEALINGS IN THE 
SOFTWARE.\n"},"repo_name":{"kind":"string","value":"piousbox/microsites2-cities"},"path":{"kind":"string","value":"vendor/ruby/1.9.1/gems/capistrano-2.15.4/README.md"},"language":{"kind":"string","value":"Markdown"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":3795,"string":"3,795"}}},{"rowIdx":1719,"cells":{"code":{"kind":"string","value":"/*\n * Jermit\n *\n * The MIT License (MIT)\n *\n * Copyright (C) 2018 Kevin Lamonte\n *\n * Permission is hereby granted, free of charge, to any person obtaining a\n * copy of this software and associated documentation files (the \"Software\"),\n * to deal in the Software without restriction, including without limitation\n * the rights to use, copy, modify, merge, publish, distribute, sublicense,\n * and/or sell copies of the Software, and to permit persons to whom the\n * Software is furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL\n * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER\n * DEALINGS IN THE SOFTWARE.\n *\n * @author Kevin Lamonte [kevin.lamonte@gmail.com]\n * @version 1\n */\npackage jermit.protocol.zmodem;\n\n/**\n * ZEofHeader represents the end of a file.\n */\nclass ZEofHeader extends Header {\n\n // ------------------------------------------------------------------------\n // Constructors -----------------------------------------------------------\n // ------------------------------------------------------------------------\n\n /**\n * Public constructor.\n */\n public ZEofHeader() {\n this(0);\n }\n\n /**\n * Public constructor.\n *\n * @param data the data field for this header\n */\n public ZEofHeader(final int data) {\n super(Type.ZEOF, (byte) 0x0B, \"ZEOF\", data);\n }\n\n // ------------------------------------------------------------------------\n // Header -----------------------------------------------------------------\n // ------------------------------------------------------------------------\n\n // ------------------------------------------------------------------------\n // ZEofHeader -------------------------------------------------------------\n // ------------------------------------------------------------------------\n\n /**\n * Get the file size value.\n *\n * @return the value\n */\n public int getFileSize() {\n return data;\n }\n\n}\n"},"repo_name":{"kind":"string","value":"klamonte/jermit"},"path":{"kind":"string","value":"src/jermit/protocol/zmodem/ZEofHeader.java"},"language":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":2504,"string":"2,504"}}},{"rowIdx":1720,"cells":{"code":{"kind":"string","value":"# fullstack-course4-submissions"},"repo_name":{"kind":"string","value":"aaronblair/fullstack-course4-submissions"},"path":{"kind":"string","value":"README.md"},"language":{"kind":"string","value":"Markdown"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":31,"string":"31"}}},{"rowIdx":1721,"cells":{"code":{"kind":"string","value":"import 
java.sql.Connection;\nimport java.sql.DriverManager;\nimport java.sql.SQLException;\n\npublic class FormLoader {\n\t\n\tpublic static String connectionString = \"jdbc:hsqldb:file:db-data/teamsandplayers\";\n\tstatic Connection con;\n\n\tpublic static void main(String[] args) throws Exception {\n\t\ttry {\n\t\t\tClass.forName(\"org.hsqldb.jdbc.JDBCDriver\");\n\t\t} catch (ClassNotFoundException e) {\n\t\t\tthrow e;\n\t\t}\n\t\t\n\t\t\n\t\tMainTeamForm form = new MainTeamForm();\n\t\tform.setVisible(true);\n\t\t\n\t\ttry {\n\t\t\t// will create DB if does not exist\n\t\t\t// \"SA\" is default user with hypersql\n\t\t\tcon = DriverManager.getConnection(connectionString, \"SA\", \"\");\n\t\t\t\n\t\t\t\n\t\t\t\n\t\t\t\n\t \n\t\t} catch (SQLException e) {\n\t\t\tthrow e;\n\t\t} finally {\n\t\t\tif (con != null) {\n\t\t\t\tcon.close();\n\t\t\t}\n\t\t\tSystem.out.println(\"Program complete\");\n\t\t}\n\t\t\n\t\t\n\t\t\n\t}\n\n\n}\n"},"repo_name":{"kind":"string","value":"a-r-d/java-1-class-demos"},"path":{"kind":"string","value":"jframe-actionlistener-access-db-cxn/homework-start/Week13Assignment10/src/FormLoader.java"},"language":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":788,"string":"788"}}},{"rowIdx":1722,"cells":{"code":{"kind":"string","value":"store($this->getKey(), array('foo' => 'bar'), $this->getExpires());\n $this->assertTrue($success);\n $this->assertEquals($this->getExpires(), $client->getExpires($this->getKey()));\n $this->assertEquals($this->getExpires(), $client->getTtl($this->getKey())); // default should be the same as expires\n\n // store with ttl\n $success = $client->store($this->getKey(), array('foo' => 'bar'), $this->getExpires(), $this->getTtl());\n $this->assertTrue($success);\n $this->assertLessThanOrEqual($this->getExpires(), $client->getExpires($this->getKey()));\n $this->assertLessThanOrEqual($this->getTtl(), $client->getTtl($this->getKey()));\n }\n\n /** @dataProvider provideClients */\n public function testExists(Memento\\Client $client)\n {\n $client->store($this->getKey(), true);\n\n $exists = $client->exists($this->getKey());\n $this->assertTrue($exists);\n }\n\n /** @dataProvider provideClients */\n public function testRetrieve(Memento\\Client $client)\n {\n $client->store($this->getKey(), array('foo' => 'bar'));\n\n $data = $client->retrieve($this->getKey());\n $this->assertEquals($data, array('foo' => 'bar'));\n }\n\n /** @dataProvider provideClients */\n public function testInvalidRetrieve(Memento\\Client $client)\n {\n $data = $client->retrieve(new Memento\\Key(md5(time() . 
rand(0, 1000))));\n $this->assertEquals($data, null);\n }\n\n /** @dataProvider provideClients */\n public function testInvalidate(Memento\\Client $client)\n {\n $client->store($this->getKey(), true);\n $invalid = $client->invalidate($this->getKey());\n $this->assertTrue($invalid);\n $exists = $client->exists($this->getKey());\n $this->assertFalse($exists);\n }\n\n /** @dataProvider provideClients */\n public function testTerminate(Memento\\Client $client)\n {\n $client->store($this->getKey(), true);\n\n $terminated = $client->terminate($this->getKey());\n $this->assertTrue($terminated);\n $exists = $client->exists($this->getKey());\n $this->assertFalse($exists);\n }\n\n /** @dataProvider provideClients */\n public function testExpires(Memento\\Client $client)\n {\n $client->store($this->getKey(), array('foo' => 'bar'), 1, $ttl = 5);\n sleep(3);\n $exists = $client->exists($this->getKey());\n $this->assertFalse($exists);\n\n // check if cache exists but include expired caches\n $exists = $client->exists($this->getKey(), true);\n $this->assertTrue($exists);\n\n $client->store($this->getKey(), array('foo' => 'bar'), $this->getExpires(), $this->getTtl());\n $this->assertTrue($client->exists($this->getKey()));\n $client->expire($this->getKey());\n sleep(1);\n $this->assertFalse($client->exists($this->getKey()));\n\n // check if cache exists but include expired caches\n $exists = $client->exists($this->getKey(), true);\n $this->assertTrue($exists);\n }\n}\n"},"repo_name":{"kind":"string","value":"garyr/memento"},"path":{"kind":"string","value":"test/Memento/Test/SingleTest.php"},"language":{"kind":"string","value":"PHP"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":3252,"string":"3,252"}}},{"rowIdx":1723,"cells":{"code":{"kind":"string","value":"/*\n Copyright (c) 2015 Shaps Mohsenin. All rights reserved.\n \n Redistribution and use in source and binary forms, with or without\n modification, are permitted provided that the following conditions are met:\n \n 1. Redistributions of source code must retain the above copyright notice, this\n list of conditions and the following disclaimer.\n \n 2. Redistributions in binary form must reproduce the above copyright notice,\n this list of conditions and the following disclaimer in the documentation\n and/or other materials provided with the distribution.\n \n THIS SOFTWARE IS PROVIDED BY Shaps Mohsenin `AS IS' AND ANY EXPRESS OR\n IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF\n MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO\n EVENT SHALL Shaps Mohsenin OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,\n INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,\n BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF\n LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE\n OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF\n ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n */\n\n@import UIKit;\n#import \"SPXDataView.h\"\n\n\n/**\n * Provides tableView specific definitions of a dataView\n */\n@interface UITableView (SPXDataViewAdditions) \n\n\n/**\n * Gets/sets the block to execute when the tableView requests a cell\n */\n@property (nonatomic, copy) UITableViewCell *(^viewForItemAtIndexPathBlock)(UITableView *tableView, id object, NSIndexPath *indexPath);\n\n\n/**\n * Gets/sets the block to execute when the tableView requests the cell to be configured\n */\n@property (nonatomic, copy) void (^configureViewForItemAtIndexPathBlock)(UITableView *tableView, UITableViewCell *cell, id object, NSIndexPath *indexPath);\n\n\n/**\n * Gets/sets the block to execute when the tableView requests a section header\n */\n@property (nonatomic, copy) NSString *(^titleForHeaderInSectionBlock)(UITableView *tableView, NSUInteger section);\n\n\n/**\n * Gets/sets the block to execute when the tableView requests a section footer\n */\n@property (nonatomic, copy) NSString *(^titleForFooterInSectionBlock)(UITableView *tableView, NSUInteger section);\n\n\n/**\n * Gets/sets the block to execute when the tableView requests whether or not a cell can be moved\n */\n@property (nonatomic, copy) BOOL (^canMoveItemAtIndexPathBlock)(UITableView *tableView, UITableViewCell *cell, id object, NSIndexPath *indexPath);\n\n\n/**\n * Gets/sets the block to execute when the tableView requests whether or not a cell can be edited\n */\n@property (nonatomic, copy) BOOL (^canEditItemAtIndexPathBlock)(UITableView *tableView, UITableViewCell *cell, id object, NSIndexPath *indexPath);\n\n\n/**\n * Gets/sets the block to execute when the tableView commits an editing action for a cell\n */\n@property (nonatomic, copy) void (^commitEditingStyleForItemAtIndexPathBlock)(UITableView *tableView, UITableViewCell *cell, id object, NSIndexPath *indexPath);\n\n\n/**\n * Gets/sets the block to execute when the tableView moves a cell\n */\n@property (nonatomic, copy) void (^moveItemAtSourceIndexPathToDestinationIndexPathBlock)(UITableView *tableView, NSIndexPath *sourceIndexPath, NSIndexPath *destinationIndexPath);\n\n\n@end\n\n\n"},"repo_name":{"kind":"string","value":"shaps80/SPXCore"},"path":{"kind":"string","value":"Example/Pods/SPXDataSources/Pod/Classes/DataViews/UITableView+SPXDataViewAdditions.h"},"language":{"kind":"string","value":"C"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":3424,"string":"3,424"}}},{"rowIdx":1724,"cells":{"code":{"kind":"string","value":"//\n// Generated by class-dump 3.5 (64 bit).\n//\n// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2013 by Steve Nygard.\n//\n\n#import \"CDStructures.h\"\n\n@interface _IDEKitPrivateClassForFindingBundle : 
NSObject\n{\n}\n\n@end\n\n"},"repo_name":{"kind":"string","value":"kolinkrewinkel/Multiplex"},"path":{"kind":"string","value":"Multiplex/IDEHeaders/IDEHeaders/IDEKit/_IDEKitPrivateClassForFindingBundle.h"},"language":{"kind":"string","value":"C"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":234,"string":"234"}}},{"rowIdx":1725,"cells":{"code":{"kind":"string","value":"C - Debugging with GDB

15.4.1 C and C++

Since C and C++ are so closely related, many features of gdb apply
to both languages. Whenever this is the case, we discuss those languages
together.

The C++ debugging facilities are jointly implemented by the C++
compiler and gdb. Therefore, to debug your C++ code
effectively, you must compile your C++ programs with a supported
C++ compiler, such as gnu g++, or the HP ANSI C++
compiler (aCC).
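For example, to prepare a program for source-level debugging (the file names here are illustrative, not from the manual), compile with debugging information and then load the binary into gdb:

     g++ -g myprog.cpp -o myprog
     gdb myprog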
\n\n \n\n"},"repo_name":{"kind":"string","value":"ChangsoonKim/STM32F7DiscTutor"},"path":{"kind":"string","value":"toolchain/osx/gcc-arm-none-eabi-6-2017-q1-update/share/doc/gcc-arm-none-eabi/html/gdb/C.html"},"language":{"kind":"string","value":"HTML"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":4070,"string":"4,070"}}},{"rowIdx":1726,"cells":{"code":{"kind":"string","value":"Answer these questions in your reflection:\n\nWhat git concepts were you struggling with prior to the GPS session?\n - Prior to the GPS session I was having trouble navigating between branches. I also was completely confused on remote and fetch. I thought that you could just use the command git pull which would fetch/merge in one.\n\nWhat concepts were clarified during the GPS?\n - Using git checkout moves between branches.\n\nWhat questions did you ask your pair and the guide?\n - I asked them questions on what was troubling me and that cleared things up. I am still a little fuzzy on fetch / remote but I know that will come with more practice. Git pull is also a compact way to fetch and merge in one.\n\nWhat still confuses you about git?\n - When using the remote I am still not completely sure on what it does. I will need to do more research and practice while I work on the HTML this week.\n\nHow was your first experience of pairing in a GPS?\n - My first experience was great! I really enjoyed working with my partner and the guide had some great pointers. Once again my feelings toward DBC are getting better and better as the days go on. I am having a great time learning things that interest me."},"repo_name":{"kind":"string","value":"mikelondon/phase-0-gps-1"},"path":{"kind":"string","value":"london-reflection.md"},"language":{"kind":"string","value":"Markdown"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":1202,"string":"1,202"}}},{"rowIdx":1727,"cells":{"code":{"kind":"string","value":"#!/usr/bin/env python3\n\"\"\"\n Categorize and analyze user sessions.\n Read in ecfs_obfuscated_filtered.gz file, output some fancy results. 
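\n\n Usage (an illustrative sketch inferred from the __main__ block below; the\n argument names are placeholders):\n\n analyze_user_sessions.py <source_dir> <results_csv>\n\n <source_dir> is walked for *.user_session.csv files, and one line per\n detected session is appended to <results_csv>.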
\n\"\"\"\n\nfrom collections import defaultdict\nfrom collections import Counter\nimport sys\nimport time\nimport os\nimport resource\nimport json\nimport fnmatch\nfrom pipes import Pipes\nimport operator\n\nfrom operation import Operation\n\nKB = 1024\nMB = KB * 1024\nGB = MB * 1024\nTB = GB * 1024\nPB = TB * 1024\n\nMONITOR_LINES = 100000\n\n\nclass UserSession():\n def __init__(self, user_id):\n self.user_id = user_id\n self.from_ts = 0\n self.till_ts = 0\n self.get_requests = 0\n self.reget_requests = 0\n self.put_requests = 0\n self.get_bytes = 0\n self.put_bytes = 0\n self.rename_requests = 0\n self.del_requests = 0\n self.get_dirs = 0\n self.put_dirs = 0\n self.put_files_per_dir = 0.0\n self.get_files_per_dir = 0.0\n self.window_seconds = 0\n\n self.file_cnt_gets = Counter()\n self.file_cnt_puts = Counter()\n self.dir_cnt_gets = Counter()\n self.dir_cnt_puts = Counter()\n\n self.num_ops = 0\n self.last_ts = 0\n\n def add_op(self, op):\n self.num_ops += 1\n\n if op.ts < self.last_ts:\n raise Exception(\"Timestamp too old\")\n else:\n self.last_ts = op.ts\n\n if op.optype == 'g':\n self.get_requests += 1\n self.get_bytes += op.size\n self.file_cnt_gets[op.obj_id] += 1\n self.dir_cnt_gets[op.parent_dir_id] += 1\n elif op.optype == 'p':\n self.put_requests += 1\n self.put_bytes += op.size\n self.file_cnt_puts[op.obj_id] += 1\n self.dir_cnt_puts[op.parent_dir_id] += 1\n elif op.optype == 'd':\n self.del_requests += 1\n elif op.optype == 'r':\n self.rename_requests += 1\n\n #update last time stamp in the session\n self.till_ts = op.ts + op.execution_time\n\n def finish(self):\n self.get_dirs = len(self.dir_cnt_gets)\n if self.get_dirs > 0:\n self.get_files_per_dir = float(self.get_requests) / self.get_dirs\n\n self.put_dirs = len(self.dir_cnt_puts)\n if self.put_dirs > 0:\n self.put_files_per_dir = float(self.put_requests) / self.put_dirs\n\n \"\"\"\n set reget_counter\n :param counter: contains [ 1, 1, 5] counts of objects. value > 1 is a re-retrieval.\n :return:\n \"\"\"\n for c in self.file_cnt_gets.values():\n if c > 1:\n self.reget_requests += (c - 1)\n\n # self.announce()\n\n return \";\".join([str(x) for x in [\n self.user_id,\n self.from_ts,\n self.till_ts,\n self.till_ts - self.from_ts,\n self.get_requests,\n self.reget_requests,\n self.put_requests,\n self.get_bytes,\n self.put_bytes,\n self.rename_requests,\n self.del_requests,\n self.get_dirs,\n self.put_dirs,\n self.put_files_per_dir,\n self.get_files_per_dir,\n self.window_seconds\n ]]\n )\n\n\n def announce(self):\n print(\"closed session. 
gets: %r, regets: %r, puts: %r, dels: %r, renames: %r get_dirs: %r, put_dirs: %r, get_bytes: %r put_bytes: %r window_seconds: %d\" % \\\n (self.get_requests, self.reget_requests, self.put_requests, self.del_requests, self.rename_requests, self.get_dirs, self.put_dirs, self.get_bytes, self.put_bytes, self.window_seconds))\n\n\ndef find_clusters(atimes):\n foo = Counter()\n bar = dict()\n for i in range(120, 3660, 10):\n clusters = get_clusters(atimes, i)\n cs = len(clusters)\n foo[cs] += 1\n\n # note first occurrence of this cluster size.\n if cs not in bar:\n bar[cs] = i\n # print(len(atimes), i, cs)\n\n return bar[foo.most_common()[0][0]]\n\ndef get_clusters(data, maxgap):\n '''Arrange data into groups where successive elements\n differ by no more than *maxgap*\n\n >>> get_clusters([1, 6, 9, 100, 102, 105, 109, 134, 139], maxgap=10)\n [[1, 6, 9], [100, 102, 105, 109], [134, 139]]\n\n >>> get_clusters([1, 6, 9, 99, 100, 102, 105, 134, 139, 141], maxgap=10)\n [[1, 6, 9], [99, 100, 102, 105], [134, 139, 141]]\n '''\n data.sort()\n groups = [[data[0]]]\n for x in data[1:]:\n if abs(x - groups[-1][-1]) <= maxgap:\n groups[-1].append(x)\n else:\n groups.append([x])\n return groups\n\n\ndef analyze_user_session(user_session_file, out_pipeline, target_file_name):\n with open(user_session_file, 'r') as sf:\n ops = list()\n atimes = list()\n\n for line in sf:\n op = Operation()\n op.init(line.strip())\n ops.append(op)\n atimes.append(op.ts)\n\n ops.sort(key=operator.attrgetter('ts'))\n atimes.sort()\n window_seconds = find_clusters(atimes)\n\n session_counter = 1\n\n uf = os.path.basename(user_session_file)\n user_id = uf[:uf.find(\".user_session.csv\")]\n\n session = UserSession(user_id)\n session.window_seconds = window_seconds\n\n for op in ops:\n if session.from_ts == 0:\n session.from_ts = op.ts\n session.till_ts = op.ts + op.execution_time\n\n if (session.till_ts + window_seconds) < op.ts:\n # this session is over, so archive it.\n out_pipeline.write_to(target_file_name, session.finish())\n del session\n session = UserSession(user_id)\n session.window_seconds = window_seconds\n session_counter += 1\n \n session.add_op(op)\n\n if session.num_ops > 0:\n out_pipeline.write_to(target_file_name, session.finish())\n\n print(\"sessions: %d with window_seconds: %d\" %(session_counter, window_seconds))\n\n\nif __name__ == \"__main__\":\n source_dir = os.path.abspath(sys.argv[1])\n\n\n result = os.path.abspath(sys.argv[2])\n results_dir = os.path.dirname(result)\n target_file_name = os.path.basename(result)\n\n users_session_files = [os.path.join(dirpath, f)\n for dirpath, dirnames, files in os.walk(source_dir)\n for f in fnmatch.filter(files, '*.user_session.csv')]\n\n #remove the old log file, as outpipe is append only.\n if os.path.exists(os.path.join(results_dir, target_file_name)):\n os.remove(os.path.join(results_dir, target_file_name))\n\n out_pipe = Pipes(results_dir)\n \n csv_header = \";\".join([\"user_id\",\n \"from_ts\",\n \"till_ts\",\n \"session_lifetime\",\n \"get_requests\",\n \"reget_requests\",\n \"put_requests\",\n \"get_bytes\",\n \"put_bytes\",\n \"rename_requests\",\n \"del_requests\",\n \"get_dirs\",\n \"put_dirs\",\n \"put_files_per_dir\",\n \"get_files_per_dir\",\n \"window_seconds\"\n ])\n\n out_pipe.write_to(target_file_name, csv_header)\n\n cnt = 0\n for sf in users_session_files:\n cnt += 1\n print (\"working on %d/%d\" % (cnt, len(users_session_files)))\n analyze_user_session(sf, out_pipe, target_file_name)\n\n # if cnt >=20:\n # break\n\n out_pipe.close()\n\n print(\"wrote results 
to %s: \" % (os.path.join(results_dir, target_file_name)))\n \n "},"repo_name":{"kind":"string","value":"zdvresearch/fast15-paper-extras"},"path":{"kind":"string","value":"ecfs_user_sessions/src/analyze_user_sessions.py"},"language":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":7526,"string":"7,526"}}},{"rowIdx":1728,"cells":{"code":{"kind":"string","value":"package esl\n\nimport (\n\t\"io\"\n\t\"errors\"\n\t\"unicode/utf8\"\n)\n\n// Buffer ...\ntype buffer []byte\n// MemoryReader ...\ntype memReader [ ]byte\n// MemoryWriter ...\ntype memWriter [ ]byte\n// ErrBufferSize indicates that memory cannot be allocated to store data in a buffer.\nvar ErrBufferSize = errors.New(`could not allocate memory`)\n\n\nfunc newBuffer( size int ) *buffer {\n\tbuf := make([ ]byte, 0, size )\n\treturn (*buffer)(&buf)\n}\n\n\nfunc ( buf *buffer ) reader( ) *memReader {\n\tn := len( *buf )\n\trbuf := ( *buf )[:n:n]\n\treturn ( *memReader )( &rbuf )\n}\n\n\nfunc ( buf *buffer ) writer( ) *memWriter {\n\treturn ( *memWriter )( buf )\n}\n\n\nfunc ( buf *buffer ) grow( n int ) error {\n\tif ( len( *buf )+ n ) > cap( *buf ) {\n\t\t// Not enough space to store [:+(n)]byte(s)\n\t\tmbuf, err := makebuf( cap( *buf )+ n )\n\t\t\n\t\tif ( err != nil ) {\n\t\t\treturn ( err )\n\t\t}\n\t\t\n\t\tcopy( mbuf, *buf )\n\t\t*( buf ) = mbuf\n\t}\n\treturn nil\n}\n\n\n// allocates a byte slice of size.\n// If the allocation fails, returns error\n// indicating that memory cannot be allocated to store data in a buffer.\nfunc makebuf( size int ) ( buf [ ]byte, memerr error ) {\n\tdefer func( ) {\n\t\t// If the make fails, give a known error.\n\t\tif ( recover( ) != nil ) {\n\t\t\t( memerr ) = ErrBufferSize\n\t\t}\n\t}( )\n\treturn make( [ ]byte, 0, size ), nil\n}\n\n\nfunc ( buf *memReader ) Read( b [ ]byte ) ( n int, err error ) {\n\tif len( *buf ) == 0 {\n\t\treturn ( 0 ), io.EOF\n\t}\n\t// copy first, then advance the buffer by the number of bytes actually read\n\tn = copy( b, *buf )\n\t*buf = ( *buf )[ n: ]\n\treturn // n, nil\n}\n\n\nfunc ( buf *memReader ) ReadByte( ) ( c byte, err error ) {\n\tif len(*buf) == 0 {\n\t\treturn ( 0 ), io.EOF\n\t}\n\tc, *buf = (*buf)[0], (*buf)[1:]\n\treturn // c, nil\n}\n\n\nfunc ( buf *memReader ) ReadRune( ) ( r rune, size int, err error ) {\n\tif len(*buf) == 0 {\n\t\treturn 0, 0, io.EOF\n\t}\n\tr, size = utf8.DecodeRune(*buf)\n\t*buf = (*buf)[size:]\n\treturn // r, size, nil\n}\n\n\nfunc ( buf *memReader ) WriteTo( w io.Writer ) ( n int64, err error ) {\n\tfor len( *buf ) > 0 {\n\t\trw, err := w.Write( *buf )\n\t\tif ( rw > 0 ) {\n\t\t\tn, *buf = n + int64( rw ), (*buf)[rw:]\n\t\t}\n\t\tif ( err != nil ) {\n\t\t\treturn n, err\n\t\t}\n\t}\n\t// the whole buffer was flushed; io.WriterTo must not report io.EOF here\n\treturn n, nil\n}\n\n\n\n\n\nfunc ( buf *memWriter ) Write( b []byte ) ( n int, err error ) {\n\t*buf = append( *buf, b...)\n\treturn len( b ), nil\n}\n\n\nfunc ( buf *memWriter ) WriteByte( c byte ) error {\n\t*buf = append( *buf, c )\n\treturn ( nil )\n}\n\n\nfunc ( buf *memWriter ) WriteRune( r rune ) error {\n\t\n\tif ( r < utf8.RuneSelf ) {\n\t\treturn buf.WriteByte( byte( r ))\n\t}\n\n\tb := *buf\n\tn := len( b )\n\tif ( n + utf8.UTFMax ) > cap( b ) {\n\t\tb = make( []byte, ( n + utf8.UTFMax ))\n\t\tcopy( b, *buf )\n\t}\n\tw := utf8.EncodeRune( b[ n:( n + utf8.UTFMax )], r )\n\t*buf = b[ :( n + w )]\n\treturn nil\n}\n\n\nfunc ( buf *memWriter ) WriteString( s string ) ( n int, err error ) {\n\t*buf = append( *buf, s...)\n\treturn len( s ), nil\n}\n\n\n// func (buf *memWriter) ReadFrom(r io.Reader) (n int64, err error) {\n// \t// NOTE: indefinite allocation! 
Try to use io.WriterTo interface!\n// }"},"repo_name":{"kind":"string","value":"navrotskyj/acr"},"path":{"kind":"string","value":"src/pkg/esl/io.go"},"language":{"kind":"string","value":"GO"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":2905,"string":"2,905"}}},{"rowIdx":1729,"cells":{"code":{"kind":"string","value":"#ifdef __OBJC__\n#import \n#else\n#ifndef FOUNDATION_EXPORT\n#if defined(__cplusplus)\n#define FOUNDATION_EXPORT extern \"C\"\n#else\n#define FOUNDATION_EXPORT extern\n#endif\n#endif\n#endif\n\n\nFOUNDATION_EXPORT double Pods_WZYUnlimitedScrollViewDemoVersionNumber;\nFOUNDATION_EXPORT const unsigned char Pods_WZYUnlimitedScrollViewDemoVersionString[];\n\n"},"repo_name":{"kind":"string","value":"CoderZYWang/WZYUnlimitedScrollView"},"path":{"kind":"string","value":"WZYUnlimitedScrollViewDemo/Pods/Target Support Files/Pods-WZYUnlimitedScrollViewDemo/Pods-WZYUnlimitedScrollViewDemo-umbrella.h"},"language":{"kind":"string","value":"C"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":354,"string":"354"}}},{"rowIdx":1730,"cells":{"code":{"kind":"string","value":"package com.zimbra.cs.versioncheck;\n\nimport java.io.IOException;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Date;\n\nimport org.apache.commons.cli.CommandLine;\nimport org.apache.commons.cli.Options;\nimport org.apache.commons.cli.ParseException;\nimport com.zimbra.common.util.ZimbraLog;\nimport com.zimbra.common.account.Key;\nimport com.zimbra.common.account.Key.ServerBy;\nimport com.zimbra.common.service.ServiceException;\nimport com.zimbra.common.soap.AdminConstants;\nimport com.zimbra.common.soap.SoapFaultException;\nimport com.zimbra.common.soap.SoapTransport;\nimport com.zimbra.common.util.CliUtil;\nimport com.zimbra.cs.account.Config;\nimport com.zimbra.cs.account.Provisioning;\nimport com.zimbra.cs.account.Server;\nimport com.zimbra.cs.client.LmcSession;\nimport com.zimbra.cs.client.soap.LmcSoapClientException;\nimport com.zimbra.cs.client.soap.LmcVersionCheckRequest;\nimport com.zimbra.cs.client.soap.LmcVersionCheckResponse;\nimport com.zimbra.cs.util.BuildInfo;\nimport com.zimbra.cs.util.SoapCLI;\nimport com.zimbra.common.util.DateUtil;\n/**\n * @author Greg Solovyev\n */\npublic class VersionCheckUtil extends SoapCLI {\n private static final String OPT_CHECK_VERSION = \"c\";\n private static final String OPT_MANUAL_CHECK_VERSION = \"m\";\n private static final String SHOW_LAST_STATUS = \"r\";\n \n protected VersionCheckUtil() throws ServiceException {\n super();\n }\n \n public static void main(String[] args) {\n CliUtil.toolSetup();\n SoapTransport.setDefaultUserAgent(\"zmcheckversion\", BuildInfo.VERSION);\n VersionCheckUtil util = null;\n try {\n util = new VersionCheckUtil();\n } catch (ServiceException e) {\n System.err.println(e.getMessage());\n System.exit(1);\n }\n\n try {\n util.setupCommandLineOptions();\n CommandLine cl = null;\n try {\n cl = util.getCommandLine(args);\n } catch (ParseException e) {\n System.out.println(e.getMessage());\n util.usage();\n System.exit(1);\n }\n \n if (cl == null) {\n System.exit(1);\n }\n \n if (cl.hasOption(OPT_CHECK_VERSION)) {\n \t//check schedule\n \t\tProvisioning prov = Provisioning.getInstance();\n \t\tConfig config;\n \t\tconfig = prov.getConfig();\n \tString updaterServerId = config.getAttr(Provisioning.A_zimbraVersionCheckServer);\n \t\n if (updaterServerId != null) {\n Server server = prov.get(Key.ServerBy.id, updaterServerId);\n if (server != null) {\n \tServer localServer = 
prov.getLocalServer();\n \tif (localServer!=null) { \n \t\tif(!localServer.getId().equalsIgnoreCase(server.getId())) {\n \t\t\tSystem.out.println(\"Wrong server\");\n \t\t\tSystem.exit(0);\n \t\t}\n \t}\n }\n } \t\t\n \t\tString versionInterval = config.getAttr(Provisioning.A_zimbraVersionCheckInterval);\n \t\tif(versionInterval == null || versionInterval.length()==0 || versionInterval.equalsIgnoreCase(\"0\")) {\n \t\t\tSystem.out.println(\"Automatic updates are disabled\");\n \t\t\tSystem.exit(0);\n \t\t} else {\n \t\t\tlong checkInterval = DateUtil.getTimeIntervalSecs(versionInterval,0);\n \t\t\tString lastAttempt = config.getAttr(Provisioning.A_zimbraVersionCheckLastAttempt);\n \t\t\tif(lastAttempt != null) {\n \t\t\t\tDate lastChecked = DateUtil.parseGeneralizedTime(config.getAttr(Provisioning.A_zimbraVersionCheckLastAttempt));\n \t\t\t\tDate now = new Date();\n \t\t\t\tif\t(now.getTime()/1000- lastChecked.getTime()/1000 >= checkInterval) {\n \t\t\t\t\tutil.doVersionCheck();\n \t\t\t\t} else {\n \t\t\t\t\tSystem.out.println(\"Too early\");\n \t\t\t\t\tSystem.exit(0);\n \t\t\t\t}\n \t\t\t} else {\n \t\t\t\tutil.doVersionCheck();\n \t\t\t}\n \t\t}\n } else if (cl.hasOption(OPT_MANUAL_CHECK_VERSION)) {\n util.doVersionCheck();\n } else if (cl.hasOption(SHOW_LAST_STATUS)) {\n util.doResult();\n System.exit(0);\n } else {\n util.usage();\n System.exit(1);\n }\n } catch (Exception e) {\n System.err.println(e.getMessage());\n ZimbraLog.extensions.error(\"Error in versioncheck util\", e);\n util.usage(null);\n System.exit(1);\n }\n }\n \n private void doVersionCheck() throws SoapFaultException, IOException, ServiceException, LmcSoapClientException {\n LmcSession session = auth();\n LmcVersionCheckRequest req = new LmcVersionCheckRequest();\n req.setAction(AdminConstants.VERSION_CHECK_CHECK);\n req.setSession(session);\n req.invoke(getServerUrl());\n }\n \n private void doResult() throws SoapFaultException, IOException, ServiceException, LmcSoapClientException {\n \ttry {\n\t \tLmcSession session = auth();\n\t LmcVersionCheckRequest req = new LmcVersionCheckRequest();\n\t req.setAction(AdminConstants.VERSION_CHECK_STATUS);\n\t req.setSession(session);\n\t LmcVersionCheckResponse res = (LmcVersionCheckResponse) req.invoke(getServerUrl());\n\t \tList updates = res.getUpdates();\n\t \t\n\t \tfor(Iterator iter = updates.iterator();iter.hasNext();){\n\t \t\tVersionUpdate update = iter.next();\n\t \t\tString critical;\n\t \t\tif(update.isCritical()) {\n\t \t\t\tcritical = \"critical\";\n\t \t\t} else { \n\t \t\t\tcritical = \"not critical\";\n\t \t\t}\n\t \t\tSystem.out.println(\n\t \t\t\t\tString.format(\"Found a %s update. Update is %s . Update version: %s. 
For more info visit: %s\", \n\t \t\t\t\t\t\tupdate.getType(),critical,update.getVersion(),update.getUpdateURL())\n\t \t\t\t); \t\t\n\t \t} \n \t} catch (SoapFaultException soape) {\n \t\tSystem.out.println(\"Caught SoapFaultException\");\n \t\tSystem.out.println(soape.getStackTrace().toString());\n \t\tthrow (soape);\n \t} catch (LmcSoapClientException lmce) {\n \t\tSystem.out.println(\"Caught LmcSoapClientException\");\n \t\tSystem.out.println(lmce.getStackTrace().toString());\n \t\tthrow (lmce);\n \t} catch (ServiceException se) {\n \t\tSystem.out.println(\"Caught ServiceException\");\n \t\tSystem.out.println(se.getStackTrace().toString());\n \t\tthrow (se);\n \t} catch (IOException ioe) {\n \t\tSystem.out.println(\"Caught IOException\");\n \t\tSystem.out.println(ioe.getStackTrace().toString());\n \t\tthrow (ioe);\n \t}\n }\n \n protected void setupCommandLineOptions() {\n // super.setupCommandLineOptions();\n Options options = getOptions();\n Options hiddenOptions = getHiddenOptions();\n hiddenOptions.addOption(OPT_CHECK_VERSION, \"autocheck\", false, \"Initiate version check request (exits if zimbraVersionCheckInterval==0)\"); \n options.addOption(SHOW_LAST_STATUS, \"result\", false, \"Show results of last version check.\");\n options.addOption(OPT_MANUAL_CHECK_VERSION, \"manual\", false, \"Initiate version check request.\");\n }\n \n protected String getCommandUsage() {\n return \"zmcheckversion \";\n }\n\n}\n"},"repo_name":{"kind":"string","value":"nico01f/z-pec"},"path":{"kind":"string","value":"ZimbraAdminVersionCheck/src/java/com/zimbra/cs/versioncheck/VersionCheckUtil.java"},"language":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":7334,"string":"7,334"}}},{"rowIdx":1731,"cells":{"code":{"kind":"string","value":"using System;\nusing System.Collections.Generic;\nusing System.Linq;\nusing System.Text;\nusing System.Threading.Tasks;\n\nnamespace EmployeeFinder.Models\n{\n public enum Position\n {\n Bartender,\n Waiter,\n Bellboy,\n Receptionist,\n Manager,\n Housekeeper,\n Chef,\n Maintenance\n\n }\n}\n"},"repo_name":{"kind":"string","value":"GeorgiNik/EmployeeFinder"},"path":{"kind":"string","value":"EmployeeFinder.Models/Position.cs"},"language":{"kind":"string","value":"C#"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":340,"string":"340"}}},{"rowIdx":1732,"cells":{"code":{"kind":"string","value":"using System;\nusing System.Collections.Generic;\nusing System.Linq;\nusing System.Text;\nusing System.Threading.Tasks;\n\nnamespace Domain\n{\n public class Meeting\n {\n public int ConsultantId { get; set; }\n public Consultant Consultant { get; set; }\n public int UserId { get; set; }\n public User User { get; set; }\n public DateTime BeginTime { get; set; }\n public DateTime EndTime { get; set; }\n\n public override string ToString()\n {\n return $\"{BeginTime} -> {EndTime}\";\n }\n }\n}\n"},"repo_name":{"kind":"string","value":"rohansen/Code-Examples"},"path":{"kind":"string","value":"Database/TransactionScopeWithGUI/Domain/Meeting.cs"},"language":{"kind":"string","value":"C#"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":559,"string":"559"}}},{"rowIdx":1733,"cells":{"code":{"kind":"string","value":"module PiwikAnalytics\n module Helpers\n def piwik_tracking_tag\n config = PiwikAnalytics.configuration\n return if config.disabled?\n\n if config.use_async?\n file = \"piwik_analytics/piwik_tracking_tag_async\"\n else\n file = \"piwik_analytics/piwik_tracking_tag\"\n 
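# Illustrative usage (an assumption, not part of this file): call this helper from a\n # layout template, e.g. <%= piwik_tracking_tag %>, so the tag is rendered on every page.\n 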
end\n render({\n :file => file,\n :locals => {:url => config.url, :id_site => config.id_site}\n })\n end\n end\nend\n"},"repo_name":{"kind":"string","value":"piwik/piwik-ruby-tracking"},"path":{"kind":"string","value":"lib/piwik_analytics/helpers.rb"},"language":{"kind":"string","value":"Ruby"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":435,"string":"435"}}},{"rowIdx":1734,"cells":{"code":{"kind":"string","value":"#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\n\"\"\"\n@author Stephan Reith\n@date \t31.08.2016\n\nThis is a simple example to demonstrate how the ROS Spinnaker Interface can be used.\n\nYou will also need a ROS Listener and a ROS Talker to send and receive data.\nMake sure they communicate over the same ROS topics and std_msgs.Int64 ROS Messages used in here.\n\"\"\"\n\nimport spynnaker.pyNN as pynn\n\nfrom ros_spinnaker_interface import ROS_Spinnaker_Interface\n# import transfer_functions as tf\nfrom ros_spinnaker_interface import SpikeSourcePoisson\nfrom ros_spinnaker_interface import SpikeSinkSmoothing\n\n\nts = 0.1\nn_neurons = 1\nsimulation_time = 10000 # ms\n\n\npynn.setup(timestep=ts, min_delay=ts, max_delay=2.0*ts)\n\n\npop = pynn.Population(size=n_neurons, cellclass=pynn.IF_curr_exp, cellparams={}, label='pop')\n\n\n# The ROS_Spinnaker_Interface just needs to be initialised. The following parameters are possible:\nros_interface = ROS_Spinnaker_Interface(\n n_neurons_source=n_neurons, # number of neurons of the injector population\n Spike_Source_Class=SpikeSourcePoisson, # the transfer function ROS Input -> Spikes you want to use.\n Spike_Sink_Class=SpikeSinkSmoothing, # the transfer function Spikes -> ROS Output you want to use.\n # You can choose from the transfer_functions module\n # or write one yourself.\n output_population=pop, # the pynn population you wish to receive the\n # live spikes from.\n ros_topic_send='to_spinnaker', # the ROS topic used for the incoming ROS values.\n ros_topic_recv='from_spinnaker', # the ROS topic used for the outgoing ROS values.\n clk_rate=1000, # mainloop clock (update) rate in Hz.\n ros_output_rate=10) # number of ROS messages send out per second.\n\n# Build your network, run the simulation and optionally record the spikes and voltages.\npynn.Projection(ros_interface, pop, pynn.OneToOneConnector(weights=5, delays=1))\n\n\npop.record()\npop.record_v()\n\npynn.run(simulation_time)\n\nspikes = pop.getSpikes()\n\npynn.end()\n\n# Plot\nimport pylab\n\nspike_times = [spike[1] for spike in spikes]\nspike_ids = [spike[0] for spike in spikes]\n\npylab.plot(spike_times, spike_ids, \".\")\npylab.xlabel('Time (ms)')\npylab.ylabel('Neuron ID')\npylab.title('Spike Plot')\npylab.xlim(xmin=0)\npylab.show()\n"},"repo_name":{"kind":"string","value":"reiths/ros_spinnaker_interface"},"path":{"kind":"string","value":"examples/example_ros_spinnaker_interface.py"},"language":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":2533,"string":"2,533"}}},{"rowIdx":1735,"cells":{"code":{"kind":"string","value":"\n\n \n \n \n area-method: Not compatible 👼\n \n \n \n \n \n \n \n \n \n \n
\n \n
\n
\n
\n « Up\n

\n area-method\n \n 8.5.0\n Not compatible 👼\n \n

\n

📅 (2022-02-04 18:52:19 UTC)

\n

Context

\n
# Packages matching: installed\n# Name              # Installed # Synopsis\nbase-bigarray       base\nbase-threads        base\nbase-unix           base\nconf-findutils      1           Virtual package relying on findutils\nconf-gmp            4           Virtual package relying on a GMP lib system installation\ncoq                 8.13.1      Formal proof management system\nnum                 1.4         The legacy Num library for arbitrary-precision integer and rational arithmetic\nocaml               4.07.1      The OCaml compiler (virtual package)\nocaml-base-compiler 4.07.1      Official release 4.07.1\nocaml-config        1           OCaml Switch Configuration\nocamlfind           1.9.3       A library manager for OCaml\nzarith              1.12        Implements arithmetic and logical operations over arbitrary-precision integers\n# opam file:\nopam-version: &quot;2.0&quot;\nmaintainer: &quot;matej.kosik@inria.fr&quot;\nhomepage: &quot;https://github.com/coq-contribs/area-method&quot;\nlicense: &quot;Proprietary&quot;\nbuild: [make &quot;-j%{jobs}%&quot;]\ninstall: [make &quot;install&quot;]\nremove: [&quot;rm&quot; &quot;-R&quot; &quot;%{lib}%/coq/user-contrib/AreaMethod&quot;]\ndepends: [\n  &quot;ocaml&quot;\n  &quot;coq&quot; {&gt;= &quot;8.5&quot; &amp; &lt; &quot;8.6~&quot;}\n]\ntags: [ &quot;keyword:geometry&quot; &quot;keyword:chou gao zhang area method&quot; &quot;keyword:decision procedure&quot; &quot;category:Mathematics/Geometry/AutomatedDeduction&quot; &quot;date:2004-2010&quot; ]\nauthors: [ &quot;Julien Narboux &lt;&gt;&quot; ]\nbug-reports: &quot;https://github.com/coq-contribs/area-method/issues&quot;\ndev-repo: &quot;git+https://github.com/coq-contribs/area-method.git&quot;\nsynopsis: &quot;The Chou, Gao and Zhang area method&quot;\ndescription: &quot;&quot;&quot;\nThis contribution is the implementation of the Chou, Gao and Zhang&#39;s area method decision procedure for euclidean plane geometry.\nThis development contains a partial formalization of the book &quot;Machine Proofs in Geometry, Automated Production of Readable Proofs for Geometry Theorems&quot; by Chou, Gao and Zhang.\nThe examples shown automatically (there are more than 100 examples) includes the Ceva, Desargues, Menelaus, Pascal, Centroïd, Pappus, Gauss line, Euler line, Napoleon theorems.\nChangelog\n2.1 : remove some not needed assumptions in some elimination lemmas (2010)\n2.0 : extension implementation to Euclidean geometry (2009-2010)\n1.0 : first implementation for affine geometry (2004)&quot;&quot;&quot;\nflags: light-uninstall\nurl {\n  src: &quot;https://github.com/coq-contribs/area-method/archive/v8.5.0.tar.gz&quot;\n  checksum: &quot;md5=ba9772aa2056aa4bc9ccc051a9a76a7f&quot;\n}\n
\n

Lint

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
\n

Dry install 🏜️

\n

Dry install with the current Coq version:

\n
\n
Command
\n
opam install -y --show-action coq-area-method.8.5.0 coq.8.13.1
\n
Return code
\n
5120
\n
Output
\n
[NOTE] Package coq is already installed (current version is 8.13.1).\nThe following dependencies couldn&#39;t be met:\n  - coq-area-method -&gt; coq &lt; 8.6~ -&gt; ocaml &lt; 4.06.0\n      base of this switch (use `--unlock-base&#39; to force)\nNo solution found, exiting\n
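The conflict reported above is between this switch's base compiler (OCaml 4.07.1) and the package's constraint coq &lt; 8.6~, whose releases only build with ocaml &lt; 4.06.0. One possible way to test the package locally, as a sketch (assuming opam 2.x; the switch name is arbitrary and OCaml 4.02.3 is one compiler version satisfying the bound):
opam switch create coq-8.5 4.02.3
opam install coq.8.5.3 coq-area-method.8.5.0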
\n
\n

Dry install without Coq/switch base, to test if the problem was incompatibility with the current Coq/OCaml version:

\n
\n
Command
\n
opam remove -y coq; opam install -y --show-action --unlock-base coq-area-method.8.5.0
\n
Return code
\n
0
\n
\n

Install dependencies

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
Duration
\n
0 s
\n
\n

Install 🚀

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
Duration
\n
0 s
\n
\n

Installation size

\n

No files were installed.

\n

Uninstall 🧹

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
Missing removes
\n
\n none\n
\n
Wrong removes
\n
\n none\n
\n
\n
\n
\n
\n
\n
\n

\n Sources are on GitHub © Guillaume Claret 🐣\n

\n
\n
\n \n \n \n\n"},"repo_name":{"kind":"string","value":"coq-bench/coq-bench.github.io"},"path":{"kind":"string","value":"clean/Linux-x86_64-4.07.1-2.0.6/released/8.13.1/area-method/8.5.0.html"},"language":{"kind":"string","value":"HTML"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":7612,"string":"7,612"}}},{"rowIdx":1736,"cells":{"code":{"kind":"string","value":"\n\n \n \n \n ieee754: Not compatible\n \n \n \n \n \n \n \n \n \n \n
\n
\n \n
\n
\n
\n
\n « Up\n

\n ieee754\n \n 8.7.0\n Not compatible\n \n

\n

(2020-08-24 17:47:06 UTC)

\n

Context

\n
# Packages matching: installed\n# Name              # Installed # Synopsis\nbase-bigarray       base\nbase-num            base        Num library distributed with the OCaml compiler\nbase-threads        base\nbase-unix           base\ncamlp5              7.12        Preprocessor-pretty-printer of OCaml\nconf-findutils      1           Virtual package relying on findutils\nconf-m4             1           Virtual package relying on m4\ncoq                 8.9.1       Formal proof management system\nnum                 0           The Num library for arbitrary-precision integer and rational arithmetic\nocaml               4.05.0      The OCaml compiler (virtual package)\nocaml-base-compiler 4.05.0      Official 4.05.0 release\nocaml-config        1           OCaml Switch Configuration\nocamlfind           1.8.1       A library manager for OCaml\n# opam file:\nopam-version: &quot;2.0&quot;\nmaintainer: &quot;Hugo.Herbelin@inria.fr&quot;\nhomepage: &quot;https://github.com/coq-contribs/ieee754&quot;\nlicense: &quot;LGPL 2.1&quot;\nbuild: [make &quot;-j%{jobs}%&quot;]\ninstall: [make &quot;install&quot;]\nremove: [&quot;rm&quot; &quot;-R&quot; &quot;%{lib}%/coq/user-contrib/IEEE754&quot;]\ndepends: [\n  &quot;ocaml&quot;\n  &quot;coq&quot; {&gt;= &quot;8.7&quot; &amp; &lt; &quot;8.8~&quot;}\n]\ntags: [ &quot;keyword: floating-point arithmetic&quot; &quot;keyword: floats&quot; &quot;keyword: IEEE&quot; &quot;category: Computer Science/Data Types and Data Structures&quot; &quot;category: Computer Science/Semantics and Compilation/Semantics&quot; &quot;date: 1997&quot; ]\nauthors: [ &quot;Patrick Loiseleur&quot; ]\nbug-reports: &quot;https://github.com/coq-contribs/ieee754/issues&quot;\ndev-repo: &quot;git+https://github.com/coq-contribs/ieee754.git&quot;\nsynopsis: &quot;A formalisation of the IEEE754 norm on floating-point arithmetic&quot;\ndescription: &quot;&quot;&quot;\nThis library contains a non-verified implementation of\nbinary floating-point addition and multiplication operators inspired\nby the IEEE-754 standard. It is today outdated.\nSee the attached 1997 report rapport-stage-dea.ps.gz for a discussion\n(in French) of this work.\nFor the state of the art at the time of updating this notice, see\ne.g. &quot;Flocq: A Unified Library for Proving Floating-point Algorithms\nin Coq&quot; by S. Boldo and G. Melquiond, 2011.&quot;&quot;&quot;\nflags: light-uninstall\nurl {\n  src: &quot;https://github.com/coq-contribs/ieee754/archive/v8.7.0.tar.gz&quot;\n  checksum: &quot;md5=c79fabb9831e0231bc5ce75f3be6aad7&quot;\n}\n
\n

Lint

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
\n

Dry install

\n

Dry install with the current Coq version:

\n
\n
Command
\n
opam install -y --show-action coq-ieee754.8.7.0 coq.8.9.1
\n
Return code
\n
5120
\n
Output
\n
[NOTE] Package coq is already installed (current version is 8.9.1).\nThe following dependencies couldn&#39;t be met:\n  - coq-ieee754 -&gt; coq &lt; 8.8~ -&gt; ocaml &lt; 4.03.0\n      base of this switch (use `--unlock-base&#39; to force)\nYour request can&#39;t be satisfied:\n  - No available version of coq satisfies the constraints\nNo solution found, exiting\n
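Here the constraint coq &lt; 8.8~ transitively requires ocaml &lt; 4.03.0, which this switch's base OCaml 4.05.0 cannot satisfy without rebuilding the switch. A sketch of a local reproduction (assuming opam 2.x; OCaml 4.02.3 is one compiler version satisfying the bound):
opam switch create coq-8.7 4.02.3
opam install coq.8.7.2 coq-ieee754.8.7.0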
\n
\n

Dry install without Coq/switch base, to test if the problem was incompatibility with the current Coq/OCaml version:

\n
\n
Command
\n
opam remove -y coq; opam install -y --show-action --unlock-base coq-ieee754.8.7.0
\n
Return code
\n
0
\n
\n

Install dependencies

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
Duration
\n
0 s
\n
\n

Install

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
Duration
\n
0 s
\n
\n

Installation size

\n

No files were installed.

\n

Uninstall

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
Missing removes
\n
\n none\n
\n
Wrong removes
\n
\n none\n
\n
\n
\n
\n
\n
\n
\n

\n Sources are on GitHub. © Guillaume Claret.\n

\n
\n
\n \n \n \n\n"},"repo_name":{"kind":"string","value":"coq-bench/coq-bench.github.io"},"path":{"kind":"string","value":"clean/Linux-x86_64-4.05.0-2.0.6/released/8.9.1/ieee754/8.7.0.html"},"language":{"kind":"string","value":"HTML"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":7485,"string":"7,485"}}},{"rowIdx":1737,"cells":{"code":{"kind":"string","value":"/**\n * @license\n * Copyright Google Inc. All Rights Reserved.\n *\n * Use of this source code is governed by an MIT-style license that can be\n * found in the LICENSE file at https://angular.io/license\n */\n\nimport {CompileDirectiveMetadata, CompileStylesheetMetadata, CompileTemplateMetadata, templateSourceUrl} from './compile_metadata';\nimport {CompilerConfig, preserveWhitespacesDefault} from './config';\nimport {ViewEncapsulation} from './core';\nimport * as html from './ml_parser/ast';\nimport {HtmlParser} from './ml_parser/html_parser';\nimport {InterpolationConfig} from './ml_parser/interpolation_config';\nimport {ParseTreeResult as HtmlParseTreeResult} from './ml_parser/parser';\nimport {ResourceLoader} from './resource_loader';\nimport {extractStyleUrls, isStyleUrlResolvable} from './style_url_resolver';\nimport {PreparsedElementType, preparseElement} from './template_parser/template_preparser';\nimport {UrlResolver} from './url_resolver';\nimport {isDefined, stringify, SyncAsync, syntaxError} from './util';\n\nexport interface PrenormalizedTemplateMetadata {\n ngModuleType: any;\n componentType: any;\n moduleUrl: string;\n template: string|null;\n templateUrl: string|null;\n styles: string[];\n styleUrls: string[];\n interpolation: [string, string]|null;\n encapsulation: ViewEncapsulation|null;\n animations: any[];\n preserveWhitespaces: boolean|null;\n}\n\nexport class DirectiveNormalizer {\n private _resourceLoaderCache = new Map>();\n\n constructor(\n private _resourceLoader: ResourceLoader, private _urlResolver: UrlResolver,\n private _htmlParser: HtmlParser, private _config: CompilerConfig) {}\n\n clearCache(): void {\n this._resourceLoaderCache.clear();\n }\n\n clearCacheFor(normalizedDirective: CompileDirectiveMetadata): void {\n if (!normalizedDirective.isComponent) {\n return;\n }\n const template = normalizedDirective.template !;\n this._resourceLoaderCache.delete(template.templateUrl!);\n template.externalStylesheets.forEach((stylesheet) => {\n this._resourceLoaderCache.delete(stylesheet.moduleUrl!);\n });\n }\n\n private _fetch(url: string): SyncAsync {\n let result = this._resourceLoaderCache.get(url);\n if (!result) {\n result = this._resourceLoader.get(url);\n this._resourceLoaderCache.set(url, result);\n }\n return result;\n }\n\n normalizeTemplate(prenormData: PrenormalizedTemplateMetadata):\n SyncAsync {\n if (isDefined(prenormData.template)) {\n if (isDefined(prenormData.templateUrl)) {\n throw syntaxError(`'${\n stringify(prenormData\n .componentType)}' component cannot define both template and templateUrl`);\n }\n if (typeof prenormData.template !== 'string') {\n throw syntaxError(`The template specified for component ${\n stringify(prenormData.componentType)} is not a string`);\n }\n } else if (isDefined(prenormData.templateUrl)) {\n if (typeof prenormData.templateUrl !== 'string') {\n throw syntaxError(`The templateUrl specified for component ${\n stringify(prenormData.componentType)} is not a string`);\n }\n } else {\n throw syntaxError(\n `No template specified for component ${stringify(prenormData.componentType)}`);\n }\n\n if (isDefined(prenormData.preserveWhitespaces) 
&&\n typeof prenormData.preserveWhitespaces !== 'boolean') {\n throw syntaxError(`The preserveWhitespaces option for component ${\n stringify(prenormData.componentType)} must be a boolean`);\n }\n\n return SyncAsync.then(\n this._preParseTemplate(prenormData),\n (preparsedTemplate) => this._normalizeTemplateMetadata(prenormData, preparsedTemplate));\n }\n\n private _preParseTemplate(prenomData: PrenormalizedTemplateMetadata):\n SyncAsync {\n let template: SyncAsync;\n let templateUrl: string;\n if (prenomData.template != null) {\n template = prenomData.template;\n templateUrl = prenomData.moduleUrl;\n } else {\n templateUrl = this._urlResolver.resolve(prenomData.moduleUrl, prenomData.templateUrl!);\n template = this._fetch(templateUrl);\n }\n return SyncAsync.then(\n template, (template) => this._preparseLoadedTemplate(prenomData, template, templateUrl));\n }\n\n private _preparseLoadedTemplate(\n prenormData: PrenormalizedTemplateMetadata, template: string,\n templateAbsUrl: string): PreparsedTemplate {\n const isInline = !!prenormData.template;\n const interpolationConfig = InterpolationConfig.fromArray(prenormData.interpolation!);\n const templateUrl = templateSourceUrl(\n {reference: prenormData.ngModuleType}, {type: {reference: prenormData.componentType}},\n {isInline, templateUrl: templateAbsUrl});\n const rootNodesAndErrors = this._htmlParser.parse(\n template, templateUrl, {tokenizeExpansionForms: true, interpolationConfig});\n if (rootNodesAndErrors.errors.length > 0) {\n const errorString = rootNodesAndErrors.errors.join('\\n');\n throw syntaxError(`Template parse errors:\\n${errorString}`);\n }\n\n const templateMetadataStyles = this._normalizeStylesheet(new CompileStylesheetMetadata(\n {styles: prenormData.styles, moduleUrl: prenormData.moduleUrl}));\n\n const visitor = new TemplatePreparseVisitor();\n html.visitAll(visitor, rootNodesAndErrors.rootNodes);\n const templateStyles = this._normalizeStylesheet(new CompileStylesheetMetadata(\n {styles: visitor.styles, styleUrls: visitor.styleUrls, moduleUrl: templateAbsUrl}));\n\n const styles = templateMetadataStyles.styles.concat(templateStyles.styles);\n\n const inlineStyleUrls = templateMetadataStyles.styleUrls.concat(templateStyles.styleUrls);\n const styleUrls = this\n ._normalizeStylesheet(new CompileStylesheetMetadata(\n {styleUrls: prenormData.styleUrls, moduleUrl: prenormData.moduleUrl}))\n .styleUrls;\n return {\n template,\n templateUrl: templateAbsUrl,\n isInline,\n htmlAst: rootNodesAndErrors,\n styles,\n inlineStyleUrls,\n styleUrls,\n ngContentSelectors: visitor.ngContentSelectors,\n };\n }\n\n private _normalizeTemplateMetadata(\n prenormData: PrenormalizedTemplateMetadata,\n preparsedTemplate: PreparsedTemplate): SyncAsync {\n return SyncAsync.then(\n this._loadMissingExternalStylesheets(\n preparsedTemplate.styleUrls.concat(preparsedTemplate.inlineStyleUrls)),\n (externalStylesheets) => this._normalizeLoadedTemplateMetadata(\n prenormData, preparsedTemplate, externalStylesheets));\n }\n\n private _normalizeLoadedTemplateMetadata(\n prenormData: PrenormalizedTemplateMetadata, preparsedTemplate: PreparsedTemplate,\n stylesheets: Map): CompileTemplateMetadata {\n // Algorithm:\n // - produce exactly 1 entry per original styleUrl in\n // CompileTemplateMetadata.externalStylesheets with all styles inlined\n // - inline all styles that are referenced by the template into CompileTemplateMetadata.styles.\n // Reason: be able to determine how many stylesheets there are even without loading\n // the template nor the 
stylesheets, so we can create a stub for TypeScript always synchronously\n    // (as resource loading may be async)\n\n    const styles = [...preparsedTemplate.styles];\n    this._inlineStyles(preparsedTemplate.inlineStyleUrls, stylesheets, styles);\n    const styleUrls = preparsedTemplate.styleUrls;\n\n    const externalStylesheets = styleUrls.map(styleUrl => {\n      const stylesheet = stylesheets.get(styleUrl)!;\n      const styles = [...stylesheet.styles];\n      this._inlineStyles(stylesheet.styleUrls, stylesheets, styles);\n      return new CompileStylesheetMetadata({moduleUrl: styleUrl, styles: styles});\n    });\n\n    let encapsulation = prenormData.encapsulation;\n    if (encapsulation == null) {\n      encapsulation = this._config.defaultEncapsulation;\n    }\n    if (encapsulation === ViewEncapsulation.Emulated && styles.length === 0 &&\n        styleUrls.length === 0) {\n      encapsulation = ViewEncapsulation.None;\n    }\n    return new CompileTemplateMetadata({\n      encapsulation,\n      template: preparsedTemplate.template,\n      templateUrl: preparsedTemplate.templateUrl,\n      htmlAst: preparsedTemplate.htmlAst,\n      styles,\n      styleUrls,\n      ngContentSelectors: preparsedTemplate.ngContentSelectors,\n      animations: prenormData.animations,\n      interpolation: prenormData.interpolation,\n      isInline: preparsedTemplate.isInline,\n      externalStylesheets,\n      preserveWhitespaces: preserveWhitespacesDefault(\n          prenormData.preserveWhitespaces, this._config.preserveWhitespaces),\n    });\n  }\n\n  private _inlineStyles(\n      styleUrls: string[], stylesheets: Map<string, CompileStylesheetMetadata>,\n      targetStyles: string[]) {\n    styleUrls.forEach(styleUrl => {\n      const stylesheet = stylesheets.get(styleUrl)!;\n      stylesheet.styles.forEach(style => targetStyles.push(style));\n      this._inlineStyles(stylesheet.styleUrls, stylesheets, targetStyles);\n    });\n  }\n\n  private _loadMissingExternalStylesheets(\n      styleUrls: string[],\n      loadedStylesheets:\n          Map<string, CompileStylesheetMetadata> = new Map<string, CompileStylesheetMetadata>()):\n      SyncAsync<Map<string, CompileStylesheetMetadata>> {\n    return SyncAsync.then(\n        SyncAsync.all(styleUrls.filter((styleUrl) => !loadedStylesheets.has(styleUrl))\n                          .map(\n                              styleUrl => SyncAsync.then(\n                                  this._fetch(styleUrl),\n                                  (loadedStyle) => {\n                                    const stylesheet =\n                                        this._normalizeStylesheet(new CompileStylesheetMetadata(\n                                            {styles: [loadedStyle], moduleUrl: styleUrl}));\n                                    loadedStylesheets.set(styleUrl, stylesheet);\n                                    return this._loadMissingExternalStylesheets(\n                                        stylesheet.styleUrls, loadedStylesheets);\n                                  }))),\n        (_) => loadedStylesheets);\n  }\n\n  private _normalizeStylesheet(stylesheet: CompileStylesheetMetadata): CompileStylesheetMetadata {\n    const moduleUrl = stylesheet.moduleUrl!;\n    const allStyleUrls = stylesheet.styleUrls.filter(isStyleUrlResolvable)\n                             .map(url => this._urlResolver.resolve(moduleUrl, url));\n\n    const allStyles = stylesheet.styles.map(style => {\n      const styleWithImports = extractStyleUrls(this._urlResolver, moduleUrl, style);\n      allStyleUrls.push(...styleWithImports.styleUrls);\n      return styleWithImports.style;\n    });\n\n    return new CompileStylesheetMetadata(\n        {styles: allStyles, styleUrls: allStyleUrls, moduleUrl: moduleUrl});\n  }\n}\n\ninterface PreparsedTemplate {\n  template: string;\n  templateUrl: string;\n  isInline: boolean;\n  htmlAst: HtmlParseTreeResult;\n  styles: string[];\n  inlineStyleUrls: string[];\n  styleUrls: string[];\n  ngContentSelectors: string[];\n}\n\nclass TemplatePreparseVisitor implements html.Visitor {\n  ngContentSelectors: string[] = [];\n  styles: string[] = [];\n  styleUrls: string[] = [];\n  ngNonBindableStackCount: number = 0;\n\n  visitElement(ast: html.Element, context: any): any {\n    const preparsedElement = preparseElement(ast);\n    switch (preparsedElement.type) 
{\n case PreparsedElementType.NG_CONTENT:\n if (this.ngNonBindableStackCount === 0) {\n this.ngContentSelectors.push(preparsedElement.selectAttr);\n }\n break;\n case PreparsedElementType.STYLE:\n let textContent = '';\n ast.children.forEach(child => {\n if (child instanceof html.Text) {\n textContent += child.value;\n }\n });\n this.styles.push(textContent);\n break;\n case PreparsedElementType.STYLESHEET:\n this.styleUrls.push(preparsedElement.hrefAttr);\n break;\n default:\n break;\n }\n if (preparsedElement.nonBindable) {\n this.ngNonBindableStackCount++;\n }\n html.visitAll(this, ast.children);\n if (preparsedElement.nonBindable) {\n this.ngNonBindableStackCount--;\n }\n return null;\n }\n\n visitExpansion(ast: html.Expansion, context: any): any {\n html.visitAll(this, ast.cases);\n }\n\n visitExpansionCase(ast: html.ExpansionCase, context: any): any {\n html.visitAll(this, ast.expression);\n }\n\n visitComment(ast: html.Comment, context: any): any {\n return null;\n }\n visitAttribute(ast: html.Attribute, context: any): any {\n return null;\n }\n visitText(ast: html.Text, context: any): any {\n return null;\n }\n}\n"},"repo_name":{"kind":"string","value":"matsko/angular"},"path":{"kind":"string","value":"packages/compiler/src/directive_normalizer.ts"},"language":{"kind":"string","value":"TypeScript"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":12863,"string":"12,863"}}},{"rowIdx":1738,"cells":{"code":{"kind":"string","value":"parent = false;\n\n $this->blocks = array(\n );\n }\n\n protected function doDisplay(array $context, array $blocks = array())\n {\n // line 1\n $this->env->loadTemplate(\"TwigBundle:Exception:error.xml.twig\")->display(array_merge($context, array(\"exception\" => (isset($context[\"exception\"]) ? $context[\"exception\"] : $this->getContext($context, \"exception\")))));\n }\n\n public function getTemplateName()\n {\n return \"TwigBundle:Exception:error.atom.twig\";\n }\n\n public function isTraitable()\n {\n return false;\n }\n\n public function getDebugInfo()\n {\n return array ( 19 => 1, 79 => 21, 72 => 13, 69 => 12, 47 => 18, 40 => 11, 37 => 10, 22 => 1, 246 => 32, 157 => 56, 145 => 46, 139 => 45, 131 => 42, 123 => 41, 120 => 40, 115 => 39, 111 => 38, 108 => 37, 101 => 33, 98 => 32, 96 => 31, 83 => 25, 74 => 14, 66 => 11, 55 => 16, 52 => 21, 50 => 14, 43 => 9, 41 => 8, 35 => 9, 32 => 4, 29 => 6, 209 => 82, 203 => 78, 199 => 76, 193 => 73, 189 => 71, 187 => 70, 182 => 68, 176 => 64, 173 => 63, 168 => 62, 164 => 58, 162 => 57, 154 => 54, 149 => 51, 147 => 50, 144 => 49, 141 => 48, 133 => 42, 130 => 41, 125 => 38, 122 => 37, 116 => 36, 112 => 35, 109 => 34, 106 => 36, 103 => 32, 99 => 30, 95 => 28, 92 => 29, 86 => 24, 82 => 22, 80 => 24, 73 => 19, 64 => 19, 60 => 6, 57 => 12, 54 => 22, 51 => 10, 48 => 9, 45 => 17, 42 => 16, 39 => 6, 36 => 5, 33 => 4, 30 => 3,);\n }\n}\n"},"repo_name":{"kind":"string","value":"Mchichou/UEOptionnelles"},"path":{"kind":"string","value":"app/cache/dev/twig/40/53/49459f7f2e8922747537b1c12aa2323bb61b0265aaf549db7e51eafd66f4.php"},"language":{"kind":"string","value":"PHP"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":1789,"string":"1,789"}}},{"rowIdx":1739,"cells":{"code":{"kind":"string","value":"\n\n\n\n\n\nPage has moved\n\n\n

Click here...

\n\n\n\n\n\n"},"repo_name":{"kind":"string","value":"hoangphuc1494/sourd_codeigniter"},"path":{"kind":"string","value":"assest/thevillage/index5c36.html"},"language":{"kind":"string","value":"HTML"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":758,"string":"758"}}},{"rowIdx":1740,"cells":{"code":{"kind":"string","value":"/**\n * HTTP.test\n */\n\n\"use strict\";\n\n\n/* Node modules */\n\n\n/* Third-party modules */\nvar steeplejack = require(\"steeplejack\");\n\n\n/* Files */\n\n\ndescribe(\"HTTPError test\", function () {\n\n var HTTPError;\n beforeEach(function () {\n\n injector(function (_HTTPError_) {\n HTTPError = _HTTPError_;\n });\n\n });\n\n describe(\"Instantation tests\", function () {\n\n it(\"should extend the steeplejack Fatal exception\", function () {\n\n var obj = new HTTPError(\"text\");\n\n expect(obj).to.be.instanceof(HTTPError)\n .to.be.instanceof(steeplejack.Exceptions.Fatal);\n\n expect(obj.type).to.be.equal(\"HTTPError\");\n expect(obj.message).to.be.equal(\"text\");\n expect(obj.httpCode).to.be.equal(500);\n expect(obj.getHttpCode()).to.be.equal(500);\n\n });\n\n it(\"should set the HTTP code in the first input\", function () {\n\n var obj = new HTTPError(401);\n\n expect(obj.httpCode).to.be.equal(401);\n expect(obj.getHttpCode()).to.be.equal(401);\n\n });\n\n });\n\n});\n"},"repo_name":{"kind":"string","value":"riggerthegeek/steeplejack-errors"},"path":{"kind":"string","value":"test/unit/errors/HTTP.test.js"},"language":{"kind":"string","value":"JavaScript"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":1103,"string":"1,103"}}},{"rowIdx":1741,"cells":{"code":{"kind":"string","value":"require File.join(File.dirname(__FILE__), './scribd-carrierwave/version')\nrequire File.join(File.dirname(__FILE__), './scribd-carrierwave/config')\nrequire 'carrierwave'\nrequire 'rscribd'\nrequire 'configatron'\n\nmodule ScribdCarrierWave\n class << self\n def included(base)\n base.extend ClassMethods\n end\n\n def upload uploader\n\n file_path = full_path(uploader)\n args = { file: file_path, access: ( uploader.class.public? ? 'public' : 'private' )}\n\n type = File.extname(file_path)\n if type\n type = type.gsub(/^\\./, '').gsub(/\\?.*$/, '')\n args.merge!(type: type) if type != ''\n end\n\n scribd_user.upload(args)\n end\n\n def destroy uploader\n document = scribd_user.find_document(uploader.ipaper_id) rescue nil\n document.destroy if !document.nil?\n end\n\n def load_ipaper_document(id)\n scribd_user.find_document(id) rescue nil\n end\n\n def full_path uploader\n if uploader.url =~ /^http(s?):\\/\\//\n uploader.url\n else\n uploader.root + uploader.url\n end\n end\n\n module ClassMethods\n\n def public?\n @public\n end\n\n def has_ipaper(public = false)\n include InstanceMethods\n after :store, :upload_to_scribd\n before :remove, :delete_from_scribd\n\n @public = !!public\n end\n end\n\n module InstanceMethods\n def self.included(base)\n base.extend ClassMethods\n end\n\n def upload_to_scribd files\n res = ScribdCarrierWave::upload(self)\n set_params res\n end\n\n def delete_from_scribd\n ScribdCarrierWave::destroy(self)\n end\n\n def display_ipaper(options = {})\n id = options.delete(:id)\n <<-END\n \n
#{options.delete(:alt)}
\n \n END\n end\n\n def fullscreen_url\n \"http://www.scribd.com/fullscreen/#{ipaper_id}?access_key=#{ipaper_access_key}\"\n end\n\n def ipaper_id\n self.model.send(\"#{self.mounted_as.to_s}_ipaper_id\")\n end\n\n def ipaper_access_key\n self.model.send(\"#{self.mounted_as.to_s}_ipaper_access_key\")\n end\n\n # Responds the Scribd::Document associated with this model, or nil if it does not exist.\n def ipaper_document\n @document ||= ScribdCarrierWave::load_ipaper_document(ipaper_id)\n end\n\n\n private\n\n def set_params res\n self.model.update_attributes({\"#{self.mounted_as}_ipaper_id\" => res.doc_id,\n \"#{self.mounted_as}_ipaper_access_key\" => res.access_key})\n end\n end\n\n private\n\n def scribd_user\n Scribd::API.instance.key = ScribdCarrierWave.config.key\n Scribd::API.instance.secret = ScribdCarrierWave.config.secret\n @scribd_user = Scribd::User.login(ScribdCarrierWave.config.username, ScribdCarrierWave.config.password)\n end\n end\nend\n\nCarrierWave::Uploader::Base.send(:include, ScribdCarrierWave) if Object.const_defined?(\"CarrierWave\")\n"},"repo_name":{"kind":"string","value":"milkfarm/scribd-carrierwave"},"path":{"kind":"string","value":"lib/scribd-carrierwave.rb"},"language":{"kind":"string","value":"Ruby"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":3507,"string":"3,507"}}},{"rowIdx":1742,"cells":{"code":{"kind":"string","value":"var gulp = require('gulp');\nvar babel = require('gulp-babel');\nvar concat = require('gulp-concat');\nvar merge = require('merge-stream');\nvar stylus = require('gulp-stylus');\nvar rename = require(\"gulp-rename\");\nvar uglify = require(\"gulp-uglify\");\nvar cssmin = require(\"gulp-cssmin\");\nvar ngAnnotate = require('gulp-ng-annotate');\nvar nib = require(\"nib\");\nvar watch = require('gulp-watch');\n\nfunction compileJs(devOnly) {\n\tvar othersUmd = gulp.src(['src/**/*.js', '!src/main.js'])\n\t\t.pipe(babel({\n\t\t\tmodules: 'umdStrict',\n\t\t\tmoduleRoot: 'angular-chatbar',\n\t\t\tmoduleIds: true\n\t\t})),\n\t\tmainUmd = gulp.src('src/main.js')\n\t\t.pipe(babel({\n\t\t\tmodules: 'umdStrict',\n\t\t\tmoduleIds: true,\n\t\t\tmoduleId: 'angular-chatbar'\n\t\t})),\n\t\tstream = merge(othersUmd, mainUmd)\n\t\t.pipe(concat('angular-chatbar.umd.js'))\n\t\t.pipe(gulp.dest('dist'))\n\t;\n\n\tif (!devOnly) {\n\t\tstream = stream\n\t\t.pipe(ngAnnotate())\n\t\t.pipe(uglify())\n\t\t.pipe(rename('angular-chatbar.umd.min.js'))\n\t\t.pipe(gulp.dest('dist'));\n\t}\n\n\treturn stream;\n}\n\nfunction compileCss(name, devOnly) {\n\tvar stream = gulp.src('styles/' + name + '.styl')\n\t\t.pipe(stylus({use: nib()}))\n\t\t.pipe(rename('angular-' + name + '.css'))\n\t\t.pipe(gulp.dest('dist'))\n\t;\n\n\tif (!devOnly) {\n\t\tstream = stream.pipe(cssmin())\n\t\t.pipe(rename('angular-' + name + '.min.css'))\n\t\t.pipe(gulp.dest('dist'));\n\t}\n\n\treturn stream;\n}\n\nfunction compileAllCss(devOnly) {\n\tvar streams = [];\n\n\t['chatbar', 'chatbar.default-theme', 'chatbar.default-animations'].forEach(function (name) {\n\t\tstreams.push(compileCss(name, devOnly));\n\t});\n\n\treturn merge.apply(null, streams);\n}\n\ngulp.task('default', function() {\n\treturn merge.apply(compileJs(), compileAllCss());\n});\n\ngulp.task('_watch', function() {\n\twatch('styles/**/*.styl', function () {\n\t\tcompileAllCss(true);\n\t});\n\twatch('src/**/*.js', function () {\n\t\tcompileJs(true);\n\t});\n});\n\ngulp.task('watch', ['default', 
'_watch']);\n"},"repo_name":{"kind":"string","value":"jlowcs/angular-chatbar"},"path":{"kind":"string","value":"gulpfile.js"},"language":{"kind":"string","value":"JavaScript"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":1878,"string":"1,878"}}},{"rowIdx":1743,"cells":{"code":{"kind":"string","value":"# -*- coding: utf-8 -*-\n\n\"\"\" Resource Import Tools\n\n @copyright: 2011-12 (c) Sahana Software Foundation\n @license: MIT\n\n Permission is hereby granted, free of charge, to any person\n obtaining a copy of this software and associated documentation\n files (the \"Software\"), to deal in the Software without\n restriction, including without limitation the rights to use,\n copy, modify, merge, publish, distribute, sublicense, and/or sell\n copies of the Software, and to permit persons to whom the\n Software is furnished to do so, subject to the following\n conditions:\n\n The above copyright notice and this permission notice shall be\n included in all copies or substantial portions of the Software.\n\n THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,\n EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES\n OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND\n NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT\n HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,\n WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR\n OTHER DEALINGS IN THE SOFTWARE.\n\"\"\"\n\n# @todo: remove all interactive error reporting out of the _private methods, and raise exceptions instead.\n__all__ = [\"S3Importer\", \"S3ImportJob\", \"S3ImportItem\"]\n\nimport os\nimport sys\nimport cPickle\nimport tempfile\nfrom datetime import datetime\nfrom copy import deepcopy\ntry:\n from cStringIO import StringIO # Faster, where available\nexcept:\n from StringIO import StringIO\n\ntry:\n from lxml import etree\nexcept ImportError:\n print >> sys.stderr, \"ERROR: lxml module needed for XML handling\"\n raise\n\ntry:\n import json # try stdlib (Python 2.6)\nexcept ImportError:\n try:\n import simplejson as json # try external module\n except:\n import gluon.contrib.simplejson as json # fallback to pure-Python module\n\nfrom gluon import *\nfrom gluon.serializers import json as jsons\nfrom gluon.storage import Storage, Messages\nfrom gluon.tools import callback\n\nfrom s3utils import SQLTABLES3\nfrom s3crud import S3CRUD\nfrom s3xml import S3XML\nfrom s3utils import s3_mark_required, s3_has_foreign_key, s3_get_foreign_key\n\nDEBUG = False\nif DEBUG:\n print >> sys.stderr, \"S3IMPORTER: DEBUG MODE\"\n def _debug(m):\n print >> sys.stderr, m\nelse:\n _debug = lambda m: None\n\n# =============================================================================\nclass S3Importer(S3CRUD):\n \"\"\"\n Transformable formats (XML, JSON, CSV) import handler\n \"\"\"\n\n UPLOAD_TABLE_NAME = \"s3_import_upload\"\n\n # -------------------------------------------------------------------------\n def apply_method(self, r, **attr):\n \"\"\"\n Apply CRUD methods\n\n @param r: the S3Request\n @param attr: dictionary of parameters for the method handler\n\n @returns: output object to send to the view\n\n Known means of communicating with this module:\n\n It expects a URL of the form: /prefix/name/import\n\n It will interpret the http requests as follows:\n\n GET will trigger the upload\n POST will trigger either commits or display the import details\n DELETE will trigger deletes\n\n It will accept 
one of the following control vars:\n item: to specify a single item in the import job\n job: to specify a job\n It should not receive both so job takes precedent over item\n\n For CSV imports, the calling controller can add extra fields\n to the upload form to add columns to each row in the CSV. To add\n the extra fields, pass a named parameter \"csv_extra_fields\" to the\n s3_rest_controller call (or the S3Request call, respectively):\n\n s3_rest_controller(module, resourcename,\n csv_extra_fields=[\n dict(label=\"ColumnLabelInTheCSV\",\n field=field_instance)\n ])\n\n The Field instance \"field\" will be added to the upload form, and\n the user input will be added to each row of the CSV under the\n label as specified. If the \"field\" validator has options, the\n input value will be translated into the option representation,\n otherwise the value will be used as-is.\n\n Note that the \"label\" in the dict is the column label in the CSV,\n whereas the field label for the form is to be set in the Field\n instance passed as \"field\".\n\n You can add any arbitrary number of csv_extra_fields to the list.\n\n Additionally, you may want to allow the user to choose whether\n the import shall first remove all existing data in the target\n table. To do so, pass a label for the \"replace_option\" to the\n request:\n\n s3_rest_controller(module, resourcename,\n replace_option=T(\"Remove existing data before import\"))\n\n This will add the respective checkbox to the upload form.\n\n You may also want to provide a link to download a CSV template from\n the upload form. To do that, add the resource name to the request\n attributes:\n\n s3_rest_controller(module, resourcename,\n csv_template=\"\")\n\n This will provide a link to:\n - static/formats/s3csv//.csv\n at the top of the upload form.\n\n \"\"\"\n\n _debug(\"S3Importer.apply_method(%s)\" % r)\n\n # Messages\n T = current.T\n messages = self.messages = Messages(T)\n messages.download_template = \"Download Template\"\n messages.invalid_file_format = \"Invalid File Format\"\n messages.unsupported_file_type = \"Unsupported file type of %s\"\n messages.stylesheet_not_found = \"No Stylesheet %s could be found to manage the import file.\"\n messages.no_file = \"No file submitted\"\n messages.file_open_error = \"Unable to open the file %s\"\n messages.file_not_found = \"The file to upload is missing\"\n messages.no_records_to_import = \"No records to import\"\n messages.no_job_to_delete = \"No job to delete, maybe it has already been deleted.\"\n messages.title_job_read = \"Details of the selected import job\"\n messages.title_job_list = \"List of import items\"\n messages.file_uploaded = \"Import file uploaded\"\n messages.upload_submit_btn = \"Upload Data File\"\n messages.open_btn = \"Open\"\n messages.view_btn = \"View\"\n messages.delete_btn = \"Delete\"\n messages.item_show_details = \"Display Details\"\n messages.job_total_records = \"Total records in the Import Job\"\n messages.job_records_selected = \"Records selected\"\n messages.job_deleted = \"Import job deleted\"\n messages.job_completed = \"Job run on %s. 
With result of (%s)\"\n messages.import_file = \"Import File\"\n messages.import_file_comment = \"Upload a file formatted according to the Template.\"\n messages.user_name = \"User Name\"\n messages.commit_total_records_imported = \"%s records imported\"\n messages.commit_total_records_ignored = \"%s records ignored\"\n messages.commit_total_errors = \"%s records in error\"\n\n try:\n self.uploadTitle = current.response.s3.crud_strings[self.tablename].title_upload\n except:\n self.uploadTitle = T(\"Upload a %s import file\" % r.function)\n\n # @todo: correct to switch this off for the whole session?\n current.session.s3.ocr_enabled = False\n\n # Reset all errors/warnings\n self.error = None\n self.warning = None\n\n # CSV upload configuration\n if \"csv_stylesheet\" in attr:\n self.csv_stylesheet = attr[\"csv_stylesheet\"]\n else:\n self.csv_stylesheet = None\n self.csv_extra_fields = None\n self.csv_extra_data = None\n\n # Environment\n self.controller = r.controller\n self.function = r.function\n\n # Target table for the data import\n self.controller_resource = self.resource\n self.controller_table = self.table\n self.controller_tablename = self.tablename\n\n # Table for uploads\n self.__define_table()\n self.upload_resource = None\n self.item_resource = None\n\n # XSLT Path\n self.xslt_path = os.path.join(r.folder, r.XSLT_PATH)\n self.xslt_extension = r.XSLT_EXTENSION\n\n # Check authorization\n authorised = self.permit(\"create\", self.upload_tablename) and \\\n self.permit(\"create\", self.controller_tablename)\n if not authorised:\n if r.method is not None:\n r.unauthorised()\n else:\n return dict(form=None)\n\n # @todo: clean this up\n source = None\n transform = None\n upload_id = None\n items = None\n # @todo get the data from either get_vars or post_vars appropriately\n # for post -> commit_items would need to add the uploadID\n if \"transform\" in r.get_vars:\n transform = r.get_vars[\"transform\"]\n if \"filename\" in r.get_vars:\n source = r.get_vars[\"filename\"]\n if \"job\" in r.post_vars:\n upload_id = r.post_vars[\"job\"]\n elif \"job\" in r.get_vars:\n upload_id = r.get_vars[\"job\"]\n items = self._process_item_list(upload_id, r.vars)\n if \"delete\" in r.get_vars:\n r.http = \"DELETE\"\n\n # If we have an upload ID, then get upload and import job\n self.upload_id = upload_id\n query = (self.upload_table.id == upload_id)\n self.upload_job = current.db(query).select(limitby=(0, 1)).first()\n if self.upload_job:\n self.job_id = self.upload_job.job_id\n else:\n self.job_id = None\n\n # Now branch off to the appropriate controller function\n if r.http == \"GET\":\n if source != None:\n self.commit(source, transform)\n output = self.upload(r, **attr)\n if upload_id != None:\n output = self.display_job(upload_id)\n else:\n output = self.upload(r, **attr)\n elif r.http == \"POST\":\n if items != None:\n output = self.commit_items(upload_id, items)\n else:\n output = self.generate_job(r, **attr)\n elif r.http == \"DELETE\":\n if upload_id != None:\n output = self.delete_job(upload_id)\n else:\n r.error(405, current.manager.ERROR.BAD_METHOD)\n\n return output\n\n # -------------------------------------------------------------------------\n def upload(self, r, **attr):\n \"\"\"\n This will display the upload form\n It will ask for a file to be uploaded or for a job to be selected.\n\n If a file is uploaded then it will guess at the file type and\n ask for the transform file to be used. 
The transform files will\n be in a dataTable with the module specific files shown first and\n after those all other known transform files. Once the transform\n file is selected the import process can be started which will\n generate an importJob, and a \"POST\" method will occur\n\n If a job is selected it will have two actions, open and delete.\n Open will mean that a \"GET\" method will occur, with the job details\n passed in.\n Whilst the delete action will trigger a \"DELETE\" method.\n \"\"\"\n\n _debug(\"S3Importer.upload()\")\n\n request = self.request\n\n form = self._upload_form(r, **attr)\n output = self._create_upload_dataTable()\n if request.representation == \"aadata\":\n return output\n\n output.update(form=form, title=self.uploadTitle)\n return output\n\n # -------------------------------------------------------------------------\n def generate_job(self, r, **attr):\n \"\"\"\n Generate an ImportJob from the submitted upload form\n \"\"\"\n\n _debug(\"S3Importer.display()\")\n\n response = current.response\n s3 = response.s3\n\n db = current.db\n table = self.upload_table\n\n title=self.uploadTitle\n form = self._upload_form(r, **attr)\n\n r = self.request\n r.read_body()\n sfilename = form.vars.file\n try:\n ofilename = r.post_vars[\"file\"].filename\n except:\n form.errors.file = self.messages.no_file\n\n if form.errors:\n response.flash = \"\"\n output = self._create_upload_dataTable()\n output.update(form=form, title=title)\n\n elif not sfilename or \\\n ofilename not in r.files or r.files[ofilename] is None:\n response.flash = \"\"\n response.error = self.messages.file_not_found\n output = self._create_upload_dataTable()\n output.update(form=form, title=title)\n\n else:\n output = dict()\n query = (table.file == sfilename)\n db(query).update(controller=self.controller,\n function=self.function,\n filename=ofilename,\n user_id=current.session.auth.user.id)\n # must commit here to separate this transaction from\n # the trial import phase which will be rolled back.\n db.commit()\n\n extension = ofilename.rsplit(\".\", 1).pop()\n if extension not in (\"csv\", \"xls\"):\n response.flash = None\n response.error = self.messages.invalid_file_format\n return self.upload(r, **attr)\n\n upload_file = r.files[ofilename]\n if extension == \"xls\":\n if \"xls_parser\" in s3:\n upload_file.seek(0)\n upload_file = s3.xls_parser(upload_file.read())\n extension = \"csv\"\n\n if upload_file is None:\n response.flash = None\n response.error = self.messages.file_not_found\n return self.upload(r, **attr)\n else:\n upload_file.seek(0)\n\n row = db(query).select(table.id, limitby=(0, 1)).first()\n upload_id = row.id\n if \"single_pass\" in r.vars:\n single_pass = r.vars[\"single_pass\"]\n else:\n single_pass = None\n self._generate_import_job(upload_id,\n upload_file,\n extension,\n commit_job = single_pass)\n if upload_id is None:\n row = db(query).update(status = 2) # in error\n if self.error != None:\n response.error = self.error\n if self.warning != None:\n response.warning = self.warning\n response.flash = \"\"\n return self.upload(r, **attr)\n else:\n if single_pass:\n current.session.flash = self.messages.file_uploaded\n # For a single pass retain the vars from the original URL\n next_URL = URL(r=self.request,\n f=self.function,\n args=[\"import\"],\n vars=current.request.get_vars\n )\n redirect(next_URL)\n s3.dataTable_vars = {\"job\" : upload_id}\n return self.display_job(upload_id)\n return output\n\n # -------------------------------------------------------------------------\n def 
display_job(self, upload_id):\n \"\"\"\n @todo: docstring?\n \"\"\"\n\n _debug(\"S3Importer.display_job()\")\n\n request = self.request\n response = current.response\n\n db = current.db\n table = self.upload_table\n job_id = self.job_id\n output = dict()\n if job_id == None:\n # redirect to the start page (removes all vars)\n query = (table.id == upload_id)\n row = db(query).update(status = 2) # in error\n current.session.warning = self.messages.no_records_to_import\n redirect(URL(r=request, f=self.function, args=[\"import\"]))\n\n # Get the status of the upload job\n query = (table.id == upload_id)\n row = db(query).select(table.status,\n table.modified_on,\n table.summary_added,\n table.summary_error,\n table.summary_ignored,\n limitby=(0, 1)).first()\n status = row.status\n # completed display details\n if status == 3: # Completed\n # @todo currently this is an unnecessary server call,\n # change for completed records to be a display details\n # and thus avoid the round trip.\n # but keep this code to protect against hand-crafted URLs\n # (and the 'go back' syndrome on the browser)\n result = (row.summary_added,\n row.summary_error,\n row.summary_ignored,\n )\n self._display_completed_job(result, row.modified_on)\n redirect(URL(r=request, f=self.function, args=[\"import\"]))\n # otherwise display import items\n response.view = self._view(request, \"list.html\")\n\n output = self._create_import_item_dataTable(upload_id, job_id)\n if request.representation == \"aadata\":\n return output\n\n if response.s3.error_report:\n error_report = \"Errors|\" + \"|\".join(response.s3.error_report)\n error_tip = A(\"All Errors\",\n _class=\"errortip\",\n _title=error_report)\n else:\n # @todo: restore the error tree from all items?\n error_tip = \"\"\n\n rowcount = len(self._get_all_items(upload_id))\n rheader = DIV(TABLE(\n TR(\n TH(\"%s: \" % self.messages.job_total_records),\n TD(rowcount, _id=\"totalAvaliable\"),\n TH(\"%s: \" % self.messages.job_records_selected),\n TD(0, _id=\"totalSelected\"),\n TH(error_tip)\n ),\n ))\n\n output[\"title\"] = self.messages.title_job_read\n output[\"rheader\"] = rheader\n output[\"subtitle\"] = self.messages.title_job_list\n\n return output\n\n # -------------------------------------------------------------------------\n def commit(self, source, transform):\n \"\"\"\n @todo: docstring?\n \"\"\"\n\n _debug(\"S3Importer.commit(%s, %s)\" % (source, transform))\n\n db = current.db\n session = current.session\n request = self.request\n\n try:\n openFile = open(source, \"r\")\n except:\n session.error = self.messages.file_open_error % source\n redirect(URL(r=request, f=self.function))\n\n # @todo: manage different file formats\n # @todo: find file format from request.extension\n fileFormat = \"csv\"\n\n # insert data in the table and get the ID\n try:\n user = session.auth.user.id\n except:\n user = None\n\n upload_id = self.upload_table.insert(controller=self.controller,\n function=self.function,\n filename = source,\n user_id = user,\n status = 1)\n db.commit()\n\n # create the import job\n result = self._generate_import_job(upload_id,\n openFile,\n fileFormat,\n stylesheet=transform\n )\n if result == None:\n if self.error != None:\n if session.error == None:\n session.error = self.error\n else:\n session.error += self.error\n if self.warning != None:\n if session.warning == None:\n session.warning = self.warning\n else:\n session.warning += self.warning\n else:\n items = self._get_all_items(upload_id, True)\n # commit the import job\n 
self._commit_import_job(upload_id, items)\n result = self._update_upload_job(upload_id)\n\n # get the results and display\n msg = \"%s : %s %s %s\" % (source,\n self.messages.commit_total_records_imported,\n self.messages.commit_total_errors,\n self.messages.commit_total_records_ignored)\n msg = msg % result\n\n if session.flash == None:\n session.flash = msg\n else:\n session.flash += msg\n\n # @todo: return the upload_id?\n\n # -------------------------------------------------------------------------\n def commit_items(self, upload_id, items):\n \"\"\"\n @todo: docstring?\n \"\"\"\n\n _debug(\"S3Importer.commit_items(%s, %s)\" % (upload_id, items))\n # Save the import items\n self._commit_import_job(upload_id, items)\n # Update the upload table\n # change the status to completed\n # record the summary details\n # delete the upload file\n result = self._update_upload_job(upload_id)\n # redirect to the start page (removes all vars)\n self._display_completed_job(result)\n redirect(URL(r=self.request, f=self.function, args=[\"import\"]))\n\n # -------------------------------------------------------------------------\n def delete_job(self, upload_id):\n \"\"\"\n Delete an uploaded file and the corresponding import job\n\n @param upload_id: the upload ID\n \"\"\"\n\n _debug(\"S3Importer.delete_job(%s)\" % (upload_id))\n\n db = current.db\n\n request = self.request\n resource = request.resource # use self.resource?\n response = current.response\n\n # Get the import job ID\n job_id = self.job_id\n\n # Delete the import job (if any)\n if job_id:\n result = resource.import_xml(None,\n id = None,\n tree = None,\n job_id = job_id,\n delete_job = True)\n # @todo: check result\n\n # now delete the upload entry\n query = (self.upload_table.id == upload_id)\n count = db(query).delete()\n # @todo: check that the record has been deleted\n\n # Now commit the changes\n db.commit()\n\n result = count\n\n # return to the main import screen\n # @todo: check result properly\n if result == False:\n response.warning = self.messages.no_job_to_delete\n else:\n response.flash = self.messages.job_deleted\n\n # redirect to the start page (remove all vars)\n self.next = self.request.url(vars=dict())\n return\n\n # ========================================================================\n # Utility methods\n # ========================================================================\n def _upload_form(self, r, **attr):\n \"\"\"\n Create and process the upload form, including csv_extra_fields\n \"\"\"\n\n EXTRA_FIELDS = \"csv_extra_fields\"\n TEMPLATE = \"csv_template\"\n REPLACE_OPTION = \"replace_option\"\n\n session = current.session\n response = current.response\n s3 = response.s3\n request = self.request\n table = self.upload_table\n\n formstyle = s3.crud.formstyle\n response.view = self._view(request, \"list_create.html\")\n\n if REPLACE_OPTION in attr:\n replace_option = attr[REPLACE_OPTION]\n if replace_option is not None:\n table.replace_option.readable = True\n table.replace_option.writable = True\n table.replace_option.label = replace_option\n\n fields = [f for f in table if f.readable or f.writable and not f.compute]\n if EXTRA_FIELDS in attr:\n extra_fields = attr[EXTRA_FIELDS]\n if extra_fields is not None:\n fields.extend([f[\"field\"] for f in extra_fields if \"field\" in f])\n self.csv_extra_fields = extra_fields\n labels, required = s3_mark_required(fields)\n if required:\n s3.has_required = True\n\n form = SQLFORM.factory(table_name=self.UPLOAD_TABLE_NAME,\n labels=labels,\n formstyle=formstyle,\n 
upload = os.path.join(request.folder, \"uploads\", \"imports\"),\n separator = \"\",\n message=self.messages.file_uploaded,\n *fields)\n\n args = [\"s3csv\"]\n template = attr.get(TEMPLATE, True)\n if template is True:\n args.extend([self.controller, \"%s.csv\" % self.function])\n elif isinstance(template, basestring):\n args.extend([self.controller, \"%s.csv\" % template])\n elif isinstance(template, (tuple, list)):\n args.extend(template[:-1])\n args.append(\"%s.csv\" % template[-1])\n else:\n template = None\n if template is not None:\n url = URL(r=request, c=\"static\", f=\"formats\", args=args)\n try:\n # only add the download link if the template can be opened\n open(\"%s/../%s\" % (r.folder, url))\n form[0][0].insert(0, TR(TD(A(self.messages.download_template,\n _href=url)),\n _id=\"template__row\"))\n except:\n pass\n\n if form.accepts(r.post_vars, session,\n formname=\"upload_form\"):\n upload_id = table.insert(**table._filter_fields(form.vars))\n if self.csv_extra_fields:\n self.csv_extra_data = Storage()\n for f in self.csv_extra_fields:\n label = f.get(\"label\", None)\n if not label:\n continue\n field = f.get(\"field\", None)\n value = f.get(\"value\", None)\n if field:\n if field.name in form.vars:\n data = form.vars[field.name]\n else:\n data = field.default\n value = data\n requires = field.requires\n if not isinstance(requires, (list, tuple)):\n requires = [requires]\n if requires:\n requires = requires[0]\n if isinstance(requires, IS_EMPTY_OR):\n requires = requires.other\n try:\n options = requires.options()\n except:\n pass\n else:\n for k, v in options:\n if k == str(data):\n value = v\n elif value is None:\n continue\n self.csv_extra_data[label] = value\n s3.no_formats = True\n return form\n\n # -------------------------------------------------------------------------\n def _create_upload_dataTable(self):\n \"\"\"\n List of previous Import jobs\n \"\"\"\n\n db = current.db\n request = self.request\n controller = self.controller\n function = self.function\n s3 = current.response.s3\n\n table = self.upload_table\n s3.filter = (table.controller == controller) & \\\n (table.function == function)\n fields = [\"id\",\n \"filename\",\n \"created_on\",\n \"user_id\",\n \"replace_option\",\n \"status\"]\n\n self._use_upload_table()\n\n # Hide the list of prior uploads for now\n #output = self._dataTable(fields, sort_by = [[2,\"desc\"]])\n output = dict()\n\n self._use_controller_table()\n\n if request.representation == \"aadata\":\n return output\n\n query = (table.status != 3) # Status of Pending or in-Error\n rows = db(query).select(table.id)\n restrictOpen = [str(row.id) for row in rows]\n query = (table.status == 3) # Status of Completed\n rows = db(query).select(table.id)\n restrictView = [str(row.id) for row in rows]\n\n s3.actions = [\n dict(label=str(self.messages.open_btn),\n _class=\"action-btn\",\n url=URL(r=request,\n c=controller,\n f=function,\n args=[\"import\"],\n vars={\"job\":\"[id]\"}),\n restrict = restrictOpen\n\n ),\n dict(label=str(self.messages.view_btn),\n _class=\"action-btn\",\n url=URL(r=request,\n c=controller,\n f=function,\n args=[\"import\"],\n vars={\"job\":\"[id]\"}),\n restrict = restrictView\n ),\n dict(label=str(self.messages.delete_btn),\n _class=\"delete-btn\",\n url=URL(r=request,\n c=controller,\n f=function,\n args=[\"import\"],\n vars={\"job\":\"[id]\",\n \"delete\":\"True\"\n }\n )\n ),\n ]\n # Display an Error if no job is attached with this record\n query = (table.status == 1) # Pending\n rows = db(query).select(table.id)\n 
s3.dataTableStyleAlert = [str(row.id) for row in rows]\n query = (table.status == 2) # in error\n rows = db(query).select(table.id)\n s3.dataTableStyleWarning = [str(row.id) for row in rows]\n\n return output\n\n # -------------------------------------------------------------------------\n def _create_import_item_dataTable(self, upload_id, job_id):\n \"\"\"\n @todo: docstring?\n \"\"\"\n\n s3 = current.response.s3\n\n represent = {\"element\" : self._item_element_represent}\n self._use_import_item_table(job_id)\n\n # Add a filter to the dataTable query\n s3.filter = (self.table.job_id == job_id) & \\\n (self.table.tablename == self.controller_tablename)\n\n # Get a list of the records that have an error of None\n query = (self.table.job_id == job_id) & \\\n (self.table.tablename == self.controller_tablename)\n rows = current.db(query).select(self.table.id, self.table.error)\n select_list = []\n error_list = []\n for row in rows:\n if row.error:\n error_list.append(str(row.id))\n else:\n select_list.append(\"%s\" % row.id)\n select_id = \",\".join(select_list)\n\n output = self._dataTable([\"id\", \"element\", \"error\"],\n sort_by = [[1, \"asc\"]],\n represent=represent)\n\n self._use_controller_table()\n\n if self.request.representation == \"aadata\":\n return output\n\n # Highlight rows in error in red\n s3.dataTableStyleWarning = error_list\n\n s3.dataTableSelectable = True\n s3.dataTablePostMethod = True\n table = output[\"items\"]\n job = INPUT(_type=\"hidden\", _id=\"importUploadID\", _name=\"job\",\n _value=\"%s\" % upload_id)\n mode = INPUT(_type=\"hidden\", _id=\"importMode\", _name=\"mode\",\n _value=\"Inclusive\")\n # only select the rows with no errors\n selected = INPUT(_type=\"hidden\", _id=\"importSelected\",\n _name=\"selected\", _value=\"[%s]\" % select_id)\n form = FORM(table, job, mode, selected)\n output[\"items\"] = form\n s3.dataTableSelectSubmitURL = \"import?job=%s&\" % upload_id\n s3.actions = [\n dict(label= str(self.messages.item_show_details),\n _class=\"action-btn\",\n _jqclick=\"$('.importItem.'+id).toggle();\",\n ),\n ]\n return output\n\n # -------------------------------------------------------------------------\n def _generate_import_job(self,\n upload_id,\n openFile,\n fileFormat,\n stylesheet=None,\n commit_job=False):\n \"\"\"\n This will take a s3_import_upload record and\n generate the importJob\n\n @param uploadFilename: The name of the uploaded file\n\n @todo: complete parameter descriptions\n \"\"\"\n\n _debug(\"S3Importer._generate_import_job(%s, %s, %s, %s)\" % (upload_id,\n openFile,\n fileFormat,\n stylesheet\n )\n )\n\n db = current.db\n request = self.request\n resource = request.resource\n\n # ---------------------------------------------------------------------\n # CSV\n if fileFormat == \"csv\" or fileFormat == \"comma-separated-values\":\n\n fmt = \"csv\"\n src = openFile\n\n # ---------------------------------------------------------------------\n # XML\n # @todo: implement\n #elif fileFormat == \"xml\":\n\n # ---------------------------------------------------------------------\n # S3JSON\n # @todo: implement\n #elif fileFormat == \"s3json\":\n\n # ---------------------------------------------------------------------\n # PDF\n # @todo: implement\n #elif fileFormat == \"pdf\":\n\n # ---------------------------------------------------------------------\n # Unsupported Format\n else:\n msg = self.messages.unsupported_file_type % fileFormat\n self.error = msg\n _debug(msg)\n return None\n\n # Get the stylesheet\n if stylesheet == None:\n 
            stylesheet = self._get_stylesheet()
        if stylesheet is None:
            return None

        # before calling import tree ensure the db.table is the controller_table
        self.table = self.controller_table
        self.tablename = self.controller_tablename

        # Pass stylesheet arguments
        args = Storage()
        mode = request.get_vars.get("xsltmode", None)
        if mode is not None:
            args.update(mode=mode)

        # Generate the import job
        resource.import_xml(src,
                            format=fmt,
                            extra_data=self.csv_extra_data,
                            stylesheet=stylesheet,
                            ignore_errors = True,
                            commit_job = commit_job,
                            **args)

        job = resource.job
        if job is None:
            if resource.error:
                # Error
                self.error = resource.error
                return None
            else:
                # Nothing to import
                self.warning = self.messages.no_records_to_import
                return None
        else:
            # Job created
            job_id = job.job_id
            errors = current.xml.collect_errors(job)
            if errors:
                current.response.s3.error_report = errors
            query = (self.upload_table.id == upload_id)
            result = db(query).update(job_id=job_id)
            # @todo: add check that result == 1, if not we are in error
            # Now commit the changes
            db.commit()

        self.job_id = job_id
        return True

    # -------------------------------------------------------------------------
    def _get_stylesheet(self, file_format="csv"):
        """
            Get the stylesheet for transformation of the import

            @param file_format: the import source file format
        """

        if file_format == "csv":
            xslt_path = os.path.join(self.xslt_path, "s3csv")
        else:
            # Non-CSV formats use the generic import transform directly
            xslt_path = os.path.join(self.xslt_path, file_format, "import.xsl")
            return xslt_path

        # Use the "csv_stylesheet" parameter to override the CSV stylesheet
        # subpath and filename, e.g.
        #       s3_rest_controller(module, resourcename,
        #                          csv_stylesheet=("inv", "inv_item.xsl"))
        if self.csv_stylesheet:
            if isinstance(self.csv_stylesheet, (tuple, list)):
                stylesheet = os.path.join(xslt_path,
                                          *self.csv_stylesheet)
            else:
                stylesheet = os.path.join(xslt_path,
                                          self.controller,
                                          self.csv_stylesheet)
        else:
            xslt_filename = "%s.%s" % (self.function, self.xslt_extension)
            stylesheet = os.path.join(xslt_path,
                                      self.controller,
                                      xslt_filename)

        if not os.path.exists(stylesheet):
            msg = self.messages.stylesheet_not_found % stylesheet
            self.error = msg
            _debug(msg)
            return None

        return stylesheet
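
    # -------------------------------------------------------------------------
    # Illustration for _get_stylesheet() (paths are hypothetical): with the
    # defaults, controller="inv" and function="inv_item" resolve to
    #     <xslt_path>/s3csv/inv/inv_item.xsl
    # whereas a non-CSV format short-circuits to the generic transform at
    #     <xslt_path>/<format>/import.xsl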
    # -------------------------------------------------------------------------
    def _commit_import_job(self, upload_id, items):
        """
            This will save all of the selected import items

            @todo: parameter descriptions?
        """

        _debug("S3Importer._commit_import_job(%s, %s)" % (upload_id, items))

        db = current.db
        resource = self.request.resource

        # Load the items from the s3_import_item table
        self.importDetails = dict()

        table = self.upload_table
        query = (table.id == upload_id)
        row = db(query).select(table.job_id,
                               table.replace_option,
                               limitby=(0, 1)).first()
        if row is None:
            return False
        else:
            job_id = row.job_id
            current.response.s3.import_replace = row.replace_option

        itemTable = S3ImportJob.define_item_table()

        if itemTable is not None:
            #****************************************************************
            # EXPERIMENTAL
            # This doesn't delete related items
            # but import_tree will tidy it up later
            #****************************************************************
            # get all the items selected for import
            rows = self._get_all_items(upload_id, as_string=True)

            # loop through each row and delete the items not required
            self._store_import_details(job_id, "preDelete")
            for id in rows:
                if str(id) not in items:
                    # @todo: replace with a helper method from the API
                    _debug("Deleting item.id = %s" % id)
                    query = (itemTable.id == id)
                    db(query).delete()

            #****************************************************************
            # EXPERIMENTAL
            #****************************************************************

        # set up the table we will import data into
        self.table = self.controller_table
        self.tablename = self.controller_tablename

        self._store_import_details(job_id, "preImportTree")

        # Now commit the remaining items
        msg = resource.import_xml(None,
                                  job_id = job_id,
                                  ignore_errors = True)
        return resource.error is None

    # -------------------------------------------------------------------------
    def _store_import_details(self, job_id, key):
        """
            This will store the details from an importJob

            @todo: parameter descriptions?
        """

        _debug("S3Importer._store_import_details(%s, %s)" % (job_id, key))

        itemTable = S3ImportJob.define_item_table()

        query = (itemTable.job_id == job_id) & \
                (itemTable.tablename == self.controller_tablename)
        rows = current.db(query).select(itemTable.data, itemTable.error)
        items = [dict(data=row.data, error=row.error) for row in rows]

        self.importDetails[key] = items

    # -------------------------------------------------------------------------
    def _update_upload_job(self, upload_id):
        """
            This will record the results from the import, and change the
            status of the upload job

            @todo: parameter descriptions?
            @todo: report errors in referenced records, too
        """

        _debug("S3Importer._update_upload_job(%s)" % (upload_id))

        request = self.request
        resource = request.resource
        db = current.db

        totalPreDelete = len(self.importDetails["preDelete"])
        totalPreImport = len(self.importDetails["preImportTree"])
        totalIgnored = totalPreDelete - totalPreImport

        if resource.error_tree is None:
            totalErrors = 0
        else:
            totalErrors = len(resource.error_tree.findall(
                              "resource[@name='%s']" % resource.tablename))

        totalRecords = totalPreImport - totalErrors
        if totalRecords < 0:
            totalRecords = 0

        query = (self.upload_table.id == upload_id)
        result = db(query).update(summary_added=totalRecords,
                                  summary_error=totalErrors,
                                  summary_ignored = totalIgnored,
                                  status = 3)

        # Now commit the changes
        db.commit()
        return (totalRecords, totalErrors, totalIgnored)

    # -------------------------------------------------------------------------
    def _display_completed_job(self, totals, timestmp=None):
        """
            Generate a summary flash message for a completed import job

            @param totals: the job totals as tuple
                           (total imported, total errors, total ignored)
            @param timestmp: the timestamp of the completion
        """

        session = current.session

        msg = "%s - %s - %s" % \
              (self.messages.commit_total_records_imported,
               self.messages.commit_total_errors,
               self.messages.commit_total_records_ignored)
        msg = msg % totals

        if timestmp is not None:
            session.flash = self.messages.job_completed % \
                            (self.date_represent(timestmp), msg)
        elif totals[1] != 0:
            session.error = msg
        elif totals[2] != 0:
            session.warning = msg
        else:
            session.flash = msg
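
    # -------------------------------------------------------------------------
    # Example for _display_completed_job() (message texts depend on the
    # configured templates): totals=(120, 3, 2) renders a message along the
    # lines of "120 records imported - 3 errors - 2 ignored"; a non-zero
    # error count goes to session.error, a non-zero ignored count to
    # session.warning, otherwise to session.flash.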
    # -------------------------------------------------------------------------
    def _dataTable(self,
                   list_fields = [],
                   sort_by = [[1, "asc"]],
                   represent={},
                   ):
        """
            Method to get the data for the dataTable
            This can be either a raw html representation or
            an ajax call update
            Additional data will be cached to limit calls back to the server

            @param list_fields: list of field names
            @param sort_by: list of sort by columns
            @param represent: a dict of field callback functions used
                              to change how the data will be displayed

            @return: a dict()
               In html representations this will be a table of the data
               plus the sortby instructions
               In ajax this will be a json response

               In addition the following values will be made available:
               totalRecords         Number of records in the filtered data set
               totalDisplayRecords  Number of records to display
               start                Start point in the ordered data set
               limit                Number of records in the ordered set
               NOTE: limit - totalDisplayRecords = total cached
        """

        # ********************************************************************
        # Common tasks
        # ********************************************************************
        db = current.db
        session = current.session
        request = self.request
        response = current.response
        resource = self.resource
        s3 = response.s3
        representation = request.representation
        table = self.table
        tablename = self.tablename
        vars = request.get_vars
        output = dict()

        # Check permission to read this table
        authorised = self.permit("read", tablename)
        if not authorised:
            request.unauthorised()

        # List of fields to select from
        # fields is a list of Field objects
        # list_field is a string list of field names
        if list_fields == []:
            fields = resource.readable_fields()
        else:
            fields = [table[f] for f in list_fields if f in table.fields]
        if not fields:
            fields = []

        # attach any represent callbacks
        for f in fields:
            if f.name in represent:
                f.represent = represent[f.name]

        # Make sure that we have the table id as the first column
        if fields[0].name != table.fields[0]:
            fields.insert(0, table[table.fields[0]])

        list_fields = [f.name for f in fields]

        # Filter
        if s3.filter is not None:
            self.resource.add_filter(s3.filter)

        # ********************************************************************
        # ajax call
        # ********************************************************************
        if representation == "aadata":
            start = vars.get("iDisplayStart", None)
            limit = vars.get("iDisplayLength", None)
            if limit is not None:
                try:
                    start = int(start)
                    limit = int(limit)
                except ValueError:
                    start = None
                    limit = None # use default
            else:
                start = None # use default
            # Using the sort variables sent from dataTables
            orderby = None
            if vars.iSortingCols:
                orderby = self.ssp_orderby(resource, list_fields)

            # Echo
            sEcho = int(vars.sEcho or 0)

            # Get the list
            items = resource.sqltable(fields=list_fields,
                                      start=start,
                                      limit=limit,
                                      orderby=orderby,
                                      download_url=self.download_url,
                                      as_page=True) or []
            # Ugly hack to change any occurrence of [id] with the true id
            # Needed because the represent doesn't know the id
            for i in range(len(items)):
                id = items[i][0]
                for j in range(len(items[i])):
                    new = items[i][j].replace("[id]", id)
                    items[i][j] = new
            totalrows = self.resource.count()
            result = dict(sEcho = sEcho,
                          iTotalRecords = totalrows,
                          iTotalDisplayRecords = totalrows,
                          aaData = items)

            output = jsons(result)

        # ********************************************************************
        # html 'initial' call
        # ********************************************************************
        else: # catch all
            start = 0
            limit = 1
            # Sort by
            vars["iSortingCols"] = len(sort_by)
            # generate the dataTables.js variables for sorting
            index = 0
            for col in sort_by:
                colName = "iSortCol_%s" % str(index)
                colValue = col[0]
                dirnName = "sSortDir_%s" % str(index)
                if len(col) > 1:
                    dirnValue = col[1]
                else:
                    dirnValue = "asc"
                vars[colName] = colValue
                vars[dirnName] = dirnValue
                index += 1
            # Now using these sort variables generate the order by statement
            orderby = self.ssp_orderby(resource, list_fields)

            del vars["iSortingCols"]
            index = 0
            for col in sort_by:
                del vars["iSortCol_%s" % str(index)]
                del vars["sSortDir_%s" % str(index)]
                index += 1

            # Get the first row for a quick up load
            items = resource.sqltable(fields=list_fields,
                                      start=start,
                                      limit=1,
                                      orderby=orderby,
                                      download_url=self.download_url)
            totalrows = resource.count()
            if items:
                if totalrows:
                    if s3.dataTable_iDisplayLength:
                        limit = 2 * s3.dataTable_iDisplayLength
                    else:
                        limit = 50
                # Add a test on the first call here:
                # Now get the limit rows for ajax style update of table
                sqltable = resource.sqltable(fields=list_fields,
                                             start=start,
                                             limit=limit,
                                             orderby=orderby,
                                             download_url=self.download_url,
                                             as_page=True)
                aadata = dict(aaData = sqltable or [])
                # Ugly hack to change any occurrence of [id] with the true id
                # Needed because the represent doesn't know the id
                for i in range(len(aadata["aaData"])):
                    id = aadata["aaData"][i][0]
                    for j in range(len(aadata["aaData"][i])):
                        new = aadata["aaData"][i][j].replace("[id]", id)
                        aadata["aaData"][i][j] = new

                aadata.update(iTotalRecords=totalrows,
                              iTotalDisplayRecords=totalrows)
                response.aadata = jsons(aadata)
                s3.start = 0
                s3.limit = limit
            else: # No items in database
                # s3import tables don't have a delete field but kept for the record
                if "deleted" in table:
                    available_records = db(table.deleted == False)
                else:
                    available_records = db(table.id > 0)
                # check for any records on an unfiltered table
                if available_records.select(table.id,
                                            limitby=(0, 1)).first():
                    items = self.crud_string(tablename, "msg_no_match")
                else:
                    items = self.crud_string(tablename, "msg_list_empty")

            output.update(items=items, sortby=sort_by)
            # Value to be added to the dataTable ajax call
            s3.dataTable_Method = "import"

        return output

    # -------------------------------------------------------------------------
    def _item_element_represent(self, value):
        """
            Represent the element in an import item for dataTable display

            @param value: the string containing the element
        """

        T = current.T
        db = current.db

        value = S3XML.xml_decode(value)
        try:
            element = etree.fromstring(value)
        except:
            # XMLSyntaxError: return the element as-is
            return DIV(value)

        tablename = element.get("name")
        table = current.db[tablename]

        output = DIV()
        details = TABLE(_class="importItem [id]")
        header, rows = self._add_item_details(element.findall("data"), table)
        if header is not None:
            output.append(header)
        # Add components, if present
        components = element.findall("resource")
        for component in components:
            ctablename = component.get("name")
            ctable = db[ctablename]
            self._add_item_details(component.findall("data"), ctable,
                                   details=rows, prefix=True)
        if rows:
            details.append(TBODY(rows))
        # Add error messages, if present
        errors = current.xml.collect_errors(element)
        if errors:
            details.append(TFOOT(TR(TH("%s:" % T("Errors")),
                                    TD(UL([LI(e) for e in errors])))))
        if rows == [] and components == []:
            # At this stage we don't have anything to display, so
            # see if we can find something to show. This could be the case
            # when a table being imported is a resolver for a many-to-many
            # relationship
            refdetail = TABLE(_class="importItem [id]")
            references = element.findall("reference")
            for reference in references:
                tuid = reference.get("tuid")
                resource = reference.get("resource")
                refdetail.append(TR(TD(resource), TD(tuid)))
            output.append(refdetail)
        else:
            output.append(details)
        return str(output)

    # -------------------------------------------------------------------------
    @staticmethod
    def _add_item_details(data, table, details=None, prefix=False):
        """
            Add details of the item element

            @param data: the list of data elements in the item element
            @param table: the table for the data
            @param details: the existing details rows list (to append to)
        """

        tablename = table._tablename
        if details is None:
            details = []
        first = None
        firstString = None
        header = None
        for child in data:
            f = child.get("field", None)
            if f not in table.fields:
                continue
            elif f == "wkt":
                # Skip bulky WKT fields
                continue
            field = table[f]
            ftype = str(field.type)
            value = child.get("value", None)
            if not value:
                value = child.text
            try:
                value = S3Importer._decode_data(field, value)
            except:
                pass
            if value:
                value = S3XML.xml_encode(unicode(value))
            else:
                value = ""
            if f is not None and value is not None:
                headerText = P(B("%s: " % f), value)
                if not first:
                    first = headerText
                if ftype == "string" and not firstString:
                    firstString = headerText
                if f == "name":
                    header = headerText
                if prefix:
                    details.append(TR(TH("%s.%s:" % (tablename, f)), TD(value)))
                else:
                    details.append(TR(TH("%s:" % f), TD(value)))
        if not header:
            if firstString:
                header = firstString
            else:
                header = first
        return (header, details)

    # -------------------------------------------------------------------------
    @staticmethod
    def _decode_data(field, value):
        """
            Try to decode string data into their original type

            @param field: the Field instance
            @param value: the stringified value

            @todo: replace this by ordinary decoder
        """

        if field.type == "string" or \
           field.type == "password" or \
           field.type == "upload" or \
           field.type == "text":
            return value
        elif field.type == "integer" or field.type == "id":
            return int(value)
        elif field.type == "double" or field.type == "decimal":
            return float(value)
        elif field.type == 'boolean':
            if value and not str(value)[:1].upper() in ["F", "0"]:
                return "T"
            else:
                return "F"
        elif field.type == "date":
            return value # @todo fix this to get a date
        elif field.type == "time":
            return value # @todo fix this to get a time
        elif field.type == "datetime":
            return value # @todo fix this to get a datetime
        else:
            return value

    # -------------------------------------------------------------------------
    @staticmethod
    def date_represent(date_obj):
        """
            Represent a datetime object as string

            @param date_obj: the datetime object

            @todo: replace by S3DateTime method?
        """

        return date_obj.strftime("%d %B %Y, %I:%M%p")
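
    # -------------------------------------------------------------------------
    # Example for _process_item_list() below (hypothetical request vars):
    # with vars = {"mode": "Inclusive", "selected": "4,7"} it returns
    # ["4", "7"]; with mode="Exclusive" it returns all item IDs of the
    # job except "4" and "7".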
\"\"\"\n\n items = None\n if \"mode\" in vars:\n mode = vars[\"mode\"]\n if \"selected\" in vars:\n selected = vars[\"selected\"].split(\",\")\n else:\n selected = []\n if mode == \"Inclusive\":\n items = selected\n elif mode == \"Exclusive\":\n all_items = self._get_all_items(upload_id, as_string=True)\n items = [i for i in all_items if i not in selected]\n return items\n\n # -------------------------------------------------------------------------\n def _get_all_items(self, upload_id, as_string=False):\n \"\"\" Get a list of the record IDs of all import items for\n the the given upload ID\n\n @param upload_id: the upload ID\n @param as_string: represent each ID as string\n \"\"\"\n\n item_table = S3ImportJob.define_item_table()\n upload_table = self.upload_table\n\n query = (upload_table.id == upload_id) & \\\n (item_table.job_id == upload_table.job_id) & \\\n (item_table.tablename == self.controller_tablename)\n\n rows = current.db(query).select(item_table.id)\n if as_string:\n items = [str(row.id) for row in rows]\n else:\n items = [row.id for row in rows]\n\n return items\n\n # -------------------------------------------------------------------------\n def _use_upload_table(self):\n \"\"\"\n Set the resource and the table to being s3_import_upload\n \"\"\"\n\n if self.upload_resource == None:\n from s3resource import S3Resource\n (prefix, name) = self.UPLOAD_TABLE_NAME.split(\"_\",1)\n self.upload_resource = S3Resource(prefix, name)\n self.resource = self.upload_resource\n self.table = self.upload_table\n self.tablename = self.upload_tablename\n\n # -------------------------------------------------------------------------\n def _use_controller_table(self):\n \"\"\"\n Set the resource and the table to be the imported resource\n \"\"\"\n\n self.resource = self.controller_resource\n self.table = self.controller_table\n self.tablename = self.controller_tablename\n\n # -------------------------------------------------------------------------\n def _use_import_item_table(self, job_id):\n \"\"\"\n Set the resource and the table to being s3_import_item \n \"\"\"\n\n if self.item_resource == None:\n from s3resource import S3Resource\n (prefix, name) = S3ImportJob.ITEM_TABLE_NAME.split(\"_\",1)\n self.item_resource = S3Resource(prefix, name)\n self.resource = self.item_resource\n self.tablename = S3ImportJob.ITEM_TABLE_NAME\n self.table = S3ImportJob.define_item_table()\n\n # -------------------------------------------------------------------------\n def __define_table(self):\n \"\"\" Configures the upload table \"\"\"\n\n _debug(\"S3Importer.__define_table()\")\n\n T = current.T\n db = current.db\n request = current.request\n\n self.upload_tablename = self.UPLOAD_TABLE_NAME\n\n import_upload_status = {\n 1: T(\"Pending\"),\n 2: T(\"In error\"),\n 3: T(\"Completed\"),\n }\n\n def user_name_represent(id):\n # @todo: use s3_present_user?\n\n rep_str = \"-\"\n table = db.auth_user\n query = (table.id == id)\n row = db(query).select(table.first_name,\n table.last_name,\n limitby=(0, 1)).first()\n if row:\n rep_str = \"%s %s\" % (row.first_name, row.last_name)\n return rep_str\n\n def status_represent(index):\n if index == None:\n return \"Unknown\" # @todo: use messages (internationalize)\n else:\n return import_upload_status[index]\n\n now = request.utcnow\n table = self.define_upload_table()\n table.file.upload_folder = os.path.join(request.folder,\n \"uploads\",\n #\"imports\"\n )\n table.file.comment = DIV(_class=\"tooltip\",\n _title=\"%s|%s\" %\n (self.messages.import_file,\n 
                                         self.messages.import_file_comment))
        table.file.label = self.messages.import_file
        table.status.requires = IS_IN_SET(import_upload_status, zero=None)
        table.status.represent = status_represent
        table.user_id.label = self.messages.user_name
        table.user_id.represent = user_name_represent
        table.created_on.default = now
        table.created_on.represent = self.date_represent
        table.modified_on.default = now
        table.modified_on.update = now
        table.modified_on.represent = self.date_represent

        table.replace_option.label = T("Replace")

        self.upload_table = db[self.UPLOAD_TABLE_NAME]

    # -------------------------------------------------------------------------
    @classmethod
    def define_upload_table(cls):
        """ Defines the upload table """

        db = current.db
        uploadfolder = os.path.join(current.request.folder,
                                    "uploads",
                                    )
        if cls.UPLOAD_TABLE_NAME not in db:
            upload_table = db.define_table(cls.UPLOAD_TABLE_NAME,
                    Field("controller",
                          readable=False,
                          writable=False),
                    Field("function",
                          readable=False,
                          writable=False),
                    Field("file", "upload",
                          uploadfolder=os.path.join(current.request.folder,
                                                    "uploads", "imports"),
                          autodelete=True),
                    Field("filename",
                          readable=False,
                          writable=False),
                    Field("status", "integer",
                          default=1,
                          readable=False,
                          writable=False),
                    Field("extra_data",
                          readable=False,
                          writable=False),
                    Field("replace_option", "boolean",
                          default=False,
                          readable=False,
                          writable=False),
                    Field("job_id",
                          length=128,
                          readable=False,
                          writable=False),
                    Field("user_id", "integer",
                          readable=False,
                          writable=False),
                    Field("created_on", "datetime",
                          readable=False,
                          writable=False),
                    Field("modified_on", "datetime",
                          readable=False,
                          writable=False),
                    Field("summary_added", "integer",
                          readable=False,
                          writable=False),
                    Field("summary_error", "integer",
                          readable=False,
                          writable=False),
                    Field("summary_ignored", "integer",
                          readable=False,
                          writable=False),
                    Field("completed_details", "text",
                          readable=False,
                          writable=False))
        else:
            upload_table = db[cls.UPLOAD_TABLE_NAME]

        return upload_table
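
# The classes below implement the actual import pipeline: S3ImportJob parses
# an element tree into S3ImportItem instances, resolves the references
# between the items, and commits them in dependency order (see
# S3ImportJob.commit below).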
# =============================================================================
class S3ImportItem(object):
    """ Class representing an import item (=a single record) """

    METHOD = Storage(
        CREATE="create",
        UPDATE="update",
        DELETE="delete"
    )

    POLICY = Storage(
        THIS="THIS",     # keep local instance
        OTHER="OTHER",   # update unconditionally
        NEWER="NEWER",   # update if import is newer
        MASTER="MASTER"  # update if import is master
    )

    # -------------------------------------------------------------------------
    def __init__(self, job):
        """
            Constructor

            @param job: the import job this item belongs to
        """

        self.job = job
        self.ERROR = current.manager.ERROR

        # Locking and error handling
        self.lock = False
        self.error = None

        # Identification
        import uuid
        self.item_id = uuid.uuid4() # unique ID for this item
        self.id = None
        self.uid = None

        # Data elements
        self.table = None
        self.tablename = None
        self.element = None
        self.data = None
        self.original = None
        self.components = []
        self.references = []
        self.load_components = []
        self.load_references = []
        self.parent = None
        self.skip = False

        # Conflict handling
        self.mci = 2
        self.mtime = datetime.utcnow()
        self.modified = True
        self.conflict = False

        # Allowed import methods
        self.strategy = job.strategy
        # Update and conflict resolution policies
        self.update_policy = job.update_policy
        self.conflict_policy = job.conflict_policy

        # Actual import method
        self.method = None

        self.onvalidation = None
        self.onaccept = None

        # Item import status flags
        self.accepted = None
        self.permitted = False
        self.committed = False

        # Writeback hook for circular references:
        # Items which need a second write to update references
        self.update = []

    # -------------------------------------------------------------------------
    def __repr__(self):
        """ Helper method for debugging """

        _str = "<S3ImportItem %s (item_id=%s uid=%s id=%s error=%s data=%s)>" % \
               (self.table, self.item_id, self.uid, self.id, self.error, self.data)
        return _str

    # -------------------------------------------------------------------------
    def parse(self,
              element,
              original=None,
              table=None,
              tree=None,
              files=None):
        """
            Read data from an element

            @param element: the element
            @param table: the DB table
            @param tree: the import tree
            @param files: uploaded files

            @returns: True if successful, False if not (sets self.error)
        """

        db = current.db
        xml = current.xml
        manager = current.manager
        validate = manager.validate
        s3db = current.s3db

        self.element = element
        if table is None:
            tablename = element.get(xml.ATTRIBUTE.name, None)
            try:
                table = s3db[tablename]
            except:
                self.error = self.ERROR.BAD_RESOURCE
                element.set(xml.ATTRIBUTE.error, self.error)
                return False

        self.table = table
        self.tablename = table._tablename

        if original is None:
            original = manager.original(table, element)
        data = xml.record(table, element,
                          files=files,
                          original=original,
                          validate=validate)

        if data is None:
            self.error = self.ERROR.VALIDATION_ERROR
            self.accepted = False
            if not element.get(xml.ATTRIBUTE.error, False):
                element.set(xml.ATTRIBUTE.error, str(self.error))
            return False

        self.data = data

        if original is not None:
            self.original = original
            self.id = original[table._id.name]
            if xml.UID in original:
                self.uid = original[xml.UID]
                self.data.update({xml.UID:self.uid})
        elif xml.UID in data:
            self.uid = data[xml.UID]
        if xml.MTIME in data:
            self.mtime = data[xml.MTIME]
        if xml.MCI in data:
            self.mci = data[xml.MCI]

        _debug("New item: %s" % self)
        return True

    # -------------------------------------------------------------------------
    def deduplicate(self):

        RESOLVER = "deduplicate"

        if self.id:
            return

        table = self.table

        if table is None:
            return
        if self.original is not None:
            original = self.original
        else:
            original = current.manager.original(table, self.data)

        if original is not None:
            self.original = original
            self.id = original[table._id.name]
            UID = current.xml.UID
            if UID in original:
                self.uid = original[UID]
                self.data.update({UID:self.uid})
            self.method = self.METHOD.UPDATE
        else:
            resolve = current.s3db.get_config(self.tablename, RESOLVER)
            if self.data and resolve:
                resolve(self)

        return

    # -------------------------------------------------------------------------
    def authorize(self):
        """
            Authorize the import of this item, sets self.permitted
        """

        db = current.db
        manager = current.manager
        authorize = manager.permit

        self.permitted = False

        if not self.table:
            return False

        prefix = self.tablename.split("_", 1)[0]
        if prefix in manager.PROTECTED:
            return False

        if not authorize:
            self.permitted = True

        self.method = self.METHOD.CREATE
        if self.id:

            if self.data.deleted is True:
                self.method = self.METHOD.DELETE
                self.accepted = True

            else:
                if not self.original:
                    query = (self.table.id == self.id)
                    self.original = db(query).select(limitby=(0, 1)).first()
                if self.original:
                    self.method = self.METHOD.UPDATE

        if self.method == self.METHOD.CREATE:
            self.id = 0

        if authorize:
            self.permitted = authorize(self.method,
                                       self.tablename,
                                       record_id=self.id)

        return self.permitted

    # -------------------------------------------------------------------------
    def validate(self):
        """
            Validate this item (=record onvalidation), sets self.accepted
        """

        if self.accepted is not None:
            return self.accepted
        if self.data is None or not self.table:
            self.accepted = False
            return False

        form = Storage()
        form.method = self.method
        form.vars = self.data
        if self.id:
            form.vars.id = self.id
        form.errors = Storage()
        tablename = self.tablename
        key = "%s_onvalidation" % self.method
        s3db = current.s3db
        onvalidation = s3db.get_config(tablename, key,
                       s3db.get_config(tablename, "onvalidation"))
        if onvalidation:
            try:
                callback(onvalidation, form, tablename=tablename)
            except:
                pass # @todo: need a better handler here
        self.accepted = True
        if form.errors:
            error = current.xml.ATTRIBUTE.error
            for k in form.errors:
                e = self.element.findall("data[@field='%s']" % k)
                if not e:
                    e = self.element.findall("reference[@field='%s']" % k)
                if not e:
                    e = self.element
                    form.errors[k] = "[%s] %s" % (k, form.errors[k])
                else:
                    e = e[0]
                e.set(error,
                      str(form.errors[k]).decode("utf-8"))
            self.error = self.ERROR.VALIDATION_ERROR
            self.accepted = False
        return self.accepted

    # -------------------------------------------------------------------------
    def commit(self, ignore_errors=False):
        """
            Commit this item to the database

            @param ignore_errors: skip invalid components
                                  (still reports errors)
        """

        db = current.db
        s3db = current.s3db
        xml = current.xml
        manager = current.manager
        table = self.table

        # Check if already committed
        if self.committed:
            # already committed
            return True

        # If the parent item gets skipped, then skip this item as well
        if self.parent is not None and self.parent.skip:
            return True

        _debug("Committing item %s" % self)

        # Resolve references
        self._resolve_references()

        # Validate
        if not self.validate():
            _debug("Validation error: %s (%s)" %
                   (self.error,
                    xml.tostring(self.element, pretty_print=True)))
            self.skip = True
            return ignore_errors

        elif self.components:
            for component in self.components:
                if not component.validate():
                    if hasattr(component, "tablename"):
                        tn = component.tablename
                    else:
                        tn = None
                    _debug("Validation error, component=%s" % tn)
                    component.skip = True
                    # Skip this item on any component validation errors
                    # unless ignore_errors is True
                    if ignore_errors:
                        continue
                    else:
                        self.skip = True
                        return False

        # De-duplicate
        self.deduplicate()

        # Log this item
        if manager.log is not None:
            manager.log(self)

        # Authorize item
        if not self.authorize():
            _debug("Not authorized - skip")
            self.error = manager.ERROR.NOT_PERMITTED
            self.skip = True
            return ignore_errors

        _debug("Method: %s" % self.method)

        # Check if import method is allowed in strategy
        if not isinstance(self.strategy, (list, tuple)):
            self.strategy = [self.strategy]
        if self.method not in self.strategy:
            _debug("Method not in strategy - skip")
            self.error = manager.ERROR.NOT_PERMITTED
            self.skip = True
            return True

        this = self.original
        if not this and self.id and \
           self.method in (self.METHOD.UPDATE, self.METHOD.DELETE):
            query = (table.id == self.id)
            this = db(query).select(limitby=(0, 1)).first()
        this_mtime = None
        this_mci = 0
        if this:
            if xml.MTIME in table.fields:
                this_mtime = xml.as_utc(this[xml.MTIME])
            if xml.MCI in table.fields:
                this_mci = this[xml.MCI]
        self.mtime = xml.as_utc(self.mtime)

        # Conflict detection
        this_modified = True
        self.modified = True
        self.conflict = False
        last_sync = xml.as_utc(self.job.last_sync)
        if last_sync:
            if this_mtime and this_mtime < last_sync:
                this_modified = False
            if self.mtime and self.mtime < last_sync:
                self.modified = False
            if self.modified and this_modified:
                self.conflict = True

        if self.conflict and \
           self.method in (self.METHOD.UPDATE, self.METHOD.DELETE):
            _debug("Conflict: %s" % self)
            if self.job.onconflict:
                self.job.onconflict(self)

        if self.data is not None:
            data = Storage(self.data)
        else:
            data = Storage()

        # Update existing record
        if self.method == self.METHOD.UPDATE:

            if this:
                if "deleted" in this and this.deleted:
                    policy = self._get_update_policy(None)
                    if policy == self.POLICY.NEWER and \
                       this_mtime and this_mtime > self.mtime or \
                       policy == self.POLICY.MASTER and \
                       (this_mci == 0 or self.mci != 1):
                        self.skip = True
                        return True
                fields = data.keys()
                for f in fields:
                    if f not in this:
                        continue
                    if isinstance(this[f], datetime):
                        if xml.as_utc(data[f]) == xml.as_utc(this[f]):
                            del data[f]
                            continue
                    else:
                        if data[f] == this[f]:
                            del data[f]
                            continue
                    remove = False
                    policy = self._get_update_policy(f)
                    if policy == self.POLICY.THIS:
                        remove = True
                    elif policy == self.POLICY.NEWER:
                        if this_mtime and this_mtime > self.mtime:
                            remove = True
                    elif policy == self.POLICY.MASTER:
                        if this_mci == 0 or self.mci != 1:
                            remove = True
                    if remove:
                        del data[f]
                        self.data.update({f:this[f]})
                if "deleted" in this and this.deleted:
                    # Undelete re-imported records:
                    data.update(deleted=False)
                    if "deleted_fk" in table:
                        data.update(deleted_fk="")
                    if "created_by" in table:
                        data.update(created_by=table.created_by.default)
                    if "modified_by" in table:
                        data.update(modified_by=table.modified_by.default)

            if not self.skip and not self.conflict and \
               (len(data) or self.components or self.references):
                if self.uid and xml.UID in table:
                    data.update({xml.UID:self.uid})
                if xml.MTIME in table:
                    data.update({xml.MTIME: self.mtime})
                if xml.MCI in data:
                    # retain local MCI on updates
                    del data[xml.MCI]
                query = (table._id == self.id)
                try:
                    success = db(query).update(**dict(data))
                except:
                    self.error = sys.exc_info()[1]
                    self.skip = True
                    return False
                if success:
                    self.committed = True
            else:
                # Nothing to update
                self.committed = True

        # Create new record
        elif self.method == self.METHOD.CREATE:

            # Do not apply field policy to UID and MCI
            UID = xml.UID
            if UID in data:
                del data[UID]
            MCI = xml.MCI
            if MCI in data:
                del data[MCI]

            # iterate over a copy of the keys since fields may get deleted
            for f in data.keys():
                policy = self._get_update_policy(f)
                if policy == self.POLICY.MASTER and self.mci != 1:
                    del data[f]

            if len(data) or self.components or self.references:

                # Restore UID and MCI
                if self.uid and UID in table.fields:
                    data.update({UID:self.uid})
                if MCI in table.fields:
                    data.update({MCI:self.mci})

                # Insert the new record
                try:
                    success = table.insert(**dict(data))
                except:
                    self.error = sys.exc_info()[1]
                    self.skip = True
                    return False
                if success:
                    self.id = success
                    self.committed = True

            else:
                # Nothing to create
                self.skip = True
                return True

        # Delete local record
        elif self.method == self.METHOD.DELETE:

            if this:
                if this.deleted:
                    self.skip = True
                policy = self._get_update_policy(None)
                if policy == self.POLICY.THIS:
                    self.skip = True
                elif policy == self.POLICY.NEWER and \
                     (this_mtime and this_mtime > self.mtime):
                    self.skip = True
                elif policy == self.POLICY.MASTER and \
                     (this_mci == 0 or self.mci != 1):
                    self.skip = True
            else:
                self.skip = True

            if not self.skip and not self.conflict:

                prefix, name = self.tablename.split("_", 1)
                resource = manager.define_resource(prefix, name, id=self.id)

                ondelete = s3db.get_config(self.tablename, "ondelete")
                success = resource.delete(ondelete=ondelete,
                                          cascade=True)
                if resource.error:
                    self.error = resource.error
                    self.skip = True
                    return ignore_errors

            _debug("Success: %s, id=%s %sd" % (self.tablename, self.id,
                                               self.skip and "skippe" or \
                                               self.method))
            return True

        # Audit + onaccept on successful commits
        if self.committed:
            form = Storage()
            form.method = self.method
            form.vars = self.data
            tablename = self.tablename
            prefix, name = tablename.split("_", 1)
            if self.id:
                form.vars.id = self.id
            if manager.audit is not None:
                manager.audit(self.method, prefix, name,
                              form=form,
                              record=self.id,
                              representation="xml")
            s3db.update_super(table, form.vars)
            if self.method == self.METHOD.CREATE:
                current.auth.s3_set_record_owner(table, self.id)
            key = "%s_onaccept" % self.method
            onaccept = s3db.get_config(tablename, key,
                       s3db.get_config(tablename, "onaccept"))
            if onaccept:
                callback(onaccept, form, tablename=self.tablename)

        # Update referencing items
        if self.update and self.id:
            for u in self.update:
                item = u.get("item", None)
                if not item:
                    continue
                field = u.get("field", None)
                if isinstance(field, (list, tuple)):
                    pkey, fkey = field
                    query = table.id == self.id
                    row = db(query).select(table[pkey],
                                           limitby=(0, 1)).first()
                    if row:
                        item._update_reference(fkey, row[pkey])
                else:
                    item._update_reference(field, self.id)

        _debug("Success: %s, id=%s %sd" % (self.tablename, self.id,
                                           self.skip and "skippe" or \
                                           self.method))
        return True

    # -------------------------------------------------------------------------
    def _get_update_policy(self, field):
        """
            Get the update policy for a field (if the item will
            update an existing record)

            @param field: the name of the field
        """

        if isinstance(self.update_policy, dict):
            r = self.update_policy.get(field,
                self.update_policy.get("__default__", self.POLICY.THIS))
        else:
            r = self.update_policy
        if r not in self.POLICY.values():
            r = self.POLICY.THIS
        return r
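
    # Example for _get_update_policy() (hypothetical policy dict): with
    #     update_policy = {"name": POLICY.THIS, "__default__": POLICY.NEWER}
    # the field "name" keeps the local value (THIS) while all other fields
    # follow NEWER, i.e. update only if the import data is more recent.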
    # -------------------------------------------------------------------------
    def _resolve_references(self):
        """
            Resolve the references of this item (=look up all foreign
            keys from other items of the same job). If a foreign key
            is not yet available, it will be scheduled for later update.
        """

        if not self.table:
            return

        items = self.job.items
        for reference in self.references:

            item = None
            field = reference.field
            entry = reference.entry
            if not entry:
                continue

            # Resolve key tuples
            if isinstance(field, (list, tuple)):
                pkey, fkey = field
            else:
                pkey, fkey = ("id", field)

            # Resolve the key table name
            ktablename, key, multiple = s3_get_foreign_key(self.table[fkey])
            if not ktablename:
                if self.tablename == "auth_user" and \
                   fkey == "organisation_id":
                    ktablename = "org_organisation"
                else:
                    continue
            if entry.tablename:
                ktablename = entry.tablename
            try:
                ktable = current.s3db[ktablename]
            except:
                continue

            # Resolve the foreign key (value)
            fk = entry.id
            if entry.item_id:
                item = items[entry.item_id]
                if item:
                    fk = item.id
            if fk and pkey != "id":
                row = current.db(ktable._id == fk).select(ktable[pkey],
                                                          limitby=(0, 1)).first()
                if not row:
                    fk = None
                    continue
                else:
                    fk = row[pkey]

            # Update record data
            if fk:
                if multiple:
                    val = self.data.get(fkey, [])
                    if fk not in val:
                        val.append(fk)
                    self.data[fkey] = val
                else:
                    self.data[fkey] = fk
            else:
                if fkey in self.data and not multiple:
                    del self.data[fkey]
                if item:
                    item.update.append(dict(item=self, field=fkey))

    # -------------------------------------------------------------------------
    def _update_reference(self, field, value):
        """
            Helper method to update a foreign key in an already written
            record. Will be called by the referenced item after (and only
            if) it has been committed. This is only needed if the reference
            could not be resolved before commit due to circular references.

            @param field: the field name of the foreign key
            @param value: the value of the foreign key
        """

        if not value or not self.table:
            return
        db = current.db
        if self.id and self.permitted:
            fieldtype = str(self.table[field].type)
            if fieldtype.startswith("list:reference"):
                query = (self.table.id == self.id)
                record = db(query).select(self.table[field],
                                          limitby=(0, 1)).first()
                if record:
                    values = record[field]
                    if value not in values:
                        values.append(value)
                        db(self.table.id == self.id).update(**{field:values})
            else:
                db(self.table.id == self.id).update(**{field:value})

    # -------------------------------------------------------------------------
    def store(self, item_table=None):
        """
            Store this item in the DB
        """

        _debug("Storing item %s" % self)
        if item_table is None:
            return None
        db = current.db
        query = item_table.item_id == self.item_id
        row = db(query).select(item_table.id, limitby=(0, 1)).first()
        if row:
            record_id = row.id
        else:
            record_id = None
        record = Storage(job_id = self.job.job_id,
                         item_id = self.item_id,
                         tablename = self.tablename,
                         record_uid = self.uid,
                         error = self.error)
        if self.element is not None:
            element_str = current.xml.tostring(self.element,
                                               xml_declaration=False)
            record.update(element=element_str)
        if self.data is not None:
            data = Storage()
            for f in self.data.keys():
                table = self.table
                if f not in table.fields:
                    continue
                fieldtype = str(self.table[f].type)
                if fieldtype == "id" or s3_has_foreign_key(self.table[f]):
                    continue
                data.update({f:self.data[f]})
            data_str = cPickle.dumps(data)
            record.update(data=data_str)
        ritems = []
        for reference in self.references:
            field = reference.field
            entry = reference.entry
            store_entry = None
            if entry:
                if entry.item_id is not None:
                    store_entry = dict(field=field,
                                       item_id=str(entry.item_id))
                elif entry.uid is not None:
                    store_entry = dict(field=field,
                                       tablename=entry.tablename,
                                       uid=str(entry.uid))
                if store_entry is not None:
                    ritems.append(json.dumps(store_entry))
        if ritems:
            record.update(ritems=ritems)
        citems = [c.item_id for c in self.components]
        if citems:
            record.update(citems=citems)
        if self.parent:
            record.update(parent=self.parent.item_id)
        if record_id:
            db(item_table.id == record_id).update(**record)
        else:
            record_id = item_table.insert(**record)
        _debug("Record ID=%s" % record_id)
        return record_id

    # -------------------------------------------------------------------------
    def restore(self, row):
        """
            Restore an item from an item table row. This does not restore
            the references (since this can not be done before all items
            are restored), must call job.restore_references() to do that

            @param row: the item table row
        """

        xml = current.xml

        self.item_id = row.item_id
        self.accepted = None
        self.permitted = False
        self.committed = False
        tablename = row.tablename
        self.id = None
        self.uid = row.record_uid
        if row.data is not None:
            self.data = cPickle.loads(row.data)
        else:
            self.data = Storage()
        data = self.data
        if xml.MTIME in data:
            self.mtime = data[xml.MTIME]
        if xml.MCI in data:
            self.mci = data[xml.MCI]
        UID = xml.UID
        if UID in data:
            self.uid = data[UID]
        self.element = etree.fromstring(row.element)
        if row.citems:
            self.load_components = row.citems
        if row.ritems:
            self.load_references = [json.loads(ritem) for ritem in row.ritems]
        self.load_parent = row.parent
        try:
            table = current.s3db[tablename]
        except:
            self.error = self.ERROR.BAD_RESOURCE
            return False
        else:
            self.table = table
            self.tablename = tablename
        original = current.manager.original(table, self.data)
        if original is not None:
            self.original = original
            self.id = original[table._id.name]
            if UID in original:
                self.uid = original[UID]
                self.data.update({UID:self.uid})
        self.error = row.error
        if self.error and not self.data:
            # Validation error
            return False
        return True
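
# Minimal usage sketch (assumes a parsed element tree "tree" and a target
# table; normally a job is created indirectly via resource.import_xml()):
#
#     job = S3ImportJob(current.manager, table, tree=tree)
#     for element in tree.getroot():
#         job.add_item(element=element)
#     job.commit(ignore_errors=True)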
# =============================================================================
class S3ImportJob():
    """
        Class to import an element tree into the database
    """

    JOB_TABLE_NAME = "s3_import_job"
    ITEM_TABLE_NAME = "s3_import_item"

    # -------------------------------------------------------------------------
    def __init__(self, manager, table,
                 tree=None,
                 files=None,
                 job_id=None,
                 strategy=None,
                 update_policy=None,
                 conflict_policy=None,
                 last_sync=None,
                 onconflict=None):
        """
            Constructor

            @param manager: the S3RequestManager instance performing this job
            @param tree: the element tree to import
            @param files: files attached to the import (for upload fields)
            @param job_id: restore job from database (record ID or job_id)
            @param strategy: the import strategy
            @param update_policy: the update policy
            @param conflict_policy: the conflict resolution policy
            @param last_sync: the last synchronization time stamp (datetime)
            @param onconflict: custom conflict resolver function
        """

        self.error = None # the last error
        self.error_tree = etree.Element(current.xml.TAG.root)

        self.table = table
        self.tree = tree
        self.files = files
        self.directory = Storage()

        self.elements = Storage()
        self.items = Storage()
        self.references = []

        self.job_table = None
        self.item_table = None

        self.count = 0    # total number of records imported
        self.created = [] # IDs of created records
        self.updated = [] # IDs of updated records
        self.deleted = [] # IDs of deleted records

        # Import strategy
        self.strategy = strategy
        if self.strategy is None:
            self.strategy = [S3ImportItem.METHOD.CREATE,
                             S3ImportItem.METHOD.UPDATE,
                             S3ImportItem.METHOD.DELETE]
        if not isinstance(self.strategy, (tuple, list)):
            self.strategy = [self.strategy]

        # Update policy (default=always update)
        self.update_policy = update_policy
        if not self.update_policy:
            self.update_policy = S3ImportItem.POLICY.OTHER
        # Conflict resolution policy (default=always update)
        self.conflict_policy = conflict_policy
        if not self.conflict_policy:
            self.conflict_policy = S3ImportItem.POLICY.OTHER

        # Synchronization settings
        self.mtime = None
        self.last_sync = last_sync
        self.onconflict = onconflict

        if job_id:
            self.__define_tables()
            jobtable = self.job_table
            if str(job_id).isdigit():
                query = jobtable.id == job_id
            else:
                query = jobtable.job_id == job_id
            row = current.db(query).select(limitby=(0, 1)).first()
            if not row:
                raise SyntaxError("Job record not found")
            self.job_id = row.job_id
            if not self.table:
                tablename = row.tablename
                try:
                    table = current.s3db[tablename]
                except:
                    pass
        else:
            import uuid
            self.job_id = uuid.uuid4() # unique ID for this job

    # -------------------------------------------------------------------------
    def add_item(self,
                 element=None,
                 original=None,
                 components=None,
                 parent=None,
                 joinby=None):
        """
            Parse and validate an XML element and add it as new item
            to the job.

            @param element: the element
            @param original: the original DB record (if already available,
                             will otherwise be looked-up by this function)
            @param components: a dictionary of components (as in S3Resource)
                               to include in the job (defaults to all
                               defined components)
            @param parent: the parent item (if this is a component)
            @param joinby: the component join key(s) (if this is a component)

            @returns: a unique identifier for the new item, or None if there
                      was an error. self.error contains the last error, and
                      self.error_tree an element tree with all failing elements
                      including error attributes.
        """

        if element in self.elements:
            # element has already been added to this job
            return self.elements[element]

        # Parse the main element
        item = S3ImportItem(self)

        # Update lookup lists
        item_id = item.item_id
        self.items[item_id] = item
        if element is not None:
            self.elements[element] = item_id

        if not item.parse(element,
                          original=original,
                          files=self.files):
            self.error = item.error
            item.accepted = False
            if parent is None:
                self.error_tree.append(deepcopy(item.element))

        else:
            # Now parse the components
            table = item.table
            components = current.s3db.get_components(table, names=components)

            cnames = Storage()
            cinfos = Storage()
            for alias in components:
                component = components[alias]
                pkey = component.pkey
                if component.linktable:
                    ctable = component.linktable
                    fkey = component.lkey
                else:
                    ctable = component.table
                    fkey = component.fkey
                ctablename = ctable._tablename
                if ctablename in cnames:
                    cnames[ctablename].append(alias)
                else:
                    cnames[ctablename] = [alias]
                cinfos[(ctablename, alias)] = Storage(component = component,
                                                      ctable = ctable,
                                                      pkey = pkey,
                                                      fkey = fkey,
                                                      original = None,
                                                      uid = None)
            add_item = self.add_item
            xml = current.xml
            for celement in xml.components(element, names=cnames.keys()):

                # Get the component tablename
                ctablename = celement.get(xml.ATTRIBUTE.name, None)
                if not ctablename:
                    continue

                # Get the component alias (for disambiguation)
                calias = celement.get(xml.ATTRIBUTE.alias, None)
                if calias is None:
                    if ctablename not in cnames:
                        continue
                    aliases = cnames[ctablename]
                    if len(aliases) == 1:
                        calias = aliases[0]
                    else:
                        # ambiguous components *must* use alias
                        continue
                if (ctablename, calias) not in cinfos:
                    continue
                else:
                    cinfo = cinfos[(ctablename, calias)]

                component = cinfo.component
                original = cinfo.original
                ctable = cinfo.ctable
                pkey = cinfo.pkey
                fkey = cinfo.fkey
                if not component.multiple:
                    if cinfo.uid is not None:
                        continue
                    if original is None and item.id:
                        query = (table.id == item.id) & \
                                (table[pkey] == ctable[fkey])
                        original = current.db(query).select(ctable.ALL,
                                                            limitby=(0, 1)).first()
                        if original:
                            cinfo.uid = uid = original.get(xml.UID, None)
                            celement.set(xml.UID, uid)
                        cinfo.original = original

                item_id = add_item(element=celement,
                                   original=original,
                                   parent=item,
                                   joinby=(pkey, fkey))
                if item_id is None:
                    item.error = self.error
                    self.error_tree.append(deepcopy(item.element))
                else:
                    citem = self.items[item_id]
                    citem.parent = item
                    item.components.append(citem)

            # Handle references
            table = item.table
            tree = self.tree
            if tree is not None:
                fields = [table[f] for f in table.fields]
                rfields = filter(s3_has_foreign_key, fields)
                item.references = self.lookahead(element,
                                                 table=table,
                                                 fields=rfields,
                                                 tree=tree,
                                                 directory=self.directory)
                for reference in item.references:
                    entry = reference.entry
                    if entry and entry.element is not None:
                        item_id = add_item(element=entry.element)
                        if item_id:
                            entry.update(item_id=item_id)

            # Parent reference
            if parent is not None:
                entry = Storage(item_id=parent.item_id,
                                element=parent.element,
                                tablename=parent.tablename)
                item.references.append(Storage(field=joinby,
                                               entry=entry))

        return item.item_id

    # -------------------------------------------------------------------------
    def lookahead(self,
                  element,
                  table=None,
                  fields=None,
                  tree=None,
                  directory=None):
        """
            Find referenced elements in the tree

            @param element: the element
            @param table: the DB table
            @param fields: the FK fields in the table
            @param tree: the import tree
            @param directory: a dictionary to lookup elements in the tree
                              (will be filled in by this function)
        """

        db = current.db
        s3db = current.s3db
        xml = current.xml
        import_uid = xml.import_uid
        ATTRIBUTE = xml.ATTRIBUTE
        TAG = xml.TAG
        UID = xml.UID
        reference_list = []

        root = None
        if tree is not None:
            if isinstance(tree, etree._Element):
                root = tree
            else:
                root = tree.getroot()
        references = element.findall("reference")
        for reference in references:
            field = reference.get(ATTRIBUTE.field, None)
            # Ignore references without valid field-attribute
            if not field or field not in fields:
                continue
            # Find the key table
            multiple = False
            fieldtype = str(table[field].type)
            if fieldtype.startswith("reference"):
                ktablename = fieldtype[10:]
            elif fieldtype.startswith("list:reference"):
                ktablename = fieldtype[15:]
                multiple = True
            else:
                # ignore if the field is not a reference type
                continue
            try:
                ktable = s3db[ktablename]
            except:
                # Invalid tablename - skip
                continue
            tablename = reference.get(ATTRIBUTE.resource, None)
            # Ignore references to tables without UID field:
            if UID not in ktable.fields:
                continue
            # Fall back to key table name if tablename is not specified:
            if not tablename:
                tablename = ktablename
            # Super-entity references must use the super-key:
            if tablename != ktablename:
                field = (ktable._id.name, field)
            # Ignore direct references to super-entities:
            if tablename == ktablename and ktable._id.name != "id":
                continue
            # Get the foreign key
            uids = reference.get(UID, None)
            attr = UID
            if not uids:
                uids = reference.get(ATTRIBUTE.tuid, None)
                attr = ATTRIBUTE.tuid
            if uids and multiple:
                uids = json.loads(uids)
            elif uids:
                uids = [uids]

            # Find the elements and map to DB records
            relements = []

            # Create a UID<->ID map
            id_map = Storage()
            if attr == UID and uids:
                _uids = map(import_uid, uids)
                query = ktable[UID].belongs(_uids)
                records = db(query).select(ktable.id,
                                           ktable[UID])
                id_map = dict([(r[UID], r.id) for r in records])

            if not uids:
                # Anonymous reference: <resource> inside the element
                expr = './/%s[@%s="%s"]' % (TAG.resource,
                                            ATTRIBUTE.name,
                                            tablename)
                relements = reference.xpath(expr)
                if relements and not multiple:
                    relements = [relements[0]]

            elif root is not None:

                for uid in uids:

                    entry = None
                    # Entry already in directory?
                    if directory is not None:
                        entry = directory.get((tablename, attr, uid), None)
                    if not entry:
                        expr = ".//%s[@%s='%s' and @%s='%s']" % (
                                   TAG.resource,
                                   ATTRIBUTE.name,
                                   tablename,
                                   attr,
                                   uid)
                        e = root.xpath(expr)
                        if e:
                            # Element in the source => append to relements
                            relements.append(e[0])
                        else:
                            # No element found, see if original record exists
                            _uid = import_uid(uid)
                            if _uid and _uid in id_map:
                                _id = id_map[_uid]
                                entry = Storage(tablename=tablename,
                                                element=None,
                                                uid=uid,
                                                id=_id,
                                                item_id=None)
                                reference_list.append(Storage(field=field,
                                                              entry=entry))
                            else:
                                continue
                    else:
                        reference_list.append(Storage(field=field,
                                                      entry=entry))

            # Create entries for all newly found elements
            for relement in relements:
                uid = relement.get(attr, None)
                if attr == UID:
                    _uid = import_uid(uid)
                    id = _uid and id_map and id_map.get(_uid, None) or None
                else:
                    _uid = None
                    id = None
                entry = Storage(tablename=tablename,
                                element=relement,
                                uid=uid,
                                id=id,
                                item_id=None)
                # Add entry to directory
                if uid and directory is not None:
                    directory[(tablename, attr, uid)] = entry
                # Append the entry to the reference list
                reference_list.append(Storage(field=field, entry=entry))

        return reference_list

    # -------------------------------------------------------------------------
    def load_item(self, row):
        """
            Load an item from the item table (counterpart to add_item
            when restoring a job from the database)
        """

        item = S3ImportItem(self)
        if not item.restore(row):
            self.error = item.error
            if item.load_parent is None:
                self.error_tree.append(deepcopy(item.element))
        # Update lookup lists
        item_id = item.item_id
        self.items[item_id] = item
        return item_id

    # -------------------------------------------------------------------------
    def resolve(self, item_id, import_list):
        """
            Resolve the reference list of an item

            @param item_id: the import item UID
            @param import_list: the ordered list of items (UIDs) to import
        """

        item = self.items[item_id]
        if item.lock or item.accepted is False:
            return False
        references = []
        for reference in item.references:
            ritem_id = reference.entry.item_id
            if ritem_id and ritem_id not in import_list:
                references.append(ritem_id)
        for ritem_id in references:
            item.lock = True
            if self.resolve(ritem_id, import_list):
                import_list.append(ritem_id)
            item.lock = False
        return True

    # -------------------------------------------------------------------------
    def commit(self, ignore_errors=False):
        """
            Commit the import job to the DB

            @param ignore_errors: skip any items with errors
                                  (does still report the errors)
        """

        ATTRIBUTE = current.xml.ATTRIBUTE

        # Resolve references
        import_list = []
        for item_id in self.items:
            self.resolve(item_id, import_list)
            if item_id not in import_list:
                import_list.append(item_id)
        # Commit the items
        items = self.items
        count = 0
        mtime = None
        created = []
        cappend = created.append
        updated = []
        deleted = []
        tablename = self.table._tablename
        for item_id in import_list:
            item = items[item_id]
            error = None
            success = item.commit(ignore_errors=ignore_errors)
            error = item.error
            if error:
                self.error = error
                element = item.element
                if element is not None:
                    if not element.get(ATTRIBUTE.error, False):
                        element.set(ATTRIBUTE.error, str(self.error))
                    self.error_tree.append(deepcopy(element))
                if not ignore_errors:
                    return False
            elif item.tablename == tablename:
                count += 1
                if mtime is None or item.mtime > mtime:
                    mtime = item.mtime
                if item.id:
                    if item.method == item.METHOD.CREATE:
                        cappend(item.id)
                    elif item.method == item.METHOD.UPDATE:
                        updated.append(item.id)
                    elif item.method == item.METHOD.DELETE:
                        deleted.append(item.id)
        self.count = count
        self.mtime = mtime
        self.created = created
        self.updated = updated
        self.deleted = deleted
        return True

    # -------------------------------------------------------------------------
    def __define_tables(self):
        """
            Define the database tables for jobs and items
        """

        self.job_table = self.define_job_table()
        self.item_table = self.define_item_table()
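
    # Note: define_job_table()/define_item_table() below create the
    # persistent job/item tables on first use; S3Importer accesses the
    # item table the same way, e.g.
    #     item_table = S3ImportJob.define_item_table()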
db.define_table(cls.JOB_TABLE_NAME,\n Field(\"job_id\", length=128,\n unique=True,\n notnull=True),\n Field(\"tablename\"),\n Field(\"timestmp\", \"datetime\",\n default=datetime.utcnow()))\n else:\n job_table = db[cls.JOB_TABLE_NAME]\n return job_table\n\n # -------------------------------------------------------------------------\n @classmethod\n def define_item_table(cls):\n\n db = current.db\n if cls.ITEM_TABLE_NAME not in db:\n item_table = db.define_table(cls.ITEM_TABLE_NAME,\n Field(\"item_id\", length=128,\n unique=True,\n notnull=True),\n Field(\"job_id\", length=128),\n Field(\"tablename\", length=128),\n #Field(\"record_id\", \"integer\"),\n Field(\"record_uid\"),\n Field(\"error\", \"text\"),\n Field(\"data\", \"text\"),\n Field(\"element\", \"text\"),\n Field(\"ritems\", \"list:string\"),\n Field(\"citems\", \"list:string\"),\n Field(\"parent\", length=128))\n else:\n item_table = db[cls.ITEM_TABLE_NAME]\n return item_table\n\n # -------------------------------------------------------------------------\n def store(self):\n \"\"\"\n Store this job and all its items in the job table\n \"\"\"\n\n db = current.db\n\n _debug(\"Storing Job ID=%s\" % self.job_id)\n self.__define_tables()\n jobtable = self.job_table\n query = jobtable.job_id == self.job_id\n row = db(query).select(jobtable.id, limitby=(0, 1)).first()\n if row:\n record_id = row.id\n else:\n record_id = None\n record = Storage(job_id=self.job_id)\n try:\n tablename = self.table._tablename\n except:\n pass\n else:\n record.update(tablename=tablename)\n for item in self.items.values():\n item.store(item_table=self.item_table)\n if record_id:\n db(jobtable.id == record_id).update(**record)\n else:\n record_id = jobtable.insert(**record)\n _debug(\"Job record ID=%s\" % record_id)\n return record_id\n\n # -------------------------------------------------------------------------\n def get_tree(self):\n \"\"\"\n Reconstruct the element tree of this job\n \"\"\"\n\n if self.tree is not None:\n # Return the stored tree (referencing the bare name \"tree\" here\n # would raise a NameError)\n return self.tree\n else:\n xml = current.xml\n root = etree.Element(xml.TAG.root)\n for item in self.items.values():\n if item.element is not None and not item.parent:\n if item.tablename == self.table._tablename or \\\n item.element.get(xml.UID, None) or \\\n item.element.get(xml.ATTRIBUTE.tuid, None):\n root.append(deepcopy(item.element))\n return etree.ElementTree(root)\n\n # -------------------------------------------------------------------------\n def delete(self):\n \"\"\"\n Delete this job and all its items from the job table\n \"\"\"\n\n db = current.db\n\n _debug(\"Deleting job ID=%s\" % self.job_id)\n self.__define_tables()\n item_table = self.item_table\n query = item_table.job_id == self.job_id\n db(query).delete()\n job_table = self.job_table\n query = job_table.job_id == self.job_id\n db(query).delete()\n\n # -------------------------------------------------------------------------\n def restore_references(self):\n \"\"\"\n Restore the job's reference structure after loading items\n from the item table\n \"\"\"\n\n db = current.db\n UID = current.xml.UID\n\n for item in self.items.values():\n for citem_id in item.load_components:\n if citem_id in self.items:\n item.components.append(self.items[citem_id])\n item.load_components = []\n for ritem in item.load_references:\n field = ritem[\"field\"]\n if \"item_id\" in ritem:\n item_id = ritem[\"item_id\"]\n if item_id in self.items:\n _item = self.items[item_id]\n entry = Storage(tablename=_item.tablename,\n element=_item.element,\n uid=_item.uid,\n id=_item.id,\n 
item_id=item_id)\n item.references.append(Storage(field=field,\n entry=entry))\n else:\n _id = None\n uid = ritem.get(\"uid\", None)\n tablename = ritem.get(\"tablename\", None)\n if tablename and uid:\n try:\n table = current.s3db[tablename]\n except:\n continue\n if UID not in table.fields:\n continue\n query = table[UID] == uid\n row = db(query).select(table._id,\n limitby=(0, 1)).first()\n if row:\n _id = row[table._id.name]\n else:\n continue\n entry = Storage(tablename = ritem[\"tablename\"],\n element=None,\n uid = ritem[\"uid\"],\n id = _id,\n item_id = None)\n item.references.append(Storage(field=field,\n entry=entry))\n item.load_references = []\n if item.load_parent is not None:\n item.parent = self.items[item.load_parent]\n item.load_parent = None\n\n# END =========================================================================\n"},"repo_name":{"kind":"string","value":"ashwyn/eden-message_parser"},"path":{"kind":"string","value":"modules/s3/s3import.py"},"language":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":123322,"string":"123,322"}}},{"rowIdx":1744,"cells":{"code":{"kind":"string","value":"package br.ufrj.g2matricula.domain;\n\nimport org.springframework.data.elasticsearch.annotations.Document;\n\nimport javax.persistence.*;\nimport javax.validation.constraints.*;\nimport java.io.Serializable;\nimport java.util.Objects;\n\nimport br.ufrj.g2matricula.domain.enumeration.MatriculaStatus;\n\n/**\n * A Matricula.\n */\n@Entity\n@Table(name = \"matricula\")\n@Document(indexName = \"matricula\")\npublic class Matricula implements Serializable {\n\n private static final long serialVersionUID = 1L;\n\n @Id\n @GeneratedValue(strategy = GenerationType.IDENTITY)\n private Long id;\n\n @NotNull\n @Enumerated(EnumType.STRING)\n @Column(name = \"status\", nullable = false)\n private MatriculaStatus status;\n\n @ManyToOne\n private Aluno dreAluno;\n\n @ManyToOne\n private Curso curso;\n\n // jhipster-needle-entity-add-field - JHipster will add fields here, do not remove\n public Long getId() {\n return id;\n }\n\n public void setId(Long id) {\n this.id = id;\n }\n\n public MatriculaStatus getStatus() {\n return status;\n }\n\n public Matricula status(MatriculaStatus status) {\n this.status = status;\n return this;\n }\n\n public void setStatus(MatriculaStatus status) {\n this.status = status;\n }\n\n public Aluno getDreAluno() {\n return dreAluno;\n }\n\n public Matricula dreAluno(Aluno aluno) {\n this.dreAluno = aluno;\n return this;\n }\n\n public void setDreAluno(Aluno aluno) {\n this.dreAluno = aluno;\n }\n\n public Curso getCurso() {\n return curso;\n }\n\n public Matricula curso(Curso curso) {\n this.curso = curso;\n return this;\n }\n\n public void setCurso(Curso curso) {\n this.curso = curso;\n }\n // jhipster-needle-entity-add-getters-setters - JHipster will add getters and setters here, do not remove\n\n @Override\n public boolean equals(Object o) {\n if (this == o) {\n return true;\n }\n if (o == null || getClass() != o.getClass()) {\n return false;\n }\n Matricula matricula = (Matricula) o;\n if (matricula.getId() == null || getId() == null) {\n return false;\n }\n return Objects.equals(getId(), matricula.getId());\n }\n\n @Override\n public int hashCode() {\n return Objects.hashCode(getId());\n }\n\n @Override\n public String toString() {\n return \"Matricula{\" +\n \"id=\" + getId() +\n \", status='\" + getStatus() + \"'\" +\n \"}\";\n 
}\n}\n"},"repo_name":{"kind":"string","value":"DamascenoRafael/cos482-qualidade-de-software"},"path":{"kind":"string","value":"www/src/main/java/br/ufrj/g2matricula/domain/Matricula.java"},"language":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":2535,"string":"2,535"}}},{"rowIdx":1745,"cells":{"code":{"kind":"string","value":"# rhodecode-ce-dockerized\nDocker container for RhodeCode Community Edition repository management platform\n\n# WIP\n"},"repo_name":{"kind":"string","value":"darneta/rhodecode-ce-dockerized"},"path":{"kind":"string","value":"README.md"},"language":{"kind":"string","value":"Markdown"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":113,"string":"113"}}},{"rowIdx":1746,"cells":{"code":{"kind":"string","value":"db->select('CID, CategoryName');\n\t\t$this->db->from('admin_module_category');\n\t\t$this->db->order_by('Ordering');\n\t\t$query = $this->db->get();\n\t\tif($query->num_rows())\n\t\t\treturn $query;\n\t\telse\n\t\t\treturn FALSE;\n\t}\n\t\n\tpublic function GetAdminModuleList()\n\t{\n\t\t$this->db->select('MID, CID, ModuleName, DisplayName');\n\t\t$this->db->from('admin_module');\n\t\t$this->db->order_by('Ordering');\n\t\t$query = $this->db->get();\n\t\tif($query->num_rows())\n\t\t\treturn $query;\n\t\telse\n\t\t\treturn FALSE;\n\t}\n\t\n\tpublic function GetAdminModuleActions($MID = NULL)\n\t{\n\t\t$this->db->select('AID, MID, Action');\n\t\t$this->db->from('admin_module_action');\n\t\tif($MID != NULL)\n\t\t\t$this->db->where('MID', $MID);\n\t\t$query = $this->db->get();\n\t\tif($query->num_rows())\n\t\t\treturn $query->result();\n\t\telse\n\t\t\treturn FALSE;\n\t}\n}\n?>"},"repo_name":{"kind":"string","value":"dernst91/deCMS"},"path":{"kind":"string","value":"application/models/Admin_GeneralModel.php"},"language":{"kind":"string","value":"PHP"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":895,"string":"895"}}},{"rowIdx":1747,"cells":{"code":{"kind":"string","value":"#ifndef SYMTAB_H\n#define SYMTAB_H\n\n#include \"symbol.h\"\n\nvoid symtab_init();\n\nvoid push_scope();\nvoid pop_scope();\n\nsymbol *bind_symbol(char *name);\nsymbol *lookup_symbol(char *name);\n\nvoid print_symtab();\n\n#endif\n"},"repo_name":{"kind":"string","value":"rayiner/ccpoc"},"path":{"kind":"string","value":"symtab.h"},"language":{"kind":"string","value":"C"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":213,"string":"213"}}},{"rowIdx":1748,"cells":{"code":{"kind":"string","value":"RSpec.describe(\"executables\", skip_db_cleaner: true) do\n include SharedSpecSetup\n before do\n #migrations don't work if we are still connected to the db\n ActiveRecord::Base.remove_connection\n end\n\n\n it \"extracts the schema\" do\n output = `bin/extract #{config_filename} production #{schema_filename} 2>&1`\n\n expect(output).to match(/extracted to/)\n expect(output).to match(/#{schema_filename}/)\n end\n\n it \"transfers the schema\" do\n output = `bin/transfer-schema #{config_filename} production test config/include_tables.txt 2>&1`\n\n expect(output).to match(/transferred schema from production to test/)\n end\nend\n"},"repo_name":{"kind":"string","value":"ifad/sybase-schema-extractor"},"path":{"kind":"string","value":"spec/bin_spec.rb"},"language":{"kind":"string","value":"Ruby"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":636,"string":"636"}}},{"rowIdx":1749,"cells":{"code":{"kind":"string","value":"## S3proxy - serve S3 files 
simply\n\nS3proxy is a simple flask-based REST web application which can expose files (keys) stored in the AWS Simple Storage Service (S3) via a simple REST API. \n\n### What does this do?\nS3proxy takes a set of AWS credentials and an S3 bucket name and provides GET and HEAD endpoints on the files within the bucket. It uses the [boto][boto] library for internal access to S3. For example, if your bucket has the following file:\n\n s3://mybucket/examples/path/to/myfile.txt\n\nthen running S3proxy on a localhost server (port 5000) would enable you to read (GET) this file at:\n\n\thttp://localhost:5000/files/examples/path/to/myfile.txt\n\nSupport exists in S3proxy for the `byte-range` header in a GET request. This means that the API can provide arbitrary parts of S3 files if requested/supported by the application making the GET request.\n\n### Why do this?\nS3proxy simplifies access to private S3 objects. While S3 already provides [a complete REST API][s3_api], this API requires signed authentication headers or parameters that are not always obtainable within existing applications (see below), or that are overly complex for simple development/debugging tasks.\n\nIn fact, however, S3proxy was specifically designed to provide a compatibility layer for viewing DNA sequencing data (`.bam` files) using [IGV][igv]. While IGV already includes an interface for reading bam files from an HTTP endpoint, it does not support creating signed requests as required by the AWS S3 API (IGV does support HTTP Basic Authentication, a feature that I would like to include in S3proxy in the near future). Though it is in principle possible to provide a signed AWS-compatible URL to IGV, IGV will still not be able to create its own signed URLs necessary for accessing `.bai` index files, usually located in the same directory as the `.bam` file. Using S3proxy you can expose the S3 objects via a simplified HTTP API which IGV can understand and access directly.\n\nThis project is in many ways similar to [S3Auth][s3auth], a hosted service which provides a much more complete API to a private S3 bucket. I wrote S3proxy as a faster, simpler solution-- and because S3Auth requires a domain name and access to the `CNAME` record in order to function. If you want a more complete API (read: more than just GET/HEAD at the moment), you should check them out!\n\n### Features\n - Serves S3 file objects via a standard GET request, optionally providing only a part of a file using the `byte-range` header. \n - Easy to configure via the `config.yaml` file-- S3 keys and a bucket name are all you need!\n - Limited support for simple url-rewriting where necessary.\n - Uses the werkzeug [`SimpleCache` module][simplecache] to cache S3 object identifiers (but not data) in order to reduce latency and lookup times.\n\n### Usage\n\n#### Requirements\nTo run S3proxy, you will need:\n - [Flask][flask]\n - [boto][boto]\n - [PyYAML][pyyaml]\n - An Amazon AWS account and keys with appropriate S3 access\n\n#### Installation/Configuration\nAt the moment, there is no installation. Simply put your AWS keys and bucket name into the config.yaml file:\n\n```yaml\nAWS_ACCESS_KEY_ID: ''\nAWS_SECRET_ACCESS_KEY: ''\nbucket_name: ''\n```\n\nYou may also optionally specify a number of \"rewrite\" rules. These are simple pairs of a regular expression and a replacement string which can be used to internally redirect (Note, the API does not actually currently send a REST 3XX redirect header) file paths. 
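As a minimal illustration of what such a rule does internally (a hypothetical sketch, not the application's actual code -- the pattern and replacement simply mirror the yaml example just below), a rewrite rule amounts to a regular-expression substitution on the requested path:

```python
import re

# Hypothetical sketch: apply one rewrite rule (a from/to pair) to a requested path.
def apply_rule(path, rule_from='.bam.bai$', rule_to='.bai'):
    # re.sub returns the path unchanged when the pattern does not match.
    return re.sub(rule_from, rule_to, path)

# A request for a .bam.bai index would then be served from the matching .bai key:
assert apply_rule('examples/path/to/myfile.bam.bai') == 'examples/path/to/myfile.bai'
```
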
The example in the config.yaml file reads:\n\n```yaml\nrewrite_rules:\n bai_rule:\n from: \".bam.bai$\"\n to: \".bai\"\n```\n\n... which will match all url/filenames ending with \".bam.bai\" and rewrite this to \".bai\". \n\nIf you do not wish to use any rewrite_rules, simply leave this commented out.\n\n#### Running S3proxy:\nOnce you have filled out the config.yaml file, you can test out S3proxy simply by running on the command line:\n\n python app.py\n\n*Note*: Running using the built-in flask server is not recommended for anything other than debugging. Refer to [these deployment options][wsgi_server] for instructions on how to set up a flask application in a WSGI framework.\n\n#### Options\nIf you wish to see more debug-level output (headers, etc.), use the `--debug` option. You may also specify a yaml configuration file to load using the `--config` parameter.\n\n### Important considerations and caveats\nS3proxy should not be used on production-level or open/exposed servers! There is currently no security provided by S3proxy (though I may add basic HTTP authentication later). Once given the AWS credentials, S3proxy will serve any path available to it. And, although I restrict requests to GET and HEAD only, I cannot currently guarantee that a determined person would not be able to execute a PUT/UPDATE/DELETE request using this service. Finally, I highly recommend you create a separate [IAM role][iam_roles] in AWS with limited access and permissions to S3 only for use with S3proxy. \n\n### Future development\n - Implement HTTP Basic Authentication to provide some level of security.\n - Implement other error codes and basic REST responses. \n - Add ability to log to a file and specify a `--log-level` (use the Python logging module)\n\n\n[boto]: http://boto.readthedocs.org/\n[flask]: http://flask.pocoo.org/\n[pyyaml]: http://pyyaml.org/wiki/PyYAML\n[s3_api]: http://docs.aws.amazon.com/AmazonS3/latest/API/APIRest.html\n[igv]: http://www.broadinstitute.org/igv/home\n[wsgi_server]: http://flask.pocoo.org/docs/deploying/\n[iam_roles]: http://aws.amazon.com/iam/\n[simplecache]: http://flask.pocoo.org/docs/patterns/caching/\n[s3auth]: http://www.s3auth.com/\n"},"repo_name":{"kind":"string","value":"nkrumm/s3proxy"},"path":{"kind":"string","value":"README.md"},"language":{"kind":"string","value":"Markdown"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":5632,"string":"5,632"}}},{"rowIdx":1750,"cells":{"code":{"kind":"string","value":"Title: Survey data\nTemplate: survey\nSlug: survey/data\nGithub: True\n\n\nThe code to clean and process the survey data is available in the [GitHub repository](https://github.com/andrewheiss/From-the-Trenches-Anti-TIP-NGOs-and-US) for Andrew Heiss and Judith G. Kelley. 2016. \"From the Trenches: A Global Survey of Anti-TIP NGOs and their Views of US Efforts.\" *Journal of Human Trafficking*. [doi:10.1080/23322705.2016.1199241](https://dx.doi.org/10.1080/23322705.2016.1199241)\n\n
\n\nThe free response answers for respondents who requested anonymity have been\nredacted. \n\n- CSV file: [`responses_full_anonymized.csv`](/files/data/responses_full_anonymized.csv)\n- R file: [`responses_full_anonymized.rds`](/files/data/responses_full_anonymized.rds)\n"},"repo_name":{"kind":"string","value":"andrewheiss/scorecarddiplomacy-org"},"path":{"kind":"string","value":"content/pages/survey-stuff/data.md"},"language":{"kind":"string","value":"Markdown"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":952,"string":"952"}}},{"rowIdx":1751,"cells":{"code":{"kind":"string","value":"// @flow\nimport { StyleSheet } from 'react-native';\nimport { colors } from '../../themes';\n\nconst styles = StyleSheet.create({\n divider: {\n height: 1,\n marginHorizontal: 0,\n backgroundColor: colors.darkDivider,\n },\n});\n\nexport default styles;\n"},"repo_name":{"kind":"string","value":"Dennitz/Timetable"},"path":{"kind":"string","value":"src/components/styles/HorizontalDividerList.styles.js"},"language":{"kind":"string","value":"JavaScript"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":254,"string":"254"}}},{"rowIdx":1752,"cells":{"code":{"kind":"string","value":"## Testing testing, 1, 2, 3\n\nLet's see how *[this](https://github.com/imathis/jekyll-markdown-block)* does.\n\n puts 'awesome' unless not_awesome?\n\n- One item\n- Two item\n- Three Item\n- Four!\n\nAnd… scene!\n\n"},"repo_name":{"kind":"string","value":"imathis/jekyll-markdown-block"},"path":{"kind":"string","value":"test/source/_includes/test.md"},"language":{"kind":"string","value":"Markdown"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":208,"string":"208"}}},{"rowIdx":1753,"cells":{"code":{"kind":"string","value":"\n\n\n\n\n\nResourceXmlPropertyEmitterInterface\n\n\n\n\n\n\n\n\n\n\n\n\n
org.pentaho.di.resource
Interface ResourceXmlPropertyEmitterInterface

public interface ResourceXmlPropertyEmitterInterface

Method Summary

 String getExtraResourceProperties(ResourceHolderInterface ref, int indention)
          Allows injection of additional relevant properties in the to-xml of the Resource Reference.

Method Detail

getExtraResourceProperties

String getExtraResourceProperties(ResourceHolderInterface ref,
                                  int indention)

Allows injection of additional relevant properties in the to-xml of the Resource Reference.

Parameters:
    ref - The Resource Reference Holder (a step, or a job entry)
    indention - If -1, then no indenting, otherwise, it's the indent level to indent the XML strings

Returns:
    String of injected XML
\n\n\n\n"},"repo_name":{"kind":"string","value":"ColFusion/PentahoKettle"},"path":{"kind":"string","value":"kettle-data-integration/docs/api/org/pentaho/di/resource/ResourceXmlPropertyEmitterInterface.html"},"language":{"kind":"string","value":"HTML"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":9567,"string":"9,567"}}},{"rowIdx":1754,"cells":{"code":{"kind":"string","value":"require 'ffi'\n\nmodule ProcessShared\n module Posix\n module Errno\n extend FFI::Library\n\n ffi_lib FFI::Library::LIBC\n\n attach_variable :errno, :int\n\n # Replace methods in +syms+ with error checking wrappers that\n # invoke the original method and raise a {SystemCallError} with\n # the current errno if the return value is an error.\n #\n # Errors are detected if the block returns true when called with\n # the original method's return value.\n def error_check(*syms, &is_err)\n unless block_given?\n is_err = lambda { |v| (v == -1) }\n end\n\n syms.each do |sym|\n method = self.method(sym)\n new_method_body = proc do |*args|\n ret = method.call(*args)\n if is_err.call(ret)\n raise SystemCallError.new(\"error in #{sym}\", Errno.errno)\n else\n ret\n end\n end\n\n define_singleton_method(sym, &new_method_body)\n define_method(sym, &new_method_body)\n end\n end\n end\n end\nend\n"},"repo_name":{"kind":"string","value":"pmahoney/process_shared"},"path":{"kind":"string","value":"lib/process_shared/posix/errno.rb"},"language":{"kind":"string","value":"Ruby"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":1066,"string":"1,066"}}},{"rowIdx":1755,"cells":{"code":{"kind":"string","value":"\r\n\r\n\r\n\r\n \r\n\r\n\r\n\r\n\r\n

Manual

\r\n \r\n
\r\n \r\n
\r\n [&minus;]&nbsp;&nbsp;Scripts\r\n \r\n\r\n \r\n \r\n
\r\n
\r\n\r\n\r\n"},"repo_name":{"kind":"string","value":"matthias-wolff/C-VAU"},"path":{"kind":"string","value":"manual/toc.html"},"language":{"kind":"string","value":"HTML"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":2328,"string":"2,328"}}},{"rowIdx":1756,"cells":{"code":{"kind":"string","value":"'use strict';\n\nconst _ = require('lodash');\nconst co = require('co');\nconst Promise = require('bluebird');\nconst AWS = require('aws-sdk');\nAWS.config.region = 'us-east-1';\nconst cloudwatch = Promise.promisifyAll(new AWS.CloudWatch());\nconst Lambda = new AWS.Lambda();\n\nconst START_TIME = new Date('2017-06-07T01:00:00.000Z');\nconst DAYS = 2;\nconst ONE_DAY = 24 * 60 * 60 * 1000;\n\nlet addDays = (startDt, n) => new Date(startDt.getTime() + ONE_DAY * n);\n\nlet getFuncStats = co.wrap(function* (funcName) {\n let getStats = co.wrap(function* (startTime, endTime) {\n let req = {\n MetricName: 'Duration',\n Namespace: 'AWS/Lambda',\n Period: 60,\n Dimensions: [ { Name: 'FunctionName', Value: funcName } ],\n Statistics: [ 'Maximum' ],\n Unit: 'Milliseconds',\n StartTime: startTime,\n EndTime: endTime\n };\n let resp = yield cloudwatch.getMetricStatisticsAsync(req);\n\n return resp.Datapoints.map(dp => { \n return {\n timestamp: dp.Timestamp,\n value: dp.Maximum\n };\n });\n });\n\n let stats = [];\n for (let i = 0; i < DAYS; i++) {\n // CloudWatch only allows us to query 1440 data points per request, which \n // at 1 min period is 24 hours\n let startTime = addDays(START_TIME, i);\n let endTime = addDays(startTime, 1);\n let oneDayStats = yield getStats(startTime, endTime);\n\n stats = stats.concat(oneDayStats);\n }\n\n return _.sortBy(stats, s => s.timestamp);\n});\n\nlet listFunctions = co.wrap(function* (marker, acc) {\n acc = acc || [];\n\n let resp = yield Lambda.listFunctions({ Marker: marker, MaxItems: 100 }).promise();\n\n let functions = resp.Functions\n .map(f => f.FunctionName)\n .filter(fn => fn.includes(\"aws-coldstart\") && !fn.endsWith(\"run\"));\n\n acc = acc.concat(functions);\n\n if (resp.NextMarker) {\n return yield listFunctions(resp.NextMarker, acc);\n } else {\n return acc;\n }\n});\n\nlistFunctions()\n .then(co.wrap(function* (funcs) {\n for (let func of funcs) {\n let stats = yield getFuncStats(func);\n stats.forEach(stat => console.log(`${func},${stat.timestamp},${stat.value}`));\n }\n }));"},"repo_name":{"kind":"string","value":"theburningmonk/lambda-coldstart-comparison"},"path":{"kind":"string","value":"download-stats.js"},"language":{"kind":"string","value":"JavaScript"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":2153,"string":"2,153"}}},{"rowIdx":1757,"cells":{"code":{"kind":"string","value":"from __future__ import absolute_import, division, print_function, unicode_literals\nimport string\nimport urllib\ntry:\n from urllib.parse import urlparse, urlencode, urljoin, parse_qsl, urlunparse\n from urllib.request import urlopen, Request\n from urllib.error import HTTPError\nexcept ImportError:\n from urlparse import urlparse, urljoin, urlunparse, parse_qsl\n from urllib import urlencode\n from urllib2 import urlopen, Request, HTTPError\n\nfrom random import SystemRandom\n\ntry:\n UNICODE_ASCII_CHARACTERS = (string.ascii_letters +\n string.digits)\nexcept AttributeError:\n UNICODE_ASCII_CHARACTERS = (string.ascii_letters.decode('ascii') +\n string.digits.decode('ascii'))\n\n\ndef random_ascii_string(length):\n random = SystemRandom()\n return ''.join([random.choice(UNICODE_ASCII_CHARACTERS) for x in 
range(length)])\n\n\ndef url_query_params(url):\n \"\"\"Return query parameters as a dict from the specified URL.\n\n :param url: URL.\n :type url: str\n :rtype: dict\n \"\"\"\n return dict(parse_qsl(urlparse(url).query, True))\n\n\ndef url_dequery(url):\n \"\"\"Return a URL with the query component removed.\n\n :param url: URL to dequery.\n :type url: str\n :rtype: str\n \"\"\"\n url = urlparse(url)\n return urlunparse((url.scheme,\n url.netloc,\n url.path,\n url.params,\n '',\n url.fragment))\n\n\ndef build_url(base, additional_params=None):\n \"\"\"Construct a URL based off of base containing all parameters in\n the query portion of base plus any additional parameters.\n\n :param base: Base URL\n :type base: str\n ::param additional_params: Additional query parameters to include.\n :type additional_params: dict\n :rtype: str\n \"\"\"\n url = urlparse(base)\n query_params = {}\n query_params.update(parse_qsl(url.query, True))\n if additional_params is not None:\n query_params.update(additional_params)\n for k, v in additional_params.items():\n if v is None:\n query_params.pop(k)\n\n return urlunparse((url.scheme,\n url.netloc,\n url.path,\n url.params,\n urlencode(query_params),\n url.fragment))\n"},"repo_name":{"kind":"string","value":"VulcanTechnologies/oauth2lib"},"path":{"kind":"string","value":"oauth2lib/utils.py"},"language":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":2411,"string":"2,411"}}},{"rowIdx":1758,"cells":{"code":{"kind":"string","value":"#!/bin/bash\n\n# data in Empar_paper/data/simul_balanc4GenNonhSSM\n#length1000_b100.tar length1000_b150.tar length1000_b200.tar\n#length1000_b100_num98.fa\n\nMOD=ssm\n\nITER=2 # number of data sets\nbl=100\n\n#prep output files\nOUT_lik='likel_balanced4_gennonh_'$bl'_'$MOD'_E.txt'\nOUT_iter='iter_balanced4_gennonh_'$bl'_'$MOD'_E.txt'\nOUT_time='time_balanced4_gennonh_'$bl'_'$MOD'_E.txt'\nOUT_nc='neg_cases_balanced4_gennonh_'$bl'_'$MOD'_E.txt'\n\n[[ -f $OUT_lik ]] && rm -f $OUT_lik\n[[ -f $OUT_iter ]] && rm -f $OUT_iter\n[[ -f $OUT_time ]] && rm -f $OUT_time\n[[ -f $OUT_nc ]] && rm -f $OUT_nc\n\ntouch $OUT_lik\ntouch $OUT_iter\ntouch $OUT_time\ntouch $OUT_nc\n\n# run from within the scripts folder\n\nfor i in $(seq 0 1 $ITER)\ndo\n #extract a single file from tar\n tar -xvf ../data/simul_balanc4GenNonhSSM/length1000_b$bl.tar length1000_b$bl\\_num$i.fa\n ./main ../data/trees/treeE.tree length1000_b$bl\\_num$i.fa $MOD > out.txt\n cat out.txt | grep Likelihood | cut -d':' -f2 | xargs >> $OUT_lik\n cat out.txt | grep Iter | cut -d':' -f2 | xargs >> $OUT_iter\n cat out.txt | grep Time | cut -d':' -f2 | xargs >> $OUT_time\n cat out.txt | grep \"negative branches\" | cut -d':' -f2 | xargs >> $OUT_nc\n\n rm out.txt\n # not poluting the folder with single files\n rm length1000_b$bl\\_num$i.fa\n\ndone\nmv $OUT_time ../results/ssm/gennonh_data/balanc4GenNonh/.\nmv $OUT_lik ../results/ssm/gennonh_data/balanc4GenNonh/.\nmv $OUT_iter ../results/ssm/gennonh_data/balanc4GenNonh/.\nmv $OUT_nc ../results/ssm/gennonh_data/balanc4GenNonh/.\n"},"repo_name":{"kind":"string","value":"Algebraicphylogenetics/Empar_paper"},"path":{"kind":"string","value":"scripts/process_balanced4_gennonh_ssm.sh"},"language":{"kind":"string","value":"Shell"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":1509,"string":"1,509"}}},{"rowIdx":1759,"cells":{"code":{"kind":"string","value":"/*\r\nThe MIT License (MIT)\r\n\r\nCopyright (c) 2014 Banbury & Play-Em\r\n\r\nPermission is hereby 
granted, free of charge, to any person obtaining a copy\r\nof this software and associated documentation files (the \"Software\"), to deal\r\nin the Software without restriction, including without limitation the rights\r\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\r\ncopies of the Software, and to permit persons to whom the Software is\r\nfurnished to do so, subject to the following conditions:\r\n\r\nThe above copyright notice and this permission notice shall be included in\r\nall copies or substantial portions of the Software.\r\n\r\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\r\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\r\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\r\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\r\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\r\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\r\nTHE SOFTWARE.\r\n*/\r\n\r\nusing UnityEngine;\r\n\r\n#if UNITY_EDITOR\r\n\r\nusing UnityEditor;\r\nusing System.IO;\r\n\r\n#endif\r\n\r\nnamespace SpritesAndBones.Editor\r\n{\r\n [CustomEditor(typeof(Skin2D))]\r\n public class Skin2DEditor : UnityEditor.Editor\r\n {\r\n private Skin2D skin;\r\n\r\n private float baseSelectDistance = 0.1f;\r\n private float changedBaseSelectDistance = 0.1f;\r\n private int selectedIndex = -1;\r\n private Color handleColor = Color.green;\r\n\r\n private void OnEnable()\r\n {\r\n skin = (Skin2D)target;\r\n }\r\n\r\n public override void OnInspectorGUI()\r\n {\r\n DrawDefaultInspector();\r\n\r\n EditorGUILayout.Separator();\r\n\r\n if (GUILayout.Button(\"Toggle Mesh Outline\"))\r\n {\r\n Skin2D.showMeshOutline = !Skin2D.showMeshOutline;\r\n }\r\n\r\n EditorGUILayout.Separator();\r\n\r\n if (skin.GetComponent().sharedMesh != null && GUILayout.Button(\"Save as Prefab\"))\r\n {\r\n skin.SaveAsPrefab();\r\n }\r\n\r\n EditorGUILayout.Separator();\r\n\r\n if (skin.GetComponent().sharedMesh != null && GUILayout.Button(\"Recalculate Bone Weights\"))\r\n {\r\n skin.RecalculateBoneWeights();\r\n }\r\n\r\n EditorGUILayout.Separator();\r\n handleColor = EditorGUILayout.ColorField(\"Handle Color\", handleColor);\r\n changedBaseSelectDistance = EditorGUILayout.Slider(\"Handle Size\", baseSelectDistance, 0, 1);\r\n if (baseSelectDistance != changedBaseSelectDistance)\r\n {\r\n baseSelectDistance = changedBaseSelectDistance;\r\n EditorUtility.SetDirty(this);\r\n SceneView.RepaintAll();\r\n }\r\n\r\n if (skin.GetComponent().sharedMesh != null && GUILayout.Button(\"Create Control Points\"))\r\n {\r\n skin.CreateControlPoints(skin.GetComponent());\r\n }\r\n\r\n if (skin.GetComponent().sharedMesh != null && GUILayout.Button(\"Reset Control Points\"))\r\n {\r\n skin.ResetControlPointPositions();\r\n }\r\n\r\n if (skin.points != null && skin.controlPoints != null && skin.controlPoints.Length > 0\r\n && selectedIndex != -1 && GUILayout.Button(\"Reset Selected Control Point\"))\r\n {\r\n if (skin.controlPoints[selectedIndex].originalPosition != skin.GetComponent().sharedMesh.vertices[selectedIndex])\r\n {\r\n skin.controlPoints[selectedIndex].originalPosition = skin.GetComponent().sharedMesh.vertices[selectedIndex];\r\n }\r\n skin.controlPoints[selectedIndex].ResetPosition();\r\n skin.points.SetPoint(skin.controlPoints[selectedIndex]);\r\n }\r\n\r\n if (GUILayout.Button(\"Remove Control Points\"))\r\n {\r\n skin.RemoveControlPoints();\r\n }\r\n\r\n 
EditorGUILayout.Separator();\r\n\r\n if (skin.GetComponent().sharedMesh != null && GUILayout.Button(\"Generate Mesh Asset\"))\r\n {\r\n#if UNITY_EDITOR\r\n // Check if the Meshes directory exists, if not, create it.\r\n if (!Directory.Exists(\"Assets/Meshes\"))\r\n {\r\n AssetDatabase.CreateFolder(\"Assets\", \"Meshes\");\r\n AssetDatabase.Refresh();\r\n }\r\n Mesh mesh = new Mesh();\r\n mesh.name = skin.GetComponent().sharedMesh.name.Replace(\".SkinnedMesh\", \".Mesh\"); ;\r\n mesh.vertices = skin.GetComponent().sharedMesh.vertices;\r\n mesh.triangles = skin.GetComponent().sharedMesh.triangles;\r\n mesh.normals = skin.GetComponent().sharedMesh.normals;\r\n mesh.uv = skin.GetComponent().sharedMesh.uv;\r\n mesh.uv2 = skin.GetComponent().sharedMesh.uv2;\r\n mesh.bounds = skin.GetComponent().sharedMesh.bounds;\r\n ScriptableObjectUtility.CreateAsset(mesh, \"Meshes/\" + skin.gameObject.name + \".Mesh\");\r\n#endif\r\n }\r\n\r\n if (skin.GetComponent().sharedMaterial != null && GUILayout.Button(\"Generate Material Asset\"))\r\n {\r\n#if UNITY_EDITOR\r\n Material material = new Material(skin.GetComponent().sharedMaterial);\r\n material.CopyPropertiesFromMaterial(skin.GetComponent().sharedMaterial);\r\n skin.GetComponent().sharedMaterial = material;\r\n if (!Directory.Exists(\"Assets/Materials\"))\r\n {\r\n AssetDatabase.CreateFolder(\"Assets\", \"Materials\");\r\n AssetDatabase.Refresh();\r\n }\r\n AssetDatabase.CreateAsset(material, \"Assets/Materials/\" + material.mainTexture.name + \".mat\");\r\n Debug.Log(\"Created material \" + material.mainTexture.name + \" for \" + skin.gameObject.name);\r\n#endif\r\n }\r\n }\r\n\r\n private void OnSceneGUI()\r\n {\r\n if (skin != null && skin.GetComponent().sharedMesh != null\r\n && skin.controlPoints != null && skin.controlPoints.Length > 0 && skin.points != null)\r\n {\r\n Event e = Event.current;\r\n\r\n Handles.matrix = skin.transform.localToWorldMatrix;\r\n EditorGUI.BeginChangeCheck();\r\n Ray r = HandleUtility.GUIPointToWorldRay(e.mousePosition);\r\n Vector2 mousePos = r.origin;\r\n float selectDistance = HandleUtility.GetHandleSize(mousePos) * baseSelectDistance;\r\n\r\n #region Draw vertex handles\r\n\r\n Handles.color = handleColor;\r\n\r\n for (int i = 0; i < skin.controlPoints.Length; i++)\r\n {\r\n if (Handles.Button(skin.points.GetPoint(skin.controlPoints[i]), Quaternion.identity, selectDistance, selectDistance, Handles.CircleCap))\r\n {\r\n selectedIndex = i;\r\n }\r\n if (selectedIndex == i)\r\n {\r\n EditorGUI.BeginChangeCheck();\r\n skin.controlPoints[i].position = Handles.DoPositionHandle(skin.points.GetPoint(skin.controlPoints[i]), Quaternion.identity);\r\n if (EditorGUI.EndChangeCheck())\r\n {\r\n skin.points.SetPoint(skin.controlPoints[i]);\r\n Undo.RecordObject(skin, \"Changed Control Point\");\r\n Undo.RecordObject(skin.points, \"Changed Control Point\");\r\n EditorUtility.SetDirty(this);\r\n }\r\n }\r\n }\r\n\r\n #endregion Draw vertex handles\r\n }\r\n }\r\n }\r\n}"},"repo_name":{"kind":"string","value":"Apelsin/UnitySpritesAndBones"},"path":{"kind":"string","value":"Assets/SpritesAndBones/Scripts/Editor/Skin2DEditor.cs"},"language":{"kind":"string","value":"C#"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":8335,"string":"8,335"}}},{"rowIdx":1760,"cells":{"code":{"kind":"string","value":"package com.thilko.springdoc;\n\n@SuppressWarnings(\"all\")\npublic class CredentialsCode {\n Integer age;\n double anotherValue;\n\n public Integer getAge() {\n return age;\n }\n\n public void setAge(Integer age) 
{\n this.age = age;\n }\n\n public double getAnotherValue() {\n return anotherValue;\n }\n\n public void setAnotherValue(double anotherValue) {\n this.anotherValue = anotherValue;\n }\n}\n"},"repo_name":{"kind":"string","value":"thilko/gradle-springdoc-plugin"},"path":{"kind":"string","value":"src/test/java/com/thilko/springdoc/CredentialsCode.java"},"language":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":435,"string":"435"}}},{"rowIdx":1761,"cells":{"code":{"kind":"string","value":"title = Yii::t('app', 'Datasources');\n$this->params['breadcrumbs'][] = $this->title;\n?>\n
\n\n

title) ?>

\n render('/_menus') ?> \n render('_search', ['model' => $searchModel]); ?>\n\n

\n '.Yii::t('app', 'Create Datasource'), ['create'], ['class' => 'btn btn-success']) ?>\n

\n $dataProvider,\n 'filterModel' => $searchModel,\n 'columns' => [\n ['class' => 'yii\\grid\\SerialColumn'],\n 'name',\n // 'created_at',\n // 'updated_at',\n // 'created_by',\n 'updated_by:dateTime',\n\n [\n 'class' => 'yii\\grid\\ActionColumn',\n 'options'=>['style'=>'width:150px;'],\n 'buttonOptions'=>['class'=>'btn btn-default'],\n 'template'=>'
{view} {update} {delete}
',\n ]\n ],\n ]); ?>\n
\n"},"repo_name":{"kind":"string","value":"Yii2Learning/yii2-chart-builder"},"path":{"kind":"string","value":"views/datasource/index.php"},"language":{"kind":"string","value":"PHP"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":1401,"string":"1,401"}}},{"rowIdx":1762,"cells":{"code":{"kind":"string","value":"module Web::Controllers::Books\n class Create\n include Web::Action\n\n expose :book\n\n params do\n param :book do\n param :title, presence: true\n param :author, presence: true\n end\n end\n\n def call(params)\n if params.valid?\n @book = BookRepository.create(Book.new(params[:book]))\n\n redirect_to routes.books_path\n end\n end\n end\nend\n"},"repo_name":{"kind":"string","value":"matiasleidemer/lotus-bookshelf"},"path":{"kind":"string","value":"apps/web/controllers/books/create.rb"},"language":{"kind":"string","value":"Ruby"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":393,"string":"393"}}},{"rowIdx":1763,"cells":{"code":{"kind":"string","value":";idta.asm sets up all the intterupt entry points\nextern default_handler\nextern idt_ftoi\n\n;error interrupt entry point, we need to only push the error code details to stack\n%macro error_interrupt 1\nglobal interrupt_handler_%1\ninterrupt_handler_%1:\n\tpush \tdword %1\n\tjmp \tcommon_handler\n%endmacro\n\n;regular interrupt entry point, need to push interrupt number and other data\n%macro regular_interrupt 1\nglobal interrupt_handler_%1\ninterrupt_handler_%1:\n\tpush \tdword 0\n\tpush \tdword %1\n\tjmp \tcommon_handler\n%endmacro\n\n;common handler for all interrupts, saves all necessary stack data and calls our c intterupt handler\ncommon_handler:\n push dword ds\n push dword es\n push dword fs\n push dword gs\n\tpusha\n\tcall default_handler\n\tpopa\n pop dword gs\n pop dword fs\n pop dword es\n pop dword ds\n \n\tadd\tesp, 8\n\tiret\n\nregular_interrupt 0\nregular_interrupt 1\t\nregular_interrupt 2\nregular_interrupt 3\nregular_interrupt 4\nregular_interrupt 5\nregular_interrupt 6\nregular_interrupt 7\nerror_interrupt 8\nregular_interrupt 9\nerror_interrupt 10\nerror_interrupt 11\nerror_interrupt 12\nerror_interrupt 13\nerror_interrupt 14\nregular_interrupt 15\nregular_interrupt 16\nerror_interrupt 17\n%assign i 18\n%rep 12\n\tregular_interrupt i\n%assign i i+1\n%endrep\nerror_interrupt 30\n%assign i 31\n%rep 225\n\tregular_interrupt i\n%assign i i+1\n%endrep\n\n;interrupt setup, adds all of out interrupt handlers to the idt\n\tglobal idtsetup\nidtsetup:\n\t%assign i 0\n\t%rep 256\n\t\tpush interrupt_handler_%[i]\n\t\tpush i\n\t\tcall idt_ftoi\n\t\tadd esp, 8\n\t%assign i i+1\n\t%endrep\n\tret\n"},"repo_name":{"kind":"string","value":"MalcolmLorber/kernel"},"path":{"kind":"string","value":"src/idta.asm"},"language":{"kind":"string","value":"Assembly"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":1579,"string":"1,579"}}},{"rowIdx":1764,"cells":{"code":{"kind":"string","value":"// @flow\n\n(require('../../lib/git'): any).rebaseRepoMaster = jest.fn();\n\nimport {\n _clearCustomCacheDir as clearCustomCacheDir,\n _setCustomCacheDir as setCustomCacheDir,\n} from '../../lib/cacheRepoUtils';\n\nimport {copyDir, mkdirp} from '../../lib/fileUtils';\n\nimport {parseDirString as parseFlowDirString} from '../../lib/flowVersion';\n\nimport {\n add as gitAdd,\n commit as gitCommit,\n init as gitInit,\n setLocalConfig as gitConfig,\n} from '../../lib/git';\n\nimport {fs, path, child_process} from '../../lib/node';\n\nimport {getNpmLibDefs} from 
'../../lib/npm/npmLibDefs';\n\nimport {testProject} from '../../lib/TEST_UTILS';\n\nimport {\n _determineFlowVersion as determineFlowVersion,\n _installNpmLibDefs as installNpmLibDefs,\n _installNpmLibDef as installNpmLibDef,\n run,\n} from '../install';\n\nconst BASE_FIXTURE_ROOT = path.join(__dirname, '__install-fixtures__');\n\nfunction _mock(mockFn) {\n return ((mockFn: any): JestMockFn<*, *>);\n}\n\nasync function touchFile(filePath) {\n await fs.close(await fs.open(filePath, 'w'));\n}\n\nasync function writePkgJson(filePath, pkgJson) {\n await fs.writeJson(filePath, pkgJson);\n}\n\ndescribe('install (command)', () => {\n describe('determineFlowVersion', () => {\n it('infers version from path if arg not passed', () => {\n return testProject(async ROOT_DIR => {\n const ARBITRARY_PATH = path.join(ROOT_DIR, 'some', 'arbitrary', 'path');\n await Promise.all([\n mkdirp(ARBITRARY_PATH),\n touchFile(path.join(ROOT_DIR, '.flowconfig')),\n writePkgJson(path.join(ROOT_DIR, 'package.json'), {\n name: 'test',\n devDependencies: {\n 'flow-bin': '^0.40.0',\n },\n }),\n ]);\n\n const flowVer = await determineFlowVersion(ARBITRARY_PATH);\n expect(flowVer).toEqual({\n kind: 'specific',\n ver: {\n major: 0,\n minor: 40,\n patch: 0,\n prerel: null,\n },\n });\n });\n });\n\n it('uses explicitly specified version', async () => {\n const explicitVer = await determineFlowVersion('/', '0.7.0');\n expect(explicitVer).toEqual({\n kind: 'specific',\n ver: {\n major: 0,\n minor: 7,\n patch: 0,\n prerel: null,\n },\n });\n });\n\n it(\"uses 'v'-prefixed explicitly specified version\", async () => {\n const explicitVer = await determineFlowVersion('/', 'v0.7.0');\n expect(explicitVer).toEqual({\n kind: 'specific',\n ver: {\n major: 0,\n minor: 7,\n patch: 0,\n prerel: null,\n },\n });\n });\n });\n\n describe('installNpmLibDefs', () => {\n const origConsoleError = console.error;\n\n beforeEach(() => {\n (console: any).error = jest.fn();\n });\n\n afterEach(() => {\n (console: any).error = origConsoleError;\n });\n\n it('errors if unable to find a project root (.flowconfig)', () => {\n return testProject(async ROOT_DIR => {\n const result = await installNpmLibDefs({\n cwd: ROOT_DIR,\n flowVersion: parseFlowDirString('flow_v0.40.0'),\n explicitLibDefs: [],\n libdefDir: 'flow-typed',\n verbose: false,\n overwrite: false,\n skip: false,\n ignoreDeps: [],\n useCacheUntil: 1000 * 60,\n });\n expect(result).toBe(1);\n expect(_mock(console.error).mock.calls).toEqual([\n [\n 'Error: Unable to find a flow project in the current dir or any of ' +\n \"it's parent dirs!\\n\" +\n 'Please run this command from within a Flow project.',\n ],\n ]);\n });\n });\n\n it(\n \"errors if an explicitly specified libdef arg doesn't match npm \" +\n 'pkgver format',\n () => {\n return testProject(async ROOT_DIR => {\n await touchFile(path.join(ROOT_DIR, '.flowconfig'));\n await writePkgJson(path.join(ROOT_DIR, 'package.json'), {\n name: 'test',\n devDependencies: {\n 'flow-bin': '^0.40.0',\n },\n });\n const result = await installNpmLibDefs({\n cwd: ROOT_DIR,\n flowVersion: parseFlowDirString('flow_v0.40.0'),\n explicitLibDefs: ['INVALID'],\n libdefDir: 'flow-typed',\n verbose: false,\n overwrite: false,\n skip: false,\n ignoreDeps: [],\n useCacheUntil: 1000 * 60,\n });\n expect(result).toBe(1);\n expect(_mock(console.error).mock.calls).toEqual([\n [\n 'ERROR: Package not found from package.json.\\n' +\n 'Please specify version for the package in the format of `foo@1.2.3`',\n ],\n ]);\n });\n },\n );\n\n it('warns if 0 dependencies are 
found in package.json', () => {\n return testProject(async ROOT_DIR => {\n await Promise.all([\n touchFile(path.join(ROOT_DIR, '.flowconfig')),\n writePkgJson(path.join(ROOT_DIR, 'package.json'), {\n name: 'test',\n }),\n ]);\n const result = await installNpmLibDefs({\n cwd: ROOT_DIR,\n flowVersion: parseFlowDirString('flow_v0.40.0'),\n explicitLibDefs: [],\n libdefDir: 'flow-typed',\n verbose: false,\n overwrite: false,\n skip: false,\n ignoreDeps: [],\n useCacheUntil: 1000 * 60,\n });\n expect(result).toBe(0);\n expect(_mock(console.error).mock.calls).toEqual([\n [\"No dependencies were found in this project's package.json!\"],\n ]);\n });\n });\n });\n\n describe('installNpmLibDef', () => {\n const FIXTURE_ROOT = path.join(BASE_FIXTURE_ROOT, 'installNpmLibDef');\n\n const FIXTURE_FAKE_CACHE_REPO_DIR = path.join(\n FIXTURE_ROOT,\n 'fakeCacheRepo',\n );\n\n const origConsoleLog = console.log;\n beforeEach(() => {\n (console: any).log = jest.fn();\n });\n\n afterEach(() => {\n (console: any).log = origConsoleLog;\n });\n\n it('installs scoped libdefs within a scoped directory', () => {\n return testProject(async ROOT_DIR => {\n const FAKE_CACHE_DIR = path.join(ROOT_DIR, 'fakeCache');\n const FAKE_CACHE_REPO_DIR = path.join(FAKE_CACHE_DIR, 'repo');\n const FLOWPROJ_DIR = path.join(ROOT_DIR, 'flowProj');\n const FLOWTYPED_DIR = path.join(FLOWPROJ_DIR, 'flow-typed', 'npm');\n\n await Promise.all([mkdirp(FAKE_CACHE_REPO_DIR), mkdirp(FLOWTYPED_DIR)]);\n\n await Promise.all([\n copyDir(FIXTURE_FAKE_CACHE_REPO_DIR, FAKE_CACHE_REPO_DIR),\n touchFile(path.join(FLOWPROJ_DIR, '.flowconfig')),\n writePkgJson(path.join(FLOWPROJ_DIR, 'package.json'), {\n name: 'test',\n devDependencies: {\n 'flow-bin': '^0.40.0',\n },\n }),\n ]);\n await gitInit(FAKE_CACHE_REPO_DIR),\n await gitAdd(FAKE_CACHE_REPO_DIR, 'definitions');\n await gitCommit(FAKE_CACHE_REPO_DIR, 'FIRST');\n\n setCustomCacheDir(FAKE_CACHE_DIR);\n\n const availableLibDefs = await getNpmLibDefs(\n path.join(FAKE_CACHE_REPO_DIR, 'definitions'),\n );\n\n await installNpmLibDef(availableLibDefs[0], FLOWTYPED_DIR, false);\n });\n });\n });\n\n describe('end-to-end tests', () => {\n const FIXTURE_ROOT = path.join(BASE_FIXTURE_ROOT, 'end-to-end');\n\n const FIXTURE_FAKE_CACHE_REPO_DIR = path.join(\n FIXTURE_ROOT,\n 'fakeCacheRepo',\n );\n\n const origConsoleLog = console.log;\n const origConsoleError = console.error;\n beforeEach(() => {\n (console: any).log = jest.fn();\n (console: any).error = jest.fn();\n });\n afterEach(() => {\n (console: any).log = origConsoleLog;\n (console: any).error = origConsoleError;\n });\n\n async function fakeProjectEnv(runTest) {\n return await testProject(async ROOT_DIR => {\n const FAKE_CACHE_DIR = path.join(ROOT_DIR, 'fakeCache');\n const FAKE_CACHE_REPO_DIR = path.join(FAKE_CACHE_DIR, 'repo');\n const FLOWPROJ_DIR = path.join(ROOT_DIR, 'flowProj');\n const FLOWTYPED_DIR = path.join(FLOWPROJ_DIR, 'flow-typed', 'npm');\n\n await Promise.all([mkdirp(FAKE_CACHE_REPO_DIR), mkdirp(FLOWTYPED_DIR)]);\n\n await copyDir(FIXTURE_FAKE_CACHE_REPO_DIR, FAKE_CACHE_REPO_DIR);\n\n await gitInit(FAKE_CACHE_REPO_DIR),\n await Promise.all([\n gitConfig(FAKE_CACHE_REPO_DIR, 'user.name', 'Test Author'),\n gitConfig(FAKE_CACHE_REPO_DIR, 'user.email', 'test@flow-typed.org'),\n ]);\n await gitAdd(FAKE_CACHE_REPO_DIR, 'definitions');\n await gitCommit(FAKE_CACHE_REPO_DIR, 'FIRST');\n\n setCustomCacheDir(FAKE_CACHE_DIR);\n\n const origCWD = process.cwd;\n (process: any).cwd = () => FLOWPROJ_DIR;\n try {\n await runTest(FLOWPROJ_DIR);\n } 
finally {\n (process: any).cwd = origCWD;\n clearCustomCacheDir();\n }\n });\n }\n\n it('installs available libdefs', () => {\n return fakeProjectEnv(async FLOWPROJ_DIR => {\n // Create some dependencies\n await Promise.all([\n touchFile(path.join(FLOWPROJ_DIR, '.flowconfig')),\n writePkgJson(path.join(FLOWPROJ_DIR, 'package.json'), {\n name: 'test',\n devDependencies: {\n 'flow-bin': '^0.43.0',\n },\n dependencies: {\n foo: '1.2.3',\n },\n }),\n mkdirp(path.join(FLOWPROJ_DIR, 'node_modules', 'foo')),\n mkdirp(path.join(FLOWPROJ_DIR, 'node_modules', 'flow-bin')),\n ]);\n\n // Run the install command\n await run({\n overwrite: false,\n verbose: false,\n skip: false,\n ignoreDeps: [],\n explicitLibDefs: [],\n });\n\n // Installs libdefs\n expect(\n await Promise.all([\n fs.exists(\n path.join(\n FLOWPROJ_DIR,\n 'flow-typed',\n 'npm',\n 'flow-bin_v0.x.x.js',\n ),\n ),\n fs.exists(\n path.join(FLOWPROJ_DIR, 'flow-typed', 'npm', 'foo_v1.x.x.js'),\n ),\n ]),\n ).toEqual([true, true]);\n\n // Signs installed libdefs\n const fooLibDefContents = await fs.readFile(\n path.join(FLOWPROJ_DIR, 'flow-typed', 'npm', 'foo_v1.x.x.js'),\n 'utf8',\n );\n expect(fooLibDefContents).toContain('// flow-typed signature: ');\n expect(fooLibDefContents).toContain('// flow-typed version: ');\n });\n });\n\n it('installs available libdefs using PnP', () => {\n return fakeProjectEnv(async FLOWPROJ_DIR => {\n // Create some dependencies\n await Promise.all([\n touchFile(path.join(FLOWPROJ_DIR, '.flowconfig')),\n writePkgJson(path.join(FLOWPROJ_DIR, 'package.json'), {\n name: 'test',\n installConfig: {\n pnp: true,\n },\n devDependencies: {\n 'flow-bin': '^0.43.0',\n },\n dependencies: {\n // Use local foo for initial install\n foo: 'file:./foo',\n },\n }),\n mkdirp(path.join(FLOWPROJ_DIR, 'foo')),\n ]);\n\n await writePkgJson(path.join(FLOWPROJ_DIR, 'foo/package.json'), {\n name: 'foo',\n version: '1.2.3',\n });\n\n // Yarn install so PnP file resolves to local foo\n await child_process.execP('yarn install', {cwd: FLOWPROJ_DIR});\n\n // Overwrite foo dep so it's like we installed from registry instead\n writePkgJson(path.join(FLOWPROJ_DIR, 'package.json'), {\n name: 'test',\n installConfig: {\n pnp: true,\n },\n devDependencies: {\n 'flow-bin': '^0.43.0',\n },\n dependencies: {\n foo: '1.2.3',\n },\n });\n\n // Run the install command\n await run({\n overwrite: false,\n verbose: false,\n skip: false,\n ignoreDeps: [],\n explicitLibDefs: [],\n });\n\n // Installs libdefs\n expect(\n await Promise.all([\n fs.exists(\n path.join(\n FLOWPROJ_DIR,\n 'flow-typed',\n 'npm',\n 'flow-bin_v0.x.x.js',\n ),\n ),\n fs.exists(\n path.join(FLOWPROJ_DIR, 'flow-typed', 'npm', 'foo_v1.x.x.js'),\n ),\n ]),\n ).toEqual([true, true]);\n\n // Signs installed libdefs\n const fooLibDefRawContents = await fs.readFile(\n path.join(FLOWPROJ_DIR, 'flow-typed', 'npm', 'foo_v1.x.x.js'),\n );\n const fooLibDefContents = fooLibDefRawContents.toString();\n expect(fooLibDefContents).toContain('// flow-typed signature: ');\n expect(fooLibDefContents).toContain('// flow-typed version: ');\n });\n });\n\n it('ignores libdefs in dev, bundled, optional or peer dependencies when flagged', () => {\n return fakeProjectEnv(async FLOWPROJ_DIR => {\n // Create some dependencies\n await Promise.all([\n touchFile(path.join(FLOWPROJ_DIR, '.flowconfig')),\n writePkgJson(path.join(FLOWPROJ_DIR, 'package.json'), {\n name: 'test',\n devDependencies: {\n foo: '1.2.3',\n },\n peerDependencies: {\n 'flow-bin': '^0.43.0',\n },\n optionalDependencies: {\n foo: '2.0.0',\n 
},\n bundledDependencies: {\n bar: '^1.6.9',\n },\n dependencies: {\n foo: '1.2.3',\n },\n }),\n mkdirp(path.join(FLOWPROJ_DIR, 'node_modules', 'foo')),\n mkdirp(path.join(FLOWPROJ_DIR, 'node_modules', 'flow-bin')),\n mkdirp(path.join(FLOWPROJ_DIR, 'node_modules', 'bar')),\n ]);\n\n // Run the install command\n await run({\n overwrite: false,\n verbose: false,\n skip: false,\n ignoreDeps: ['dev', 'optional', 'bundled'],\n explicitLibDefs: [],\n });\n\n // Installs libdefs\n expect(\n await Promise.all([\n fs.exists(\n path.join(\n FLOWPROJ_DIR,\n 'flow-typed',\n 'npm',\n 'flow-bin_v0.x.x.js',\n ),\n ),\n fs.exists(\n path.join(FLOWPROJ_DIR, 'flow-typed', 'npm', 'foo_v1.x.x.js'),\n ),\n fs.exists(\n path.join(FLOWPROJ_DIR, 'flow-typed', 'npm', 'bar_v1.x.x.js'),\n ),\n ]),\n ).toEqual([true, true, false]);\n });\n });\n\n it('stubs unavailable libdefs', () => {\n return fakeProjectEnv(async FLOWPROJ_DIR => {\n // Create some dependencies\n await Promise.all([\n touchFile(path.join(FLOWPROJ_DIR, '.flowconfig')),\n writePkgJson(path.join(FLOWPROJ_DIR, 'package.json'), {\n name: 'test',\n devDependencies: {\n 'flow-bin': '^0.43.0',\n },\n dependencies: {\n someUntypedDep: '1.2.3',\n },\n }),\n mkdirp(path.join(FLOWPROJ_DIR, 'node_modules', 'someUntypedDep')),\n mkdirp(path.join(FLOWPROJ_DIR, 'node_modules', 'flow-bin')),\n ]);\n\n // Run the install command\n await run({\n overwrite: false,\n verbose: false,\n skip: false,\n explicitLibDefs: [],\n });\n\n // Installs a stub for someUntypedDep\n expect(\n await fs.exists(\n path.join(\n FLOWPROJ_DIR,\n 'flow-typed',\n 'npm',\n 'someUntypedDep_vx.x.x.js',\n ),\n ),\n ).toBe(true);\n });\n });\n\n it(\"doesn't stub unavailable libdefs when --skip is passed\", () => {\n return fakeProjectEnv(async FLOWPROJ_DIR => {\n // Create some dependencies\n await Promise.all([\n touchFile(path.join(FLOWPROJ_DIR, '.flowconfig')),\n writePkgJson(path.join(FLOWPROJ_DIR, 'package.json'), {\n name: 'test',\n devDependencies: {\n 'flow-bin': '^0.43.0',\n },\n dependencies: {\n someUntypedDep: '1.2.3',\n },\n }),\n mkdirp(path.join(FLOWPROJ_DIR, 'node_modules', 'someUntypedDep')),\n mkdirp(path.join(FLOWPROJ_DIR, 'node_modules', 'flow-bin')),\n ]);\n\n // Run the install command\n await run({\n overwrite: false,\n verbose: false,\n skip: true,\n explicitLibDefs: [],\n });\n\n // Installs a stub for someUntypedDep\n expect(\n await fs.exists(path.join(FLOWPROJ_DIR, 'flow-typed', 'npm')),\n ).toBe(true);\n });\n });\n\n it('overwrites stubs when libdef becomes available (with --overwrite)', () => {\n return fakeProjectEnv(async FLOWPROJ_DIR => {\n // Create some dependencies\n await Promise.all([\n touchFile(path.join(FLOWPROJ_DIR, '.flowconfig')),\n writePkgJson(path.join(FLOWPROJ_DIR, 'package.json'), {\n name: 'test',\n devDependencies: {\n 'flow-bin': '^0.43.0',\n },\n dependencies: {\n foo: '1.2.3',\n },\n }),\n mkdirp(path.join(FLOWPROJ_DIR, 'node_modules', 'foo')),\n mkdirp(path.join(FLOWPROJ_DIR, 'node_modules', 'flow-bin')),\n ]);\n\n await fs.writeFile(\n path.join(FLOWPROJ_DIR, 'flow-typed', 'npm', 'foo_vx.x.x.js'),\n '',\n );\n\n // Run the install command\n await run({\n overwrite: true,\n verbose: false,\n skip: false,\n explicitLibDefs: [],\n });\n\n // Replaces the stub with the real typedef\n expect(\n await Promise.all([\n fs.exists(\n path.join(FLOWPROJ_DIR, 'flow-typed', 'npm', 'foo_vx.x.x.js'),\n ),\n fs.exists(\n path.join(FLOWPROJ_DIR, 'flow-typed', 'npm', 'foo_v1.x.x.js'),\n ),\n ]),\n ).toEqual([false, true]);\n });\n });\n\n it(\"doesn't 
overwrite tweaked libdefs (without --overwrite)\", () => {\n return fakeProjectEnv(async FLOWPROJ_DIR => {\n // Create some dependencies\n await Promise.all([\n touchFile(path.join(FLOWPROJ_DIR, '.flowconfig')),\n writePkgJson(path.join(FLOWPROJ_DIR, 'package.json'), {\n name: 'test',\n devDependencies: {\n 'flow-bin': '^0.43.0',\n },\n dependencies: {\n foo: '1.2.3',\n },\n }),\n mkdirp(path.join(FLOWPROJ_DIR, 'node_modules', 'foo')),\n mkdirp(path.join(FLOWPROJ_DIR, 'node_modules', 'flow-bin')),\n ]);\n\n // Run the install command\n await run({\n overwrite: false,\n verbose: false,\n skip: false,\n explicitLibDefs: [],\n });\n\n const libdefFilePath = path.join(\n FLOWPROJ_DIR,\n 'flow-typed',\n 'npm',\n 'foo_v1.x.x.js',\n );\n\n // Tweak the libdef for foo\n const libdefFileContent =\n (await fs.readFile(libdefFilePath, 'utf8')) + '\\n// TWEAKED!';\n await fs.writeFile(libdefFilePath, libdefFileContent);\n\n // Run install command again\n await run({\n overwrite: false,\n verbose: false,\n skip: false,\n explicitLibDefs: [],\n });\n\n // Verify that the tweaked libdef file wasn't overwritten\n expect(await fs.readFile(libdefFilePath, 'utf8')).toBe(\n libdefFileContent,\n );\n });\n });\n\n it('overwrites tweaked libdefs when --overwrite is passed', () => {\n return fakeProjectEnv(async FLOWPROJ_DIR => {\n // Create some dependencies\n await Promise.all([\n touchFile(path.join(FLOWPROJ_DIR, '.flowconfig')),\n writePkgJson(path.join(FLOWPROJ_DIR, 'package.json'), {\n name: 'test',\n devDependencies: {\n 'flow-bin': '^0.43.0',\n },\n dependencies: {\n foo: '1.2.3',\n },\n }),\n mkdirp(path.join(FLOWPROJ_DIR, 'node_modules', 'foo')),\n mkdirp(path.join(FLOWPROJ_DIR, 'node_modules', 'flow-bin')),\n ]);\n\n // Run the install command\n await run({\n overwrite: false,\n verbose: false,\n skip: false,\n explicitLibDefs: [],\n });\n\n const libdefFilePath = path.join(\n FLOWPROJ_DIR,\n 'flow-typed',\n 'npm',\n 'foo_v1.x.x.js',\n );\n\n // Tweak the libdef for foo\n const libdefFileContent = await fs.readFile(libdefFilePath, 'utf8');\n await fs.writeFile(libdefFilePath, libdefFileContent + '\\n// TWEAKED!');\n\n // Run install command again\n await run({\n overwrite: true,\n skip: false,\n verbose: false,\n explicitLibDefs: [],\n });\n\n // Verify that the tweaked libdef file wasn't overwritten\n expect(await fs.readFile(libdefFilePath, 'utf8')).toBe(\n libdefFileContent,\n );\n });\n });\n\n it('uses flow-bin defined in another package.json', () => {\n return fakeProjectEnv(async FLOWPROJ_DIR => {\n // Create some dependencies\n await Promise.all([\n touchFile(path.join(FLOWPROJ_DIR, '.flowconfig')),\n writePkgJson(path.join(FLOWPROJ_DIR, 'package.json'), {\n name: 'test',\n dependencies: {\n foo: '1.2.3',\n },\n }),\n mkdirp(path.join(FLOWPROJ_DIR, 'node_modules', 'foo')),\n writePkgJson(path.join(FLOWPROJ_DIR, '..', 'package.json'), {\n name: 'parent',\n devDependencies: {\n 'flow-bin': '^0.45.0',\n },\n }),\n mkdirp(path.join(FLOWPROJ_DIR, '..', 'node_modules', 'flow-bin')),\n ]);\n\n // Run the install command\n await run({\n overwrite: false,\n verbose: false,\n skip: false,\n packageDir: path.join(FLOWPROJ_DIR, '..'),\n explicitLibDefs: [],\n });\n\n // Installs libdef\n expect(\n await fs.exists(\n path.join(FLOWPROJ_DIR, 'flow-typed', 'npm', 'foo_v1.x.x.js'),\n ),\n ).toEqual(true);\n });\n });\n\n it('uses .flowconfig from specified root directory', () => {\n return fakeProjectEnv(async FLOWPROJ_DIR => {\n // Create some dependencies\n await Promise.all([\n 
mkdirp(path.join(FLOWPROJ_DIR, 'src')),\n writePkgJson(path.join(FLOWPROJ_DIR, 'package.json'), {\n name: 'test',\n devDependencies: {\n 'flow-bin': '^0.43.0',\n },\n dependencies: {\n foo: '1.2.3',\n },\n }),\n mkdirp(path.join(FLOWPROJ_DIR, 'node_modules', 'foo')),\n mkdirp(path.join(FLOWPROJ_DIR, 'node_modules', 'flow-bin')),\n ]);\n\n await touchFile(path.join(FLOWPROJ_DIR, 'src', '.flowconfig'));\n\n // Run the install command\n await run({\n overwrite: false,\n verbose: false,\n skip: false,\n rootDir: path.join(FLOWPROJ_DIR, 'src'),\n explicitLibDefs: [],\n });\n\n // Installs libdef\n expect(\n await fs.exists(\n path.join(\n FLOWPROJ_DIR,\n 'src',\n 'flow-typed',\n 'npm',\n 'foo_v1.x.x.js',\n ),\n ),\n ).toEqual(true);\n });\n });\n });\n});\n"},"repo_name":{"kind":"string","value":"splodingsocks/FlowTyped"},"path":{"kind":"string","value":"cli/src/commands/__tests__/install-test.js"},"language":{"kind":"string","value":"JavaScript"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":23904,"string":"23,904"}}},{"rowIdx":1765,"cells":{"code":{"kind":"string","value":"\n\n\n \n\n\n

Redirecting to ../../openssl_sys/fn.BN_exp.html...

\n \n\n"},"repo_name":{"kind":"string","value":"malept/guardhaus"},"path":{"kind":"string","value":"main/openssl_sys/bn/fn.BN_exp.html"},"language":{"kind":"string","value":"HTML"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":369,"string":"369"}}},{"rowIdx":1766,"cells":{"code":{"kind":"string","value":"import React, { Component } from 'react'\nimport PropTypes from 'prop-types'\nimport { assign } from 'lodash'\nimport autoBind from '../utils/autoBind'\n\nconst styles = {\n 'ClosedPanelWrapper': {\n height: '40px'\n },\n\n 'PanelWrapper': {\n position: 'relative'\n },\n\n 'Over': {\n border: '1px dashed white',\n overflowY: 'hidden'\n },\n\n 'PanelTitle': {\n width: '100%',\n height: '40px',\n lineHeight: '40px',\n backgroundColor: '#000',\n color: '#fff',\n paddingLeft: '10px',\n position: 'relative',\n whiteSpace: 'nowrap',\n overflowX: 'hidden',\n textOverflow: 'ellipsis',\n paddingRight: '8px',\n cursor: 'pointer',\n WebkitUserSelect: 'none',\n userSelect: 'none'\n },\n\n 'Handle': {\n cursor: '-webkit-grab',\n position: 'absolute',\n zIndex: '2',\n color: 'white',\n right: '10px',\n fontSize: '16px',\n top: '12px'\n },\n\n 'OpenPanel': {\n position: 'relative',\n zIndex: '2',\n top: '0',\n left: '0',\n padding: '7px',\n paddingTop: '5px',\n maxHeight: '30%',\n display: 'block'\n },\n\n 'ClosedPanel': {\n height: '0',\n position: 'relative',\n zIndex: '2',\n top: '-1000px',\n left: '0',\n overflow: 'hidden',\n maxHeight: '0',\n display: 'none'\n }\n}\n\nclass Panel extends Component {\n\n constructor() {\n super()\n\n this.state = {\n dragIndex: null,\n overIndex: null,\n isOver: false\n }\n\n autoBind(this, [\n 'handleTitleClick', 'handleDragStart', 'handleDragOver', 'handleDragEnter',\n 'handleDragLeave', 'handleDrop', 'handleDragEnd'\n ])\n }\n\n handleTitleClick() {\n const { index, isOpen, openPanel } = this.props\n openPanel(isOpen ? -1 : index)\n }\n\n handleDragStart(e) {\n // e.target.style.opacity = '0.4'; // this / e.target is the source node.\n e.dataTransfer.setData('index', e.target.dataset.index)\n }\n\n handleDragOver(e) {\n if (e.preventDefault) {\n e.preventDefault() // Necessary. Allows us to drop.\n }\n return false\n }\n\n handleDragEnter(e) {\n const overIndex = e.target.dataset.index\n if (e.dataTransfer.getData('index') !== overIndex) {\n // e.target.classList.add('Over') // e.target is the current hover target.\n this.setState({ isOver: true })\n }\n }\n\n handleDragLeave() {\n this.setState({ isOver: false })\n // e.target.classList.remove('Over') // e.target is previous target element.\n }\n\n handleDrop(e) {\n if (e.stopPropagation) {\n e.stopPropagation() // stops the browser from redirecting.\n }\n\n const dragIndex = e.dataTransfer.getData('index')\n const dropIndex = this.props.index.toString()\n if (dragIndex !== dropIndex) {\n this.props.reorder(dragIndex, dropIndex)\n }\n\n return false\n }\n\n handleDragEnd() {\n this.setState({ isOver: false, dragIndex: null, overIndex: null })\n }\n\n render() {\n const { isOpen, orderable } = this.props\n const { isOver } = this.state\n return (\n \n \n {this.props.header}\n {orderable && ()}\n
\n {\n isOpen &&\n (\n
\n {this.props.children}\n
\n )\n }\n
\n )\n }\n}\n\nPanel.propTypes = {\n children: PropTypes.any,\n index: PropTypes.any,\n openPanel: PropTypes.func,\n isOpen: PropTypes.any,\n header: PropTypes.any,\n orderable: PropTypes.any,\n reorder: PropTypes.func\n}\n\nPanel.defaultProps = {\n isOpen: false,\n header: '',\n orderable: false\n}\n\nexport default Panel\n"},"repo_name":{"kind":"string","value":"jcgertig/react-struct-editor"},"path":{"kind":"string","value":"src/components/Panel.js"},"language":{"kind":"string","value":"JavaScript"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":4120,"string":"4,120"}}},{"rowIdx":1767,"cells":{"code":{"kind":"string","value":"'use strict';\n\n// src\\services\\message\\hooks\\timestamp.js\n//\n// Use this hook to manipulate incoming or outgoing data.\n// For more information on hooks see: http://docs.feathersjs.com/hooks/readme.html\n\nconst defaults = {};\n\nmodule.exports = function(options) {\n\toptions = Object.assign({}, defaults, options);\n\n\treturn function(hook) {\n\t\tconst usr = hook.params.user;\n\t\tconst txt = hook.data.text;\n\t\thook.data = {\n\t\t\ttext: txt,\n\t\t\tcreatedBy: usr._id,\n\t\t\tcreatedAt: Date.now()\n\t\t}\n\t};\n};\n"},"repo_name":{"kind":"string","value":"zorqie/bfests"},"path":{"kind":"string","value":"src/services/message/hooks/timestamp.js"},"language":{"kind":"string","value":"JavaScript"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":488,"string":"488"}}},{"rowIdx":1768,"cells":{"code":{"kind":"string","value":"/*\n---------------------------------------------------------------------------\nOpen Asset Import Library (assimp)\n---------------------------------------------------------------------------\n\nCopyright (c) 2006-2021, assimp team\n\n\n\nAll rights reserved.\n\nRedistribution and use of this software in source and binary forms,\nwith or without modification, are permitted provided that the following\nconditions are met:\n\n* Redistributions of source code must retain the above\n copyright notice, this list of conditions and the\n following disclaimer.\n\n* Redistributions in binary form must reproduce the above\n copyright notice, this list of conditions and the\n following disclaimer in the documentation and/or other\n materials provided with the distribution.\n\n* Neither the name of the assimp team, nor the names of its\n contributors may be used to endorse or promote products\n derived from this software without specific prior\n written permission of the assimp team.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\nLIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\nA PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\nOWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\nSPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\nLIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\nDATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\nTHEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\nOF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n---------------------------------------------------------------------------\n*/\n\n/** @file Bitmap.h\n * @brief Defines bitmap format helper for textures\n *\n * Used for file formats which embed their textures into the model file.\n */\n#pragma once\n#ifndef AI_BITMAP_H_INC\n#define AI_BITMAP_H_INC\n\n#ifdef __GNUC__\n# pragma GCC system_header\n#endif\n\n#include \"defs.h\"\n#include <stdint.h>\n#include <cstddef>\n\nstruct aiTexture;\n\nnamespace Assimp {\n\nclass IOStream;\n\nclass ASSIMP_API Bitmap {\nprotected:\n\n    struct Header {\n        uint16_t type;\n        uint32_t size;\n        uint16_t reserved1;\n        uint16_t reserved2;\n        uint32_t offset;\n\n        // We define the struct size because sizeof(Header) might return a wrong result because of structure padding.\n        // Moreover, we must use this ugly and error-prone syntax because Visual Studio supports neither constexpr nor sizeof(name_of_field).\n        static const std::size_t header_size =\n            sizeof(uint16_t) + // type\n            sizeof(uint32_t) + // size\n            sizeof(uint16_t) + // reserved1\n            sizeof(uint16_t) + // reserved2\n            sizeof(uint32_t); // offset\n    };\n\n    struct DIB {\n        uint32_t size;\n        int32_t width;\n        int32_t height;\n        uint16_t planes;\n        uint16_t bits_per_pixel;\n        uint32_t compression;\n        uint32_t image_size;\n        int32_t x_resolution;\n        int32_t y_resolution;\n        uint32_t nb_colors;\n        uint32_t nb_important_colors;\n\n        // We define the struct size because sizeof(DIB) might return a wrong result because of structure padding.\n        // Moreover, we must use this ugly and error-prone syntax because Visual Studio supports neither constexpr nor sizeof(name_of_field).\n        static const std::size_t dib_size =\n            sizeof(uint32_t) + // size\n            sizeof(int32_t) + // width\n            sizeof(int32_t) + // height\n            sizeof(uint16_t) + // planes\n            sizeof(uint16_t) + // bits_per_pixel\n            sizeof(uint32_t) + // compression\n            sizeof(uint32_t) + // image_size\n            sizeof(int32_t) + // x_resolution\n            sizeof(int32_t) + // y_resolution\n            sizeof(uint32_t) + // nb_colors\n            sizeof(uint32_t); // nb_important_colors\n    };\n\n    static const std::size_t mBytesPerPixel = 4;\n\npublic:\n    static void Save(aiTexture* texture, IOStream* file);\n\nprotected:\n    static void WriteHeader(Header& header, IOStream* file);\n    static void WriteDIB(DIB& dib, IOStream* file);\n    static void WriteData(aiTexture* texture, IOStream* file);\n};\n\n}\n\n#endif // AI_BITMAP_H_INC\n"},"repo_name":{"kind":"string","value":"andrerogers/Enjin"},"path":{"kind":"string","value":"src/includes/assimp/Bitmap.h"},"language":{"kind":"string","value":"C"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":4360,"string":"4,360"}}},{"rowIdx":1769,"cells":{"code":{"kind":"string","value":"# LeadifyTest"},"repo_name":{"kind":"string","value":"JomoLumina/LeadifyTest"},"path":{"kind":"string","value":"README.md"},"language":{"kind":"string","value":"Markdown"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":13,"string":"13"}}},{"rowIdx":1770,"cells":{"code":{"kind":"string","value":"package 
fr.lteconsulting.pomexplorer.commands;\n\nimport fr.lteconsulting.pomexplorer.AppFactory;\nimport fr.lteconsulting.pomexplorer.Client;\nimport fr.lteconsulting.pomexplorer.Log;\n\npublic class HelpCommand\n{\n\t@Help( \"gives this message\" )\n\tpublic void main( Client client, Log log )\n\t{\n\t\tlog.html( AppFactory.get().commands().help() );\n\t}\n}\n"},"repo_name":{"kind":"string","value":"ltearno/pom-explorer"},"path":{"kind":"string","value":"pom-explorer/src/main/java/fr/lteconsulting/pomexplorer/commands/HelpCommand.java"},"language":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":342,"string":"342"}}},{"rowIdx":1771,"cells":{"code":{"kind":"string","value":"//\n// DORDoneHUD.h\n// DORDoneHUD\n//\n// Created by Pawel Bednorz on 23/09/15.\n// Copyright © 2015 Droids on Roids. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n\n@interface DORDoneHUD : NSObject\n+ (void)show:(UIView *)view message:(NSString *)messageText completion:(void (^)(void))completionBlock;\n+ (void)show:(UIView *)view message:(NSString *)messageText;\n+ (void)show:(UIView *)view;\n@end\n"},"repo_name":{"kind":"string","value":"DroidsOnRoids/DORDoneHUD"},"path":{"kind":"string","value":"Source/DORDoneHUD.h"},"language":{"kind":"string","value":"C"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":401,"string":"401"}}},{"rowIdx":1772,"cells":{"code":{"kind":"string","value":"namespace CAAssistant.Models\n{\n    public class ClientFileViewModel\n    {\n\n        public ClientFileViewModel()\n        {\n        }\n\n        public ClientFileViewModel(ClientFile clientFile)\n        {\n            Id = clientFile.Id;\n            FileNumber = clientFile.FileNumber;\n            ClientName = clientFile.ClientName;\n            ClientContactPerson = clientFile.ClientContactPerson;\n            AssociateReponsible = clientFile.AssociateReponsible;\n            CaSign = clientFile.CaSign;\n            DscExpiryDate = clientFile.DscExpiryDate;\n            FileStatus = clientFile.FileStatus;\n        }\n\n        public string Id { get; set; }\n\n        public int FileNumber { get; set; }\n\n        public string ClientName { get; set; }\n\n        public string ClientContactPerson { get; set; }\n\n        public string AssociateReponsible { get; set; }\n\n        public string CaSign { get; set; }\n\n        public string DscExpiryDate { get; set; }\n\n        public string FileStatus { get; set; }\n\n        public string UserName { get; set; }\n\n        public FileStatusModification InitialFileStatus { get; set; }\n    }\n\n}"},"repo_name":{"kind":"string","value":"vishipayyallore/CAAssitant"},"path":{"kind":"string","value":"CAAssistant/Models/ClientFileViewModel.cs"},"language":{"kind":"string","value":"C#"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":1131,"string":"1,131"}}},{"rowIdx":1773,"cells":{"code":{"kind":"string","value":"getParentID();\n\t}\n\n\n\n\t/**\n\t * Get task object\n\t *\n\t * @return\tTask\n\t */\n\tpublic function getTask() {\n\t\treturn TodoyuProjectTaskManager::getTask($this->getTaskID());\n\t}\n\n}\n\n?>"},"repo_name":{"kind":"string","value":"JoAutomation/todo-for-you"},"path":{"kind":"string","value":"ext/assets/model/TodoyuAssetsTaskAsset.class.php"},"language":{"kind":"string","value":"PHP"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":1210,"string":"1,210"}}},{"rowIdx":1774,"cells":{"code":{"kind":"string","value":"//------------------------------------------------------------------------------\n// <auto-generated>\n// This code was generated by a tool.\n//\n// Changes to this file may cause incorrect behavior and will be lost if\n// the code is regenerated.\n// </auto-generated>
\n//------------------------------------------------------------------------------\n\nnamespace NewsSystem.Web.Admin\n{\n\n\n public partial class Edit\n {\n }\n}\n"},"repo_name":{"kind":"string","value":"MystFan/TelerikAcademy"},"path":{"kind":"string","value":"ASP.NET WebForms/NewsSystem/NewsSystem.Web/Admin/Edit.aspx.designer.cs"},"language":{"kind":"string","value":"C#"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":440,"string":"440"}}},{"rowIdx":1775,"cells":{"code":{"kind":"string","value":"---\ntitle: Stylesheets and JavaScript - Fabricator\nlayout: 2-column\nsection: Documentation\n---\n\n{{#markdown}}\n\n# Stylesheets and JavaScript\n\n> How to work with CSS and JS within Fabricator\n\nFabricator comes with little opinion about how you should architect your Stylesheets and JavaScript. Each use case is different, so it's up to you to define what works best.\n\nOut of the box, you'll find a single `.scss` and `.js` file. These are the entry points for Sass compilation and Webpack respectively. It is recommended that you leverage the module importing features of each preprocessor to compile your toolkit down to a single `.css` and `.js` file. Practically speaking, you should be able to drop these two files into any application and have full access to your entire toolkit.\n\n{{/markdown}}\n"},"repo_name":{"kind":"string","value":"fbrctr/fbrctr.github.io"},"path":{"kind":"string","value":"src/views/docs/building-a-toolkit/assets.html"},"language":{"kind":"string","value":"HTML"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":797,"string":"797"}}},{"rowIdx":1776,"cells":{"code":{"kind":"string","value":"assertArrayHasKey('IdeHelper', $result);\n\t\t$this->assertArrayHasKey('Awesome', $result);\n\t\t$this->assertArrayHasKey('MyNamespace/MyPlugin', $result);\n\t\t$this->assertArrayNotHasKey('FooBar', $result);\n\n\t\tConfigure::write('IdeHelper.plugins', ['FooBar', '-MyNamespace/MyPlugin']);\n\n\t\t$result = Plugin::all();\n\t\t$this->assertArrayHasKey('FooBar', $result);\n\t\t$this->assertArrayNotHasKey('MyNamespace/MyPlugin', $result);\n\t}\n\n}\n"},"repo_name":{"kind":"string","value":"dereuromark/cakephp-ide-helper"},"path":{"kind":"string","value":"tests/TestCase/Utility/PluginTest.php"},"language":{"kind":"string","value":"PHP"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":953,"string":"953"}}},{"rowIdx":1777,"cells":{"code":{"kind":"string","value":"# Hubot: hubot-loggly-slack\n\nA hubot script to post alerts from Loggly into a Slack room as an attachment.\nAn attachment has additional formatting options.\n\nSee [`src/loggly-slack.coffee`](src/loggly-slack.coffee) for documentation.\n\n# Installation\n\n npm install hubot-loggly-slack\n\n # Add \"hubot-loggly-slack\" to external-scripts.json\n\n# Other hubot slack modules\n\nhttps://github.com/spanishdict/hubot-awssns-slack\n\nhttps://github.com/spanishdict/hubot-loggly-slack\n\nhttps://github.com/spanishdict/hubot-scoutapp-slack\n"},"repo_name":{"kind":"string","value":"spanishdict/hubot-loggly-slack"},"path":{"kind":"string","value":"README.md"},"language":{"kind":"string","value":"Markdown"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":526,"string":"526"}}},{"rowIdx":1778,"cells":{"code":{"kind":"string","value":".\n *\n * Portions created by the Initial Developer are\n * Copyright (C) 2014 Tarmo Alexander Sundström \n *\n * All Rights Reserved.\n *\n * Contributor(s):\n *\n * Permission is hereby granted, free of 
charge, to any person obtaining a\n * copy of this software and associated documentation files (the \"Software\"),\n * to deal in the Software without restriction, including without limitation\n * the rights to use, copy, modify, merge, publish, distribute, sublicense,\n * and/or sell copies of the Software, and to permit persons to whom the\n * Software is furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included\n * in all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL\n * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\nnamespace Webvaloa;\n\nuse Libvaloa\\Db;\nuse RuntimeException;\n\n/**\n * Manage and run plugins.\n */\nclass Plugin\n{\n private $db;\n private $plugins;\n private $runnablePlugins;\n private $plugin;\n\n // Objects that plugins can access\n public $_properties;\n public $ui;\n public $controller;\n public $request;\n public $view;\n public $xhtml;\n\n public static $properties = array(\n // Vendor tag\n 'vendor' => 'ValoaApplication',\n\n // Events\n 'events' => array(\n 'onAfterFrontControllerInit',\n 'onBeforeController',\n 'onAfterController',\n 'onBeforeRender',\n 'onAfterRender',\n ),\n\n // Skip plugins in these controllers\n 'skipControllers' => array(\n 'Setup',\n ),\n );\n\n public function __construct($plugin = false)\n {\n $this->plugin = $plugin;\n\n $this->event = false;\n $this->plugins = false;\n $this->runnablePlugins = false;\n\n // Plugins can access and modify these\n $this->_properties = false;\n $this->ui = false;\n $this->controller = false;\n $this->request = false;\n $this->view = false;\n $this->xhtml = false;\n\n try {\n $this->db = \\Webvaloa\\Webvaloa::DBConnection();\n } catch (Exception $e) {\n }\n }\n\n public function setEvent($e)\n {\n if (in_array($e, self::$properties['events'])) {\n $this->event = $e;\n }\n }\n\n public function plugins()\n {\n if (!method_exists($this->db, 'prepare')) {\n // Just bail out\n return false;\n }\n\n if (method_exists($this->request, 'getMainController') && (in_array($this->request->getMainController(), self::$properties['skipControllers']))) {\n return false;\n }\n\n $query = '\n SELECT id, plugin, system_plugin\n FROM plugin\n WHERE blocked = 0\n ORDER BY ordering ASC';\n\n try {\n $stmt = $this->db->prepare($query);\n $stmt->execute();\n $this->plugins = $stmt->fetchAll();\n\n return $this->plugins;\n } catch (PDOException $e) {\n }\n }\n\n public function pluginExists($name)\n {\n $name = trim($name);\n\n foreach ($this->plugins as $k => $plugin) {\n if ($plugin->plugin == $name) {\n return true;\n }\n }\n\n return false;\n }\n\n public function hasRunnablePlugins()\n {\n // Return runnable plugins if we already gathered them\n if ($this->runnablePlugins) {\n return $this->runnablePlugins;\n }\n\n if (!$this->request) {\n throw new RuntimeException('Instance of request is required');\n }\n\n if (in_array($this->request->getMainController(), self::$properties['skipControllers'])) {\n return false;\n }\n\n // Load plugins\n if (!$this->plugins) {\n $this->plugins();\n }\n\n if 
(!is_array($this->plugins)) {\n return false;\n }\n\n $controller = $this->request->getMainController();\n\n // Look for executable plugins\n foreach ($this->plugins as $k => $plugin) {\n if ($controller && strpos($plugin->plugin, $controller) === false\n && strpos($plugin->plugin, 'Plugin') === false) {\n continue;\n }\n\n $this->runnablePlugins[] = $plugin;\n }\n\n return (bool) ($this->runnablePlugins && !empty($this->runnablePlugins)) ? $this->runnablePlugins : false;\n }\n\n public function runPlugins()\n {\n if (!$this->runnablePlugins || empty($this->runnablePlugins)) {\n return false;\n }\n\n $e = $this->event;\n\n foreach ($this->runnablePlugins as $k => $v) {\n $p = '\\\\'.self::$properties['vendor'].'\\Plugins\\\\'.$v->plugin.'Plugin';\n $plugin = new $p();\n\n $plugin->view = &$this->view;\n $plugin->ui = &$this->ui;\n $plugin->request = &$this->request;\n $plugin->controller = &$this->controller;\n $plugin->xhtml = &$this->xhtml;\n $plugin->_properties = &$this->_properties;\n\n if (method_exists($plugin, $e)) {\n $plugin->{$e}();\n }\n }\n }\n\n public static function getPluginStatus($pluginID)\n {\n $query = '\n SELECT blocked\n FROM plugin\n WHERE system_plugin = 0\n AND id = ?';\n\n try {\n $db = \\Webvaloa\\Webvaloa::DBConnection();\n\n $stmt = $db->prepare($query);\n $stmt->set((int) $pluginID);\n $stmt->execute();\n\n $row = $stmt->fetch();\n\n if (isset($row->blocked)) {\n return $row->blocked;\n }\n\n return false;\n } catch (PDOException $e) {\n }\n }\n\n public static function setPluginStatus($pluginID, $status = 0)\n {\n $query = '\n UPDATE plugin\n SET blocked = ?\n WHERE id = ?';\n\n try {\n $db = \\Webvaloa\\Webvaloa::DBConnection();\n\n $stmt = $db->prepare($query);\n $stmt->set((int) $status);\n $stmt->set((int) $pluginID);\n $stmt->execute();\n } catch (PDOException $e) {\n }\n }\n\n public static function setPluginOrder($pluginID, $ordering = 0)\n {\n $query = '\n UPDATE plugin\n SET ordering = ?\n WHERE id = ?';\n\n try {\n $db = \\Webvaloa\\Webvaloa::DBConnection();\n\n $stmt = $db->prepare($query);\n $stmt->set((int) $ordering);\n $stmt->set((int) $pluginID);\n $stmt->execute();\n } catch (PDOException $e) {\n }\n }\n\n public function install()\n {\n if (!$this->plugin) {\n return false;\n }\n\n $installable = $this->discover();\n\n if (!in_array($this->plugin, $installable)) {\n return false;\n }\n\n $db = \\Webvaloa\\Webvaloa::DBConnection();\n\n // Install plugin\n $object = new Db\\Object('plugin', $db);\n $object->plugin = $this->plugin;\n $object->system_plugin = 0;\n $object->blocked = 0;\n $object->ordering = 1;\n\n $id = $object->save();\n\n return $id;\n }\n\n public function uninstall()\n {\n if (!$this->plugin) {\n return false;\n }\n\n $db = \\Webvaloa\\Webvaloa::DBConnection();\n\n $query = '\n DELETE FROM plugin\n WHERE system_plugin = 0\n AND plugin = ?';\n\n $stmt = $db->prepare($query);\n\n try {\n $stmt->set($this->plugin);\n $stmt->execute();\n\n return true;\n } catch (Exception $e) {\n }\n\n return false;\n }\n\n public function discover()\n {\n // Installed plugins\n $tmp = $this->plugins();\n\n foreach ($tmp as $v => $plugin) {\n $plugins[] = $plugin->plugin;\n }\n\n // Discovery paths\n $paths[] = LIBVALOA_INSTALLPATH.DIRECTORY_SEPARATOR.self::$properties['vendor'].DIRECTORY_SEPARATOR.'Plugins';\n $paths[] = LIBVALOA_EXTENSIONSPATH.DIRECTORY_SEPARATOR.self::$properties['vendor'].DIRECTORY_SEPARATOR.'Plugins';\n\n $skip = array(\n '.',\n '..',\n );\n\n $plugins = array_merge($plugins, $skip);\n\n // Look for new plugins\n foreach 
($paths as $path) {\n            if ($handle = opendir($path)) {\n                while (false !== ($entry = readdir($handle))) {\n                    if ($entry == '.' || $entry == '..') {\n                        continue;\n                    }\n\n                    if (substr($entry, -3) != 'php') {\n                        continue;\n                    }\n\n                    $pluginName = str_replace('Plugin.php', '', $entry);\n\n                    if (!isset($installablePlugins)) {\n                        $installablePlugins = array();\n                    }\n\n                    if (!in_array($pluginName, $plugins) && !in_array($pluginName, $installablePlugins)) {\n                        $installablePlugins[] = $pluginName;\n                    }\n                }\n\n                closedir($handle);\n            }\n        }\n\n        if (isset($installablePlugins)) {\n            return $installablePlugins;\n        }\n\n        return array();\n    }\n}\n"},"repo_name":{"kind":"string","value":"lahdekorpi/webvaloa"},"path":{"kind":"string","value":"vendor/Webvaloa/Plugin.php"},"language":{"kind":"string","value":"PHP"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":9863,"string":"9,863"}}},{"rowIdx":1779,"cells":{"code":{"kind":"string","value":"const electron = window.require('electron');\nconst events = window.require('events');\n\nconst {\n  ipcRenderer\n} = electron;\n\nconst {\n  EventEmitter\n} = events;\n\nclass Emitter extends EventEmitter {}\nwindow.Events = new Emitter();\n\nmodule.exports = () => {\n  let settings = window.localStorage.getItem('settings');\n  if (settings === null) {\n    const defaultSettings = {\n      general: {\n        launch: true,\n        clipboard: true\n      },\n      images: {\n        copy: false,\n        delete: true\n      },\n      notifications: {\n        enabled: true\n      }\n    };\n    window.localStorage.setItem('settings', JSON.stringify(defaultSettings));\n    settings = JSON.stringify(defaultSettings); // keep a JSON string so JSON.parse below succeeds\n  }\n  ipcRenderer.send('settings', JSON.parse(settings));\n};\n"},"repo_name":{"kind":"string","value":"vevix/focus"},"path":{"kind":"string","value":"app/js/init.js"},"language":{"kind":"string","value":"JavaScript"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":740,"string":"740"}}},{"rowIdx":1780,"cells":{"code":{"kind":"string","value":"---\ntitle: 'Production applications updated 9.1.2018 10:05 - 10:54'\nlang: en\nref: 2018-01-09-release\nimage: \npublished: true\ncategories: en News\ntraffictypes:\n - Road\ntags:\n - APIs\n - Admin\n---\n\nDigitraffic production applications have been updated.\n\nChangelog:\n\nTIE\n- DPO-336 - The LAM binary data stream splits into two in the LOTJU 2.5 version\n - Does not affect the data format. 
Data from the real-time stations is now fresher.\n- DPO-399 - CameraStationsStatusMetadataUpdateJob does not handle obsolete data correctly\n\nWe apologize for any inconvenience.\n"},"repo_name":{"kind":"string","value":"lapintom/digitraffic"},"path":{"kind":"string","value":"_posts/2018-01-09-release-en.md"},"language":{"kind":"string","value":"Markdown"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":549,"string":"549"}}},{"rowIdx":1781,"cells":{"code":{"kind":"string","value":"using System;\nusing System.Collections.Generic;\nusing System.Linq;\nusing BohFoundation.ApplicantsRepository.Repositories.Implementations;\nusing BohFoundation.AzureStorage.TableStorage.Implementations.Essay.Entities;\nusing BohFoundation.AzureStorage.TableStorage.Interfaces.Essay;\nusing BohFoundation.AzureStorage.TableStorage.Interfaces.Essay.Helpers;\nusing BohFoundation.Domain.Dtos.Applicant.Essay;\nusing BohFoundation.Domain.Dtos.Applicant.Notifications;\nusing BohFoundation.Domain.Dtos.Common.AzureQueuryObjects;\nusing BohFoundation.Domain.EntityFrameworkModels.Applicants;\nusing BohFoundation.Domain.EntityFrameworkModels.Common;\nusing BohFoundation.Domain.EntityFrameworkModels.Persons;\nusing BohFoundation.EntityFrameworkBaseClass;\nusing BohFoundation.TestHelpers;\nusing EntityFramework.Extensions;\nusing FakeItEasy;\nusing Microsoft.VisualStudio.TestTools.UnitTesting;\n\nnamespace BohFoundation.ApplicantsRepository.Tests.IntegrationTests\n{\n    [TestClass]\n    public class ApplicantsEssayRepositoryIntegrationTests\n    {\n        private static IEssayRowKeyGenerator _rowKeyGenerator;\n        private static IAzureEssayRepository _azureAzureEssayRepository;\n        private static ApplicantsEssayRepository _applicantsEssayRepository;\n        private static ApplicantsesNotificationRepository _applicantsesNotification;\n\n        [ClassInitialize]\n        public static void InitializeClass(TestContext ctx)\n        {\n            Setup();\n            FirstTestOfNotifications();\n            FirstUpsert();\n            SecondUpsert();\n            SecondTestOfNotifications();\n        }\n\n        #region SettingUp\n\n        private static void Setup()\n        {\n            TestHelpersCommonFields.InitializeFields();\n            TestHelpersCommonFakes.InitializeFakes();\n\n            ApplicantsGuid = Guid.NewGuid();\n            Prompt = \"prompt\" + ApplicantsGuid;\n            TitleOfEssay = \"title\" + ApplicantsGuid;\n\n            _azureAzureEssayRepository = A.Fake<IAzureEssayRepository>();\n            _rowKeyGenerator = A.Fake<IEssayRowKeyGenerator>();\n\n            CreateEssayTopicAndApplicant();\n\n            SetupFakes();\n\n            _applicantsesNotification = new ApplicantsesNotificationRepository(TestHelpersCommonFields.DatabaseName,\n                TestHelpersCommonFakes.ClaimsInformationGetters, TestHelpersCommonFakes.DeadlineUtilities);\n            _applicantsEssayRepository = new ApplicantsEssayRepository(TestHelpersCommonFields.DatabaseName,\n                TestHelpersCommonFakes.ClaimsInformationGetters, _azureAzureEssayRepository, _rowKeyGenerator);\n\n        }\n\n        private static void CreateEssayTopicAndApplicant()\n        {\n            var random = new Random();\n            GraduatingYear = random.Next();\n\n            var subject = new EssayTopic\n            {\n                EssayPrompt = Prompt,\n                TitleOfEssay = TitleOfEssay,\n                RevisionDateTime = DateTime.UtcNow\n            };\n\n            var subject2 = new EssayTopic\n            {\n                EssayPrompt = Prompt + 2,\n                TitleOfEssay = TitleOfEssay + 2,\n                RevisionDateTime = DateTime.UtcNow\n            };\n\n            var subject3 = new EssayTopic\n            {\n                EssayPrompt = \"SHOULD NOT SHOW UP IN LIST\",\n                TitleOfEssay = \"REALLY SHOULDN't SHOW up\",\n                RevisionDateTime = DateTime.UtcNow,\n            };\n\n            var graduatingYear = new GraduatingClass\n            {\n                GraduatingYear = GraduatingYear,\n                EssayTopics = new List<EssayTopic> { subject, subject2 }\n            };\n\n            var applicant = new 
Applicant\n {\n Person = new Person { Guid = ApplicantsGuid, DateCreated = DateTime.UtcNow },\n ApplicantPersonalInformation =\n new ApplicantPersonalInformation\n {\n GraduatingClass = graduatingYear,\n Birthdate = DateTime.UtcNow,\n LastUpdated = DateTime.UtcNow\n }\n };\n\n using (var context = GetRootContext())\n {\n context.EssayTopics.Add(subject3);\n context.GraduatingClasses.Add(graduatingYear);\n context.Applicants.Add(applicant);\n context.EssayTopics.Add(subject);\n context.SaveChanges();\n\n EssayTopicId = context.EssayTopics.First(topic => topic.EssayPrompt == Prompt).Id;\n EssayTopicId2 = context.EssayTopics.First(topic => topic.EssayPrompt == Prompt + 2).Id;\n }\n }\n\n private static int EssayTopicId2 { get; set; }\n\n private static void SetupFakes()\n {\n RowKey = \"THISISTHEROWKEYFORTHEAPPLICANT\";\n A.CallTo(() => TestHelpersCommonFakes.ClaimsInformationGetters.GetApplicantsGraduatingYear())\n .Returns(GraduatingYear);\n A.CallTo(() => TestHelpersCommonFakes.ClaimsInformationGetters.GetUsersGuid()).Returns(ApplicantsGuid);\n A.CallTo(() => _rowKeyGenerator.CreateRowKeyForEssay(ApplicantsGuid, EssayTopicId)).Returns(RowKey);\n }\n\n private static string RowKey { get; set; }\n private static int GraduatingYear { get; set; }\n private static string TitleOfEssay { get; set; }\n private static string Prompt { get; set; }\n\n private static Guid ApplicantsGuid { get; set; }\n\n #endregion\n\n #region FirstNotifications\n\n private static void FirstTestOfNotifications()\n {\n FirstNotificationResult = _applicantsesNotification.GetApplicantNotifications();\n }\n\n private static ApplicantNotificationsDto FirstNotificationResult { get; set; }\n\n [TestMethod, TestCategory(\"Integration\")]\n public void ApplicantsNotificationRepository_FirstGetNotifications_Should_Have_Two_EssayTopics()\n {\n Assert.AreEqual(2, FirstNotificationResult.EssayNotifications.Count);\n }\n\n [TestMethod, TestCategory(\"Integration\")]\n public void ApplicantsNotificationRepository_FirstGetNotifications_EssayTopics_Should_Have_No_LastUpdated()\n {\n foreach (var essayTopic in FirstNotificationResult.EssayNotifications)\n {\n Assert.IsNull(essayTopic.RevisionDateTime);\n }\n }\n\n [TestMethod, TestCategory(\"Integration\")]\n public void ApplicantsNotificationRepository_FirstGetNotifications_EssayTopics_Should_Have_Right_EssayTopic()\n {\n foreach (var essayTopic in FirstNotificationResult.EssayNotifications)\n {\n if (essayTopic.EssayPrompt == Prompt)\n {\n Assert.AreEqual(TitleOfEssay, essayTopic.TitleOfEssay);\n }\n else\n {\n Assert.AreEqual(TitleOfEssay + 2, essayTopic.TitleOfEssay);\n }\n }\n }\n\n [TestMethod, TestCategory(\"Integration\")]\n public void ApplicantsNotificationRepository_FirstGetNotifications_EssayTopics_Should_Have_Right_Ids()\n {\n foreach (var essayTopic in FirstNotificationResult.EssayNotifications)\n {\n Assert.AreEqual(essayTopic.EssayPrompt == Prompt ? 
EssayTopicId : EssayTopicId2, essayTopic.EssayTopicId);\n }\n }\n\n #endregion\n\n #region FirstUpsert\n\n private static void FirstUpsert()\n {\n Essay = \"Essay\";\n var dto = new EssayDto {Essay = Essay + 1, EssayPrompt = Prompt, EssayTopicId = EssayTopicId};\n _applicantsEssayRepository.UpsertEssay(dto);\n\n using (var context = GetRootContext())\n {\n EssayUpsertResult1 =\n context.Essays.First(\n essay => essay.EssayTopic.Id == EssayTopicId && essay.Applicant.Person.Guid == ApplicantsGuid);\n }\n }\n\n private static Essay EssayUpsertResult1 { get; set; }\n private static string Essay { get; set; }\n\n [TestMethod, TestCategory(\"Integration\")]\n public void ApplicantsEssayRepository_FirstUpsert_Should_Have_6_Characters()\n {\n Assert.AreEqual(6, EssayUpsertResult1.CharacterLength);\n }\n\n [TestMethod, TestCategory(\"Integration\")]\n public void ApplicantsEssayRepository_FirstUpsert_Should_Have_Have_RecentUpdated()\n {\n TestHelpersTimeAsserts.RecentTime(EssayUpsertResult1.RevisionDateTime);\n }\n\n [TestMethod, TestCategory(\"Integration\")]\n public void ApplicantsEssayRepository_FirstUpsert_Should_Have_Have_Correct_RowKey()\n {\n Assert.AreEqual(RowKey, EssayUpsertResult1.RowKey);\n }\n\n [TestMethod, TestCategory(\"Integration\")]\n public void ApplicantsEssayRepository_FirstUpsert_Should_Have_Have_Correct_PartitionKey()\n {\n Assert.AreEqual(GraduatingYear.ToString(), EssayUpsertResult1.PartitionKey);\n }\n\n [TestMethod, TestCategory(\"Integration\")]\n public void ApplicantsEssayRepository_FirstUpsert_Should_Have_Positive_Id()\n {\n TestHelpersCommonAsserts.IsGreaterThanZero(EssayUpsertResult1.Id);\n }\n\n [TestMethod, TestCategory(\"Integration\")]\n public void ApplicantsEssayRepository_FirstUpsert_Should_Call_CreateRowKey()\n {\n A.CallTo(() => _rowKeyGenerator.CreateRowKeyForEssay(ApplicantsGuid, EssayTopicId)).MustHaveHappened();\n }\n\n [TestMethod, TestCategory(\"Integration\")]\n public void ApplicantsEssayRepository_FirstUpsert_Should_Call_UpsertEssay()\n {\n //Not checking time. It just isn't coming up. I did an in class check to see if it worked. It did. 
\n A.CallTo(() => _azureAzureEssayRepository.UpsertEssay(A\n .That.Matches(x =>\n x.Essay == Essay + 1 &&\n x.EssayPrompt == Prompt &&\n x.EssayTopicId == EssayTopicId &&\n x.PartitionKey == GraduatingYear.ToString() &&\n x.RowKey == RowKey\n ))).MustHaveHappened();\n }\n\n #endregion\n\n #region SecondUpsert\n\n private static void SecondUpsert()\n {\n var dto = new EssayDto {Essay = Essay + Essay + Essay, EssayPrompt = Prompt, EssayTopicId = EssayTopicId};\n _applicantsEssayRepository.UpsertEssay(dto);\n\n using (var context = GetRootContext())\n {\n EssayUpsertResult2 =\n context.Essays.First(\n essay => essay.EssayTopic.Id == EssayTopicId && essay.Applicant.Person.Guid == ApplicantsGuid);\n }\n }\n\n private static Essay EssayUpsertResult2 { get; set; }\n private static int EssayTopicId { get; set; }\n\n [TestMethod, TestCategory(\"Integration\")]\n public void ApplicantsEssayRepository_SecondUpsert_Should_Have_15_Characters()\n {\n Assert.AreEqual(15, EssayUpsertResult2.CharacterLength);\n }\n\n [TestMethod, TestCategory(\"Integration\")]\n public void ApplicantsEssayRepository_SecondUpsert_Should_Have_Have_RecentUpdated_More_Recent_Than_First()\n {\n TestHelpersTimeAsserts.IsGreaterThanOrEqual(EssayUpsertResult2.RevisionDateTime,\n EssayUpsertResult1.RevisionDateTime);\n }\n\n [TestMethod, TestCategory(\"Integration\")]\n public void ApplicantsEssayRepository_SecondUpsert_Should_Have_Have_Correct_RowKey()\n {\n Assert.AreEqual(RowKey, EssayUpsertResult2.RowKey);\n }\n\n [TestMethod, TestCategory(\"Integration\")]\n public void ApplicantsEssayRepository_SecondUpsert_Should_Have_Have_Correct_PartitionKey()\n {\n Assert.AreEqual(GraduatingYear.ToString(), EssayUpsertResult2.PartitionKey);\n }\n\n [TestMethod, TestCategory(\"Integration\")]\n public void ApplicantsEssayRepository_SecondUpsert_Should_Have_Equal_Id_To_First()\n {\n Assert.AreEqual(EssayUpsertResult1.Id, EssayUpsertResult2.Id);\n }\n\n [TestMethod, TestCategory(\"Integration\")]\n public void ApplicantsEssayRepository_SecondUpsert_Should_Call_CreateRowKey()\n {\n A.CallTo(() => _rowKeyGenerator.CreateRowKeyForEssay(ApplicantsGuid, EssayTopicId))\n .MustHaveHappened(Repeated.AtLeast.Times(3));\n }\n\n [TestMethod, TestCategory(\"Integration\")]\n public void ApplicantsEssayRepository_SecondUpsert_Should_Call_UpsertEssay()\n {\n //Not checking time. It just isn't coming up. I did an in class check to see if it worked. It did. 
\n A.CallTo(() => _azureAzureEssayRepository.UpsertEssay(A\n .That.Matches(x =>\n x.Essay == Essay + Essay + Essay &&\n x.EssayPrompt == Prompt &&\n x.EssayTopicId == EssayTopicId &&\n x.PartitionKey == GraduatingYear.ToString() &&\n x.RowKey == RowKey\n ))).MustHaveHappened();\n }\n\n #endregion\n\n #region SecondNotifications\n\n private static void SecondTestOfNotifications()\n {\n SecondNotificationResult = _applicantsesNotification.GetApplicantNotifications();\n }\n\n private static ApplicantNotificationsDto SecondNotificationResult { get; set; }\n\n [TestMethod, TestCategory(\"Integration\")]\n public void ApplicantsNotificationRepository_SecondGetNotifications_Should_Have_Two_EssayTopics()\n {\n Assert.AreEqual(2, SecondNotificationResult.EssayNotifications.Count);\n }\n\n [TestMethod, TestCategory(\"Integration\")]\n public void ApplicantsNotificationRepository_SecondGetNotifications_EssayTopics_Should_Have_No_LastUpdated()\n {\n foreach (var essayTopic in SecondNotificationResult.EssayNotifications)\n {\n if (essayTopic.EssayPrompt == Prompt)\n {\n Assert.AreEqual(EssayUpsertResult2.RevisionDateTime, essayTopic.RevisionDateTime);\n }\n else\n {\n Assert.IsNull(essayTopic.RevisionDateTime);\n }\n }\n }\n\n [TestMethod, TestCategory(\"Integration\")]\n public void ApplicantsNotificationRepository_SecondGetNotifications_EssayTopics_Should_Have_Right_EssayTopic()\n {\n foreach (var essayTopic in SecondNotificationResult.EssayNotifications)\n {\n if (essayTopic.EssayPrompt == Prompt)\n {\n Assert.AreEqual(TitleOfEssay, essayTopic.TitleOfEssay);\n }\n else\n {\n Assert.AreEqual(TitleOfEssay + 2, essayTopic.TitleOfEssay);\n }\n }\n }\n\n [TestMethod, TestCategory(\"Integration\")]\n public void ApplicantsNotificationRepository_SecondGetNotifications_EssayTopics_Should_Have_Right_Ids()\n {\n foreach (var essayTopic in SecondNotificationResult.EssayNotifications)\n {\n Assert.AreEqual(essayTopic.EssayPrompt == Prompt ? 
EssayTopicId : EssayTopicId2, essayTopic.EssayTopicId);\n }\n }\n\n #endregion\n \n #region Utilities\n\n private static DatabaseRootContext GetRootContext()\n {\n return new DatabaseRootContext(TestHelpersCommonFields.DatabaseName);\n }\n\n [ClassCleanup]\n public static void CleanDb()\n {\n using (var context = new DatabaseRootContext(TestHelpersCommonFields.DatabaseName))\n {\n context.Essays.Where(essay => essay.Id > 0).Delete();\n context.EssayTopics.Where(essayTopic => essayTopic.Id > 0).Delete();\n context.ApplicantPersonalInformations.Where(info => info.Id > 0).Delete();\n context.GraduatingClasses.Where(gradClass => gradClass.Id > 0).Delete();\n }\n }\n\n #endregion\n\n #region GetEssay\n\n [TestMethod, TestCategory(\"Integration\")]\n public void ApplicantsEssayRepository_GetEssay_Should_Call_CreateRowKeyForEssay()\n {\n GetEssay();\n A.CallTo(() => _rowKeyGenerator.CreateRowKeyForEssay(ApplicantsGuid, EssayTopicId)).MustHaveHappened();\n }\n\n [TestMethod, TestCategory(\"Integration\")]\n public void ApplicantsEssayRepository_GetEssay_Should_Call_AzureEssayRepository()\n {\n GetEssay();\n A.CallTo(\n () =>\n _azureAzureEssayRepository.GetEssay(\n A.That.Matches(\n x => x.PartitionKey == GraduatingYear.ToString() && x.RowKey == RowKey))).MustHaveHappened();\n }\n\n [TestMethod, TestCategory(\"Integration\")]\n public void ApplicantsEssayRepository_GetEssay_Should_Return_Whatever_TheAzureRepoReturns()\n {\n var essayDto = new EssayDto();\n A.CallTo(() => _azureAzureEssayRepository.GetEssay(A.Ignored))\n .Returns(essayDto);\n\n Assert.AreSame(essayDto, GetEssay());\n }\n\n private EssayDto GetEssay()\n {\n return _applicantsEssayRepository.GetEssay(EssayTopicId);\n }\n\n #endregion\n\n }\n}\n"},"repo_name":{"kind":"string","value":"Sobieck00/BOH-Bulldog-Scholarship-Application-Management"},"path":{"kind":"string","value":"BohFoundation.ApplicantsRepository.Tests/IntegrationTests/ApplicantsEssayRepositoryIntegrationTests.cs"},"language":{"kind":"string","value":"C#"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":17599,"string":"17,599"}}},{"rowIdx":1782,"cells":{"code":{"kind":"string","value":"class AddAuthorAndSubjectToClaimStateTransitions < ActiveRecord::Migration[4.2]\n def change\n add_column :claim_state_transitions, :author_id, :integer\n add_column :claim_state_transitions, :subject_id, :integer\n end\nend\n"},"repo_name":{"kind":"string","value":"ministryofjustice/advocate-defence-payments"},"path":{"kind":"string","value":"db/migrate/20160909150238_add_author_and_subject_to_claim_state_transitions.rb"},"language":{"kind":"string","value":"Ruby"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":228,"string":"228"}}},{"rowIdx":1783,"cells":{"code":{"kind":"string","value":"# Using a compact OS\nFROM registry.dataos.io/library/nginx\n\nMAINTAINER Golfen Guo \n\n# Install Nginx\n\n# Add 2048 stuff into Nginx server\nCOPY . 
/usr/share/nginx/html\n\nEXPOSE 80\n"},"repo_name":{"kind":"string","value":"yepengxj/dao-2048"},"path":{"kind":"string","value":"Dockerfile"},"language":{"kind":"string","value":"Dockerfile"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":201,"string":"201"}}},{"rowIdx":1784,"cells":{"code":{"kind":"string","value":"require 'test_helper'\n\nrequire 'cache_value/util'\n\n\nclass UtilTest < Test::Unit::TestCase\n  include CacheValue::Util\n  \n  context 'hex_digest' do\n    should 'return the same digest for identical hashes' do \n      hex_digest({ :ha => 'ha'}).should == hex_digest({ :ha => 'ha'})\n    end\n  end\n  \nend\n"},"repo_name":{"kind":"string","value":"tobias/cache_value"},"path":{"kind":"string","value":"test/util_test.rb"},"language":{"kind":"string","value":"Ruby"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":298,"string":"298"}}},{"rowIdx":1785,"cells":{"code":{"kind":"string","value":"# This migration comes from thinkspace_resource (originally 20150502000000)\nclass AddFingerprintToFile < ActiveRecord::Migration\n  def change\n    add_column :thinkspace_resource_files, :file_fingerprint, :string\n  end\nend\n"},"repo_name":{"kind":"string","value":"sixthedge/cellar"},"path":{"kind":"string","value":"packages/opentbl/api/db/migrate/20170511210074_add_fingerprint_to_file.thinkspace_resource.rb"},"language":{"kind":"string","value":"Ruby"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":222,"string":"222"}}},{"rowIdx":1786,"cells":{"code":{"kind":"string","value":"/**\n * Copyright (c) Microsoft Corporation. All rights reserved.\n * Licensed under the MIT License. See License.txt in the project root for\n * license information.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n */\n\npackage com.microsoft.azure.management.network.v2020_04_01;\n\nimport java.util.Map;\nimport com.fasterxml.jackson.annotation.JsonProperty;\n\n/**\n * Tags object for patch operations.\n */\npublic class TagsObject {\n    /**\n     * Resource tags.\n     */\n    @JsonProperty(value = \"tags\")\n    private Map<String, String> tags;\n\n    /**\n     * Get resource tags.\n     *\n     * @return the tags value\n     */\n    public Map<String, String> tags() {\n        return this.tags;\n    }\n\n    /**\n     * Set resource tags.\n     *\n     * @param tags the tags value to set\n     * @return the TagsObject object itself.\n     */\n    public TagsObject withTags(Map<String, String> tags) {\n        this.tags = tags;\n        return this;\n    }\n\n}\n"},"repo_name":{"kind":"string","value":"selvasingh/azure-sdk-for-java"},"path":{"kind":"string","value":"sdk/network/mgmt-v2020_04_01/src/main/java/com/microsoft/azure/management/network/v2020_04_01/TagsObject.java"},"language":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":954,"string":"954"}}},{"rowIdx":1787,"cells":{"code":{"kind":"string","value":"/*\n * This file is part of Sponge, licensed under the MIT License (MIT).\n *\n * Copyright (c) SpongePowered <https://www.spongepowered.org>\n * Copyright (c) contributors\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or 
substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n * THE SOFTWARE.\n */\npackage org.spongepowered.common.mixin.core.server.network;\n\nimport net.minecraft.network.NetworkManager;\nimport net.minecraft.network.login.server.S00PacketDisconnect;\nimport net.minecraft.server.MinecraftServer;\nimport net.minecraft.server.management.ServerConfigurationManager;\nimport net.minecraft.server.network.NetHandlerLoginServer;\nimport net.minecraft.util.ChatComponentTranslation;\nimport net.minecraft.util.IChatComponent;\nimport org.apache.logging.log4j.Logger;\nimport org.spongepowered.api.event.cause.NamedCause;\nimport org.spongepowered.api.profile.GameProfile;\nimport org.spongepowered.api.event.SpongeEventFactory;\nimport org.spongepowered.api.event.cause.Cause;\nimport org.spongepowered.api.event.network.ClientConnectionEvent;\nimport org.spongepowered.api.network.RemoteConnection;\nimport org.spongepowered.api.text.Text;\nimport org.spongepowered.asm.lib.Opcodes;\nimport org.spongepowered.asm.mixin.Mixin;\nimport org.spongepowered.asm.mixin.Shadow;\nimport org.spongepowered.asm.mixin.injection.At;\nimport org.spongepowered.asm.mixin.injection.Inject;\nimport org.spongepowered.asm.mixin.injection.Redirect;\nimport org.spongepowered.asm.mixin.injection.callback.CallbackInfo;\nimport org.spongepowered.common.SpongeImpl;\nimport org.spongepowered.common.interfaces.IMixinNetHandlerLoginServer;\nimport org.spongepowered.common.text.SpongeTexts;\n\nimport java.net.SocketAddress;\nimport java.util.Optional;\n\n@Mixin(NetHandlerLoginServer.class)\npublic abstract class MixinNetHandlerLoginServer implements IMixinNetHandlerLoginServer {\n\n    @Shadow private static Logger logger;\n    @Shadow public NetworkManager networkManager;\n    @Shadow private MinecraftServer server;\n    @Shadow private com.mojang.authlib.GameProfile loginGameProfile;\n\n    @Shadow public abstract String getConnectionInfo();\n    @Shadow public abstract com.mojang.authlib.GameProfile getOfflineProfile(com.mojang.authlib.GameProfile profile);\n\n    @Redirect(method = \"tryAcceptPlayer\", at = @At(value = \"INVOKE\", target = \"Lnet/minecraft/server/management/ServerConfigurationManager;\"\n            + \"allowUserToConnect(Ljava/net/SocketAddress;Lcom/mojang/authlib/GameProfile;)Ljava/lang/String;\"))\n    public String onAllowUserToConnect(ServerConfigurationManager confMgr, SocketAddress address, com.mojang.authlib.GameProfile profile) {\n        return null; // We handle disconnecting\n    }\n\n    private void closeConnection(IChatComponent reason) {\n        try {\n            logger.info(\"Disconnecting \" + this.getConnectionInfo() + \": \" + reason.getUnformattedText());\n            this.networkManager.sendPacket(new S00PacketDisconnect(reason));\n            this.networkManager.closeChannel(reason);\n        } catch (Exception exception) {\n            logger.error(\"Error whilst disconnecting player\", exception);\n        }\n    }\n\n    private void disconnectClient(Optional<Text> disconnectMessage) {\n        IChatComponent reason = null;\n        if (disconnectMessage.isPresent()) {\n            reason = SpongeTexts.toComponent(disconnectMessage.get());\n        } else {\n            reason = new 
ChatComponentTranslation(\"disconnect.disconnected\");\n }\n this.closeConnection(reason);\n }\n\n @Override\n public boolean fireAuthEvent() {\n Optional disconnectMessage = Optional.of(Text.of(\"You are not allowed to log in to this server.\"));\n ClientConnectionEvent.Auth event = SpongeEventFactory.createClientConnectionEventAuth(Cause.of(NamedCause.source(this.loginGameProfile)),\n disconnectMessage, disconnectMessage, (RemoteConnection) this.networkManager, (GameProfile) this.loginGameProfile);\n SpongeImpl.postEvent(event);\n if (event.isCancelled()) {\n this.disconnectClient(event.getMessage());\n }\n return event.isCancelled();\n }\n\n @Inject(method = \"processLoginStart\", at = @At(value = \"FIELD\", target = \"Lnet/minecraft/server/network/NetHandlerLoginServer;\"\n + \"currentLoginState:Lnet/minecraft/server/network/NetHandlerLoginServer$LoginState;\",\n opcode = Opcodes.PUTFIELD, ordinal = 1), cancellable = true)\n public void fireAuthEventOffline(CallbackInfo ci) {\n // Move this check up here, so that the UUID isn't null when we fire the event\n if (!this.loginGameProfile.isComplete()) {\n this.loginGameProfile = this.getOfflineProfile(this.loginGameProfile);\n }\n\n if (this.fireAuthEvent()) {\n ci.cancel();\n }\n }\n}\n"},"repo_name":{"kind":"string","value":"kashike/SpongeCommon"},"path":{"kind":"string","value":"src/main/java/org/spongepowered/common/mixin/core/server/network/MixinNetHandlerLoginServer.java"},"language":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":5729,"string":"5,729"}}},{"rowIdx":1788,"cells":{"code":{"kind":"string","value":"from otp.ai.AIBaseGlobal import *\nimport DistributedCCharBaseAI\nfrom direct.directnotify import DirectNotifyGlobal\nfrom direct.fsm import ClassicFSM, State\nfrom direct.fsm import State\nfrom direct.task import Task\nimport random\nfrom toontown.toonbase import ToontownGlobals\nfrom toontown.toonbase import TTLocalizer\nimport CharStateDatasAI\n\nclass DistributedGoofySpeedwayAI(DistributedCCharBaseAI.DistributedCCharBaseAI):\n notify = DirectNotifyGlobal.directNotify.newCategory('DistributedGoofySpeedwayAI')\n\n def __init__(self, air):\n DistributedCCharBaseAI.DistributedCCharBaseAI.__init__(self, air, TTLocalizer.Goofy)\n self.fsm = ClassicFSM.ClassicFSM('DistributedGoofySpeedwayAI', [State.State('Off', self.enterOff, self.exitOff, ['Lonely', 'TransitionToCostume', 'Walk']),\n State.State('Lonely', self.enterLonely, self.exitLonely, ['Chatty', 'Walk', 'TransitionToCostume']),\n State.State('Chatty', self.enterChatty, self.exitChatty, ['Lonely', 'Walk', 'TransitionToCostume']),\n State.State('Walk', self.enterWalk, self.exitWalk, ['Lonely', 'Chatty', 'TransitionToCostume']),\n State.State('TransitionToCostume', self.enterTransitionToCostume, self.exitTransitionToCostume, ['Off'])], 'Off', 'Off')\n self.fsm.enterInitialState()\n self.handleHolidays()\n\n def delete(self):\n self.fsm.requestFinalState()\n DistributedCCharBaseAI.DistributedCCharBaseAI.delete(self)\n self.lonelyDoneEvent = None\n self.lonely = None\n self.chattyDoneEvent = None\n self.chatty = None\n self.walkDoneEvent = None\n self.walk = None\n return\n\n def generate(self):\n DistributedCCharBaseAI.DistributedCCharBaseAI.generate(self)\n name = self.getName()\n self.lonelyDoneEvent = self.taskName(name + '-lonely-done')\n self.lonely = CharStateDatasAI.CharLonelyStateAI(self.lonelyDoneEvent, self)\n self.chattyDoneEvent = self.taskName(name + '-chatty-done')\n self.chatty = 
CharStateDatasAI.CharChattyStateAI(self.chattyDoneEvent, self)\n self.walkDoneEvent = self.taskName(name + '-walk-done')\n if self.diffPath == None:\n self.walk = CharStateDatasAI.CharWalkStateAI(self.walkDoneEvent, self)\n else:\n self.walk = CharStateDatasAI.CharWalkStateAI(self.walkDoneEvent, self, self.diffPath)\n return\n\n def walkSpeed(self):\n return ToontownGlobals.GoofySpeed\n\n def start(self):\n self.fsm.request('Lonely')\n\n def __decideNextState(self, doneStatus):\n if self.transitionToCostume == 1:\n curWalkNode = self.walk.getDestNode()\n if simbase.air.holidayManager:\n if ToontownGlobals.HALLOWEEN_COSTUMES in simbase.air.holidayManager.currentHolidays and simbase.air.holidayManager.currentHolidays[ToontownGlobals.HALLOWEEN_COSTUMES]:\n simbase.air.holidayManager.currentHolidays[ToontownGlobals.HALLOWEEN_COSTUMES].triggerSwitch(curWalkNode, self)\n self.fsm.request('TransitionToCostume')\n elif ToontownGlobals.APRIL_FOOLS_COSTUMES in simbase.air.holidayManager.currentHolidays and simbase.air.holidayManager.currentHolidays[ToontownGlobals.APRIL_FOOLS_COSTUMES]:\n simbase.air.holidayManager.currentHolidays[ToontownGlobals.APRIL_FOOLS_COSTUMES].triggerSwitch(curWalkNode, self)\n self.fsm.request('TransitionToCostume')\n else:\n self.notify.warning('transitionToCostume == 1 but no costume holiday')\n else:\n self.notify.warning('transitionToCostume == 1 but no holiday Manager')\n if doneStatus['state'] == 'lonely' and doneStatus['status'] == 'done':\n self.fsm.request('Walk')\n elif doneStatus['state'] == 'chatty' and doneStatus['status'] == 'done':\n self.fsm.request('Walk')\n elif doneStatus['state'] == 'walk' and doneStatus['status'] == 'done':\n if len(self.nearbyAvatars) > 0:\n self.fsm.request('Chatty')\n else:\n self.fsm.request('Lonely')\n\n def enterOff(self):\n pass\n\n def exitOff(self):\n DistributedCCharBaseAI.DistributedCCharBaseAI.exitOff(self)\n\n def enterLonely(self):\n self.lonely.enter()\n self.acceptOnce(self.lonelyDoneEvent, self.__decideNextState)\n\n def exitLonely(self):\n self.ignore(self.lonelyDoneEvent)\n self.lonely.exit()\n\n def __goForAWalk(self, task):\n self.notify.debug('going for a walk')\n self.fsm.request('Walk')\n return Task.done\n\n def enterChatty(self):\n self.chatty.enter()\n self.acceptOnce(self.chattyDoneEvent, self.__decideNextState)\n\n def exitChatty(self):\n self.ignore(self.chattyDoneEvent)\n self.chatty.exit()\n\n def enterWalk(self):\n self.notify.debug('going for a walk')\n self.walk.enter()\n self.acceptOnce(self.walkDoneEvent, self.__decideNextState)\n\n def exitWalk(self):\n self.ignore(self.walkDoneEvent)\n self.walk.exit()\n\n def avatarEnterNextState(self):\n if len(self.nearbyAvatars) == 1:\n if self.fsm.getCurrentState().getName() != 'Walk':\n self.fsm.request('Chatty')\n else:\n self.notify.debug('avatarEnterNextState: in walk state')\n else:\n self.notify.debug('avatarEnterNextState: num avatars: ' + str(len(self.nearbyAvatars)))\n\n def avatarExitNextState(self):\n if len(self.nearbyAvatars) == 0:\n if self.fsm.getCurrentState().getName() != 'Walk':\n self.fsm.request('Lonely')\n\n def handleHolidays(self):\n DistributedCCharBaseAI.DistributedCCharBaseAI.handleHolidays(self)\n if hasattr(simbase.air, 'holidayManager'):\n if ToontownGlobals.APRIL_FOOLS_COSTUMES in simbase.air.holidayManager.currentHolidays:\n if simbase.air.holidayManager.currentHolidays[ToontownGlobals.APRIL_FOOLS_COSTUMES] != None and simbase.air.holidayManager.currentHolidays[ToontownGlobals.APRIL_FOOLS_COSTUMES].getRunningState():\n 
self.diffPath = TTLocalizer.Donald\n return\n\n def getCCLocation(self):\n if self.diffPath == None:\n return 1\n else:\n return 0\n return\n\n def enterTransitionToCostume(self):\n pass\n\n def exitTransitionToCostume(self):\n pass\n"},"repo_name":{"kind":"string","value":"ksmit799/Toontown-Source"},"path":{"kind":"string","value":"toontown/classicchars/DistributedGoofySpeedwayAI.py"},"language":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":6450,"string":"6,450"}}},{"rowIdx":1789,"cells":{"code":{"kind":"string","value":"root('rmc_core_static_page');\n\n $rootNode\n ->children()\n ->arrayNode('static_page')\n ->children()\n ->scalarNode('is_enabled')->end()\n ->scalarNode('source')->end()\n ->scalarNode('entity_manager_name')->end()\n ->scalarNode('entity_class')->end()\n ->scalarNode('local_feed_path')->defaultFalse()->end()\n ->end()\n ->end()\n ->end();\n\n // Here you should define the parameters that are allowed to\n // configure your bundle. See the documentation linked above for\n // more information on that topic.\n\n return $treeBuilder;\n }\n}\n"},"repo_name":{"kind":"string","value":"jignesh-russmediatech/rmcdemo"},"path":{"kind":"string","value":"src/Rmc/Core/StaticPageBundle/DependencyInjection/Configuration.php"},"language":{"kind":"string","value":"PHP"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":1405,"string":"1,405"}}},{"rowIdx":1790,"cells":{"code":{"kind":"string","value":""},"repo_name":{"kind":"string","value":"textlint/textlint-plugin-html"},"path":{"kind":"string","value":"test/ast-test-case/doctype-quirksmode-xml/result.html"},"language":{"kind":"string","value":"HTML"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":65,"string":"65"}}},{"rowIdx":1791,"cells":{"code":{"kind":"string","value":"import React from 'react';\nimport {\n Link\n } from 'react-router';\nimport HotdotActions from '../actions/HotdotActions';\nimport HotdotObjStore from '../stores/HotdotObjStore';\nimport MyInfoNavbar from './MyInfoNavbar';\nimport Weixin from './Weixin';\nclass Hotdot extends React.Component {\n constructor(props) {\n super(props);\n this.state = HotdotObjStore.getState();\n this.onChange = this.onChange.bind(this);\n }\n componentDidMount() {\n HotdotActions.getHotdotDatas();\n $(\".month-search\").hide();\n $(\".navbar-hotdot\").on(\"touchend\",function(){\n var index = $(this).index();\n if(index==0){\n //本周\n $(\".month-search\").hide();\n $(\".week-search\").show();\n }else{\n //本月\n $(\".month-search\").show();\n $(\".week-search\").hide();\n }\n\n });\n HotdotObjStore.listen(this.onChange);\n Weixin.getUrl();\n Weixin.weixinReady();\n }\n componentWillUnmount() {\n HotdotObjStore.unlisten(this.onChange);\n }\n\n onChange(state) {\n this.setState(state);\n }\n\n getUpOrDown(curData,preData,isWeek){\n var preDataItem = isWeek ? 
preData.week:preData.month;\n if(preData==false || preData == [] || preDataItem==undefined){\n return (\n {curData.value});\n }else{\n for(var i = 0;i < preDataItem.length;i++){\n if(preDataItem[i].word == curData.word){\n if(preDataItem[i].value < curData.value){\n return (\n {curData.value});\n }else{\n return (\n {curData.value});\n }\n }\n }\n }\n return (\n {curData.value});\n }\n render() {\n var hotdotData = (this.state.data);\n var firstHotData = hotdotData[0];\n var preHotData ;\n if(hotdotData.length > 7){\n preHotData = hotdotData[7];\n }else{\n preHotData = [];\n }\n if(firstHotData){\n var weekList = firstHotData.week.map((weekItem,i)=>(\n
  • \n {this.getUpOrDown(weekItem,preHotData,true)}\n {weekItem.word}\n
  • \n ));\n if(weekList.length==0){\n weekList =
    数据还没有准备好,要不去其他页面瞅瞅?
    \n }\n\n var monthList = firstHotData.month.map((monthItem,i)=>(\n
  • \n {this.getUpOrDown(monthItem,preHotData,false)}\n {monthItem.word}\n
  • \n ));\n if(monthList.length==0){\n monthList =
    Whops,这个页面的数据没有准备好,去其他页面瞅瞅?
    \n }\n }else{\n var weekList = (正在构建,敬请期待...);\n var monthList = (正在构建,敬请期待...);\n }\n return (
    \n
    \n
    \n
    \n
    \n 本周关键字排行榜\n
    \n 本周\n 本月\n
    \n
    \n
    \n
      \n {weekList}\n
    \n
    \n
    \n
    \n
    \n
    \n
    \n 本月关键字排行榜\n
    \n 本周\n 本月\n
    \n
    \n
    \n
      \n {monthList}\n
    \n
    \n
    \n
    \n
    \n
    );\n}\n}\n\nexport default Hotdot;"},"repo_name":{"kind":"string","value":"kongchun/BigData-Web"},"path":{"kind":"string","value":"app/m_components/Hotdot.js"},"language":{"kind":"string","value":"JavaScript"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":5621,"string":"5,621"}}},{"rowIdx":1792,"cells":{"code":{"kind":"string","value":"// Copyright (c) 2013-2014 PropCoin Developers\n\n#ifndef CLIENTVERSION_H\n#define CLIENTVERSION_H\n\n//\n// client versioning and copyright year\n//\n\n// These need to be macros, as version.cpp's and bitcoin-qt.rc's voodoo requires it\n#define CLIENT_VERSION_MAJOR 1\n#define CLIENT_VERSION_MINOR 5\n#define CLIENT_VERSION_REVISION 1\n#define CLIENT_VERSION_BUILD 0\n\n// Set to true for release, false for prerelease or test build\n#define CLIENT_VERSION_IS_RELEASE true\n\n// Copyright year (2009-this)\n// Todo: update this when changing our copyright comments in the source\n#define COPYRIGHT_YEAR 2014\n\n// Converts the parameter X to a string after macro replacement on X has been performed.\n// Don't merge these into one macro!\n#define STRINGIZE(X) DO_STRINGIZE(X)\n#define DO_STRINGIZE(X) #X\n\n#endif // CLIENTVERSION_H\n"},"repo_name":{"kind":"string","value":"demomint/prop"},"path":{"kind":"string","value":"src/clientversion.h"},"language":{"kind":"string","value":"C"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":829,"string":"829"}}},{"rowIdx":1793,"cells":{"code":{"kind":"string","value":"varnish\n=======\n\nVarnish to run EOL site\n\n\nsudo docker run -v /eol/varnish/default.vcl:/etc/varnish/default.vcl \\\n -p 80:80 eoldocker/varnish:v3.0.5\n"},"repo_name":{"kind":"string","value":"EolDocker/varnish"},"path":{"kind":"string","value":"README.md"},"language":{"kind":"string","value":"Markdown"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":150,"string":"150"}}},{"rowIdx":1794,"cells":{"code":{"kind":"string","value":"\n */\ninterface BuilderInterface\n{\n /**\n * Build a response object\n *\n * @param RequestInterface $request\n * @param ResponseInterface $response\n * @param string $context\n * @return AbstractResponse\n */\n public function build(RequestInterface $request, ResponseInterface $response, $context = null);\n}"},"repo_name":{"kind":"string","value":"devsdmf/payu-php-sdk"},"path":{"kind":"string","value":"src/PayU/Api/Response/Builder/BuilderInterface.php"},"language":{"kind":"string","value":"PHP"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":713,"string":"713"}}},{"rowIdx":1795,"cells":{"code":{"kind":"string","value":"using SolrExpress.Search.Parameter;\nusing System;\nusing System.Globalization;\nusing System.Linq;\nusing System.Text;\n\nnamespace SolrExpress.Utility\n{\n /// \n /// Helper class used to extract information inside parameters\n /// \n internal static class ParameterUtil\n {\n /// \n /// Get the sort type and direction\n /// \n /// Type used in match\n /// Type name\n /// Sort direction\n public static void GetFacetSort(FacetSortType solrFacetSortType, out string typeName, out string sortName)\n {\n switch (solrFacetSortType)\n {\n case FacetSortType.IndexAsc:\n typeName = \"index\";\n sortName = \"asc\";\n break;\n case FacetSortType.IndexDesc:\n typeName = \"index\";\n sortName = \"desc\";\n break;\n case FacetSortType.CountAsc:\n typeName = \"count\";\n sortName = \"asc\";\n break;\n case FacetSortType.CountDesc:\n typeName = \"count\";\n sortName = \"desc\";\n break;\n default:\n throw new 
ArgumentException(nameof(solrFacetSortType));\n }\n }\n\n /// \n /// Calculate and returns spatial formule\n /// \n /// Field name\n /// Function used in spatial filter\n /// Center point to spatial filter\n /// Distance from center point\n /// Spatial formule\n internal static string GetSpatialFormule(string fieldName, SpatialFunctionType functionType, GeoCoordinate centerPoint, decimal distance)\n {\n var functionTypeStr = functionType.ToString().ToLower();\n var latitude = centerPoint.Latitude.ToString(\"G\", CultureInfo.InvariantCulture);\n var longitude = centerPoint.Longitude.ToString(\"G\", CultureInfo.InvariantCulture);\n var distanceStr = distance.ToString(\"G\", CultureInfo.InvariantCulture);\n\n return $\"{{!{functionTypeStr} sfield={fieldName} pt={latitude},{longitude} d={distanceStr}}}\";\n }\n\n /// \n /// Get the field with excludes\n /// \n /// Excludes tags\n /// Alias name\n /// Field name\n internal static string GetFacetName(string[] excludes, string aliasName, string fieldName)\n {\n var sb = new StringBuilder();\n var needsBraces = (excludes?.Any() ?? false) || !string.IsNullOrWhiteSpace(aliasName);\n\n if (needsBraces)\n {\n sb.Append(\"{!\");\n }\n\n if (excludes?.Any() ?? false)\n {\n sb.Append($\"ex={string.Join(\",\", excludes)}\");\n }\n\n if (sb.Length > 2)\n {\n sb.Append(\" \");\n }\n\n if (!string.IsNullOrWhiteSpace(aliasName))\n {\n sb.Append($\"key={aliasName}\");\n }\n\n if (needsBraces)\n {\n sb.Append(\"}\");\n }\n\n sb.Append(fieldName);\n\n return sb.ToString();\n }\n\n /// \n /// Get the filter with tag\n /// \n /// Query value\n /// Alias name\n public static string GetFilterWithTag(string query, string aliasName)\n {\n return !string.IsNullOrWhiteSpace(aliasName) ? $\"{{!tag={aliasName}}}{query}\" : query;\n }\n }\n}\n"},"repo_name":{"kind":"string","value":"solr-express/solr-express"},"path":{"kind":"string","value":"src/SolrExpress/Utility/ParameterUtil.cs"},"language":{"kind":"string","value":"C#"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":4034,"string":"4,034"}}},{"rowIdx":1796,"cells":{"code":{"kind":"string","value":"/**\n * React Starter Kit (https://www.reactstarterkit.com/)\n *\n * Copyright © 2014-2016 Kriasoft, LLC. 
All rights reserved.\n *\n * This source code is licensed under the MIT license found in the\n * LICENSE.txt file in the root directory of this source tree.\n */\n\nimport 'babel-polyfill';\nimport ReactDOM from 'react-dom';\nimport React from 'react';\nimport FastClick from 'fastclick';\nimport Router from './routes';\nimport Location from './core/Location';\nimport { addEventListener, removeEventListener } from './core/DOMUtils';\nimport { ApolloClient, createNetworkInterface } from 'react-apollo';\n\nfunction getCookie(name) {\n let value = \"; \" + document.cookie;\n let parts = value.split(\"; \" + name + \"=\");\n if (parts.length == 2) return parts.pop().split(\";\").shift();\n}\n\nconst networkInterface = createNetworkInterface('/graphql', {\n credentials: 'same-origin',\n uri: '/graphql',\n headers: {\n Cookie: getCookie(\"id_token\")\n }\n});\n\nconst client = new ApolloClient({\n connectToDevTools: true,\n networkInterface: networkInterface,\n});\n\nlet cssContainer = document.getElementById('css');\nconst appContainer = document.getElementById('app');\nconst context = {\n insertCss: styles => styles._insertCss(),\n onSetTitle: value => (document.title = value),\n onSetMeta: (name, content) => {\n // Remove and create a new tag in order to make it work\n // with bookmarks in Safari\n const elements = document.getElementsByTagName('meta');\n Array.from(elements).forEach((element) => {\n if (element.getAttribute('name') === name) {\n element.parentNode.removeChild(element);\n }\n });\n const meta = document.createElement('meta');\n meta.setAttribute('name', name);\n meta.setAttribute('content', content);\n document\n .getElementsByTagName('head')[0]\n .appendChild(meta);\n },\n client\n};\n\n// Google Analytics tracking. Don't send 'pageview' event after the first\n// rendering, as it was already sent by the Html component.\nlet trackPageview = () => (trackPageview = () => window.ga('send', 'pageview'));\n\nfunction render(state) {\n Router.dispatch(state, (newState, component) => {\n ReactDOM.render( component, appContainer,\n () => {\n\n // Restore the scroll position if it was saved into the state\n if (state.scrollY !== undefined) {\n window.scrollTo(state.scrollX, state.scrollY);\n } else {\n window.scrollTo(0, 0);\n }\n\n trackPageview();\n\n // Remove the pre-rendered CSS because it's no longer used\n // after the React app is launched\n if (cssContainer) {\n cssContainer.parentNode.removeChild(cssContainer);\n cssContainer = null;\n }\n });\n });\n}\n\nfunction run() {\n let currentLocation = null;\n let currentState = null;\n\n // Make taps on links and buttons work fast on mobiles\n FastClick.attach(document.body);\n\n // Re-render the app when window.location changes\n const unlisten = Location.listen(location => {\n currentLocation = location;\n currentState = Object.assign({}, location.state, {\n path: location.pathname,\n query: location.query,\n state: location.state,\n context,\n });\n render(currentState);\n });\n\n // Save the page scroll position into the current location's state\n const supportPageOffset = window.pageXOffset !== undefined;\n const isCSS1Compat = ((document.compatMode || '') === 'CSS1Compat');\n const setPageOffset = () => {\n currentLocation.state = currentLocation.state || Object.create(null);\n if (supportPageOffset) {\n currentLocation.state.scrollX = window.pageXOffset;\n currentLocation.state.scrollY = window.pageYOffset;\n } else {\n currentLocation.state.scrollX = isCSS1Compat ?\n document.documentElement.scrollLeft : 
document.body.scrollLeft;\n currentLocation.state.scrollY = isCSS1Compat ?\n document.documentElement.scrollTop : document.body.scrollTop;\n }\n };\n\n addEventListener(window, 'scroll', setPageOffset);\n addEventListener(window, 'pagehide', () => {\n removeEventListener(window, 'scroll', setPageOffset);\n unlisten();\n });\n}\n\n// Run the application when both DOM is ready and page content is loaded\nif (['complete', 'loaded', 'interactive'].includes(document.readyState) && document.body) {\n run();\n} else {\n document.addEventListener('DOMContentLoaded', run, false);\n}\n"},"repo_name":{"kind":"string","value":"reicheltp/Sonic"},"path":{"kind":"string","value":"src/client.js"},"language":{"kind":"string","value":"JavaScript"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":4372,"string":"4,372"}}},{"rowIdx":1797,"cells":{"code":{"kind":"string","value":"var $M = require(\"@effectful/debugger\"),\n $x = $M.context,\n $ret = $M.ret,\n $unhandled = $M.unhandled,\n $brk = $M.brk,\n $lset = $M.lset,\n $mcall = $M.mcall,\n $m = $M.module(\"file.js\", null, typeof module === \"undefined\" ? null : module, null, \"$\", {\n __webpack_require__: typeof __webpack_require__ !== \"undefined\" && __webpack_require__\n}, null),\n $s$1 = [{\n e: [1, \"1:9-1:10\"]\n}, null, 0],\n $s$2 = [{}, $s$1, 1],\n $m$0 = $M.fun(\"m$0\", \"file.js\", null, null, [], 0, 2, \"1:0-4:0\", 32, function ($, $l, $p) {\n for (;;) switch ($.state = $.goto) {\n case 0:\n $lset($l, 1, $m$1($));\n $.goto = 2;\n continue;\n\n case 1:\n $.goto = 2;\n return $unhandled($.error);\n\n case 2:\n return $ret($.result);\n\n default:\n throw new Error(\"Invalid state\");\n }\n}, null, null, 0, [[0, \"1:0-3:1\", $s$1], [16, \"4:0-4:0\", $s$1], [16, \"4:0-4:0\", $s$1]]),\n $m$1 = $M.fun(\"m$1\", \"e\", null, $m$0, [], 0, 2, \"1:0-3:1\", 0, function ($, $l, $p) {\n for (;;) switch ($.state = $.goto) {\n case 0:\n $.goto = 1;\n $brk();\n $.state = 1;\n\n case 1:\n $.goto = 2;\n $p = ($x.call = eff)(1);\n $.state = 2;\n\n case 2:\n $l[1] = $p;\n $.goto = 3;\n $p = ($x.call = eff)(2);\n $.state = 3;\n\n case 3:\n $.goto = 4;\n $mcall(\"log\", console, $l[1] + $p);\n $.state = 4;\n\n case 4:\n $.goto = 6;\n $brk();\n continue;\n\n case 5:\n $.goto = 6;\n return $unhandled($.error);\n\n case 6:\n return $ret($.result);\n\n default:\n throw new Error(\"Invalid state\");\n }\n}, null, null, 1, [[4, \"2:2-2:31\", $s$2], [2, \"2:14-2:20\", $s$2], [2, \"2:23-2:29\", $s$2], [2, \"2:2-2:30\", $s$2], [36, \"3:1-3:1\", $s$2], [16, \"3:1-3:1\", $s$2], [16, \"3:1-3:1\", $s$2]]);\n\n$M.moduleExports();"},"repo_name":{"kind":"string","value":"awto/effectfuljs"},"path":{"kind":"string","value":"packages/core/test/samples/simple/expr/test04-out-ds.js"},"language":{"kind":"string","value":"JavaScript"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":1801,"string":"1,801"}}},{"rowIdx":1798,"cells":{"code":{"kind":"string","value":"# INTRODUCTION\n\n\nTriplie is an AI bot based on 2nd up to 5th order Markov model. It uses an\nSQLite database for storage.\n\nTriplie learns by creating\n\n1. a dictionary of words\n2. a graph representing valid 5-grams (consecutive groups of 5 words)\n encountered in the text\n3. 
a graph of associations between words from sentences formed according to the\n Hebbian rule\n\nTo respond to a user, triplie extracts keywords from the user's text, finds\ntheir most appropriate associated keywords in the Hebbian association network,\nand generates replies that contain the associated keywords using multiple\nbreadth-first-search Markov chains algorithm.\n\nFor more information on installing and configuring read below\n\nYou can join the project's IRC channel too:\n[#triplie on irc.freenode.net](irc://irc.freenode.net/#triplie)\n\n\n# Install\n\n## Prerequisites\n\nDownload and install [node.js](http://nodejs.org/) for your system.\nIts recommended to build node from source. If you don't do that, make\nsure that npm is also installed alongside with node and that the\nnode binary is called \"node\"\n\nThen from a terminal run:\n\n npm install -g triplie\n\n\nThis will install the `triplie` command on your system.\n\nConfigure the bot as explained below before running!\n\n# CONFIGURATION\n\nIf running the bot for the first time and its not configured,\nyou should create a new directory and run:\n\n triplie config.yaml --init\n\nto create the initial config file\n\n### Edit config.yaml\n\nconfig.yaml is already pre-filled with some default for your bot. You will want\nto change some of these settings.\n\nThe configuration file is really well commented. Open it and edit it according\nto the instructions contained inside. Once you run the bot however, the\ninstructions will disappear the moment you change a setting by giving a command\nto the bot.\n\n# RUNNING\n\nAfter you edited the config file, to run the bot use the command:\n\n triplie config.yaml\n\n# IMPORT EXISTING TEXT\n\nIf called with the argument `--feed` triplie will receive data from stdin,\nparse it using a regular expression then feed the database.\n\nExample:\n\n cat log.txt | triplie config.yaml --feed --regex '(?\\d+)-(?\\d+)-(?)T(?\\d+):(?\\d+):(?\\d+)Z\\s+(?.+):\\s+(?.+)'\n\nwill work for a `log.txt` that has lines in the format:\n\n 2013-04-04T13:15:00Z someuser: I wrote some text\n\nThe syntax is XRegExp and uses named groups. See\n[the XRegExp readme](https://npmjs.org/package/xregexp) for more info\n\nCurrently, supported named captures are:\n\n* year\n* month\n* day\n* hour\n* minute\n* second\n* timestamp - unix timestamp in seconds, used instead of the date captures\n* timestampms - unix timestamp in miliseconds, used instead of both above.\n* text - the text content\n\nTimestamp example:\n\n cat log.txt | triplie config.yaml --feed --regex '(?\\d+) (?.+)\n\nwill match `log.txt` containing lines in the format:\n\n 1234567890 example text here\n\nAll captures except text are optional - the time is optional and if left out\nthe feeder will generate reasonable \"fake\" timestamps.\n\n cat log.txt | triplie config.yaml --feed --regex '(?.+)'\n\n\n# COMMANDS\n\nList of triplie's commands (assuming \"!\" is the cmdchar)\n\n1. !join #channel - causes the bot to join and remember the channel\n\n2. !part #channel - part and forget channel\n\n3. !reload - causes reload of the bot code, useful for development\n\n4. !set path value - set a config setting to the specified value. Examples\n\n !set ai.sleep.1 10 - Set the upper sleep limit to 10 seconds\n !set ai.sleep [2,3] - Set both sleep limits. Value musn't contain space.\n\n5. !get path - get the config value at the specified path\n\n6. 
!db stats - triplie will output database statistics\n\n!cmd will return results via private notice\n\n!!cmd returns results via public message\n\n# LICENCE & AUTHOR\n\nSee LICENCE and AUTHORS (if present)\n\n![Bitdeli Badge](https://d2weczhvl823v0.cloudfront.net/spion/triplie-ng/trend.png)\n\n"},"repo_name":{"kind":"string","value":"spion/triplie-ng"},"path":{"kind":"string","value":"README.md"},"language":{"kind":"string","value":"Markdown"},"license":{"kind":"string","value":"mit"},"size":{"kind":"number","value":3968,"string":"3,968"}}},{"rowIdx":1799,"cells":{"code":{"kind":"string","value":"\n\n \n \n\n\n\n\n\n\n\n\nMy post &middot; Entropista\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n
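The `--feed` examples above rely on named capture groups. Below is a minimal sketch of the same parsing idea, written against native JavaScript named groups rather than triplie's actual XRegExp-based feeder; the `nick` group name and the `parseLine` helper are illustrative and not part of triplie.

```js
// Hypothetical sketch, not triplie code: parse one log line with a
// named-group regex and derive a millisecond timestamp from the captures.
const pattern = /^(?<year>\d+)-(?<month>\d+)-(?<day>\d+)T(?<hour>\d+):(?<minute>\d+):(?<second>\d+)Z\s+(?<nick>.+?):\s+(?<text>.+)$/;

function parseLine(line) {
  const m = line.match(pattern);
  if (!m) return null;
  const g = m.groups;
  // Date.UTC expects a 0-based month, while the log uses 1-based months.
  const timestampms = Date.UTC(+g.year, +g.month - 1, +g.day, +g.hour, +g.minute, +g.second);
  return { timestampms, text: g.text };
}

console.log(parseLine('2013-04-04T13:15:00Z someuser: I wrote some text'));
// { timestampms: 1365081300000, text: 'I wrote some text' }
```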
My post · Entropista

My post

http://feedproxy.google.com/~r/geledes/~3/EdmSLCOQs3o/

oentropista/oentropista.github.io
_site/post/index.html
HTML
mit
2,129
code        stringlengths 3–1.05M
repo_name   stringlengths 4–116
path        stringlengths 3–942
language    stringclasses 30 values
license     stringclasses 15 values
size        int32 3–1.05M
    <!-- ~ Copyright (c) 2017. MIT-license for Jari Van Melckebeke ~ Note that there was a lot of educational work in this project, ~ this project was (or is) used for an assignment from Realdolmen in Belgium. ~ Please just don't abuse my work --> <html> <head> <meta charset="utf-8"> <script src="esl.js"></script> <script src="config.js"></script> </head> <body> <style> html, body, #main { width: 100%; height: 100%; } </style> <div id="main"></div> <script> require([ 'echarts', 'echarts/chart/scatter', 'echarts/component/legend', 'echarts/component/polar' ], function (echarts) { var chart = echarts.init(document.getElementById('main'), null, { renderer: 'canvas' }); var data1 = []; var data2 = []; var data3 = []; for (var i = 0; i < 100; i++) { data1.push([Math.random() * 5, Math.random() * 360]); data2.push([Math.random() * 5, Math.random() * 360]); data3.push([Math.random() * 10, Math.random() * 360]); } chart.setOption({ legend: { data: ['scatter', 'scatter2', 'scatter3'] }, polar: { }, angleAxis: { type: 'value' }, radiusAxis: { axisAngle: 0 }, series: [{ coordinateSystem: 'polar', name: 'scatter', type: 'scatter', symbolSize: 10, data: data1 }, { coordinateSystem: 'polar', name: 'scatter2', type: 'scatter', symbolSize: 10, data: data2 }, { coordinateSystem: 'polar', name: 'scatter3', type: 'scatter', symbolSize: 10, data: data3 }] }); }) </script> </body> </html>
    N00bface/Real-Dolmen-Stage-Opdrachten
    stageopdracht/src/main/resources/static/vendors/gentelella/vendors/echarts/test/polarScatter.html
    HTML
    mit
    2,536
# Contributing

I explicitly welcome contributions from people who have never contributed to open-source before: we were all beginners once! I can help build on a partially working pull request with the aim of getting it merged. I am also actively seeking to diversify our contributors and especially welcome contributions from women from all backgrounds and people of color. <sup>[1](#attribution)</sup>

If you're interested in contributing, fork this repo and create a pull request. Please include a short descriptive link to your code in the readme, and order the link alphabetically by file name. Include a description of each data structure or algorithm at the top of the file, and if you feel that your code needs further explanation, you can include a more detailed summary in the Data Structures or Algorithms subfolder's readme.

Please follow the [Ruby](https://github.com/bbatsov/ruby-style-guide) and [JavaScript](https://github.com/airbnb/javascript) Style Guides. Tests are recommended, but optional.

If you're looking for inspiration, I'd love to have a:

+ [Priority Queue](https://en.wikipedia.org/wiki/Priority_queue)
+ [Valid Sudoku Board](https://en.wikipedia.org/wiki/Sudoku_solving_algorithms)
+ [Sorting Algorithms](https://en.wikipedia.org/wiki/Sorting_algorithm#Popular_sorting_algorithms)
+ [A* Search Algorithm](https://en.wikipedia.org/wiki/A*_search_algorithm)
+ [Knuth-Morris-Pratt Algorithm](https://en.wikipedia.org/wiki/Knuth%E2%80%93Morris%E2%80%93Pratt_algorithm)
+ [Heap](https://en.wikipedia.org/wiki/Heap_\(data_structure\))
+ [Bloom Filter](https://en.wikipedia.org/wiki/Bloom_filter)
+ [Or refactor one of these files!](/REFACTOR.md)

## Attribution

1. I used and modified [Homebrew's](https://github.com/Homebrew/brew#contributing) welcoming contributing section.
    Dbz/Algorithms
    CONTRIBUTING.md
    Markdown
    mit
    1,821
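Purely as an illustration of the format the guide above asks for (a short description at the top of the file, then the implementation), here is a sketch of the first item on its wish list. It is not a file from this repo, and the `MinHeap` name and its `push`/`peek`/`pop` API are invented for the example.

```js
// MinHeap: an array-backed binary min-heap usable as a priority queue.
// push and pop run in O(log n); peek runs in O(1). The smallest element
// always sits at index 0; children of index i live at 2i+1 and 2i+2.
class MinHeap {
  constructor() { this.items = []; }

  push(value) {
    this.items.push(value);
    let i = this.items.length - 1;
    while (i > 0) { // sift the new value up while it beats its parent
      const parent = (i - 1) >> 1;
      if (this.items[parent] <= this.items[i]) break;
      [this.items[parent], this.items[i]] = [this.items[i], this.items[parent]];
      i = parent;
    }
  }

  peek() { return this.items[0]; }

  pop() {
    const top = this.items[0];
    const last = this.items.pop();
    if (this.items.length > 0) {
      this.items[0] = last;
      let i = 0; // sift the moved value down below its smaller child
      for (;;) {
        const l = 2 * i + 1, r = l + 1;
        let smallest = i;
        if (l < this.items.length && this.items[l] < this.items[smallest]) smallest = l;
        if (r < this.items.length && this.items[r] < this.items[smallest]) smallest = r;
        if (smallest === i) break;
        [this.items[i], this.items[smallest]] = [this.items[smallest], this.items[i]];
        i = smallest;
      }
    }
    return top;
  }
}

const heap = new MinHeap();
[5, 1, 4, 2].forEach(n => heap.push(n));
console.log(heap.pop(), heap.pop()); // 1 2
```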
    Alchemy sentiment analysis: fb12d2c55fff36e1e268584e261b6b010b37279f Africa Is Talking: 676dbd926bbb04fa69ce90ee81d3f5ffee2692aaf80eb5793bd70fe93e77dc2e
    crakama/bc_7_twitment
    keys.py
    Python
    mit
    156
    module.exports = { project: { server: { basePath: '', ip: '0.0.0.0', request: { sesskey: 'sid', limit: 5000, parameters: 60 }, render: 'swig', path: { routes: 'app/routes', views: 'app/views', public: 'public/', docs: false }, views: { extension: 'swig', errors: 'errors/' } } }, environment: { server: { debug: true, host: 'localhost', port: 3000, request: { secret: new Date().getTime() + '' + Math.random(), cors: true, geolocation: false }, views: { cache: false } } } };
    PearlVentures/Crux
    boilerplate/server/config.js
    JavaScript
    mit
    699
    <?php namespace RedMedica\ConsultasBundle\Entity; use Doctrine\Common\Collections\ArrayCollection; use Doctrine\ORM\Mapping as ORM; use RedMedica\ConsultasBundle\Entity\Article; use FOS\ElasticaBundle\Configuration\Search; /** * Category * * @ORM\Table(name="category") * @ORM\Entity() * @Search(repositoryClass="RedMedica\ConsultasBundle\Entity\SearchRepository\CategoryRepository") */ class Category { /** * @var integer * * @ORM\Column(name="id", type="integer", nullable=false) * @ORM\Id * @ORM\GeneratedValue(strategy="IDENTITY") */ protected $id; /** * @var string * * @ORM\Column(name="label", type="string", length=250, nullable=false) */ protected $label; /** * @var Doctrine\Common\Collections\ArrayCollection * * @ORM\OneToMany(targetEntity="RedMedica\ConsultasBundle\Entity\Article", mappedBy="category") */ protected $articles; public function __construct() { $this->articles = new ArrayCollection(); } public function __toString() { return $this->label; } public function getId() { return $this->id; } public function setLabel($label) { $this->label = $label; return $this; } public function getLabel() { return $this->label; } public function addArticle(Article $article) { $this->articles->add($article); return $this; } public function setArticles($articles) { $this->articles = $articles; return $this; } public function getArticles() { return $this->articles; } }
    dysan1376/hospi
    src/RedMedica/ConsultasBundle/Entity/Category.php
    PHP
    mit
    1,682
import React from "react";
import styled from 'styled-components'
import Link from './link';

const nextArrow = "/icons/next-arrow.png";
const prevArrow = "/icons/prev-arrow.png";

const PatternLink = styled.span`
  width: 100%;
  display: flex;
  flex-direction: column;
  padding: 1em;
  float: ${props => props.previous ? 'left' : 'right'};

  @media(min-width: $width-tablet) {
    width: auto;
  }
`;

const ImageContainer = styled.span`
  height: 50px;
`;

const Image = styled.img`
  height: 100%;
  background-color: white;
  float: ${props => props.previous ? 'right' : 'left'};
`;

const ArrowContainer = styled.div`
  display: flex;
  flex-direction: ${props => props.previous ? 'row-reverse' : 'row'};
  align-items: center;
`;

const Name = styled.p`
  padding: 10px 0;
`;

const Arrow = styled.img`
  height: 10px;
  padding: ${props => props.previous ? '0 10px 0 0' : '0 0 0 10px'};
`;

const NextPrevPattern = ({pattern, direction}) => {
  const previous = direction === "previous"

  return (
    <Link href={pattern.url}>
      <PatternLink previous={previous}>
        <ImageContainer>
          <Image previous={previous} src={pattern.painted || pattern.lineDrawing} />
        </ImageContainer>
        <ArrowContainer previous={previous}>
          <Name>{pattern.name}</Name>
          { (direction === "next") && <Arrow src={nextArrow}/> }
          { (direction === "previous") && <Arrow previous src={prevArrow} /> }
        </ArrowContainer>
      </PatternLink>
    </Link>
  )
};

export default NextPrevPattern;
    redfieldstefan/kibaktile.com
    src/components/next-prev-pattern.js
    JavaScript
    mit
    1,640
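A hypothetical usage of the component above, not part of the repo. The shape of the `pattern` object is inferred from the props the component reads (`url`, `name`, and `painted` falling back to `lineDrawing`), and the import paths are illustrative.

```js
import React from 'react';
import NextPrevPattern from './next-prev-pattern';

// Shape inferred from the component: it reads pattern.url, pattern.name,
// and prefers pattern.painted over pattern.lineDrawing for the image.
const pattern = {
  url: '/patterns/medallion',
  name: 'Medallion',
  painted: '/images/medallion-painted.png',
  lineDrawing: '/images/medallion-line.png',
};

// direction selects the arrow art and mirrors the layout through the
// `previous` prop threaded into the styled-components above.
const Pager = () => <NextPrevPattern pattern={pattern} direction="next" />;

export default Pager;
```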
    // Copyright (c) 2009-2010 Satoshi Nakamoto // Copyright (c) 2009-2016 The Bitcoin Core developers // Distributed under the MIT software license, see the accompanying // file COPYING or http://www.opensource.org/licenses/mit-license.php. #ifndef DIGIBYTE_NET_PROCESSING_H #define DIGIBYTE_NET_PROCESSING_H #include "net.h" #include "validationinterface.h" /** Default for -maxorphantx, maximum number of orphan transactions kept in memory */ static const unsigned int DEFAULT_MAX_ORPHAN_TRANSACTIONS = 100; /** Expiration time for orphan transactions in seconds */ static const int64_t ORPHAN_TX_EXPIRE_TIME = 20 * 60; /** Minimum time between orphan transactions expire time checks in seconds */ static const int64_t ORPHAN_TX_EXPIRE_INTERVAL = 5 * 60; /** Default number of orphan+recently-replaced txn to keep around for block reconstruction */ static const unsigned int DEFAULT_BLOCK_RECONSTRUCTION_EXTRA_TXN = 100; /** Register with a network node to receive its signals */ void RegisterNodeSignals(CNodeSignals& nodeSignals); /** Unregister a network node */ void UnregisterNodeSignals(CNodeSignals& nodeSignals); class PeerLogicValidation : public CValidationInterface { private: CConnman* connman; public: PeerLogicValidation(CConnman* connmanIn); virtual void SyncTransaction(const CTransaction& tx, const CBlockIndex* pindex, int nPosInBlock); virtual void UpdatedBlockTip(const CBlockIndex *pindexNew, const CBlockIndex *pindexFork, bool fInitialDownload); virtual void BlockChecked(const CBlock& block, const CValidationState& state); virtual void NewPoWValidBlock(const CBlockIndex *pindex, const std::shared_ptr<const CBlock>& pblock); }; struct CNodeStateStats { int nMisbehavior; int nSyncHeight; int nCommonHeight; std::vector<int> vHeightInFlight; }; /** Get statistics from node state */ bool GetNodeStateStats(NodeId nodeid, CNodeStateStats &stats); /** Increase a node's misbehavior score. */ void Misbehaving(NodeId nodeid, int howmuch); /** Process protocol messages received from a given node */ bool ProcessMessages(CNode* pfrom, CConnman& connman, std::atomic<bool>& interrupt); /** * Send queued protocol messages to be sent to a give node. * * @param[in] pto The node which we are sending messages to. * @param[in] connman The connection manager for that node. * @param[in] interrupt Interrupt condition for processing threads * @return True if there is more work to be done */ bool SendMessages(CNode* pto, CConnman& connman, std::atomic<bool>& interrupt); #endif // DIGIBYTE_NET_PROCESSING_H
    DigiByte-Team/digibyte
    src/net_processing.h
    C
    mit
    2,633
    # # $Header: svn://svn/SWM/trunk/web/Reports/ReportAdvanced_TXSold.pm 8251 2013-04-08 09:00:53Z rlee $ # package Reports::ReportAdvanced_TXSold; use strict; use lib "."; use ReportAdvanced_Common; use Reports::ReportAdvanced; our @ISA =qw(Reports::ReportAdvanced); use strict; sub _getConfiguration { my $self = shift; my $currentLevel = $self->{'EntityTypeID'} || 0; my $Data = $self->{'Data'}; my $SystemConfig = $self->{'SystemConfig'}; my $clientValues = $Data->{'clientValues'}; my $CommonVals = getCommonValues( $Data, { MYOB => 1, }, ); my $txt_Clr = $Data->{'SystemConfig'}{'txtCLR'} || 'Clearance'; my %config = ( Name => 'Transactions Sold Report', StatsReport => 0, MemberTeam => 0, ReportEntity => 3, ReportLevel => 0, Template => 'default_adv', TemplateEmail => 'default_adv_CSV', DistinctValues => 1, SQLBuilder => \&SQLBuilder, DefaultPermType => 'NONE', Fields => { intPaymentType=> [ 'Payment Type', { active=>1, displaytype=>'lookup', fieldtype=>'dropdown', dropdownoptions => \%Defs::paymentTypes, allowsort=>1, dbfield=>'TL.intPaymentType' } ], strTXN=> [ 'PayPal Reference Number', { displaytype=>'text', fieldtype=>'text', dbfield=>'TL.strTXN', active=>1 } ], intLogID=> [ 'Payment Log ID', { displaytype=>'text', fieldtype=>'text', dbfield=>'TL.intLogID', allowgrouping=>1, active=>1 } ], dtSettlement=> [ 'Settlement Date', { active=>1, displaytype=>'date', fieldtype=>'datetime', allowsort=>1, dbformat=>' DATE_FORMAT(dtSettlement,"%d/%m/%Y %H:%i")' } ], intAmount => [ 'Total Amount Paid', { displaytype=>'currency', fieldtype=>'text', allowsort=>1, dbfield=>'TL.intAmount', active=>1 } ], SplitAmount=> [ 'Split Amount', { displaytype=>'currency', fieldtype=>'text', allowsort=>1, total=>1, active=>1 } ], SplitLevel=> [ 'Split Level', { displaytype=>'text', fieldtype=>'text', allowsort=>1, active=>1 } ], PaymentFor=> [ 'Payment For', { active=>1, displaytype=>'text', fieldtype=>'text', allowsort => 1 } ], intExportBankFileID=> [ 'PayPal Distribution ID', { displaytype=>'text', fieldtype=>'text', dbfield=>'intExportAssocBankFileID' } ], intMyobExportID=> [ 'SP Invoice Run', { displaytype=>'lookup', fieldtype=>'dropdown', dropdownoptions => $CommonVals->{'MYOB'}{'Values'}, active=>1, dbfield=>'intMyobExportID' } ], dtRun=> [ 'Date Funds Received', { displaytype=>'date', fieldtype=>'date', allowsort=>1, dbformat=>' DATE_FORMAT(dtRun,"%d/%m/%Y")', allowgrouping=>1, sortfield=>'TL.dtSettlement' } ], }, Order => [qw( intLogID intPaymentType strTXN intAmount dtSettlement PaymentFor SplitLevel SplitAmount intMyobExportID )], OptionGroups => { default => ['Details',{}], }, Config => { FormFieldPrefix => 'c', FormName => 'txnform_', EmailExport => 1, limitView => 5000, EmailSenderAddress => $Defs::admin_email, SecondarySort => 1, RunButtonLabel => 'Run Report', }, ); $self->{'Config'} = \%config; } sub SQLBuilder { my($self, $OptVals, $ActiveFields) =@_ ; my $currentLevel = $self->{'EntityTypeID'} || 0; my $intID = $self->{'EntityID'} || 0; my $Data = $self->{'Data'}; my $clientValues = $Data->{'clientValues'}; my $SystemConfig = $Data->{'SystemConfig'}; my $from_levels = $OptVals->{'FROM_LEVELS'}; my $from_list = $OptVals->{'FROM_LIST'}; my $where_levels = $OptVals->{'WHERE_LEVELS'}; my $where_list = $OptVals->{'WHERE_LIST'}; my $current_from = $OptVals->{'CURRENT_FROM'}; my $current_where = $OptVals->{'CURRENT_WHERE'}; my $select_levels = $OptVals->{'SELECT_LEVELS'}; my $sql = ''; { #Work out SQL my $clubWHERE = $currentLevel == $Defs::LEVEL_CLUB ? 
qq[ AND ML.intClubID = $intID ] : ''; $sql = qq[ SELECT DISTINCT TL.intLogID, TL.intAmount, TL.strTXN, TL.intPaymentType, ML.intLogType, ML.intEntityType, ML.intMyobExportID, dtSettlement, IF(T.intTableType=$Defs::LEVEL_PERSON, CONCAT(M.strLocalSurname, ", ", M.strLocalFirstname), Entity.strLocalName) as PaymentFor, SUM(ML.curMoney) as SplitAmount, IF(ML.intEntityType = $Defs::LEVEL_NATIONAL, 'National Split', IF(ML.intEntityType = $Defs::LEVEL_STATE, 'State Split', IF(ML.intEntityType = $Defs::LEVEL_REGION, 'Region Split', IF(ML.intEntityType = $Defs::LEVEL_ZONE, 'Zone Split', IF(ML.intEntityType = $Defs::LEVEL_CLUB, 'Club Split', IF((ML.intEntityType = 0 AND intLogType IN (2,3)), 'Fees', '') ) ) ) ) ) as SplitLevel FROM tblTransLog as TL INNER JOIN tblMoneyLog as ML ON ( ML.intTransLogID = TL.intLogID AND ML.intLogType IN ($Defs::ML_TYPE_SPMAX, $Defs::ML_TYPE_LPF, $Defs::ML_TYPE_SPLIT) ) LEFT JOIN tblTransactions as T ON ( T.intTransactionID = ML.intTransactionID ) LEFT JOIN tblPerson as M ON ( M.intPersonID = T.intID AND T.intTableType = $Defs::LEVEL_PERSON ) LEFT JOIN tblEntity as Entity ON ( Entity.intEntityID = T.intID AND T.intTableType = $Defs::LEVEL_PERSON ) LEFT JOIN tblRegoForm as RF ON ( RF.intRegoFormID= TL.intRegoFormID ) WHERE TL.intRealmID = $Data->{'Realm'} $clubWHERE $where_list GROUP BY TL.intLogID ]; return ($sql,''); } } 1;
    facascante/slimerp
    fifs/web/Reports/ReportAdvanced_TXSold.pm
    Perl
    mit
    5,822
    #!/usr/bin/node --harmony 'use strict' const noble = require('noble'), program = require('commander') program .version('0.0.1') .option('-p, --prefix <integer>', 'Manufacturer identifier prefixed to all fan commands', parseInt) .option('-t, --target [mac]', 'MAC address of devices to target', function(val){ return val.toLowerCase() }) .option('-s, --service <uuid>', 'UUID of fan controller BLE service') .option('-w, --write <uuid>', 'UUID of fan controller BLE write characteristic') .option('-n, --notify <uuid>', 'UUID of fan controller BLE notify characteristic') class FanRequest { writeInto(buffer) { throw new TypeError('Must override method') } toBuffer() { var buffer if (program.prefix > 0) { buffer = new Buffer(13) buffer.writeUInt8(program.prefix) this.writeInto(buffer.slice(1)) } else { buffer = new Buffer(12) this.writeInto(buffer) } const checksum = buffer.slice(0, buffer.length - 1).reduce(function(a, b){ return a + b }, 0) & 255 buffer.writeUInt8(checksum, buffer.length - 1) return buffer } } class FanGetStateRequest extends FanRequest { writeInto(buffer) { buffer.fill(0) buffer.writeUInt8(160) } } Math.clamp = function(number, min, max) { return Math.max(min, Math.min(number, max)) } class FanUpdateLightRequest extends FanRequest { constructor(isOn, level) { super() this.on = isOn ? 1 : 0 this.level = Math.clamp(level, 0, 100) } writeInto(buffer) { buffer.fill(0) buffer.writeUInt8(161) buffer.writeUInt8(255, 4) buffer.writeUInt8(100, 5) buffer.writeUInt8((this.on << 7) | this.level, 6) buffer.fill(255, 7, 10) } } class FanUpdateLevelRequest extends FanRequest { constructor(level) { super() this.level = Math.clamp(level, 0, 3) } writeInto(buffer) { buffer.fill(0) buffer.writeUInt8(161) buffer.writeUInt8(this.level, 4) buffer.fill(255, 5, 10) } } class FanResponse { static fromBuffer(buffer) { if (program.prefix > 0) { buffer = buffer.slice(1) } if (buffer.readUInt8(0) != 176) { return null } const response = new FanResponse() const windVelocity = buffer.readUInt8(2) response.supportsFanReversal = (windVelocity & 0b00100000) != 0 response.maximumFanLevel = windVelocity & 0b00011111 const currentWindVelocity = buffer.readUInt8(4) response.isFanReversed = (currentWindVelocity & 0b10000000) != 0 response.fanLevel = currentWindVelocity & 0b00011111 const currentBrightness = buffer.readUInt8(6) response.lightIsOn = (currentBrightness & 0b10000000) != 0 response.lightBrightness = (currentBrightness & 0b01111111) return response } } // MARK: - var command program .command('current') .description('print current state') .action(function(env, options) { command = new FanGetStateRequest() }) program .command('fan') .description('adjusts the fan') .option('-l --level <size>', 'Fan speed', /^(off|low|medium|high)$/i, 'high') .action(function(env, options) { var level switch (env.level) { case 'low': level = 1 break case 'medium': level = 2 break case 'high': level = 3 break default: level = 0 break } command = new FanUpdateLevelRequest(level) }) program .command('light <on|off>') .description('adjusts the light') .option('-l, --level <percent>', 'Light brightness', parseInt, 100) .action(function(env, options) { command = new FanUpdateLightRequest(env !== 'off', options.level) }) program.parse(process.argv); if (!command) { program.help(); } if (!program.target) { throw new Error('MAC address required') } const serviceUUID = program.service || '539c681361a021374f79bf1a11984790' const writeUUID = program.write || '539c681361a121374f79bf1a11984790' const notifyUUID = program.notify || 
'539c681361a221374f79bf1a11984790' noble.on('stateChange', function(state) { if (state === 'poweredOn') { console.log('scanning.') noble.startScanning([ serviceUUID ], false) } else { noble.stopScanning() } }) noble.on('discover', function(peripheral) { console.log('found ' + peripheral.address) if (peripheral.address !== program.target) { return } noble.stopScanning() explore(peripheral) }); function bail(error) { console.log('failed: ' + error); process.exit(1) } function explore(peripheral) { console.log('connecting.') peripheral.once('disconnect', function() { peripheral.removeAllListeners() explore(peripheral) }) peripheral.connect(function(error) { if (error) { bail(error); } peripheral.discoverSomeServicesAndCharacteristics([ serviceUUID ], [ writeUUID, notifyUUID ], function(error, services, characteristics) { if (error) { bail(error); } var service = services[0] var write = characteristics[0], notify = characteristics[1] notify.on('data', function(data, isNotification) { const response = FanResponse.fromBuffer(data) if (response) { console.log(response) } else { console.log('sent') } process.exit() }) notify.subscribe(function(error) { if (error) { bail(error); } console.log('sending') const buffer = command.toBuffer() write.write(buffer, false, function(error){ if (error) { bail(error); } }) }) }) }) }
    zwaldowski/homebridge-satellite-fan
    test/poc.js
    JavaScript
    mit
    5,557
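Each frame built by `FanRequest.toBuffer()` above ends in a one-byte checksum: the sum of all preceding bytes, masked to 8 bits. Below is a small sketch of the matching verification under that same framing assumption; `checksumIsValid` is not part of the file.

```js
// Hypothetical helper: check the trailing checksum of a frame produced by
// FanRequest.toBuffer(), using the same rule the file applies when writing it.
function checksumIsValid(buffer) {
  const expected = buffer
    .slice(0, buffer.length - 1)
    .reduce((a, b) => a + b, 0) & 255;
  return buffer[buffer.length - 1] === expected;
}

// e.g. checksumIsValid(new FanGetStateRequest().toBuffer()) === true
```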
---
layout: post
category: "Tools"
tags: [zsh,fish,linux]
---

[TOC]

### A zsh bug on Yosemite? ###

After updating to Mac Yosemite I ran into all sorts of problems, first with php and
macport, and then with zsh. For some reason `zsh` was almost constantly at 100% CPU,
which made my MacBook's battery drain rapidly. Very frustrating.

I first suspected a plugin, but the problem persisted even with every plugin disabled.
I had used fish before and found it a pretty good shell: convenient by design and quite
capable. So I decided to just switch to fish.

![image](/public/img/zsh-yosemite-bug.png)

> After sending a mail to the zsh list,
> reply: Any chance that it's this issue with zsh-autosuggestions?

[Problem solved](https://github.com/tarruda/zsh-autosuggestions/issues/24)

It turned out to be a zsh-autosuggestions issue after all.

### What fish does well ###

- Autosuggestions: automatic hints from history, command completion, and so on; very convenient
- Command completions that even include hints from man pages
- fish has some of my favourite `zsh` plugins too, e.g. `autojump`, easily installed through [oh-my-fish](https://github.com/bpinto/oh-my-fish)
- The `fish-config` command lets you edit the fish configuration interactively

To be fair, `zsh` can do some of this as well, but personally I find its completion not as
good as `fish`'s. The `zsh` community has always been stronger and `fish` has fewer plugins,
but in everyday use you rarely need many plugins anyway, and `zsh` gets slow once you load
more than a few.

I had always used [oh-my-zsh](https://github.com/robbyrussell/oh-my-zsh), but after
discovering [oh-my-fish](https://github.com/bpinto/oh-my-fish) I wanted to switch,
since I had always assumed fish had no plugin support.

### Installing [oh-my-fish](https://github.com/bpinto/oh-my-fish) ###

    brew install fish
    sudo vi /etc/shells

Add /usr/local/bin/fish there, otherwise the next command will fail:

    chsh -s /usr/local/bin/fish
    git clone git://github.com/bpinto/oh-my-fish.git ~/.oh-my-fish

Copy the config file:

    cp ~/.oh-my-fish/templates/config.fish ~/.config/fish/config.fish

### Configuring fish ###

- Edit ~/.config/fish/config.fish

```
set fish_plugins autojump bundler brew
set -xu PATH /usr/local/bin:$PATH
```

The one annoyance is that `export` cannot be used here; use `set -x` instead:
`set -x PATH /usr/local/bin $PATH`

- -x : export the variable
- -u : apply to all fish sessions

### Writing fish plugins ###

fish plugins look very easy to understand; they are just functions:

```fish
function rg
  rails generate $argv
end
```
    algking/algking.github.com
    _posts/2014-10-19-从oh-my-zsh到oh-my-fish.md
    Markdown
    mit
    2,439
    const HEX_SHORT = /^#([a-fA-F0-9]{3})$/; const HEX = /^#([a-fA-F0-9]{6})$/; function roundColors(obj, round) { if (!round) return obj; const o = {}; for (let k in obj) { o[k] = Math.round(obj[k]); } return o; } function hasProp(obj, key) { return obj.hasOwnProperty(key); } function isRgb(obj) { return hasProp(obj, "r") && hasProp(obj, "g") && hasProp(obj, "b"); } export default class Color { static normalizeHex(hex) { if (HEX.test(hex)) { return hex; } else if (HEX_SHORT.test(hex)) { const r = hex.slice(1, 2); const g = hex.slice(2, 3); const b = hex.slice(3, 4); return `#${r + r}${g + g}${b + b}`; } return null; } static hexToRgb(hex) { const normalizedHex = this.normalizeHex(hex); if (normalizedHex == null) { return null; } const m = normalizedHex.match(HEX); const i = parseInt(m[1], 16); const r = (i >> 16) & 0xFF; const g = (i >> 8) & 0xFF; const b = i & 0xFF; return { r, g, b }; } static rgbToHex(rgb) { const { r, g, b} = rgb; const i = ((Math.round(r) & 0xFF) << 16) + ((Math.round(g) & 0xFF) << 8) + (Math.round(b) & 0xFF); const s = i.toString(16).toLowerCase(); return `#${"000000".substring(s.length) + s}`; } static rgbToHsv(rgb, round = true) { const { r, g, b } = rgb; const min = Math.min(r, g, b); const max = Math.max(r, g, b); const delta = max - min; const hsv = {}; if (max === 0) { hsv.s = 0; } else { hsv.s = (delta / max * 1000) / 10; } if (max === min) { hsv.h = 0; } else if (r === max) { hsv.h = (g - b) / delta; } else if (g === max) { hsv.h = 2 + (b - r) / delta; } else { hsv.h = 4 + (r - g) / delta; } hsv.h = Math.min(hsv.h * 60, 360); hsv.h = hsv.h < 0 ? hsv.h + 360 : hsv.h; hsv.v = ((max / 255) * 1000) / 10; return roundColors(hsv, round); } static rgbToXyz(rgb, round = true) { const r = rgb.r / 255; const g = rgb.g / 255; const b = rgb.b / 255; const rr = r > 0.04045 ? Math.pow(((r + 0.055) / 1.055), 2.4) : r / 12.92; const gg = g > 0.04045 ? Math.pow(((g + 0.055) / 1.055), 2.4) : g / 12.92; const bb = b > 0.04045 ? Math.pow(((b + 0.055) / 1.055), 2.4) : b / 12.92; const x = (rr * 0.4124 + gg * 0.3576 + bb * 0.1805) * 100; const y = (rr * 0.2126 + gg * 0.7152 + bb * 0.0722) * 100; const z = (rr * 0.0193 + gg * 0.1192 + bb * 0.9505) * 100; return roundColors({ x, y, z }, round); } static rgbToLab(rgb, round = true) { const xyz = Color.rgbToXyz(rgb, false); let { x, y, z } = xyz; x /= 95.047; y /= 100; z /= 108.883; x = x > 0.008856 ? Math.pow(x, 1 / 3) : 7.787 * x + 16 / 116; y = y > 0.008856 ? Math.pow(y, 1 / 3) : 7.787 * y + 16 / 116; z = z > 0.008856 ? Math.pow(z, 1 / 3) : 7.787 * z + 16 / 116; const l = (116 * y) - 16; const a = 500 * (x - y); const b = 200 * (y - z); return roundColors({ l, a, b }, round); } constructor(value) { this.original = value; if (isRgb(value)) { this.rgb = value; this.hex = Color.rgbToHex(value); } else { this.hex = Color.normalizeHex(value); this.rgb = Color.hexToRgb(this.hex); } this.hsv = Color.rgbToHsv(this.rgb); } }
    tsuyoshiwada/color-classifier
    src/utils/color.js
    JavaScript
    mit
    3,342
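A hypothetical usage of the `Color` class above (the import path is illustrative). The constructor accepts either a hex string or an `{r, g, b}` object, normalizes short hex forms, and precomputes the `hex`, `rgb`, and `hsv` fields; the static converters can also be called directly.

```js
import Color from './color';

const c = new Color('#1e90ff');
console.log(c.rgb);  // { r: 30, g: 144, b: 255 }
console.log(c.hsv);  // rounded: { h: 210, s: 88, v: 100 }

// Static converters work without an instance:
console.log(Color.rgbToHex({ r: 30, g: 144, b: 255 })); // '#1e90ff'
console.log(Color.normalizeHex('#abc'));                // '#aabbcc'
console.log(Color.rgbToLab(c.rgb));                     // rounded { l, a, b }
```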
    export { default } from 'ember-validation/components/ember-validation-error-list';
    ajile/ember-validation
    app/components/ember-validation-error-list.js
    JavaScript
    mit
    83
    .WeatherStations { margin: 30px 30px 30px 30px; } .clear{ clear: both; }
    atSistemas/react-base
    src/app/containers/WeatherStations/styles.css
    CSS
    mit
    77
    /*global window */ /** * @license countdown.js v2.5.2 http://countdownjs.org * Copyright (c)2006-2014 Stephen M. McKamey. * Licensed under The MIT License. */ /*jshint bitwise:false */ /** * @public * @type {Object|null} */ var module; /** * API entry * @public * @param {function(Object)|Date|number} start the starting date * @param {function(Object)|Date|number} end the ending date * @param {number} units the units to populate * @return {Object|number} */ var countdown = ( /** * @param {Object} module CommonJS Module */ function(module) { /*jshint smarttabs:true */ 'use strict'; /** * @private * @const * @type {number} */ var MILLISECONDS = 0x001; /** * @private * @const * @type {number} */ var SECONDS = 0x002; /** * @private * @const * @type {number} */ var MINUTES = 0x004; /** * @private * @const * @type {number} */ var HOURS = 0x008; /** * @private * @const * @type {number} */ var DAYS = 0x010; /** * @private * @const * @type {number} */ var WEEKS = 0x020; /** * @private * @const * @type {number} */ var MONTHS = 0x040; /** * @private * @const * @type {number} */ var YEARS = 0x080; /** * @private * @const * @type {number} */ var DECADES = 0x100; /** * @private * @const * @type {number} */ var CENTURIES = 0x200; /** * @private * @const * @type {number} */ var MILLENNIA = 0x400; /** * @private * @const * @type {number} */ var DEFAULTS = YEARS|MONTHS|DAYS|HOURS|MINUTES|SECONDS; /** * @private * @const * @type {number} */ var MILLISECONDS_PER_SECOND = 1000; /** * @private * @const * @type {number} */ var SECONDS_PER_MINUTE = 60; /** * @private * @const * @type {number} */ var MINUTES_PER_HOUR = 60; /** * @private * @const * @type {number} */ var HOURS_PER_DAY = 24; /** * @private * @const * @type {number} */ var MILLISECONDS_PER_DAY = HOURS_PER_DAY * MINUTES_PER_HOUR * SECONDS_PER_MINUTE * MILLISECONDS_PER_SECOND; /** * @private * @const * @type {number} */ var DAYS_PER_WEEK = 7; /** * @private * @const * @type {number} */ var MONTHS_PER_YEAR = 12; /** * @private * @const * @type {number} */ var YEARS_PER_DECADE = 10; /** * @private * @const * @type {number} */ var DECADES_PER_CENTURY = 10; /** * @private * @const * @type {number} */ var CENTURIES_PER_MILLENNIUM = 10; /** * @private * @param {number} x number * @return {number} */ var ceil = Math.ceil; /** * @private * @param {number} x number * @return {number} */ var floor = Math.floor; /** * @private * @param {Date} ref reference date * @param {number} shift number of months to shift * @return {number} number of days shifted */ function borrowMonths(ref, shift) { var prevTime = ref.getTime(); // increment month by shift ref.setMonth( ref.getMonth() + shift ); // this is the trickiest since months vary in length return Math.round( (ref.getTime() - prevTime) / MILLISECONDS_PER_DAY ); } /** * @private * @param {Date} ref reference date * @return {number} number of days */ function daysPerMonth(ref) { var a = ref.getTime(); // increment month by 1 var b = new Date(a); b.setMonth( ref.getMonth() + 1 ); // this is the trickiest since months vary in length return Math.round( (b.getTime() - a) / MILLISECONDS_PER_DAY ); } /** * @private * @param {Date} ref reference date * @return {number} number of days */ function daysPerYear(ref) { var a = ref.getTime(); // increment year by 1 var b = new Date(a); b.setFullYear( ref.getFullYear() + 1 ); // this is the trickiest since years (periodically) vary in length return Math.round( (b.getTime() - a) / MILLISECONDS_PER_DAY ); } /** * Applies the Timespan to the given date. 
* * @private * @param {Timespan} ts * @param {Date=} date * @return {Date} */ function addToDate(ts, date) { date = (date instanceof Date) || ((date !== null) && isFinite(date)) ? new Date(+date) : new Date(); if (!ts) { return date; } // if there is a value field, use it directly var value = +ts.value || 0; if (value) { date.setTime(date.getTime() + value); return date; } value = +ts.milliseconds || 0; if (value) { date.setMilliseconds(date.getMilliseconds() + value); } value = +ts.seconds || 0; // if (value) { date.setSeconds(date.getSeconds() + value); // } value = +ts.minutes || 0; if (value) { date.setMinutes(date.getMinutes() + value); } value = +ts.hours || 0; if (value) { date.setHours(date.getHours() + value); } value = +ts.weeks || 0; if (value) { value *= DAYS_PER_WEEK; } value += +ts.days || 0; if (value) { date.setDate(date.getDate() + value); } value = +ts.months || 0; if (value) { date.setMonth(date.getMonth() + value); } value = +ts.millennia || 0; if (value) { value *= CENTURIES_PER_MILLENNIUM; } value += +ts.centuries || 0; if (value) { value *= DECADES_PER_CENTURY; } value += +ts.decades || 0; if (value) { value *= YEARS_PER_DECADE; } value += +ts.years || 0; if (value) { date.setFullYear(date.getFullYear() + value); } return date; } /** * @private * @const * @type {number} */ var LABEL_MILLISECONDS = 0; /** * @private * @const * @type {number} */ var LABEL_SECONDS = 1; /** * @private * @const * @type {number} */ var LABEL_MINUTES = 2; /** * @private * @const * @type {number} */ var LABEL_HOURS = 3; /** * @private * @const * @type {number} */ var LABEL_DAYS = 4; /** * @private * @const * @type {number} */ var LABEL_WEEKS = 5; /** * @private * @const * @type {number} */ var LABEL_MONTHS = 6; /** * @private * @const * @type {number} */ var LABEL_YEARS = 7; /** * @private * @const * @type {number} */ var LABEL_DECADES = 8; /** * @private * @const * @type {number} */ var LABEL_CENTURIES = 9; /** * @private * @const * @type {number} */ var LABEL_MILLENNIA = 10; /** * @private * @type {Array} */ var LABELS_SINGLUAR; /** * @private * @type {Array} */ var LABELS_PLURAL; /** * @private * @type {string} */ var LABEL_LAST; /** * @private * @type {string} */ var LABEL_DELIM; /** * @private * @type {string} */ var LABEL_NOW; /** * Formats a number as a string * * @private * @param {number} value * @return {string} */ var formatNumber; /** * @private * @param {number} value * @param {number} unit unit index into label list * @return {string} */ function plurality(value, unit) { return formatNumber(value)+((value === 1) ? LABELS_SINGLUAR[unit] : LABELS_PLURAL[unit]); } /** * Formats the entries with singular or plural labels * * @private * @param {Timespan} ts * @return {Array} */ var formatList; /** * Timespan representation of a duration of time * * @private * @this {Timespan} * @constructor */ function Timespan() {} /** * Formats the Timespan as a sentence * * @param {string=} emptyLabel the string to use when no values returned * @return {string} */ Timespan.prototype.toString = function(emptyLabel) { var label = formatList(this); var count = label.length; if (!count) { return emptyLabel ? 
''+emptyLabel : LABEL_NOW; } if (count === 1) { return label[0]; } var last = LABEL_LAST+label.pop(); return label.join(LABEL_DELIM)+last; }; /** * Formats the Timespan as a sentence in HTML * * @param {string=} tag HTML tag name to wrap each value * @param {string=} emptyLabel the string to use when no values returned * @return {string} */ Timespan.prototype.toHTML = function(tag, emptyLabel) { tag = tag || 'span'; var label = formatList(this); var count = label.length; if (!count) { emptyLabel = emptyLabel || LABEL_NOW; return emptyLabel ? '<'+tag+'>'+emptyLabel+'</'+tag+'>' : emptyLabel; } for (var i=0; i<count; i++) { // wrap each unit in tag label[i] = '<'+tag+'>'+label[i]+'</'+tag+'>'; } if (count === 1) { return label[0]; } var last = LABEL_LAST+label.pop(); return label.join(LABEL_DELIM)+last; }; /** * Applies the Timespan to the given date * * @param {Date=} date the date to which the timespan is added. * @return {Date} */ Timespan.prototype.addTo = function(date) { return addToDate(this, date); }; /** * Formats the entries as English labels * * @private * @param {Timespan} ts * @return {Array} */ formatList = function(ts) { var list = []; var value = ts.millennia; if (value) { list.push(plurality(value, LABEL_MILLENNIA)); } value = ts.centuries; if (value) { list.push(plurality(value, LABEL_CENTURIES)); } value = ts.decades; if (value) { list.push(plurality(value, LABEL_DECADES)); } value = ts.years; if (value) { list.push(plurality(value, LABEL_YEARS)); } value = ts.months; if (value) { list.push(plurality(value, LABEL_MONTHS)); } value = ts.weeks; if (value) { list.push(plurality(value, LABEL_WEEKS)); } value = ts.days; if (value) { list.push(plurality(value, LABEL_DAYS)); } value = ts.hours; if (value) { list.push(plurality(value, LABEL_HOURS)); } value = ts.minutes; if (value) { list.push(plurality(value, LABEL_MINUTES)); } value = ts.seconds; // if (value) { list.push(plurality(value, LABEL_SECONDS)); // } value = ts.milliseconds; if (value) { list.push(plurality(value, LABEL_MILLISECONDS)); } return list; }; /** * Borrow any underflow units, carry any overflow units * * @private * @param {Timespan} ts * @param {string} toUnit */ function rippleRounded(ts, toUnit) { switch (toUnit) { case 'seconds': if (ts.seconds !== SECONDS_PER_MINUTE || isNaN(ts.minutes)) { return; } // ripple seconds up to minutes ts.minutes++; ts.seconds = 0; /* falls through */ case 'minutes': if (ts.minutes !== MINUTES_PER_HOUR || isNaN(ts.hours)) { return; } // ripple minutes up to hours ts.hours++; ts.minutes = 0; /* falls through */ case 'hours': if (ts.hours !== HOURS_PER_DAY || isNaN(ts.days)) { return; } // ripple hours up to days ts.days++; ts.hours = 0; /* falls through */ case 'days': if (ts.days !== DAYS_PER_WEEK || isNaN(ts.weeks)) { return; } // ripple days up to weeks ts.weeks++; ts.days = 0; /* falls through */ case 'weeks': if (ts.weeks !== daysPerMonth(ts.refMonth)/DAYS_PER_WEEK || isNaN(ts.months)) { return; } // ripple weeks up to months ts.months++; ts.weeks = 0; /* falls through */ case 'months': if (ts.months !== MONTHS_PER_YEAR || isNaN(ts.years)) { return; } // ripple months up to years ts.years++; ts.months = 0; /* falls through */ case 'years': if (ts.years !== YEARS_PER_DECADE || isNaN(ts.decades)) { return; } // ripple years up to decades ts.decades++; ts.years = 0; /* falls through */ case 'decades': if (ts.decades !== DECADES_PER_CENTURY || isNaN(ts.centuries)) { return; } // ripple decades up to centuries ts.centuries++; ts.decades = 0; /* falls through */ case 
'centuries': if (ts.centuries !== CENTURIES_PER_MILLENNIUM || isNaN(ts.millennia)) { return; } // ripple centuries up to millennia ts.millennia++; ts.centuries = 0; /* falls through */ } } /** * Ripple up partial units one place * * @private * @param {Timespan} ts timespan * @param {number} frac accumulated fractional value * @param {string} fromUnit source unit name * @param {string} toUnit target unit name * @param {number} conversion multiplier between units * @param {number} digits max number of decimal digits to output * @return {number} new fractional value */ function fraction(ts, frac, fromUnit, toUnit, conversion, digits) { if (ts[fromUnit] >= 0) { frac += ts[fromUnit]; delete ts[fromUnit]; } frac /= conversion; if (frac + 1 <= 1) { // drop if below machine epsilon return 0; } if (ts[toUnit] >= 0) { // ensure does not have more than specified number of digits ts[toUnit] = +(ts[toUnit] + frac).toFixed(digits); rippleRounded(ts, toUnit); return 0; } return frac; } /** * Ripple up partial units to next existing * * @private * @param {Timespan} ts * @param {number} digits max number of decimal digits to output */ function fractional(ts, digits) { var frac = fraction(ts, 0, 'milliseconds', 'seconds', MILLISECONDS_PER_SECOND, digits); if (!frac) { return; } frac = fraction(ts, frac, 'seconds', 'minutes', SECONDS_PER_MINUTE, digits); if (!frac) { return; } frac = fraction(ts, frac, 'minutes', 'hours', MINUTES_PER_HOUR, digits); if (!frac) { return; } frac = fraction(ts, frac, 'hours', 'days', HOURS_PER_DAY, digits); if (!frac) { return; } frac = fraction(ts, frac, 'days', 'weeks', DAYS_PER_WEEK, digits); if (!frac) { return; } frac = fraction(ts, frac, 'weeks', 'months', daysPerMonth(ts.refMonth)/DAYS_PER_WEEK, digits); if (!frac) { return; } frac = fraction(ts, frac, 'months', 'years', daysPerYear(ts.refMonth)/daysPerMonth(ts.refMonth), digits); if (!frac) { return; } frac = fraction(ts, frac, 'years', 'decades', YEARS_PER_DECADE, digits); if (!frac) { return; } frac = fraction(ts, frac, 'decades', 'centuries', DECADES_PER_CENTURY, digits); if (!frac) { return; } frac = fraction(ts, frac, 'centuries', 'millennia', CENTURIES_PER_MILLENNIUM, digits); // should never reach this with remaining fractional value if (frac) { throw new Error('Fractional unit overflow'); } } /** * Borrow any underflow units, carry any overflow units * * @private * @param {Timespan} ts */ function ripple(ts) { var x; if (ts.milliseconds < 0) { // ripple seconds down to milliseconds x = ceil(-ts.milliseconds / MILLISECONDS_PER_SECOND); ts.seconds -= x; ts.milliseconds += x * MILLISECONDS_PER_SECOND; } else if (ts.milliseconds >= MILLISECONDS_PER_SECOND) { // ripple milliseconds up to seconds ts.seconds += floor(ts.milliseconds / MILLISECONDS_PER_SECOND); ts.milliseconds %= MILLISECONDS_PER_SECOND; } if (ts.seconds < 0) { // ripple minutes down to seconds x = ceil(-ts.seconds / SECONDS_PER_MINUTE); ts.minutes -= x; ts.seconds += x * SECONDS_PER_MINUTE; } else if (ts.seconds >= SECONDS_PER_MINUTE) { // ripple seconds up to minutes ts.minutes += floor(ts.seconds / SECONDS_PER_MINUTE); ts.seconds %= SECONDS_PER_MINUTE; } if (ts.minutes < 0) { // ripple hours down to minutes x = ceil(-ts.minutes / MINUTES_PER_HOUR); ts.hours -= x; ts.minutes += x * MINUTES_PER_HOUR; } else if (ts.minutes >= MINUTES_PER_HOUR) { // ripple minutes up to hours ts.hours += floor(ts.minutes / MINUTES_PER_HOUR); ts.minutes %= MINUTES_PER_HOUR; } if (ts.hours < 0) { // ripple days down to hours x = ceil(-ts.hours / HOURS_PER_DAY); ts.days -= 
x; ts.hours += x * HOURS_PER_DAY; } else if (ts.hours >= HOURS_PER_DAY) { // ripple hours up to days ts.days += floor(ts.hours / HOURS_PER_DAY); ts.hours %= HOURS_PER_DAY; } while (ts.days < 0) { // NOTE: never actually seen this loop more than once // ripple months down to days ts.months--; ts.days += borrowMonths(ts.refMonth, 1); } // weeks is always zero here if (ts.days >= DAYS_PER_WEEK) { // ripple days up to weeks ts.weeks += floor(ts.days / DAYS_PER_WEEK); ts.days %= DAYS_PER_WEEK; } if (ts.months < 0) { // ripple years down to months x = ceil(-ts.months / MONTHS_PER_YEAR); ts.years -= x; ts.months += x * MONTHS_PER_YEAR; } else if (ts.months >= MONTHS_PER_YEAR) { // ripple months up to years ts.years += floor(ts.months / MONTHS_PER_YEAR); ts.months %= MONTHS_PER_YEAR; } // years is always non-negative here // decades, centuries and millennia are always zero here if (ts.years >= YEARS_PER_DECADE) { // ripple years up to decades ts.decades += floor(ts.years / YEARS_PER_DECADE); ts.years %= YEARS_PER_DECADE; if (ts.decades >= DECADES_PER_CENTURY) { // ripple decades up to centuries ts.centuries += floor(ts.decades / DECADES_PER_CENTURY); ts.decades %= DECADES_PER_CENTURY; if (ts.centuries >= CENTURIES_PER_MILLENNIUM) { // ripple centuries up to millennia ts.millennia += floor(ts.centuries / CENTURIES_PER_MILLENNIUM); ts.centuries %= CENTURIES_PER_MILLENNIUM; } } } } /** * Remove any units not requested * * @private * @param {Timespan} ts * @param {number} units the units to populate * @param {number} max number of labels to output * @param {number} digits max number of decimal digits to output */ function pruneUnits(ts, units, max, digits) { var count = 0; // Calc from largest unit to smallest to prevent underflow if (!(units & MILLENNIA) || (count >= max)) { // ripple millennia down to centuries ts.centuries += ts.millennia * CENTURIES_PER_MILLENNIUM; delete ts.millennia; } else if (ts.millennia) { count++; } if (!(units & CENTURIES) || (count >= max)) { // ripple centuries down to decades ts.decades += ts.centuries * DECADES_PER_CENTURY; delete ts.centuries; } else if (ts.centuries) { count++; } if (!(units & DECADES) || (count >= max)) { // ripple decades down to years ts.years += ts.decades * YEARS_PER_DECADE; delete ts.decades; } else if (ts.decades) { count++; } if (!(units & YEARS) || (count >= max)) { // ripple years down to months ts.months += ts.years * MONTHS_PER_YEAR; delete ts.years; } else if (ts.years) { count++; } if (!(units & MONTHS) || (count >= max)) { // ripple months down to days if (ts.months) { ts.days += borrowMonths(ts.refMonth, ts.months); } delete ts.months; if (ts.days >= DAYS_PER_WEEK) { // ripple day overflow back up to weeks ts.weeks += floor(ts.days / DAYS_PER_WEEK); ts.days %= DAYS_PER_WEEK; } } else if (ts.months) { count++; } if (!(units & WEEKS) || (count >= max)) { // ripple weeks down to days ts.days += ts.weeks * DAYS_PER_WEEK; delete ts.weeks; } else if (ts.weeks) { count++; } if (!(units & DAYS) || (count >= max)) { //ripple days down to hours ts.hours += ts.days * HOURS_PER_DAY; delete ts.days; } else if (ts.days) { count++; } if (!(units & HOURS) || (count >= max)) { // ripple hours down to minutes ts.minutes += ts.hours * MINUTES_PER_HOUR; delete ts.hours; } else if (ts.hours) { count++; } if (!(units & MINUTES) || (count >= max)) { // ripple minutes down to seconds ts.seconds += ts.minutes * SECONDS_PER_MINUTE; delete ts.minutes; } else if (ts.minutes) { count++; } if (!(units & SECONDS) || (count >= max)) { // ripple seconds down to 
milliseconds ts.milliseconds += ts.seconds * MILLISECONDS_PER_SECOND; delete ts.seconds; } else if (ts.seconds) { count++; } // nothing to ripple milliseconds down to // so ripple back up to smallest existing unit as a fractional value if (!(units & MILLISECONDS) || (count >= max)) { fractional(ts, digits); } } /** * Populates the Timespan object * * @private * @param {Timespan} ts * @param {?Date} start the starting date * @param {?Date} end the ending date * @param {number} units the units to populate * @param {number} max number of labels to output * @param {number} digits max number of decimal digits to output */ function populate(ts, start, end, units, max, digits) { var now = new Date(); ts.start = start = start || now; ts.end = end = end || now; ts.units = units; ts.value = end.getTime() - start.getTime(); if (ts.value < 0) { // swap if reversed var tmp = end; end = start; start = tmp; } // reference month for determining days in month ts.refMonth = new Date(start.getFullYear(), start.getMonth(), 15, 12, 0, 0); try { // reset to initial deltas ts.millennia = 0; ts.centuries = 0; ts.decades = 0; ts.years = end.getFullYear() - start.getFullYear(); ts.months = end.getMonth() - start.getMonth(); ts.weeks = 0; ts.days = end.getDate() - start.getDate(); ts.hours = end.getHours() - start.getHours(); ts.minutes = end.getMinutes() - start.getMinutes(); ts.seconds = end.getSeconds() - start.getSeconds(); ts.milliseconds = end.getMilliseconds() - start.getMilliseconds(); ripple(ts); pruneUnits(ts, units, max, digits); } finally { delete ts.refMonth; } return ts; } /** * Determine an appropriate refresh rate based upon units * * @private * @param {number} units the units to populate * @return {number} milliseconds to delay */ function getDelay(units) { if (units & MILLISECONDS) { // refresh very quickly return MILLISECONDS_PER_SECOND / 30; //30Hz } if (units & SECONDS) { // refresh every second return MILLISECONDS_PER_SECOND; //1Hz } if (units & MINUTES) { // refresh every minute return MILLISECONDS_PER_SECOND * SECONDS_PER_MINUTE; } if (units & HOURS) { // refresh hourly return MILLISECONDS_PER_SECOND * SECONDS_PER_MINUTE * MINUTES_PER_HOUR; } if (units & DAYS) { // refresh daily return MILLISECONDS_PER_SECOND * SECONDS_PER_MINUTE * MINUTES_PER_HOUR * HOURS_PER_DAY; } // refresh the rest weekly return MILLISECONDS_PER_SECOND * SECONDS_PER_MINUTE * MINUTES_PER_HOUR * HOURS_PER_DAY * DAYS_PER_WEEK; } /** * API entry point * * @public * @param {Date|number|Timespan|null|function(Timespan,number)} start the starting date * @param {Date|number|Timespan|null|function(Timespan,number)} end the ending date * @param {number=} units the units to populate * @param {number=} max number of labels to output * @param {number=} digits max number of decimal digits to output * @return {Timespan|number} */ function countdown(start, end, units, max, digits) { var callback; // ensure some units or use defaults units = +units || DEFAULTS; // max must be positive max = (max > 0) ? max : NaN; // clamp digits to an integer between [0, 20] digits = (digits > 0) ? (digits < 20) ? 
Math.round(digits) : 20 : 0; // ensure start date var startTS = null; if ('function' === typeof start) { callback = start; start = null; } else if (!(start instanceof Date)) { if ((start !== null) && isFinite(start)) { start = new Date(+start); } else { if ('object' === typeof start) { startTS = /** @type{Timespan} */(start); } start = null; } } // ensure end date var endTS = null; if ('function' === typeof end) { callback = end; end = null; } else if (!(end instanceof Date)) { if ((end !== null) && isFinite(end)) { end = new Date(+end); } else { if ('object' === typeof end) { endTS = /** @type{Timespan} */(end); } end = null; } } // must wait to interpret timespans until after resolving dates if (startTS) { start = addToDate(startTS, end); } if (endTS) { end = addToDate(endTS, start); } if (!start && !end) { // used for unit testing return new Timespan(); } if (!callback) { return populate(new Timespan(), /** @type{Date} */(start), /** @type{Date} */(end), /** @type{number} */(units), /** @type{number} */(max), /** @type{number} */(digits)); } // base delay off units var delay = getDelay(units), timerId, fn = function() { callback( populate(new Timespan(), /** @type{Date} */(start), /** @type{Date} */(end), /** @type{number} */(units), /** @type{number} */(max), /** @type{number} */(digits)), timerId ); }; fn(); return (timerId = setInterval(fn, delay)); } /** * @public * @const * @type {number} */ countdown.MILLISECONDS = MILLISECONDS; /** * @public * @const * @type {number} */ countdown.SECONDS = SECONDS; /** * @public * @const * @type {number} */ countdown.MINUTES = MINUTES; /** * @public * @const * @type {number} */ countdown.HOURS = HOURS; /** * @public * @const * @type {number} */ countdown.DAYS = DAYS; /** * @public * @const * @type {number} */ countdown.WEEKS = WEEKS; /** * @public * @const * @type {number} */ countdown.MONTHS = MONTHS; /** * @public * @const * @type {number} */ countdown.YEARS = YEARS; /** * @public * @const * @type {number} */ countdown.DECADES = DECADES; /** * @public * @const * @type {number} */ countdown.CENTURIES = CENTURIES; /** * @public * @const * @type {number} */ countdown.MILLENNIA = MILLENNIA; /** * @public * @const * @type {number} */ countdown.DEFAULTS = DEFAULTS; /** * @public * @const * @type {number} */ countdown.ALL = MILLENNIA|CENTURIES|DECADES|YEARS|MONTHS|WEEKS|DAYS|HOURS|MINUTES|SECONDS|MILLISECONDS; /** * Override the unit labels * @public * @param {string|Array=} singular a pipe ('|') delimited list of singular unit name overrides * @param {string|Array=} plural a pipe ('|') delimited list of plural unit name overrides * @param {string=} last a delimiter before the last unit (default: ' and ') * @param {string=} delim a delimiter to use between all other units (default: ', ') * @param {string=} empty a label to use when all units are zero (default: '') * @param {function(number):string=} formatter a function which formats numbers as a string */ countdown.setLabels = function(singular, plural, last, delim, empty, formatter) { singular = singular || []; if (singular.split) { singular = singular.split('|'); } plural = plural || []; if (plural.split) { plural = plural.split('|'); } for (var i=LABEL_MILLISECONDS; i<=LABEL_MILLENNIA; i++) { // override any specified units LABELS_SINGLUAR[i] = singular[i] || LABELS_SINGLUAR[i]; LABELS_PLURAL[i] = plural[i] || LABELS_PLURAL[i]; } LABEL_LAST = ('string' === typeof last) ? last : LABEL_LAST; LABEL_DELIM = ('string' === typeof delim) ? delim : LABEL_DELIM; LABEL_NOW = ('string' === typeof empty) ?
empty : LABEL_NOW; formatNumber = ('function' === typeof formatter) ? formatter : formatNumber; }; /** * Revert to the default unit labels * @public */ var resetLabels = countdown.resetLabels = function() { LABELS_SINGLUAR = ' millisecond| second| minute| hour| day| week| month| year| decade| century| millennium'.split('|'); LABELS_PLURAL = ' milliseconds| seconds| minutes| hours| days| weeks| months| years| decades| centuries| millennia'.split('|'); LABEL_LAST = ' and '; LABEL_DELIM = ', '; LABEL_NOW = ''; formatNumber = function(value) { return '<span class="contest_timedelta">' + value + "</span>"; }; }; resetLabels(); if (module && module.exports) { module.exports = countdown; } else if (typeof window.define === 'function' && typeof window.define.amd !== 'undefined') { window.define('countdown', [], function() { return countdown; }); } return countdown; })(module);
    entpy/beauty-and-pics
    beauty_and_pics/website/static/website/js/vendor/countdown.js
    JavaScript
    mit
    27,520
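A minimal usage sketch for the countdown() entry point above. The target date and the "timer" element id are hypothetical placeholders; the unit flags, the callback form, and toHTML() come straight from the source:

// One-shot: compute a Timespan between a fixed date and now.
var ts = countdown(new Date(2016, 4, 24), null,
    countdown.DAYS | countdown.HOURS | countdown.MINUTES | countdown.SECONDS);
console.log(ts.toString());
// e.g. "5 days, 3 hours, 17 minutes and 42 seconds" (this vendored build's
// formatNumber additionally wraps each number in span.contest_timedelta)

// Recurring: passing a callback returns a setInterval id; the refresh
// rate is derived from the smallest requested unit via getDelay().
var timerId = countdown(
    new Date(2016, 4, 24),
    function(ts) { document.getElementById('timer').innerHTML = ts.toHTML('strong'); },
    countdown.DAYS | countdown.HOURS | countdown.MINUTES | countdown.SECONDS);
// clearInterval(timerId); // stop updating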
    --- layout: page title: Avila - Wu Wedding date: 2016-05-24 author: Abigail Heath tags: weekly links, java status: published summary: Vestibulum enim odio, dapibus non turpis. banner: images/banner/people.jpg booking: startDate: 05/10/2016 endDate: 05/13/2016 ctyhocn: HTSBVHX groupCode: AWW published: true --- Nam luctus finibus nisi vel accumsan. Nunc luctus diam orci, sed sodales mi luctus quis. Donec eget aliquet augue. Nunc eleifend, nisi id vulputate vehicula, eros dui iaculis velit, ac feugiat lectus diam quis tortor. Nam vitae elementum nisi. Suspendisse sed blandit diam. Cras id sodales magna. Integer quam neque, feugiat in venenatis eget, convallis id velit. Donec posuere lectus tincidunt, malesuada sapien ac, lacinia ante. Pellentesque ex risus, volutpat id augue ac, scelerisque ullamcorper mauris. Nam ac metus mauris. Etiam leo mauris, auctor eget pellentesque eu, aliquam sit amet neque. Quisque eget eleifend dolor. Aenean venenatis odio a est egestas commodo quis quis nulla. Duis luctus velit vitae pulvinar elementum. Curabitur quis tincidunt ex. * Nam imperdiet purus at ante efficitur, ut elementum lectus facilisis * Cras non elit at mauris lacinia eleifend id a orci * Nulla pretium odio non varius cursus. Aliquam erat volutpat. Mauris aliquet nisi et metus porta pulvinar. Curabitur ornare eros eu posuere lacinia. Mauris et tortor gravida, ultrices massa ut, auctor ex. Donec non pharetra nisl. Maecenas augue nibh, hendrerit sed lobortis nec, malesuada eu metus. Proin sollicitudin fermentum tortor et tincidunt. Cras quis tristique odio. Aenean molestie iaculis ornare. Quisque ac nunc arcu. Suspendisse quis mollis est. Maecenas feugiat sit amet nulla vitae condimentum. Vivamus dictum mi sit amet ultrices tristique. Quisque sit amet venenatis est. Donec vulputate malesuada purus sed finibus. Nunc id justo quis odio vulputate pellentesque a nec arcu. Etiam felis eros, placerat eget odio in, lobortis congue massa. Suspendisse elementum fermentum consectetur. Aliquam diam sapien, mattis sit amet volutpat id, gravida ac lorem. Vestibulum dignissim nibh eu porta sagittis. Aliquam facilisis rhoncus egestas. Sed semper vel eros at lobortis. Quisque non mi massa. Vestibulum feugiat diam ex, eu aliquam mi pharetra id. Nam faucibus sollicitudin nibh, et ultricies ligula porttitor ullamcorper.
    KlishGroup/prose-pogs
    pogs/H/HTSBVHX/AWW/index.md
    Markdown
    mit
    2,344
    file(REMOVE_RECURSE "CMakeFiles/coverage_polymorphic.dir/polymorphic.cpp.o" "../../../coverage/coverage_polymorphic.pdb" "../../../coverage/coverage_polymorphic" ) # Per-language clean rules from dependency scanning. foreach(lang CXX) include(CMakeFiles/coverage_polymorphic.dir/cmake_clean_${lang}.cmake OPTIONAL) endforeach()
    noahhsmith/starid
    libstarid/cereal-1.2.2/unittests/CMakeFiles/coverage_polymorphic.dir/cmake_clean.cmake
    CMake
    mit
    337
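For context, CMake generates this script and runs it (plus the optional per-language cmake_clean_CXX.cmake it includes) when the clean target is built; a typical invocation from the build tree would be:

# Hypothetical usage: triggers the generated cmake_clean.cmake scripts,
# removing the object file, .pdb and binary listed in REMOVE_RECURSE above.
cmake --build . --target clean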
    var formMode="detail"; /*formMode 页面模式 页面有三种模式 detail add modify*/ var panelType="form"; /*panelType 面板类型 form表单 search 查询 child 从表对象*/ var editIndex = undefined; /*datagrid 编辑对象的行号*/ var dg1EditIndex = undefined; var objName=label.objName; /*页面管理对象名称*/ var lblDetailStr=label.detailStr; /*在不同的语种下应该不同*/ var lblAddStr=label.addStr; /*在不同的语种下应该不同*/ var lblEditStr=label.editStr; /*在不同的语种下应该不同*/ var pageName=null; /*根据pageName能够取得按钮定义*/ var pageHeight=0; /*pageHeight 页面高度*/ var topHeight=366; /*datagrid高度*/ var dgHeadHeight=28; /*datagrid 收缩后高度*/ var downHeight=30; /*底部高度*/ var paddingHeight=11; /*页面内补丁高度 paddingTop+paddingBottom*/ var gridToolbar = null; /*按钮定义 */ var dgConf=null; /*dgConf配置信息*/ var dg1Conf=null; function initConf(){} /*在此初始化本页面的所有配置信息*/ function initButton(){ for(var i=0;i<gridToolbar.length;i++){ var b=gridToolbar[i];/*首次运行时所有按钮都是disable状态*/ $("#"+b.id).linkbutton({iconCls: b.iconCls,text:b.text,disabled:true,handler:b.handler,plain:1}); } } function initBtnDisabled() { var btnDisabled=[{"id":"btn_refresh"},{"id":"btn_search"}]; for(var i=0;i<btnDisabled.length;i++) { $('#'+btnDisabled[i].id).linkbutton('enable'); } } function component() { initConf(); if(window.innerHeight) pageHeight=window.innerHeight; else pageHeight=document.documentElement.clientHeight; $('#middle').css("height",pageHeight-topHeight-downHeight-paddingHeight); $('#tab').tabs({ onSelect:tab_select, fit:true }); /*这时候可能还没有key 所以不能直接绑定dom对象,只能使用dom id*/ installKey("btn_collapse",Keys.f1,null,null,null); installKey("btn_edit",Keys.f2,null,null,null); installKey("btn_search",Keys.f3,null,null,null); installKey("btn_add",Keys.f4,null,null,null); installKey("btn_delete",Keys.del,null,null,null); installKey("btn2_save",Keys.s,true,null,null); installKey("btn2_search",Keys.q,true,null,null); installKey("btn2_edit",Keys.e,true,null,null); document.onhelp=function(){return false}; /*为了屏蔽IE的F1按键*/ window.onhelp=function(){return false}; /*为了屏蔽IE的F1按键*/ $('#btn2_save').linkbutton({iconCls: 'icon-save'}).click(btn2_save); $('#btn2_edit').linkbutton({iconCls: 'icon-save'}).click(btn2_update), $('#btn2_search').linkbutton({iconCls: 'icon-search'}).click(btn2_search); $('#btn2_addItem').linkbutton({iconCls: 'icon-add'}).click(btn2_addItem); $('#btn2_editItem').linkbutton({iconCls: 'icon-edit'}).click(btn2_editItem); $('#btn2_rmItem').linkbutton({iconCls: 'icon-remove'}).click(btn2_rmItem); $('#btn2_ok').linkbutton({iconCls: 'icon-ok'}).click(btn2_ok); dgConf.toolbar='#tb'; dgConf.onCollapse=dg_collapse; dgConf.onSelect=dg_select; dgConf.singleSelect=true; dgConf.onLoadSuccess=dg_load; dgConf.onClickRow=dg_click; dgConf.onDblClickRow=dg_dbl; dgConf.onExpand=dg_expand; dgConf.collapsible=true; dgConf.collapseID="btn_collapse"; dgConf.pagination=true; dgConf.fit=true; dgConf.rownumbers=true; dgConf.singleSelect=true; dg1Conf.onClickRow=dg1_click; dg1Conf.onDblClickRow=dg1_dbl; $("#dg").datagrid(dgConf); initButton(); initBtnDisabled(); $('#top').css("height","auto"); lov_init(); $(".formChild").height(pageHeight-topHeight-downHeight-paddingHeight-dgHeadHeight-1); //$("#ff1 input").attr("readonly",1); /*详细表单的输入框只读*/ } function showChildGrid(param){/*dg 选中事件触发*/ $("#dg1").datagrid(dg1Conf); } function showForm(row){/*dg 选中事件触发*/ //$("#ff1").form("load",row); //$("#ff2").form("load",row);; } function dg_collapse(){/*收缩后 总是要修改tabs 会触发tab_select事件 那么前面就需要将panel的selected属性设为true*/ var panel=$("#tab").tabs("getSelected"); /*先获取selected对象*/ if(panel!=null) panel.panel({selected:1}); 
$('#middle').css("height",pageHeight-dgHeadHeight-downHeight-paddingHeight); $(".formChild").height(pageHeight-dgHeadHeight-downHeight-paddingHeight-dgHeadHeight-1); $("#tab").tabs({fit:true,stopSelect:true});/*tab发生变化了 会触发tab_select事件 */ if(panel!=null) panel.panel({selected:0}); } function dg_expand(){ var panel=$("#tab").tabs("getSelected"); if(panel!=null) panel.panel({selected:1}); $('#middle').css("height",pageHeight-topHeight-downHeight-paddingHeight); $(".formChild").height(pageHeight-topHeight-downHeight-paddingHeight-dgHeadHeight-1); $("#tab").tabs({fit:true,stopSelect:true}); if(panel!=null) panel.panel({selected:0}); } function dg_load(){/*选中第一行*/ $('#mask').css('display', "none"); $('#dg').datagrid('selectRow', 0); } function dg_select(rowIndex, rowData){/*选中事件 填充ff1 ff2 dg1*/ showChildGrid(rowData);/*子表模式下,重绘子表列表*/ showForm(rowData,"add"); useDetailMode(); } function dg_add(){/*列表新增按钮事件*/ useAddMode(); } function dg_edit(){/*列表编辑按钮触发事件*/ var row=$('#dg').datagrid('getSelected'); if(row){ useEditMode(); } else $.messager.alert('选择提示', '请选择您编辑的数据!',"info"); } function dg_delete(){/*列表删除按钮触发事件*/ var confirmBack=function(r){ if(!r) return; var p=$('#dg').datagrid('getRowIndex',$('#dg').datagrid('getSelected')); /*执行服务器请求,完成服务端数据的删除 然后完成前端的删除*/ if (p == undefined){return} $('#dg').datagrid('cancelEdit', p) .datagrid('deleteRow', p); /*删除成功后应该刷新页面 并把下一条选中*/ var currRows=$('#dg').datagrid('getRows').length; if(p>=currRows) p--; if(p>=0) $('#dg').datagrid('selectRow', p);/*如果已经到末尾则 选中p-1 */ } var row=$('#dg').datagrid('getSelected'); if(row) $.messager.confirm('确认提示', '您确认要删除这条数据吗?', confirmBack); else $.messager.alert('选择提示', '请选择您要删除的数据!',"info"); } function dg_refresh(){/*列表刷新按钮事件*/ } function dg_search(){/*列表搜索事件 search模式不再禁用其他面板*/ panelType="search"; $('#tab').tabs("select",1); } function dg_click(index){ /*切换回详细信息模式 首先判断tab的当前选项*/ if(panelType=="search"){ $('#tab').tabs("select",0); } } function dg_dbl(){/*列表双击事件 双击进入编辑模式*/ document.getElementById("btn_edit").click();/*双击等同于点击编辑按钮*/ } function tab_select(title,index){/*选项卡的切换 需要更改按钮的显示*/ $('#down a').css("display","none"); if(index==0){/*根据grid的状态来生成按钮 add edit*/ $('#btn2_addItem').css("display","inline-block");/*新增行按钮*/ $('#btn2_editItem').css("display","inline-block");/*删除行按钮*/ $('#btn2_rmItem').css("display","inline-block");/*删除行按钮*/ $('#btn2_ok').css("display","inline-block");/*commit按钮*/ } else if(index==1){/*查询选项卡 切换到查询页签等同于按钮 search被点击*/ panelType="search"; $('#btn2_search').css("display","inline-block");/*搜索按钮*/ } } function useDetailMode(row){ //formMode="detail"; //$('#ff2').css("display","none"); //$('#ff1').css("display","block"); //if(panelType=="search") $('#tab').tabs("select",0); //else tab_select(); } function btn2_addItem(){ if(dg1_endEditing()){/*结束编辑状态成功*/ var p=$('#dg1').datagrid('getRowIndex',$('#dg1').datagrid('getSelected')); /*执行服务器请求,完成服务端数据的删除 然后完成前端的删除*/ if (p == undefined){return} $('#dg1').datagrid('unselectAll'); $('#dg1').datagrid('insertRow',{index:p+1,row:{}}) .datagrid('beginEdit', p+1) .datagrid('selectRow', p+1); dg1EditIndex=p+1; } else{ $('#dg1').datagrid('selectRow', dg1EditIndex); } } function btn2_editItem(){ var index=$('#dg1').datagrid('getRowIndex', $('#dg1').datagrid('getSelected')); if (dg1EditIndex != index){ if (dg1_endEditing()){ $('#dg1').datagrid('selectRow', index) .datagrid('beginEdit', index); dg1EditIndex = index; } else { $('#dg1').datagrid('selectRow', dg1EditIndex); } } } function btn2_rmItem(){ var confirmBack=function(r){ if(!r) return; var 
p=$('#dg1').datagrid('getRowIndex',$('#dg1').datagrid('getSelected')); if (p == undefined){return} $('#dg1').datagrid('cancelEdit', p) .datagrid('deleteRow', p); var currRows=$('#dg1').datagrid('getRows').length; if(p>=currRows) p--; if(p>=0) $('#dg1').datagrid('selectRow', p);/*如果已经到末尾则 选中p-1 */ } var row=$('#dg1').datagrid('getSelected'); if(row) $.messager.confirm('确认提示', '您确认要删除这条数据吗?', confirmBack); else $.messager.alert('选择提示', '请选择您要删除的数据!',"info"); } function dg1_endEditing(){ if (dg1EditIndex == undefined){return true} var flag=$('#dg1').datagrid('validateRow',dg1EditIndex); if(flag){/*如果校验通过 允许结束编辑状态*/ $('#dg1').datagrid('endEdit', dg1EditIndex); dg1EditIndex = undefined; return true; } return false; } function dg1_click(index){/*从表单击事件 在编辑模式下打开编辑*/ if (dg1EditIndex != index){ dg1_endEditing(); } } function dg1_dbl(index){/*从表双击事件 双击进入编辑模式*/ document.getElementById("btn2_editItem").click();/*双击等同于点击编辑按钮*/ } function useAddMode(){}; function useEditMode(){}; function form_change(type){}/*type= add|edit*/ function removeValidate(){}/*type= enable|remove*/ function btn2_save(){} function btn2_update(){} function btn2_search(){} function btn2_ok(){} function lov_init(){}/*绑定值列表*/
    ldjking/wbscreen
    web/wb/2tp/template/js/common/copy/a3.js
    JavaScript
    mit
    9,914
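A sketch of how a concrete page might fill in the empty initConf() hook this template expects. The page name, URL, columns and toolbar entries below are hypothetical; the handler functions (dg_add, dg_edit, dg_search) are the ones defined above:

function initConf() {
    pageName = "teamPage"; /* hypothetical */
    gridToolbar = [
        {id: "btn_add",    iconCls: "icon-add",    text: "Add",    handler: dg_add},
        {id: "btn_edit",   iconCls: "icon-edit",   text: "Edit",   handler: dg_edit},
        {id: "btn_search", iconCls: "icon-search", text: "Search", handler: dg_search}
    ];
    dgConf  = {url: "/team/list.do", columns: [[{field: "id", title: "ID", width: 80}]]};
    dg1Conf = {columns: [[{field: "name", title: "Name", width: 120, editor: "text"}]]};
}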
FROM ruby:2.3.3 RUN apt-get update && apt-get install -y \ #Packages net-tools \ nodejs #Install phantomjs RUN apt-get update \ && apt-get install -y --no-install-recommends \ ca-certificates \ bzip2 \ libfontconfig \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* RUN apt-get update \ && apt-get install -y --no-install-recommends \ curl \ && mkdir /tmp/phantomjs \ && curl -L https://bitbucket.org/ariya/phantomjs/downloads/phantomjs-2.1.1-linux-x86_64.tar.bz2 \ | tar -xj --strip-components=1 -C /tmp/phantomjs \ && cd /tmp/phantomjs \ && mv bin/phantomjs /usr/local/bin \ && cd \ && apt-get purge --auto-remove -y \ curl \ && apt-get clean \ && rm -rf /tmp/* /var/lib/apt/lists/* #Install gems RUN mkdir /app WORKDIR /app COPY Gemfile* /app/ RUN bundle install RUN apt-get clean #Upload source COPY . /app RUN useradd ruby RUN chown -R ruby /app USER ruby # Database defaults ENV DATABASE_NAME bookIT ENV DATABASE_HOST db ENV DATABASE_USER bookIT ENV DATABASE_PASSWORD password ENV DATABASE_ADAPTER mysql2 ENV ACCOUNT_ADDRESS https://gamma.chalmers.it #In production, Host is set to naboo.chalmers.it # Start server ENV RAILS_ENV production ENV RACK_ENV production ENV SECRET_KEY_BASE secret ENV PORT 3000 EXPOSE 3000 RUN rake assets:precompile CMD ["sh", "start.sh"]
    cthit/bookIT
    Dockerfile
    Dockerfile
    mit
    1,372
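Hypothetical build-and-run commands for this image; the tag and the overridden database settings are placeholders, while the port and ENV names come from the Dockerfile above:

docker build -t bookit .
docker run -p 3000:3000 \
    -e DATABASE_HOST=db.example.com \
    -e DATABASE_PASSWORD=changeme \
    bookit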
## Capistrano [![Build Status](https://secure.travis-ci.org/capistrano/capistrano.png)](http://travis-ci.org/capistrano/capistrano)[![Code Climate](https://codeclimate.com/badge.png)](https://codeclimate.com/github/capistrano/capistrano) Capistrano is a utility and framework for executing commands in parallel on multiple remote machines, via SSH. It uses a simple DSL (borrowed in part from [Rake](http://rake.rubyforge.org/)) that allows you to define _tasks_, which may be applied to machines in certain roles. It also supports tunneling connections via some gateway machine to allow operations to be performed behind VPNs and firewalls. Capistrano was originally designed to simplify and automate deployment of web applications to distributed environments, and originally came bundled with a set of tasks designed for deploying Rails applications. ## Documentation * [https://github.com/capistrano/capistrano/wiki](https://github.com/capistrano/capistrano/wiki) ## DEPENDENCIES * [Net::SSH](http://net-ssh.rubyforge.org) * [Net::SFTP](http://net-ssh.rubyforge.org) * [Net::SCP](http://net-ssh.rubyforge.org) * [Net::SSH::Gateway](http://net-ssh.rubyforge.org) * [HighLine](http://highline.rubyforge.org) * [Ruby](http://www.ruby-lang.org/en/) &#x2265; 1.8.7 If you want to run the tests, you'll also need to install the dependencies with Bundler; see the `Gemfile` in the project root. ## ASSUMPTIONS Capistrano is "opinionated software", which means it has very firm ideas about how things ought to be done, and tries to force those ideas on you. Some of the assumptions behind these opinions are: * You are using SSH to access the remote servers. * You either have the same password to all target machines, or you have public keys in place to allow passwordless access to them. Do not expect these assumptions to change. ## USAGE In general, you'll use Capistrano as follows: * Create a recipe file ("capfile" or "Capfile"). * Use the `cap` script to execute your recipe. Use the `cap` script as follows: cap sometask By default, the script will look for a file called one of `capfile` or `Capfile`. The `sometask` text indicates which task to execute. You can run `cap -h` to see all the available options and `cap -T` to see all the available tasks. ## CONTRIBUTING: * Fork Capistrano * Create a topic branch - `git checkout -b my_branch` * Rebase your branch so that all your changes are reflected in one commit * Push to your branch - `git push origin my_branch` * Create a Pull Request from your branch, include as much documentation as you can in the commit message/pull request, following these [guidelines on writing a good commit message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html) * That's it! ## LICENSE: Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
    piousbox/microsites2-cities
    vendor/ruby/1.9.1/gems/capistrano-2.15.4/README.md
    Markdown
    mit
    3,795
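A minimal recipe sketch following the usage described above (Capistrano 2 DSL; the host name and task body are hypothetical):

# Capfile
role :web, "app1.example.com"

task :uptime, :roles => :web do
  run "uptime"
end

Running `cap uptime` would then execute the command on every :web host in parallel.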
    /* * Jermit * * The MIT License (MIT) * * Copyright (C) 2018 Kevin Lamonte * * Permission is hereby granted, free of charge, to any person obtaining a * copy of this software and associated documentation files (the "Software"), * to deal in the Software without restriction, including without limitation * the rights to use, copy, modify, merge, publish, distribute, sublicense, * and/or sell copies of the Software, and to permit persons to whom the * Software is furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER * DEALINGS IN THE SOFTWARE. * * @author Kevin Lamonte [[email protected]] * @version 1 */ package jermit.protocol.zmodem; /** * ZEofHeader represents the end of a file. */ class ZEofHeader extends Header { // ------------------------------------------------------------------------ // Constructors ----------------------------------------------------------- // ------------------------------------------------------------------------ /** * Public constructor. */ public ZEofHeader() { this(0); } /** * Public constructor. * * @param data the data field for this header */ public ZEofHeader(final int data) { super(Type.ZEOF, (byte) 0x0B, "ZEOF", data); } // ------------------------------------------------------------------------ // Header ----------------------------------------------------------------- // ------------------------------------------------------------------------ // ------------------------------------------------------------------------ // ZEofHeader ------------------------------------------------------------- // ------------------------------------------------------------------------ /** * Get the file size value. * * @return the value */ public int getFileSize() { return data; } }
    klamonte/jermit
    src/jermit/protocol/zmodem/ZEofHeader.java
    Java
    mit
    2,504
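A small same-package sketch of how this header carries the transferred file size. The class is package-private, so this assumes code inside jermit.protocol.zmodem, and the byte count is hypothetical:

// Somewhere inside jermit.protocol.zmodem:
ZEofHeader eof = new ZEofHeader(1048576);   // sender finished a 1 MiB file
System.out.println(eof.getFileSize());      // prints 1048576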
    # fullstack-course4-submissions
    aaronblair/fullstack-course4-submissions
    README.md
    Markdown
    mit
    31
import java.sql.Connection; import java.sql.DriverManager; import java.sql.SQLException; public class FormLoader { public static String connectionString = "jdbc:hsqldb:file:db-data/teamsandplayers"; static Connection con; public static void main(String[] args) throws Exception { try { Class.forName("org.hsqldb.jdbc.JDBCDriver"); } catch (ClassNotFoundException e) { throw e; } MainTeamForm form = new MainTeamForm(); form.setVisible(true); try { // will create DB if does not exist // "SA" is default user with hypersql con = DriverManager.getConnection(connectionString, "SA", ""); } catch (SQLException e) { throw e; } finally { if (con != null) { con.close(); } System.out.println("Program complete"); } } }
    a-r-d/java-1-class-demos
    jframe-actionlistener-access-db-cxn/homework-start/Week13Assignment10/src/FormLoader.java
    Java
    mit
    788
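The connection handling above can also be written with try-with-resources, which closes the Connection automatically even when getConnection throws; a hedged sketch under the same connection string and default HSQLDB user:

try (Connection con = DriverManager.getConnection(connectionString, "SA", "")) {
    // will create the DB if it does not exist; "SA" is HSQLDB's default user
    System.out.println("Connected, autocommit=" + con.getAutoCommit());
}
System.out.println("Program complete");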
    <?php namespace Memento\Test; use Memento; class SingleTest extends Harness { /** @dataProvider provideClients */ public function testStoreMethod(Memento\Client $client) { $success = $client->store($this->getKey(), array('foo' => 'bar'), $this->getExpires()); $this->assertTrue($success); $this->assertEquals($this->getExpires(), $client->getExpires($this->getKey())); $this->assertEquals($this->getExpires(), $client->getTtl($this->getKey())); // default should be the same as expires // store with ttl $success = $client->store($this->getKey(), array('foo' => 'bar'), $this->getExpires(), $this->getTtl()); $this->assertTrue($success); $this->assertLessThanOrEqual($this->getExpires(), $client->getExpires($this->getKey())); $this->assertLessThanOrEqual($this->getTtl(), $client->getTtl($this->getKey())); } /** @dataProvider provideClients */ public function testExists(Memento\Client $client) { $client->store($this->getKey(), true); $exists = $client->exists($this->getKey()); $this->assertTrue($exists); } /** @dataProvider provideClients */ public function testRetrieve(Memento\Client $client) { $client->store($this->getKey(), array('foo' => 'bar')); $data = $client->retrieve($this->getKey()); $this->assertEquals($data, array('foo' => 'bar')); } /** @dataProvider provideClients */ public function testInvalidRetrieve(Memento\Client $client) { $data = $client->retrieve(new Memento\Key(md5(time() . rand(0, 1000)))); $this->assertEquals($data, null); } /** @dataProvider provideClients */ public function testInvalidate(Memento\Client $client) { $client->store($this->getKey(), true); $invalid = $client->invalidate($this->getKey()); $this->assertTrue($invalid); $exists = $client->exists($this->getKey()); $this->assertFalse($exists); } /** @dataProvider provideClients */ public function testTerminate(Memento\Client $client) { $client->store($this->getKey(), true); $terminated = $client->terminate($this->getKey()); $this->assertTrue($terminated); $exists = $client->exists($this->getKey()); $this->assertFalse($exists); } /** @dataProvider provideClients */ public function testExpires(Memento\Client $client) { $client->store($this->getKey(), array('foo' => 'bar'), 1, $ttl = 5); sleep(3); $exists = $client->exists($this->getKey()); $this->assertFalse($exists); // check if cache exists but include expired caches $exists = $client->exists($this->getKey(), true); $this->assertTrue($exists); $client->store($this->getKey(), array('foo' => 'bar'), $this->getExpires(), $this->getTtl()); $this->assertTrue($client->exists($this->getKey())); $client->expire($this->getKey()); sleep(1); $this->assertFalse($client->exists($this->getKey())); // check if cache exists but include expired caches $exists = $client->exists($this->getKey(), true); $this->assertTrue($exists); } }
    garyr/memento
    test/Memento/Test/SingleTest.php
    PHP
    mit
    3,252
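A client-usage sketch implied by the behaviors these tests exercise. The engine choice and key name are hypothetical (the Client constructor argument is an assumption), but store/retrieve/exists and the string Key constructor match the calls above:

$client = new Memento\Client(new Memento\Engine\Memcache()); // engine is an assumption
$key = new Memento\Key('user-42');                           // hypothetical key name
$client->store($key, array('foo' => 'bar'), 300 /* expires */, 600 /* ttl */);
if ($client->exists($key)) {
    $data = $client->retrieve($key); // array('foo' => 'bar')
}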
/* Copyright (c) 2015 Shaps Mohsenin. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY Shaps Mohsenin `AS IS' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL Shaps Mohsenin OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ @import UIKit; #import "SPXDataView.h" /** * Provides tableView-specific definitions of a dataView */ @interface UITableView (SPXDataViewAdditions) <SPXDataView> /** * Gets/sets the block to execute when the tableView requests a cell */ @property (nonatomic, copy) UITableViewCell *(^viewForItemAtIndexPathBlock)(UITableView *tableView, id object, NSIndexPath *indexPath); /** * Gets/sets the block to execute when the tableView requests the cell to be configured */ @property (nonatomic, copy) void (^configureViewForItemAtIndexPathBlock)(UITableView *tableView, UITableViewCell *cell, id object, NSIndexPath *indexPath); /** * Gets/sets the block to execute when the tableView requests a section header */ @property (nonatomic, copy) NSString *(^titleForHeaderInSectionBlock)(UITableView *tableView, NSUInteger section); /** * Gets/sets the block to execute when the tableView requests a section footer */ @property (nonatomic, copy) NSString *(^titleForFooterInSectionBlock)(UITableView *tableView, NSUInteger section); /** * Gets/sets the block to execute when the tableView requests whether or not a cell can be moved */ @property (nonatomic, copy) BOOL (^canMoveItemAtIndexPathBlock)(UITableView *tableView, UITableViewCell *cell, id object, NSIndexPath *indexPath); /** * Gets/sets the block to execute when the tableView requests whether or not a cell can be edited */ @property (nonatomic, copy) BOOL (^canEditItemAtIndexPathBlock)(UITableView *tableView, UITableViewCell *cell, id object, NSIndexPath *indexPath); /** * Gets/sets the block to execute when the tableView commits an editing action for a cell */ @property (nonatomic, copy) void (^commitEditingStyleForItemAtIndexPathBlock)(UITableView *tableView, UITableViewCell *cell, id object, NSIndexPath *indexPath); /** * Gets/sets the block to execute when the tableView moves a cell */ @property (nonatomic, copy) void (^moveItemAtSourceIndexPathToDestinationIndexPathBlock)(UITableView *tableView, NSIndexPath *sourceIndexPath, NSIndexPath *destinationIndexPath); @end
    shaps80/SPXCore
    Example/Pods/SPXDataSources/Pod/Classes/DataViews/UITableView+SPXDataViewAdditions.h
    C
    mit
    3,424
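A hedged sketch of wiring a table view through two of these block properties; the cell identifier and the use of the model's description are placeholders:

tableView.viewForItemAtIndexPathBlock = ^UITableViewCell *(UITableView *tv, id object, NSIndexPath *indexPath) {
  return [tv dequeueReusableCellWithIdentifier:@"Cell" forIndexPath:indexPath];
};
tableView.configureViewForItemAtIndexPathBlock = ^(UITableView *tv, UITableViewCell *cell, id object, NSIndexPath *indexPath) {
  cell.textLabel.text = [object description];
};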
    // // Generated by class-dump 3.5 (64 bit). // // class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2013 by Steve Nygard. // #import "CDStructures.h" @interface _IDEKitPrivateClassForFindingBundle : NSObject { } @end
    kolinkrewinkel/Multiplex
    Multiplex/IDEHeaders/IDEHeaders/IDEKit/_IDEKitPrivateClassForFindingBundle.h
    C
    mit
    234
    <html lang="en"> <head> <title>C - Debugging with GDB</title> <meta http-equiv="Content-Type" content="text/html"> <meta name="description" content="Debugging with GDB"> <meta name="generator" content="makeinfo 4.8"> <link title="Top" rel="start" href="index.html#Top"> <link rel="up" href="Supported-Languages.html#Supported-Languages" title="Supported Languages"> <link rel="next" href="D.html#D" title="D"> <link href="http://www.gnu.org/software/texinfo/" rel="generator-home" title="Texinfo Homepage"> <!-- Copyright (C) 1988-2017 Free Software Foundation, Inc. Permission is granted to copy, distribute and/or modify this document under the terms of the GNU Free Documentation License, Version 1.3 or any later version published by the Free Software Foundation; with the Invariant Sections being ``Free Software'' and ``Free Software Needs Free Documentation'', with the Front-Cover Texts being ``A GNU Manual,'' and with the Back-Cover Texts as in (a) below. (a) The FSF's Back-Cover Text is: ``You are free to copy and modify this GNU Manual. Buying copies from GNU Press supports the FSF in developing GNU and promoting software freedom.'' --> <meta http-equiv="Content-Style-Type" content="text/css"> <style type="text/css"><!-- pre.display { font-family:inherit } pre.format { font-family:inherit } pre.smalldisplay { font-family:inherit; font-size:smaller } pre.smallformat { font-family:inherit; font-size:smaller } pre.smallexample { font-size:smaller } pre.smalllisp { font-size:smaller } span.sc { font-variant:small-caps } span.roman { font-family:serif; font-weight:normal; } span.sansserif { font-family:sans-serif; font-weight:normal; } --></style> </head> <body> <div class="node"> <p> <a name="C"></a> Next:&nbsp;<a rel="next" accesskey="n" href="D.html#D">D</a>, Up:&nbsp;<a rel="up" accesskey="u" href="Supported-Languages.html#Supported-Languages">Supported Languages</a> <hr> </div> <h4 class="subsection">15.4.1 C and C<tt>++</tt></h4> <p><a name="index-C-and-C_0040t_007b_002b_002b_007d-947"></a><a name="index-expressions-in-C-or-C_0040t_007b_002b_002b_007d-948"></a> Since C and C<tt>++</tt> are so closely related, many features of <span class="sc">gdb</span> apply to both languages. Whenever this is the case, we discuss those languages together. <p><a name="index-C_0040t_007b_002b_002b_007d-949"></a><a name="index-g_t_0040code_007bg_002b_002b_007d_002c-_0040sc_007bgnu_007d-C_0040t_007b_002b_002b_007d-compiler-950"></a><a name="index-g_t_0040sc_007bgnu_007d-C_0040t_007b_002b_002b_007d-951"></a>The C<tt>++</tt> debugging facilities are jointly implemented by the C<tt>++</tt> compiler and <span class="sc">gdb</span>. Therefore, to debug your C<tt>++</tt> code effectively, you must compile your C<tt>++</tt> programs with a supported C<tt>++</tt> compiler, such as <span class="sc">gnu</span> <code>g++</code>, or the HP ANSI C<tt>++</tt> compiler (<code>aCC</code>). 
<ul class="menu"> <li><a accesskey="1" href="C-Operators.html#C-Operators">C Operators</a>: C and C<tt>++</tt> operators <li><a accesskey="2" href="C-Constants.html#C-Constants">C Constants</a>: C and C<tt>++</tt> constants <li><a accesskey="3" href="C-Plus-Plus-Expressions.html#C-Plus-Plus-Expressions">C Plus Plus Expressions</a>: C<tt>++</tt> expressions <li><a accesskey="4" href="C-Defaults.html#C-Defaults">C Defaults</a>: Default settings for C and C<tt>++</tt> <li><a accesskey="5" href="C-Checks.html#C-Checks">C Checks</a>: C and C<tt>++</tt> type and range checks <li><a accesskey="6" href="Debugging-C.html#Debugging-C">Debugging C</a>: <span class="sc">gdb</span> and C <li><a accesskey="7" href="Debugging-C-Plus-Plus.html#Debugging-C-Plus-Plus">Debugging C Plus Plus</a>: <span class="sc">gdb</span> features for C<tt>++</tt> <li><a accesskey="8" href="Decimal-Floating-Point.html#Decimal-Floating-Point">Decimal Floating Point</a>: Numbers in Decimal Floating Point format </ul> </body></html>
    ChangsoonKim/STM32F7DiscTutor
    toolchain/osx/gcc-arm-none-eabi-6-2017-q1-update/share/doc/gcc-arm-none-eabi/html/gdb/C.html
    HTML
    mit
    4,070
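A minimal session matching that advice, assuming a hello.cpp and GNU g++ on the path:

g++ -g -O0 hello.cpp -o hello   # compile with debug info, no optimization
gdb ./hello                     # load the program into gdb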
    Answer these questions in your reflection: What git concepts were you struggling with prior to the GPS session? - Prior to the GPS session I was having trouble navigating between branches. I also was completely confused on remote and fetch. I thought that you could just use the command git pull which would fetch/merge in one. What concepts were clarified during the GPS? - Using git checkout moves between branches. What questions did you ask your pair and the guide? - I asked them questions on what was troubling me and that cleared things up. I am still a little fuzzy on fetch / remote but I know that will come with more practice. Git pull is also a compact way to fetch and merge in one. What still confuses you about git? - When using the remote I am still not completely sure on what it does. I will need to do more research and practice while I work on the HTML this week. How was your first experience of pairing in a GPS? - My first experience was great! I really enjoyed working with my partner and the guide had some great pointers. Once again my feelings toward DBC are getting better and better as the days go on. I am having a great time learning things that interest me.
    mikelondon/phase-0-gps-1
    london-reflection.md
    Markdown
    mit
    1,202
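The fetch/merge pair and the one-step `git pull` shorthand mentioned above (the remote and branch names are the usual defaults):

git fetch origin          # download new commits from the remote
git merge origin/master   # merge them into the current branch
git pull origin master    # the same two steps in one command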
#!/usr/bin/env python3 """ Categorize and analyze user sessions. Read in ecfs_obfuscated_filtered.gz file, output some fancy results. """ from collections import defaultdict from collections import Counter import sys import time import os import resource import json import fnmatch from pipes import Pipes import operator from operation import Operation KB = 1024 MB = KB * 1024 GB = MB * 1024 TB = GB * 1024 PB = TB * 1024 MONITOR_LINES = 100000 class UserSession(): def __init__(self, user_id): self.user_id = user_id self.from_ts = 0 self.till_ts = 0 self.get_requests = 0 self.reget_requests = 0 self.put_requests = 0 self.get_bytes = 0 self.put_bytes = 0 self.rename_requests = 0 self.del_requests = 0 self.get_dirs = 0 self.put_dirs = 0 self.put_files_per_dir = 0.0 self.get_files_per_dir = 0.0 self.window_seconds = 0 self.file_cnt_gets = Counter() self.file_cnt_puts = Counter() self.dir_cnt_gets = Counter() self.dir_cnt_puts = Counter() self.num_ops = 0 self.last_ts = 0 def add_op(self, op): self.num_ops += 1 if op.ts < self.last_ts: raise Exception("Timestamp too old") else: self.last_ts = op.ts if op.optype == 'g': self.get_requests += 1 self.get_bytes += op.size self.file_cnt_gets[op.obj_id] += 1 self.dir_cnt_gets[op.parent_dir_id] += 1 elif op.optype == 'p': self.put_requests += 1 self.put_bytes += op.size self.file_cnt_puts[op.obj_id] += 1 self.dir_cnt_puts[op.parent_dir_id] += 1 elif op.optype == 'd': self.del_requests += 1 elif op.optype == 'r': self.rename_requests += 1 #update last time stamp in the session self.till_ts = op.ts + op.execution_time def finish(self): self.get_dirs = len(self.dir_cnt_gets) if self.get_dirs > 0: self.get_files_per_dir = float(self.get_requests) / self.get_dirs self.put_dirs = len(self.dir_cnt_puts) if self.put_dirs > 0: self.put_files_per_dir = float(self.put_requests) / self.put_dirs """ set reget_counter :param counter: contains [ 1, 1, 5] counts of objects. value > 1 is a re-retrieval. :return: """ for c in self.file_cnt_gets.values(): if c > 1: self.reget_requests += (c - 1) # self.announce() return ";".join([str(x) for x in [ self.user_id, self.from_ts, self.till_ts, self.till_ts - self.from_ts, self.get_requests, self.reget_requests, self.put_requests, self.get_bytes, self.put_bytes, self.rename_requests, self.del_requests, self.get_dirs, self.put_dirs, self.put_files_per_dir, self.get_files_per_dir, self.window_seconds ]] ) def announce(self): print("closed session. gets: %r, regets: %r, puts: %r, dels: %r, renames: %r get_dirs: %r, put_dirs: %r, get_bytes: %r put_bytes: %r window_seconds: %d" % \ (self.get_requests, self.reget_requests, self.put_requests, self.del_requests, self.rename_requests, self.get_dirs, self.put_dirs, self.get_bytes, self.put_bytes, self.window_seconds)) def find_clusters(atimes): foo = Counter() bar = dict() for i in range(120, 3660, 10): clusters = get_clusters(atimes, i) cs = len(clusters) foo[cs] += 1 # note first occurrence of this cluster size.
if cs not in bar: bar[cs] = i # print(len(atimes), i, cs) return bar[foo.most_common()[0][0]] def get_clusters(data, maxgap): '''Arrange data into groups where successive elements differ by no more than *maxgap* >>> get_clusters([1, 6, 9, 100, 102, 105, 109, 134, 139], maxgap=10) [[1, 6, 9], [100, 102, 105, 109], [134, 139]] >>> get_clusters([1, 6, 9, 99, 100, 102, 105, 134, 139, 141], maxgap=10) [[1, 6, 9], [99, 100, 102, 105], [134, 139, 141]] ''' data.sort() groups = [[data[0]]] for x in data[1:]: if abs(x - groups[-1][-1]) <= maxgap: groups[-1].append(x) else: groups.append([x]) return groups def analyze_user_session(user_session_file, out_pipeline, target_file_name): with open(user_session_file, 'r') as sf: ops = list() atimes = list() for line in sf: op = Operation() op.init(line.strip()) ops.append(op) atimes.append(op.ts) ops.sort(key=operator.attrgetter('ts')) atimes.sort() window_seconds = find_clusters(atimes) session_counter = 1 uf = os.path.basename(user_session_file) user_id = uf[:uf.find(".user_session.csv")] session = UserSession(user_id) session.window_seconds = window_seconds for op in ops: if session.from_ts == 0: session.from_ts = op.ts session.till_ts = op.ts + op.execution_time if (session.till_ts + window_seconds) < op.ts: # this session is over, so archive it. out_pipeline.write_to(target_file_name, session.finish()) del session session = UserSession(user_id) session.window_seconds = window_seconds session_counter += 1 session.add_op(op) if session.num_ops > 0: out_pipeline.write_to(target_file_name, session.finish()) print("sessions: %d with window_seconds: %d" %(session_counter, window_seconds)) if __name__ == "__main__": source_dir = os.path.abspath(sys.argv[1]) result = os.path.abspath(sys.argv[2]) results_dir = os.path.dirname(result) target_file_name = os.path.basename(result) users_session_files = [os.path.join(dirpath, f) for dirpath, dirnames, files in os.walk(source_dir) for f in fnmatch.filter(files, '*.user_session.csv')] #remove the old log file, as outpipe is append only. if os.path.exists(os.path.join(results_dir, target_file_name)): os.remove(os.path.join(results_dir, target_file_name)) out_pipe = Pipes(results_dir) csv_header = ";".join(["user_id", "from_ts", "till_ts", "session_lifetime", "get_requests", "reget_requests", "put_requests", "get_bytes", "put_bytes", "rename_requests", "del_requests", "get_dirs", "put_dirs", "put_files_per_dir", "get_files_per_dir", "window_seconds" ]) out_pipe.write_to(target_file_name, csv_header) cnt = 0 for sf in users_session_files: cnt += 1 print ("working on %d/%d" % (cnt, len(users_session_files))) analyze_user_session(sf, out_pipe, target_file_name) # if cnt >=20: # break out_pipe.close() print("wrote results to %s: " % (os.path.join(results_dir, target_file_name)))
    zdvresearch/fast15-paper-extras
    ecfs_user_sessions/src/analyze_user_sessions.py
    Python
    mit
    7,526
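A hypothetical invocation of the script above (directory names are placeholders): it walks the source directory for *.user_session.csv files and appends one CSV of session rows to the result path given as the second argument.

python3 analyze_user_sessions.py ./ecfs_sessions ./results/user_sessions.csv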
package esl import ( "io" "errors" "unicode/utf8" ) // Buffer ... type buffer []byte // MemoryReader ... type memReader [ ]byte // MemoryWriter ... type memWriter [ ]byte // ErrBufferSize indicates that memory cannot be allocated to store data in a buffer. var ErrBufferSize = errors.New(`could not allocate memory`) func newBuffer( size int ) *buffer { buf := make([ ]byte, 0, size ) return (*buffer)(&buf) } func ( buf *buffer ) reader( ) *memReader { n := len( *buf ) rbuf := ( *buf )[:n:n] return ( *memReader )( &rbuf ) } func ( buf *buffer ) writer( ) *memWriter { return ( *memWriter )( buf ) } func ( buf *buffer ) grow( n int ) error { if ( len( *buf )+ n ) > cap( *buf ) { // Not enough space to store [:+(n)]byte(s) mbuf, err := makebuf( cap( *buf )+ n ) if ( err != nil ) { return ( err ) } copy( mbuf, *buf ) *( buf ) = mbuf } return nil } // allocates a byte slice of size. // If the allocation fails, returns error // indicating that memory cannot be allocated to store data in a buffer. func makebuf( size int ) ( buf [ ]byte, memerr error ) { defer func( ) { // If the make fails, give a known error. if ( recover( ) != nil ) { ( memerr ) = ErrBufferSize } }( ) return make( [ ]byte, 0, size ), nil } func ( buf *memReader ) Read( b [ ]byte ) ( n int, err error ) { if len( *buf ) == 0 { return ( 0 ), io.EOF } n = copy( b, *buf ); *buf = ( *buf )[ n: ] /* advance past the bytes just read */ return // n, nil } func ( buf *memReader ) ReadByte( ) ( c byte, err error ) { if len(*buf) == 0 { return ( 0 ), io.EOF } c, *buf = (*buf)[0], (*buf)[1:] return // c, nil } func ( buf *memReader ) ReadRune( ) ( r rune, size int, err error ) { if len(*buf) == 0 { return 0, 0, io.EOF } r, size = utf8.DecodeRune(*buf) *buf = (*buf)[size:] return // r, size, nil } func ( buf *memReader ) WriteTo( w io.Writer ) ( n int64, err error ) { for len( *buf ) > 0 { rw, err := w.Write( *buf ) if ( rw > 0 ) { n, *buf = n + int64( rw ), (*buf)[rw:] } if ( err != nil ) { return n, err } } return n, nil /* buffer fully drained */ } func ( buf *memWriter ) Write( b []byte ) ( n int, err error ) { *buf = append( *buf, b...) return len( b ), nil } func ( buf *memWriter ) WriteByte( c byte ) error { *buf = append( *buf, c ) return ( nil ) } func ( buf *memWriter ) WriteRune( r rune ) error { if ( r < utf8.RuneSelf ) { return buf.WriteByte( byte( r )) } b := *buf n := len( b ) if ( n + utf8.UTFMax ) > cap( b ) { b = make( []byte, ( n + utf8.UTFMax )) copy( b, *buf ) } w := utf8.EncodeRune( b[ n:( n + utf8.UTFMax )], r ) *buf = b[ :( n + w )] return nil } func ( buf *memWriter ) WriteString( s string ) ( n int, err error ) { *buf = append( *buf, s...) return len( s ), nil } // func (buf *memWriter) ReadFrom(r io.Reader) (n int64, err error) { // // NOTE: indefinite allocation! Try to use io.WriterTo interface! // }
    navrotskyj/acr
    src/pkg/esl/io.go
    GO
    mit
    2,905
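A same-package sketch of the reader/writer pair above. The types are unexported, so this assumes a file inside package esl, with fmt imported for the print:

buf := newBuffer(64)
w := buf.writer()
w.WriteString("hello")        // memWriter appends into the shared buffer
r := buf.reader()             // memReader captures the current contents
b := make([]byte, 5)
n, err := r.Read(b)
fmt.Println(n, err, string(b[:n])) // 5 <nil> hello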
    #ifdef __OBJC__ #import <UIKit/UIKit.h> #else #ifndef FOUNDATION_EXPORT #if defined(__cplusplus) #define FOUNDATION_EXPORT extern "C" #else #define FOUNDATION_EXPORT extern #endif #endif #endif FOUNDATION_EXPORT double Pods_WZYUnlimitedScrollViewDemoVersionNumber; FOUNDATION_EXPORT const unsigned char Pods_WZYUnlimitedScrollViewDemoVersionString[];
    CoderZYWang/WZYUnlimitedScrollView
    WZYUnlimitedScrollViewDemo/Pods/Target Support Files/Pods-WZYUnlimitedScrollViewDemo/Pods-WZYUnlimitedScrollViewDemo-umbrella.h
    C
    mit
    354
package com.zimbra.cs.versioncheck;

import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Date;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

import com.zimbra.common.util.ZimbraLog;
import com.zimbra.common.account.Key;
import com.zimbra.common.account.Key.ServerBy;
import com.zimbra.common.service.ServiceException;
import com.zimbra.common.soap.AdminConstants;
import com.zimbra.common.soap.SoapFaultException;
import com.zimbra.common.soap.SoapTransport;
import com.zimbra.common.util.CliUtil;
import com.zimbra.cs.account.Config;
import com.zimbra.cs.account.Provisioning;
import com.zimbra.cs.account.Server;
import com.zimbra.cs.client.LmcSession;
import com.zimbra.cs.client.soap.LmcSoapClientException;
import com.zimbra.cs.client.soap.LmcVersionCheckRequest;
import com.zimbra.cs.client.soap.LmcVersionCheckResponse;
import com.zimbra.cs.util.BuildInfo;
import com.zimbra.cs.util.SoapCLI;
import com.zimbra.common.util.DateUtil;

/**
 * @author Greg Solovyev
 */
public class VersionCheckUtil extends SoapCLI {

    private static final String OPT_CHECK_VERSION = "c";
    private static final String OPT_MANUAL_CHECK_VERSION = "m";
    private static final String SHOW_LAST_STATUS = "r";

    protected VersionCheckUtil() throws ServiceException {
        super();
    }

    public static void main(String[] args) {
        CliUtil.toolSetup();
        SoapTransport.setDefaultUserAgent("zmcheckversion", BuildInfo.VERSION);
        VersionCheckUtil util = null;
        try {
            util = new VersionCheckUtil();
        } catch (ServiceException e) {
            System.err.println(e.getMessage());
            System.exit(1);
        }

        try {
            util.setupCommandLineOptions();
            CommandLine cl = null;
            try {
                cl = util.getCommandLine(args);
            } catch (ParseException e) {
                System.out.println(e.getMessage());
                util.usage();
                System.exit(1);
            }

            if (cl == null) {
                System.exit(1);
            }

            if (cl.hasOption(OPT_CHECK_VERSION)) {
                // check schedule
                Provisioning prov = Provisioning.getInstance();
                Config config;
                config = prov.getConfig();
                String updaterServerId = config.getAttr(Provisioning.A_zimbraVersionCheckServer);
                if (updaterServerId != null) {
                    Server server = prov.get(Key.ServerBy.id, updaterServerId);
                    if (server != null) {
                        Server localServer = prov.getLocalServer();
                        if (localServer != null) {
                            if (!localServer.getId().equalsIgnoreCase(server.getId())) {
                                System.out.println("Wrong server");
                                System.exit(0);
                            }
                        }
                    }
                }
                String versionInterval = config.getAttr(Provisioning.A_zimbraVersionCheckInterval);
                if (versionInterval == null || versionInterval.length() == 0 || versionInterval.equalsIgnoreCase("0")) {
                    System.out.println("Automatic updates are disabled");
                    System.exit(0);
                } else {
                    long checkInterval = DateUtil.getTimeIntervalSecs(versionInterval, 0);
                    String lastAttempt = config.getAttr(Provisioning.A_zimbraVersionCheckLastAttempt);
                    if (lastAttempt != null) {
                        Date lastChecked = DateUtil.parseGeneralizedTime(config.getAttr(Provisioning.A_zimbraVersionCheckLastAttempt));
                        Date now = new Date();
                        if (now.getTime() / 1000 - lastChecked.getTime() / 1000 >= checkInterval) {
                            util.doVersionCheck();
                        } else {
                            System.out.println("Too early");
                            System.exit(0);
                        }
                    } else {
                        util.doVersionCheck();
                    }
                }
            } else if (cl.hasOption(OPT_MANUAL_CHECK_VERSION)) {
                util.doVersionCheck();
            } else if (cl.hasOption(SHOW_LAST_STATUS)) {
                util.doResult();
                System.exit(0);
            } else {
                util.usage();
                System.exit(1);
            }
        } catch (Exception e) {
            System.err.println(e.getMessage());
            ZimbraLog.extensions.error("Error in versioncheck util", e);
            util.usage(null);
            System.exit(1);
        }
    }

    private void doVersionCheck() throws SoapFaultException, IOException, ServiceException, LmcSoapClientException {
        LmcSession session = auth();
        LmcVersionCheckRequest req = new LmcVersionCheckRequest();
        req.setAction(AdminConstants.VERSION_CHECK_CHECK);
        req.setSession(session);
        req.invoke(getServerUrl());
    }

    private void doResult() throws SoapFaultException, IOException, ServiceException, LmcSoapClientException {
        try {
            LmcSession session = auth();
            LmcVersionCheckRequest req = new LmcVersionCheckRequest();
            req.setAction(AdminConstants.VERSION_CHECK_STATUS);
            req.setSession(session);
            LmcVersionCheckResponse res = (LmcVersionCheckResponse) req.invoke(getServerUrl());
            List<VersionUpdate> updates = res.getUpdates();
            for (Iterator<VersionUpdate> iter = updates.iterator(); iter.hasNext();) {
                VersionUpdate update = iter.next();
                String critical;
                if (update.isCritical()) {
                    critical = "critical";
                } else {
                    critical = "not critical";
                }
                System.out.println(String.format(
                        "Found a %s update. Update is %s. Update version: %s. For more info visit: %s",
                        update.getType(), critical, update.getVersion(), update.getUpdateURL()));
            }
        } catch (SoapFaultException soape) {
            System.out.println("Caught SoapFaultException");
            soape.printStackTrace(System.out);
            throw soape;
        } catch (LmcSoapClientException lmce) {
            System.out.println("Caught LmcSoapClientException");
            lmce.printStackTrace(System.out);
            throw lmce;
        } catch (ServiceException se) {
            System.out.println("Caught ServiceException");
            se.printStackTrace(System.out);
            throw se;
        } catch (IOException ioe) {
            System.out.println("Caught IOException");
            ioe.printStackTrace(System.out);
            throw ioe;
        }
    }

    protected void setupCommandLineOptions() {
        // super.setupCommandLineOptions();
        Options options = getOptions();
        Options hiddenOptions = getHiddenOptions();
        hiddenOptions.addOption(OPT_CHECK_VERSION, "autocheck", false,
                "Initiate version check request (exits if zimbraVersionCheckInterval==0)");
        options.addOption(SHOW_LAST_STATUS, "result", false, "Show results of last version check.");
        options.addOption(OPT_MANUAL_CHECK_VERSION, "manual", false, "Initiate version check request.");
    }

    protected String getCommandUsage() {
        return "zmcheckversion <options>";
    }
}
    nico01f/z-pec
    ZimbraAdminVersionCheck/src/java/com/zimbra/cs/versioncheck/VersionCheckUtil.java
    Java
    mit
    7,334
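For orientation, the options registered in setupCommandLineOptions() map onto the following invocations; this is a usage sketch inferred from the code above (note that -c/--autocheck is added to the hidden options, so it does not appear in the tool's printed usage):

zmcheckversion -m    # --manual: trigger a version check immediately
zmcheckversion -r    # --result: print the results of the last version check
zmcheckversion -c    # --autocheck: scheduled check; exits early unless zimbraVersionCheckInterval has elapsed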
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

namespace EmployeeFinder.Models
{
    public enum Position
    {
        Bartender,
        Waiter,
        Bellboy,
        Receptionist,
        Manager,
        Housekeeper,
        Chef,
        Maintenance
    }
}
    GeorgiNik/EmployeeFinder
    EmployeeFinder.Models/Position.cs
    C#
    mit
    340
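A minimal usage sketch for the enum above (the demo class and its output comments are illustrative, not part of the EmployeeFinder project):

using System;
using EmployeeFinder.Models;

class PositionDemo
{
    static void Main()
    {
        Position p = Position.Chef;
        Console.WriteLine(p);        // prints "Chef"
        Console.WriteLine((int)p);   // prints 6 (zero-based declaration order)

        // Enum.TryParse is handy when a position arrives as text, e.g. from a form:
        if (Enum.TryParse("Manager", out Position parsed))
        {
            Console.WriteLine(parsed == Position.Manager);  // prints True
        }
    }
}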
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

namespace Domain
{
    public class Meeting
    {
        public int ConsultantId { get; set; }
        public Consultant Consultant { get; set; }
        public int UserId { get; set; }
        public User User { get; set; }
        public DateTime BeginTime { get; set; }
        public DateTime EndTime { get; set; }

        public override string ToString()
        {
            return $"{BeginTime} -> {EndTime}";
        }
    }
}
    rohansen/Code-Examples
    Database/TransactionScopeWithGUI/Domain/Meeting.cs
    C#
    mit
    559
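A short sketch showing how the ToString() override renders a Meeting; the ids and times are illustrative:

using System;
using Domain;

class MeetingDemo
{
    static void Main()
    {
        var meeting = new Meeting
        {
            ConsultantId = 1,
            UserId = 2,
            BeginTime = new DateTime(2017, 5, 2, 10, 0, 0),
            EndTime = new DateTime(2017, 5, 2, 11, 0, 0)
        };

        // Console.WriteLine calls the overridden ToString(), printing e.g.
        // "02.05.2017 10:00:00 -> 02.05.2017 11:00:00" (exact format is culture-dependent)
        Console.WriteLine(meeting);
    }
}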
module PiwikAnalytics
  module Helpers
    def piwik_tracking_tag
      config = PiwikAnalytics.configuration
      return if config.disabled?

      if config.use_async?
        file = "piwik_analytics/piwik_tracking_tag_async"
      else
        file = "piwik_analytics/piwik_tracking_tag"
      end
      render({
        :file => file,
        :locals => {:url => config.url, :id_site => config.id_site}
      })
    end
  end
end
    piwik/piwik-ruby-tracking
    lib/piwik_analytics/helpers.rb
    Ruby
    mit
    435
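The helper only reads four settings from PiwikAnalytics.configuration: disabled?, use_async?, url and id_site. A minimal configuration sketch follows; the configure block and plain setters are assumptions (the gem backs its configuration with configatron, so the exact setter API may differ):

# config/initializers/piwik_analytics.rb -- hypothetical initializer
PiwikAnalytics.configure do |config|
  config.url       = "https://piwik.example.com"  # Piwik server base URL (assumed setter)
  config.id_site   = 1                            # site id registered in Piwik (assumed setter)
  config.use_async = true                         # render the async tracking tag variant
  config.disabled  = false                        # set true to suppress tag output entirely
end

A layout would then simply call <%= piwik_tracking_tag %> to emit the tracking snippet.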
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
@author Stephan Reith
@date 31.08.2016

This is a simple example to demonstrate how the ROS Spinnaker Interface can be used.

You will also need a ROS Listener and a ROS Talker to send and receive data.
Make sure they communicate over the same ROS topics and std_msgs.Int64 ROS Messages used in here.
"""

import spynnaker.pyNN as pynn

from ros_spinnaker_interface import ROS_Spinnaker_Interface
# import transfer_functions as tf
from ros_spinnaker_interface import SpikeSourcePoisson
from ros_spinnaker_interface import SpikeSinkSmoothing


ts = 0.1
n_neurons = 1
simulation_time = 10000  # ms

pynn.setup(timestep=ts, min_delay=ts, max_delay=2.0*ts)

pop = pynn.Population(size=n_neurons, cellclass=pynn.IF_curr_exp, cellparams={}, label='pop')

# The ROS_Spinnaker_Interface just needs to be initialised. The following parameters are possible:
ros_interface = ROS_Spinnaker_Interface(
    n_neurons_source=n_neurons,             # number of neurons of the injector population
    Spike_Source_Class=SpikeSourcePoisson,  # the transfer function ROS Input -> Spikes you want to use.
    Spike_Sink_Class=SpikeSinkSmoothing,    # the transfer function Spikes -> ROS Output you want to use.
                                            # You can choose from the transfer_functions module
                                            # or write one yourself.
    output_population=pop,                  # the pynn population you wish to receive the
                                            # live spikes from.
    ros_topic_send='to_spinnaker',          # the ROS topic used for the incoming ROS values.
    ros_topic_recv='from_spinnaker',        # the ROS topic used for the outgoing ROS values.
    clk_rate=1000,                          # mainloop clock (update) rate in Hz.
    ros_output_rate=10)                     # number of ROS messages sent out per second.

# Build your network, run the simulation and optionally record the spikes and voltages.
pynn.Projection(ros_interface, pop, pynn.OneToOneConnector(weights=5, delays=1))

pop.record()
pop.record_v()

pynn.run(simulation_time)

spikes = pop.getSpikes()

pynn.end()

# Plot
import pylab

spike_times = [spike[1] for spike in spikes]
spike_ids = [spike[0] for spike in spikes]

pylab.plot(spike_times, spike_ids, ".")
pylab.xlabel('Time (ms)')
pylab.ylabel('Neuron ID')
pylab.title('Spike Plot')
pylab.xlim(xmin=0)
pylab.show()
    reiths/ros_spinnaker_interface
    examples/example_ros_spinnaker_interface.py
    Python
    mit
    2,533
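The docstring above asks for a matching ROS talker and listener exchanging std_msgs.Int64 on the same topics. A minimal talker sketch using rospy (node name and publish rate are arbitrary choices):

#!/usr/bin/env python
import rospy
from std_msgs.msg import Int64

rospy.init_node('spinnaker_talker')
# Topic name matches ros_topic_send in the interface above.
pub = rospy.Publisher('to_spinnaker', Int64, queue_size=10)
rate = rospy.Rate(10)  # 10 Hz

value = 0
while not rospy.is_shutdown():
    pub.publish(Int64(value))  # inject a value into the SpiNNaker input population
    value += 1
    rate.sleep()

A listener is symmetric: subscribe to 'from_spinnaker' with a callback taking an Int64 message.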
    <!DOCTYPE html> <html lang="en"> <head> <meta charset="utf-8"> <meta name="viewport" content="width=device-width, initial-scale=1"> <title>area-method: Not compatible 👼</title> <link rel="shortcut icon" type="image/png" href="../../../../../favicon.png" /> <link href="../../../../../bootstrap.min.css" rel="stylesheet"> <link href="../../../../../bootstrap-custom.css" rel="stylesheet"> <link href="//maxcdn.bootstrapcdn.com/font-awesome/4.2.0/css/font-awesome.min.css" rel="stylesheet"> <script src="../../../../../moment.min.js"></script> <!-- HTML5 Shim and Respond.js IE8 support of HTML5 elements and media queries --> <!-- WARNING: Respond.js doesn't work if you view the page via file:// --> <!--[if lt IE 9]> <script src="https://oss.maxcdn.com/html5shiv/3.7.2/html5shiv.min.js"></script> <script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script> <![endif]--> </head> <body> <div class="container"> <div class="navbar navbar-default" role="navigation"> <div class="container-fluid"> <div class="navbar-header"> <a class="navbar-brand" href="../../../../.."><i class="fa fa-lg fa-flag-checkered"></i> Coq bench</a> </div> <div id="navbar" class="collapse navbar-collapse"> <ul class="nav navbar-nav"> <li><a href="../..">clean / released</a></li> <li class="active"><a href="">8.13.1 / area-method - 8.5.0</a></li> </ul> </div> </div> </div> <div class="article"> <div class="row"> <div class="col-md-12"> <a href="../..">« Up</a> <h1> area-method <small> 8.5.0 <span class="label label-info">Not compatible 👼</span> </small> </h1> <p>📅 <em><script>document.write(moment("2022-02-04 18:52:19 +0000", "YYYY-MM-DD HH:mm:ss Z").fromNow());</script> (2022-02-04 18:52:19 UTC)</em><p> <h2>Context</h2> <pre># Packages matching: installed # Name # Installed # Synopsis base-bigarray base base-threads base base-unix base conf-findutils 1 Virtual package relying on findutils conf-gmp 4 Virtual package relying on a GMP lib system installation coq 8.13.1 Formal proof management system num 1.4 The legacy Num library for arbitrary-precision integer and rational arithmetic ocaml 4.07.1 The OCaml compiler (virtual package) ocaml-base-compiler 4.07.1 Official release 4.07.1 ocaml-config 1 OCaml Switch Configuration ocamlfind 1.9.3 A library manager for OCaml zarith 1.12 Implements arithmetic and logical operations over arbitrary-precision integers # opam file: opam-version: &quot;2.0&quot; maintainer: &quot;[email protected]&quot; homepage: &quot;https://github.com/coq-contribs/area-method&quot; license: &quot;Proprietary&quot; build: [make &quot;-j%{jobs}%&quot;] install: [make &quot;install&quot;] remove: [&quot;rm&quot; &quot;-R&quot; &quot;%{lib}%/coq/user-contrib/AreaMethod&quot;] depends: [ &quot;ocaml&quot; &quot;coq&quot; {&gt;= &quot;8.5&quot; &amp; &lt; &quot;8.6~&quot;} ] tags: [ &quot;keyword:geometry&quot; &quot;keyword:chou gao zhang area method&quot; &quot;keyword:decision procedure&quot; &quot;category:Mathematics/Geometry/AutomatedDeduction&quot; &quot;date:2004-2010&quot; ] authors: [ &quot;Julien Narboux &lt;&gt;&quot; ] bug-reports: &quot;https://github.com/coq-contribs/area-method/issues&quot; dev-repo: &quot;git+https://github.com/coq-contribs/area-method.git&quot; synopsis: &quot;The Chou, Gao and Zhang area method&quot; description: &quot;&quot;&quot; This contribution is the implementation of the Chou, Gao and Zhang&#39;s area method decision procedure for euclidean plane geometry. 
This development contains a partial formalization of the book &quot;Machine Proofs in Geometry, Automated Production of Readable Proofs for Geometry Theorems&quot; by Chou, Gao and Zhang. The examples shown automatically (there are more than 100 examples) includes the Ceva, Desargues, Menelaus, Pascal, Centroïd, Pappus, Gauss line, Euler line, Napoleon theorems. Changelog 2.1 : remove some not needed assumptions in some elimination lemmas (2010) 2.0 : extension implementation to Euclidean geometry (2009-2010) 1.0 : first implementation for affine geometry (2004)&quot;&quot;&quot; flags: light-uninstall url { src: &quot;https://github.com/coq-contribs/area-method/archive/v8.5.0.tar.gz&quot; checksum: &quot;md5=ba9772aa2056aa4bc9ccc051a9a76a7f&quot; } </pre> <h2>Lint</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> </dl> <h2>Dry install 🏜️</h2> <p>Dry install with the current Coq version:</p> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>opam install -y --show-action coq-area-method.8.5.0 coq.8.13.1</code></dd> <dt>Return code</dt> <dd>5120</dd> <dt>Output</dt> <dd><pre>[NOTE] Package coq is already installed (current version is 8.13.1). The following dependencies couldn&#39;t be met: - coq-area-method -&gt; coq &lt; 8.6~ -&gt; ocaml &lt; 4.06.0 base of this switch (use `--unlock-base&#39; to force) No solution found, exiting </pre></dd> </dl> <p>Dry install without Coq/switch base, to test if the problem was incompatibility with the current Coq/OCaml version:</p> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>opam remove -y coq; opam install -y --show-action --unlock-base coq-area-method.8.5.0</code></dd> <dt>Return code</dt> <dd>0</dd> </dl> <h2>Install dependencies</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Duration</dt> <dd>0 s</dd> </dl> <h2>Install 🚀</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Duration</dt> <dd>0 s</dd> </dl> <h2>Installation size</h2> <p>No files were installed.</p> <h2>Uninstall 🧹</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Missing removes</dt> <dd> none </dd> <dt>Wrong removes</dt> <dd> none </dd> </dl> </div> </div> </div> <hr/> <div class="footer"> <p class="text-center"> Sources are on <a href="https://github.com/coq-bench">GitHub</a> © Guillaume Claret 🐣 </p> </div> </div> <script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script> <script src="../../../../../bootstrap.min.js"></script> </body> </html>
    coq-bench/coq-bench.github.io
    clean/Linux-x86_64-4.07.1-2.0.6/released/8.13.1/area-method/8.5.0.html
    HTML
    mit
    7,612
    <!DOCTYPE html> <html lang="en"> <head> <meta charset="utf-8"> <meta name="viewport" content="width=device-width, initial-scale=1"> <title>ieee754: Not compatible</title> <link rel="shortcut icon" type="image/png" href="../../../../../favicon.png" /> <link href="../../../../../bootstrap.min.css" rel="stylesheet"> <link href="../../../../../bootstrap-custom.css" rel="stylesheet"> <link href="//maxcdn.bootstrapcdn.com/font-awesome/4.2.0/css/font-awesome.min.css" rel="stylesheet"> <script src="../../../../../moment.min.js"></script> <!-- HTML5 Shim and Respond.js IE8 support of HTML5 elements and media queries --> <!-- WARNING: Respond.js doesn't work if you view the page via file:// --> <!--[if lt IE 9]> <script src="https://oss.maxcdn.com/html5shiv/3.7.2/html5shiv.min.js"></script> <script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script> <![endif]--> </head> <body> <div class="container"> <div class="navbar navbar-default" role="navigation"> <div class="container-fluid"> <div class="navbar-header"> <a class="navbar-brand" href="../../../../.."><i class="fa fa-lg fa-flag-checkered"></i> Coq bench</a> </div> <div id="navbar" class="collapse navbar-collapse"> <ul class="nav navbar-nav"> <li><a href="../..">clean / released</a></li> <li class="active"><a href="">8.9.1 / ieee754 - 8.7.0</a></li> </ul> </div> </div> </div> <div class="article"> <div class="row"> <div class="col-md-12"> <a href="../..">« Up</a> <h1> ieee754 <small> 8.7.0 <span class="label label-info">Not compatible</span> </small> </h1> <p><em><script>document.write(moment("2020-08-24 17:47:06 +0000", "YYYY-MM-DD HH:mm:ss Z").fromNow());</script> (2020-08-24 17:47:06 UTC)</em><p> <h2>Context</h2> <pre># Packages matching: installed # Name # Installed # Synopsis base-bigarray base base-num base Num library distributed with the OCaml compiler base-threads base base-unix base camlp5 7.12 Preprocessor-pretty-printer of OCaml conf-findutils 1 Virtual package relying on findutils conf-m4 1 Virtual package relying on m4 coq 8.9.1 Formal proof management system num 0 The Num library for arbitrary-precision integer and rational arithmetic ocaml 4.05.0 The OCaml compiler (virtual package) ocaml-base-compiler 4.05.0 Official 4.05.0 release ocaml-config 1 OCaml Switch Configuration ocamlfind 1.8.1 A library manager for OCaml # opam file: opam-version: &quot;2.0&quot; maintainer: &quot;[email protected]&quot; homepage: &quot;https://github.com/coq-contribs/ieee754&quot; license: &quot;LGPL 2.1&quot; build: [make &quot;-j%{jobs}%&quot;] install: [make &quot;install&quot;] remove: [&quot;rm&quot; &quot;-R&quot; &quot;%{lib}%/coq/user-contrib/IEEE754&quot;] depends: [ &quot;ocaml&quot; &quot;coq&quot; {&gt;= &quot;8.7&quot; &amp; &lt; &quot;8.8~&quot;} ] tags: [ &quot;keyword: floating-point arithmetic&quot; &quot;keyword: floats&quot; &quot;keyword: IEEE&quot; &quot;category: Computer Science/Data Types and Data Structures&quot; &quot;category: Computer Science/Semantics and Compilation/Semantics&quot; &quot;date: 1997&quot; ] authors: [ &quot;Patrick Loiseleur&quot; ] bug-reports: &quot;https://github.com/coq-contribs/ieee754/issues&quot; dev-repo: &quot;git+https://github.com/coq-contribs/ieee754.git&quot; synopsis: &quot;A formalisation of the IEEE754 norm on floating-point arithmetic&quot; description: &quot;&quot;&quot; This library contains a non-verified implementation of binary floating-point addition and multiplication operators inspired by the IEEE-754 standard. It is today outdated. 
See the attached 1997 report rapport-stage-dea.ps.gz for a discussion (in French) of this work. For the state of the art at the time of updating this notice, see e.g. &quot;Flocq: A Unified Library for Proving Floating-point Algorithms in Coq&quot; by S. Boldo and G. Melquiond, 2011.&quot;&quot;&quot; flags: light-uninstall url { src: &quot;https://github.com/coq-contribs/ieee754/archive/v8.7.0.tar.gz&quot; checksum: &quot;md5=c79fabb9831e0231bc5ce75f3be6aad7&quot; } </pre> <h2>Lint</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> </dl> <h2>Dry install</h2> <p>Dry install with the current Coq version:</p> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>opam install -y --show-action coq-ieee754.8.7.0 coq.8.9.1</code></dd> <dt>Return code</dt> <dd>5120</dd> <dt>Output</dt> <dd><pre>[NOTE] Package coq is already installed (current version is 8.9.1). The following dependencies couldn&#39;t be met: - coq-ieee754 -&gt; coq &lt; 8.8~ -&gt; ocaml &lt; 4.03.0 base of this switch (use `--unlock-base&#39; to force) Your request can&#39;t be satisfied: - No available version of coq satisfies the constraints No solution found, exiting </pre></dd> </dl> <p>Dry install without Coq/switch base, to test if the problem was incompatibility with the current Coq/OCaml version:</p> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>opam remove -y coq; opam install -y --show-action --unlock-base coq-ieee754.8.7.0</code></dd> <dt>Return code</dt> <dd>0</dd> </dl> <h2>Install dependencies</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Duration</dt> <dd>0 s</dd> </dl> <h2>Install</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Duration</dt> <dd>0 s</dd> </dl> <h2>Installation size</h2> <p>No files were installed.</p> <h2>Uninstall</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Missing removes</dt> <dd> none </dd> <dt>Wrong removes</dt> <dd> none </dd> </dl> </div> </div> </div> <hr/> <div class="footer"> <p class="text-center"> <small>Sources are on <a href="https://github.com/coq-bench">GitHub</a>. © Guillaume Claret.</small> </p> </div> </div> <script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script> <script src="../../../../../bootstrap.min.js"></script> </body> </html>
    coq-bench/coq-bench.github.io
    clean/Linux-x86_64-4.05.0-2.0.6/released/8.9.1/ieee754/8.7.0.html
    HTML
    mit
    7,485
    /** * @license * Copyright Google Inc. All Rights Reserved. * * Use of this source code is governed by an MIT-style license that can be * found in the LICENSE file at https://angular.io/license */ import {CompileDirectiveMetadata, CompileStylesheetMetadata, CompileTemplateMetadata, templateSourceUrl} from './compile_metadata'; import {CompilerConfig, preserveWhitespacesDefault} from './config'; import {ViewEncapsulation} from './core'; import * as html from './ml_parser/ast'; import {HtmlParser} from './ml_parser/html_parser'; import {InterpolationConfig} from './ml_parser/interpolation_config'; import {ParseTreeResult as HtmlParseTreeResult} from './ml_parser/parser'; import {ResourceLoader} from './resource_loader'; import {extractStyleUrls, isStyleUrlResolvable} from './style_url_resolver'; import {PreparsedElementType, preparseElement} from './template_parser/template_preparser'; import {UrlResolver} from './url_resolver'; import {isDefined, stringify, SyncAsync, syntaxError} from './util'; export interface PrenormalizedTemplateMetadata { ngModuleType: any; componentType: any; moduleUrl: string; template: string|null; templateUrl: string|null; styles: string[]; styleUrls: string[]; interpolation: [string, string]|null; encapsulation: ViewEncapsulation|null; animations: any[]; preserveWhitespaces: boolean|null; } export class DirectiveNormalizer { private _resourceLoaderCache = new Map<string, SyncAsync<string>>(); constructor( private _resourceLoader: ResourceLoader, private _urlResolver: UrlResolver, private _htmlParser: HtmlParser, private _config: CompilerConfig) {} clearCache(): void { this._resourceLoaderCache.clear(); } clearCacheFor(normalizedDirective: CompileDirectiveMetadata): void { if (!normalizedDirective.isComponent) { return; } const template = normalizedDirective.template !; this._resourceLoaderCache.delete(template.templateUrl!); template.externalStylesheets.forEach((stylesheet) => { this._resourceLoaderCache.delete(stylesheet.moduleUrl!); }); } private _fetch(url: string): SyncAsync<string> { let result = this._resourceLoaderCache.get(url); if (!result) { result = this._resourceLoader.get(url); this._resourceLoaderCache.set(url, result); } return result; } normalizeTemplate(prenormData: PrenormalizedTemplateMetadata): SyncAsync<CompileTemplateMetadata> { if (isDefined(prenormData.template)) { if (isDefined(prenormData.templateUrl)) { throw syntaxError(`'${ stringify(prenormData .componentType)}' component cannot define both template and templateUrl`); } if (typeof prenormData.template !== 'string') { throw syntaxError(`The template specified for component ${ stringify(prenormData.componentType)} is not a string`); } } else if (isDefined(prenormData.templateUrl)) { if (typeof prenormData.templateUrl !== 'string') { throw syntaxError(`The templateUrl specified for component ${ stringify(prenormData.componentType)} is not a string`); } } else { throw syntaxError( `No template specified for component ${stringify(prenormData.componentType)}`); } if (isDefined(prenormData.preserveWhitespaces) && typeof prenormData.preserveWhitespaces !== 'boolean') { throw syntaxError(`The preserveWhitespaces option for component ${ stringify(prenormData.componentType)} must be a boolean`); } return SyncAsync.then( this._preParseTemplate(prenormData), (preparsedTemplate) => this._normalizeTemplateMetadata(prenormData, preparsedTemplate)); } private _preParseTemplate(prenomData: PrenormalizedTemplateMetadata): SyncAsync<PreparsedTemplate> { let template: SyncAsync<string>; let 
templateUrl: string; if (prenomData.template != null) { template = prenomData.template; templateUrl = prenomData.moduleUrl; } else { templateUrl = this._urlResolver.resolve(prenomData.moduleUrl, prenomData.templateUrl!); template = this._fetch(templateUrl); } return SyncAsync.then( template, (template) => this._preparseLoadedTemplate(prenomData, template, templateUrl)); } private _preparseLoadedTemplate( prenormData: PrenormalizedTemplateMetadata, template: string, templateAbsUrl: string): PreparsedTemplate { const isInline = !!prenormData.template; const interpolationConfig = InterpolationConfig.fromArray(prenormData.interpolation!); const templateUrl = templateSourceUrl( {reference: prenormData.ngModuleType}, {type: {reference: prenormData.componentType}}, {isInline, templateUrl: templateAbsUrl}); const rootNodesAndErrors = this._htmlParser.parse( template, templateUrl, {tokenizeExpansionForms: true, interpolationConfig}); if (rootNodesAndErrors.errors.length > 0) { const errorString = rootNodesAndErrors.errors.join('\n'); throw syntaxError(`Template parse errors:\n${errorString}`); } const templateMetadataStyles = this._normalizeStylesheet(new CompileStylesheetMetadata( {styles: prenormData.styles, moduleUrl: prenormData.moduleUrl})); const visitor = new TemplatePreparseVisitor(); html.visitAll(visitor, rootNodesAndErrors.rootNodes); const templateStyles = this._normalizeStylesheet(new CompileStylesheetMetadata( {styles: visitor.styles, styleUrls: visitor.styleUrls, moduleUrl: templateAbsUrl})); const styles = templateMetadataStyles.styles.concat(templateStyles.styles); const inlineStyleUrls = templateMetadataStyles.styleUrls.concat(templateStyles.styleUrls); const styleUrls = this ._normalizeStylesheet(new CompileStylesheetMetadata( {styleUrls: prenormData.styleUrls, moduleUrl: prenormData.moduleUrl})) .styleUrls; return { template, templateUrl: templateAbsUrl, isInline, htmlAst: rootNodesAndErrors, styles, inlineStyleUrls, styleUrls, ngContentSelectors: visitor.ngContentSelectors, }; } private _normalizeTemplateMetadata( prenormData: PrenormalizedTemplateMetadata, preparsedTemplate: PreparsedTemplate): SyncAsync<CompileTemplateMetadata> { return SyncAsync.then( this._loadMissingExternalStylesheets( preparsedTemplate.styleUrls.concat(preparsedTemplate.inlineStyleUrls)), (externalStylesheets) => this._normalizeLoadedTemplateMetadata( prenormData, preparsedTemplate, externalStylesheets)); } private _normalizeLoadedTemplateMetadata( prenormData: PrenormalizedTemplateMetadata, preparsedTemplate: PreparsedTemplate, stylesheets: Map<string, CompileStylesheetMetadata>): CompileTemplateMetadata { // Algorithm: // - produce exactly 1 entry per original styleUrl in // CompileTemplateMetadata.externalStylesheets with all styles inlined // - inline all styles that are referenced by the template into CompileTemplateMetadata.styles. 
// Reason: be able to determine how many stylesheets there are even without loading // the template nor the stylesheets, so we can create a stub for TypeScript always synchronously // (as resource loading may be async) const styles = [...preparsedTemplate.styles]; this._inlineStyles(preparsedTemplate.inlineStyleUrls, stylesheets, styles); const styleUrls = preparsedTemplate.styleUrls; const externalStylesheets = styleUrls.map(styleUrl => { const stylesheet = stylesheets.get(styleUrl)!; const styles = [...stylesheet.styles]; this._inlineStyles(stylesheet.styleUrls, stylesheets, styles); return new CompileStylesheetMetadata({moduleUrl: styleUrl, styles: styles}); }); let encapsulation = prenormData.encapsulation; if (encapsulation == null) { encapsulation = this._config.defaultEncapsulation; } if (encapsulation === ViewEncapsulation.Emulated && styles.length === 0 && styleUrls.length === 0) { encapsulation = ViewEncapsulation.None; } return new CompileTemplateMetadata({ encapsulation, template: preparsedTemplate.template, templateUrl: preparsedTemplate.templateUrl, htmlAst: preparsedTemplate.htmlAst, styles, styleUrls, ngContentSelectors: preparsedTemplate.ngContentSelectors, animations: prenormData.animations, interpolation: prenormData.interpolation, isInline: preparsedTemplate.isInline, externalStylesheets, preserveWhitespaces: preserveWhitespacesDefault( prenormData.preserveWhitespaces, this._config.preserveWhitespaces), }); } private _inlineStyles( styleUrls: string[], stylesheets: Map<string, CompileStylesheetMetadata>, targetStyles: string[]) { styleUrls.forEach(styleUrl => { const stylesheet = stylesheets.get(styleUrl)!; stylesheet.styles.forEach(style => targetStyles.push(style)); this._inlineStyles(stylesheet.styleUrls, stylesheets, targetStyles); }); } private _loadMissingExternalStylesheets( styleUrls: string[], loadedStylesheets: Map<string, CompileStylesheetMetadata> = new Map<string, CompileStylesheetMetadata>()): SyncAsync<Map<string, CompileStylesheetMetadata>> { return SyncAsync.then( SyncAsync.all(styleUrls.filter((styleUrl) => !loadedStylesheets.has(styleUrl)) .map( styleUrl => SyncAsync.then( this._fetch(styleUrl), (loadedStyle) => { const stylesheet = this._normalizeStylesheet(new CompileStylesheetMetadata( {styles: [loadedStyle], moduleUrl: styleUrl})); loadedStylesheets.set(styleUrl, stylesheet); return this._loadMissingExternalStylesheets( stylesheet.styleUrls, loadedStylesheets); }))), (_) => loadedStylesheets); } private _normalizeStylesheet(stylesheet: CompileStylesheetMetadata): CompileStylesheetMetadata { const moduleUrl = stylesheet.moduleUrl!; const allStyleUrls = stylesheet.styleUrls.filter(isStyleUrlResolvable) .map(url => this._urlResolver.resolve(moduleUrl, url)); const allStyles = stylesheet.styles.map(style => { const styleWithImports = extractStyleUrls(this._urlResolver, moduleUrl, style); allStyleUrls.push(...styleWithImports.styleUrls); return styleWithImports.style; }); return new CompileStylesheetMetadata( {styles: allStyles, styleUrls: allStyleUrls, moduleUrl: moduleUrl}); } } interface PreparsedTemplate { template: string; templateUrl: string; isInline: boolean; htmlAst: HtmlParseTreeResult; styles: string[]; inlineStyleUrls: string[]; styleUrls: string[]; ngContentSelectors: string[]; } class TemplatePreparseVisitor implements html.Visitor { ngContentSelectors: string[] = []; styles: string[] = []; styleUrls: string[] = []; ngNonBindableStackCount: number = 0; visitElement(ast: html.Element, context: any): any { const preparsedElement = 
preparseElement(ast); switch (preparsedElement.type) { case PreparsedElementType.NG_CONTENT: if (this.ngNonBindableStackCount === 0) { this.ngContentSelectors.push(preparsedElement.selectAttr); } break; case PreparsedElementType.STYLE: let textContent = ''; ast.children.forEach(child => { if (child instanceof html.Text) { textContent += child.value; } }); this.styles.push(textContent); break; case PreparsedElementType.STYLESHEET: this.styleUrls.push(preparsedElement.hrefAttr); break; default: break; } if (preparsedElement.nonBindable) { this.ngNonBindableStackCount++; } html.visitAll(this, ast.children); if (preparsedElement.nonBindable) { this.ngNonBindableStackCount--; } return null; } visitExpansion(ast: html.Expansion, context: any): any { html.visitAll(this, ast.cases); } visitExpansionCase(ast: html.ExpansionCase, context: any): any { html.visitAll(this, ast.expression); } visitComment(ast: html.Comment, context: any): any { return null; } visitAttribute(ast: html.Attribute, context: any): any { return null; } visitText(ast: html.Text, context: any): any { return null; } }
    matsko/angular
    packages/compiler/src/directive_normalizer.ts
    TypeScript
    mit
    12,863
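normalizeTemplate() enforces that exactly one of template/templateUrl is set, that both are strings when present, and that preserveWhitespaces is a boolean when present; otherwise it throws a syntaxError. A sketch of a valid PrenormalizedTemplateMetadata literal (the deep import path and the component/module references are assumptions made only to keep the sketch self-contained):

import {PrenormalizedTemplateMetadata} from '@angular/compiler/src/directive_normalizer'; // assumed import path

// hypothetical component/module references, declared only for the sketch
declare const AppModule: any;
declare const HelloComponent: any;

const prenormData: PrenormalizedTemplateMetadata = {
  ngModuleType: AppModule,
  componentType: HelloComponent,
  moduleUrl: '/app/hello.ts',
  template: '<h1>{{ title }}</h1>', // inline template supplied...
  templateUrl: null,                // ...so templateUrl must stay null, or a syntaxError is thrown
  styles: [],
  styleUrls: ['./hello.css'],       // resolved against moduleUrl during normalization
  interpolation: null,              // null falls back to the default {{ }} interpolation config
  encapsulation: null,              // null falls back to CompilerConfig.defaultEncapsulation
  animations: [],
  preserveWhitespaces: null,        // null falls back to the CompilerConfig default
};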
    <?php /* TwigBundle:Exception:error.atom.twig */ class __TwigTemplate_405349459f7f2e8922747537b1c12aa2323bb61b0265aaf549db7e51eafd66f4 extends Twig_Template { public function __construct(Twig_Environment $env) { parent::__construct($env); $this->parent = false; $this->blocks = array( ); } protected function doDisplay(array $context, array $blocks = array()) { // line 1 $this->env->loadTemplate("TwigBundle:Exception:error.xml.twig")->display(array_merge($context, array("exception" => (isset($context["exception"]) ? $context["exception"] : $this->getContext($context, "exception"))))); } public function getTemplateName() { return "TwigBundle:Exception:error.atom.twig"; } public function isTraitable() { return false; } public function getDebugInfo() { return array ( 19 => 1, 79 => 21, 72 => 13, 69 => 12, 47 => 18, 40 => 11, 37 => 10, 22 => 1, 246 => 32, 157 => 56, 145 => 46, 139 => 45, 131 => 42, 123 => 41, 120 => 40, 115 => 39, 111 => 38, 108 => 37, 101 => 33, 98 => 32, 96 => 31, 83 => 25, 74 => 14, 66 => 11, 55 => 16, 52 => 21, 50 => 14, 43 => 9, 41 => 8, 35 => 9, 32 => 4, 29 => 6, 209 => 82, 203 => 78, 199 => 76, 193 => 73, 189 => 71, 187 => 70, 182 => 68, 176 => 64, 173 => 63, 168 => 62, 164 => 58, 162 => 57, 154 => 54, 149 => 51, 147 => 50, 144 => 49, 141 => 48, 133 => 42, 130 => 41, 125 => 38, 122 => 37, 116 => 36, 112 => 35, 109 => 34, 106 => 36, 103 => 32, 99 => 30, 95 => 28, 92 => 29, 86 => 24, 82 => 22, 80 => 24, 73 => 19, 64 => 19, 60 => 6, 57 => 12, 54 => 22, 51 => 10, 48 => 9, 45 => 17, 42 => 16, 39 => 6, 36 => 5, 33 => 4, 30 => 3,); } }
    Mchichou/UEOptionnelles
    app/cache/dev/twig/40/53/49459f7f2e8922747537b1c12aa2323bb61b0265aaf549db7e51eafd66f4.php
    PHP
    mit
    1,789
<HTML>
<!-- Created by HTTrack Website Copier/3.49-2 [XR&CO'2014] -->
<!-- Mirrored from thevillagedanang.com/?p=62 by HTTrack Website Copier/3.x [XR&CO'2014], Thu, 02 Nov 2017 14:46:03 GMT -->
<!-- Added by HTTrack --><meta http-equiv="content-type" content="text/html;charset=UTF-8" /><!-- /Added by HTTrack -->
<HEAD>
<META HTTP-EQUIV="Content-Type" CONTENT="text/html;charset=UTF-8">
<META HTTP-EQUIV="Refresh" CONTENT="0; URL=about-us/index.html">
<TITLE>Page has moved</TITLE>
</HEAD>
<BODY>
<A HREF="about-us/index.html"><h3>Click here...</h3></A>
</BODY>
<!-- Created by HTTrack Website Copier/3.49-2 [XR&CO'2014] -->
<!-- Mirrored from thevillagedanang.com/?p=62 by HTTrack Website Copier/3.x [XR&CO'2014], Thu, 02 Nov 2017 14:46:03 GMT -->
</HTML>
    hoangphuc1494/sourd_codeigniter
    assest/thevillage/index5c36.html
    HTML
    mit
    758
/**
 * HTTP.test
 */

"use strict";

/* Node modules */

/* Third-party modules */
var steeplejack = require("steeplejack");

/* Files */

describe("HTTPError test", function () {

    var HTTPError;
    beforeEach(function () {
        injector(function (_HTTPError_) {
            HTTPError = _HTTPError_;
        });
    });

    describe("Instantiation tests", function () {

        it("should extend the steeplejack Fatal exception", function () {
            var obj = new HTTPError("text");

            expect(obj).to.be.instanceof(HTTPError)
                .to.be.instanceof(steeplejack.Exceptions.Fatal);

            expect(obj.type).to.be.equal("HTTPError");
            expect(obj.message).to.be.equal("text");
            expect(obj.httpCode).to.be.equal(500);
            expect(obj.getHttpCode()).to.be.equal(500);
        });

        it("should set the HTTP code in the first input", function () {
            var obj = new HTTPError(401);

            expect(obj.httpCode).to.be.equal(401);
            expect(obj.getHttpCode()).to.be.equal(401);
        });

    });

});
    riggerthegeek/steeplejack-errors
    test/unit/errors/HTTP.test.js
    JavaScript
    mit
    1,103
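The behaviour the test pins down: a string argument becomes the message with a default HTTP code of 500, and a numeric first argument sets the HTTP code instead. A usage sketch (the require path is an assumption about how the package exposes the class):

"use strict";

var HTTPError = require("steeplejack-errors").HTTPError; // hypothetical export path

var notAuth = new HTTPError(401);
console.log(notAuth.getHttpCode()); // 401

var generic = new HTTPError("something broke");
console.log(generic.message);       // "something broke"
console.log(generic.getHttpCode()); // 500 (the default)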
require File.join(File.dirname(__FILE__), './scribd-carrierwave/version')
require File.join(File.dirname(__FILE__), './scribd-carrierwave/config')
require 'carrierwave'
require 'rscribd'
require 'configatron'

module ScribdCarrierWave
  class << self
    def included(base)
      base.extend ClassMethods
    end

    def upload uploader
      file_path = full_path(uploader)
      args = { file: file_path, access: ( uploader.class.public? ? 'public' : 'private' )}

      type = File.extname(file_path)
      if type
        type = type.gsub(/^\./, '').gsub(/\?.*$/, '')
        args.merge!(type: type) if type != ''
      end

      scribd_user.upload(args)
    end

    def destroy uploader
      document = scribd_user.find_document(uploader.ipaper_id) rescue nil
      document.destroy if !document.nil?
    end

    def load_ipaper_document(id)
      scribd_user.find_document(id) rescue nil
    end

    def full_path uploader
      if uploader.url =~ /^http(s?):\/\//
        uploader.url
      else
        uploader.root + uploader.url
      end
    end

    module ClassMethods
      def public?
        @public
      end

      def has_ipaper(public = false)
        include InstanceMethods
        after :store, :upload_to_scribd
        before :remove, :delete_from_scribd
        @public = !!public
      end
    end

    module InstanceMethods
      def self.included(base)
        base.extend ClassMethods
      end

      def upload_to_scribd files
        res = ScribdCarrierWave::upload(self)
        set_params res
      end

      def delete_from_scribd
        ScribdCarrierWave::destroy(self)
      end

      def display_ipaper(options = {})
        id = options.delete(:id)
        <<-END
          <script type="text/javascript" src="//www.scribd.com/javascripts/scribd_api.js"></script>
          <div id="embedded_doc#{id}">#{options.delete(:alt)}</div>
          <script type="text/javascript">
            var scribd_doc = scribd.Document.getDoc(#{ipaper_id}, '#{ipaper_access_key}');
            scribd_doc.addParam( 'jsapi_version', 2 );
#{options.map do |k,v|
  "            scribd_doc.addParam('#{k.to_s}', #{v.is_a?(String) ? "'#{v.to_s}'" : v.to_s});"
end.join("\n")}
            scribd_doc.write("embedded_doc#{id}");
          </script>
        END
      end

      def fullscreen_url
        "http://www.scribd.com/fullscreen/#{ipaper_id}?access_key=#{ipaper_access_key}"
      end

      def ipaper_id
        self.model.send("#{self.mounted_as.to_s}_ipaper_id")
      end

      def ipaper_access_key
        self.model.send("#{self.mounted_as.to_s}_ipaper_access_key")
      end

      # Returns the Scribd::Document associated with this model, or nil if it does not exist.
      def ipaper_document
        @document ||= ScribdCarrierWave::load_ipaper_document(ipaper_id)
      end

      private

      def set_params res
        self.model.update_attributes({"#{self.mounted_as}_ipaper_id" => res.doc_id,
                                      "#{self.mounted_as}_ipaper_access_key" => res.access_key})
      end
    end

    private

    def scribd_user
      Scribd::API.instance.key = ScribdCarrierWave.config.key
      Scribd::API.instance.secret = ScribdCarrierWave.config.secret
      @scribd_user = Scribd::User.login(ScribdCarrierWave.config.username, ScribdCarrierWave.config.password)
    end
  end
end

CarrierWave::Uploader::Base.send(:include, ScribdCarrierWave) if Object.const_defined?("CarrierWave")
    milkfarm/scribd-carrierwave
    lib/scribd-carrierwave.rb
    Ruby
    mit
    3,507
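Since the last line above mixes the module into CarrierWave::Uploader::Base, any uploader can opt in with has_ipaper. A sketch of a typical uploader and model (the model, mount point, and column names follow the conventions the code reads via mounted_as; the Rails model itself is hypothetical):

class DocumentUploader < CarrierWave::Uploader::Base
  has_ipaper true   # true => documents are uploaded with 'public' access on Scribd
end

class Book < ActiveRecord::Base
  mount_uploader :document, DocumentUploader
  # upload_to_scribd (after :store) fills in the document_ipaper_id and
  # document_ipaper_access_key columns via set_params
end

# In a view, render the embedded iPaper viewer:
#   <%= @book.document.display_ipaper(alt: 'Loading...') %>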
var gulp = require('gulp');
var babel = require('gulp-babel');
var concat = require('gulp-concat');
var merge = require('merge-stream');
var stylus = require('gulp-stylus');
var rename = require("gulp-rename");
var uglify = require("gulp-uglify");
var cssmin = require("gulp-cssmin");
var ngAnnotate = require('gulp-ng-annotate');
var nib = require("nib");
var watch = require('gulp-watch');

function compileJs(devOnly) {
    var othersUmd = gulp.src(['src/**/*.js', '!src/main.js'])
            .pipe(babel({
                modules: 'umdStrict',
                moduleRoot: 'angular-chatbar',
                moduleIds: true
            })),
        mainUmd = gulp.src('src/main.js')
            .pipe(babel({
                modules: 'umdStrict',
                moduleIds: true,
                moduleId: 'angular-chatbar'
            })),
        stream = merge(othersUmd, mainUmd)
            .pipe(concat('angular-chatbar.umd.js'))
            .pipe(gulp.dest('dist'));

    if (!devOnly) {
        stream = stream
            .pipe(ngAnnotate())
            .pipe(uglify())
            .pipe(rename('angular-chatbar.umd.min.js'))
            .pipe(gulp.dest('dist'));
    }

    return stream;
}

function compileCss(name, devOnly) {
    var stream = gulp.src('styles/' + name + '.styl')
        .pipe(stylus({use: nib()}))
        .pipe(rename('angular-' + name + '.css'))
        .pipe(gulp.dest('dist'));

    if (!devOnly) {
        stream = stream.pipe(cssmin())
            .pipe(rename('angular-' + name + '.min.css'))
            .pipe(gulp.dest('dist'));
    }

    return stream;
}

function compileAllCss(devOnly) {
    var streams = [];
    ['chatbar', 'chatbar.default-theme', 'chatbar.default-animations'].forEach(function (name) {
        streams.push(compileCss(name, devOnly));
    });
    return merge.apply(null, streams);
}

gulp.task('default', function() {
    return merge(compileJs(), compileAllCss());
});

gulp.task('_watch', function() {
    watch('styles/**/*.styl', function () {
        compileAllCss(true);
    });
    watch('src/**/*.js', function () {
        compileJs(true);
    });
});

gulp.task('watch', ['default', '_watch']);
    jlowcs/angular-chatbar
    gulpfile.js
    JavaScript
    mit
    1,878
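The two public tasks are driven from the gulp CLI (gulp 3.x, given the array-dependency task form); a usage sketch:

gulp          # 'default': build angular-chatbar.umd.js/.min.js and all three CSS themes into dist/
gulp watch    # run 'default' once, then rebuild dev-only artifacts when src/**/*.js or styles/**/*.styl change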
    # -*- coding: utf-8 -*- """ Resource Import Tools @copyright: 2011-12 (c) Sahana Software Foundation @license: MIT Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ # @todo: remove all interactive error reporting out of the _private methods, and raise exceptions instead. __all__ = ["S3Importer", "S3ImportJob", "S3ImportItem"] import os import sys import cPickle import tempfile from datetime import datetime from copy import deepcopy try: from cStringIO import StringIO # Faster, where available except: from StringIO import StringIO try: from lxml import etree except ImportError: print >> sys.stderr, "ERROR: lxml module needed for XML handling" raise try: import json # try stdlib (Python 2.6) except ImportError: try: import simplejson as json # try external module except: import gluon.contrib.simplejson as json # fallback to pure-Python module from gluon import * from gluon.serializers import json as jsons from gluon.storage import Storage, Messages from gluon.tools import callback from s3utils import SQLTABLES3 from s3crud import S3CRUD from s3xml import S3XML from s3utils import s3_mark_required, s3_has_foreign_key, s3_get_foreign_key DEBUG = False if DEBUG: print >> sys.stderr, "S3IMPORTER: DEBUG MODE" def _debug(m): print >> sys.stderr, m else: _debug = lambda m: None # ============================================================================= class S3Importer(S3CRUD): """ Transformable formats (XML, JSON, CSV) import handler """ UPLOAD_TABLE_NAME = "s3_import_upload" # ------------------------------------------------------------------------- def apply_method(self, r, **attr): """ Apply CRUD methods @param r: the S3Request @param attr: dictionary of parameters for the method handler @returns: output object to send to the view Known means of communicating with this module: It expects a URL of the form: /prefix/name/import It will interpret the http requests as follows: GET will trigger the upload POST will trigger either commits or display the import details DELETE will trigger deletes It will accept one of the following control vars: item: to specify a single item in the import job job: to specify a job It should not receive both so job takes precedent over item For CSV imports, the calling controller can add extra fields to the upload form to add columns to each row in the CSV. 
To add the extra fields, pass a named parameter "csv_extra_fields" to the s3_rest_controller call (or the S3Request call, respectively): s3_rest_controller(module, resourcename, csv_extra_fields=[ dict(label="ColumnLabelInTheCSV", field=field_instance) ]) The Field instance "field" will be added to the upload form, and the user input will be added to each row of the CSV under the label as specified. If the "field" validator has options, the input value will be translated into the option representation, otherwise the value will be used as-is. Note that the "label" in the dict is the column label in the CSV, whereas the field label for the form is to be set in the Field instance passed as "field". You can add any arbitrary number of csv_extra_fields to the list. Additionally, you may want to allow the user to choose whether the import shall first remove all existing data in the target table. To do so, pass a label for the "replace_option" to the request: s3_rest_controller(module, resourcename, replace_option=T("Remove existing data before import")) This will add the respective checkbox to the upload form. You may also want to provide a link to download a CSV template from the upload form. To do that, add the resource name to the request attributes: s3_rest_controller(module, resourcename, csv_template="<resourcename>") This will provide a link to: - static/formats/s3csv/<controller>/<resourcename>.csv at the top of the upload form. """ _debug("S3Importer.apply_method(%s)" % r) # Messages T = current.T messages = self.messages = Messages(T) messages.download_template = "Download Template" messages.invalid_file_format = "Invalid File Format" messages.unsupported_file_type = "Unsupported file type of %s" messages.stylesheet_not_found = "No Stylesheet %s could be found to manage the import file." messages.no_file = "No file submitted" messages.file_open_error = "Unable to open the file %s" messages.file_not_found = "The file to upload is missing" messages.no_records_to_import = "No records to import" messages.no_job_to_delete = "No job to delete, maybe it has already been deleted." messages.title_job_read = "Details of the selected import job" messages.title_job_list = "List of import items" messages.file_uploaded = "Import file uploaded" messages.upload_submit_btn = "Upload Data File" messages.open_btn = "Open" messages.view_btn = "View" messages.delete_btn = "Delete" messages.item_show_details = "Display Details" messages.job_total_records = "Total records in the Import Job" messages.job_records_selected = "Records selected" messages.job_deleted = "Import job deleted" messages.job_completed = "Job run on %s. With result of (%s)" messages.import_file = "Import File" messages.import_file_comment = "Upload a file formatted according to the Template." messages.user_name = "User Name" messages.commit_total_records_imported = "%s records imported" messages.commit_total_records_ignored = "%s records ignored" messages.commit_total_errors = "%s records in error" try: self.uploadTitle = current.response.s3.crud_strings[self.tablename].title_upload except: self.uploadTitle = T("Upload a %s import file" % r.function) # @todo: correct to switch this off for the whole session? 
current.session.s3.ocr_enabled = False # Reset all errors/warnings self.error = None self.warning = None # CSV upload configuration if "csv_stylesheet" in attr: self.csv_stylesheet = attr["csv_stylesheet"] else: self.csv_stylesheet = None self.csv_extra_fields = None self.csv_extra_data = None # Environment self.controller = r.controller self.function = r.function # Target table for the data import self.controller_resource = self.resource self.controller_table = self.table self.controller_tablename = self.tablename # Table for uploads self.__define_table() self.upload_resource = None self.item_resource = None # XSLT Path self.xslt_path = os.path.join(r.folder, r.XSLT_PATH) self.xslt_extension = r.XSLT_EXTENSION # Check authorization authorised = self.permit("create", self.upload_tablename) and \ self.permit("create", self.controller_tablename) if not authorised: if r.method is not None: r.unauthorised() else: return dict(form=None) # @todo: clean this up source = None transform = None upload_id = None items = None # @todo get the data from either get_vars or post_vars appropriately # for post -> commit_items would need to add the uploadID if "transform" in r.get_vars: transform = r.get_vars["transform"] if "filename" in r.get_vars: source = r.get_vars["filename"] if "job" in r.post_vars: upload_id = r.post_vars["job"] elif "job" in r.get_vars: upload_id = r.get_vars["job"] items = self._process_item_list(upload_id, r.vars) if "delete" in r.get_vars: r.http = "DELETE" # If we have an upload ID, then get upload and import job self.upload_id = upload_id query = (self.upload_table.id == upload_id) self.upload_job = current.db(query).select(limitby=(0, 1)).first() if self.upload_job: self.job_id = self.upload_job.job_id else: self.job_id = None # Now branch off to the appropriate controller function if r.http == "GET": if source != None: self.commit(source, transform) output = self.upload(r, **attr) if upload_id != None: output = self.display_job(upload_id) else: output = self.upload(r, **attr) elif r.http == "POST": if items != None: output = self.commit_items(upload_id, items) else: output = self.generate_job(r, **attr) elif r.http == "DELETE": if upload_id != None: output = self.delete_job(upload_id) else: r.error(405, current.manager.ERROR.BAD_METHOD) return output # ------------------------------------------------------------------------- def upload(self, r, **attr): """ This will display the upload form It will ask for a file to be uploaded or for a job to be selected. If a file is uploaded then it will guess at the file type and ask for the transform file to be used. The transform files will be in a dataTable with the module specific files shown first and after those all other known transform files. Once the transform file is selected the import process can be started which will generate an importJob, and a "POST" method will occur If a job is selected it will have two actions, open and delete. Open will mean that a "GET" method will occur, with the job details passed in. Whilst the delete action will trigger a "DELETE" method. 
""" _debug("S3Importer.upload()") request = self.request form = self._upload_form(r, **attr) output = self._create_upload_dataTable() if request.representation == "aadata": return output output.update(form=form, title=self.uploadTitle) return output # ------------------------------------------------------------------------- def generate_job(self, r, **attr): """ Generate an ImportJob from the submitted upload form """ _debug("S3Importer.display()") response = current.response s3 = response.s3 db = current.db table = self.upload_table title=self.uploadTitle form = self._upload_form(r, **attr) r = self.request r.read_body() sfilename = form.vars.file try: ofilename = r.post_vars["file"].filename except: form.errors.file = self.messages.no_file if form.errors: response.flash = "" output = self._create_upload_dataTable() output.update(form=form, title=title) elif not sfilename or \ ofilename not in r.files or r.files[ofilename] is None: response.flash = "" response.error = self.messages.file_not_found output = self._create_upload_dataTable() output.update(form=form, title=title) else: output = dict() query = (table.file == sfilename) db(query).update(controller=self.controller, function=self.function, filename=ofilename, user_id=current.session.auth.user.id) # must commit here to separate this transaction from # the trial import phase which will be rolled back. db.commit() extension = ofilename.rsplit(".", 1).pop() if extension not in ("csv", "xls"): response.flash = None response.error = self.messages.invalid_file_format return self.upload(r, **attr) upload_file = r.files[ofilename] if extension == "xls": if "xls_parser" in s3: upload_file.seek(0) upload_file = s3.xls_parser(upload_file.read()) extension = "csv" if upload_file is None: response.flash = None response.error = self.messages.file_not_found return self.upload(r, **attr) else: upload_file.seek(0) row = db(query).select(table.id, limitby=(0, 1)).first() upload_id = row.id if "single_pass" in r.vars: single_pass = r.vars["single_pass"] else: single_pass = None self._generate_import_job(upload_id, upload_file, extension, commit_job = single_pass) if upload_id is None: row = db(query).update(status = 2) # in error if self.error != None: response.error = self.error if self.warning != None: response.warning = self.warning response.flash = "" return self.upload(r, **attr) else: if single_pass: current.session.flash = self.messages.file_uploaded # For a single pass retain the vars from the original URL next_URL = URL(r=self.request, f=self.function, args=["import"], vars=current.request.get_vars ) redirect(next_URL) s3.dataTable_vars = {"job" : upload_id} return self.display_job(upload_id) return output # ------------------------------------------------------------------------- def display_job(self, upload_id): """ @todo: docstring? 
""" _debug("S3Importer.display_job()") request = self.request response = current.response db = current.db table = self.upload_table job_id = self.job_id output = dict() if job_id == None: # redirect to the start page (removes all vars) query = (table.id == upload_id) row = db(query).update(status = 2) # in error current.session.warning = self.messages.no_records_to_import redirect(URL(r=request, f=self.function, args=["import"])) # Get the status of the upload job query = (table.id == upload_id) row = db(query).select(table.status, table.modified_on, table.summary_added, table.summary_error, table.summary_ignored, limitby=(0, 1)).first() status = row.status # completed display details if status == 3: # Completed # @todo currently this is an unnecessary server call, # change for completed records to be a display details # and thus avoid the round trip. # but keep this code to protect against hand-crafted URLs # (and the 'go back' syndrome on the browser) result = (row.summary_added, row.summary_error, row.summary_ignored, ) self._display_completed_job(result, row.modified_on) redirect(URL(r=request, f=self.function, args=["import"])) # otherwise display import items response.view = self._view(request, "list.html") output = self._create_import_item_dataTable(upload_id, job_id) if request.representation == "aadata": return output if response.s3.error_report: error_report = "Errors|" + "|".join(response.s3.error_report) error_tip = A("All Errors", _class="errortip", _title=error_report) else: # @todo: restore the error tree from all items? error_tip = "" rowcount = len(self._get_all_items(upload_id)) rheader = DIV(TABLE( TR( TH("%s: " % self.messages.job_total_records), TD(rowcount, _id="totalAvaliable"), TH("%s: " % self.messages.job_records_selected), TD(0, _id="totalSelected"), TH(error_tip) ), )) output["title"] = self.messages.title_job_read output["rheader"] = rheader output["subtitle"] = self.messages.title_job_list return output # ------------------------------------------------------------------------- def commit(self, source, transform): """ @todo: docstring? """ _debug("S3Importer.commit(%s, %s)" % (source, transform)) db = current.db session = current.session request = self.request try: openFile = open(source, "r") except: session.error = self.messages.file_open_error % source redirect(URL(r=request, f=self.function)) # @todo: manage different file formats # @todo: find file format from request.extension fileFormat = "csv" # insert data in the table and get the ID try: user = session.auth.user.id except: user = None upload_id = self.upload_table.insert(controller=self.controller, function=self.function, filename = source, user_id = user, status = 1) db.commit() # create the import job result = self._generate_import_job(upload_id, openFile, fileFormat, stylesheet=transform ) if result == None: if self.error != None: if session.error == None: session.error = self.error else: session.error += self.error if self.warning != None: if session.warning == None: session.warning = self.warning else: session.warning += self.warning else: items = self._get_all_items(upload_id, True) # commit the import job self._commit_import_job(upload_id, items) result = self._update_upload_job(upload_id) # get the results and display msg = "%s : %s %s %s" % (source, self.messages.commit_total_records_imported, self.messages.commit_total_errors, self.messages.commit_total_records_ignored) msg = msg % result if session.flash == None: session.flash = msg else: session.flash += msg # @todo: return the upload_id? 
# ------------------------------------------------------------------------- def commit_items(self, upload_id, items): """ @todo: docstring? """ _debug("S3Importer.commit_items(%s, %s)" % (upload_id, items)) # Save the import items self._commit_import_job(upload_id, items) # Update the upload table # change the status to completed # record the summary details # delete the upload file result = self._update_upload_job(upload_id) # redirect to the start page (removes all vars) self._display_completed_job(result) redirect(URL(r=self.request, f=self.function, args=["import"])) # ------------------------------------------------------------------------- def delete_job(self, upload_id): """ Delete an uploaded file and the corresponding import job @param upload_id: the upload ID """ _debug("S3Importer.delete_job(%s)" % (upload_id)) db = current.db request = self.request resource = request.resource # use self.resource? response = current.response # Get the import job ID job_id = self.job_id # Delete the import job (if any) if job_id: result = resource.import_xml(None, id = None, tree = None, job_id = job_id, delete_job = True) # @todo: check result # now delete the upload entry query = (self.upload_table.id == upload_id) count = db(query).delete() # @todo: check that the record has been deleted # Now commit the changes db.commit() result = count # return to the main import screen # @todo: check result properly if result == False: response.warning = self.messages.no_job_to_delete else: response.flash = self.messages.job_deleted # redirect to the start page (remove all vars) self.next = self.request.url(vars=dict()) return # ======================================================================== # Utility methods # ======================================================================== def _upload_form(self, r, **attr): """ Create and process the upload form, including csv_extra_fields """ EXTRA_FIELDS = "csv_extra_fields" TEMPLATE = "csv_template" REPLACE_OPTION = "replace_option" session = current.session response = current.response s3 = response.s3 request = self.request table = self.upload_table formstyle = s3.crud.formstyle response.view = self._view(request, "list_create.html") if REPLACE_OPTION in attr: replace_option = attr[REPLACE_OPTION] if replace_option is not None: table.replace_option.readable = True table.replace_option.writable = True table.replace_option.label = replace_option fields = [f for f in table if f.readable or f.writable and not f.compute] if EXTRA_FIELDS in attr: extra_fields = attr[EXTRA_FIELDS] if extra_fields is not None: fields.extend([f["field"] for f in extra_fields if "field" in f]) self.csv_extra_fields = extra_fields labels, required = s3_mark_required(fields) if required: s3.has_required = True form = SQLFORM.factory(table_name=self.UPLOAD_TABLE_NAME, labels=labels, formstyle=formstyle, upload = os.path.join(request.folder, "uploads", "imports"), separator = "", message=self.messages.file_uploaded, *fields) args = ["s3csv"] template = attr.get(TEMPLATE, True) if template is True: args.extend([self.controller, "%s.csv" % self.function]) elif isinstance(template, basestring): args.extend([self.controller, "%s.csv" % template]) elif isinstance(template, (tuple, list)): args.extend(template[:-1]) args.append("%s.csv" % template[-1]) else: template = None if template is not None: url = URL(r=request, c="static", f="formats", args=args) try: # only add the download link if the template can be opened open("%s/../%s" % (r.folder, url)) form[0][0].insert(0, 
TR(TD(A(self.messages.download_template, _href=url)), _id="template__row")) except: pass if form.accepts(r.post_vars, session, formname="upload_form"): upload_id = table.insert(**table._filter_fields(form.vars)) if self.csv_extra_fields: self.csv_extra_data = Storage() for f in self.csv_extra_fields: label = f.get("label", None) if not label: continue field = f.get("field", None) value = f.get("value", None) if field: if field.name in form.vars: data = form.vars[field.name] else: data = field.default value = data requires = field.requires if not isinstance(requires, (list, tuple)): requires = [requires] if requires: requires = requires[0] if isinstance(requires, IS_EMPTY_OR): requires = requires.other try: options = requires.options() except: pass else: for k, v in options: if k == str(data): value = v elif value is None: continue self.csv_extra_data[label] = value s3.no_formats = True return form # ------------------------------------------------------------------------- def _create_upload_dataTable(self): """ List of previous Import jobs """ db = current.db request = self.request controller = self.controller function = self.function s3 = current.response.s3 table = self.upload_table s3.filter = (table.controller == controller) & \ (table.function == function) fields = ["id", "filename", "created_on", "user_id", "replace_option", "status"] self._use_upload_table() # Hide the list of prior uploads for now #output = self._dataTable(fields, sort_by = [[2,"desc"]]) output = dict() self._use_controller_table() if request.representation == "aadata": return output query = (table.status != 3) # Status of Pending or in-Error rows = db(query).select(table.id) restrictOpen = [str(row.id) for row in rows] query = (table.status == 3) # Status of Completed rows = db(query).select(table.id) restrictView = [str(row.id) for row in rows] s3.actions = [ dict(label=str(self.messages.open_btn), _class="action-btn", url=URL(r=request, c=controller, f=function, args=["import"], vars={"job":"[id]"}), restrict = restrictOpen ), dict(label=str(self.messages.view_btn), _class="action-btn", url=URL(r=request, c=controller, f=function, args=["import"], vars={"job":"[id]"}), restrict = restrictView ), dict(label=str(self.messages.delete_btn), _class="delete-btn", url=URL(r=request, c=controller, f=function, args=["import"], vars={"job":"[id]", "delete":"True" } ) ), ] # Display an Error if no job is attached with this record query = (table.status == 1) # Pending rows = db(query).select(table.id) s3.dataTableStyleAlert = [str(row.id) for row in rows] query = (table.status == 2) # in error rows = db(query).select(table.id) s3.dataTableStyleWarning = [str(row.id) for row in rows] return output # ------------------------------------------------------------------------- def _create_import_item_dataTable(self, upload_id, job_id): """ @todo: docstring? 
""" s3 = current.response.s3 represent = {"element" : self._item_element_represent} self._use_import_item_table(job_id) # Add a filter to the dataTable query s3.filter = (self.table.job_id == job_id) & \ (self.table.tablename == self.controller_tablename) # Get a list of the records that have an error of None query = (self.table.job_id == job_id) & \ (self.table.tablename == self.controller_tablename) rows = current.db(query).select(self.table.id, self.table.error) select_list = [] error_list = [] for row in rows: if row.error: error_list.append(str(row.id)) else: select_list.append("%s" % row.id) select_id = ",".join(select_list) output = self._dataTable(["id", "element", "error"], sort_by = [[1, "asc"]], represent=represent) self._use_controller_table() if self.request.representation == "aadata": return output # Highlight rows in error in red s3.dataTableStyleWarning = error_list s3.dataTableSelectable = True s3.dataTablePostMethod = True table = output["items"] job = INPUT(_type="hidden", _id="importUploadID", _name="job", _value="%s" % upload_id) mode = INPUT(_type="hidden", _id="importMode", _name="mode", _value="Inclusive") # only select the rows with no errors selected = INPUT(_type="hidden", _id="importSelected", _name="selected", _value="[%s]" % select_id) form = FORM(table, job, mode, selected) output["items"] = form s3.dataTableSelectSubmitURL = "import?job=%s&" % upload_id s3.actions = [ dict(label= str(self.messages.item_show_details), _class="action-btn", _jqclick="$('.importItem.'+id).toggle();", ), ] return output # ------------------------------------------------------------------------- def _generate_import_job(self, upload_id, openFile, fileFormat, stylesheet=None, commit_job=False): """ This will take a s3_import_upload record and generate the importJob @param uploadFilename: The name of the uploaded file @todo: complete parameter descriptions """ _debug("S3Importer._generate_import_job(%s, %s, %s, %s)" % (upload_id, openFile, fileFormat, stylesheet ) ) db = current.db request = self.request resource = request.resource # --------------------------------------------------------------------- # CSV if fileFormat == "csv" or fileFormat == "comma-separated-values": fmt = "csv" src = openFile # --------------------------------------------------------------------- # XML # @todo: implement #elif fileFormat == "xml": # --------------------------------------------------------------------- # S3JSON # @todo: implement #elif fileFormat == "s3json": # --------------------------------------------------------------------- # PDF # @todo: implement #elif fileFormat == "pdf": # --------------------------------------------------------------------- # Unsupported Format else: msg = self.messages.unsupported_file_type % fileFormat self.error = msg _debug(msg) return None # Get the stylesheet if stylesheet == None: stylesheet = self._get_stylesheet() if stylesheet == None: return None # before calling import tree ensure the db.table is the controller_table self.table = self.controller_table self.tablename = self.controller_tablename # Pass stylesheet arguments args = Storage() mode = request.get_vars.get("xsltmode", None) if mode is not None: args.update(mode=mode) # Generate the import job resource.import_xml(src, format=fmt, extra_data=self.csv_extra_data, stylesheet=stylesheet, ignore_errors = True, commit_job = commit_job, **args) job = resource.job if job is None: if resource.error: # Error self.error = resource.error return None else: # Nothing to import self.warning = 
self.messages.no_records_to_import return None else: # Job created job_id = job.job_id errors = current.xml.collect_errors(job) if errors: current.response.s3.error_report = errors query = (self.upload_table.id == upload_id) result = db(query).update(job_id=job_id) # @todo: add check that result == 1, if not we are in error # Now commit the changes db.commit() self.job_id = job_id return True # ------------------------------------------------------------------------- def _get_stylesheet(self, file_format="csv"): """ Get the stylesheet for transformation of the import @param file_format: the import source file format """ if file_format == "csv": xslt_path = os.path.join(self.xslt_path, "s3csv") else: xslt_path = os.path.join(self.xslt_path, file_format, "import.xsl") return xslt_path # Use the "csv_stylesheet" parameter to override the CSV stylesheet subpath # and filename, e.g. # s3_rest_controller(module, resourcename, # csv_stylesheet=("inv", "inv_item.xsl")) if self.csv_stylesheet: if isinstance(self.csv_stylesheet, (tuple, list)): stylesheet = os.path.join(xslt_path, *self.csv_stylesheet) else: stylesheet = os.path.join(xslt_path, self.controller, self.csv_stylesheet) else: xslt_filename = "%s.%s" % (self.function, self.xslt_extension) stylesheet = os.path.join(xslt_path, self.controller, xslt_filename) if os.path.exists(stylesheet) is False: msg = self.messages.stylesheet_not_found % stylesheet self.error = msg _debug(msg) return None return stylesheet # ------------------------------------------------------------------------- def _commit_import_job(self, upload_id, items): """ This will save all of the selected import items @todo: parameter descriptions? """ _debug("S3Importer._commit_import_job(%s, %s)" % (upload_id, items)) db = current.db resource = self.request.resource # Load the items from the s3_import_item table self.importDetails = dict() table = self.upload_table query = (table.id == upload_id) row = db(query).select(table.job_id, table.replace_option, limitby=(0, 1)).first() if row is None: return False else: job_id = row.job_id current.response.s3.import_replace = row.replace_option itemTable = S3ImportJob.define_item_table() if itemTable != None: #**************************************************************** # EXPERIMENTAL # This doesn't delete related items # but import_tree will tidy it up later #**************************************************************** # get all the items selected for import rows = self._get_all_items(upload_id, as_string=True) # loop through each row and delete the items not required self._store_import_details(job_id, "preDelete") for id in rows: if str(id) not in items: # @todo: replace with a helper method from the API _debug("Deleting item.id = %s" % id) query = (itemTable.id == id) db(query).delete() #**************************************************************** # EXPERIMENTAL #**************************************************************** # set up the table we will import data into self.table = self.controller_table self.tablename = self.controller_tablename self._store_import_details(job_id, "preImportTree") # Now commit the remaining items msg = resource.import_xml(None, job_id = job_id, ignore_errors = True) return resource.error is None # ------------------------------------------------------------------------- def _store_import_details(self, job_id, key): """ This will store the details from an importJob @todo: parameter descriptions? 
""" _debug("S3Importer._store_import_details(%s, %s)" % (job_id, key)) itemTable = S3ImportJob.define_item_table() query = (itemTable.job_id == job_id) & \ (itemTable.tablename == self.controller_tablename) rows = current.db(query).select(itemTable.data, itemTable.error) items = [dict(data=row.data, error=row.error) for row in rows] self.importDetails[key] = items # ------------------------------------------------------------------------- def _update_upload_job(self, upload_id): """ This will record the results from the import, and change the status of the upload job @todo: parameter descriptions? @todo: report errors in referenced records, too """ _debug("S3Importer._update_upload_job(%s)" % (upload_id)) request = self.request resource = request.resource db = current.db totalPreDelete = len(self.importDetails["preDelete"]) totalPreImport = len(self.importDetails["preImportTree"]) totalIgnored = totalPreDelete - totalPreImport if resource.error_tree is None: totalErrors = 0 else: totalErrors = len(resource.error_tree.findall( "resource[@name='%s']" % resource.tablename)) totalRecords = totalPreImport - totalErrors if totalRecords < 0: totalRecords = 0 query = (self.upload_table.id == upload_id) result = db(query).update(summary_added=totalRecords, summary_error=totalErrors, summary_ignored = totalIgnored, status = 3) # Now commit the changes db.commit() return (totalRecords, totalErrors, totalIgnored) # ------------------------------------------------------------------------- def _display_completed_job(self, totals, timestmp=None): """ Generate a summary flash message for a completed import job @param totals: the job totals as tuple (total imported, total errors, total ignored) @param timestmp: the timestamp of the completion """ session = current.session msg = "%s - %s - %s" % \ (self.messages.commit_total_records_imported, self.messages.commit_total_errors, self.messages.commit_total_records_ignored) msg = msg % totals if timestmp != None: session.flash = self.messages.job_completed % \ (self.date_represent(timestmp), msg) elif totals[1] is not 0: session.error = msg elif totals[2] is not 0: session.warning = msg else: session.flash = msg # ------------------------------------------------------------------------- def _dataTable(self, list_fields = [], sort_by = [[1, "asc"]], represent={}, ): """ Method to get the data for the dataTable This can be either a raw html representation or and ajax call update Additional data will be cached to limit calls back to the server @param list_fields: list of field names @param sort_by: list of sort by columns @param represent: a dict of field callback functions used to change how the data will be displayed @return: a dict() In html representations this will be a table of the data plus the sortby instructions In ajax this will be a json response In addition the following values will be made available: totalRecords Number of records in the filtered data set totalDisplayRecords Number of records to display start Start point in the ordered data set limit Number of records in the ordered set NOTE: limit - totalDisplayRecords = total cached """ # ******************************************************************** # Common tasks # ******************************************************************** db = current.db session = current.session request = self.request response = current.response resource = self.resource s3 = response.s3 representation = request.representation table = self.table tablename = self.tablename vars = request.get_vars output = dict() # 
Check permission to read this table authorised = self.permit("read", tablename) if not authorised: request.unauthorised() # List of fields to select from # fields is a list of Field objects # list_field is a string list of field names if list_fields == []: fields = resource.readable_fields() else: fields = [table[f] for f in list_fields if f in table.fields] if not fields: fields = [] # attach any represent callbacks for f in fields: if f.name in represent: f.represent = represent[f.name] # Make sure that we have the table id as the first column if fields[0].name != table.fields[0]: fields.insert(0, table[table.fields[0]]) list_fields = [f.name for f in fields] # Filter if s3.filter is not None: self.resource.add_filter(s3.filter) # ******************************************************************** # ajax call # ******************************************************************** if representation == "aadata": start = vars.get("iDisplayStart", None) limit = vars.get("iDisplayLength", None) if limit is not None: try: start = int(start) limit = int(limit) except ValueError: start = None limit = None # use default else: start = None # use default # Using the sort variables sent from dataTables if vars.iSortingCols: orderby = self.ssp_orderby(resource, list_fields) # Echo sEcho = int(vars.sEcho or 0) # Get the list items = resource.sqltable(fields=list_fields, start=start, limit=limit, orderby=orderby, download_url=self.download_url, as_page=True) or [] # Ugly hack to change any occurrence of [id] with the true id # Needed because the represent doesn't know the id for i in range(len(items)): id = items[i][0] for j in range(len(items[i])): new = items[i][j].replace("[id]",id) items[i][j] = new totalrows = self.resource.count() result = dict(sEcho = sEcho, iTotalRecords = totalrows, iTotalDisplayRecords = totalrows, aaData = items) output = jsons(result) # ******************************************************************** # html 'initial' call # ******************************************************************** else: # catch all start = 0 limit = 1 # Sort by vars["iSortingCols"] = len(sort_by) # generate the dataTables.js variables for sorting index = 0 for col in sort_by: colName = "iSortCol_%s" % str(index) colValue = col[0] dirnName = "sSortDir_%s" % str(index) if len(col) > 1: dirnValue = col[1] else: dirnValue = "asc" vars[colName] = colValue vars[dirnName] = dirnValue # Now using these sort variables generate the order by statement orderby = self.ssp_orderby(resource, list_fields) del vars["iSortingCols"] for col in sort_by: del vars["iSortCol_%s" % str(index)] del vars["sSortDir_%s" % str(index)] # Get the first row for a quick up load items = resource.sqltable(fields=list_fields, start=start, limit=1, orderby=orderby, download_url=self.download_url) totalrows = resource.count() if items: if totalrows: if s3.dataTable_iDisplayLength: limit = 2 * s3.dataTable_iDisplayLength else: limit = 50 # Add a test on the first call here: # Now get the limit rows for ajax style update of table sqltable = resource.sqltable(fields=list_fields, start=start, limit=limit, orderby=orderby, download_url=self.download_url, as_page=True) aadata = dict(aaData = sqltable or []) # Ugly hack to change any occurrence of [id] with the true id # Needed because the represent doesn't know the id for i in range(len(aadata["aaData"])): id = aadata["aaData"][i][0] for j in range(len(aadata["aaData"][i])): new = aadata["aaData"][i][j].replace("[id]",id) aadata["aaData"][i][j] = new 
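                # Illustrative example of the substitution above: if a
                # represent or action URL template produced
                # ".../import?job=[id]" and the row id is "42", the cell now
                # holds ".../import?job=42", so the client-side table needs
                # no further knowledge of record ids.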
aadata.update(iTotalRecords=totalrows, iTotalDisplayRecords=totalrows) response.aadata = jsons(aadata) s3.start = 0 s3.limit = limit else: # No items in database # s3import tables don't have a delete field but kept for the record if "deleted" in table: available_records = db(table.deleted == False) else: available_records = db(table.id > 0) # check for any records on an unfiltered table if available_records.select(table.id, limitby=(0, 1)).first(): items = self.crud_string(tablename, "msg_no_match") else: items = self.crud_string(tablename, "msg_list_empty") output.update(items=items, sortby=sort_by) # Value to be added to the dataTable ajax call s3.dataTable_Method = "import" return output # ------------------------------------------------------------------------- def _item_element_represent(self, value): """ Represent the element in an import item for dataTable display @param value: the string containing the element """ T = current.T db = current.db value = S3XML.xml_decode(value) try: element = etree.fromstring(value) except: # XMLSyntaxError: return the element as-is return DIV(value) tablename = element.get("name") table = current.db[tablename] output = DIV() details = TABLE(_class="importItem [id]") header, rows = self._add_item_details(element.findall("data"), table) if header is not None: output.append(header) # Add components, if present components = element.findall("resource") for component in components: ctablename = component.get("name") ctable = db[ctablename] self._add_item_details(component.findall("data"), ctable, details=rows, prefix=True) if rows: details.append(TBODY(rows)) # Add error messages, if present errors = current.xml.collect_errors(element) if errors: details.append(TFOOT(TR(TH("%s:" % T("Errors")), TD(UL([LI(e) for e in errors]))))) if rows == [] and components == []: # At this stage we don't have anything to display to see if we can # find something to show. 
This could be the case when a table being
            # imported is a resolver for a many-to-many relationship
            refdetail = TABLE(_class="importItem [id]")
            references = element.findall("reference")
            for reference in references:
                tuid = reference.get("tuid")
                resource = reference.get("resource")
                refdetail.append(TR(TD(resource), TD(tuid)))
            output.append(refdetail)
        else:
            output.append(details)
        return str(output)

    # -------------------------------------------------------------------------
    @staticmethod
    def _add_item_details(data, table, details=None, prefix=False):
        """
            Add details of the item element

            @param data: the list of data elements in the item element
            @param table: the table for the data
            @param details: the existing details rows list (to append to)
            @param prefix: prefix the field names with the table name
        """

        tablename = table._tablename
        if details is None:
            details = []
        first = None
        firstString = None
        header = None
        for child in data:
            f = child.get("field", None)
            if f not in table.fields:
                continue
            elif f == "wkt":
                # Skip bulky WKT fields
                continue
            field = table[f]
            ftype = str(field.type)
            value = child.get("value", None)
            if not value:
                value = child.text
            try:
                value = S3Importer._decode_data(field, value)
            except:
                pass
            if value:
                value = S3XML.xml_encode(unicode(value))
            else:
                value = ""
            if f is not None and value is not None:
                headerText = P(B("%s: " % f), value)
                if not first:
                    first = headerText
                if ftype == "string" and not firstString:
                    firstString = headerText
                if f == "name":
                    header = headerText
                if prefix:
                    details.append(TR(TH("%s.%s:" % (tablename, f)),
                                      TD(value)))
                else:
                    details.append(TR(TH("%s:" % f), TD(value)))
        if not header:
            if firstString:
                header = firstString
            else:
                header = first
        return (header, details)

    # -------------------------------------------------------------------------
    @staticmethod
    def _decode_data(field, value):
        """
            Try to decode string data into its original type

            @param field: the Field instance
            @param value: the stringified value

            @todo: replace this by ordinary decoder
        """

        if field.type == "string" or \
           field.type == "password" or \
           field.type == "upload" or \
           field.type == "text":
            return value
        elif field.type == "integer" or field.type == "id":
            return int(value)
        elif field.type == "double" or field.type == "decimal":
            return float(value)
        elif field.type == "boolean":
            if value and not str(value)[:1].upper() in ["F", "0"]:
                return "T"
            else:
                return "F"
        elif field.type == "date":
            return value # @todo fix this to get a date
        elif field.type == "time":
            return value # @todo fix this to get a time
        elif field.type == "datetime":
            return value # @todo fix this to get a datetime
        else:
            return value

    # -------------------------------------------------------------------------
    @staticmethod
    def date_represent(date_obj):
        """
            Represent a datetime object as string

            @param date_obj: the datetime object

            @todo: replace by S3DateTime method?
""" return date_obj.strftime("%d %B %Y, %I:%M%p") # ------------------------------------------------------------------------- def _process_item_list(self, upload_id, vars): """ Get the list of IDs for the selected items from the "mode" and "selected" request variables @param upload_id: the upload_id @param vars: the request variables """ items = None if "mode" in vars: mode = vars["mode"] if "selected" in vars: selected = vars["selected"].split(",") else: selected = [] if mode == "Inclusive": items = selected elif mode == "Exclusive": all_items = self._get_all_items(upload_id, as_string=True) items = [i for i in all_items if i not in selected] return items # ------------------------------------------------------------------------- def _get_all_items(self, upload_id, as_string=False): """ Get a list of the record IDs of all import items for the the given upload ID @param upload_id: the upload ID @param as_string: represent each ID as string """ item_table = S3ImportJob.define_item_table() upload_table = self.upload_table query = (upload_table.id == upload_id) & \ (item_table.job_id == upload_table.job_id) & \ (item_table.tablename == self.controller_tablename) rows = current.db(query).select(item_table.id) if as_string: items = [str(row.id) for row in rows] else: items = [row.id for row in rows] return items # ------------------------------------------------------------------------- def _use_upload_table(self): """ Set the resource and the table to being s3_import_upload """ if self.upload_resource == None: from s3resource import S3Resource (prefix, name) = self.UPLOAD_TABLE_NAME.split("_",1) self.upload_resource = S3Resource(prefix, name) self.resource = self.upload_resource self.table = self.upload_table self.tablename = self.upload_tablename # ------------------------------------------------------------------------- def _use_controller_table(self): """ Set the resource and the table to be the imported resource """ self.resource = self.controller_resource self.table = self.controller_table self.tablename = self.controller_tablename # ------------------------------------------------------------------------- def _use_import_item_table(self, job_id): """ Set the resource and the table to being s3_import_item """ if self.item_resource == None: from s3resource import S3Resource (prefix, name) = S3ImportJob.ITEM_TABLE_NAME.split("_",1) self.item_resource = S3Resource(prefix, name) self.resource = self.item_resource self.tablename = S3ImportJob.ITEM_TABLE_NAME self.table = S3ImportJob.define_item_table() # ------------------------------------------------------------------------- def __define_table(self): """ Configures the upload table """ _debug("S3Importer.__define_table()") T = current.T db = current.db request = current.request self.upload_tablename = self.UPLOAD_TABLE_NAME import_upload_status = { 1: T("Pending"), 2: T("In error"), 3: T("Completed"), } def user_name_represent(id): # @todo: use s3_present_user? 
rep_str = "-" table = db.auth_user query = (table.id == id) row = db(query).select(table.first_name, table.last_name, limitby=(0, 1)).first() if row: rep_str = "%s %s" % (row.first_name, row.last_name) return rep_str def status_represent(index): if index == None: return "Unknown" # @todo: use messages (internationalize) else: return import_upload_status[index] now = request.utcnow table = self.define_upload_table() table.file.upload_folder = os.path.join(request.folder, "uploads", #"imports" ) table.file.comment = DIV(_class="tooltip", _title="%s|%s" % (self.messages.import_file, self.messages.import_file_comment)) table.file.label = self.messages.import_file table.status.requires = IS_IN_SET(import_upload_status, zero=None) table.status.represent = status_represent table.user_id.label = self.messages.user_name table.user_id.represent = user_name_represent table.created_on.default = now table.created_on.represent = self.date_represent table.modified_on.default = now table.modified_on.update = now table.modified_on.represent = self.date_represent table.replace_option.label = T("Replace") self.upload_table = db[self.UPLOAD_TABLE_NAME] # ------------------------------------------------------------------------- @classmethod def define_upload_table(cls): """ Defines the upload table """ db = current.db uploadfolder = os.path.join(current.request.folder, "uploads", ) if cls.UPLOAD_TABLE_NAME not in db: upload_table = db.define_table(cls.UPLOAD_TABLE_NAME, Field("controller", readable=False, writable=False), Field("function", readable=False, writable=False), Field("file", "upload", uploadfolder=os.path.join(current.request.folder, "uploads", "imports"), autodelete=True), Field("filename", readable=False, writable=False), Field("status", "integer", default=1, readable=False, writable=False), Field("extra_data", readable=False, writable=False), Field("replace_option", "boolean", default=False, readable=False, writable=False), Field("job_id", length=128, readable=False, writable=False), Field("user_id", "integer", readable=False, writable=False), Field("created_on", "datetime", readable=False, writable=False), Field("modified_on", "datetime", readable=False, writable=False), Field("summary_added", "integer", readable=False, writable=False), Field("summary_error", "integer", readable=False, writable=False), Field("summary_ignored", "integer", readable=False, writable=False), Field("completed_details", "text", readable=False, writable=False)) else: upload_table = db[cls.UPLOAD_TABLE_NAME] return upload_table # ============================================================================= class S3ImportItem(object): """ Class representing an import item (=a single record) """ METHOD = Storage( CREATE="create", UPDATE="update", DELETE="delete" ) POLICY = Storage( THIS="THIS", # keep local instance OTHER="OTHER", # update unconditionally NEWER="NEWER", # update if import is newer MASTER="MASTER" # update if import is master ) # ------------------------------------------------------------------------- def __init__(self, job): """ Constructor @param job: the import job this item belongs to """ self.job = job self.ERROR = current.manager.ERROR # Locking and error handling self.lock = False self.error = None # Identification import uuid self.item_id = uuid.uuid4() # unique ID for this item self.id = None self.uid = None # Data elements self.table = None self.tablename = None self.element = None self.data = None self.original = None self.components = [] self.references = [] self.load_components = [] 
self.load_references = [] self.parent = None self.skip = False # Conflict handling self.mci = 2 self.mtime = datetime.utcnow() self.modified = True self.conflict = False # Allowed import methods self.strategy = job.strategy # Update and conflict resolution policies self.update_policy = job.update_policy self.conflict_policy = job.conflict_policy # Actual import method self.method = None self.onvalidation = None self.onaccept = None # Item import status flags self.accepted = None self.permitted = False self.committed = False # Writeback hook for circular references: # Items which need a second write to update references self.update = [] # ------------------------------------------------------------------------- def __repr__(self): """ Helper method for debugging """ _str = "<S3ImportItem %s {item_id=%s uid=%s id=%s error=%s data=%s}>" % \ (self.table, self.item_id, self.uid, self.id, self.error, self.data) return _str # ------------------------------------------------------------------------- def parse(self, element, original=None, table=None, tree=None, files=None): """ Read data from a <resource> element @param element: the element @param table: the DB table @param tree: the import tree @param files: uploaded files @returns: True if successful, False if not (sets self.error) """ db = current.db xml = current.xml manager = current.manager validate = manager.validate s3db = current.s3db self.element = element if table is None: tablename = element.get(xml.ATTRIBUTE.name, None) try: table = s3db[tablename] except: self.error = self.ERROR.BAD_RESOURCE element.set(xml.ATTRIBUTE.error, self.error) return False self.table = table self.tablename = table._tablename if original is None: original = manager.original(table, element) data = xml.record(table, element, files=files, original=original, validate=validate) if data is None: self.error = self.ERROR.VALIDATION_ERROR self.accepted = False if not element.get(xml.ATTRIBUTE.error, False): element.set(xml.ATTRIBUTE.error, str(self.error)) return False self.data = data if original is not None: self.original = original self.id = original[table._id.name] if xml.UID in original: self.uid = original[xml.UID] self.data.update({xml.UID:self.uid}) elif xml.UID in data: self.uid = data[xml.UID] if xml.MTIME in data: self.mtime = data[xml.MTIME] if xml.MCI in data: self.mci = data[xml.MCI] _debug("New item: %s" % self) return True # ------------------------------------------------------------------------- def deduplicate(self): RESOLVER = "deduplicate" if self.id: return table = self.table if table is None: return if self.original is not None: original = self.original else: original = current.manager.original(table, self.data) if original is not None: self.original = original self.id = original[table._id.name] UID = current.xml.UID if UID in original: self.uid = original[UID] self.data.update({UID:self.uid}) self.method = self.METHOD.UPDATE else: resolve = current.s3db.get_config(self.tablename, RESOLVER) if self.data and resolve: resolve(self) return # ------------------------------------------------------------------------- def authorize(self): """ Authorize the import of this item, sets self.permitted """ db = current.db manager = current.manager authorize = manager.permit self.permitted = False if not self.table: return False prefix = self.tablename.split("_", 1)[0] if prefix in manager.PROTECTED: return False if not authorize: self.permitted = True self.method = self.METHOD.CREATE if self.id: if self.data.deleted is True: self.method = self.METHOD.DELETE 
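                    # Deleted records carry no payload to validate, so the
                    # item is accepted as-is here; the actual removal is
                    # handled by the DELETE branch in commit()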
self.accepted = True else: if not self.original: query = (self.table.id == self.id) self.original = db(query).select(limitby=(0, 1)).first() if self.original: self.method = self.METHOD.UPDATE if self.method == self.METHOD.CREATE: self.id = 0 if authorize: self.permitted = authorize(self.method, self.tablename, record_id=self.id) return self.permitted # ------------------------------------------------------------------------- def validate(self): """ Validate this item (=record onvalidation), sets self.accepted """ if self.accepted is not None: return self.accepted if self.data is None or not self.table: self.accepted = False return False form = Storage() form.method = self.method form.vars = self.data if self.id: form.vars.id = self.id form.errors = Storage() tablename = self.tablename key = "%s_onvalidation" % self.method s3db = current.s3db onvalidation = s3db.get_config(tablename, key, s3db.get_config(tablename, "onvalidation")) if onvalidation: try: callback(onvalidation, form, tablename=tablename) except: pass # @todo need a better handler here. self.accepted = True if form.errors: error = current.xml.ATTRIBUTE.error for k in form.errors: e = self.element.findall("data[@field='%s']" % k) if not e: e = self.element.findall("reference[@field='%s']" % k) if not e: e = self.element form.errors[k] = "[%s] %s" % (k, form.errors[k]) else: e = e[0] e.set(error, str(form.errors[k]).decode("utf-8")) self.error = self.ERROR.VALIDATION_ERROR self.accepted = False return self.accepted # ------------------------------------------------------------------------- def commit(self, ignore_errors=False): """ Commit this item to the database @param ignore_errors: skip invalid components (still reports errors) """ db = current.db s3db = current.s3db xml = current.xml manager = current.manager table = self.table # Check if already committed if self.committed: # already committed return True # If the parent item gets skipped, then skip this item as well if self.parent is not None and self.parent.skip: return True _debug("Committing item %s" % self) # Resolve references self._resolve_references() # Validate if not self.validate(): _debug("Validation error: %s (%s)" % (self.error, xml.tostring(self.element, pretty_print=True))) self.skip = True return ignore_errors elif self.components: for component in self.components: if not component.validate(): if hasattr(component, "tablename"): tn = component.tablename else: tn = None _debug("Validation error, component=%s" % tn) component.skip = True # Skip this item on any component validation errors # unless ignore_errors is True if ignore_errors: continue else: self.skip = True return False # De-duplicate self.deduplicate() # Log this item if manager.log is not None: manager.log(self) # Authorize item if not self.authorize(): _debug("Not authorized - skip") self.error = manager.ERROR.NOT_PERMITTED self.skip = True return ignore_errors _debug("Method: %s" % self.method) # Check if import method is allowed in strategy if not isinstance(self.strategy, (list, tuple)): self.strategy = [self.strategy] if self.method not in self.strategy: _debug("Method not in strategy - skip") self.error = manager.ERROR.NOT_PERMITTED self.skip = True return True this = self.original if not this and self.id and \ self.method in (self.METHOD.UPDATE, self.METHOD.DELETE): query = (table.id == self.id) this = db(query).select(limitby=(0, 1)).first() this_mtime = None this_mci = 0 if this: if xml.MTIME in table.fields: this_mtime = xml.as_utc(this[xml.MTIME]) if xml.MCI in table.fields: this_mci 
= this[xml.MCI] self.mtime = xml.as_utc(self.mtime) # Conflict detection this_modified = True self.modified = True self.conflict = False last_sync = xml.as_utc(self.job.last_sync) if last_sync: if this_mtime and this_mtime < last_sync: this_modified = False if self.mtime and self.mtime < last_sync: self.modified = False if self.modified and this_modified: self.conflict = True if self.conflict and \ self.method in (self.METHOD.UPDATE, self.METHOD.DELETE): _debug("Conflict: %s" % self) if self.job.onconflict: self.job.onconflict(self) if self.data is not None: data = Storage(self.data) else: data = Storage() # Update existing record if self.method == self.METHOD.UPDATE: if this: if "deleted" in this and this.deleted: policy = self._get_update_policy(None) if policy == self.POLICY.NEWER and \ this_mtime and this_mtime > self.mtime or \ policy == self.POLICY.MASTER and \ (this_mci == 0 or self.mci != 1): self.skip = True return True fields = data.keys() for f in fields: if f not in this: continue if isinstance(this[f], datetime): if xml.as_utc(data[f]) == xml.as_utc(this[f]): del data[f] continue else: if data[f] == this[f]: del data[f] continue remove = False policy = self._get_update_policy(f) if policy == self.POLICY.THIS: remove = True elif policy == self.POLICY.NEWER: if this_mtime and this_mtime > self.mtime: remove = True elif policy == self.POLICY.MASTER: if this_mci == 0 or self.mci != 1: remove = True if remove: del data[f] self.data.update({f:this[f]}) if "deleted" in this and this.deleted: # Undelete re-imported records: data.update(deleted=False) if "deleted_fk" in table: data.update(deleted_fk="") if "created_by" in table: data.update(created_by=table.created_by.default) if "modified_by" in table: data.update(modified_by=table.modified_by.default) if not self.skip and not self.conflict and \ (len(data) or self.components or self.references): if self.uid and xml.UID in table: data.update({xml.UID:self.uid}) if xml.MTIME in table: data.update({xml.MTIME: self.mtime}) if xml.MCI in data: # retain local MCI on updates del data[xml.MCI] query = (table._id == self.id) try: success = db(query).update(**dict(data)) except: self.error = sys.exc_info()[1] self.skip = True return False if success: self.committed = True else: # Nothing to update self.committed = True # Create new record elif self.method == self.METHOD.CREATE: # Do not apply field policy to UID and MCI UID = xml.UID if UID in data: del data[UID] MCI = xml.MCI if MCI in data: del data[MCI] for f in data: policy = self._get_update_policy(f) if policy == self.POLICY.MASTER and self.mci != 1: del data[f] if len(data) or self.components or self.references: # Restore UID and MCI if self.uid and UID in table.fields: data.update({UID:self.uid}) if MCI in table.fields: data.update({MCI:self.mci}) # Insert the new record try: success = table.insert(**dict(data)) except: self.error = sys.exc_info()[1] self.skip = True return False if success: self.id = success self.committed = True else: # Nothing to create self.skip = True return True # Delete local record elif self.method == self.METHOD.DELETE: if this: if this.deleted: self.skip = True policy = self._get_update_policy(None) if policy == self.POLICY.THIS: self.skip = True elif policy == self.POLICY.NEWER and \ (this_mtime and this_mtime > self.mtime): self.skip = True elif policy == self.POLICY.MASTER and \ (this_mci == 0 or self.mci != 1): self.skip = True else: self.skip = True if not self.skip and not self.conflict: prefix, name = self.tablename.split("_", 1) resource = 
manager.define_resource(prefix, name, id=self.id) ondelete = s3db.get_config(self.tablename, "ondelete") success = resource.delete(ondelete=ondelete, cascade=True) if resource.error: self.error = resource.error self.skip = True return ignore_errors _debug("Success: %s, id=%s %sd" % (self.tablename, self.id, self.skip and "skippe" or \ self.method)) return True # Audit + onaccept on successful commits if self.committed: form = Storage() form.method = self.method form.vars = self.data tablename = self.tablename prefix, name = tablename.split("_", 1) if self.id: form.vars.id = self.id if manager.audit is not None: manager.audit(self.method, prefix, name, form=form, record=self.id, representation="xml") s3db.update_super(table, form.vars) if self.method == self.METHOD.CREATE: current.auth.s3_set_record_owner(table, self.id) key = "%s_onaccept" % self.method onaccept = s3db.get_config(tablename, key, s3db.get_config(tablename, "onaccept")) if onaccept: callback(onaccept, form, tablename=self.tablename) # Update referencing items if self.update and self.id: for u in self.update: item = u.get("item", None) if not item: continue field = u.get("field", None) if isinstance(field, (list, tuple)): pkey, fkey = field query = table.id == self.id row = db(query).select(table[pkey], limitby=(0, 1)).first() if row: item._update_reference(fkey, row[pkey]) else: item._update_reference(field, self.id) _debug("Success: %s, id=%s %sd" % (self.tablename, self.id, self.skip and "skippe" or \ self.method)) return True # ------------------------------------------------------------------------- def _get_update_policy(self, field): """ Get the update policy for a field (if the item will update an existing record) @param field: the name of the field """ if isinstance(self.update_policy, dict): r = self.update_policy.get(field, self.update_policy.get("__default__", self.POLICY.THIS)) else: r = self.update_policy if not r in self.POLICY.values(): r = self.POLICY.THIS return r # ------------------------------------------------------------------------- def _resolve_references(self): """ Resolve the references of this item (=look up all foreign keys from other items of the same job). If a foreign key is not yet available, it will be scheduled for later update. 
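
            Example (schematic, for illustration only): a resolved reference
            is held as
                Storage(field="organisation_id",
                        entry=Storage(tablename="org_organisation",
                                      element=..., uid=..., id=...,
                                      item_id=...))
            where entry.item_id points to the job item which imports the
            referenced record; field can also be a (pkey, fkey) tuple for
            keys referencing super-entities.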
""" if not self.table: return items = self.job.items for reference in self.references: item = None field = reference.field entry = reference.entry if not entry: continue # Resolve key tuples if isinstance(field, (list,tuple)): pkey, fkey = field else: pkey, fkey = ("id", field) # Resolve the key table name ktablename, key, multiple = s3_get_foreign_key(self.table[fkey]) if not ktablename: if self.tablename == "auth_user" and \ fkey == "organisation_id": ktablename = "org_organisation" else: continue if entry.tablename: ktablename = entry.tablename try: ktable = current.s3db[ktablename] except: continue # Resolve the foreign key (value) fk = entry.id if entry.item_id: item = items[entry.item_id] if item: fk = item.id if fk and pkey != "id": row = current.db(ktable._id == fk).select(ktable[pkey], limitby=(0, 1)).first() if not row: fk = None continue else: fk = row[pkey] # Update record data if fk: if multiple: val = self.data.get(fkey, []) if fk not in val: val.append(fk) self.data[fkey] = val else: self.data[fkey] = fk else: if fkey in self.data and not multiple: del self.data[fkey] if item: item.update.append(dict(item=self, field=fkey)) # ------------------------------------------------------------------------- def _update_reference(self, field, value): """ Helper method to update a foreign key in an already written record. Will be called by the referenced item after (and only if) it has been committed. This is only needed if the reference could not be resolved before commit due to circular references. @param field: the field name of the foreign key @param value: the value of the foreign key """ if not value or not self.table: return db = current.db if self.id and self.permitted: fieldtype = str(self.table[field].type) if fieldtype.startswith("list:reference"): query = (self.table.id == self.id) record = db(query).select(self.table[field], limitby=(0,1)).first() if record: values = record[field] if value not in values: values.append(value) db(self.table.id == self.id).update(**{field:values}) else: db(self.table.id == self.id).update(**{field:value}) # ------------------------------------------------------------------------- def store(self, item_table=None): """ Store this item in the DB """ _debug("Storing item %s" % self) if item_table is None: return None db = current.db query = item_table.item_id == self.item_id row = db(query).select(item_table.id, limitby=(0, 1)).first() if row: record_id = row.id else: record_id = None record = Storage(job_id = self.job.job_id, item_id = self.item_id, tablename = self.tablename, record_uid = self.uid, error = self.error) if self.element is not None: element_str = current.xml.tostring(self.element, xml_declaration=False) record.update(element=element_str) if self.data is not None: data = Storage() for f in self.data.keys(): table = self.table if f not in table.fields: continue fieldtype = str(self.table[f].type) if fieldtype == "id" or s3_has_foreign_key(self.table[f]): continue data.update({f:self.data[f]}) data_str = cPickle.dumps(data) record.update(data=data_str) ritems = [] for reference in self.references: field = reference.field entry = reference.entry store_entry = None if entry: if entry.item_id is not None: store_entry = dict(field=field, item_id=str(entry.item_id)) elif entry.uid is not None: store_entry = dict(field=field, tablename=entry.tablename, uid=str(entry.uid)) if store_entry is not None: ritems.append(json.dumps(store_entry)) if ritems: record.update(ritems=ritems) citems = [c.item_id for c in self.components] if citems: 
record.update(citems=citems) if self.parent: record.update(parent=self.parent.item_id) if record_id: db(item_table.id == record_id).update(**record) else: record_id = item_table.insert(**record) _debug("Record ID=%s" % record_id) return record_id # ------------------------------------------------------------------------- def restore(self, row): """ Restore an item from a item table row. This does not restore the references (since this can not be done before all items are restored), must call job.restore_references() to do that @param row: the item table row """ xml = current.xml self.item_id = row.item_id self.accepted = None self.permitted = False self.committed = False tablename = row.tablename self.id = None self.uid = row.record_uid if row.data is not None: self.data = cPickle.loads(row.data) else: self.data = Storage() data = self.data if xml.MTIME in data: self.mtime = data[xml.MTIME] if xml.MCI in data: self.mci = data[xml.MCI] UID = xml.UID if UID in data: self.uid = data[UID] self.element = etree.fromstring(row.element) if row.citems: self.load_components = row.citems if row.ritems: self.load_references = [json.loads(ritem) for ritem in row.ritems] self.load_parent = row.parent try: table = current.s3db[tablename] except: self.error = self.ERROR.BAD_RESOURCE return False else: self.table = table self.tablename = tablename original = current.manager.original(table, self.data) if original is not None: self.original = original self.id = original[table._id.name] if UID in original: self.uid = original[UID] self.data.update({UID:self.uid}) self.error = row.error if self.error and not self.data: # Validation error return False return True # ============================================================================= class S3ImportJob(): """ Class to import an element tree into the database """ JOB_TABLE_NAME = "s3_import_job" ITEM_TABLE_NAME = "s3_import_item" # ------------------------------------------------------------------------- def __init__(self, manager, table, tree=None, files=None, job_id=None, strategy=None, update_policy=None, conflict_policy=None, last_sync=None, onconflict=None): """ Constructor @param manager: the S3RequestManager instance performing this job @param tree: the element tree to import @param files: files attached to the import (for upload fields) @param job_id: restore job from database (record ID or job_id) @param strategy: the import strategy @param update_policy: the update policy @param conflict_policy: the conflict resolution policy @param last_sync: the last synchronization time stamp (datetime) @param onconflict: custom conflict resolver function """ self.error = None # the last error self.error_tree = etree.Element(current.xml.TAG.root) self.table = table self.tree = tree self.files = files self.directory = Storage() self.elements = Storage() self.items = Storage() self.references = [] self.job_table = None self.item_table = None self.count = 0 # total number of records imported self.created = [] # IDs of created records self.updated = [] # IDs of updated records self.deleted = [] # IDs of deleted records # Import strategy self.strategy = strategy if self.strategy is None: self.strategy = [S3ImportItem.METHOD.CREATE, S3ImportItem.METHOD.UPDATE, S3ImportItem.METHOD.DELETE] if not isinstance(self.strategy, (tuple, list)): self.strategy = [self.strategy] # Update policy (default=always update) self.update_policy = update_policy if not self.update_policy: self.update_policy = S3ImportItem.POLICY.OTHER # Conflict resolution policy (default=always update) 
self.conflict_policy = conflict_policy if not self.conflict_policy: self.conflict_policy = S3ImportItem.POLICY.OTHER # Synchronization settings self.mtime = None self.last_sync = last_sync self.onconflict = onconflict if job_id: self.__define_tables() jobtable = self.job_table if str(job_id).isdigit(): query = jobtable.id == job_id else: query = jobtable.job_id == job_id row = current.db(query).select(limitby=(0, 1)).first() if not row: raise SyntaxError("Job record not found") self.job_id = row.job_id if not self.table: tablename = row.tablename try: table = current.s3db[tablename] except: pass else: import uuid self.job_id = uuid.uuid4() # unique ID for this job # ------------------------------------------------------------------------- def add_item(self, element=None, original=None, components=None, parent=None, joinby=None): """ Parse and validate an XML element and add it as new item to the job. @param element: the element @param original: the original DB record (if already available, will otherwise be looked-up by this function) @param components: a dictionary of components (as in S3Resource) to include in the job (defaults to all defined components) @param parent: the parent item (if this is a component) @param joinby: the component join key(s) (if this is a component) @returns: a unique identifier for the new item, or None if there was an error. self.error contains the last error, and self.error_tree an element tree with all failing elements including error attributes. """ if element in self.elements: # element has already been added to this job return self.elements[element] # Parse the main element item = S3ImportItem(self) # Update lookup lists item_id = item.item_id self.items[item_id] = item if element is not None: self.elements[element] = item_id if not item.parse(element, original=original, files=self.files): self.error = item.error item.accepted = False if parent is None: self.error_tree.append(deepcopy(item.element)) else: # Now parse the components table = item.table components = current.s3db.get_components(table, names=components) cnames = Storage() cinfos = Storage() for alias in components: component = components[alias] pkey = component.pkey if component.linktable: ctable = component.linktable fkey = component.lkey else: ctable = component.table fkey = component.fkey ctablename = ctable._tablename if ctablename in cnames: cnames[ctablename].append(alias) else: cnames[ctablename] = [alias] cinfos[(ctablename, alias)] = Storage(component = component, ctable = ctable, pkey = pkey, fkey = fkey, original = None, uid = None) add_item = self.add_item xml = current.xml for celement in xml.components(element, names=cnames.keys()): # Get the component tablename ctablename = celement.get(xml.ATTRIBUTE.name, None) if not ctablename: continue # Get the component alias (for disambiguation) calias = celement.get(xml.ATTRIBUTE.alias, None) if calias is None: if ctablename not in cnames: continue aliases = cnames[ctablename] if len(aliases) == 1: calias = aliases[0] else: # ambiguous components *must* use alias continue if (ctablename, calias) not in cinfos: continue else: cinfo = cinfos[(ctablename, calias)] component = cinfo.component original = cinfo.original ctable = cinfo.ctable pkey = cinfo.pkey fkey = cinfo.fkey if not component.multiple: if cinfo.uid is not None: continue if original is None and item.id: query = (table.id == item.id) & \ (table[pkey] == ctable[fkey]) original = current.db(query).select(ctable.ALL, limitby=(0, 1)).first() if original: cinfo.uid = uid = 
original.get(xml.UID, None) celement.set(xml.UID, uid) cinfo.original = original item_id = add_item(element=celement, original=original, parent=item, joinby=(pkey, fkey)) if item_id is None: item.error = self.error self.error_tree.append(deepcopy(item.element)) else: citem = self.items[item_id] citem.parent = item item.components.append(citem) # Handle references table = item.table tree = self.tree if tree is not None: fields = [table[f] for f in table.fields] rfields = filter(s3_has_foreign_key, fields) item.references = self.lookahead(element, table=table, fields=rfields, tree=tree, directory=self.directory) for reference in item.references: entry = reference.entry if entry and entry.element is not None: item_id = add_item(element=entry.element) if item_id: entry.update(item_id=item_id) # Parent reference if parent is not None: entry = Storage(item_id=parent.item_id, element=parent.element, tablename=parent.tablename) item.references.append(Storage(field=joinby, entry=entry)) return item.item_id # ------------------------------------------------------------------------- def lookahead(self, element, table=None, fields=None, tree=None, directory=None): """ Find referenced elements in the tree @param element: the element @param table: the DB table @param fields: the FK fields in the table @param tree: the import tree @param directory: a dictionary to lookup elements in the tree (will be filled in by this function) """ db = current.db s3db = current.s3db xml = current.xml import_uid = xml.import_uid ATTRIBUTE = xml.ATTRIBUTE TAG = xml.TAG UID = xml.UID reference_list = [] root = None if tree is not None: if isinstance(tree, etree._Element): root = tree else: root = tree.getroot() references = element.findall("reference") for reference in references: field = reference.get(ATTRIBUTE.field, None) # Ignore references without valid field-attribute if not field or field not in fields: continue # Find the key table multiple = False fieldtype = str(table[field].type) if fieldtype.startswith("reference"): ktablename = fieldtype[10:] elif fieldtype.startswith("list:reference"): ktablename = fieldtype[15:] multiple = True else: # ignore if the field is not a reference type continue try: ktable = s3db[ktablename] except: # Invalid tablename - skip continue tablename = reference.get(ATTRIBUTE.resource, None) # Ignore references to tables without UID field: if UID not in ktable.fields: continue # Fall back to key table name if tablename is not specified: if not tablename: tablename = ktablename # Super-entity references must use the super-key: if tablename != ktablename: field = (ktable._id.name, field) # Ignore direct references to super-entities: if tablename == ktablename and ktable._id.name != "id": continue # Get the foreign key uids = reference.get(UID, None) attr = UID if not uids: uids = reference.get(ATTRIBUTE.tuid, None) attr = ATTRIBUTE.tuid if uids and multiple: uids = json.loads(uids) elif uids: uids = [uids] # Find the elements and map to DB records relements = [] # Create a UID<->ID map id_map = Storage() if attr == UID and uids: _uids = map(import_uid, uids) query = ktable[UID].belongs(_uids) records = db(query).select(ktable.id, ktable[UID]) id_map = dict([(r[UID], r.id) for r in records]) if not uids: # Anonymous reference: <resource> inside the element expr = './/%s[@%s="%s"]' % (TAG.resource, ATTRIBUTE.name, tablename) relements = reference.xpath(expr) if relements and not multiple: relements = [relements[0]] elif root is not None: for uid in uids: entry = None # Entry already in 
            # Entry already in the directory?
            if directory is not None:
                entry = directory.get((tablename, attr, uid), None)
            if not entry:
                expr = ".//%s[@%s='%s' and @%s='%s']" % (TAG.resource,
                                                         ATTRIBUTE.name,
                                                         tablename,
                                                         attr,
                                                         uid)
                e = root.xpath(expr)
                if e:
                    # Element in the source => append to relements
                    relements.append(e[0])
                else:
                    # No element found, see if original record exists
                    _uid = import_uid(uid)
                    if _uid and _uid in id_map:
                        _id = id_map[_uid]
                        entry = Storage(tablename=tablename,
                                        element=None,
                                        uid=uid,
                                        id=_id,
                                        item_id=None)
                        reference_list.append(Storage(field=field,
                                                      entry=entry))
                    else:
                        continue
            else:
                reference_list.append(Storage(field=field, entry=entry))

        # Create entries for all newly found elements
        for relement in relements:
            uid = relement.get(attr, None)
            if attr == UID:
                _uid = import_uid(uid)
                id = _uid and id_map and id_map.get(_uid, None) or None
            else:
                _uid = None
                id = None
            entry = Storage(tablename=tablename,
                            element=relement,
                            uid=uid,
                            id=id,
                            item_id=None)
            # Add entry to directory
            if uid and directory is not None:
                directory[(tablename, attr, uid)] = entry
            # Append the entry to the reference list
            reference_list.append(Storage(field=field, entry=entry))

        return reference_list

    # -------------------------------------------------------------------------
    def load_item(self, row):
        """ Load an item from the item table (counterpart to add_item
            when restoring a job from the database)
        """

        item = S3ImportItem(self)
        if not item.restore(row):
            self.error = item.error
            if item.load_parent is None:
                self.error_tree.append(deepcopy(item.element))
        # Update lookup lists
        item_id = item.item_id
        self.items[item_id] = item
        return item_id

    # -------------------------------------------------------------------------
    def resolve(self, item_id, import_list):
        """ Resolve the reference list of an item

            @param item_id: the import item UID
            @param import_list: the ordered list of items (UIDs) to import
        """

        item = self.items[item_id]
        if item.lock or item.accepted is False:
            return False
        references = []
        for reference in item.references:
            ritem_id = reference.entry.item_id
            if ritem_id and ritem_id not in import_list:
                references.append(ritem_id)
        for ritem_id in references:
            item.lock = True
            if self.resolve(ritem_id, import_list):
                import_list.append(ritem_id)
            item.lock = False
        return True

    # -------------------------------------------------------------------------
    def commit(self, ignore_errors=False):
        """ Commit the import job to the DB

            @param ignore_errors: skip any items with errors
                                  (does still report the errors)
        """

        ATTRIBUTE = current.xml.ATTRIBUTE

        # Resolve references
        import_list = []
        for item_id in self.items:
            self.resolve(item_id, import_list)
            if item_id not in import_list:
                import_list.append(item_id)

        # Commit the items
        items = self.items
        count = 0
        mtime = None
        created = []
        cappend = created.append
        updated = []
        deleted = []
        tablename = self.table._tablename
        for item_id in import_list:
            item = items[item_id]
            error = None
            success = item.commit(ignore_errors=ignore_errors)
            error = item.error
            if error:
                self.error = error
                element = item.element
                if element is not None:
                    if not element.get(ATTRIBUTE.error, False):
                        element.set(ATTRIBUTE.error, str(self.error))
                    self.error_tree.append(deepcopy(element))
                if not ignore_errors:
                    return False
            elif item.tablename == tablename:
                count += 1
                if mtime is None or item.mtime > mtime:
                    mtime = item.mtime
                if item.id:
                    if item.method == item.METHOD.CREATE:
                        cappend(item.id)
                    elif item.method == item.METHOD.UPDATE:
                        updated.append(item.id)
                    elif item.method == item.METHOD.DELETE:
                        deleted.append(item.id)
        self.count = count
        self.mtime = mtime
        self.created = created
        self.updated = updated
        self.deleted = deleted
        return True

    # -------------------------------------------------------------------------
    def __define_tables(self):
        """ Define the database tables for jobs and items """

        self.job_table = self.define_job_table()
        self.item_table = self.define_item_table()

    # -------------------------------------------------------------------------
    @classmethod
    def define_job_table(cls):

        db = current.db
        if cls.JOB_TABLE_NAME not in db:
            job_table = db.define_table(cls.JOB_TABLE_NAME,
                                        Field("job_id", length=128,
                                              unique=True,
                                              notnull=True),
                                        Field("tablename"),
                                        # Note: evaluated once at table
                                        # definition time, not per record
                                        Field("timestmp", "datetime",
                                              default=datetime.utcnow()))
        else:
            job_table = db[cls.JOB_TABLE_NAME]
        return job_table

    # -------------------------------------------------------------------------
    @classmethod
    def define_item_table(cls):

        db = current.db
        if cls.ITEM_TABLE_NAME not in db:
            item_table = db.define_table(cls.ITEM_TABLE_NAME,
                                         Field("item_id", length=128,
                                               unique=True,
                                               notnull=True),
                                         Field("job_id", length=128),
                                         Field("tablename", length=128),
                                         #Field("record_id", "integer"),
                                         Field("record_uid"),
                                         Field("error", "text"),
                                         Field("data", "text"),
                                         Field("element", "text"),
                                         Field("ritems", "list:string"),
                                         Field("citems", "list:string"),
                                         Field("parent", length=128))
        else:
            item_table = db[cls.ITEM_TABLE_NAME]
        return item_table

    # -------------------------------------------------------------------------
    def store(self):
        """ Store this job and all its items in the job table """

        db = current.db

        _debug("Storing Job ID=%s" % self.job_id)
        self.__define_tables()
        jobtable = self.job_table
        query = jobtable.job_id == self.job_id
        row = db(query).select(jobtable.id, limitby=(0, 1)).first()
        if row:
            record_id = row.id
        else:
            record_id = None
        record = Storage(job_id=self.job_id)
        try:
            tablename = self.table._tablename
        except:
            pass
        else:
            record.update(tablename=tablename)
        for item in self.items.values():
            item.store(item_table=self.item_table)
        if record_id:
            db(jobtable.id == record_id).update(**record)
        else:
            record_id = jobtable.insert(**record)
        _debug("Job record ID=%s" % record_id)
        return record_id

    # -------------------------------------------------------------------------
    def get_tree(self):
        """ Reconstruct the element tree of this job """

        if self.tree is not None:
            return self.tree
        else:
            xml = current.xml
            root = etree.Element(xml.TAG.root)
            for item in self.items.values():
                if item.element is not None and not item.parent:
                    if item.tablename == self.table._tablename or \
                       item.element.get(xml.UID, None) or \
                       item.element.get(xml.ATTRIBUTE.tuid, None):
                        root.append(deepcopy(item.element))
            return etree.ElementTree(root)

    # -------------------------------------------------------------------------
    def delete(self):
        """ Delete this job and all its items from the job table """

        db = current.db

        _debug("Deleting job ID=%s" % self.job_id)
        self.__define_tables()
        item_table = self.item_table
        query = item_table.job_id == self.job_id
        db(query).delete()
        job_table = self.job_table
        query = job_table.job_id == self.job_id
        db(query).delete()

    # -------------------------------------------------------------------------
    def restore_references(self):
        """ Restore the job's reference structure after loading items
            from the item table
        """

        db = current.db
        UID = current.xml.UID

        for item in self.items.values():
            for citem_id in item.load_components:
                if citem_id in self.items:
                    item.components.append(self.items[citem_id])
            item.load_components = []
            for ritem in item.load_references:
                field = ritem["field"]
                if "item_id" in ritem:
                    item_id = ritem["item_id"]
                    if item_id in self.items:
                        _item = self.items[item_id]
                        entry = Storage(tablename=_item.tablename,
                                        element=_item.element,
                                        uid=_item.uid,
                                        id=_item.id,
                                        item_id=item_id)
                        item.references.append(Storage(field=field,
                                                       entry=entry))
                else:
                    _id = None
                    uid = ritem.get("uid", None)
                    tablename = ritem.get("tablename", None)
                    if tablename and uid:
                        try:
                            table = current.s3db[tablename]
                        except:
                            continue
                        if UID not in table.fields:
                            continue
                        query = table[UID] == uid
                        row = db(query).select(table._id,
                                               limitby=(0, 1)).first()
                        if row:
                            _id = row[table._id.name]
                        else:
                            continue
                        entry = Storage(tablename=ritem["tablename"],
                                        element=None,
                                        uid=ritem["uid"],
                                        id=_id,
                                        item_id=None)
                        item.references.append(Storage(field=field,
                                                       entry=entry))
            item.load_references = []
            if item.load_parent is not None:
                item.parent = self.items[item.load_parent]
                item.load_parent = None

# END =========================================================================
    ashwyn/eden-message_parser
    modules/s3/s3import.py
    Python
    mit
    123,322
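The `resolve()` method in the file above is essentially a depth-first topological ordering over item references, with the `lock` flag serving as the cycle guard. Here is a minimal standalone sketch of that same ordering idea; the toy `Item` class and all names are hypothetical illustrations, not the Sahana Eden API:

```python
# Toy model of S3ImportJob.resolve(): order items so that every
# referenced item comes before the items that point to it.
# The `lock` flag breaks reference cycles, as in the original.

class Item:
    def __init__(self, item_id, references=()):
        self.item_id = item_id
        self.references = list(references)  # item_ids this item depends on
        self.lock = False

def resolve(items, item_id, import_list):
    item = items[item_id]
    if item.lock:
        return False
    for ritem_id in item.references:
        if ritem_id not in import_list:
            item.lock = True
            if resolve(items, ritem_id, import_list):
                import_list.append(ritem_id)
            item.lock = False
    return True

items = {
    "a": Item("a", references=["b"]),
    "b": Item("b", references=["c"]),
    "c": Item("c", references=["a"]),  # cycle back to "a"
}
order = []
for item_id in items:
    resolve(items, item_id, order)
    if item_id not in order:
        order.append(item_id)
print(order)  # ['c', 'b', 'a'] -- referenced items first, cycle broken
```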
package br.ufrj.g2matricula.domain;

import org.springframework.data.elasticsearch.annotations.Document;

import javax.persistence.*;
import javax.validation.constraints.*;
import java.io.Serializable;
import java.util.Objects;

import br.ufrj.g2matricula.domain.enumeration.MatriculaStatus;

/**
 * A Matricula.
 */
@Entity
@Table(name = "matricula")
@Document(indexName = "matricula")
public class Matricula implements Serializable {

    private static final long serialVersionUID = 1L;

    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    private Long id;

    @NotNull
    @Enumerated(EnumType.STRING)
    @Column(name = "status", nullable = false)
    private MatriculaStatus status;

    @ManyToOne
    private Aluno dreAluno;

    @ManyToOne
    private Curso curso;

    // jhipster-needle-entity-add-field - JHipster will add fields here, do not remove

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public MatriculaStatus getStatus() {
        return status;
    }

    public Matricula status(MatriculaStatus status) {
        this.status = status;
        return this;
    }

    public void setStatus(MatriculaStatus status) {
        this.status = status;
    }

    public Aluno getDreAluno() {
        return dreAluno;
    }

    public Matricula dreAluno(Aluno aluno) {
        this.dreAluno = aluno;
        return this;
    }

    public void setDreAluno(Aluno aluno) {
        this.dreAluno = aluno;
    }

    public Curso getCurso() {
        return curso;
    }

    public Matricula curso(Curso curso) {
        this.curso = curso;
        return this;
    }

    public void setCurso(Curso curso) {
        this.curso = curso;
    }

    // jhipster-needle-entity-add-getters-setters - JHipster will add getters and setters here, do not remove

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        Matricula matricula = (Matricula) o;
        if (matricula.getId() == null || getId() == null) {
            return false;
        }
        return Objects.equals(getId(), matricula.getId());
    }

    @Override
    public int hashCode() {
        return Objects.hashCode(getId());
    }

    @Override
    public String toString() {
        return "Matricula{" +
            "id=" + getId() +
            ", status='" + getStatus() + "'" +
            "}";
    }
}
    DamascenoRafael/cos482-qualidade-de-software
    www/src/main/java/br/ufrj/g2matricula/domain/Matricula.java
    Java
    mit
    2,535
# rhodecode-ce-dockerized

Docker container for RhodeCode Community Edition repository management platform

# WIP
    darneta/rhodecode-ce-dockerized
    README.md
    Markdown
    mit
    113
<?php

class Admin_GeneralModel extends CI_Model {

    public function GetAdminModuleCategoryList()
    {
        $this->db->select('CID, CategoryName');
        $this->db->from('admin_module_category');
        $this->db->order_by('Ordering');
        $query = $this->db->get();

        if ($query->num_rows())
            return $query;
        else
            return FALSE;
    }

    public function GetAdminModuleList()
    {
        $this->db->select('MID, CID, ModuleName, DisplayName');
        $this->db->from('admin_module');
        $this->db->order_by('Ordering');
        $query = $this->db->get();

        if ($query->num_rows())
            return $query;
        else
            return FALSE;
    }

    public function GetAdminModuleActions($MID = NULL)
    {
        $this->db->select('AID, MID, Action');
        $this->db->from('admin_module_action');
        if ($MID != NULL)
            $this->db->where('MID', $MID);
        $query = $this->db->get();

        if ($query->num_rows())
            return $query->result();
        else
            return FALSE;
    }
}
?>
    dernst91/deCMS
    application/models/Admin_GeneralModel.php
    PHP
    mit
    895
#ifndef SYMTAB_H
#define SYMTAB_H

#include "symbol.h"

void symtab_init();
void push_scope();
void pop_scope();
symbol *bind_symbol(char *name);
symbol *lookup_symbol(char *name);
void print_symtab();

#endif
    rayiner/ccpoc
    symtab.h
    C
    mit
    213
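The header above declares only the interface. A plausible reading is a stack of scopes, where `bind_symbol` inserts into the innermost scope and `lookup_symbol` searches outward toward the global scope. A hedged Python sketch of those assumed semantics follows; the real `symbol.h`/implementation in the repo may differ:

```python
# Hypothetical semantics for symtab.h: a stack of scopes.
# bind_symbol inserts into the innermost scope; lookup_symbol
# searches from innermost to outermost. This is only an
# illustration of the declared API, not the actual C code.

scopes = []

def symtab_init():
    scopes.clear()
    push_scope()            # global scope

def push_scope():
    scopes.append({})

def pop_scope():
    scopes.pop()

def bind_symbol(name):
    sym = {"name": name}    # stand-in for the C `symbol` struct
    scopes[-1][name] = sym
    return sym

def lookup_symbol(name):
    for scope in reversed(scopes):
        if name in scope:
            return scope[name]
    return None

symtab_init()
bind_symbol("x")
push_scope()
bind_symbol("x")            # shadows the outer x
assert lookup_symbol("x") is scopes[-1]["x"]
pop_scope()
assert lookup_symbol("x") is scopes[0]["x"]
```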
RSpec.describe("executables", skip_db_cleaner: true) do
  include SharedSpecSetup

  before do
    # migrations don't work if we are still connected to the db
    ActiveRecord::Base.remove_connection
  end

  it "extracts the schema" do
    output = `bin/extract #{config_filename} production #{schema_filename} 2>&1`
    expect(output).to match(/extracted to/)
    expect(output).to match(/#{schema_filename}/)
  end

  it "transfers the schema" do
    output = `bin/transfer-schema #{config_filename} production test config/include_tables.txt 2>&1`
    expect(output).to match(/transferred schema from production to test/)
  end
end
    ifad/sybase-schema-extractor
    spec/bin_spec.rb
    Ruby
    mit
    636
## S3proxy - serve S3 files simply

S3proxy is a simple flask-based REST web application which can expose files (keys) stored in the AWS Simple Storage Service (S3) via a simple REST API.

### What does this do?

S3proxy takes a set of AWS credentials and an S3 bucket name and provides GET and HEAD endpoints on the files within the bucket. It uses the [boto][boto] library for internal access to S3. For example, if your bucket has the following file:

    s3://mybucket/examples/path/to/myfile.txt

then running S3proxy on a localhost server (port 5000) would enable you to read (GET) this file at:

    http://localhost:5000/files/examples/path/to/myfile.txt

Support exists in S3proxy for the byte-range (`Range`) header in a GET request. This means that the API can provide arbitrary parts of S3 files if requested/supported by the application making the GET request.

### Why do this?

S3proxy simplifies access to private S3 objects. While S3 already provides [a complete REST API][s3_api], this API requires signed authentication headers or parameters that are not always obtainable within existing applications (see below), or overly complex for simple development/debugging tasks.

In fact, S3proxy was specifically designed to provide a compatibility layer for viewing DNA sequencing data (`.bam` files) using [IGV][igv]. While IGV already includes an interface for reading bam files from an HTTP endpoint, it does not support creating signed requests as required by the AWS S3 API (IGV does support HTTP Basic Authentication, a feature that I would like to include in S3proxy in the near future). Though it is in principle possible to provide a signed AWS-compatible URL to IGV, IGV will still not be able to create its own signed URLs necessary for accessing `.bai` index files, usually located in the same directory as the `.bam` file. Using S3proxy you can expose the S3 objects via a simplified HTTP API which IGV can understand and access directly.

This project is in many ways similar to [S3Auth][s3auth], a hosted service which provides a much more complete API to a private S3 bucket. I wrote S3proxy as a faster, simpler solution -- and because S3Auth requires a domain name and access to the `CNAME` record in order to function. If you want a more complete API (read: more than just GET/HEAD at the moment), you should check them out!

### Features

- Serves S3 file objects via standard GET requests, optionally providing only a part of a file using the byte-range header.
- Easy to configure via the `config.yaml` file -- S3 keys and a bucket name are all you need!
- Limited support for simple url-rewriting where necessary.
- Uses the werkzeug [`SimpleCache` module][simplecache] to cache S3 object identifiers (but not data) in order to reduce latency and lookup times.

### Usage

#### Requirements

To run S3proxy, you will need:

- [Flask][flask]
- [boto][boto]
- [PyYAML][pyyaml]
- An Amazon AWS account and keys with appropriate S3 access

#### Installation/Configuration

At the moment, there is no installation. Simply put your AWS keys and bucket name into the config.yaml file:

```yaml
AWS_ACCESS_KEY_ID: ''
AWS_SECRET_ACCESS_KEY: ''
bucket_name: ''
```

You may also optionally specify a number of "rewrite" rules. These are simple pairs of a regular expression and a replacement string which can be used to internally redirect file paths (note: the API does not currently send an HTTP 3xx redirect). The example in the config.yaml file reads:

```yaml
rewrite_rules:
  bai_rule:
    from: ".bam.bai$"
    to: ".bai"
```

... which will match all url/filenames ending with ".bam.bai" and rewrite this to ".bai". If you do not wish to use any rewrite_rules, simply leave this commented out.

#### Running S3proxy

Once you have filled out the config.yaml file, you can test out S3proxy simply by running on the command line:

    python app.py

*Note*: Running using the built-in flask server is not recommended for anything other than debugging. Refer to [these deployment options][wsgi_server] for instructions on how to set up a flask application in a WSGI framework.

#### Options

If you wish to see more debug-level output (headers, etc.), use the `--debug` option. You may also specify a yaml configuration file to load using the `--config` parameter.

### Important considerations and caveats

S3proxy should not be used in production-level or open/exposed servers! There is currently no security provided by S3proxy (though I may add basic HTTP authentication later). Once given the AWS credentials, S3proxy will serve any path available to it. And, although I restrict requests to GET and HEAD only, I cannot currently guarantee that a determined person would not be able to execute a PUT/UPDATE/DELETE request using this service. Finally, I highly recommend you create a separate [IAM role][iam_roles] in AWS with limited access and permissions to S3 only for use with S3proxy.

### Future development

- Implement HTTP Basic Authentication to provide some level of security.
- Implement other error codes and basic REST responses.
- Add ability to log to a file and specify a `--log-level` (use the Python logging module)

[boto]: http://boto.readthedocs.org/
[flask]: http://flask.pocoo.org/
[pyyaml]: http://pyyaml.org/wiki/PyYAML
[s3_api]: http://docs.aws.amazon.com/AmazonS3/latest/API/APIRest.html
[igv]: http://www.broadinstitute.org/igv/home
[wsgi_server]: http://flask.pocoo.org/docs/deploying/
[iam_roles]: http://aws.amazon.com/iam/
[simplecache]: http://flask.pocoo.org/docs/patterns/caching/
[s3auth]: http://www.s3auth.com/
    nkrumm/s3proxy
    README.md
    Markdown
    mit
    5,632
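To make the byte-range behaviour described in the README concrete, here is a hedged client-side sketch using Python's `requests` library. It assumes S3proxy is running locally on port 5000 and that the example key from the README exists in the bucket:

```python
# Fetch only the first 100 bytes of a file through S3proxy.
# Assumes a local S3proxy instance (python app.py) on port 5000
# and the example key shown in the README above.
import requests

url = "http://localhost:5000/files/examples/path/to/myfile.txt"

# HEAD first to learn the full size (if the server reports it)
head = requests.head(url)
print(head.status_code, head.headers.get("Content-Length"))

# Ranged GET: servers that honour byte ranges reply 206 Partial Content
resp = requests.get(url, headers={"Range": "bytes=0-99"})
print(resp.status_code)      # expect 206 if ranges are supported
print(len(resp.content))     # expect 100
```

This is exactly the kind of request IGV issues when seeking inside a `.bam` file, which is why range support matters for that use case.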
Title: Survey data
Template: survey
Slug: survey/data
Github: True

The code to clean and process the survey data is available in the [GitHub repository](https://github.com/andrewheiss/From-the-Trenches-Anti-TIP-NGOs-and-US) for Andrew Heiss and Judith G. Kelley. 2016. "From the Trenches: A Global Survey of Anti-TIP NGOs and their Views of US Efforts." *Journal of Human Trafficking*. [doi:10.1080/23322705.2016.1199241](https://dx.doi.org/10.1080/23322705.2016.1199241)

<div class="row">
  <div class="col-xs-12 col-sm-10 col-md-8 col-sm-offset-1 col-md-offset-2">
    <div class="github-widget" data-repo="andrewheiss/From-the-Trenches-Anti-TIP-NGOs-and-US"></div>
  </div>
</div>

The free response answers for respondents who requested anonymity have been redacted.

- CSV file: [`responses_full_anonymized.csv`](/files/data/responses_full_anonymized.csv)
- R file: [`responses_full_anonymized.rds`](/files/data/responses_full_anonymized.rds)
    andrewheiss/scorecarddiplomacy-org
    content/pages/survey-stuff/data.md
    Markdown
    mit
    952
// @flow
import { StyleSheet } from 'react-native';

import { colors } from '../../themes';

const styles = StyleSheet.create({
  divider: {
    height: 1,
    marginHorizontal: 0,
    backgroundColor: colors.darkDivider,
  },
});

export default styles;
    Dennitz/Timetable
    src/components/styles/HorizontalDividerList.styles.js
    JavaScript
    mit
    254
## Testing testing, 1, 2, 3

Let's see how *[this](https://github.com/imathis/jekyll-markdown-block)* does.

    puts 'awesome' unless not_awesome?

- One item
- Two item
- Three Item
- Four!

And… scene!
    imathis/jekyll-markdown-block
    test/source/_includes/test.md
    Markdown
    mit
    208
    <!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!--NewPage--> <HTML> <HEAD> <!-- Generated by javadoc (build 1.6.0_27) on Wed Nov 21 16:03:26 EST 2012 --> <TITLE> ResourceXmlPropertyEmitterInterface </TITLE> <META NAME="date" CONTENT="2012-11-21"> <LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../stylesheet.css" TITLE="Style"> <SCRIPT type="text/javascript"> function windowTitle() { if (location.href.indexOf('is-external=true') == -1) { parent.document.title="ResourceXmlPropertyEmitterInterface"; } } </SCRIPT> <NOSCRIPT> </NOSCRIPT> </HEAD> <BODY BGCOLOR="white" onload="windowTitle();"> <HR> <!-- ========= START OF TOP NAVBAR ======= --> <A NAME="navbar_top"><!-- --></A> <A HREF="#skip-navbar_top" title="Skip navigation links"></A> <TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY=""> <TR> <TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A NAME="navbar_top_firstrow"><!-- --></A> <TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY=""> <TR ALIGN="center" VALIGN="top"> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> &nbsp;<FONT CLASS="NavBarFont1Rev"><B>Class</B></FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="class-use/ResourceXmlPropertyEmitterInterface.html"><FONT CLASS="NavBarFont1"><B>Use</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../index-files/index-1.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A>&nbsp;</TD> </TR> </TABLE> </TD> <TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM> </EM> </TD> </TR> <TR> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> &nbsp;<A HREF="../../../../org/pentaho/di/resource/ResourceUtil.html" title="class in org.pentaho.di.resource"><B>PREV CLASS</B></A>&nbsp; &nbsp;<A HREF="../../../../org/pentaho/di/resource/SequenceResourceNaming.html" title="class in org.pentaho.di.resource"><B>NEXT CLASS</B></A></FONT></TD> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> <A HREF="../../../../index.html?org/pentaho/di/resource/ResourceXmlPropertyEmitterInterface.html" target="_top"><B>FRAMES</B></A> &nbsp; &nbsp;<A HREF="ResourceXmlPropertyEmitterInterface.html" target="_top"><B>NO FRAMES</B></A> &nbsp; &nbsp;<SCRIPT type="text/javascript"> <!-- if(window==top) { document.writeln('<A HREF="../../../../allclasses-noframe.html"><B>All Classes</B></A>'); } //--> </SCRIPT> <NOSCRIPT> <A HREF="../../../../allclasses-noframe.html"><B>All Classes</B></A> </NOSCRIPT> </FONT></TD> </TR> <TR> <TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2"> SUMMARY:&nbsp;NESTED&nbsp;|&nbsp;FIELD&nbsp;|&nbsp;CONSTR&nbsp;|&nbsp;<A HREF="#method_summary">METHOD</A></FONT></TD> <TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2"> DETAIL:&nbsp;FIELD&nbsp;|&nbsp;CONSTR&nbsp;|&nbsp;<A 
HREF="#method_detail">METHOD</A></FONT></TD> </TR> </TABLE> <A NAME="skip-navbar_top"></A> <!-- ========= END OF TOP NAVBAR ========= --> <HR> <!-- ======== START OF CLASS DATA ======== --> <H2> <FONT SIZE="-1"> org.pentaho.di.resource</FONT> <BR> Interface ResourceXmlPropertyEmitterInterface</H2> <HR> <DL> <DT><PRE>public interface <B>ResourceXmlPropertyEmitterInterface</B></DL> </PRE> <P> <HR> <P> <!-- ========== METHOD SUMMARY =========== --> <A NAME="method_summary"><!-- --></A> <TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY=""> <TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor"> <TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2"> <B>Method Summary</B></FONT></TH> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1"> <CODE>&nbsp;<A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</A></CODE></FONT></TD> <TD><CODE><B><A HREF="../../../../org/pentaho/di/resource/ResourceXmlPropertyEmitterInterface.html#getExtraResourceProperties(org.pentaho.di.resource.ResourceHolderInterface, int)">getExtraResourceProperties</A></B>(<A HREF="../../../../org/pentaho/di/resource/ResourceHolderInterface.html" title="interface in org.pentaho.di.resource">ResourceHolderInterface</A>&nbsp;ref, int&nbsp;indention)</CODE> <BR> &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;Allows injection of additional relevant properties in the to-xml of the Resource Reference.</TD> </TR> </TABLE> &nbsp; <P> <!-- ============ METHOD DETAIL ========== --> <A NAME="method_detail"><!-- --></A> <TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY=""> <TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor"> <TH ALIGN="left" COLSPAN="1"><FONT SIZE="+2"> <B>Method Detail</B></FONT></TH> </TR> </TABLE> <A NAME="getExtraResourceProperties(org.pentaho.di.resource.ResourceHolderInterface, int)"><!-- --></A><H3> getExtraResourceProperties</H3> <PRE> <A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</A> <B>getExtraResourceProperties</B>(<A HREF="../../../../org/pentaho/di/resource/ResourceHolderInterface.html" title="interface in org.pentaho.di.resource">ResourceHolderInterface</A>&nbsp;ref, int&nbsp;indention)</PRE> <DL> <DD>Allows injection of additional relevant properties in the to-xml of the Resource Reference. 
<P> <DD><DL> <DT><B>Parameters:</B><DD><CODE>ref</CODE> - The Resource Reference Holder (a step, or a job entry)<DD><CODE>indention</CODE> - If -1, then no indenting, otherwise, it's the indent level to indent the XML strings <DT><B>Returns:</B><DD>String of injected XML</DL> </DD> </DL> <!-- ========= END OF CLASS DATA ========= --> <HR> <!-- ======= START OF BOTTOM NAVBAR ====== --> <A NAME="navbar_bottom"><!-- --></A> <A HREF="#skip-navbar_bottom" title="Skip navigation links"></A> <TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY=""> <TR> <TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A NAME="navbar_bottom_firstrow"><!-- --></A> <TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY=""> <TR ALIGN="center" VALIGN="top"> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> &nbsp;<FONT CLASS="NavBarFont1Rev"><B>Class</B></FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="class-use/ResourceXmlPropertyEmitterInterface.html"><FONT CLASS="NavBarFont1"><B>Use</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../index-files/index-1.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A>&nbsp;</TD> </TR> </TABLE> </TD> <TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM> </EM> </TD> </TR> <TR> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> &nbsp;<A HREF="../../../../org/pentaho/di/resource/ResourceUtil.html" title="class in org.pentaho.di.resource"><B>PREV CLASS</B></A>&nbsp; &nbsp;<A HREF="../../../../org/pentaho/di/resource/SequenceResourceNaming.html" title="class in org.pentaho.di.resource"><B>NEXT CLASS</B></A></FONT></TD> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> <A HREF="../../../../index.html?org/pentaho/di/resource/ResourceXmlPropertyEmitterInterface.html" target="_top"><B>FRAMES</B></A> &nbsp; &nbsp;<A HREF="ResourceXmlPropertyEmitterInterface.html" target="_top"><B>NO FRAMES</B></A> &nbsp; &nbsp;<SCRIPT type="text/javascript"> <!-- if(window==top) { document.writeln('<A HREF="../../../../allclasses-noframe.html"><B>All Classes</B></A>'); } //--> </SCRIPT> <NOSCRIPT> <A HREF="../../../../allclasses-noframe.html"><B>All Classes</B></A> </NOSCRIPT> </FONT></TD> </TR> <TR> <TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2"> SUMMARY:&nbsp;NESTED&nbsp;|&nbsp;FIELD&nbsp;|&nbsp;CONSTR&nbsp;|&nbsp;<A HREF="#method_summary">METHOD</A></FONT></TD> <TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2"> DETAIL:&nbsp;FIELD&nbsp;|&nbsp;CONSTR&nbsp;|&nbsp;<A HREF="#method_detail">METHOD</A></FONT></TD> </TR> </TABLE> <A NAME="skip-navbar_bottom"></A> <!-- ======== END OF BOTTOM NAVBAR ======= --> <HR> </BODY> </HTML>
    ColFusion/PentahoKettle
    kettle-data-integration/docs/api/org/pentaho/di/resource/ResourceXmlPropertyEmitterInterface.html
    HTML
    mit
    9,567
require 'ffi'

module ProcessShared
  module Posix
    module Errno
      extend FFI::Library

      ffi_lib FFI::Library::LIBC

      attach_variable :errno, :int

      # Replace methods in +syms+ with error checking wrappers that
      # invoke the original method and raise a {SystemCallError} with
      # the current errno if the return value is an error.
      #
      # Errors are detected if the block returns true when called with
      # the original method's return value.
      def error_check(*syms, &is_err)
        unless block_given?
          is_err = lambda { |v| (v == -1) }
        end

        syms.each do |sym|
          method = self.method(sym)

          new_method_body = proc do |*args|
            ret = method.call(*args)
            if is_err.call(ret)
              raise SystemCallError.new("error in #{sym}", Errno.errno)
            else
              ret
            end
          end

          define_singleton_method(sym, &new_method_body)
          define_method(sym, &new_method_body)
        end
      end
    end
  end
end
    pmahoney/process_shared
    lib/process_shared/posix/errno.rb
    Ruby
    mit
    1,066
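For comparison, the same error-checking-wrapper idea can be expressed with Python's `ctypes` instead of Ruby FFI: wrap a libc call so that a -1 return raises an exception carrying the thread's current errno. This is a sketch of the pattern on POSIX systems, not part of the process_shared library:

```python
# Same pattern as the Ruby Errno module above, in Python ctypes:
# call a libc function and turn a -1 return into an exception
# that carries the current errno value.
import ctypes, ctypes.util, os

libc = ctypes.CDLL(ctypes.util.find_library("c"), use_errno=True)

def error_check(func, is_err=lambda v: v == -1):
    def wrapper(*args):
        ret = func(*args)
        if is_err(ret):
            err = ctypes.get_errno()
            raise OSError(err, "error in %s: %s" % (func.__name__,
                                                    os.strerror(err)))
        return ret
    return wrapper

close = error_check(libc.close)

try:
    close(-1)                # invalid fd -> EBADF
except OSError as e:
    print(e)                 # [Errno 9] error in close: Bad file descriptor
```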
    <!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- UASR: Unified Approach to Speech Synthesis and Recognition < - Documentation home page < < AUTHOR : Matthias Wolff < PACKAGE: n/a < < Copyright 2013 UASR contributors (see COPYRIGHT file) < - Chair of System Theory and Speech Technology, TU Dresden < - Chair of Communications Engineering, BTU Cottbus < < This file is part of UASR. < < UASR is free software: you can redistribute it and/or modify it under the < terms of the GNU Lesser General Public License as published by the Free < Software Foundation, either version 3 of the License, or (at your option) < any later version. < < UASR is distributed in the hope that it will be useful, but WITHOUT ANY < WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS < FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for < more details. < < You should have received a copy of the GNU Lesser General Public License < along with UASR. If not, see [http://www.gnu.org/licenses/]. --> <html> <head> <link rel=stylesheet type="text/css" href="toc.css"> </head> <script type="text/javascript"> if (top==self) top.location = "index.html"; </script> <script type="text/javascript" src="default.js"></script> <body onload="void(__tocInit('tocRoot'));"> <h2 class="CONT">Manual</h2> <noscript><div class="noscript"> JavaScript is not enabled. </div></noscript> <div class="tocRoot" id="tocRoot"> <div class="tocLeaf"><a class="toc" href="home.html" target="contFrame" title="Database DocumentationHome Page">Home</a></div> <div class="tocNode" id="tocPackageDocumentation"> <a class="toc" href="javascript:__tocToggle('tocPackageDocumentation');">[&minus;]</a>&nbsp;<img src="resources/book_obj.gif" class="tocIcon">&nbsp;Scripts <!--{{ TOC --> <div class="tocLeaf"><a href="automatic/vau.itp.html" target="contFrame" title="Voice authentication database plug-in. " ><img src="resources/blank_stc.gif" class="tocIcon">&nbsp;<img src="resources/lib_obj.gif" class="tocIcon">&nbsp;vau.itp</a></div> <!--}} TOC --> </div> </div> </body> </html>
    matthias-wolff/C-VAU
    manual/toc.html
    HTML
    mit
    2,328
'use strict';

const _ = require('lodash');
const co = require('co');
const Promise = require('bluebird');
const AWS = require('aws-sdk');
AWS.config.region = 'us-east-1';
const cloudwatch = Promise.promisifyAll(new AWS.CloudWatch());
const Lambda = new AWS.Lambda();

const START_TIME = new Date('2017-06-07T01:00:00.000Z');
const DAYS = 2;
const ONE_DAY = 24 * 60 * 60 * 1000;

let addDays = (startDt, n) => new Date(startDt.getTime() + ONE_DAY * n);

let getFuncStats = co.wrap(function* (funcName) {
  let getStats = co.wrap(function* (startTime, endTime) {
    let req = {
      MetricName: 'Duration',
      Namespace: 'AWS/Lambda',
      Period: 60,
      Dimensions: [ { Name: 'FunctionName', Value: funcName } ],
      Statistics: [ 'Maximum' ],
      Unit: 'Milliseconds',
      StartTime: startTime,
      EndTime: endTime
    };

    let resp = yield cloudwatch.getMetricStatisticsAsync(req);
    return resp.Datapoints.map(dp => {
      return {
        timestamp: dp.Timestamp,
        value: dp.Maximum
      };
    });
  });

  let stats = [];
  for (let i = 0; i < DAYS; i++) {
    // CloudWatch only allows us to query 1440 data points per request, which
    // at 1 min period is 24 hours
    let startTime = addDays(START_TIME, i);
    let endTime = addDays(startTime, 1);
    let oneDayStats = yield getStats(startTime, endTime);

    stats = stats.concat(oneDayStats);
  }

  return _.sortBy(stats, s => s.timestamp);
});

let listFunctions = co.wrap(function* (marker, acc) {
  acc = acc || [];

  let resp = yield Lambda.listFunctions({ Marker: marker, MaxItems: 100 }).promise();
  let functions = resp.Functions
    .map(f => f.FunctionName)
    .filter(fn => fn.includes("aws-coldstart") && !fn.endsWith("run"));

  acc = acc.concat(functions);

  if (resp.NextMarker) {
    return yield listFunctions(resp.NextMarker, acc);
  } else {
    return acc;
  }
});

listFunctions()
  .then(co.wrap(function* (funcs) {
    for (let func of funcs) {
      let stats = yield getFuncStats(func);
      stats.forEach(stat => console.log(`${func},${stat.timestamp},${stat.value}`));
    }
  }));
    theburningmonk/lambda-coldstart-comparison
    download-stats.js
    JavaScript
    mit
    2,153
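The comment in `download-stats.js` explains why the script queries one day at a time: CloudWatch returns at most 1,440 datapoints per call, which at a 60-second period is exactly 24 hours. A hedged boto3 rendering of that same day-by-day loop, with the function name and time window purely illustrative:

```python
# Day-by-day GetMetricStatistics queries, mirroring download-stats.js:
# at Period=60 one request can return at most 24h of datapoints (1440),
# so the window is advanced one day per call.
from datetime import datetime, timedelta

import boto3

cloudwatch = boto3.client("cloudwatch", region_name="us-east-1")

START_TIME = datetime(2017, 6, 7, 1, 0, 0)   # naive UTC, as in the script
DAYS = 2

def get_func_stats(func_name):
    stats = []
    for i in range(DAYS):
        start = START_TIME + timedelta(days=i)
        end = start + timedelta(days=1)
        resp = cloudwatch.get_metric_statistics(
            Namespace="AWS/Lambda",
            MetricName="Duration",
            Dimensions=[{"Name": "FunctionName", "Value": func_name}],
            StartTime=start,
            EndTime=end,
            Period=60,
            Statistics=["Maximum"],
            Unit="Milliseconds",
        )
        stats.extend(
            {"timestamp": dp["Timestamp"], "value": dp["Maximum"]}
            for dp in resp["Datapoints"]
        )
    return sorted(stats, key=lambda s: s["timestamp"])
```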
from __future__ import absolute_import, division, print_function, unicode_literals

import string
import urllib

try:
    from urllib.parse import urlparse, urlencode, urljoin, parse_qsl, urlunparse
    from urllib.request import urlopen, Request
    from urllib.error import HTTPError
except ImportError:
    from urlparse import urlparse, urljoin, urlunparse, parse_qsl
    from urllib import urlencode
    from urllib2 import urlopen, Request, HTTPError

from random import SystemRandom

try:
    UNICODE_ASCII_CHARACTERS = (string.ascii_letters + string.digits)
except AttributeError:
    UNICODE_ASCII_CHARACTERS = (string.ascii_letters.decode('ascii') +
                                string.digits.decode('ascii'))


def random_ascii_string(length):
    random = SystemRandom()
    return ''.join([random.choice(UNICODE_ASCII_CHARACTERS) for x in range(length)])


def url_query_params(url):
    """Return query parameters as a dict from the specified URL.

    :param url: URL.
    :type url: str
    :rtype: dict
    """
    return dict(parse_qsl(urlparse(url).query, True))


def url_dequery(url):
    """Return a URL with the query component removed.

    :param url: URL to dequery.
    :type url: str
    :rtype: str
    """
    url = urlparse(url)
    return urlunparse((url.scheme,
                       url.netloc,
                       url.path,
                       url.params,
                       '',
                       url.fragment))


def build_url(base, additional_params=None):
    """Construct a URL based off of base containing all parameters in
    the query portion of base plus any additional parameters.

    :param base: Base URL
    :type base: str
    :param additional_params: Additional query parameters to include.
    :type additional_params: dict
    :rtype: str
    """
    url = urlparse(base)
    query_params = {}
    query_params.update(parse_qsl(url.query, True))
    if additional_params is not None:
        query_params.update(additional_params)
        for k, v in additional_params.items():
            if v is None:
                query_params.pop(k)

    return urlunparse((url.scheme,
                       url.netloc,
                       url.path,
                       url.params,
                       urlencode(query_params),
                       url.fragment))
    VulcanTechnologies/oauth2lib
    oauth2lib/utils.py
    Python
    mit
    2,411
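A quick usage sketch of the helpers above, with illustrative values: `build_url` merges query parameters and drops any explicitly set to `None`, and `url_query_params` parses them back out. It assumes the module above is importable under its repository path as `oauth2lib.utils`:

```python
# Assuming the utils module above is importable as oauth2lib.utils
from oauth2lib.utils import build_url, url_query_params, url_dequery

url = build_url("https://example.com/cb?state=xyz",
                {"code": "abc123", "state": None})
print(url)                    # https://example.com/cb?code=abc123
print(url_query_params(url))  # {'code': 'abc123'}
print(url_dequery(url))       # https://example.com/cb
```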
#!/bin/bash

# data in Empar_paper/data/simul_balanc4GenNonhSSM
# length1000_b100.tar length1000_b150.tar length1000_b200.tar
# length1000_b100_num98.fa

MOD=ssm
ITER=2 # number of data sets
bl=100

# prep output files
OUT_lik='likel_balanced4_gennonh_'$bl'_'$MOD'_E.txt'
OUT_iter='iter_balanced4_gennonh_'$bl'_'$MOD'_E.txt'
OUT_time='time_balanced4_gennonh_'$bl'_'$MOD'_E.txt'
OUT_nc='neg_cases_balanced4_gennonh_'$bl'_'$MOD'_E.txt'

[[ -f $OUT_lik ]] && rm -f $OUT_lik
[[ -f $OUT_iter ]] && rm -f $OUT_iter
[[ -f $OUT_time ]] && rm -f $OUT_time
[[ -f $OUT_nc ]] && rm -f $OUT_nc

touch $OUT_lik
touch $OUT_iter
touch $OUT_time
touch $OUT_nc

# run from within the scripts folder
for i in $(seq 0 1 $ITER)
do
  # extract a single file from tar
  tar -xvf ../data/simul_balanc4GenNonhSSM/length1000_b$bl.tar length1000_b$bl\_num$i.fa

  ./main ../data/trees/treeE.tree length1000_b$bl\_num$i.fa $MOD > out.txt

  cat out.txt | grep Likelihood | cut -d':' -f2 | xargs >> $OUT_lik
  cat out.txt | grep Iter | cut -d':' -f2 | xargs >> $OUT_iter
  cat out.txt | grep Time | cut -d':' -f2 | xargs >> $OUT_time
  cat out.txt | grep "negative branches" | cut -d':' -f2 | xargs >> $OUT_nc

  rm out.txt # not polluting the folder with single files
  rm length1000_b$bl\_num$i.fa
done

mv $OUT_time ../results/ssm/gennonh_data/balanc4GenNonh/.
mv $OUT_lik ../results/ssm/gennonh_data/balanc4GenNonh/.
mv $OUT_iter ../results/ssm/gennonh_data/balanc4GenNonh/.
mv $OUT_nc ../results/ssm/gennonh_data/balanc4GenNonh/.
    Algebraicphylogenetics/Empar_paper
    scripts/process_balanced4_gennonh_ssm.sh
    Shell
    mit
    1,509
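The `grep | cut | xargs` pipeline above pulls single `Key: value` fields out of `out.txt`. The same extraction in Python, for readers less used to shell; this is a sketch, with the `Likelihood:`/`Iter:`/`Time:` line format inferred from the greps rather than from the `main` binary itself:

```python
# Extract "Key: value" fields from one run's out.txt, as the
# grep|cut|xargs pipeline does. Line format inferred from the script.
def extract(path, keys=("Likelihood", "Iter", "Time", "negative branches")):
    found = {}
    with open(path) as fh:
        for line in fh:
            for key in keys:
                if key in line and ":" in line:
                    found[key] = line.split(":", 1)[1].strip()
    return found

print(extract("out.txt"))
```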
    /* The MIT License (MIT) Copyright (c) 2014 Banbury & Play-Em Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ using UnityEngine; #if UNITY_EDITOR using UnityEditor; using System.IO; #endif namespace SpritesAndBones.Editor { [CustomEditor(typeof(Skin2D))] public class Skin2DEditor : UnityEditor.Editor { private Skin2D skin; private float baseSelectDistance = 0.1f; private float changedBaseSelectDistance = 0.1f; private int selectedIndex = -1; private Color handleColor = Color.green; private void OnEnable() { skin = (Skin2D)target; } public override void OnInspectorGUI() { DrawDefaultInspector(); EditorGUILayout.Separator(); if (GUILayout.Button("Toggle Mesh Outline")) { Skin2D.showMeshOutline = !Skin2D.showMeshOutline; } EditorGUILayout.Separator(); if (skin.GetComponent<SkinnedMeshRenderer>().sharedMesh != null && GUILayout.Button("Save as Prefab")) { skin.SaveAsPrefab(); } EditorGUILayout.Separator(); if (skin.GetComponent<SkinnedMeshRenderer>().sharedMesh != null && GUILayout.Button("Recalculate Bone Weights")) { skin.RecalculateBoneWeights(); } EditorGUILayout.Separator(); handleColor = EditorGUILayout.ColorField("Handle Color", handleColor); changedBaseSelectDistance = EditorGUILayout.Slider("Handle Size", baseSelectDistance, 0, 1); if (baseSelectDistance != changedBaseSelectDistance) { baseSelectDistance = changedBaseSelectDistance; EditorUtility.SetDirty(this); SceneView.RepaintAll(); } if (skin.GetComponent<SkinnedMeshRenderer>().sharedMesh != null && GUILayout.Button("Create Control Points")) { skin.CreateControlPoints(skin.GetComponent<SkinnedMeshRenderer>()); } if (skin.GetComponent<SkinnedMeshRenderer>().sharedMesh != null && GUILayout.Button("Reset Control Points")) { skin.ResetControlPointPositions(); } if (skin.points != null && skin.controlPoints != null && skin.controlPoints.Length > 0 && selectedIndex != -1 && GUILayout.Button("Reset Selected Control Point")) { if (skin.controlPoints[selectedIndex].originalPosition != skin.GetComponent<MeshFilter>().sharedMesh.vertices[selectedIndex]) { skin.controlPoints[selectedIndex].originalPosition = skin.GetComponent<MeshFilter>().sharedMesh.vertices[selectedIndex]; } skin.controlPoints[selectedIndex].ResetPosition(); skin.points.SetPoint(skin.controlPoints[selectedIndex]); } if (GUILayout.Button("Remove Control Points")) { skin.RemoveControlPoints(); } EditorGUILayout.Separator(); if (skin.GetComponent<SkinnedMeshRenderer>().sharedMesh != null && GUILayout.Button("Generate Mesh Asset")) { #if UNITY_EDITOR // Check if the Meshes directory exists, if not, create it. 
if (!Directory.Exists("Assets/Meshes")) { AssetDatabase.CreateFolder("Assets", "Meshes"); AssetDatabase.Refresh(); } Mesh mesh = new Mesh(); mesh.name = skin.GetComponent<SkinnedMeshRenderer>().sharedMesh.name.Replace(".SkinnedMesh", ".Mesh"); ; mesh.vertices = skin.GetComponent<SkinnedMeshRenderer>().sharedMesh.vertices; mesh.triangles = skin.GetComponent<SkinnedMeshRenderer>().sharedMesh.triangles; mesh.normals = skin.GetComponent<SkinnedMeshRenderer>().sharedMesh.normals; mesh.uv = skin.GetComponent<SkinnedMeshRenderer>().sharedMesh.uv; mesh.uv2 = skin.GetComponent<SkinnedMeshRenderer>().sharedMesh.uv2; mesh.bounds = skin.GetComponent<SkinnedMeshRenderer>().sharedMesh.bounds; ScriptableObjectUtility.CreateAsset(mesh, "Meshes/" + skin.gameObject.name + ".Mesh"); #endif } if (skin.GetComponent<SkinnedMeshRenderer>().sharedMaterial != null && GUILayout.Button("Generate Material Asset")) { #if UNITY_EDITOR Material material = new Material(skin.GetComponent<SkinnedMeshRenderer>().sharedMaterial); material.CopyPropertiesFromMaterial(skin.GetComponent<SkinnedMeshRenderer>().sharedMaterial); skin.GetComponent<SkinnedMeshRenderer>().sharedMaterial = material; if (!Directory.Exists("Assets/Materials")) { AssetDatabase.CreateFolder("Assets", "Materials"); AssetDatabase.Refresh(); } AssetDatabase.CreateAsset(material, "Assets/Materials/" + material.mainTexture.name + ".mat"); Debug.Log("Created material " + material.mainTexture.name + " for " + skin.gameObject.name); #endif } } private void OnSceneGUI() { if (skin != null && skin.GetComponent<SkinnedMeshRenderer>().sharedMesh != null && skin.controlPoints != null && skin.controlPoints.Length > 0 && skin.points != null) { Event e = Event.current; Handles.matrix = skin.transform.localToWorldMatrix; EditorGUI.BeginChangeCheck(); Ray r = HandleUtility.GUIPointToWorldRay(e.mousePosition); Vector2 mousePos = r.origin; float selectDistance = HandleUtility.GetHandleSize(mousePos) * baseSelectDistance; #region Draw vertex handles Handles.color = handleColor; for (int i = 0; i < skin.controlPoints.Length; i++) { if (Handles.Button(skin.points.GetPoint(skin.controlPoints[i]), Quaternion.identity, selectDistance, selectDistance, Handles.CircleCap)) { selectedIndex = i; } if (selectedIndex == i) { EditorGUI.BeginChangeCheck(); skin.controlPoints[i].position = Handles.DoPositionHandle(skin.points.GetPoint(skin.controlPoints[i]), Quaternion.identity); if (EditorGUI.EndChangeCheck()) { skin.points.SetPoint(skin.controlPoints[i]); Undo.RecordObject(skin, "Changed Control Point"); Undo.RecordObject(skin.points, "Changed Control Point"); EditorUtility.SetDirty(this); } } } #endregion Draw vertex handles } } } }
    Apelsin/UnitySpritesAndBones
    Assets/SpritesAndBones/Scripts/Editor/Skin2DEditor.cs
    C#
    mit
    8,335
package com.thilko.springdoc;

@SuppressWarnings("all")
public class CredentialsCode {

    Integer age;
    double anotherValue;

    public Integer getAge() {
        return age;
    }

    public void setAge(Integer age) {
        this.age = age;
    }

    public double getAnotherValue() {
        return anotherValue;
    }

    public void setAnotherValue(double anotherValue) {
        this.anotherValue = anotherValue;
    }
}
    thilko/gradle-springdoc-plugin
    src/test/java/com/thilko/springdoc/CredentialsCode.java
    Java
    mit
    435
<?php

use yii\helpers\Html;
use yii\grid\GridView;
use yii\widgets\Pjax;

/* @var $this yii\web\View */
/* @var $searchModel yii2learning\chartbuilder\models\DatasourceSearch */
/* @var $dataProvider yii\data\ActiveDataProvider */

$this->title = Yii::t('app', 'Datasources');
$this->params['breadcrumbs'][] = $this->title;
?>
<div class="datasource-index">

    <h1><?= Html::encode($this->title) ?></h1>

    <?= $this->render('/_menus') ?>

    <?php // echo $this->render('_search', ['model' => $searchModel]); ?>

    <p>
        <?= Html::a('<i class="glyphicon glyphicon-plus"></i> '.Yii::t('app', 'Create Datasource'), ['create'], ['class' => 'btn btn-success']) ?>
    </p>

    <?php Pjax::begin(); ?>

    <?= GridView::widget([
        'dataProvider' => $dataProvider,
        'filterModel' => $searchModel,
        'columns' => [
            ['class' => 'yii\grid\SerialColumn'],

            'name',
            // 'created_at',
            // 'updated_at',
            // 'created_by',
            'updated_by:dateTime',

            [
                'class' => 'yii\grid\ActionColumn',
                'options' => ['style' => 'width:150px;'],
                'buttonOptions' => ['class' => 'btn btn-default'],
                'template' => '<div class="btn-group btn-group-sm text-center" role="group">{view} {update} {delete} </div>',
            ]
        ],
    ]); ?>

    <?php Pjax::end(); ?>
</div>
    Yii2Learning/yii2-chart-builder
    views/datasource/index.php
    PHP
    mit
    1,401
module Web::Controllers::Books
  class Create
    include Web::Action

    expose :book

    params do
      param :book do
        param :title, presence: true
        param :author, presence: true
      end
    end

    def call(params)
      if params.valid?
        @book = BookRepository.create(Book.new(params[:book]))

        redirect_to routes.books_path
      end
    end
  end
end
    matiasleidemer/lotus-bookshelf
    apps/web/controllers/books/create.rb
    Ruby
    mit
    393
; idta.asm sets up all the interrupt entry points

extern default_handler
extern idt_ftoi

; error interrupt entry point, we need to only push the error code details to stack
%macro error_interrupt 1
global interrupt_handler_%1
interrupt_handler_%1:
    push dword %1
    jmp common_handler
%endmacro

; regular interrupt entry point, need to push interrupt number and other data
%macro regular_interrupt 1
global interrupt_handler_%1
interrupt_handler_%1:
    push dword 0
    push dword %1
    jmp common_handler
%endmacro

; common handler for all interrupts, saves all necessary stack data and calls our C interrupt handler
common_handler:
    push dword ds
    push dword es
    push dword fs
    push dword gs
    pusha

    call default_handler

    popa
    pop dword gs
    pop dword fs
    pop dword es
    pop dword ds
    add esp, 8
    iret

regular_interrupt 0
regular_interrupt 1
regular_interrupt 2
regular_interrupt 3
regular_interrupt 4
regular_interrupt 5
regular_interrupt 6
regular_interrupt 7
error_interrupt 8
regular_interrupt 9
error_interrupt 10
error_interrupt 11
error_interrupt 12
error_interrupt 13
error_interrupt 14
regular_interrupt 15
regular_interrupt 16
error_interrupt 17

%assign i 18
%rep 12
regular_interrupt i
%assign i i+1
%endrep

error_interrupt 30

%assign i 31
%rep 225
regular_interrupt i
%assign i i+1
%endrep

; interrupt setup, adds all of our interrupt handlers to the idt
global idtsetup
idtsetup:
%assign i 0
%rep 256
    push interrupt_handler_%[i]
    push i
    call idt_ftoi
    add esp, 8
%assign i i+1
%endrep
    ret
    MalcolmLorber/kernel
    src/idta.asm
    Assembly
    mit
    1,579
    // @flow (require('../../lib/git'): any).rebaseRepoMaster = jest.fn(); import { _clearCustomCacheDir as clearCustomCacheDir, _setCustomCacheDir as setCustomCacheDir, } from '../../lib/cacheRepoUtils'; import {copyDir, mkdirp} from '../../lib/fileUtils'; import {parseDirString as parseFlowDirString} from '../../lib/flowVersion'; import { add as gitAdd, commit as gitCommit, init as gitInit, setLocalConfig as gitConfig, } from '../../lib/git'; import {fs, path, child_process} from '../../lib/node'; import {getNpmLibDefs} from '../../lib/npm/npmLibDefs'; import {testProject} from '../../lib/TEST_UTILS'; import { _determineFlowVersion as determineFlowVersion, _installNpmLibDefs as installNpmLibDefs, _installNpmLibDef as installNpmLibDef, run, } from '../install'; const BASE_FIXTURE_ROOT = path.join(__dirname, '__install-fixtures__'); function _mock(mockFn) { return ((mockFn: any): JestMockFn<*, *>); } async function touchFile(filePath) { await fs.close(await fs.open(filePath, 'w')); } async function writePkgJson(filePath, pkgJson) { await fs.writeJson(filePath, pkgJson); } describe('install (command)', () => { describe('determineFlowVersion', () => { it('infers version from path if arg not passed', () => { return testProject(async ROOT_DIR => { const ARBITRARY_PATH = path.join(ROOT_DIR, 'some', 'arbitrary', 'path'); await Promise.all([ mkdirp(ARBITRARY_PATH), touchFile(path.join(ROOT_DIR, '.flowconfig')), writePkgJson(path.join(ROOT_DIR, 'package.json'), { name: 'test', devDependencies: { 'flow-bin': '^0.40.0', }, }), ]); const flowVer = await determineFlowVersion(ARBITRARY_PATH); expect(flowVer).toEqual({ kind: 'specific', ver: { major: 0, minor: 40, patch: 0, prerel: null, }, }); }); }); it('uses explicitly specified version', async () => { const explicitVer = await determineFlowVersion('/', '0.7.0'); expect(explicitVer).toEqual({ kind: 'specific', ver: { major: 0, minor: 7, patch: 0, prerel: null, }, }); }); it("uses 'v'-prefixed explicitly specified version", async () => { const explicitVer = await determineFlowVersion('/', 'v0.7.0'); expect(explicitVer).toEqual({ kind: 'specific', ver: { major: 0, minor: 7, patch: 0, prerel: null, }, }); }); }); describe('installNpmLibDefs', () => { const origConsoleError = console.error; beforeEach(() => { (console: any).error = jest.fn(); }); afterEach(() => { (console: any).error = origConsoleError; }); it('errors if unable to find a project root (.flowconfig)', () => { return testProject(async ROOT_DIR => { const result = await installNpmLibDefs({ cwd: ROOT_DIR, flowVersion: parseFlowDirString('flow_v0.40.0'), explicitLibDefs: [], libdefDir: 'flow-typed', verbose: false, overwrite: false, skip: false, ignoreDeps: [], useCacheUntil: 1000 * 60, }); expect(result).toBe(1); expect(_mock(console.error).mock.calls).toEqual([ [ 'Error: Unable to find a flow project in the current dir or any of ' + "it's parent dirs!\n" + 'Please run this command from within a Flow project.', ], ]); }); }); it( "errors if an explicitly specified libdef arg doesn't match npm " + 'pkgver format', () => { return testProject(async ROOT_DIR => { await touchFile(path.join(ROOT_DIR, '.flowconfig')); await writePkgJson(path.join(ROOT_DIR, 'package.json'), { name: 'test', devDependencies: { 'flow-bin': '^0.40.0', }, }); const result = await installNpmLibDefs({ cwd: ROOT_DIR, flowVersion: parseFlowDirString('flow_v0.40.0'), explicitLibDefs: ['INVALID'], libdefDir: 'flow-typed', verbose: false, overwrite: false, skip: false, ignoreDeps: [], useCacheUntil: 1000 * 60, }); 
expect(result).toBe(1); expect(_mock(console.error).mock.calls).toEqual([ [ 'ERROR: Package not found from package.json.\n' + 'Please specify version for the package in the format of `[email protected]`', ], ]); }); }, ); it('warns if 0 dependencies are found in package.json', () => { return testProject(async ROOT_DIR => { await Promise.all([ touchFile(path.join(ROOT_DIR, '.flowconfig')), writePkgJson(path.join(ROOT_DIR, 'package.json'), { name: 'test', }), ]); const result = await installNpmLibDefs({ cwd: ROOT_DIR, flowVersion: parseFlowDirString('flow_v0.40.0'), explicitLibDefs: [], libdefDir: 'flow-typed', verbose: false, overwrite: false, skip: false, ignoreDeps: [], useCacheUntil: 1000 * 60, }); expect(result).toBe(0); expect(_mock(console.error).mock.calls).toEqual([ ["No dependencies were found in this project's package.json!"], ]); }); }); }); describe('installNpmLibDef', () => { const FIXTURE_ROOT = path.join(BASE_FIXTURE_ROOT, 'installNpmLibDef'); const FIXTURE_FAKE_CACHE_REPO_DIR = path.join( FIXTURE_ROOT, 'fakeCacheRepo', ); const origConsoleLog = console.log; beforeEach(() => { (console: any).log = jest.fn(); }); afterEach(() => { (console: any).log = origConsoleLog; }); it('installs scoped libdefs within a scoped directory', () => { return testProject(async ROOT_DIR => { const FAKE_CACHE_DIR = path.join(ROOT_DIR, 'fakeCache'); const FAKE_CACHE_REPO_DIR = path.join(FAKE_CACHE_DIR, 'repo'); const FLOWPROJ_DIR = path.join(ROOT_DIR, 'flowProj'); const FLOWTYPED_DIR = path.join(FLOWPROJ_DIR, 'flow-typed', 'npm'); await Promise.all([mkdirp(FAKE_CACHE_REPO_DIR), mkdirp(FLOWTYPED_DIR)]); await Promise.all([ copyDir(FIXTURE_FAKE_CACHE_REPO_DIR, FAKE_CACHE_REPO_DIR), touchFile(path.join(FLOWPROJ_DIR, '.flowconfig')), writePkgJson(path.join(FLOWPROJ_DIR, 'package.json'), { name: 'test', devDependencies: { 'flow-bin': '^0.40.0', }, }), ]); await gitInit(FAKE_CACHE_REPO_DIR), await gitAdd(FAKE_CACHE_REPO_DIR, 'definitions'); await gitCommit(FAKE_CACHE_REPO_DIR, 'FIRST'); setCustomCacheDir(FAKE_CACHE_DIR); const availableLibDefs = await getNpmLibDefs( path.join(FAKE_CACHE_REPO_DIR, 'definitions'), ); await installNpmLibDef(availableLibDefs[0], FLOWTYPED_DIR, false); }); }); }); describe('end-to-end tests', () => { const FIXTURE_ROOT = path.join(BASE_FIXTURE_ROOT, 'end-to-end'); const FIXTURE_FAKE_CACHE_REPO_DIR = path.join( FIXTURE_ROOT, 'fakeCacheRepo', ); const origConsoleLog = console.log; const origConsoleError = console.error; beforeEach(() => { (console: any).log = jest.fn(); (console: any).error = jest.fn(); }); afterEach(() => { (console: any).log = origConsoleLog; (console: any).error = origConsoleError; }); async function fakeProjectEnv(runTest) { return await testProject(async ROOT_DIR => { const FAKE_CACHE_DIR = path.join(ROOT_DIR, 'fakeCache'); const FAKE_CACHE_REPO_DIR = path.join(FAKE_CACHE_DIR, 'repo'); const FLOWPROJ_DIR = path.join(ROOT_DIR, 'flowProj'); const FLOWTYPED_DIR = path.join(FLOWPROJ_DIR, 'flow-typed', 'npm'); await Promise.all([mkdirp(FAKE_CACHE_REPO_DIR), mkdirp(FLOWTYPED_DIR)]); await copyDir(FIXTURE_FAKE_CACHE_REPO_DIR, FAKE_CACHE_REPO_DIR); await gitInit(FAKE_CACHE_REPO_DIR), await Promise.all([ gitConfig(FAKE_CACHE_REPO_DIR, 'user.name', 'Test Author'), gitConfig(FAKE_CACHE_REPO_DIR, 'user.email', '[email protected]'), ]); await gitAdd(FAKE_CACHE_REPO_DIR, 'definitions'); await gitCommit(FAKE_CACHE_REPO_DIR, 'FIRST'); setCustomCacheDir(FAKE_CACHE_DIR); const origCWD = process.cwd; (process: any).cwd = () => FLOWPROJ_DIR; try { await runTest(FLOWPROJ_DIR); 
      } finally {
        (process: any).cwd = origCWD;
        clearCustomCacheDir();
      }
    });
  }

  it('installs available libdefs', () => {
    return fakeProjectEnv(async FLOWPROJ_DIR => {
      // Create some dependencies
      await Promise.all([
        touchFile(path.join(FLOWPROJ_DIR, '.flowconfig')),
        writePkgJson(path.join(FLOWPROJ_DIR, 'package.json'), {
          name: 'test',
          devDependencies: {
            'flow-bin': '^0.43.0',
          },
          dependencies: {
            foo: '1.2.3',
          },
        }),
        mkdirp(path.join(FLOWPROJ_DIR, 'node_modules', 'foo')),
        mkdirp(path.join(FLOWPROJ_DIR, 'node_modules', 'flow-bin')),
      ]);

      // Run the install command
      await run({
        overwrite: false,
        verbose: false,
        skip: false,
        ignoreDeps: [],
        explicitLibDefs: [],
      });

      // Installs libdefs
      expect(
        await Promise.all([
          fs.exists(
            path.join(FLOWPROJ_DIR, 'flow-typed', 'npm', 'flow-bin_v0.x.x.js'),
          ),
          fs.exists(
            path.join(FLOWPROJ_DIR, 'flow-typed', 'npm', 'foo_v1.x.x.js'),
          ),
        ]),
      ).toEqual([true, true]);

      // Signs installed libdefs
      const fooLibDefContents = await fs.readFile(
        path.join(FLOWPROJ_DIR, 'flow-typed', 'npm', 'foo_v1.x.x.js'),
        'utf8',
      );
      expect(fooLibDefContents).toContain('// flow-typed signature: ');
      expect(fooLibDefContents).toContain('// flow-typed version: ');
    });
  });

  it('installs available libdefs using PnP', () => {
    return fakeProjectEnv(async FLOWPROJ_DIR => {
      // Create some dependencies
      await Promise.all([
        touchFile(path.join(FLOWPROJ_DIR, '.flowconfig')),
        writePkgJson(path.join(FLOWPROJ_DIR, 'package.json'), {
          name: 'test',
          installConfig: {
            pnp: true,
          },
          devDependencies: {
            'flow-bin': '^0.43.0',
          },
          dependencies: {
            // Use local foo for initial install
            foo: 'file:./foo',
          },
        }),
        mkdirp(path.join(FLOWPROJ_DIR, 'foo')),
      ]);
      await writePkgJson(path.join(FLOWPROJ_DIR, 'foo/package.json'), {
        name: 'foo',
        version: '1.2.3',
      });

      // Yarn install so PnP file resolves to local foo
      await child_process.execP('yarn install', {cwd: FLOWPROJ_DIR});

      // Overwrite foo dep so it's like we installed from registry instead
      await writePkgJson(path.join(FLOWPROJ_DIR, 'package.json'), {
        name: 'test',
        installConfig: {
          pnp: true,
        },
        devDependencies: {
          'flow-bin': '^0.43.0',
        },
        dependencies: {
          foo: '1.2.3',
        },
      });

      // Run the install command
      await run({
        overwrite: false,
        verbose: false,
        skip: false,
        ignoreDeps: [],
        explicitLibDefs: [],
      });

      // Installs libdefs
      expect(
        await Promise.all([
          fs.exists(
            path.join(FLOWPROJ_DIR, 'flow-typed', 'npm', 'flow-bin_v0.x.x.js'),
          ),
          fs.exists(
            path.join(FLOWPROJ_DIR, 'flow-typed', 'npm', 'foo_v1.x.x.js'),
          ),
        ]),
      ).toEqual([true, true]);

      // Signs installed libdefs
      const fooLibDefRawContents = await fs.readFile(
        path.join(FLOWPROJ_DIR, 'flow-typed', 'npm', 'foo_v1.x.x.js'),
      );
      const fooLibDefContents = fooLibDefRawContents.toString();
      expect(fooLibDefContents).toContain('// flow-typed signature: ');
      expect(fooLibDefContents).toContain('// flow-typed version: ');
    });
  });

  it('ignores libdefs in dev, bundled, optional or peer dependencies when flagged', () => {
    return fakeProjectEnv(async FLOWPROJ_DIR => {
      // Create some dependencies
      await Promise.all([
        touchFile(path.join(FLOWPROJ_DIR, '.flowconfig')),
        writePkgJson(path.join(FLOWPROJ_DIR, 'package.json'), {
          name: 'test',
          devDependencies: {
            foo: '1.2.3',
          },
          peerDependencies: {
            'flow-bin': '^0.43.0',
          },
          optionalDependencies: {
            foo: '2.0.0',
          },
          bundledDependencies: {
            bar: '^1.6.9',
          },
          dependencies: {
            foo: '1.2.3',
          },
        }),
        mkdirp(path.join(FLOWPROJ_DIR, 'node_modules', 'foo')),
        mkdirp(path.join(FLOWPROJ_DIR, 'node_modules', 'flow-bin')),
        mkdirp(path.join(FLOWPROJ_DIR, 'node_modules', 'bar')),
      ]);

      // Run the install command
      await run({
        overwrite: false,
        verbose: false,
        skip: false,
        ignoreDeps: ['dev', 'optional', 'bundled'],
        explicitLibDefs: [],
      });

      // Installs libdefs only for the non-ignored dependencies
      expect(
        await Promise.all([
          fs.exists(
            path.join(FLOWPROJ_DIR, 'flow-typed', 'npm', 'flow-bin_v0.x.x.js'),
          ),
          fs.exists(
            path.join(FLOWPROJ_DIR, 'flow-typed', 'npm', 'foo_v1.x.x.js'),
          ),
          fs.exists(
            path.join(FLOWPROJ_DIR, 'flow-typed', 'npm', 'bar_v1.x.x.js'),
          ),
        ]),
      ).toEqual([true, true, false]);
    });
  });

  it('stubs unavailable libdefs', () => {
    return fakeProjectEnv(async FLOWPROJ_DIR => {
      // Create some dependencies
      await Promise.all([
        touchFile(path.join(FLOWPROJ_DIR, '.flowconfig')),
        writePkgJson(path.join(FLOWPROJ_DIR, 'package.json'), {
          name: 'test',
          devDependencies: {
            'flow-bin': '^0.43.0',
          },
          dependencies: {
            someUntypedDep: '1.2.3',
          },
        }),
        mkdirp(path.join(FLOWPROJ_DIR, 'node_modules', 'someUntypedDep')),
        mkdirp(path.join(FLOWPROJ_DIR, 'node_modules', 'flow-bin')),
      ]);

      // Run the install command
      await run({
        overwrite: false,
        verbose: false,
        skip: false,
        explicitLibDefs: [],
      });

      // Installs a stub for someUntypedDep
      expect(
        await fs.exists(
          path.join(FLOWPROJ_DIR, 'flow-typed', 'npm', 'someUntypedDep_vx.x.x.js'),
        ),
      ).toBe(true);
    });
  });

  it("doesn't stub unavailable libdefs when --skip is passed", () => {
    return fakeProjectEnv(async FLOWPROJ_DIR => {
      // Create some dependencies
      await Promise.all([
        touchFile(path.join(FLOWPROJ_DIR, '.flowconfig')),
        writePkgJson(path.join(FLOWPROJ_DIR, 'package.json'), {
          name: 'test',
          devDependencies: {
            'flow-bin': '^0.43.0',
          },
          dependencies: {
            someUntypedDep: '1.2.3',
          },
        }),
        mkdirp(path.join(FLOWPROJ_DIR, 'node_modules', 'someUntypedDep')),
        mkdirp(path.join(FLOWPROJ_DIR, 'node_modules', 'flow-bin')),
      ]);

      // Run the install command
      await run({
        overwrite: false,
        verbose: false,
        skip: true,
        explicitLibDefs: [],
      });

      // Doesn't install a stub for someUntypedDep; only the flow-typed
      // directory itself is created
      expect(
        await fs.exists(path.join(FLOWPROJ_DIR, 'flow-typed', 'npm')),
      ).toBe(true);
    });
  });

  it('overwrites stubs when libdef becomes available (with --overwrite)', () => {
    return fakeProjectEnv(async FLOWPROJ_DIR => {
      // Create some dependencies
      await Promise.all([
        touchFile(path.join(FLOWPROJ_DIR, '.flowconfig')),
        writePkgJson(path.join(FLOWPROJ_DIR, 'package.json'), {
          name: 'test',
          devDependencies: {
            'flow-bin': '^0.43.0',
          },
          dependencies: {
            foo: '1.2.3',
          },
        }),
        mkdirp(path.join(FLOWPROJ_DIR, 'node_modules', 'foo')),
        mkdirp(path.join(FLOWPROJ_DIR, 'node_modules', 'flow-bin')),
      ]);
      await fs.writeFile(
        path.join(FLOWPROJ_DIR, 'flow-typed', 'npm', 'foo_vx.x.x.js'),
        '',
      );

      // Run the install command
      await run({
        overwrite: true,
        verbose: false,
        skip: false,
        explicitLibDefs: [],
      });

      // Replaces the stub with the real typedef
      expect(
        await Promise.all([
          fs.exists(
            path.join(FLOWPROJ_DIR, 'flow-typed', 'npm', 'foo_vx.x.x.js'),
          ),
          fs.exists(
            path.join(FLOWPROJ_DIR, 'flow-typed', 'npm', 'foo_v1.x.x.js'),
          ),
        ]),
      ).toEqual([false, true]);
    });
  });

  it("doesn't overwrite tweaked libdefs (without --overwrite)", () => {
    return fakeProjectEnv(async FLOWPROJ_DIR => {
      // Create some dependencies
      await Promise.all([
        touchFile(path.join(FLOWPROJ_DIR, '.flowconfig')),
        writePkgJson(path.join(FLOWPROJ_DIR, 'package.json'), {
          name: 'test',
          devDependencies: {
            'flow-bin': '^0.43.0',
          },
          dependencies: {
            foo: '1.2.3',
          },
        }),
        mkdirp(path.join(FLOWPROJ_DIR, 'node_modules', 'foo')),
        mkdirp(path.join(FLOWPROJ_DIR, 'node_modules', 'flow-bin')),
      ]);

      // Run the install command
      await run({
        overwrite: false,
        verbose: false,
        skip: false,
        explicitLibDefs: [],
      });

      const libdefFilePath = path.join(
        FLOWPROJ_DIR,
        'flow-typed',
        'npm',
        'foo_v1.x.x.js',
      );

      // Tweak the libdef for foo
      const libdefFileContent =
        (await fs.readFile(libdefFilePath, 'utf8')) + '\n// TWEAKED!';
      await fs.writeFile(libdefFilePath, libdefFileContent);

      // Run install command again
      await run({
        overwrite: false,
        verbose: false,
        skip: false,
        explicitLibDefs: [],
      });

      // Verify that the tweaked libdef file wasn't overwritten
      expect(await fs.readFile(libdefFilePath, 'utf8')).toBe(
        libdefFileContent,
      );
    });
  });

  it('overwrites tweaked libdefs when --overwrite is passed', () => {
    return fakeProjectEnv(async FLOWPROJ_DIR => {
      // Create some dependencies
      await Promise.all([
        touchFile(path.join(FLOWPROJ_DIR, '.flowconfig')),
        writePkgJson(path.join(FLOWPROJ_DIR, 'package.json'), {
          name: 'test',
          devDependencies: {
            'flow-bin': '^0.43.0',
          },
          dependencies: {
            foo: '1.2.3',
          },
        }),
        mkdirp(path.join(FLOWPROJ_DIR, 'node_modules', 'foo')),
        mkdirp(path.join(FLOWPROJ_DIR, 'node_modules', 'flow-bin')),
      ]);

      // Run the install command
      await run({
        overwrite: false,
        verbose: false,
        skip: false,
        explicitLibDefs: [],
      });

      const libdefFilePath = path.join(
        FLOWPROJ_DIR,
        'flow-typed',
        'npm',
        'foo_v1.x.x.js',
      );

      // Tweak the libdef for foo
      const libdefFileContent = await fs.readFile(libdefFilePath, 'utf8');
      await fs.writeFile(libdefFilePath, libdefFileContent + '\n// TWEAKED!');

      // Run install command again
      await run({
        overwrite: true,
        skip: false,
        verbose: false,
        explicitLibDefs: [],
      });

      // Verify that the tweaked libdef file was overwritten with the
      // pristine upstream copy
      expect(await fs.readFile(libdefFilePath, 'utf8')).toBe(
        libdefFileContent,
      );
    });
  });

  it('uses flow-bin defined in another package.json', () => {
    return fakeProjectEnv(async FLOWPROJ_DIR => {
      // Create some dependencies
      await Promise.all([
        touchFile(path.join(FLOWPROJ_DIR, '.flowconfig')),
        writePkgJson(path.join(FLOWPROJ_DIR, 'package.json'), {
          name: 'test',
          dependencies: {
            foo: '1.2.3',
          },
        }),
        mkdirp(path.join(FLOWPROJ_DIR, 'node_modules', 'foo')),
        writePkgJson(path.join(FLOWPROJ_DIR, '..', 'package.json'), {
          name: 'parent',
          devDependencies: {
            'flow-bin': '^0.45.0',
          },
        }),
        mkdirp(path.join(FLOWPROJ_DIR, '..', 'node_modules', 'flow-bin')),
      ]);

      // Run the install command
      await run({
        overwrite: false,
        verbose: false,
        skip: false,
        packageDir: path.join(FLOWPROJ_DIR, '..'),
        explicitLibDefs: [],
      });

      // Installs libdef
      expect(
        await fs.exists(
          path.join(FLOWPROJ_DIR, 'flow-typed', 'npm', 'foo_v1.x.x.js'),
        ),
      ).toEqual(true);
    });
  });

  it('uses .flowconfig from specified root directory', () => {
    return fakeProjectEnv(async FLOWPROJ_DIR => {
      // Create some dependencies
      await Promise.all([
        mkdirp(path.join(FLOWPROJ_DIR, 'src')),
        writePkgJson(path.join(FLOWPROJ_DIR, 'package.json'), {
          name: 'test',
          devDependencies: {
            'flow-bin': '^0.43.0',
          },
          dependencies: {
            foo: '1.2.3',
          },
        }),
        mkdirp(path.join(FLOWPROJ_DIR, 'node_modules', 'foo')),
        mkdirp(path.join(FLOWPROJ_DIR, 'node_modules', 'flow-bin')),
      ]);
      await touchFile(path.join(FLOWPROJ_DIR, 'src', '.flowconfig'));

      // Run the install command
      await run({
        overwrite: false,
        verbose: false,
        skip: false,
        rootDir: path.join(FLOWPROJ_DIR, 'src'),
        explicitLibDefs: [],
      });

      // Installs libdef
      expect(
        await fs.exists(
          path.join(FLOWPROJ_DIR, 'src', 'flow-typed', 'npm', 'foo_v1.x.x.js'),
        ),
      ).toEqual(true);
    });
  });
});
});
    splodingsocks/FlowTyped
    cli/src/commands/__tests__/install-test.js
    JavaScript
    mit
    23,904
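The tests above pin down flow-typed's libdef naming convention: a typed dependency such as foo@1.2.3 resolves to foo_v1.x.x.js, while a package with no published libdef gets a fully wildcarded stub like someUntypedDep_vx.x.x.js. A minimal sketch of that mapping (an illustration only; libdefFileName is a hypothetical helper, not flow-typed's actual implementation):

// Hypothetical helper illustrating the file names asserted on above.
function libdefFileName(pkgName, version) {
  if (version) {
    // Typed package: keep the major version, wildcard the rest.
    const major = version.replace(/^[^0-9]*/, '').split('.')[0];
    return `${pkgName}_v${major}.x.x.js`;
  }
  // No known libdef: stub with every segment wildcarded.
  return `${pkgName}_vx.x.x.js`;
}

console.log(libdefFileName('foo', '1.2.3'));        // foo_v1.x.x.js
console.log(libdefFileName('flow-bin', '^0.43.0')); // flow-bin_v0.x.x.js
console.log(libdefFileName('someUntypedDep'));      // someUntypedDep_vx.x.x.js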
    <!DOCTYPE html> <html lang="en"> <head> <meta http-equiv="refresh" content="0;URL=../../openssl_sys/fn.BN_exp.html"> </head> <body> <p>Redirecting to <a href="../../openssl_sys/fn.BN_exp.html">../../openssl_sys/fn.BN_exp.html</a>...</p> <script>location.replace("../../openssl_sys/fn.BN_exp.html" + location.search + location.hash);</script> </body> </html>
    malept/guardhaus
    main/openssl_sys/bn/fn.BN_exp.html
    HTML
    mit
    369
    import React, { Component } from 'react' import PropTypes from 'prop-types' import { assign } from 'lodash' import autoBind from '../utils/autoBind' const styles = { 'ClosedPanelWrapper': { height: '40px' }, 'PanelWrapper': { position: 'relative' }, 'Over': { border: '1px dashed white', overflowY: 'hidden' }, 'PanelTitle': { width: '100%', height: '40px', lineHeight: '40px', backgroundColor: '#000', color: '#fff', paddingLeft: '10px', position: 'relative', whiteSpace: 'nowrap', overflowX: 'hidden', textOverflow: 'ellipsis', paddingRight: '8px', cursor: 'pointer', WebkitUserSelect: 'none', userSelect: 'none' }, 'Handle': { cursor: '-webkit-grab', position: 'absolute', zIndex: '2', color: 'white', right: '10px', fontSize: '16px', top: '12px' }, 'OpenPanel': { position: 'relative', zIndex: '2', top: '0', left: '0', padding: '7px', paddingTop: '5px', maxHeight: '30%', display: 'block' }, 'ClosedPanel': { height: '0', position: 'relative', zIndex: '2', top: '-1000px', left: '0', overflow: 'hidden', maxHeight: '0', display: 'none' } } class Panel extends Component { constructor() { super() this.state = { dragIndex: null, overIndex: null, isOver: false } autoBind(this, [ 'handleTitleClick', 'handleDragStart', 'handleDragOver', 'handleDragEnter', 'handleDragLeave', 'handleDrop', 'handleDragEnd' ]) } handleTitleClick() { const { index, isOpen, openPanel } = this.props openPanel(isOpen ? -1 : index) } handleDragStart(e) { // e.target.style.opacity = '0.4'; // this / e.target is the source node. e.dataTransfer.setData('index', e.target.dataset.index) } handleDragOver(e) { if (e.preventDefault) { e.preventDefault() // Necessary. Allows us to drop. } return false } handleDragEnter(e) { const overIndex = e.target.dataset.index if (e.dataTransfer.getData('index') !== overIndex) { // e.target.classList.add('Over') // e.target is the current hover target. this.setState({ isOver: true }) } } handleDragLeave() { this.setState({ isOver: false }) // e.target.classList.remove('Over') // e.target is previous target element. } handleDrop(e) { if (e.stopPropagation) { e.stopPropagation() // stops the browser from redirecting. } const dragIndex = e.dataTransfer.getData('index') const dropIndex = this.props.index.toString() if (dragIndex !== dropIndex) { this.props.reorder(dragIndex, dropIndex) } return false } handleDragEnd() { this.setState({ isOver: false, dragIndex: null, overIndex: null }) } render() { const { isOpen, orderable } = this.props const { isOver } = this.state return ( <div style={assign({}, styles.PanelWrapper, isOpen ? {} : styles.ClosedPanelWrapper, isOver ? styles.Over : {})} onDragStart={this.handleDragStart} onDragEnter={this.handleDragEnter} onDragOver={this.handleDragOver} onDragLeave={this.handleDragLeave} onDrop={this.handleDrop} onDragEnd={this.handleDragEnd} > <div style={styles.PanelTitle} onClick={this.handleTitleClick} draggable={orderable} data-index={this.props.index} > {this.props.header} {orderable && (<i className="fa fa-th" style={styles.Handle}></i>)} </div> { isOpen && ( <div style={isOpen ? styles.OpenPanel : styles.ClosedPanel}> {this.props.children} </div> ) } </div> ) } } Panel.propTypes = { children: PropTypes.any, index: PropTypes.any, openPanel: PropTypes.func, isOpen: PropTypes.any, header: PropTypes.any, orderable: PropTypes.any, reorder: PropTypes.func } Panel.defaultProps = { isOpen: false, header: '', orderable: false } export default Panel
    jcgertig/react-struct-editor
    src/components/Panel.js
    JavaScript
    mit
    4,120
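Panel.js leaves the actual list mutation to a reorder(dragIndex, dropIndex) prop supplied by the parent, and note that it passes the indices as strings because they round-trip through dataTransfer. A sketch of what such a callback typically does (assumed shape, not code from this repo):

// Hypothetical parent-side reorder: move the dragged panel to the drop slot.
function reorder(panels, dragIndex, dropIndex) {
  const next = panels.slice();                        // copy, don't mutate state
  const [moved] = next.splice(Number(dragIndex), 1);  // remove the dragged item
  next.splice(Number(dropIndex), 0, moved);           // insert at the drop position
  return next;
}

console.log(reorder(['a', 'b', 'c'], '0', '2'));      // ['b', 'c', 'a']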
'use strict';

// src\services\message\hooks\timestamp.js
//
// Use this hook to manipulate incoming or outgoing data.
// For more information on hooks see: http://docs.feathersjs.com/hooks/readme.html

const defaults = {};

module.exports = function(options) {
  options = Object.assign({}, defaults, options);

  return function(hook) {
    const usr = hook.params.user;
    const txt = hook.data.text;

    hook.data = {
      text: txt,
      createdBy: usr._id,
      createdAt: Date.now()
    };
  };
};
    zorqie/bfests
    src/services/message/hooks/timestamp.js
    JavaScript
    mit
    488
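Hooks like this one are registered per service and per method. A sketch of how the hook above would typically be wired into the messages service (service name and paths assumed, following the usual Feathers configure-function idiom):

// Hypothetical service setup registering the timestamp hook before create.
const timestamp = require('./hooks/timestamp');

module.exports = function () {
  const app = this; // the Feathers app

  app.service('messages').hooks({
    before: {
      create: [timestamp()], // stamps createdBy/createdAt before the save
    },
  });
};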
    /* --------------------------------------------------------------------------- Open Asset Import Library (assimp) --------------------------------------------------------------------------- Copyright (c) 2006-2021, assimp team All rights reserved. Redistribution and use of this software in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the assimp team, nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission of the assimp team. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. --------------------------------------------------------------------------- */ /** @file Bitmap.h * @brief Defines bitmap format helper for textures * * Used for file formats which embed their textures into the model file. */ #pragma once #ifndef AI_BITMAP_H_INC #define AI_BITMAP_H_INC #ifdef __GNUC__ # pragma GCC system_header #endif #include "defs.h" #include <stdint.h> #include <cstddef> struct aiTexture; namespace Assimp { class IOStream; class ASSIMP_API Bitmap { protected: struct Header { uint16_t type; uint32_t size; uint16_t reserved1; uint16_t reserved2; uint32_t offset; // We define the struct size because sizeof(Header) might return a wrong result because of structure padding. // Moreover, we must use this ugly and error prone syntax because Visual Studio neither support constexpr or sizeof(name_of_field). static const std::size_t header_size = sizeof(uint16_t) + // type sizeof(uint32_t) + // size sizeof(uint16_t) + // reserved1 sizeof(uint16_t) + // reserved2 sizeof(uint32_t); // offset }; struct DIB { uint32_t size; int32_t width; int32_t height; uint16_t planes; uint16_t bits_per_pixel; uint32_t compression; uint32_t image_size; int32_t x_resolution; int32_t y_resolution; uint32_t nb_colors; uint32_t nb_important_colors; // We define the struct size because sizeof(DIB) might return a wrong result because of structure padding. // Moreover, we must use this ugly and error prone syntax because Visual Studio neither support constexpr or sizeof(name_of_field). 
static const std::size_t dib_size = sizeof(uint32_t) + // size sizeof(int32_t) + // width sizeof(int32_t) + // height sizeof(uint16_t) + // planes sizeof(uint16_t) + // bits_per_pixel sizeof(uint32_t) + // compression sizeof(uint32_t) + // image_size sizeof(int32_t) + // x_resolution sizeof(int32_t) + // y_resolution sizeof(uint32_t) + // nb_colors sizeof(uint32_t); // nb_important_colors }; static const std::size_t mBytesPerPixel = 4; public: static void Save(aiTexture* texture, IOStream* file); protected: static void WriteHeader(Header& header, IOStream* file); static void WriteDIB(DIB& dib, IOStream* file); static void WriteData(aiTexture* texture, IOStream* file); }; } #endif // AI_BITMAP_H_INC
    andrerogers/Enjin
    src/includes/assimp/Bitmap.h
    C
    mit
    4,360
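The hand-summed header_size (14 bytes) and dib_size (40 bytes) match the classic BITMAPFILEHEADER and BITMAPINFOHEADER layouts; the manual summation exists only because sizeof on the padded structs could report a larger value. For comparison, a sketch of the same two headers written field by field in JavaScript with a DataView, where padding never enters the picture (illustrative only, not part of assimp):

// Write BMP file header (14 bytes) + DIB header (40 bytes), little-endian.
function writeBmpHeaders(width, height, imageSize) {
  const buf = new ArrayBuffer(14 + 40);
  const v = new DataView(buf);
  // File header: type, size, reserved1, reserved2, offset
  v.setUint16(0, 0x4d42, true);               // 'BM'
  v.setUint32(2, 14 + 40 + imageSize, true);  // total file size
  v.setUint32(10, 14 + 40, true);             // pixel data offset (reserved fields stay 0)
  // DIB header: size, width, height, planes, bpp, compression, image size, ...
  v.setUint32(14, 40, true);                  // DIB header size
  v.setInt32(18, width, true);
  v.setInt32(22, height, true);
  v.setUint16(26, 1, true);                   // planes
  v.setUint16(28, 32, true);                  // bits per pixel (4 bytes/px, as in Bitmap.h)
  v.setUint32(34, imageSize, true);           // image size; compression at offset 30 stays 0 (BI_RGB)
  return buf;                                 // resolutions/color counts left at 0
}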
    # LeadifyTest
    JomoLumina/LeadifyTest
    README.md
    Markdown
    mit
    13
package fr.lteconsulting.pomexplorer.commands;

import fr.lteconsulting.pomexplorer.AppFactory;
import fr.lteconsulting.pomexplorer.Client;
import fr.lteconsulting.pomexplorer.Log;

public class HelpCommand
{
    @Help( "gives this message" )
    public void main( Client client, Log log )
    {
        log.html( AppFactory.get().commands().help() );
    }
}
    ltearno/pom-explorer
    pom-explorer/src/main/java/fr/lteconsulting/pomexplorer/commands/HelpCommand.java
    Java
    mit
    342
//
//  DORDoneHUD.h
//  DORDoneHUD
//
//  Created by Pawel Bednorz on 23/09/15.
//  Copyright © 2015 Droids on Roids. All rights reserved.
//

#import <UIKit/UIKit.h>

@interface DORDoneHUD : NSObject

+ (void)show:(UIView *)view message:(NSString *)messageText completion:(void (^)(void))completionBlock;
+ (void)show:(UIView *)view message:(NSString *)messageText;
+ (void)show:(UIView *)view;

@end
    DroidsOnRoids/DORDoneHUD
    Source/DORDoneHUD.h
    C
    mit
    401
namespace CAAssistant.Models
{
    public class ClientFileViewModel
    {
        public ClientFileViewModel()
        {
        }

        public ClientFileViewModel(ClientFile clientFile)
        {
            Id = clientFile.Id;
            FileNumber = clientFile.FileNumber;
            ClientName = clientFile.ClientName;
            ClientContactPerson = clientFile.ClientContactPerson;
            AssociateReponsible = clientFile.AssociateReponsible;
            CaSign = clientFile.CaSign;
            DscExpiryDate = clientFile.DscExpiryDate;
            FileStatus = clientFile.FileStatus;
        }

        public string Id { get; set; }
        public int FileNumber { get; set; }
        public string ClientName { get; set; }
        public string ClientContactPerson { get; set; }
        public string AssociateReponsible { get; set; }
        public string CaSign { get; set; }
        public string DscExpiryDate { get; set; }
        public string FileStatus { get; set; }
        public string UserName { get; set; }
        public FileStatusModification InitialFileStatus { get; set; }
    }
}
    vishipayyallore/CAAssitant
    CAAssistant/Models/ClientFileViewModel.cs
    C#
    mit
    1,131
    <?php /**************************************************************************** * todoyu is published under the BSD License: * http://www.opensource.org/licenses/bsd-license.php * * Copyright (c) 2013, snowflake productions GmbH, Switzerland * All rights reserved. * * This script is part of the todoyu project. * The todoyu project is free software; you can redistribute it and/or modify * it under the terms of the BSD License. * * This script is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the BSD License * for more details. * * This copyright notice MUST APPEAR in all copies of the script. *****************************************************************************/ /** * Task asset object * * @package Todoyu * @subpackage Assets */ class TodoyuAssetsTaskAsset extends TodoyuAssetsAsset { /** * Get task ID * * @return Integer */ public function getTaskID() { return $this->getParentID(); } /** * Get task object * * @return Task */ public function getTask() { return TodoyuProjectTaskManager::getTask($this->getTaskID()); } } ?>
    JoAutomation/todo-for-you
    ext/assets/model/TodoyuAssetsTaskAsset.class.php
    PHP
    mit
    1,210
//------------------------------------------------------------------------------
// <auto-generated>
//     This code was generated by a tool.
//
//     Changes to this file may cause incorrect behavior and will be lost if
//     the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------

namespace NewsSystem.Web.Admin
{
    public partial class Edit
    {
    }
}
    MystFan/TelerikAcademy
    ASP.NET WebForms/NewsSystem/NewsSystem.Web/Admin/Edit.aspx.designer.cs
    C#
    mit
    440
---
title: Stylesheets and JavaScript - Fabricator
layout: 2-column
section: Documentation
---

{{#markdown}}

# Stylesheets and JavaScript

> How to work with CSS and JS within Fabricator

Fabricator comes with little opinion about how you should architect your stylesheets and JavaScript. Each use case is different, so it's up to you to define what works best.

Out of the box, you'll find a single `.scss` and `.js` file. These are the entry points for Sass compilation and Webpack respectively. It is recommended that you leverage the module-importing features of each preprocessor to compile your toolkit down to a single `.css` and `.js` file. Practically speaking, you should be able to drop these two files into any application and have full access to your entire toolkit.

{{/markdown}}
    fbrctr/fbrctr.github.io
    src/views/docs/building-a-toolkit/assets.html
    HTML
    mit
    797
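In practice the "single entry point" the docs describe is just a file of imports that Webpack (and Sass) roll up into one artifact. A hypothetical toolkit.js entry of that kind (module paths invented for illustration):

// toolkit.js — one entry point; Webpack emits a single bundled toolkit file.
import './components/modal';
import './components/tabs';

// With an appropriate loader configured, styles can be tracked from the same
// entry; otherwise toolkit.scss is compiled separately by Sass.
import '../styles/toolkit.scss';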
    <?php namespace IdeHelper\Test\TestCase\Utility; use Cake\Core\Configure; use Cake\TestSuite\TestCase; use IdeHelper\Utility\Plugin; class PluginTest extends TestCase { /** * @return void */ protected function setUp(): void { parent::setUp(); Configure::delete('IdeHelper.plugins'); } /** * @return void */ protected function tearDown(): void { parent::tearDown(); Configure::delete('IdeHelper.plugins'); } /** * @return void */ public function testAll() { $result = Plugin::all(); $this->assertArrayHasKey('IdeHelper', $result); $this->assertArrayHasKey('Awesome', $result); $this->assertArrayHasKey('MyNamespace/MyPlugin', $result); $this->assertArrayNotHasKey('FooBar', $result); Configure::write('IdeHelper.plugins', ['FooBar', '-MyNamespace/MyPlugin']); $result = Plugin::all(); $this->assertArrayHasKey('FooBar', $result); $this->assertArrayNotHasKey('MyNamespace/MyPlugin', $result); } }
    dereuromark/cakephp-ide-helper
    tests/TestCase/Utility/PluginTest.php
    PHP
    mit
    953
# Hubot: hubot-loggly-slack

A hubot script to post alerts from Loggly into a Slack room as an attachment. An attachment has additional formatting options.

See [`src/loggly-slack.coffee`](src/loggly-slack.coffee) for documentation.

# Installation

    npm install hubot-loggly-slack

    # Add "hubot-loggly-slack" to external-scripts.json

# Other hubot slack modules

https://github.com/spanishdict/hubot-awssns-slack
https://github.com/spanishdict/hubot-loggly-slack
https://github.com/spanishdict/hubot-scoutapp-slack
    spanishdict/hubot-loggly-slack
    README.md
    Markdown
    mit
    526
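For orientation, a sketch (in plain JavaScript rather than the package's CoffeeScript) of what a script like this does: accept the Loggly alert webhook and relay it into Slack as an attachment. The route, room, and payload field names here are invented; see src/loggly-slack.coffee for the real ones:

// Hypothetical hubot script: Loggly webhook in, Slack attachment out.
module.exports = (robot) => {
  robot.router.post('/hubot/loggly-alert', (req, res) => {
    const alert = req.body;
    robot.messageRoom('#ops', {
      attachments: [{
        fallback: alert.alert_name,
        title: alert.alert_name,
        text: alert.alert_description,
        color: 'danger',
      }],
    });
    res.send('OK');
  });
};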
    <?php /** * The Initial Developer of the Original Code is * Tarmo Alexander Sundström <[email protected]>. * * Portions created by the Initial Developer are * Copyright (C) 2014 Tarmo Alexander Sundström <[email protected]> * * All Rights Reserved. * * Contributor(s): * * Permission is hereby granted, free of charge, to any person obtaining a * copy of this software and associated documentation files (the "Software"), * to deal in the Software without restriction, including without limitation * the rights to use, copy, modify, merge, publish, distribute, sublicense, * and/or sell copies of the Software, and to permit persons to whom the * Software is furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included * in all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS * IN THE SOFTWARE. */ namespace Webvaloa; use Libvaloa\Db; use RuntimeException; /** * Manage and run plugins. */ class Plugin { private $db; private $plugins; private $runnablePlugins; private $plugin; // Objects that plugins can access public $_properties; public $ui; public $controller; public $request; public $view; public $xhtml; public static $properties = array( // Vendor tag 'vendor' => 'ValoaApplication', // Events 'events' => array( 'onAfterFrontControllerInit', 'onBeforeController', 'onAfterController', 'onBeforeRender', 'onAfterRender', ), // Skip plugins in these controllers 'skipControllers' => array( 'Setup', ), ); public function __construct($plugin = false) { $this->plugin = $plugin; $this->event = false; $this->plugins = false; $this->runnablePlugins = false; // Plugins can access and modify these $this->_properties = false; $this->ui = false; $this->controller = false; $this->request = false; $this->view = false; $this->xhtml = false; try { $this->db = \Webvaloa\Webvaloa::DBConnection(); } catch (Exception $e) { } } public function setEvent($e) { if (in_array($e, self::$properties['events'])) { $this->event = $e; } } public function plugins() { if (!method_exists($this->db, 'prepare')) { // Just bail out return false; } if (method_exists($this->request, 'getMainController') && (in_array($this->request->getMainController(), self::$properties['skipControllers']))) { return false; } $query = ' SELECT id, plugin, system_plugin FROM plugin WHERE blocked = 0 ORDER BY ordering ASC'; try { $stmt = $this->db->prepare($query); $stmt->execute(); $this->plugins = $stmt->fetchAll(); return $this->plugins; } catch (PDOException $e) { } } public function pluginExists($name) { $name = trim($name); foreach ($this->plugins as $k => $plugin) { if ($plugin->plugin == $name) { return true; } } return false; } public function hasRunnablePlugins() { // Return runnable plugins if we already gathered them if ($this->runnablePlugins) { return $this->runnablePlugins; } if (!$this->request) { throw new RuntimeException('Instance of request is required'); } if (in_array($this->request->getMainController(), self::$properties['skipControllers'])) { return false; } // Load plugins if (!$this->plugins) { 
$this->plugins(); } if (!is_array($this->plugins)) { return false; } $controller = $this->request->getMainController(); // Look for executable plugins foreach ($this->plugins as $k => $plugin) { if ($controller && strpos($plugin->plugin, $controller) === false && strpos($plugin->plugin, 'Plugin') === false) { continue; } $this->runnablePlugins[] = $plugin; } return (bool) ($this->runnablePlugins && !empty($this->runnablePlugins)) ? $this->runnablePlugins : false; } public function runPlugins() { if (!$this->runnablePlugins || empty($this->runnablePlugins)) { return false; } $e = $this->event; foreach ($this->runnablePlugins as $k => $v) { $p = '\\'.self::$properties['vendor'].'\Plugins\\'.$v->plugin.'Plugin'; $plugin = new $p(); $plugin->view = &$this->view; $plugin->ui = &$this->ui; $plugin->request = &$this->request; $plugin->controller = &$this->controller; $plugin->xhtml = &$this->xhtml; $plugin->_properties = &$this->_properties; if (method_exists($plugin, $e)) { $plugin->{$e}(); } } } public static function getPluginStatus($pluginID) { $query = ' SELECT blocked FROM plugin WHERE system_plugin = 0 AND id = ?'; try { $db = \Webvaloa\Webvaloa::DBConnection(); $stmt = $db->prepare($query); $stmt->set((int) $pluginID); $stmt->execute(); $row = $stmt->fetch(); if (isset($row->blocked)) { return $row->blocked; } return false; } catch (PDOException $e) { } } public static function setPluginStatus($pluginID, $status = 0) { $query = ' UPDATE plugin SET blocked = ? WHERE id = ?'; try { $db = \Webvaloa\Webvaloa::DBConnection(); $stmt = $db->prepare($query); $stmt->set((int) $status); $stmt->set((int) $pluginID); $stmt->execute(); } catch (PDOException $e) { } } public static function setPluginOrder($pluginID, $ordering = 0) { $query = ' UPDATE plugin SET ordering = ? WHERE id = ?'; try { $db = \Webvaloa\Webvaloa::DBConnection(); $stmt = $db->prepare($query); $stmt->set((int) $ordering); $stmt->set((int) $pluginID); $stmt->execute(); } catch (PDOException $e) { } } public function install() { if (!$this->plugin) { return false; } $installable = $this->discover(); if (!in_array($this->plugin, $installable)) { return false; } $db = \Webvaloa\Webvaloa::DBConnection(); // Install plugin $object = new Db\Object('plugin', $db); $object->plugin = $this->plugin; $object->system_plugin = 0; $object->blocked = 0; $object->ordering = 1; $id = $object->save(); return $id; } public function uninstall() { if (!$this->plugin) { return false; } $db = \Webvaloa\Webvaloa::DBConnection(); $query = ' DELETE FROM plugin WHERE system_plugin = 0 AND plugin = ?'; $stmt = $db->prepare($query); try { $stmt->set($this->plugin); $stmt->execute(); return true; } catch (Exception $e) { } return false; } public function discover() { // Installed plugins $tmp = $this->plugins(); foreach ($tmp as $v => $plugin) { $plugins[] = $plugin->plugin; } // Discovery paths $paths[] = LIBVALOA_INSTALLPATH.DIRECTORY_SEPARATOR.self::$properties['vendor'].DIRECTORY_SEPARATOR.'Plugins'; $paths[] = LIBVALOA_EXTENSIONSPATH.DIRECTORY_SEPARATOR.self::$properties['vendor'].DIRECTORY_SEPARATOR.'Plugins'; $skip = array( '.', '..', ); $plugins = array_merge($plugins, $skip); // Look for new plugins foreach ($paths as $path) { if ($handle = opendir($path)) { while (false !== ($entry = readdir($handle))) { if ($entry == '.' 
|| $entry == '..') { continue; } if (substr($entry, -3) != 'php') { continue; } $pluginName = str_replace('Plugin.php', '', $entry); if (!isset($installablePlugins)) { $installablePlugins = array(); } if (!in_array($pluginName, $plugins) && !in_array($pluginName, $installablePlugins)) { $installablePlugins[] = $pluginName; } } closedir($handle); } } if (isset($installablePlugins)) { return $installablePlugins; } return array(); } }
    lahdekorpi/webvaloa
    vendor/Webvaloa/Plugin.php
    PHP
    mit
    9,863
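Stripped of the persistence and discovery details, the core of Plugin.php is an event dispatch loop: for each runnable plugin, instantiate it, hand it the shared context, and call the method matching the current lifecycle event if the plugin defines one. The same pattern in a compact JavaScript sketch (names assumed):

// Dispatch one lifecycle event (e.g. 'onBeforeRender') to every plugin.
function runPlugins(pluginClasses, event, context) {
  for (const PluginClass of pluginClasses) {
    const plugin = new PluginClass();
    Object.assign(plugin, context); // share request/view/controller references
    if (typeof plugin[event] === 'function') {
      plugin[event]();
    }
  }
}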
const electron = window.require('electron');
const events = window.require('events');

const { ipcRenderer } = electron;
const { EventEmitter } = events;

class Emitter extends EventEmitter {}
window.Events = new Emitter();

module.exports = () => {
  // Load persisted settings, falling back to (and persisting) the defaults
  // on first run. Parse only the stored string; the defaults are already an
  // object, so passing them through JSON.parse would throw.
  const stored = window.localStorage.getItem('settings');
  let settings;

  if (stored === null) {
    settings = {
      general: { launch: true, clipboard: true },
      images: { copy: false, delete: true },
      notifications: { enabled: true }
    };
    window.localStorage.setItem('settings', JSON.stringify(settings));
  } else {
    settings = JSON.parse(stored);
  }

  ipcRenderer.send('settings', settings);
};
    vevix/focus
    app/js/init.js
    JavaScript
    mit
    740
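The renderer above pushes the (possibly freshly defaulted) settings to the main process over IPC. The receiving side would look roughly like this (sketch; what the handler does with the settings is assumed):

// Main process: receive the settings object sent by the renderer on startup.
const { ipcMain } = require('electron');

ipcMain.on('settings', (event, settings) => {
  // e.g. apply launch-at-login, clipboard watching, notification preferences
  console.log('renderer settings:', settings);
});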
---
title: 'Production applications updated 9.1.2018 10:05 - 10:54'
lang: en
ref: 2018-01-09-release
image:
published: true
categories: en News
traffictypes:
  - Road
tags:
  - APIs
  - Admin
---

Digitraffic production applications have been updated. Changelog:

TIE

- DPO-336 - The LAM binary data stream splits in two as of LOTJU version 2.5
  - Does not affect the data format. Real-time station data is now fresher.
- DPO-399 - CameraStationsStatusMetadataUpdateJob does not handle obsolete data correctly

We apologize for any inconvenience.
    lapintom/digitraffic
    _posts/2018-01-09-release-en.md
    Markdown
    mit
    549
    using System; using System.Collections.Generic; using System.Linq; using BohFoundation.ApplicantsRepository.Repositories.Implementations; using BohFoundation.AzureStorage.TableStorage.Implementations.Essay.Entities; using BohFoundation.AzureStorage.TableStorage.Interfaces.Essay; using BohFoundation.AzureStorage.TableStorage.Interfaces.Essay.Helpers; using BohFoundation.Domain.Dtos.Applicant.Essay; using BohFoundation.Domain.Dtos.Applicant.Notifications; using BohFoundation.Domain.Dtos.Common.AzureQueuryObjects; using BohFoundation.Domain.EntityFrameworkModels.Applicants; using BohFoundation.Domain.EntityFrameworkModels.Common; using BohFoundation.Domain.EntityFrameworkModels.Persons; using BohFoundation.EntityFrameworkBaseClass; using BohFoundation.TestHelpers; using EntityFramework.Extensions; using FakeItEasy; using Microsoft.VisualStudio.TestTools.UnitTesting; namespace BohFoundation.ApplicantsRepository.Tests.IntegrationTests { [TestClass] public class ApplicantsEssayRepositoryIntegrationTests { private static IEssayRowKeyGenerator _rowKeyGenerator; private static IAzureEssayRepository _azureAzureEssayRepository; private static ApplicantsEssayRepository _applicantsEssayRepository; private static ApplicantsesNotificationRepository _applicantsesNotification; [ClassInitialize] public static void InitializeClass(TestContext ctx) { Setup(); FirstTestOfNotifications(); FirstUpsert(); SecondUpsert(); SecondTestOfNotifications(); } #region SettingUp private static void Setup() { TestHelpersCommonFields.InitializeFields(); TestHelpersCommonFakes.InitializeFakes(); ApplicantsGuid = Guid.NewGuid(); Prompt = "prompt" + ApplicantsGuid; TitleOfEssay = "title" + ApplicantsGuid; _azureAzureEssayRepository = A.Fake<IAzureEssayRepository>(); _rowKeyGenerator = A.Fake<IEssayRowKeyGenerator>(); CreateEssayTopicAndApplicant(); SetupFakes(); _applicantsesNotification = new ApplicantsesNotificationRepository(TestHelpersCommonFields.DatabaseName, TestHelpersCommonFakes.ClaimsInformationGetters, TestHelpersCommonFakes.DeadlineUtilities); _applicantsEssayRepository = new ApplicantsEssayRepository(TestHelpersCommonFields.DatabaseName, TestHelpersCommonFakes.ClaimsInformationGetters, _azureAzureEssayRepository, _rowKeyGenerator); } private static void CreateEssayTopicAndApplicant() { var random = new Random(); GraduatingYear = random.Next(); var subject = new EssayTopic { EssayPrompt = Prompt, TitleOfEssay = TitleOfEssay, RevisionDateTime = DateTime.UtcNow }; var subject2 = new EssayTopic { EssayPrompt = Prompt + 2, TitleOfEssay = TitleOfEssay + 2, RevisionDateTime = DateTime.UtcNow }; var subject3 = new EssayTopic { EssayPrompt = "SHOULD NOT SHOW UP IN LIST", TitleOfEssay = "REALLY SHOULDN't SHOW up", RevisionDateTime = DateTime.UtcNow, }; var graduatingYear = new GraduatingClass { GraduatingYear = GraduatingYear, EssayTopics = new List<EssayTopic> { subject, subject2 } }; var applicant = new Applicant { Person = new Person { Guid = ApplicantsGuid, DateCreated = DateTime.UtcNow }, ApplicantPersonalInformation = new ApplicantPersonalInformation { GraduatingClass = graduatingYear, Birthdate = DateTime.UtcNow, LastUpdated = DateTime.UtcNow } }; using (var context = GetRootContext()) { context.EssayTopics.Add(subject3); context.GraduatingClasses.Add(graduatingYear); context.Applicants.Add(applicant); context.EssayTopics.Add(subject); context.SaveChanges(); EssayTopicId = context.EssayTopics.First(topic => topic.EssayPrompt == Prompt).Id; EssayTopicId2 = context.EssayTopics.First(topic => topic.EssayPrompt == 
Prompt + 2).Id; } } private static int EssayTopicId2 { get; set; } private static void SetupFakes() { RowKey = "THISISTHEROWKEYFORTHEAPPLICANT"; A.CallTo(() => TestHelpersCommonFakes.ClaimsInformationGetters.GetApplicantsGraduatingYear()) .Returns(GraduatingYear); A.CallTo(() => TestHelpersCommonFakes.ClaimsInformationGetters.GetUsersGuid()).Returns(ApplicantsGuid); A.CallTo(() => _rowKeyGenerator.CreateRowKeyForEssay(ApplicantsGuid, EssayTopicId)).Returns(RowKey); } private static string RowKey { get; set; } private static int GraduatingYear { get; set; } private static string TitleOfEssay { get; set; } private static string Prompt { get; set; } private static Guid ApplicantsGuid { get; set; } #endregion #region FirstNotifications private static void FirstTestOfNotifications() { FirstNotificationResult = _applicantsesNotification.GetApplicantNotifications(); } private static ApplicantNotificationsDto FirstNotificationResult { get; set; } [TestMethod, TestCategory("Integration")] public void ApplicantsNotificationRepository_FirstGetNotifications_Should_Have_Two_EssayTopics() { Assert.AreEqual(2, FirstNotificationResult.EssayNotifications.Count); } [TestMethod, TestCategory("Integration")] public void ApplicantsNotificationRepository_FirstGetNotifications_EssayTopics_Should_Have_No_LastUpdated() { foreach (var essayTopic in FirstNotificationResult.EssayNotifications) { Assert.IsNull(essayTopic.RevisionDateTime); } } [TestMethod, TestCategory("Integration")] public void ApplicantsNotificationRepository_FirstGetNotifications_EssayTopics_Should_Have_Right_EssayTopic() { foreach (var essayTopic in FirstNotificationResult.EssayNotifications) { if (essayTopic.EssayPrompt == Prompt) { Assert.AreEqual(TitleOfEssay, essayTopic.TitleOfEssay); } else { Assert.AreEqual(TitleOfEssay + 2, essayTopic.TitleOfEssay); } } } [TestMethod, TestCategory("Integration")] public void ApplicantsNotificationRepository_FirstGetNotifications_EssayTopics_Should_Have_Right_Ids() { foreach (var essayTopic in FirstNotificationResult.EssayNotifications) { Assert.AreEqual(essayTopic.EssayPrompt == Prompt ? 
EssayTopicId : EssayTopicId2, essayTopic.EssayTopicId); } } #endregion #region FirstUpsert private static void FirstUpsert() { Essay = "Essay"; var dto = new EssayDto {Essay = Essay + 1, EssayPrompt = Prompt, EssayTopicId = EssayTopicId}; _applicantsEssayRepository.UpsertEssay(dto); using (var context = GetRootContext()) { EssayUpsertResult1 = context.Essays.First( essay => essay.EssayTopic.Id == EssayTopicId && essay.Applicant.Person.Guid == ApplicantsGuid); } } private static Essay EssayUpsertResult1 { get; set; } private static string Essay { get; set; } [TestMethod, TestCategory("Integration")] public void ApplicantsEssayRepository_FirstUpsert_Should_Have_6_Characters() { Assert.AreEqual(6, EssayUpsertResult1.CharacterLength); } [TestMethod, TestCategory("Integration")] public void ApplicantsEssayRepository_FirstUpsert_Should_Have_Have_RecentUpdated() { TestHelpersTimeAsserts.RecentTime(EssayUpsertResult1.RevisionDateTime); } [TestMethod, TestCategory("Integration")] public void ApplicantsEssayRepository_FirstUpsert_Should_Have_Have_Correct_RowKey() { Assert.AreEqual(RowKey, EssayUpsertResult1.RowKey); } [TestMethod, TestCategory("Integration")] public void ApplicantsEssayRepository_FirstUpsert_Should_Have_Have_Correct_PartitionKey() { Assert.AreEqual(GraduatingYear.ToString(), EssayUpsertResult1.PartitionKey); } [TestMethod, TestCategory("Integration")] public void ApplicantsEssayRepository_FirstUpsert_Should_Have_Positive_Id() { TestHelpersCommonAsserts.IsGreaterThanZero(EssayUpsertResult1.Id); } [TestMethod, TestCategory("Integration")] public void ApplicantsEssayRepository_FirstUpsert_Should_Call_CreateRowKey() { A.CallTo(() => _rowKeyGenerator.CreateRowKeyForEssay(ApplicantsGuid, EssayTopicId)).MustHaveHappened(); } [TestMethod, TestCategory("Integration")] public void ApplicantsEssayRepository_FirstUpsert_Should_Call_UpsertEssay() { //Not checking time. It just isn't coming up. I did an in class check to see if it worked. It did. 
A.CallTo(() => _azureAzureEssayRepository.UpsertEssay(A<EssayAzureTableEntityDto> .That.Matches(x => x.Essay == Essay + 1 && x.EssayPrompt == Prompt && x.EssayTopicId == EssayTopicId && x.PartitionKey == GraduatingYear.ToString() && x.RowKey == RowKey ))).MustHaveHappened(); } #endregion #region SecondUpsert private static void SecondUpsert() { var dto = new EssayDto {Essay = Essay + Essay + Essay, EssayPrompt = Prompt, EssayTopicId = EssayTopicId}; _applicantsEssayRepository.UpsertEssay(dto); using (var context = GetRootContext()) { EssayUpsertResult2 = context.Essays.First( essay => essay.EssayTopic.Id == EssayTopicId && essay.Applicant.Person.Guid == ApplicantsGuid); } } private static Essay EssayUpsertResult2 { get; set; } private static int EssayTopicId { get; set; } [TestMethod, TestCategory("Integration")] public void ApplicantsEssayRepository_SecondUpsert_Should_Have_15_Characters() { Assert.AreEqual(15, EssayUpsertResult2.CharacterLength); } [TestMethod, TestCategory("Integration")] public void ApplicantsEssayRepository_SecondUpsert_Should_Have_Have_RecentUpdated_More_Recent_Than_First() { TestHelpersTimeAsserts.IsGreaterThanOrEqual(EssayUpsertResult2.RevisionDateTime, EssayUpsertResult1.RevisionDateTime); } [TestMethod, TestCategory("Integration")] public void ApplicantsEssayRepository_SecondUpsert_Should_Have_Have_Correct_RowKey() { Assert.AreEqual(RowKey, EssayUpsertResult2.RowKey); } [TestMethod, TestCategory("Integration")] public void ApplicantsEssayRepository_SecondUpsert_Should_Have_Have_Correct_PartitionKey() { Assert.AreEqual(GraduatingYear.ToString(), EssayUpsertResult2.PartitionKey); } [TestMethod, TestCategory("Integration")] public void ApplicantsEssayRepository_SecondUpsert_Should_Have_Equal_Id_To_First() { Assert.AreEqual(EssayUpsertResult1.Id, EssayUpsertResult2.Id); } [TestMethod, TestCategory("Integration")] public void ApplicantsEssayRepository_SecondUpsert_Should_Call_CreateRowKey() { A.CallTo(() => _rowKeyGenerator.CreateRowKeyForEssay(ApplicantsGuid, EssayTopicId)) .MustHaveHappened(Repeated.AtLeast.Times(3)); } [TestMethod, TestCategory("Integration")] public void ApplicantsEssayRepository_SecondUpsert_Should_Call_UpsertEssay() { //Not checking time. It just isn't coming up. I did an in class check to see if it worked. It did. 
A.CallTo(() => _azureAzureEssayRepository.UpsertEssay(A<EssayAzureTableEntityDto> .That.Matches(x => x.Essay == Essay + Essay + Essay && x.EssayPrompt == Prompt && x.EssayTopicId == EssayTopicId && x.PartitionKey == GraduatingYear.ToString() && x.RowKey == RowKey ))).MustHaveHappened(); } #endregion #region SecondNotifications private static void SecondTestOfNotifications() { SecondNotificationResult = _applicantsesNotification.GetApplicantNotifications(); } private static ApplicantNotificationsDto SecondNotificationResult { get; set; } [TestMethod, TestCategory("Integration")] public void ApplicantsNotificationRepository_SecondGetNotifications_Should_Have_Two_EssayTopics() { Assert.AreEqual(2, SecondNotificationResult.EssayNotifications.Count); } [TestMethod, TestCategory("Integration")] public void ApplicantsNotificationRepository_SecondGetNotifications_EssayTopics_Should_Have_No_LastUpdated() { foreach (var essayTopic in SecondNotificationResult.EssayNotifications) { if (essayTopic.EssayPrompt == Prompt) { Assert.AreEqual(EssayUpsertResult2.RevisionDateTime, essayTopic.RevisionDateTime); } else { Assert.IsNull(essayTopic.RevisionDateTime); } } } [TestMethod, TestCategory("Integration")] public void ApplicantsNotificationRepository_SecondGetNotifications_EssayTopics_Should_Have_Right_EssayTopic() { foreach (var essayTopic in SecondNotificationResult.EssayNotifications) { if (essayTopic.EssayPrompt == Prompt) { Assert.AreEqual(TitleOfEssay, essayTopic.TitleOfEssay); } else { Assert.AreEqual(TitleOfEssay + 2, essayTopic.TitleOfEssay); } } } [TestMethod, TestCategory("Integration")] public void ApplicantsNotificationRepository_SecondGetNotifications_EssayTopics_Should_Have_Right_Ids() { foreach (var essayTopic in SecondNotificationResult.EssayNotifications) { Assert.AreEqual(essayTopic.EssayPrompt == Prompt ? EssayTopicId : EssayTopicId2, essayTopic.EssayTopicId); } } #endregion #region Utilities private static DatabaseRootContext GetRootContext() { return new DatabaseRootContext(TestHelpersCommonFields.DatabaseName); } [ClassCleanup] public static void CleanDb() { using (var context = new DatabaseRootContext(TestHelpersCommonFields.DatabaseName)) { context.Essays.Where(essay => essay.Id > 0).Delete(); context.EssayTopics.Where(essayTopic => essayTopic.Id > 0).Delete(); context.ApplicantPersonalInformations.Where(info => info.Id > 0).Delete(); context.GraduatingClasses.Where(gradClass => gradClass.Id > 0).Delete(); } } #endregion #region GetEssay [TestMethod, TestCategory("Integration")] public void ApplicantsEssayRepository_GetEssay_Should_Call_CreateRowKeyForEssay() { GetEssay(); A.CallTo(() => _rowKeyGenerator.CreateRowKeyForEssay(ApplicantsGuid, EssayTopicId)).MustHaveHappened(); } [TestMethod, TestCategory("Integration")] public void ApplicantsEssayRepository_GetEssay_Should_Call_AzureEssayRepository() { GetEssay(); A.CallTo( () => _azureAzureEssayRepository.GetEssay( A<AzureTableStorageEntityKeyDto>.That.Matches( x => x.PartitionKey == GraduatingYear.ToString() && x.RowKey == RowKey))).MustHaveHappened(); } [TestMethod, TestCategory("Integration")] public void ApplicantsEssayRepository_GetEssay_Should_Return_Whatever_TheAzureRepoReturns() { var essayDto = new EssayDto(); A.CallTo(() => _azureAzureEssayRepository.GetEssay(A<AzureTableStorageEntityKeyDto>.Ignored)) .Returns(essayDto); Assert.AreSame(essayDto, GetEssay()); } private EssayDto GetEssay() { return _applicantsEssayRepository.GetEssay(EssayTopicId); } #endregion } }
    Sobieck00/BOH-Bulldog-Scholarship-Application-Management
    BohFoundation.ApplicantsRepository.Tests/IntegrationTests/ApplicantsEssayRepositoryIntegrationTests.cs
    C#
    mit
    17,599
class AddAuthorAndSubjectToClaimStateTransitions < ActiveRecord::Migration[4.2]
  def change
    add_column :claim_state_transitions, :author_id, :integer
    add_column :claim_state_transitions, :subject_id, :integer
  end
end
    ministryofjustice/advocate-defence-payments
    db/migrate/20160909150238_add_author_and_subject_to_claim_state_transitions.rb
    Ruby
    mit
    228
# Using a compact OS
FROM registry.dataos.io/library/nginx

MAINTAINER Golfen Guo <[email protected]>

# Install Nginx
# Add 2048 stuff into Nginx server
COPY . /usr/share/nginx/html

EXPOSE 80
    yepengxj/dao-2048
    Dockerfile
    Dockerfile
    mit
    201
require 'test_helper'
require 'cache_value/util'

class UtilTest < Test::Unit::TestCase
  include CacheValue::Util

  context 'hex_digest' do
    should 'return the same digest for identical hashes' do
      hex_digest({ :ha => 'ha' }).should == hex_digest({ :ha => 'ha' })
    end
  end
end
    tobias/cache_value
    test/util_test.rb
    Ruby
    mit
    298
# This migration comes from thinkspace_resource (originally 20150502000000)
class AddFingerprintToFile < ActiveRecord::Migration
  def change
    add_column :thinkspace_resource_files, :file_fingerprint, :string
  end
end
    sixthedge/cellar
    packages/opentbl/api/db/migrate/20170511210074_add_fingerprint_to_file.thinkspace_resource.rb
    Ruby
    mit
    222
    /** * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for * license information. * * Code generated by Microsoft (R) AutoRest Code Generator. */ package com.microsoft.azure.management.network.v2020_04_01; import java.util.Map; import com.fasterxml.jackson.annotation.JsonProperty; /** * Tags object for patch operations. */ public class TagsObject { /** * Resource tags. */ @JsonProperty(value = "tags") private Map<String, String> tags; /** * Get resource tags. * * @return the tags value */ public Map<String, String> tags() { return this.tags; } /** * Set resource tags. * * @param tags the tags value to set * @return the TagsObject object itself. */ public TagsObject withTags(Map<String, String> tags) { this.tags = tags; return this; } }
    selvasingh/azure-sdk-for-java
    sdk/network/mgmt-v2020_04_01/src/main/java/com/microsoft/azure/management/network/v2020_04_01/TagsObject.java
    Java
    mit
    954
    /* * This file is part of Sponge, licensed under the MIT License (MIT). * * Copyright (c) SpongePowered <https://www.spongepowered.org> * Copyright (c) contributors * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package org.spongepowered.common.mixin.core.server.network; import net.minecraft.network.NetworkManager; import net.minecraft.network.login.server.S00PacketDisconnect; import net.minecraft.server.MinecraftServer; import net.minecraft.server.management.ServerConfigurationManager; import net.minecraft.server.network.NetHandlerLoginServer; import net.minecraft.util.ChatComponentTranslation; import net.minecraft.util.IChatComponent; import org.apache.logging.log4j.Logger; import org.spongepowered.api.event.cause.NamedCause; import org.spongepowered.api.profile.GameProfile; import org.spongepowered.api.event.SpongeEventFactory; import org.spongepowered.api.event.cause.Cause; import org.spongepowered.api.event.network.ClientConnectionEvent; import org.spongepowered.api.network.RemoteConnection; import org.spongepowered.api.text.Text; import org.spongepowered.asm.lib.Opcodes; import org.spongepowered.asm.mixin.Mixin; import org.spongepowered.asm.mixin.Shadow; import org.spongepowered.asm.mixin.injection.At; import org.spongepowered.asm.mixin.injection.Inject; import org.spongepowered.asm.mixin.injection.Redirect; import org.spongepowered.asm.mixin.injection.callback.CallbackInfo; import org.spongepowered.common.SpongeImpl; import org.spongepowered.common.interfaces.IMixinNetHandlerLoginServer; import org.spongepowered.common.text.SpongeTexts; import java.net.SocketAddress; import java.util.Optional; @Mixin(NetHandlerLoginServer.class) public abstract class MixinNetHandlerLoginServer implements IMixinNetHandlerLoginServer { @Shadow private static Logger logger; @Shadow public NetworkManager networkManager; @Shadow private MinecraftServer server; @Shadow private com.mojang.authlib.GameProfile loginGameProfile; @Shadow public abstract String getConnectionInfo(); @Shadow public abstract com.mojang.authlib.GameProfile getOfflineProfile(com.mojang.authlib.GameProfile profile); @Redirect(method = "tryAcceptPlayer", at = @At(value = "INVOKE", target = "Lnet/minecraft/server/management/ServerConfigurationManager;" + "allowUserToConnect(Ljava/net/SocketAddress;Lcom/mojang/authlib/GameProfile;)Ljava/lang/String;")) public String onAllowUserToConnect(ServerConfigurationManager confMgr, SocketAddress address, com.mojang.authlib.GameProfile profile) { return null; // We handle disconnecting } private void 
closeConnection(IChatComponent reason) { try { logger.info("Disconnecting " + this.getConnectionInfo() + ": " + reason.getUnformattedText()); this.networkManager.sendPacket(new S00PacketDisconnect(reason)); this.networkManager.closeChannel(reason); } catch (Exception exception) { logger.error("Error whilst disconnecting player", exception); } } private void disconnectClient(Optional<Text> disconnectMessage) { IChatComponent reason = null; if (disconnectMessage.isPresent()) { reason = SpongeTexts.toComponent(disconnectMessage.get()); } else { reason = new ChatComponentTranslation("disconnect.disconnected"); } this.closeConnection(reason); } @Override public boolean fireAuthEvent() { Optional<Text> disconnectMessage = Optional.of(Text.of("You are not allowed to log in to this server.")); ClientConnectionEvent.Auth event = SpongeEventFactory.createClientConnectionEventAuth(Cause.of(NamedCause.source(this.loginGameProfile)), disconnectMessage, disconnectMessage, (RemoteConnection) this.networkManager, (GameProfile) this.loginGameProfile); SpongeImpl.postEvent(event); if (event.isCancelled()) { this.disconnectClient(event.getMessage()); } return event.isCancelled(); } @Inject(method = "processLoginStart", at = @At(value = "FIELD", target = "Lnet/minecraft/server/network/NetHandlerLoginServer;" + "currentLoginState:Lnet/minecraft/server/network/NetHandlerLoginServer$LoginState;", opcode = Opcodes.PUTFIELD, ordinal = 1), cancellable = true) public void fireAuthEventOffline(CallbackInfo ci) { // Move this check up here, so that the UUID isn't null when we fire the event if (!this.loginGameProfile.isComplete()) { this.loginGameProfile = this.getOfflineProfile(this.loginGameProfile); } if (this.fireAuthEvent()) { ci.cancel(); } } }
    kashike/SpongeCommon
    src/main/java/org/spongepowered/common/mixin/core/server/network/MixinNetHandlerLoginServer.java
    Java
    mit
    5,729
    from otp.ai.AIBaseGlobal import * import DistributedCCharBaseAI from direct.directnotify import DirectNotifyGlobal from direct.fsm import ClassicFSM, State from direct.fsm import State from direct.task import Task import random from toontown.toonbase import ToontownGlobals from toontown.toonbase import TTLocalizer import CharStateDatasAI class DistributedGoofySpeedwayAI(DistributedCCharBaseAI.DistributedCCharBaseAI): notify = DirectNotifyGlobal.directNotify.newCategory('DistributedGoofySpeedwayAI') def __init__(self, air): DistributedCCharBaseAI.DistributedCCharBaseAI.__init__(self, air, TTLocalizer.Goofy) self.fsm = ClassicFSM.ClassicFSM('DistributedGoofySpeedwayAI', [State.State('Off', self.enterOff, self.exitOff, ['Lonely', 'TransitionToCostume', 'Walk']), State.State('Lonely', self.enterLonely, self.exitLonely, ['Chatty', 'Walk', 'TransitionToCostume']), State.State('Chatty', self.enterChatty, self.exitChatty, ['Lonely', 'Walk', 'TransitionToCostume']), State.State('Walk', self.enterWalk, self.exitWalk, ['Lonely', 'Chatty', 'TransitionToCostume']), State.State('TransitionToCostume', self.enterTransitionToCostume, self.exitTransitionToCostume, ['Off'])], 'Off', 'Off') self.fsm.enterInitialState() self.handleHolidays() def delete(self): self.fsm.requestFinalState() DistributedCCharBaseAI.DistributedCCharBaseAI.delete(self) self.lonelyDoneEvent = None self.lonely = None self.chattyDoneEvent = None self.chatty = None self.walkDoneEvent = None self.walk = None return def generate(self): DistributedCCharBaseAI.DistributedCCharBaseAI.generate(self) name = self.getName() self.lonelyDoneEvent = self.taskName(name + '-lonely-done') self.lonely = CharStateDatasAI.CharLonelyStateAI(self.lonelyDoneEvent, self) self.chattyDoneEvent = self.taskName(name + '-chatty-done') self.chatty = CharStateDatasAI.CharChattyStateAI(self.chattyDoneEvent, self) self.walkDoneEvent = self.taskName(name + '-walk-done') if self.diffPath == None: self.walk = CharStateDatasAI.CharWalkStateAI(self.walkDoneEvent, self) else: self.walk = CharStateDatasAI.CharWalkStateAI(self.walkDoneEvent, self, self.diffPath) return def walkSpeed(self): return ToontownGlobals.GoofySpeed def start(self): self.fsm.request('Lonely') def __decideNextState(self, doneStatus): if self.transitionToCostume == 1: curWalkNode = self.walk.getDestNode() if simbase.air.holidayManager: if ToontownGlobals.HALLOWEEN_COSTUMES in simbase.air.holidayManager.currentHolidays and simbase.air.holidayManager.currentHolidays[ToontownGlobals.HALLOWEEN_COSTUMES]: simbase.air.holidayManager.currentHolidays[ToontownGlobals.HALLOWEEN_COSTUMES].triggerSwitch(curWalkNode, self) self.fsm.request('TransitionToCostume') elif ToontownGlobals.APRIL_FOOLS_COSTUMES in simbase.air.holidayManager.currentHolidays and simbase.air.holidayManager.currentHolidays[ToontownGlobals.APRIL_FOOLS_COSTUMES]: simbase.air.holidayManager.currentHolidays[ToontownGlobals.APRIL_FOOLS_COSTUMES].triggerSwitch(curWalkNode, self) self.fsm.request('TransitionToCostume') else: self.notify.warning('transitionToCostume == 1 but no costume holiday') else: self.notify.warning('transitionToCostume == 1 but no holiday Manager') if doneStatus['state'] == 'lonely' and doneStatus['status'] == 'done': self.fsm.request('Walk') elif doneStatus['state'] == 'chatty' and doneStatus['status'] == 'done': self.fsm.request('Walk') elif doneStatus['state'] == 'walk' and doneStatus['status'] == 'done': if len(self.nearbyAvatars) > 0: self.fsm.request('Chatty') else: self.fsm.request('Lonely') def enterOff(self): pass def 
exitOff(self): DistributedCCharBaseAI.DistributedCCharBaseAI.exitOff(self) def enterLonely(self): self.lonely.enter() self.acceptOnce(self.lonelyDoneEvent, self.__decideNextState) def exitLonely(self): self.ignore(self.lonelyDoneEvent) self.lonely.exit() def __goForAWalk(self, task): self.notify.debug('going for a walk') self.fsm.request('Walk') return Task.done def enterChatty(self): self.chatty.enter() self.acceptOnce(self.chattyDoneEvent, self.__decideNextState) def exitChatty(self): self.ignore(self.chattyDoneEvent) self.chatty.exit() def enterWalk(self): self.notify.debug('going for a walk') self.walk.enter() self.acceptOnce(self.walkDoneEvent, self.__decideNextState) def exitWalk(self): self.ignore(self.walkDoneEvent) self.walk.exit() def avatarEnterNextState(self): if len(self.nearbyAvatars) == 1: if self.fsm.getCurrentState().getName() != 'Walk': self.fsm.request('Chatty') else: self.notify.debug('avatarEnterNextState: in walk state') else: self.notify.debug('avatarEnterNextState: num avatars: ' + str(len(self.nearbyAvatars))) def avatarExitNextState(self): if len(self.nearbyAvatars) == 0: if self.fsm.getCurrentState().getName() != 'Walk': self.fsm.request('Lonely') def handleHolidays(self): DistributedCCharBaseAI.DistributedCCharBaseAI.handleHolidays(self) if hasattr(simbase.air, 'holidayManager'): if ToontownGlobals.APRIL_FOOLS_COSTUMES in simbase.air.holidayManager.currentHolidays: if simbase.air.holidayManager.currentHolidays[ToontownGlobals.APRIL_FOOLS_COSTUMES] != None and simbase.air.holidayManager.currentHolidays[ToontownGlobals.APRIL_FOOLS_COSTUMES].getRunningState(): self.diffPath = TTLocalizer.Donald return def getCCLocation(self): if self.diffPath == None: return 1 else: return 0 return def enterTransitionToCostume(self): pass def exitTransitionToCostume(self): pass
    ksmit799/Toontown-Source
    toontown/classicchars/DistributedGoofySpeedwayAI.py
    Python
    mit
    6,450
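The ClassicFSM usage above boils down to named states, enter/exit handlers, and an explicit list of legal transitions per state. A minimal JavaScript sketch of that pattern (not Panda3D's API, just the shape of it):

// Tiny FSM: states carry enter/exit handlers and an allowed-transition list.
class FSM {
  constructor(states, initial) {
    this.states = states;   // { name: { enter, exit, next: [...] } }
    this.current = initial;
  }
  request(name) {
    const from = this.states[this.current];
    if (!from.next.includes(name)) {
      throw new Error(`illegal transition ${this.current} -> ${name}`);
    }
    if (from.exit) from.exit();
    this.current = name;
    const to = this.states[name];
    if (to.enter) to.enter();
  }
}

const fsm = new FSM({
  Lonely: { next: ['Chatty', 'Walk'] },
  Chatty: { next: ['Lonely', 'Walk'] },
  Walk:   { next: ['Lonely', 'Chatty'] },
}, 'Lonely');

fsm.request('Walk');   // fine: Lonely -> Walk is listed
// fsm.request('Walk'); // would throw: Walk -> Walk is not listed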
    <?php namespace Rmc\Core\StaticPageBundle\DependencyInjection; use Symfony\Component\Config\Definition\Builder\TreeBuilder; use Symfony\Component\Config\Definition\ConfigurationInterface; /** * This is the class that validates and merges configuration from your app/config files * * To learn more see {@link http://symfony.com/doc/current/cookbook/bundles/extension.html#cookbook-bundles-extension-config-class} */ class Configuration implements ConfigurationInterface { /** * {@inheritDoc} */ public function getConfigTreeBuilder() { $treeBuilder = new TreeBuilder(); $rootNode = $treeBuilder->root('rmc_core_static_page'); $rootNode ->children() ->arrayNode('static_page') ->children() ->scalarNode('is_enabled')->end() ->scalarNode('source')->end() ->scalarNode('entity_manager_name')->end() ->scalarNode('entity_class')->end() ->scalarNode('local_feed_path')->defaultFalse()->end() ->end() ->end() ->end(); // Here you should define the parameters that are allowed to // configure your bundle. See the documentation linked above for // more information on that topic. return $treeBuilder; } }
    jignesh-russmediatech/rmcdemo
    src/Rmc/Core/StaticPageBundle/DependencyInjection/Configuration.php
    PHP
    mit
    1,405
    <!--?xml version="1.0"?--><html><head></head><body></body></html>
    textlint/textlint-plugin-html
    test/ast-test-case/doctype-quirksmode-xml/result.html
    HTML
    mit
    65
import React from 'react';
import { Link } from 'react-router';
import HotdotActions from '../actions/HotdotActions';
import HotdotObjStore from '../stores/HotdotObjStore';
import MyInfoNavbar from './MyInfoNavbar';
import Weixin from './Weixin';

class Hotdot extends React.Component {
    constructor(props) {
        super(props);
        this.state = HotdotObjStore.getState();
        this.onChange = this.onChange.bind(this);
    }

    componentDidMount() {
        HotdotActions.getHotdotDatas();
        $(".month-search").hide();
        $(".navbar-hotdot").on("touchend", function () {
            var index = $(this).index();
            if (index == 0) {
                // "this week" tab
                $(".month-search").hide();
                $(".week-search").show();
            } else {
                // "this month" tab
                $(".month-search").show();
                $(".week-search").hide();
            }
        });
        HotdotObjStore.listen(this.onChange);
        Weixin.getUrl();
        Weixin.weixinReady();
    }

    componentWillUnmount() {
        HotdotObjStore.unlisten(this.onChange);
    }

    onChange(state) {
        this.setState(state);
    }

    getUpOrDown(curData, preData, isWeek) {
        var preDataItem = isWeek ? preData.week : preData.month;
        // No previous period to compare against: show an "up" badge by default.
        if (!preData || preDataItem === undefined) {
            return (<span className="hotdotRight"><span className="glyphicon-trend glyphicon glyphicon-arrow-up"></span> <span className="badge">{curData.value}</span></span>);
        } else {
            for (var i = 0; i < preDataItem.length; i++) {
                if (preDataItem[i].word == curData.word) {
                    if (preDataItem[i].value < curData.value) {
                        return (<span className="hotdotRight"><span className="glyphicon-trend glyphicon glyphicon-arrow-up"></span> <span className="badge">{curData.value}</span></span>);
                    } else {
                        return (<span className="hotdotRight"><span className="glyphicon-trend glyphicon glyphicon-arrow-down"></span> <span className="badge" style={{backgroundColor: "#4F81E3"}}>{curData.value}</span></span>);
                    }
                }
            }
        }
        return (<span className="hotdotRight"><span className="glyphicon-trend glyphicon glyphicon-arrow-up"></span> <span className="badge">{curData.value}</span></span>);
    }

    render() {
        var hotdotData = this.state.data;
        var firstHotData = hotdotData[0];
        var preHotData;
        // data[7], when present, is used as the previous period for the trend arrows.
        if (hotdotData.length > 7) {
            preHotData = hotdotData[7];
        } else {
            preHotData = [];
        }
        var weekList, monthList;
        if (firstHotData) {
            weekList = firstHotData.week.map((weekItem, i) => (
                <li className="list-group-item" key={i}>
                    {this.getUpOrDown(weekItem, preHotData, true)}
                    {weekItem.word}
                </li>
            ));
            if (weekList.length == 0) {
                weekList = <div className="noData">数据还没有准备好,要不去其他页面瞅瞅?</div>;
            }
            monthList = firstHotData.month.map((monthItem, i) => (
                <li className="list-group-item" key={i}>
                    {this.getUpOrDown(monthItem, preHotData, false)}
                    {monthItem.word}
                </li>
            ));
            if (monthList.length == 0) {
                monthList = <div className="noData">Whops,这个页面的数据没有准备好,去其他页面瞅瞅?</div>;
            }
        } else {
            weekList = (<span>正在构建,敬请期待...</span>);
            monthList = (<span>正在构建,敬请期待...</span>);
        }
        return (<div>
            <div className="content-container">
                <div className="week-search">
                    <div className="panel panel-back">
                        <div className="panel-heading">
                            <span className="panel-title">本周关键字排行榜</span>
                            <div className="navbar-key-container">
                                <span className="navbar-hotdot navbar-week navbar-hotdot-active">本周</span>
                                <span className="navbar-hotdot navbar-month">本月</span>
                            </div>
                        </div>
                        <div className="panel-body">
                            <ul className="list-group">
                                {weekList}
                            </ul>
                        </div>
                    </div>
                </div>
                <div className="month-search">
                    <div className="panel panel-back">
                        <div className="panel-heading">
                            <span className="panel-title">本月关键字排行榜</span>
                            <div className="navbar-key-container">
                                <span className="navbar-hotdot navbar-week">本周</span>
                                <span className="navbar-hotdot navbar-month navbar-hotdot-active">本月</span>
                            </div>
                        </div>
                        <div className="panel-body">
                            <ul className="list-group">
                                {monthList}
                            </ul>
                        </div>
                    </div>
                </div>
            </div>
        </div>);
    }
}

export default Hotdot;
    kongchun/BigData-Web
    app/m_components/Hotdot.js
    JavaScript
    mit
    5,621
// Copyright (c) 2013-2014 PropCoin Developers

#ifndef CLIENTVERSION_H
#define CLIENTVERSION_H

//
// client versioning and copyright year
//

// These need to be macros, as version.cpp's and bitcoin-qt.rc's voodoo requires it
#define CLIENT_VERSION_MAJOR 1
#define CLIENT_VERSION_MINOR 5
#define CLIENT_VERSION_REVISION 1
#define CLIENT_VERSION_BUILD 0

// Set to true for release, false for prerelease or test build
#define CLIENT_VERSION_IS_RELEASE true

// Copyright year (2009-this)
// Todo: update this when changing our copyright comments in the source
#define COPYRIGHT_YEAR 2014

// Converts the parameter X to a string after macro replacement on X has been performed.
// Don't merge these into one macro!
#define STRINGIZE(X) DO_STRINGIZE(X)
#define DO_STRINGIZE(X) #X

#endif // CLIENTVERSION_H
    demomint/prop
    src/clientversion.h
    C
    mit
    829
varnish
=======

Varnish to run EOL site:

    sudo docker run -v /eol/varnish/default.vcl:/etc/varnish/default.vcl \
        -p 80:80 eoldocker/varnish:v3.0.5
    EolDocker/varnish
    README.md
    Markdown
    mit
    150
<?php

namespace PayU\Api\Response\Builder;

use PayU\Api\Request\RequestInterface;
use PayU\Api\Response\AbstractResponse;
use Psr\Http\Message\ResponseInterface;

/**
 * Interface BuilderInterface
 *
 * Provides a common interface to build response objects based on request context
 *
 * @package PayU\Api\Response\Builder
 * @author Lucas Mendes <[email protected]>
 */
interface BuilderInterface
{
    /**
     * Build a response object
     *
     * @param RequestInterface $request
     * @param ResponseInterface $response
     * @param string $context
     * @return AbstractResponse
     */
    public function build(RequestInterface $request, ResponseInterface $response, $context = null);
}
    devsdmf/payu-php-sdk
    src/PayU/Api/Response/Builder/BuilderInterface.php
    PHP
    mit
    713
using SolrExpress.Search.Parameter;
using System;
using System.Globalization;
using System.Linq;
using System.Text;

namespace SolrExpress.Utility
{
    /// <summary>
    /// Helper class used to extract information inside parameters
    /// </summary>
    internal static class ParameterUtil
    {
        /// <summary>
        /// Get the sort type and direction
        /// </summary>
        /// <param name="solrFacetSortType">Type used in match</param>
        /// <param name="typeName">Type name</param>
        /// <param name="sortName">Sort direction</param>
        public static void GetFacetSort(FacetSortType solrFacetSortType, out string typeName, out string sortName)
        {
            switch (solrFacetSortType)
            {
                case FacetSortType.IndexAsc:
                    typeName = "index";
                    sortName = "asc";
                    break;
                case FacetSortType.IndexDesc:
                    typeName = "index";
                    sortName = "desc";
                    break;
                case FacetSortType.CountAsc:
                    typeName = "count";
                    sortName = "asc";
                    break;
                case FacetSortType.CountDesc:
                    typeName = "count";
                    sortName = "desc";
                    break;
                default:
                    throw new ArgumentException(nameof(solrFacetSortType));
            }
        }

        /// <summary>
        /// Calculates and returns the spatial formula
        /// </summary>
        /// <param name="fieldName">Field name</param>
        /// <param name="functionType">Function used in spatial filter</param>
        /// <param name="centerPoint">Center point to spatial filter</param>
        /// <param name="distance">Distance from center point</param>
        /// <returns>Spatial formula</returns>
        internal static string GetSpatialFormule(string fieldName, SpatialFunctionType functionType, GeoCoordinate centerPoint, decimal distance)
        {
            var functionTypeStr = functionType.ToString().ToLower();
            var latitude = centerPoint.Latitude.ToString("G", CultureInfo.InvariantCulture);
            var longitude = centerPoint.Longitude.ToString("G", CultureInfo.InvariantCulture);
            var distanceStr = distance.ToString("G", CultureInfo.InvariantCulture);

            return $"{{!{functionTypeStr} sfield={fieldName} pt={latitude},{longitude} d={distanceStr}}}";
        }

        /// <summary>
        /// Get the field with excludes
        /// </summary>
        /// <param name="excludes">Excludes tags</param>
        /// <param name="aliasName">Alias name</param>
        /// <param name="fieldName">Field name</param>
        internal static string GetFacetName(string[] excludes, string aliasName, string fieldName)
        {
            var sb = new StringBuilder();
            var needsBraces = (excludes?.Any() ?? false) || !string.IsNullOrWhiteSpace(aliasName);

            if (needsBraces)
            {
                sb.Append("{!");
            }

            if (excludes?.Any() ?? false)
            {
                sb.Append($"ex={string.Join(",", excludes)}");
            }

            if (sb.Length > 2)
            {
                sb.Append(" ");
            }

            if (!string.IsNullOrWhiteSpace(aliasName))
            {
                sb.Append($"key={aliasName}");
            }

            if (needsBraces)
            {
                sb.Append("}");
            }

            sb.Append(fieldName);

            return sb.ToString();
        }

        /// <summary>
        /// Get the filter with tag
        /// </summary>
        /// <param name="query">Query value</param>
        /// <param name="aliasName">Alias name</param>
        public static string GetFilterWithTag(string query, string aliasName)
        {
            return !string.IsNullOrWhiteSpace(aliasName)
                ? $"{{!tag={aliasName}}}{query}"
                : query;
        }
    }
}
    solr-express/solr-express
    src/SolrExpress/Utility/ParameterUtil.cs
    C#
    mit
    4,034
/**
 * React Starter Kit (https://www.reactstarterkit.com/)
 *
 * Copyright © 2014-2016 Kriasoft, LLC. All rights reserved.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE.txt file in the root directory of this source tree.
 */

import 'babel-polyfill';
import ReactDOM from 'react-dom';
import React from 'react';
import FastClick from 'fastclick';
import Router from './routes';
import Location from './core/Location';
import { addEventListener, removeEventListener } from './core/DOMUtils';
import { ApolloClient, createNetworkInterface } from 'react-apollo';

function getCookie(name) {
  let value = "; " + document.cookie;
  let parts = value.split("; " + name + "=");
  if (parts.length == 2) return parts.pop().split(";").shift();
}

const networkInterface = createNetworkInterface('/graphql', {
  credentials: 'same-origin',
  uri: '/graphql',
  headers: { Cookie: getCookie("id_token") }
});

const client = new ApolloClient({
  connectToDevTools: true,
  networkInterface: networkInterface,
});

let cssContainer = document.getElementById('css');
const appContainer = document.getElementById('app');
const context = {
  insertCss: styles => styles._insertCss(),
  onSetTitle: value => (document.title = value),
  onSetMeta: (name, content) => {
    // Remove and create a new <meta /> tag in order to make it work
    // with bookmarks in Safari
    const elements = document.getElementsByTagName('meta');
    Array.from(elements).forEach((element) => {
      if (element.getAttribute('name') === name) {
        element.parentNode.removeChild(element);
      }
    });
    const meta = document.createElement('meta');
    meta.setAttribute('name', name);
    meta.setAttribute('content', content);
    document.getElementsByTagName('head')[0].appendChild(meta);
  },
  client
};

// Google Analytics tracking. Don't send 'pageview' event after the first
// rendering, as it was already sent by the Html component.
let trackPageview = () => (trackPageview = () => window.ga('send', 'pageview'));

function render(state) {
  Router.dispatch(state, (newState, component) => {
    ReactDOM.render(component, appContainer, () => {
      // Restore the scroll position if it was saved into the state
      if (state.scrollY !== undefined) {
        window.scrollTo(state.scrollX, state.scrollY);
      } else {
        window.scrollTo(0, 0);
      }
      trackPageview();
      // Remove the pre-rendered CSS because it's no longer used
      // after the React app is launched
      if (cssContainer) {
        cssContainer.parentNode.removeChild(cssContainer);
        cssContainer = null;
      }
    });
  });
}

function run() {
  let currentLocation = null;
  let currentState = null;

  // Make taps on links and buttons work fast on mobiles
  FastClick.attach(document.body);

  // Re-render the app when window.location changes
  const unlisten = Location.listen(location => {
    currentLocation = location;
    currentState = Object.assign({}, location.state, {
      path: location.pathname,
      query: location.query,
      state: location.state,
      context,
    });
    render(currentState);
  });

  // Save the page scroll position into the current location's state
  const supportPageOffset = window.pageXOffset !== undefined;
  const isCSS1Compat = ((document.compatMode || '') === 'CSS1Compat');
  const setPageOffset = () => {
    currentLocation.state = currentLocation.state || Object.create(null);
    if (supportPageOffset) {
      currentLocation.state.scrollX = window.pageXOffset;
      currentLocation.state.scrollY = window.pageYOffset;
    } else {
      currentLocation.state.scrollX = isCSS1Compat ?
        document.documentElement.scrollLeft : document.body.scrollLeft;
      currentLocation.state.scrollY = isCSS1Compat ?
        document.documentElement.scrollTop : document.body.scrollTop;
    }
  };

  addEventListener(window, 'scroll', setPageOffset);
  addEventListener(window, 'pagehide', () => {
    removeEventListener(window, 'scroll', setPageOffset);
    unlisten();
  });
}

// Run the application when both DOM is ready and page content is loaded
if (['complete', 'loaded', 'interactive'].includes(document.readyState) && document.body) {
  run();
} else {
  document.addEventListener('DOMContentLoaded', run, false);
}
    reicheltp/Sonic
    src/client.js
    JavaScript
    mit
    4,372
    var $M = require("@effectful/debugger"), $x = $M.context, $ret = $M.ret, $unhandled = $M.unhandled, $brk = $M.brk, $lset = $M.lset, $mcall = $M.mcall, $m = $M.module("file.js", null, typeof module === "undefined" ? null : module, null, "$", { __webpack_require__: typeof __webpack_require__ !== "undefined" && __webpack_require__ }, null), $s$1 = [{ e: [1, "1:9-1:10"] }, null, 0], $s$2 = [{}, $s$1, 1], $m$0 = $M.fun("m$0", "file.js", null, null, [], 0, 2, "1:0-4:0", 32, function ($, $l, $p) { for (;;) switch ($.state = $.goto) { case 0: $lset($l, 1, $m$1($)); $.goto = 2; continue; case 1: $.goto = 2; return $unhandled($.error); case 2: return $ret($.result); default: throw new Error("Invalid state"); } }, null, null, 0, [[0, "1:0-3:1", $s$1], [16, "4:0-4:0", $s$1], [16, "4:0-4:0", $s$1]]), $m$1 = $M.fun("m$1", "e", null, $m$0, [], 0, 2, "1:0-3:1", 0, function ($, $l, $p) { for (;;) switch ($.state = $.goto) { case 0: $.goto = 1; $brk(); $.state = 1; case 1: $.goto = 2; $p = ($x.call = eff)(1); $.state = 2; case 2: $l[1] = $p; $.goto = 3; $p = ($x.call = eff)(2); $.state = 3; case 3: $.goto = 4; $mcall("log", console, $l[1] + $p); $.state = 4; case 4: $.goto = 6; $brk(); continue; case 5: $.goto = 6; return $unhandled($.error); case 6: return $ret($.result); default: throw new Error("Invalid state"); } }, null, null, 1, [[4, "2:2-2:31", $s$2], [2, "2:14-2:20", $s$2], [2, "2:23-2:29", $s$2], [2, "2:2-2:30", $s$2], [36, "3:1-3:1", $s$2], [16, "3:1-3:1", $s$2], [16, "3:1-3:1", $s$2]]); $M.moduleExports();
    awto/effectfuljs
    packages/core/test/samples/simple/expr/test04-out-ds.js
    JavaScript
    mit
    1,801
# INTRODUCTION

Triplie is an AI bot based on a 2nd- up to 5th-order Markov model. It uses an SQLite database for storage.

Triplie learns by creating

1. a dictionary of words
2. a graph representing valid 5-grams (consecutive groups of 5 words) encountered in the text
3. a graph of associations between words from sentences, formed according to the Hebbian rule

To respond to a user, triplie extracts keywords from the user's text, finds their most appropriate associated keywords in the Hebbian association network, and generates replies that contain the associated keywords, using a multiple breadth-first-search Markov chain algorithm (a minimal sketch of this pipeline appears at the end of this README).

For more information on installing and configuring, read below.

You can join the project's IRC channel too: [#triplie on irc.freenode.net](irc://irc.freenode.net/#triplie)

# Install

## Prerequisites

Download and install [node.js](http://nodejs.org/) for your system. It's recommended to build node from source. If you don't do that, make sure that npm is also installed alongside node and that the node binary is called "node".

Then from a terminal run:

    npm install -g triplie

This will install the `triplie` command on your system. Configure the bot as explained below before running!

# CONFIGURATION

If running the bot for the first time and it's not configured, you should create a new directory and run:

    triplie config.yaml --init

to create the initial config file.

### Edit config.yaml

config.yaml is already pre-filled with some defaults for your bot. You will want to change some of these settings. The configuration file is well commented: open it and edit it according to the instructions contained inside. Once you run the bot, however, the instructions will disappear the moment you change a setting by giving a command to the bot.

# RUNNING

After you have edited the config file, run the bot with:

    triplie config.yaml

# IMPORT EXISTING TEXT

If called with the argument `--feed`, triplie will receive data from stdin, parse it using a regular expression, then feed the database. Example:

    cat log.txt | triplie config.yaml --feed --regex '(?<year>\d+)-(?<month>\d+)-(?<day>\d+)T(?<hour>\d+):(?<minute>\d+):(?<second>\d+)Z\s+(?<nick>.+):\s+(?<text>.+)'

will work for a `log.txt` that has lines in the format:

    2013-04-04T13:15:00Z someuser: I wrote some text

The syntax is XRegExp and uses named groups. See [the XRegExp readme](https://npmjs.org/package/xregexp) for more info.

Currently supported named captures are:

* year
* month
* day
* hour
* minute
* second
* timestamp - unix timestamp in seconds, used instead of the date captures
* timestampms - unix timestamp in milliseconds, used instead of both of the above
* text - the text content

Timestamp example:

    cat log.txt | triplie config.yaml --feed --regex '(?<timestamp>\d+) (?<text>.+)'

will match a `log.txt` containing lines in the format:

    1234567890 example text here

All captures except text are optional - the time is optional, and if it is left out the feeder will generate reasonable "fake" timestamps:

    cat log.txt | triplie config.yaml --feed --regex '(?<text>.+)'

# COMMANDS

List of triplie's commands (assuming "!" is the cmdchar):

1. !join #channel - causes the bot to join and remember the channel
2. !part #channel - part and forget channel
3. !reload - causes a reload of the bot code, useful for development
4. !set path value - set a config setting to the specified value. Examples:

   !set ai.sleep.1 10 - set the upper sleep limit to 10 seconds

   !set ai.sleep [2,3] - set both sleep limits. The value mustn't contain spaces.

5. !get path - get the config value at the specified path
6. !db stats - triplie will output database statistics

!cmd returns results via private notice; !!cmd returns results via public message.

# LICENCE & AUTHOR

See LICENCE and AUTHORS (if present)

![Bitdeli Badge](https://d2weczhvl823v0.cloudfront.net/spion/triplie-ng/trend.png)
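For orientation, here is the minimal sketch of the reply pipeline promised in the introduction: extract keywords, follow the Hebbian association graph, then connect the associated words with a breadth-first search through the word graph. Everything in it is made-up stand-in data with hypothetical helper names (`hebbian`, `successors`, `extractKeywords`, `associate`, `bfsPath`); the real bot keeps these graphs in SQLite and works on 5-grams rather than the bigram simplification used here:

    // Hebbian association graph: word -> { associatedWord: strength }
    const hebbian = {
      coffee: { morning: 3, espresso: 5 },
    };

    // Word-successor graph (assumption: a bigram simplification of the
    // 5-gram graph the README describes).
    const successors = {
      espresso: ['is', 'tastes'],
      is: ['strong'],
      tastes: ['great', 'strong'],
    };

    // 1. Keyword extraction: here, simply the words the graph knows about.
    const extractKeywords = text =>
      text.toLowerCase().split(/\W+/).filter(w => w in hebbian);

    // 2. Strongest Hebbian association for a keyword.
    const associate = word =>
      Object.entries(hebbian[word]).sort((a, b) => b[1] - a[1])[0][0];

    // 3. Breadth-first search for a word path that reaches a target word.
    function bfsPath(from, to) {
      const queue = [[from]];
      const seen = new Set([from]);
      while (queue.length) {
        const path = queue.shift();
        const last = path[path.length - 1];
        if (last === to) return path;
        for (const next of successors[last] || []) {
          if (!seen.has(next)) {
            seen.add(next);
            queue.push(path.concat(next));
          }
        }
      }
      return null;
    }

    const [keyword] = extractKeywords('I love coffee in the morning');
    const start = associate(keyword); // 'espresso'
    console.log(bfsPath(start, 'strong').join(' ')); // "espresso is strong"

The BFS returns the first (hence shortest) word path that reaches the target word, which is the essential idea behind chaining a reply through the required keywords.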
    spion/triplie-ng
    README.md
    Markdown
    mit
    3,968
<!DOCTYPE html>
<html>
<head>
  <!-- meta information -->
  <meta charset="utf-8">
  <meta name="description" content="http://feedproxy.google.com/~r/geledes/~3/EdmSLCOQs3o/">
  <meta name="author" content="Kinho">

  <!-- Enable responsiveness on mobile devices-->
  <meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1">

  <!-- title -->
  <title>My post &middot; Entropista</title>

  <!-- icons -->
  <link rel="shortcut icon" href="/public/images/favicon.ico" />

  <!-- stylesheets -->
  <link rel="stylesheet" href="/public/css/responsive.gs.12col.css">
  <link rel="stylesheet" href="/public/css/animate.min.css">
  <link rel="stylesheet" href="/public/css/main.css">

  <!-- Google fonts -->
  <link rel="stylesheet" href="http://fonts.googleapis.com/css?family=Source+Sans+Pro:400,700,400italic&subset=latin-ext">

  <!-- feed links -->
  <link rel="alternate" href="/feed.xml" type="application/rss+xml" title="">
</head>
<body>
  <div class="container amber">
    <header class="top row gutters">
      <div class="col span_2 center">
        <!-- TODO: add baseurl to the logo link -->
        <a href="" id="logo" title="Entropista" style="background-image: url(/public/images/logo.png);"></a>
      </div>
      <nav class="col span_10 top-navbar">
        <a href="/" title="Home">Home</a>
        <a href="/about" title="About">About</a>
        <a href="/NormasDeComunicaçãoViolenta" title="N.C.V.">N.C.V.</a>
        <a href="/Táticas" title="Táticas">Táticas</a>
        <a href="/NovelaFuturista" title="Livro">Livro</a>
      </nav>
    </header>
    <article class="single row gutters">
      <time class="published" datetime="2015-10-12">12 October 2015</time>
      <h2>My post</h2>
      <p>http://feedproxy.google.com/~r/geledes/~3/EdmSLCOQs3o/</p>
    </article>
    <footer>
      <p>
        This work is licensed under a <a rel="license" href="http://creativecommons.org/licenses/by-nc/4.0/deed.en_US">Creative Commons Attribution-NonCommercial 4.0 International License</a>.
      </p>
    </footer>
  </div>
</body>
</html>
    oentropista/oentropista.github.io
    _site/post/index.html
    HTML
    mit
    2,129