{ // 获取包含Hugging Face文本的span元素 const spans = link.querySelectorAll('span.whitespace-nowrap, span.hidden.whitespace-nowrap'); spans.forEach(span => { if (span.textContent && span.textContent.trim().match(/Hugging\s*Face/i)) { span.textContent = 'AI快站'; } }); }); // 替换logo图片的alt属性 document.querySelectorAll('img[alt*="Hugging"], img[alt*="Face"]').forEach(img => { if (img.alt.match(/Hugging\s*Face/i)) { img.alt = 'AI快站 logo'; } }); } // 替换导航栏中的链接 function replaceNavigationLinks() { // 已替换标记,防止重复运行 if (window._navLinksReplaced) { return; } // 已经替换过的链接集合,防止重复替换 const replacedLinks = new Set(); // 只在导航栏区域查找和替换链接 const headerArea = document.querySelector('header') || document.querySelector('nav'); if (!headerArea) { return; } // 在导航区域内查找链接 const navLinks = headerArea.querySelectorAll('a'); navLinks.forEach(link => { // 如果已经替换过,跳过 if (replacedLinks.has(link)) return; const linkText = link.textContent.trim(); const linkHref = link.getAttribute('href') || ''; // 替换Spaces链接 - 仅替换一次 if ( (linkHref.includes('/spaces') || linkHref === '/spaces' || linkText === 'Spaces' || linkText.match(/^s*Spacess*$/i)) && linkText !== 'OCR模型免费转Markdown' && linkText !== 'OCR模型免费转Markdown' ) { link.textContent = 'OCR模型免费转Markdown'; link.href = 'https://fast360.xyz'; link.setAttribute('target', '_blank'); link.setAttribute('rel', 'noopener noreferrer'); replacedLinks.add(link); } // 删除Posts链接 else if ( (linkHref.includes('/posts') || linkHref === '/posts' || linkText === 'Posts' || linkText.match(/^s*Postss*$/i)) ) { if (link.parentNode) { link.parentNode.removeChild(link); } replacedLinks.add(link); } // 替换Docs链接 - 仅替换一次 else if ( (linkHref.includes('/docs') || linkHref === '/docs' || linkText === 'Docs' || linkText.match(/^s*Docss*$/i)) && linkText !== '模型下载攻略' ) { link.textContent = '模型下载攻略'; link.href = '/'; replacedLinks.add(link); } // 删除Enterprise链接 else if ( (linkHref.includes('/enterprise') || linkHref === '/enterprise' || linkText === 'Enterprise' || linkText.match(/^s*Enterprises*$/i)) 
) { if (link.parentNode) { link.parentNode.removeChild(link); } replacedLinks.add(link); } }); // 查找可能嵌套的Spaces和Posts文本 const textNodes = []; function findTextNodes(element) { if (element.nodeType === Node.TEXT_NODE) { const text = element.textContent.trim(); if (text === 'Spaces' || text === 'Posts' || text === 'Enterprise') { textNodes.push(element); } } else { for (const child of element.childNodes) { findTextNodes(child); } } } // 只在导航区域内查找文本节点 findTextNodes(headerArea); // 替换找到的文本节点 textNodes.forEach(node => { const text = node.textContent.trim(); if (text === 'Spaces') { node.textContent = node.textContent.replace(/Spaces/g, 'OCR模型免费转Markdown'); } else if (text === 'Posts') { // 删除Posts文本节点 if (node.parentNode) { node.parentNode.removeChild(node); } } else if (text === 'Enterprise') { // 删除Enterprise文本节点 if (node.parentNode) { node.parentNode.removeChild(node); } } }); // 标记已替换完成 window._navLinksReplaced = true; } // 替换代码区域中的域名 function replaceCodeDomains() { // 特别处理span.hljs-string和span.njs-string元素 document.querySelectorAll('span.hljs-string, span.njs-string, span[class*="hljs-string"], span[class*="njs-string"]').forEach(span => { if (span.textContent && span.textContent.includes('huggingface.co')) { span.textContent = span.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 替换hljs-string类的span中的域名(移除多余的转义符号) document.querySelectorAll('span.hljs-string, span[class*="hljs-string"]').forEach(span => { if (span.textContent && span.textContent.includes('huggingface.co')) { span.textContent = span.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 替换pre和code标签中包含git clone命令的域名 document.querySelectorAll('pre, code').forEach(element => { if (element.textContent && element.textContent.includes('git clone')) { const text = element.innerHTML; if (text.includes('huggingface.co')) { element.innerHTML = text.replace(/huggingface.co/g, 'aifasthub.com'); } } }); // 处理特定的命令行示例 document.querySelectorAll('pre, code').forEach(element => { const 
text = element.innerHTML; if (text.includes('huggingface.co')) { // 针对git clone命令的专门处理 if (text.includes('git clone') || text.includes('GIT_LFS_SKIP_SMUDGE=1')) { element.innerHTML = text.replace(/huggingface.co/g, 'aifasthub.com'); } } }); // 特别处理模型下载页面上的代码片段 document.querySelectorAll('.flex.border-t, .svelte_hydrator, .inline-block').forEach(container => { const content = container.innerHTML; if (content && content.includes('huggingface.co')) { container.innerHTML = content.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 特别处理模型仓库克隆对话框中的代码片段 try { // 查找包含"Clone this model repository"标题的对话框 const cloneDialog = document.querySelector('.svelte_hydration_boundary, [data-target="MainHeader"]'); if (cloneDialog) { // 查找对话框中所有的代码片段和命令示例 const codeElements = cloneDialog.querySelectorAll('pre, code, span'); codeElements.forEach(element => { if (element.textContent && element.textContent.includes('huggingface.co')) { if (element.innerHTML.includes('huggingface.co')) { element.innerHTML = element.innerHTML.replace(/huggingface.co/g, 'aifasthub.com'); } else { element.textContent = element.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } } }); } // 更精确地定位克隆命令中的域名 document.querySelectorAll('[data-target]').forEach(container => { const codeBlocks = container.querySelectorAll('pre, code, span.hljs-string'); codeBlocks.forEach(block => { if (block.textContent && block.textContent.includes('huggingface.co')) { if (block.innerHTML.includes('huggingface.co')) { block.innerHTML = block.innerHTML.replace(/huggingface.co/g, 'aifasthub.com'); } else { block.textContent = block.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } } }); }); } catch (e) { // 错误处理但不打印日志 } } // 当DOM加载完成后执行替换 if (document.readyState === 'loading') { document.addEventListener('DOMContentLoaded', () => { replaceHeaderBranding(); replaceNavigationLinks(); replaceCodeDomains(); // 只在必要时执行替换 - 3秒后再次检查 setTimeout(() => { if (!window._navLinksReplaced) { console.log('[Client] 3秒后重新检查导航链接'); 
replaceNavigationLinks(); } }, 3000); }); } else { replaceHeaderBranding(); replaceNavigationLinks(); replaceCodeDomains(); // 只在必要时执行替换 - 3秒后再次检查 setTimeout(() => { if (!window._navLinksReplaced) { console.log('[Client] 3秒后重新检查导航链接'); replaceNavigationLinks(); } }, 3000); } // 增加一个MutationObserver来处理可能的动态元素加载 const observer = new MutationObserver(mutations => { // 检查是否导航区域有变化 const hasNavChanges = mutations.some(mutation => { // 检查是否存在header或nav元素变化 return Array.from(mutation.addedNodes).some(node => { if (node.nodeType === Node.ELEMENT_NODE) { // 检查是否是导航元素或其子元素 if (node.tagName === 'HEADER' || node.tagName === 'NAV' || node.querySelector('header, nav')) { return true; } // 检查是否在导航元素内部 let parent = node.parentElement; while (parent) { if (parent.tagName === 'HEADER' || parent.tagName === 'NAV') { return true; } parent = parent.parentElement; } } return false; }); }); // 只在导航区域有变化时执行替换 if (hasNavChanges) { // 重置替换状态,允许再次替换 window._navLinksReplaced = false; replaceHeaderBranding(); replaceNavigationLinks(); } }); // 开始观察document.body的变化,包括子节点 if (document.body) { observer.observe(document.body, { childList: true, subtree: true }); } else { document.addEventListener('DOMContentLoaded', () => { observer.observe(document.body, { childList: true, subtree: true }); }); } })(); '\n # self.stuff = \"

te a me mi puppi la fava

\"\n self.contentChanged.emit()\n\nclass SectionViewer(QtCore.QObject):\n contentReady = QtCore.pyqtSignal()\n\n def __init__(self, proc, addr = 0):\n QtCore.QObject.__init__(self)\n self.proc = proc\n self.sect = None\n if addr != 0:\n self.sect = self.proc.sections[addr]\n\n def setMemoryView(self, vmv):\n self.vmv = vmv\n\n @QtCore.pyqtSlot(str, int, int)\n def viewAs(self, mode, start, end):\n #fragType = fragment.CodeFragment if mode == 'code' else fragment.DataFragment\n #self.sect.addFragment(fragType, start, end)\n viewType = 'D' if mode == 'D' else 'C'\n self.sect.cdmap[start-self.sect.start:end-self.sect.start] = viewType\n self.show()\n\n @QtCore.pyqtSlot(int)\n def setAddr(self, addr):\n self.sect = self.proc.sections[addr]\n self.show()\n\n # @QtCore.pyqtSlot(int)\n def show(self):\n out = []\n out.append('\\n')\n out.append('\\n')\n out.append(' \\n')\n out.append(' \\n')\n out.append(' \\n')\n out.append(' \\n')\n out.append('
\\n')\n \n out.append(self.vmv.getHTML())\n out.append(self.sect.getHTML())\n\n # end content\n out.append('\\n')\n out.append('')\n\n self.stuff = ''.join(out)\n self.contentReady.emit()\n\n\nclass VirtualMemoryView(object):\n def __init__(self, process):\n tmp = []\n\n self.min = -1\n self.max = -1\n self.sizes = []\n self.pixelWidth = 600.0\n last = -1\n\n for (interval, section) in process.sections.items():\n # just to avoid elf .strtab and .symtab, clearly it \n # has to be fixed\n if self.min == -1 and interval[0] > 2000:\n self.min = interval[0]\n self.max = interval[1]\n\n if last != -1:\n if last != interval[0] and last > 2000:\n tmp.append(('gap', interval[0]-last, interval[0]))\n\n last = interval[1]\n\n if interval[0] > 2000:\n tmp.append((section.name, interval[1]-interval[0], interval[0]))\n\n rangetot = self.max-self.min\n for (name, size, start) in tmp:\n self.sizes.append((name, int((float(size)/float(rangetot))*self.pixelWidth), start))\n\n def getHTML(self):\n ret = ''\n colors = { \\\n 'data' : '#3333bb', \\\n 'text' : '#bb3333', \\\n 'gap' : '#111111' \\\n }\n\n i = 0\n ret += '
\\n'\n for (name, size, start) in self.sizes:\n onclick = \"sv.setAddr(%d)\" % start\n if name == '.text' or ('__TEXT' in name):\n x = 'text'\n elif name == 'gap':\n x = 'gap'\n onclick = \"\"\n else:\n x = 'data'\n\n ret += '
\\n' % \\\n (onclick, colors[x], size)\n i += 1\n ret += '
\\n'\n ret += '
\\n'\n return ret\n\n\nclass MainView(QtGui.QMainWindow):\n def __init__(self):\n super(MainView, self).__init__()\n self.initUI()\n\n def showLoading(self):\n self.webView.setHtml('

Loading...

', QtCore.QUrl('qrc:/'))\n\n def showHtml(self):\n stuff = self.anal.stuff\n self.webView.setHtml(stuff, QtCore.QUrl('qrc:/'))\n self.webView.page().mainFrame().addToJavaScriptWindowObject(\"sv\", self.sectView)\n\n def showView(self):\n stuff = self.sectView.stuff\n self.webView.setHtml(stuff, QtCore.QUrl('qrc:/'))\n self.webView.page().mainFrame().addToJavaScriptWindowObject(\"sv\", self.sectView)\n\n def showBackground(self):\n self.webView.setHtml('', \\\n QtCore.QUrl('qrc:/'))\n\n def viewSections(self):\n self.anal.show()\n\n def openStuff(self):\n fileName = QtGui.QFileDialog.getOpenFileName( \\\n self, 'Open File', '', 'Files (*)')\n\n if fileName:\n self.showLoading()\n\n self.process = loader.SPUTA_FUORI_IL_MOSTO(fileName)\n self.virtualMemoryView = VirtualMemoryView(self.process)\n\n self.anal = ANALyzer(self.process)\n self.anal.setMemoryView(self.virtualMemoryView)\n self.anal.contentChanged.connect(self.showHtml)\n\n self.sectView = SectionViewer(self.process)\n self.sectView.setMemoryView(self.virtualMemoryView)\n self.sectView.contentReady.connect(self.showView)\n\n self.viewSections()\n\n def cpuStatus(self):\n if self.cpustatus.isHidden():\n self.cpustatus.show()\n else:\n self.cpustatus.hide()\n\n def _actions(self):\n ret = [ \\\n ('Open', ':icons/open.png', 'Ctrl+O', self.openStuff), \\\n (None, None, None, None), \\\n ('View Sections', ':icons/sections.png', 'Ctrl+Alt+S', self.viewSections), \\\n ('View CPU status', ':icons/cpu.png', 'Ctrl+Q', self.cpuStatus) \\\n ]\n\n return ret\n\n def _setupToolbar(self, toolbar):\n alist = self._actions()\n for (name, icon, shortcut, triggered) in alist:\n if name == None:\n toolbar.addSeparator()\n else:\n a = QtGui.QAction(QtGui.QIcon(icon), name, self)\n a.setShortcut(shortcut)\n a.triggered.connect(triggered)\n toolbar.addAction(a)\n\n def initUI(self):\n self.toolbar = self.addToolBar('Stuff')\n self._setupToolbar(self.toolbar)\n\n self.setGeometry(100, 100, 800, 600)\n 
self.setWindowTitle('QTDisassa!')\n #self.showMaximized()\n\n self.webView = QtWebKit.QWebView(self)\n self.setCentralWidget(self.webView)\n\n self.cpustatus = QtGui.QDockWidget(\"CPU Status\", self)\n self.cpustatus.setAllowedAreas(QtCore.Qt.BottomDockWidgetArea | QtCore.Qt.LeftDockWidgetArea)\n self.addDockWidget(QtCore.Qt.BottomDockWidgetArea, self.cpustatus)\n self.cpustatus.hide()\n\n self.showBackground()\n self.show()\n\ndef main():\n\n app = QtGui.QApplication(sys.argv)\n mainView = MainView()\n sys.exit(app.exec_())\n\n\nif __name__ == '__main__':\n main()"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2013,"string":"2,013"}}},{"rowIdx":41109,"cells":{"__id__":{"kind":"number","value":4183298148073,"string":"4,183,298,148,073"},"blob_id":{"kind":"string","value":"03a1b7f38e7b53a0ae564440d73626b78bdb6b07"},"directory_id":{"kind":"string","value":"a373e287fbbbf78ebd7187c3a958926d6af15d9b"},"path":{"kind":"string","value":"/sage/combinat/sf/homogeneous.py"},"content_id":{"kind":"string","value":"3a6b423f7df3a16c7b9c5acdbf9efb7ad9ee3d49"},"detected_licenses":{"kind":"list 
like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"thalespaiva/sagelib"},"repo_url":{"kind":"string","value":"https://github.com/thalespaiva/sagelib"},"snapshot_id":{"kind":"string","value":"a1b9f57869b61128476eb8dda6e62558ff2eefbc"},"revision_id":{"kind":"string","value":"fd0c7c46e6a2da4b84df582e0da0333ce5cf79d9"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-20T03:23:02.550743","string":"2021-01-20T03:23:02.550743"},"revision_date":{"kind":"timestamp","value":"2012-07-09T11:36:20","string":"2012-07-09T11:36:20"},"committer_date":{"kind":"timestamp","value":"2012-07-09T11:36:20","string":"2012-07-09T11:36:20"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"\"\"\"\nHomogenous symmetric functions\n\"\"\"\n#*****************************************************************************\n# Copyright (C) 2007 Mike Hansen , \n#\n# Distributed under the terms of the GNU General Public License (GPL)\n#\n# This code is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU\n# General Public License for more details.\n#\n# The full text of the GPL is available at:\n#\n# http://www.gnu.org/licenses/\n#*****************************************************************************\n\n#################################### \n# #\n# Homogeneous Symmetric Functions #\n# #\n####################################\nimport multiplicative, sfa, classical\n\nclass SymmetricFunctionAlgebra_homogeneous(multiplicative.SymmetricFunctionAlgebra_multiplicative):\n def __init__(self, R):\n \"\"\"\n TESTS::\n \n sage: h = SFAHomogeneous(QQ)\n sage: h == loads(dumps(h))\n True\n \"\"\"\n classical.SymmetricFunctionAlgebra_classical.__init__(self, R, \"homogeneous\", 'h')\n\n def dual_basis(self, scalar=None, prefix=None):\n \"\"\"\n The dual basis of the homogeneous basis with respect to the\n standard scalar product is the monomial basis.\n \n EXAMPLES::\n \n sage: m = SFAMonomial(QQ)\n sage: h = SFAHomogeneous(QQ)\n sage: h.dual_basis() == m\n True\n \"\"\"\n if scalar is None:\n return sfa.SFAMonomial(self.base_ring())\n else:\n return sfa.SymmetricFunctionAlgebra(self, scalar, prefix=prefix)\n\n\n class Element(classical.SymmetricFunctionAlgebra_classical.Element):\n def omega(self):\n \"\"\"\n Returns the image of self under the Frobenius / omega\n automorphism.\n \n EXAMPLES::\n \n sage: h = SFAHomogeneous(QQ)\n sage: a = h([2,1]); a\n h[2, 1]\n sage: a.omega()\n h[1, 1, 1] - h[2, 1]\n sage: e = SFAElementary(QQ)\n sage: e(h([2,1]).omega())\n e[2, 1]\n \"\"\"\n e = sfa.SFAElementary(self.parent().base_ring())\n return self.parent()(e._from_element(self))\n\n def expand(self, n, alphabet='x'):\n \"\"\"\n Expands the symmetric function as a symmetric polynomial in n\n variables.\n \n EXAMPLES::\n \n sage: h = SFAHomogeneous(QQ)\n sage: h([3]).expand(2)\n x0^3 + x0^2*x1 + x0*x1^2 + x1^3\n sage: h([1,1,1]).expand(2)\n x0^3 + 3*x0^2*x1 + 3*x0*x1^2 + x1^3\n sage: h([2,1]).expand(3)\n x0^3 + 2*x0^2*x1 + 2*x0*x1^2 + x1^3 + 2*x0^2*x2 + 3*x0*x1*x2 + 
2*x1^2*x2 + 2*x0*x2^2 + 2*x1*x2^2 + x2^3\n sage: h([3]).expand(2,alphabet='y')\n y0^3 + y0^2*y1 + y0*y1^2 + y1^3\n sage: h([3]).expand(2,alphabet='x,y')\n x^3 + x^2*y + x*y^2 + y^3\n sage: h([3]).expand(3,alphabet='x,y,z')\n x^3 + x^2*y + x*y^2 + y^3 + x^2*z + x*y*z + y^2*z + x*z^2 + y*z^2 + z^3\n \"\"\"\n condition = lambda part: False\n return self._expand(condition, n, alphabet)\n\n# Backward compatibility for unpickling\nfrom sage.structure.sage_object import register_unpickle_override\nregister_unpickle_override('sage.combinat.sf.homogeneous', 'SymmetricFunctionAlgebraElement_homogeneous', SymmetricFunctionAlgebra_homogeneous.Element)\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2012,"string":"2,012"}}},{"rowIdx":41110,"cells":{"__id__":{"kind":"number","value":12489764914902,"string":"12,489,764,914,902"},"blob_id":{"kind":"string","value":"944da320203ffea97b7c6c7235deafdd25444738"},"directory_id":{"kind":"string","value":"9592dea816fdbf84992b031ee09eaf87419d0810"},"path":{"kind":"string","value":"/recursion_SumTo.py"},"content_id":{"kind":"string","value":"13597571748975a25a1a826e4305ddc45861ee61"},"detected_licenses":{"kind":"list 
like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"MattAnderson16/Algorithms"},"repo_url":{"kind":"string","value":"https://github.com/MattAnderson16/Algorithms"},"snapshot_id":{"kind":"string","value":"5300afd3adf9bfd19237b435828bf6493f7808a5"},"revision_id":{"kind":"string","value":"57b355e0a7f9db5433c4f6b7ffed39400d1b7c21"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2020-06-01T14:08:36.317254","string":"2020-06-01T14:08:36.317254"},"revision_date":{"kind":"timestamp","value":"2014-10-20T15:06:58","string":"2014-10-20T15:06:58"},"committer_date":{"kind":"timestamp","value":"2014-10-20T15:06:58","string":"2014-10-20T15:06:58"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"def SumTo(n):\r\n if n == 1:\r\n result = 1\r\n else:\r\n result = n + SumTo(n-1)\r\n return result\r\n\r\nn = int(input(\"Please enter a positive integer value >> \"))\r\nresult = 
SumTo(n)\r\nprint(result)\r\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41111,"cells":{"__id__":{"kind":"number","value":17248588663249,"string":"17,248,588,663,249"},"blob_id":{"kind":"string","value":"fa23fb72695501c5f39e627e7b61dafbe46f68b5"},"directory_id":{"kind":"string","value":"1a554fcb0bdb08a58b90bba6e14c7710fd48a26a"},"path":{"kind":"string","value":"/uis_r_us/tests.py"},"content_id":{"kind":"string","value":"a8b5cae1af6c4e52c33cfae9b01b226a66f18a77"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"mvpdev/nmis"},"repo_url":{"kind":"string","value":"https://github.com/mvpdev/nmis"},"snapshot_id":{"kind":"string","value":"68c304d8657e587e4225231b20b276ffec3bf584"},"revision_id":{"kind":"string","value":"c85c0b4996b50329dd078845ccc0ff8804ae9760"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2020-05-20T00:50:37.654729","string":"2020-05-20T00:50:37.654729"},"revision_date":{"kind":"timestamp","value":"2011-08-19T19:46:12","string":"2011-08-19T19:46:12"},"committer_date":{"kind":"timestamp","value":"2011-08-19T19:46:12","string":"2011-08-19T19:46:12"},"github_id":{"kind":"number","value":1102934,"string":"1,102,934"},"star_events_count":{"kind":"number","value":2,"string":"2"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind"
:"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"from django.test import TestCase\n\nfrom uis_r_us.views import get_nav_zones, get_nav_zones_inefficient\n\nclass TestLgaList(TestCase):\n fixtures = ['districts.json']\n\n def test_nav_zones(self):\n nav_zones = get_nav_zones()\n self.assertEqual(len(nav_zones), 6)\n\n def test_nav_zones2(self):\n nav_zones2 = get_nav_zones_inefficient()\n self.assertEqual(len(nav_zones2), 6)\n\n def test_nav_zone_equality(self):\n nzs = [get_nav_zones(), get_nav_zones_inefficient()]\n\n def get_names(z):\n [n['name'] for n in z]\n self.assertEqual(*[get_names(nz) for nz in nzs])\n\n def state_names(z):\n return [s['name'] for s in z['states']]\n\n for nzi in range(0, len(nz)):\n self.assertEqual(*[state_names(nz[nzi]) for nz in nzs])\n\n def ordered_lga_slugs(z):\n lga_slugs = []\n for s in z['states']:\n for lga in s['lgas']:\n lga_slugs.append(lga['unique_slug'])\n return lga_slugs\n\n for nzi in range(0, len(nz)):\n self.assertEqual(*[ordered_lga_slugs(nz[nzi]) for nz in nzs])\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2011,"string":"2,011"}}},{"rowIdx":41112,"cells":{"__id__":{"kind":"number","value":4028679345171,"string":"4,028,679,345,171"},"blob_id":{"kind":"string","value":"be6f603b87db51fde408bbd0e4d2fe8578f9c8a0"},"directory_id":{"kind":"string","value":"4a2bfc2cce097eaf3147ebb39e89a70f4fc7312f"},"path":{"kind":"string","value":"/bundle/systemTests/moduleSuites/moduleEndToEndSuites/src/main/resources/scripts/testOISearch.py"},"content_id":{"kind":"string","value":"74cc884dd27327c781eebc9f6612cc8a28249b74"},"detected_licenses":{"kind":"list 
like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"piyush76/EMS"},"repo_url":{"kind":"string","value":"https://github.com/piyush76/EMS"},"snapshot_id":{"kind":"string","value":"24da7f9caf5611bec80a095dc62c5bd6c7c41ea4"},"revision_id":{"kind":"string","value":"12320744e1cb6c492caba6d766056eaef2ade096"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-25T03:54:27.575584","string":"2021-01-25T03:54:27.575584"},"revision_date":{"kind":"timestamp","value":"2014-05-08T20:37:37","string":"2014-05-08T20:37:37"},"committer_date":{"kind":"timestamp","value":"2014-05-08T20:41:17","string":"2014-05-08T20:41:17"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"# -*- coding: utf-8 -*-\nimport os,sys,time,socket,requests\nfrom com.m1.ems.mgmt import ManagementContainer\nfrom com.m1.ems.mgmt import SearchConstraint\nfrom com.m1.util.mgmt import SearchConstraintOperator\nfrom com.m1.ems.search import IndexSearchConstraint\nfrom com.m1.ems.search.solr import SolrQueryBuilder\nfrom com.m1.ems.search import UserQueryBuilder\nfrom com.m1.ems.search.IIndexSearchManager import CallerApp\nfrom com.m1.ems.mgmt import ICustomerManager\nfrom com.m1.ems.mgmt import Capabilities\nfrom com.m1.util.ender import Service\nfrom testUtils import *\nfrom com.m1.ems.mgmt.activemailbox import 
IReviewerGroupManager\nfrom com.m1.ems.mgmt import SavedUserSet\nfrom com.m1.ems.mgmt.activemailbox import ReviewerGroup\nfrom com.m1.ems.mgmt import InternalUserSets\n\n# wait until messages have been indexed...\ndef waitForindexing(mc,custid,count):\n print 'waitForindexing()'\n retries = 30\n while retries > 0:\n found = searchCount(mc,custid,'','','','','','any',None)\n print 'found',found\n if found > count:\n print 'failing...'\n return False;\n\n if found == count:\n print 'success - ending wait'\n return True\n\n print 'sleeping 30 seconds'\n time.sleep(30)\n retries = retries - 1\n\n print 'Failed to find correct number of messages'\n print 'found',found,'expected',count\n raise Exception('Failed to find messages in time')\n return False\n\n\n# run a search and report the count\ndef searchCount(mc,custid,keywords,sender,receiver,subject,attachment,language,attach_scope,rg = None, msgMode = False):\n global highpass\n global highfail\n\n sm = mc.getIndexSearchManager()\n isc = IndexSearchConstraint(custid,None)\n isc.constrainByLanguage(language)\n if rg is not None :\n isc.constrainByReviewerGroup(rg)\n if msgMode : \n\tisc.queryMessages(msgMode)\n qb = SolrQueryBuilder(custid);\n qb.applyLanguage(language)\n qb.applyAttachmentScope(attach_scope)\n qb.applyDefaultSearch(keywords,False,False)\n qb.applyFileName(attachment,None)\n qb.applyRecipients(receiver,False)\n qb.applyFrom(sender)\n qb.applySubject(subject)\n print 'generatedQuery=',qb.getQuery()\n sr = sm.search(qb.getQuery(),isc,None,CallerApp.REVIEWER_SEARCH)\n for doc in sr.documents():\n if True != msgMode : \n\t if '' != sender :\n if 0 <= doc.getSender().find('') :\n highpass = highpass + 1\n print 'highlight passed'\n else :\n highfail = highfail +1\n print 'highlight failed'\n print '\\t',doc.getSender()\t\t \n\n if '' != receiver :\n recip = ''\n\t\tr1 = doc.getString('mailto')\n\t\tif r1 is not None :\n recip = recip + r1\n\t\tr2 = doc.getString('mailcc')\n if r1 is not None :\n recip = 
recip + r1\n if 0 <= recip.find(''):\n highpass = highpass + 1\n print 'highlight passed'\n else :\n highfail = highfail +1\n print 'highlight failed'\n print '\\t', recip\n \n if '' != subject :\n if 0 <= doc.getSubject().find('') :\n highpass = highpass + 1\n print 'highlight passed'\n else :\n highfail = highfail +1\n print 'highlight failed'\n print '\\t',doc.getSubject()\n\n if '' != attachment :\n att = doc.getString(\"attachedfiles\")\n if att is None:\n att = doc.getString(\"filename\")\n if att is not None :\n if 0 <= att.find('') :\n highpass = highpass + 1\n print 'highlight passed'\n else :\n highfail = highfail +1\n print 'highlight failed'\n print '\\t',att \n return sr.getDocCount()\n \ndef searcher(mc,custid,keywords,sender,receiver,subject,attachment,language, attach_scope):\n sm = mc.getIndexSearchManager()\n isc = IndexSearchConstraint(custid,None)\n isc.constrainByLanguage(language)\n qb = SolrQueryBuilder(custid);\n qb.applyLanguage(language)\n qb.applyAttachmentScope(attach_scope)\n qb.applyDefaultSearch(keywords,False,False)\n qb.applyFileName(attachment,None)\n qb.applyRecipients(receiver,False)\n qb.applyFrom(sender)\n qb.applySubject(subject)\n sr = sm.search(qb.getQuery(),isc,None,CallerApp.REVIEWER_SEARCH)\n return sr\n \ndef testPartial(expected,mc,custid,keywords,sender,receiver,subject,attachment,language, attach_scope):\n global failed_count\n global ok_count\n\n print 'custid=\"'+str(custid)+'\"'\n print 'keywords=\"'+keywords+'\"'\n print 'sender=\"'+sender+'\"'\n print 'receivers=\"'+receiver+'\"'\n print 'subject=\"'+subject+'\"'\n print 'attachment=\"'+attachment+'\"'\n print 'language=\"'+language+'\"'\n print 'attachment_scope=',attach_scope\n sr = searcher(mc,custid,keywords,sender,receiver,subject,attachment,language, attach_scope)\n print 'expected =',expected,'\\t found =',sr.isPartial()\n if sr.isPartial() != expected:\n print 'FAILED'\n failed_count = failed_count + 1\n else:\n print 'PASSED'\n ok_count = ok_count + 1\n 
\n# run a test and count passes and failures\ndef test(expected,mc,custid,keywords,sender,receiver,subject,attachment,language,attach_scope,rg = None, msgMode = False):\n global failed_count\n global ok_count\n\n print 'custid=\"'+str(custid)+'\"'\n print 'keywords=\"'+repr(keywords)+'\"'\n print 'sender=\"'+sender+'\"'\n print 'receivers=\"'+receiver+'\"'\n print 'subject=\"'+subject+'\"'\n print 'attachment=\"'+attachment+'\"'\n print 'language=\"'+language+'\"'\n print 'attachment_scope=',attach_scope\n if rg is not None :\n print 'reviewerGroup ='+rg.toString()\n print 'message_mode=',msgMode\n c = searchCount(mc,custid,keywords,sender,receiver,subject,attachment,language,attach_scope,rg, msgMode)\n print 'expected =',expected,'\\t found =',c\n if c != expected:\n print 'FAILED'\n failed_count = failed_count + 1\n else:\n print 'PASSED'\n ok_count = ok_count + 1\n\n# run a test in both message and document mode and record successes and failures\ndef testAllModes(expected,mc,custid,keywords,sender,receiver,subject,attachment,language,attach_scope,rg = None, messageCount = None):\n global failed_count\n global ok_count\n\t\n messageExpected = expected\n if messageCount is not None :\n messageExpected = messageCount\n\n test(expected,mc,custid,keywords,sender,receiver,subject,attachment,language,attach_scope,rg, False)\n test(messageExpected,mc,custid,keywords,sender,receiver,subject,attachment,language,attach_scope,rg, True)\n\n\n\n\ndef partialTest(mc,custid):\n global ok_count\n global failed_count\n\n svc = Service(\"ems-solr\")\n \n try:\n languages = ['en', 'any']\n for language in languages:\n attach_scope = None\n # find keywords in body\n testPartial(False,mc,custid,'natural','','','','',language, attach_scope)\n \n im = mc.getIslandManager()\n cm = mc.getCustomerManager()\n \n parms = im.getIsland(cm.getCustomer(int(custid)).getSearchIslandId()).getSearchParms()\n print 'search parms',parms\n \n # this should no effect on search\n 
svc.invoke('stop','archive-7')\n print 'archive-7 stopped'\n \n languages = ['en', 'any']\n for language in languages:\n attach_scope = None\n # find keywords in body\n testPartial(False,mc,custid,'natural','','','','',language, attach_scope)\n\n svc.invoke('start','archive-7')\n print 'archive-7 started'\n\n # this should cause search URL to change\n svc.invoke('stop','archive-8')\n print 'archive-8 stopped'\n\n parms2 = im.getIsland(cm.getCustomer(int(custid)).getSearchIslandId()).getSearchParms()\n tries = 10\n while ((-1 == parms2.find('archive-7')) or (-1 == parms2.find('isPartial=0'))) and tries > 0:\n print 'waiting for search parms to change...',parms2\n time.sleep(30)\n parms2 = im.getIsland(cm.getCustomer(int(custid)).getSearchIslandId()).getSearchParms()\n tries = tries - 1\n \n if (-1 == parms2.find('archive-7')) or (-1 == parms2.find('isPartial=0')):\n print 'FAILED because distributed search parms are incorrect:',parms2\n failed_count = failed_count + 1\n return \n else:\n print 'distributed search parms changed to',parms2\n\n languages = ['en', 'any']\n for language in languages:\n attach_scope = None\n # find keywords in body\n testPartial(False,mc,custid,'natural','','','','',language, attach_scope)\n\n parms = parms2\n\n # search URL should change and isPartial should be set and detected\n svc.invoke('stop','archive-7')\n print 'archive-7 and archive-8 stopped'\n\n parms2 = im.getIsland(cm.getCustomer(int(custid)).getSearchIslandId()).getSearchParms()\n tries = 10\n while -1 == parms2.find('isPartial=1') and tries > 0:\n # wait for DS manager to notice archive-7,archive-8 is no longer there\n print 'waiting for search parms to change...',parms2\n time.sleep(30)\n parms2 = im.getIsland(cm.getCustomer(int(custid)).getSearchIslandId()).getSearchParms()\n tries = tries - 1\n\n if -1 == parms2.find('isPartial=1'):\n print 'FAILED because distributed search parms are incorrect:',parms\n failed_count = failed_count + 1\n return \n else:\n print 
'distributed search parms changed to',parms2\n\n languages = ['en', 'any']\n for language in languages:\n attach_scope = None\n # find keywords in body\n testPartial(True,mc,custid,'natural','','','','',language, attach_scope)\n finally:\n svc.invoke('start',['archive-5','archive-6','archive-7','archive-8'])\n\ndef extractTest(mc,custid):\n global ok_count\n global failed_count\n\n # in the right languages\n languages = ['en', 'any']\n for language in languages:\n attach_scope = None\n # find keywords in body\n test(1,mc,custid,'\"if you have queries that contain these words\"','','','','sign.zip',language, attach_scope)\n test(2,mc,custid,'','','','','sign.zip',language, attach_scope)\n\ndef extraScopeTest(mc, custid):\n global ok_count\n global failed_count\n\n rgm = None\n rg = None\n \n try:\n rgm = mc.getReviewerGroupManager()\n mailboxScope = InternalUserSets.getAllUsersSet(custid)\n mc.getUserManager().saveUserSet(mailboxScope)\n reviewers = InternalUserSets.getAllUsersSet(custid)\n mc.getUserManager().saveUserSet(reviewers)\n\n rg = rgm.getReviewerGroup(custid, \"dummyGroup99\")\n if rg is not None:\n print 'removing pre-existing reviewergroup'\n rgm.deleteReviewerGroup(custid,rg.getGroupID())\n rg = rgm.createReviewerGroup(custid, \"dummyGroup99\", reviewers, mailboxScope, 'mailfrom:string(\"irfan\")')\n if rg is not None :\n print 'created rg='+rg.toString()\n rg1 = rgm.getReviewerGroup(custid, \"dummyGroup99\")\n\t if 'mailfrom:string(\"irfan\")' != rg1.getExtraScope() :\n print 'FAIL - extrascope not correctly set for rg'\n else:\n print 'new reviewerGroup has extrascope =' + rg1.getExtraScope()\n rg = rg1\n else :\n print 'failed to create reviewerGroup'\n test(2,mc,custid,'','','','','','any',None,rg) \n test(1,mc,custid,'','','','','','any',True,rg)\n test(1,mc,custid,'','','','','','any',False,rg)\n test(2,mc,custid,'','','','','','en',None,rg)\n test(1,mc,custid,'','','','','','en',True,rg)\n test(1,mc,custid,'','','','','','en',False,rg)\n 
        test(2,mc,custid,'','','','','','ja',None,rg)
        test(1,mc,custid,'','','','','','ja',True,rg)
        test(1,mc,custid,'','','','','','ja',False,rg)

        # subject-based extra scope
        rg.setExtraScope('mailsubject:"character map"')
        rgm.updateReviewerGroup(rg)
        test(2,mc,custid,'','','','','','any',None,rg)
        test(0,mc,custid,'','','','','','any',True,rg)
        test(2,mc,custid,'','','','','','any',False,rg)

        # body-content extra scope
        rg.setExtraScope('content:"koala"')
        rgm.updateReviewerGroup(rg)
        test(2,mc,custid,'','','','','','any',None,rg)
        test(1,mc,custid,'','','','','','any',True,rg)
        test(1,mc,custid,'','','','','','any',False,rg)

        # date-range extra scope
        rg.setExtraScope('emaildate:>2012-06-11')
        rgm.updateReviewerGroup(rg)
        test(6,mc,custid,'','','','','','any',None,rg)
        test(1,mc,custid,'','','','','','any',True,rg)
        test(5,mc,custid,'','','','','','any',False,rg)

        # recipient extra scope
        rg.setExtraScope('recipients:"content1@lab062.m1dev.com"')
        rgm.updateReviewerGroup(rg)
        test(8,mc,custid,'','','','','','any',None,rg)
        test(1,mc,custid,'','','','','','any',True,rg)
        test(7,mc,custid,'','','','','','any',False,rg)

        # sender extra scope
        rg.setExtraScope('senders:"irfan"')
        rgm.updateReviewerGroup(rg)
        test(2,mc,custid,'','','','','','any',None,rg)
        test(1,mc,custid,'','','','','','any',True,rg)
        test(1,mc,custid,'','','','','','any',False,rg)

    finally:
        # best-effort cleanup of the temporary reviewer group
        if rgm is not None:
            if rg is not None :
                print 'removing reviewergroup'
                rgm.deleteReviewerGroup(custid,rg.getGroupID())
            else :
                print 'no reviewer group to clean up'
        else : 
            print ' no reviewer group manager available'


def getLoginAuthToken(mc, custid, emailAddr):
    # Generates and returns a fresh Outlook-access auth token for the user
    # identified by emailAddr (via OutlookAccessManager.generateUserTokens).
    um = mc.getUserManager()
    ua = um.getUser(emailAddr)
    oam = mc.getOutlookAccessManager()
    cis = oam.generateUserTokens(custid,[ua.getUserID()])
    cisToken = cis[0].getAuthToken()
    print 'cisToken=',cisToken
    return cisToken

def runOISQuery(cisToken, requestedItems, query = '', columns = None, sort = None, host = 'as-1.ems.labmanager.net'):

    cmd = 'curl -v -k 
https://'+host+\"?authToken=\"+cisToken+' -H \"Content-Type: text/xml\" '\n\n if requestedItems is not None :\n cmd += '-H \"x-m1-ems-requested-items: ' + requestedItems + '\" '\n\n if columns is not None :\n cmd += '-H \"x-m1-ems-search-columns: ' + columns + '\" '\n\n if sort is not None :\n cmd += '-H \"x-m1-ems-sort-order: ' + sort + '\" '\n\n cmd += ' -d \"\" + query + \"\"'\n\n###############################################\n\n# 'curl -v -k https://as-1.ems.labmanager.net/wfe/searchArchive?authToken='+cistoken+' -H \"x-m1-ems-requested-items: 1 2 3 4 5 6 7 8 9\" -H \"x-m1-ems-search-columns: 0x0E1B\" -H \"x-m1-ems-sort-order:\" -d \"\"'\n\nimport commands\n\ncmd = 'curl -s -v -k https://as-1.ems.labmanager.net/wfe/searchArchive?authToken=923-gmcxfootlrglnvgkhtlugwmpiwnsikpl -H \"x-m1-ems-requested-items: 1 2 3 4 5 6\" -H \"Content-Type: text/xml\" -d \"\"'\n\nr = commands.getstatusoutput(cmd);\noutput = r[1]\n#print output;\nlines = output.splitlines()\noutputStarted = False;\ncontentlines = []\noutputHeaders = {}\nfor line in lines:\n if line.startswith('<'):\n # this is an output header\n line1 = line.split(': ',1)\n outputHeaders[line1[0].strip('< ')] = line1[1]\n outputStarted = True\n elif line.startswith('*') or line.startswith('>'):\n # input or info - troll for RC\n # look for HTTP/1.1 200 OK like response at end of a line\n line2 = line\n else :\n if outputStarted:\n # output content\n print line\nprint '\\n'\nprint outputHeaders\n\n\n#########################################\n\ndef mainTest(mc,custid,isEdiscovery):\n global ok_count\n global failed_count\n\n loginAuthToken = getLoginAuthToken(mc, custid, \"alert@lab062.m1dev.com\")\n\n # in the right languages\n languages = ['en', 'any']\n for language in languages:\n attach_scope = None\n # find keywords in body\n testAllModes(1,mc,custid,'natural','','','','',language, attach_scope)\n testAllModes(1,mc,custid,'\"natural\"','','','','',language, attach_scope)\n 
testAllModes(1,mc,custid,'habitat','','','','',language, attach_scope)\n testAllModes(1,mc,custid,'\"habitat\"','','','','',language, attach_scope)\n testAllModes(1,mc,custid,'natural habitat','','','','',language, attach_scope)\n\tif isEdiscovery:\n testAllModes(1,mc,custid,'\"natural habitat\"','','','','',language, attach_scope)\n testAllModes(0,mc,custid,'\"naturally habitat\"','','','','',language, attach_scope)\n testAllModes(0,mc,custid,'\"natural habitats\"','','','','',language, attach_scope)\n else:\n testAllModes(1,mc,custid,'\"natural habitat\"','','','','',language, attach_scope)\n testAllModes(1,mc,custid,'\"naturally habitat\"','','','','',language, attach_scope)\n testAllModes(1,mc,custid,'\"natural habitats\"','','','','',language, attach_scope)\n\n testAllModes(1,mc,custid,'habitat natural','','','','',language, attach_scope)\n testAllModes(0,mc,custid,'\"habitat natural\"','','','','',language, attach_scope)\n # test EMSDEV-9804\n testAllModes(3,mc,custid,'\"the\"','','','','',language, attach_scope, None, 2)\n testAllModes(3,mc,custid,'the','','','','',language, attach_scope, None, 2)\n # test EMSDEV-9843\n testAllModes(1,mc,custid,'\"their natural habitat is in australia\"','','','','',language, attach_scope)\n\tif isEdiscovery:\n testAllModes(0,mc,custid,'\"their natural habitat are in australia\"','','','','',language, attach_scope)\n else: \n testAllModes(1,mc,custid,'\"their natural habitat are in australia\"','','','','',language, attach_scope)\n testAllModes(1,mc,custid,'their natural habitat is in australia','','','','',language, attach_scope)\n # find keywords in other fields\n testAllModes(0,mc,custid,'ziggurat','','','','',language, attach_scope)\n testAllModes(3,mc,custid,'irfan','','','','',language, attach_scope, None, 2)\n testAllModes(2,mc,custid,'tester','','','','',language, attach_scope, None, 1)\n testAllModes(2,mc,custid,'australia','','','','',language, attach_scope, None, 1)\n 
testAllModes(2,mc,custid,'100','','','','',language, attach_scope, None, 1)\n testAllModes(2,mc,custid,'koala','','','','',language, attach_scope, None, 1)\n testAllModes(2,mc,custid,'jpg','','','','',language, attach_scope, None, 1) \n testAllModes(2,mc,custid,'.jpg','','','','',language, attach_scope, None, 1)\n testAllModes(2,mc,custid,'*.jpg','','','','',language, attach_scope, None, 1)\n testAllModes(0,mc,custid,'800','','','','',language, attach_scope)\n # find keywords in sender\n testAllModes(0,mc,custid,'','koala','','','',language, attach_scope)\n testAllModes(2,mc,custid,'','irfan','','','',language, attach_scope, None, 1)\n testAllModes(2,mc,custid,'','jabbar','','','',language, attach_scope, None, 1)\n # find keywords in recipient\n testAllModes(0,mc,custid,'','','koala','','',language, attach_scope)\n testAllModes(2,mc,custid,'','','tester','','',language, attach_scope, None, 1)\n testAllModes(16,mc,custid,'','','lab062','','',language, attach_scope, None, 11)\n # find keywords in subject\n testAllModes(0,mc,custid,'','','','koala','',language, attach_scope)\n testAllModes(2,mc,custid,'','','','australia','',language, attach_scope, None, 1)\n testAllModes(2,mc,custid,'','','','100','',language, attach_scope, None, 1)\n testAllModes(0,mc,custid,'','','','800','',language, attach_scope)\n\t# verify EMSDEV-10004\n testAllModes(2,mc,custid,'','','','australia 100','',language, attach_scope, None, 1)\n testAllModes(0,mc,custid,'','','','australia habitat','',language, attach_scope)\n # find keywords in attachment\n testAllModes(0,mc,custid,'','','','','australia',language, attach_scope)\n testAllModes(2,mc,custid,'','','','','koala',language, attach_scope, None, 1)\n testAllModes(2,mc,custid,'','','','','jpg',language, attach_scope, None, 1)\n testAllModes(2,mc,custid,'','','','','.jpg',language, attach_scope, None, 1)\n testAllModes(2,mc,custid,'','','','','*.jpg',language, attach_scope, None, 1)\n\n attach_scope = True\n # find keywords in body\n 
testAllModes(0,mc,custid,'natural','','','','',language, attach_scope)\n testAllModes(0,mc,custid,'\"natural\"','','','','',language, attach_scope)\n testAllModes(0,mc,custid,'habitat','','','','',language, attach_scope)\n testAllModes(0,mc,custid,'\"habitat\"','','','','',language, attach_scope)\n testAllModes(0,mc,custid,'natural habitat','','','','',language, attach_scope)\n testAllModes(0,mc,custid,'\"natural habitat\"','','','','',language, attach_scope)\n testAllModes(0,mc,custid,'habitat natural','','','','',language, attach_scope)\n testAllModes(0,mc,custid,'\"habitat natural\"','','','','',language, attach_scope)\n # find keywords in other fields\n testAllModes(0,mc,custid,'ziggurat','','','','',language, attach_scope)\n testAllModes(2,mc,custid,'irfan','','','','',language, attach_scope)\n testAllModes(1,mc,custid,'tester','','','','',language, attach_scope)\n testAllModes(1,mc,custid,'australia','','','','',language, attach_scope)\n testAllModes(1,mc,custid,'100','','','','',language, attach_scope)\n testAllModes(0,mc,custid,'800','','','','',language, attach_scope)\n testAllModes(1,mc,custid,'koala','','','','',language, attach_scope)\n testAllModes(1,mc,custid,'jpg','','','','',language, attach_scope) \n testAllModes(1,mc,custid,'.jpg','','','','',language, attach_scope)\n testAllModes(1,mc,custid,'*.jpg','','','','',language, attach_scope)\n # find keywords in sender\n testAllModes(0,mc,custid,'','koala','','','',language, attach_scope)\n testAllModes(1,mc,custid,'','irfan','','','',language, attach_scope)\n testAllModes(1,mc,custid,'','jabbar','','','',language, attach_scope)\n # find keywords in recipient\n testAllModes(0,mc,custid,'','','koala','','',language, attach_scope)\n testAllModes(1,mc,custid,'','','tester','','',language, attach_scope)\n testAllModes(5,mc,custid,'','','lab062','','',language, attach_scope)\n # find keywords in subject\n testAllModes(0,mc,custid,'','','','koala','',language, attach_scope)\n 
testAllModes(1,mc,custid,'','','','australia','',language, attach_scope)\n if isEdiscovery:\n testAllModes(0,mc,custid,'','','','\"australias\"','',language, attach_scope)\n else:\n testAllModes(1,mc,custid,'','','','\"australias\"','',language, attach_scope)\n if isEdiscovery:\n testAllModes(1,mc,custid,'','','','\"but sjgy6343468\"','',language, attach_scope)\n testAllModes(0,mc,custid,'','','','\"and sjgy6343468\"','',language, attach_scope)\n else:\n testAllModes(1,mc,custid,'','','','\"but sjgy6343468\"','',language, attach_scope)\n testAllModes(1,mc,custid,'','','','\"and sjgy6343468\"','',language, attach_scope)\n\n testAllModes(1,mc,custid,'','','','100','',language, attach_scope)\n testAllModes(0,mc,custid,'','','','800','',language, attach_scope)\n # verify EMSDEV-10004\n testAllModes(1,mc,custid,'','','','australia 100','',language, attach_scope)\n testAllModes(0,mc,custid,'','','','australia habitat','',language, attach_scope)\n # test EMSDEV-9804\n testAllModes(2,mc,custid,'\"the\"','','','','',language, attach_scope)\n testAllModes(2,mc,custid,'the','','','','',language, attach_scope)\n # test EMSDEV-9843\n testAllModes(0,mc,custid,'\"their natural habitat is in australia\"','','','','',language, attach_scope)\n testAllModes(0,mc,custid,'their natural habitat is in australia','','','','',language, attach_scope)\n # find keywords in attachment\n testAllModes(0,mc,custid,'','','','','australia',language, attach_scope)\n testAllModes(1,mc,custid,'','','','','koala',language, attach_scope)\n testAllModes(1,mc,custid,'','','','','jpg',language, attach_scope)\n testAllModes(1,mc,custid,'','','','','.jpg',language, attach_scope)\n testAllModes(1,mc,custid,'','','','','*.jpg',language, attach_scope)\n\n attach_scope = False\n # find keywords in body\n testAllModes(1,mc,custid,'natural','','','','',language, attach_scope)\n testAllModes(1,mc,custid,'\"natural\"','','','','',language, attach_scope)\n testAllModes(1,mc,custid,'habitat','','','','',language, 
attach_scope)\n testAllModes(1,mc,custid,'\"habitat\"','','','','',language, attach_scope)\n testAllModes(1,mc,custid,'natural habitat','','','','',language, attach_scope)\n testAllModes(1,mc,custid,'\"natural habitat\"','','','','',language, attach_scope)\n testAllModes(1,mc,custid,'habitat natural','','','','',language, attach_scope)\n testAllModes(0,mc,custid,'\"habitat natural\"','','','','',language, attach_scope)\n # find keywords in other fields\n testAllModes(0,mc,custid,'ziggurat','','','','',language, attach_scope)\n testAllModes(1,mc,custid,'irfan','','','','',language, attach_scope)\n testAllModes(1,mc,custid,'tester','','','','',language, attach_scope)\n testAllModes(1,mc,custid,'australia','','','','',language, attach_scope)\n testAllModes(1,mc,custid,'100','','','','',language, attach_scope)\n testAllModes(0,mc,custid,'800','','','','',language, attach_scope)\n testAllModes(1,mc,custid,'koala','','','','',language, attach_scope)\n testAllModes(1,mc,custid,'jpg','','','','',language, attach_scope) \n testAllModes(1,mc,custid,'.jpg','','','','',language, attach_scope)\n testAllModes(1,mc,custid,'*.jpg','','','','',language, attach_scope)\n # find keywords in sender\n testAllModes(0,mc,custid,'','koala','','','',language, attach_scope)\n testAllModes(1,mc,custid,'','irfan','','','',language, attach_scope)\n testAllModes(1,mc,custid,'','jabbar','','','',language, attach_scope)\n # find keywords in recipient\n testAllModes(0,mc,custid,'','','koala','','',language, attach_scope)\n testAllModes(1,mc,custid,'','','tester','','',language, attach_scope)\n testAllModes(11,mc,custid,'','','lab062','','',language, attach_scope)\n # find keywords in subject\n testAllModes(0,mc,custid,'','','','koala','',language, attach_scope)\n testAllModes(1,mc,custid,'','','','australia','',language, attach_scope)\n testAllModes(1,mc,custid,'','','','100','',language, attach_scope)\n testAllModes(0,mc,custid,'','','','800','',language, attach_scope)\n # verify EMSDEV-10004\n 
testAllModes(1,mc,custid,'','','','australia 100','',language, attach_scope)\n testAllModes(0,mc,custid,'','','','australia habitat','',language, attach_scope)\n # test EMSDEV-9804\n testAllModes(1,mc,custid,'\"the\"','','','','',language, attach_scope)\n testAllModes(1,mc,custid,'the','','','','',language, attach_scope)\n # test EMSDEV-9843\n testAllModes(1,mc,custid,'\"their natural habitat is in australia\"','','','','',language, attach_scope)\n testAllModes(1,mc,custid,'their natural habitat is in australia','','','','',language, attach_scope)\n # find keywords in attachment\n testAllModes(0,mc,custid,'','','','','australia',language, attach_scope)\n testAllModes(1,mc,custid,'','','','','koala',language, attach_scope)\n testAllModes(1,mc,custid,'','','','','jpg',language, attach_scope)\n testAllModes(1,mc,custid,'','','','','.jpg',language, attach_scope)\n testAllModes(1,mc,custid,'','','','','*.jpg',language, attach_scope)\n\n # in \"wrong\" language\n languages = ['ru','zh-tw','zh-cn','fr','de','nl','sv','ja','pt','ar','he']\n for language in languages:\n attach_scope = None\n # find keywords in body\n testAllModes(0,mc,custid,'natural','','','','',language, attach_scope)\n testAllModes(0,mc,custid,'\"natural\"','','','','',language, attach_scope)\n testAllModes(0,mc,custid,'habitat','','','','',language, attach_scope)\n testAllModes(0,mc,custid,'\"habitat\"','','','','',language, attach_scope)\n testAllModes(0,mc,custid,'natural habitat','','','','',language, attach_scope)\n testAllModes(0,mc,custid,'\"natural habitat\"','','','','',language, attach_scope)\n testAllModes(0,mc,custid,'habitat natural','','','','',language, attach_scope)\n testAllModes(0,mc,custid,'\"habitat natural\"','','','','',language, attach_scope)\n # find keywords in other fields\n testAllModes(0,mc,custid,'ziggurat','','','','',language, attach_scope)\n testAllModes(2,mc,custid,'irfan','','','','',language, attach_scope, None, 1)\n 
testAllModes(2,mc,custid,'tester','','','','',language, attach_scope, None, 1)\n testAllModes(2,mc,custid,'koala','','','','',language, attach_scope, None, 1)\n testAllModes(2,mc,custid,'jpg','','','','',language, attach_scope, None, 1) \n testAllModes(2,mc,custid,'.jpg','','','','',language, attach_scope, None, 1)\n testAllModes(2,mc,custid,'*.jpg','','','','',language, attach_scope, None, 1)\n testAllModes(0,mc,custid,'australia','','','','',language, attach_scope)\n testAllModes(0,mc,custid,'100','','','','',language, attach_scope)\n testAllModes(0,mc,custid,'800','','','','',language, attach_scope)\n # find keywords in sender\n testAllModes(0,mc,custid,'','koala','','','',language, attach_scope)\n testAllModes(2,mc,custid,'','irfan','','','',language, attach_scope, None, 1)\n testAllModes(2,mc,custid,'','jabbar','','','',language, attach_scope, None, 1)\n # find keywords in recipient\n testAllModes(0,mc,custid,'','','koala','','',language, attach_scope)\n testAllModes(2,mc,custid,'','','tester','','',language, attach_scope, None, 1)\n testAllModes(16,mc,custid,'','','lab062','','',language, attach_scope, None, 11)\n # find keywords in subject\n testAllModes(0,mc,custid,'','','','koala','',language, attach_scope)\n testAllModes(0,mc,custid,'','','','australia','',language, attach_scope)\n testAllModes(0,mc,custid,'','','','100','',language, attach_scope)\n testAllModes(0,mc,custid,'','','','800','',language, attach_scope)\n # verify EMSDEV-10004\n testAllModes(0,mc,custid,'','','','australia 100','',language, attach_scope)\n testAllModes(0,mc,custid,'','','','australia habitat','',language, attach_scope)\n # test EMSDEV-9804\n testAllModes(0,mc,custid,'\"the\"','','','','',language, attach_scope)\n testAllModes(0,mc,custid,'the','','','','',language, attach_scope)\n # test EMSDEV-9843\n testAllModes(0,mc,custid,'\"their natural habitat is in australia\"','','','','',language, attach_scope)\n testAllModes(0,mc,custid,'their natural habitat is in 
australia','','','','',language, attach_scope)\n # find keywords in attachment\n testAllModes(0,mc,custid,'','','','','australia',language, attach_scope)\n testAllModes(2,mc,custid,'','','','','koala',language, attach_scope, None, 1)\n testAllModes(2,mc,custid,'','','','','jpg',language, attach_scope, None, 1)\n testAllModes(2,mc,custid,'','','','','.jpg',language, attach_scope, None, 1)\n testAllModes(2,mc,custid,'','','','','*.jpg',language, attach_scope, None, 1)\n\n attach_scope = True\n # find keywords in body\n testAllModes(0,mc,custid,'natural','','','','',language, attach_scope)\n testAllModes(0,mc,custid,'\"natural\"','','','','',language, attach_scope)\n testAllModes(0,mc,custid,'habitat','','','','',language, attach_scope)\n testAllModes(0,mc,custid,'\"habitat\"','','','','',language, attach_scope)\n testAllModes(0,mc,custid,'natural habitat','','','','',language, attach_scope)\n testAllModes(0,mc,custid,'\"natural habitat\"','','','','',language, attach_scope)\n testAllModes(0,mc,custid,'habitat natural','','','','',language, attach_scope)\n testAllModes(0,mc,custid,'\"habitat natural\"','','','','',language, attach_scope)\n # find keywords in other fields\n testAllModes(0,mc,custid,'ziggurat','','','','',language, attach_scope)\n testAllModes(1,mc,custid,'irfan','','','','',language, attach_scope)\n testAllModes(1,mc,custid,'tester','','','','',language, attach_scope)\n testAllModes(0,mc,custid,'australia','','','','',language, attach_scope)\n testAllModes(0,mc,custid,'100','','','','',language, attach_scope)\n testAllModes(0,mc,custid,'800','','','','',language, attach_scope)\n testAllModes(1,mc,custid,'koala','','','','',language, attach_scope)\n testAllModes(1,mc,custid,'jpg','','','','',language, attach_scope) \n testAllModes(1,mc,custid,'.jpg','','','','',language, attach_scope)\n testAllModes(1,mc,custid,'*.jpg','','','','',language, attach_scope)\n # find keywords in sender\n testAllModes(0,mc,custid,'','koala','','','',language, attach_scope)\n 
testAllModes(1,mc,custid,'','irfan','','','',language, attach_scope)\n testAllModes(1,mc,custid,'','jabbar','','','',language, attach_scope)\n # find keywords in recipient\n testAllModes(0,mc,custid,'','','koala','','',language, attach_scope)\n testAllModes(1,mc,custid,'','','tester','','',language, attach_scope)\n testAllModes(5,mc,custid,'','','lab062','','',language, attach_scope)\n # find keywords in subject\n testAllModes(0,mc,custid,'','','','koala','',language, attach_scope)\n testAllModes(0,mc,custid,'','','','australia','',language, attach_scope)\n testAllModes(0,mc,custid,'','','','100','',language, attach_scope)\n testAllModes(0,mc,custid,'','','','800','',language, attach_scope)\n # verify EMSDEV-10004\n testAllModes(0,mc,custid,'','','','australia 100','',language, attach_scope)\n testAllModes(0,mc,custid,'','','','australia habitat','',language, attach_scope)\n # find keywords in attachment\n testAllModes(0,mc,custid,'','','','','australia',language, attach_scope)\n testAllModes(1,mc,custid,'','','','','koala',language, attach_scope)\n testAllModes(1,mc,custid,'','','','','jpg',language, attach_scope)\n testAllModes(1,mc,custid,'','','','','.jpg',language, attach_scope)\n testAllModes(1,mc,custid,'','','','','*.jpg',language, attach_scope)\n\n attach_scope = False\n # find keywords in body\n testAllModes(0,mc,custid,'natural','','','','',language, attach_scope)\n testAllModes(0,mc,custid,'\"natural\"','','','','',language, attach_scope)\n testAllModes(0,mc,custid,'habitat','','','','',language, attach_scope)\n testAllModes(0,mc,custid,'\"habitat\"','','','','',language, attach_scope)\n testAllModes(0,mc,custid,'natural habitat','','','','',language, attach_scope)\n testAllModes(0,mc,custid,'\"natural habitat\"','','','','',language, attach_scope)\n testAllModes(0,mc,custid,'habitat natural','','','','',language, attach_scope)\n testAllModes(0,mc,custid,'\"habitat natural\"','','','','',language, attach_scope)\n # find keywords in other fields\n 
testAllModes(0,mc,custid,'ziggurat','','','','',language, attach_scope)\n testAllModes(1,mc,custid,'irfan','','','','',language, attach_scope)\n testAllModes(1,mc,custid,'tester','','','','',language, attach_scope)\n testAllModes(0,mc,custid,'australia','','','','',language, attach_scope)\n testAllModes(0,mc,custid,'100','','','','',language, attach_scope)\n testAllModes(0,mc,custid,'800','','','','',language, attach_scope)\n testAllModes(1,mc,custid,'koala','','','','',language, attach_scope)\n testAllModes(1,mc,custid,'jpg','','','','',language, attach_scope) \n testAllModes(1,mc,custid,'.jpg','','','','',language, attach_scope)\n testAllModes(1,mc,custid,'*.jpg','','','','',language, attach_scope)\n # find keywords in sender\n testAllModes(0,mc,custid,'','koala','','','',language, attach_scope)\n testAllModes(1,mc,custid,'','irfan','','','',language, attach_scope)\n testAllModes(1,mc,custid,'','jabbar','','','',language, attach_scope)\n # find keywords in recipient\n testAllModes(0,mc,custid,'','','koala','','',language, attach_scope)\n testAllModes(1,mc,custid,'','','tester','','',language, attach_scope)\n testAllModes(11,mc,custid,'','','lab062','','',language, attach_scope)\n # find keywords in subject\n testAllModes(0,mc,custid,'','','','koala','',language, attach_scope)\n testAllModes(0,mc,custid,'','','','australia','',language, attach_scope)\n testAllModes(0,mc,custid,'','','','100','',language, attach_scope)\n testAllModes(0,mc,custid,'','','','800','',language, attach_scope)\n # verify EMSDEV-10004\n testAllModes(0,mc,custid,'','','','australia 100','',language, attach_scope)\n testAllModes(0,mc,custid,'','','','australia habitat','',language, attach_scope)\n # find keywords in attachment\n testAllModes(0,mc,custid,'','','','','australia',language, attach_scope)\n testAllModes(1,mc,custid,'','','','','koala',language, attach_scope)\n testAllModes(1,mc,custid,'','','','','jpg',language, attach_scope)\n 
testAllModes(1,mc,custid,'','','','','.jpg',language, attach_scope)\n testAllModes(1,mc,custid,'','','','','*.jpg',language, attach_scope)\n\n # specialized tests for EMSDEV-10255, EMSDEV-10288\n # verify searchability for various dbcs encodings\n # EMSDEV-10288 encoded ja msg as attachment and main body both searchable\n string1 = unicode('同市安田のテレトラック横手第2駐車場で','utf-8')\n string2 = unicode('業者が正確な数値を算出するが','utf-8')\n testAllModes(1,mc,custid,string1,'','','','','any',None)\n testAllModes(0,mc,custid,string1,'','','','','any',True)\n testAllModes(1,mc,custid,string1,'','','','','any',False)\n testAllModes(1,mc,custid,string2,'','','','','any',None)\n testAllModes(1,mc,custid,string2,'','','','','any',True)\n testAllModes(0,mc,custid,string2,'','','','','any',False)\n testAllModes(1,mc,custid,string1,'','','','','ja',None)\n testAllModes(0,mc,custid,string1,'','','','','ja',True)\n testAllModes(1,mc,custid,string1,'','','','','ja',False)\n testAllModes(1,mc,custid,string2,'','','','','ja',None)\n testAllModes(1,mc,custid,string2,'','','','','ja',True)\n testAllModes(0,mc,custid,string2,'','','','','ja',False)\n\n string3 = unicode('んおやゆよににんかはきくまりねくりれ','utf-8')\n testAllModes(1,mc,custid,string3,'','','','','any',None)\n testAllModes(0,mc,custid,string3,'','','','','any',True)\n testAllModes(1,mc,custid,string3,'','','','','any',False)\n testAllModes(1,mc,custid,string3,'','','','','ja',None)\n testAllModes(0,mc,custid,string3,'','','','','ja',True)\n testAllModes(1,mc,custid,string3,'','','','','ja',False)\n testAllModes(0,mc,custid,string3,'','','','','en',None)\n testAllModes(0,mc,custid,string3,'','','','','en',True)\n testAllModes(0,mc,custid,string3,'','','','','en',False)\n \n string3 = unicode('きくまのりれけむらにかてふくこは','utf-8')\n testAllModes(1,mc,custid,string3,'','','','','any',None)\n testAllModes(0,mc,custid,string3,'','','','','any',True)\n testAllModes(1,mc,custid,string3,'','','','','any',False)\n testAllModes(1,mc,custid,string3,'','','','','ja',None)\n 
testAllModes(0,mc,custid,string3,'','','','','ja',True)\n testAllModes(1,mc,custid,string3,'','','','','ja',False)\n testAllModes(0,mc,custid,string3,'','','','','en',None)\n testAllModes(0,mc,custid,string3,'','','','','en',True)\n testAllModes(0,mc,custid,string3,'','','','','en',False)\n\n string3 = unicode('うそらガッぢクゅぽニずガぱれぽろよめざ','utf-8')\n testAllModes(2,mc,custid,string3,'','','','','any',None)\n testAllModes(0,mc,custid,string3,'','','','','any',True)\n testAllModes(2,mc,custid,string3,'','','','','any',False)\n testAllModes(2,mc,custid,string3,'','','','','ja',None)\n testAllModes(0,mc,custid,string3,'','','','','ja',True)\n testAllModes(2,mc,custid,string3,'','','','','ja',False)\n testAllModes(0,mc,custid,string3,'','','','','en',None)\n testAllModes(0,mc,custid,string3,'','','','','en',True)\n testAllModes(0,mc,custid,string3,'','','','','en',False)\n\n # arabic\n string3 = unicode('يؤكد السعي لمنع حرب أهلية','utf-8')\n testAllModes(3,mc,custid,string3,'','','','','any',None)\n testAllModes(1,mc,custid,string3,'','','','','any',True)\n testAllModes(2,mc,custid,string3,'','','','','any',False)\n testAllModes(3,mc,custid,string3,'','','','','ar',None)\n testAllModes(1,mc,custid,string3,'','','','','ar',True)\n testAllModes(2,mc,custid,string3,'','','','','ar',False)\n testAllModes(1,mc,custid,string3,'','','','','en',None)\n testAllModes(0,mc,custid,string3,'','','','','en',True)\n testAllModes(1,mc,custid,string3,'','','','','en',False)\n\n string3 = unicode('من عناصر قوة حفظ السلام الدولية','utf-8')\n testAllModes(1,mc,custid,string3,'','','','','any',None)\n testAllModes(1,mc,custid,string3,'','','','','any',True)\n testAllModes(0,mc,custid,string3,'','','','','any',False)\n testAllModes(1,mc,custid,string3,'','','','','ar',None)\n testAllModes(1,mc,custid,string3,'','','','','ar',True)\n testAllModes(0,mc,custid,string3,'','','','','ar',False)\n testAllModes(0,mc,custid,string3,'','','','','en',None)\n 
    testAllModes(0,mc,custid,string3,'','','','','en',True)
    testAllModes(0,mc,custid,string3,'','','','','en',False)

    # spanish
    string3 = unicode('la motivación de nuestro rival será más grande pero hay','utf-8')
    testAllModes(1,mc,custid,string3,'','','','','any',None)
    testAllModes(1,mc,custid,string3,'','','','','any',True)
    testAllModes(0,mc,custid,string3,'','','','','any',False)
    testAllModes(1,mc,custid,string3,'','','','','es',None)
    testAllModes(1,mc,custid,string3,'','','','','es',True)
    testAllModes(0,mc,custid,string3,'','','','','es',False)
    testAllModes(0,mc,custid,string3,'','','','','en',None)
    testAllModes(0,mc,custid,string3,'','','','','en',True)
    testAllModes(0,mc,custid,string3,'','','','','en',False)

    string3 = unicode('o primeros con un punto más','utf-8')
    testAllModes(1,mc,custid,string3,'','','','','any',None)
    testAllModes(0,mc,custid,string3,'','','','','any',True)
    testAllModes(1,mc,custid,string3,'','','','','any',False)
    testAllModes(1,mc,custid,string3,'','','','','es',None)
    testAllModes(0,mc,custid,string3,'','','','','es',True)
    testAllModes(1,mc,custid,string3,'','','','','es',False)
    testAllModes(0,mc,custid,string3,'','','','','en',None)
    testAllModes(0,mc,custid,string3,'','','','','en',True)
    testAllModes(0,mc,custid,string3,'','','','','en',False)


def searchCountQL(mc,custid,query,language,attach_scope,msgMode=False):
    # Builds a Solr query from a QL query string via SolrQueryBuilder and
    # returns the resulting document count under the given language /
    # attachment-scope / message-mode constraints.
    sm = mc.getIndexSearchManager()
    isc = IndexSearchConstraint(custid,None)
    isc.constrainByLanguage(language)
    isc.queryMessages(msgMode)
    qb = SolrQueryBuilder(custid)
    qb.applyLanguage(language)
    qb.applyAttachmentScope(attach_scope)
    qb.applyDefaultSearch(query,True,True)
    print 'generated query =',qb.getQuery()
    sr = sm.search(qb.getQuery(),isc,None,CallerApp.REVIEWER_SEARCH)
    return sr.getDocCount()

def searchCountUQL(mc,custid,query,language,attach_scope,msgMode=False):
    # Like searchCountQL, but first expands the user query through
    # UserQueryBuilder before handing it to SolrQueryBuilder.
    sm = mc.getIndexSearchManager()
    isc = IndexSearchConstraint(custid,None)
    isc.constrainByLanguage(language)
    isc.queryMessages(msgMode)
    qb = UserQueryBuilder(custid);
    qb.applyLanguage(language)
    qb.applyAttachmentScope(attach_scope)
    qb.applyDefaultSearch(query,True,True)
    print 'generated UQL query =',qb.getQuery()
    # feed the UQL-expanded query through the Solr builder as well
    qbs = SolrQueryBuilder(custid);
    qbs.applyLanguage(language)
    qbs.applyAttachmentScope(attach_scope)
    qbs.applyDefaultSearch(qb.getQuery(),True,True)
    print 'generated query =',qbs.getQuery()
    sr = sm.search(qbs.getQuery(),isc,None,CallerApp.REVIEWER_SEARCH)
    return sr.getDocCount()

def testQLAllModes(expected,mc,custid,query,language,attach_scope,rg = None, msgCount = None):
    # Runs a QL test in document mode and message mode; msgCount, when
    # given, overrides the expected hit count for message mode.
    global failed_count
    global ok_count

    msgExpected = expected
    if msgCount is not None :
        msgExpected = msgCount

    testQL(expected,mc,custid,query,language,attach_scope,rg, False)
    testQL(msgExpected,mc,custid,query,language,attach_scope,rg, True)

def testQL(expected,mc,custid,query,language,attach_scope,rg = None, msgMode=False):
    # Runs the same query through both searchCountQL and searchCountUQL and
    # scores each result separately against `expected`.
    # NOTE(review): `rg` is accepted for signature parity with test() but is
    # not used anywhere in this function body.
    global failed_count
    global ok_count
    print 'custid="'+str(custid)+'"'
    print 'query="'+query+'"'
    print 'language="'+language+'"'
    print 'attachment_scope=',attach_scope
    print 'message_mode=',msgMode
    c = searchCountQL(mc,custid,query,language,attach_scope,msgMode)
    print 'expected =',expected,'\t found =',c
    if c != expected:
        print 'FAILED'
        failed_count = failed_count + 1
    else:
        print 'PASSED'
        ok_count = ok_count + 1
    c = searchCountUQL(mc,custid,query,language,attach_scope,msgMode)
    print 'expected =',expected,'\t found =',c
    if c != expected:
        print 'FAILED'
        failed_count = failed_count + 1
    else:
        print 'PASSED'
        ok_count = ok_count + 1


def mainTestQL(mc,custid):
    # QL-syntax counterpart of mainTest.
    global ok_count
    global failed_count

    # in the right languages
    languages = ['en', 'any']
    for language in languages:
        attach_scope = None
        # find keywords in body
        testQLAllModes(1,mc,custid,'natural',language, attach_scope)
        testQLAllModes(1,mc,custid,'"natural"',language, 
attach_scope)\n testQLAllModes(1,mc,custid,'habitat',language, attach_scope)\n testQLAllModes(1,mc,custid,'\"habitat\"',language, attach_scope)\n testQLAllModes(1,mc,custid,'natural habitat',language, attach_scope)\n testQLAllModes(1,mc,custid,'\"natural habitat\"',language, attach_scope)\n testQLAllModes(1,mc,custid,'habitat natural',language, attach_scope)\n testQLAllModes(0,mc,custid,'\"habitat natural\"',language, attach_scope)\n # find keywords in other fields\n testQLAllModes(0,mc,custid,'ziggurat',language, attach_scope)\n testQLAllModes(3,mc,custid,'irfan',language, attach_scope, None, 2)\n testQLAllModes(2,mc,custid,'tester',language, attach_scope, None, 1)\n testQLAllModes(2,mc,custid,'australia',language, attach_scope, None, 1)\n testQLAllModes(2,mc,custid,'koala',language, attach_scope, None, 1)\n testQLAllModes(2,mc,custid,'jpg',language, attach_scope, None, 1)\n testQLAllModes(2,mc,custid,'.jpg',language, attach_scope, None,1 )\n# testQLAllModes(2,mc,custid,'*.jpg',language, attach_scope, None, 1)\n # find keywords in sender\n testQLAllModes(0,mc,custid,'mailfrom:koala',language, attach_scope)\n testQLAllModes(2,mc,custid,'mailfrom:irfan',language, attach_scope, None, 1)\n testQLAllModes(2,mc,custid,'mailfrom:jabbar',language, attach_scope, None, 1)\n # find keywords in recipient\n testQLAllModes(0,mc,custid,'mailto:koala',language, attach_scope)\n testQLAllModes(2,mc,custid,'mailto:tester',language, attach_scope, None, 1)\n testQLAllModes(16,mc,custid,'mailto:lab062',language, attach_scope, None, 11)\n # find keywords in subject\n testQLAllModes(0,mc,custid,'mailsubject:koala',language, attach_scope)\n testQLAllModes(2,mc,custid,'mailsubject:australia',language, attach_scope, None, 1)\n # find keywords in attachments\n testQLAllModes(0,mc,custid,'attachedfiles:australia',language, attach_scope)\n testQLAllModes(1,mc,custid,'attachedfiles:koala',language, attach_scope)\n testQLAllModes(1,mc,custid,'attachedfiles:jpg',language, attach_scope)\n 
testQLAllModes(1,mc,custid,'attachedfiles:.jpg',language, attach_scope)\n# testQLAllModes(1,mc,custid,'attachedfiles:*.jpg',language, attach_scope)\n # find keywords in filename\n testQLAllModes(0,mc,custid,'filename:australia',language, attach_scope)\n testQLAllModes(1,mc,custid,'filename:koala',language, attach_scope)\n testQLAllModes(1,mc,custid,'filename:jpg',language, attach_scope)\n testQLAllModes(1,mc,custid,'filename:.jpg',language, attach_scope)\n# testQLAllModes(1,mc,custid,'filename:*.jpg',language, attach_scope)\n # test EMSDEV-10026\n testQLAllModes(2,mc,custid,'recipients:tester',language, attach_scope, None, 1)\n\n attach_scope = True\n # find keywords in body\n testQLAllModes(0,mc,custid,'natural',language, attach_scope)\n testQLAllModes(0,mc,custid,'\"natural\"',language, attach_scope)\n testQLAllModes(0,mc,custid,'habitat',language, attach_scope)\n testQLAllModes(0,mc,custid,'\"habitat\"',language, attach_scope)\n testQLAllModes(0,mc,custid,'natural habitat',language, attach_scope)\n testQLAllModes(0,mc,custid,'\"natural habitat\"',language, attach_scope)\n testQLAllModes(0,mc,custid,'habitat natural',language, attach_scope)\n testQLAllModes(0,mc,custid,'\"habitat natural\"',language, attach_scope)\n # find keywords in other fields\n testQLAllModes(0,mc,custid,'ziggurat',language, attach_scope)\n testQLAllModes(2,mc,custid,'irfan',language, attach_scope)\n testQLAllModes(1,mc,custid,'tester',language, attach_scope)\n testQLAllModes(1,mc,custid,'australia',language, attach_scope)\n testQLAllModes(1,mc,custid,'koala',language, attach_scope)\n testQLAllModes(1,mc,custid,'jpg',language, attach_scope)\n testQLAllModes(1,mc,custid,'.jpg',language, attach_scope)\n# testQLAllModes(1,mc,custid,'*.jpg',language, attach_scope)\n # find keywords in sender\n testQLAllModes(0,mc,custid,'mailfrom:koala',language, attach_scope)\n testQLAllModes(1,mc,custid,'mailfrom:irfan',language, attach_scope)\n testQLAllModes(1,mc,custid,'mailfrom:jabbar',language, 
attach_scope)\n # find keywords in recipient\n testQLAllModes(0,mc,custid,'mailto:koala',language, attach_scope)\n testQLAllModes(1,mc,custid,'mailto:tester',language, attach_scope)\n testQLAllModes(5,mc,custid,'mailto:lab062',language, attach_scope)\n # find keywords in subject\n testQLAllModes(0,mc,custid,'mailsubject:koala',language, attach_scope)\n testQLAllModes(1,mc,custid,'mailsubject:australia',language, attach_scope)\n # find keywords in attachments\n testQLAllModes(0,mc,custid,'attachedfiles:australia',language, attach_scope)\n testQLAllModes(0,mc,custid,'attachedfiles:koala',language, attach_scope)\n testQLAllModes(0,mc,custid,'attachedfiles:jpg',language, attach_scope)\n testQLAllModes(0,mc,custid,'attachedfiles:.jpg',language, attach_scope)\n# testQLAllModes(0,mc,custid,'attachedfiles:*.jpg',language, attach_scope)\n # find keywords in filename\n testQLAllModes(0,mc,custid,'filename:australia',language, attach_scope)\n testQLAllModes(1,mc,custid,'filename:koala',language, attach_scope)\n testQLAllModes(1,mc,custid,'filename:jpg',language, attach_scope)\n testQLAllModes(1,mc,custid,'filename:.jpg',language, attach_scope)\n# testQLAllModes(1,mc,custid,'filename:*.jpg',language, attach_scope)\n\n attach_scope = False\n # find keywords in body\n # find keywords in body\n testQLAllModes(1,mc,custid,'natural',language, attach_scope)\n testQLAllModes(1,mc,custid,'\"natural\"',language, attach_scope)\n testQLAllModes(1,mc,custid,'habitat',language, attach_scope)\n testQLAllModes(1,mc,custid,'\"habitat\"',language, attach_scope)\n testQLAllModes(1,mc,custid,'natural habitat',language, attach_scope)\n testQLAllModes(1,mc,custid,'\"natural habitat\"',language, attach_scope)\n testQLAllModes(1,mc,custid,'habitat natural',language, attach_scope)\n testQLAllModes(0,mc,custid,'\"habitat natural\"',language, attach_scope)\n # find keywords in other fields\n testQLAllModes(0,mc,custid,'ziggurat',language, attach_scope)\n testQLAllModes(1,mc,custid,'irfan',language, 
attach_scope)\n testQLAllModes(1,mc,custid,'tester',language, attach_scope)\n testQLAllModes(1,mc,custid,'australia',language, attach_scope)\n testQLAllModes(1,mc,custid,'koala',language, attach_scope)\n testQLAllModes(1,mc,custid,'jpg',language, attach_scope)\n testQLAllModes(1,mc,custid,'.jpg',language, attach_scope)\n# testQLAllModes(1,mc,custid,'*.jpg',language, attach_scope)\n # find keywords in sender\n testQLAllModes(0,mc,custid,'mailfrom:koala',language, attach_scope)\n testQLAllModes(1,mc,custid,'mailfrom:irfan',language, attach_scope)\n testQLAllModes(1,mc,custid,'mailfrom:jabbar',language, attach_scope)\n # find keywords in recipient\n testQLAllModes(0,mc,custid,'mailto:koala',language, attach_scope)\n testQLAllModes(1,mc,custid,'mailto:tester',language, attach_scope)\n testQLAllModes(11,mc,custid,'mailto:lab062',language, attach_scope)\n # find keywords in subject\n testQLAllModes(0,mc,custid,'mailsubject:koala',language, attach_scope)\n testQLAllModes(1,mc,custid,'mailsubject:australia',language, attach_scope)\n # find keywords in attachments\n testQLAllModes(0,mc,custid,'attachedfiles:australia',language, attach_scope)\n testQLAllModes(1,mc,custid,'attachedfiles:koala',language, attach_scope)\n testQLAllModes(1,mc,custid,'attachedfiles:jpg',language, attach_scope)\n testQLAllModes(1,mc,custid,'attachedfiles:.jpg',language, attach_scope)\n# testQLAllModes(1,mc,custid,'attachedfiles:*.jpg',language, attach_scope)\n # find keywords in filename\n testQLAllModes(0,mc,custid,'filename:australia',language, attach_scope)\n testQLAllModes(0,mc,custid,'filename:koala',language, attach_scope)\n testQLAllModes(0,mc,custid,'filename:jpg',language, attach_scope)\n testQLAllModes(0,mc,custid,'filename:.jpg',language, attach_scope)\n# testQLAllModes(0,mc,custid,'filename:*.jpg',language, attach_scope)\n\n # in \"wrong\" language\n languages = ['ru','zh-tw','zh-cn','fr','de','nl','sv','ja','pt','ar','he']\n for language in languages:\n attach_scope = None\n # find 
keywords in body\n testQLAllModes(0,mc,custid,'natural',language, attach_scope)\n testQLAllModes(0,mc,custid,'\"natural\"',language, attach_scope)\n testQLAllModes(0,mc,custid,'habitat',language, attach_scope)\n testQLAllModes(0,mc,custid,'\"habitat\"',language, attach_scope)\n testQLAllModes(0,mc,custid,'natural habitat',language, attach_scope)\n testQLAllModes(0,mc,custid,'\"natural habitat\"',language, attach_scope)\n testQLAllModes(0,mc,custid,'habitat natural',language, attach_scope)\n testQLAllModes(0,mc,custid,'\"habitat natural\"',language, attach_scope)\n # find keywords in other fields\n testQLAllModes(0,mc,custid,'ziggurat',language, attach_scope)\n testQLAllModes(2,mc,custid,'irfan',language, attach_scope, None, 1)\n testQLAllModes(2,mc,custid,'tester',language, attach_scope, None, 1)\n # weird, but attachment is \"generic\"\n testQLAllModes(0,mc,custid,'australia',language, attach_scope)\n testQLAllModes(2,mc,custid,'koala',language, attach_scope, None, 1)\n testQLAllModes(2,mc,custid,'jpg',language, attach_scope, None, 1)\n testQLAllModes(2,mc,custid,'.jpg',language, attach_scope, None, 1)\n# testQLAllModes(2,mc,custid,'*.jpg',language, attach_scope)\n # find keywords in sender\n testQLAllModes(0,mc,custid,'mailfrom:koala',language, attach_scope)\n testQLAllModes(2,mc,custid,'mailfrom:irfan',language, attach_scope, None, 1)\n testQLAllModes(2,mc,custid,'mailfrom:jabbar',language, attach_scope, None, 1)\n # find keywords in recipient\n testQLAllModes(0,mc,custid,'mailto:koala',language, attach_scope)\n testQLAllModes(2,mc,custid,'mailto:tester',language, attach_scope, None, 1)\n testQLAllModes(16,mc,custid,'mailto:lab062',language, attach_scope, None, 11)\n # find keywords in subject\n testQLAllModes(0,mc,custid,'mailsubject:koala',language, attach_scope)\n # weird, but attachment is \"generic\"\n testQLAllModes(0,mc,custid,'mailsubject:australia',language, attach_scope)\n # find keywords in attachments\n 
testQLAllModes(0,mc,custid,'attachedfiles:australia',language, attach_scope)\n testQLAllModes(1,mc,custid,'attachedfiles:koala',language, attach_scope)\n testQLAllModes(1,mc,custid,'attachedfiles:jpg',language, attach_scope)\n testQLAllModes(1,mc,custid,'attachedfiles:.jpg',language, attach_scope)\n# testQLAllModes(1,mc,custid,'attachedfiles:*.jpg',language, attach_scope)\n # find keywords in filename\n testQLAllModes(0,mc,custid,'filename:australia',language, attach_scope)\n testQLAllModes(1,mc,custid,'filename:koala',language, attach_scope)\n testQLAllModes(1,mc,custid,'filename:jpg',language, attach_scope)\n testQLAllModes(1,mc,custid,'filename:.jpg',language, attach_scope)\n# testQLAllModes(1,mc,custid,'filename:*.jpg',language, attach_scope)\n\n attach_scope = True\n # find keywords in body\n testQLAllModes(0,mc,custid,'natural',language, attach_scope)\n testQLAllModes(0,mc,custid,'\"natural\"',language, attach_scope)\n testQLAllModes(0,mc,custid,'habitat',language, attach_scope)\n testQLAllModes(0,mc,custid,'\"habitat\"',language, attach_scope)\n testQLAllModes(0,mc,custid,'natural habitat',language, attach_scope)\n testQLAllModes(0,mc,custid,'\"natural habitat\"',language, attach_scope)\n testQLAllModes(0,mc,custid,'habitat natural',language, attach_scope)\n testQLAllModes(0,mc,custid,'\"habitat natural\"',language, attach_scope)\n # find keywords in other fields\n testQLAllModes(0,mc,custid,'ziggurat',language, attach_scope)\n testQLAllModes(1,mc,custid,'irfan',language, attach_scope)\n testQLAllModes(1,mc,custid,'tester',language, attach_scope)\n testQLAllModes(0,mc,custid,'australia',language, attach_scope)\n testQLAllModes(1,mc,custid,'koala',language, attach_scope)\n testQLAllModes(1,mc,custid,'jpg',language, attach_scope)\n testQLAllModes(1,mc,custid,'.jpg',language, attach_scope)\n# testQLAllModes(1,mc,custid,'*.jpg',language, attach_scope)\n # find keywords in sender\n testQLAllModes(0,mc,custid,'mailfrom:koala',language, attach_scope)\n 
testQLAllModes(1,mc,custid,'mailfrom:irfan',language, attach_scope)\n testQLAllModes(1,mc,custid,'mailfrom:jabbar',language, attach_scope)\n # find keywords in recipient\n testQLAllModes(0,mc,custid,'mailto:koala',language, attach_scope)\n testQLAllModes(1,mc,custid,'mailto:tester',language, attach_scope)\n testQLAllModes(5,mc,custid,'mailto:lab062',language, attach_scope)\n # find keywords in subject\n testQLAllModes(0,mc,custid,'mailsubject:koala',language, attach_scope)\n testQLAllModes(0,mc,custid,'mailsubject:australia',language, attach_scope)\n # find keywords in attachments\n testQLAllModes(0,mc,custid,'attachedfiles:australia',language, attach_scope)\n testQLAllModes(0,mc,custid,'attachedfiles:koala',language, attach_scope)\n testQLAllModes(0,mc,custid,'attachedfiles:jpg',language, attach_scope)\n testQLAllModes(0,mc,custid,'attachedfiles:.jpg',language, attach_scope)\n# testQLAllModes(0,mc,custid,'attachedfiles:*.jpg',language, attach_scope)\n # find keywords in filename\n testQLAllModes(0,mc,custid,'filename:australia',language, attach_scope)\n testQLAllModes(1,mc,custid,'filename:koala',language, attach_scope)\n testQLAllModes(1,mc,custid,'filename:jpg',language, attach_scope)\n testQLAllModes(1,mc,custid,'filename:.jpg',language, attach_scope)\n# testQLAllModes(1,mc,custid,'filename:*.jpg',language, attach_scope)\n\n attach_scope = False\n # find keywords in body\n testQLAllModes(0,mc,custid,'natural',language, attach_scope)\n testQLAllModes(0,mc,custid,'\"natural\"',language, attach_scope)\n testQLAllModes(0,mc,custid,'habitat',language, attach_scope)\n testQLAllModes(0,mc,custid,'\"habitat\"',language, attach_scope)\n testQLAllModes(0,mc,custid,'natural habitat',language, attach_scope)\n testQLAllModes(0,mc,custid,'\"natural habitat\"',language, attach_scope)\n testQLAllModes(0,mc,custid,'habitat natural',language, attach_scope)\n testQLAllModes(0,mc,custid,'\"habitat natural\"',language, attach_scope)\n # find keywords in other fields\n 
testQLAllModes(0,mc,custid,'ziggurat',language, attach_scope)\n testQLAllModes(1,mc,custid,'irfan',language, attach_scope)\n testQLAllModes(1,mc,custid,'tester',language, attach_scope)\n testQLAllModes(0,mc,custid,'australia',language, attach_scope)\n testQLAllModes(1,mc,custid,'koala',language, attach_scope)\n testQLAllModes(1,mc,custid,'jpg',language, attach_scope)\n testQLAllModes(1,mc,custid,'.jpg',language, attach_scope)\n# testQLAllModes(1,mc,custid,'*.jpg',language, attach_scope)\n # find keywords in sender\n testQLAllModes(0,mc,custid,'mailfrom:koala',language, attach_scope)\n testQLAllModes(1,mc,custid,'mailfrom:irfan',language, attach_scope)\n testQLAllModes(1,mc,custid,'mailfrom:jabbar',language, attach_scope)\n # find keywords in recipient\n testQLAllModes(0,mc,custid,'mailto:koala',language, attach_scope)\n testQLAllModes(1,mc,custid,'mailto:tester',language, attach_scope)\n testQLAllModes(11,mc,custid,'mailto:lab062',language, attach_scope)\n # find keywords in subject\n testQLAllModes(0,mc,custid,'mailsubject:koala',language, attach_scope)\n testQLAllModes(0,mc,custid,'mailsubject:australia',language, attach_scope)\n # find keywords in attachments\n testQLAllModes(0,mc,custid,'attachedfiles:australia',language, attach_scope)\n testQLAllModes(1,mc,custid,'attachedfiles:koala',language, attach_scope)\n testQLAllModes(1,mc,custid,'attachedfiles:jpg',language, attach_scope)\n testQLAllModes(1,mc,custid,'attachedfiles:.jpg',language, attach_scope)\n# testQLAllModes(1,mc,custid,'attachedfiles:*.jpg',language, attach_scope)\n # find keywords in filename\n testQLAllModes(0,mc,custid,'filename:australia',language, attach_scope)\n testQLAllModes(0,mc,custid,'filename:koala',language, attach_scope)\n testQLAllModes(0,mc,custid,'filename:jpg',language, attach_scope)\n testQLAllModes(0,mc,custid,'filename:.jpg',language, attach_scope)\n# testQLAllModes(0,mc,custid,'filename:*.jpg',language, attach_scope)\n\n\nif __name__ == '__main__':\n if len(sys.argv) < 2 or 
len(sys.argv) > 3:\n print sys.argv[0],'islandId [custId | - ]'\n sys.exit(-1)\n\n inCustId = None\n if len(sys.argv) != 2 :\n inCustId = sys.argv[2]\n\n ok_count = 0\n failed_count = 0\n\n highpass = 0\n highfail = 0\n\n custid = None\n if inCustId is not None and inCustId != '-':\n custid = int(inCustId) \n \n island = None\n edMode = False\n proxy = Service('solrproxy-Island102Cluster1')\n\n try:\n mc = ManagementContainer.getInstance()\n\n island = mc.getIslandManager().getIsland(int(sys.argv[1]))\n edMode = island.isEdiscoveryEnabled()\n if not island.isEdiscoveryEnabled():\n island.setEdiscoveryEnabled(True)\n mc.getIslandManager().updateIsland(island)\n\n print 'restarting proxy to ensure that island capability cache is in synch'\n proxy.invoke('restart','work-3')\n print 'proxy restarted'\n\n if custid is None :\n custid = setupCustomer(mc,sys.argv[1],'/tmp/searchcorpus','lab062.m1dev.com')\n waitForindexing(mc,custid,16)\n\telif mc.getCustomerManager().getCustomer(custid) is None :\n msg = 'customer with id '+ str(custid)+' does not exist.'\n print msg\n custid = None\n raise Exception(msg)\n\n caps = mc.getCustomerManager().getCustomerCapabilities(int(custid))\n caps.setBooleanCapability(Capabilities.CAP_ALLOW_ARCHIVE_EDISCOVERY,True)\n mc.getCustomerManager().saveCustomerCapabilities(caps)\n caps = mc.getCustomerManager().getCustomerCapabilities(int(custid))\n print custid,'ediscovery is',caps.getBooleanCapability(Capabilities.CAP_ALLOW_ARCHIVE_EDISCOVERY)\n\n mainTest(mc,custid,True)\n\n # turn off ediscovery for customer\n caps = mc.getCustomerManager().getCustomerCapabilities(int(custid))\n caps.setBooleanCapability(Capabilities.CAP_ALLOW_ARCHIVE_EDISCOVERY,False)\n mc.getCustomerManager().saveCustomerCapabilities(caps)\n caps = mc.getCustomerManager().getCustomerCapabilities(int(custid))\n print custid,'ediscovery is',caps.getBooleanCapability(Capabilities.CAP_ALLOW_ARCHIVE_EDISCOVERY)\n\n mainTest(mc,custid,False)\n\n mainTestQL(mc,custid)\n 
extraScopeTest(mc,custid)\n extractTest(mc,custid)\n partialTest(mc,custid)\n\n print highpass,'highlight tests passed'\n print highfail,'highlight failures'\n\n print ok_count,'succeeded'\n print failed_count,'failures'\n\n sys.exit(failed_count + highfail)\n\n finally:\n #delete the customer\n if custid is not None and inCustId is None:\n print 'deleting customer',custid\n mc.getCustomerManager().deleteCustomers([custid])\n if island is not None and edMode != island.isEdiscoveryEnabled():\n island.setEdiscoveryEnabled(edMode)\n mc.getIslandManager().updateIsland(island)\n print 'restarting proxy to ensure that island capability cache is in synch'\n proxy.invoke('restart','work-3')\n print 'proxy restarted'\n\n sys.exit(-1)\n\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41113,"cells":{"__id__":{"kind":"number","value":6605659743829,"string":"6,605,659,743,829"},"blob_id":{"kind":"string","value":"450d13e77a021a22760d9ad65645a56f6e8ada31"},"directory_id":{"kind":"string","value":"090324db0c04d8c30ad6688547cfea47858bf3af"},"path":{"kind":"string","value":"/utils/prof.py"},"content_id":{"kind":"string","value":"6b8271252d128d7a7c169f71f2bdca0423042324"},"detected_licenses":{"kind":"list 
like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"fidlej/sokobot"},"repo_url":{"kind":"string","value":"https://github.com/fidlej/sokobot"},"snapshot_id":{"kind":"string","value":"b82c4c36d73e224d0d0e1635021ca04485da589e"},"revision_id":{"kind":"string","value":"d3d04753a5043e6a22dafd132fa633d8bc66b9ea"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-21T13:14:29.523501","string":"2021-01-21T13:14:29.523501"},"revision_date":{"kind":"timestamp","value":"2011-06-12T07:34:14","string":"2011-06-12T07:34:14"},"committer_date":{"kind":"timestamp","value":"2011-06-12T07:34:14","string":"2011-06-12T07:34:14"},"github_id":{"kind":"number","value":32650745,"string":"32,650,745"},"star_events_count":{"kind":"number","value":1,"string":"1"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"#!/usr/bin/env python\n\nimport sys\n\nimport sokopath\nfrom solve import main as command\n\nPROF_FILENAME = \"stats.prof\"\n\ndef _collect_profile(filename):\n import cProfile as profile\n profile.run(\"command(use_psyco=False)\", filename)\n\ndef _view_profile(filename):\n import pstats\n p = pstats.Stats(filename)\n p.strip_dirs()\n #p.sort_stats('time')\n p.sort_stats('cumulative')\n p.print_stats(20)\n\ndef main():\n args = sys.argv[1:]\n if len(args) == 1 and args[0].endswith(\".prof\"):\n filename = args[0]\n else:\n filename = PROF_FILENAME\n _collect_profile(filename)\n 
_view_profile(filename)\n\nif __name__ == \"__main__\":\n main()\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2011,"string":"2,011"}}},{"rowIdx":41114,"cells":{"__id__":{"kind":"number","value":14276471300349,"string":"14,276,471,300,349"},"blob_id":{"kind":"string","value":"f4d615ff970d4630f565a2417a41b6866d74eea6"},"directory_id":{"kind":"string","value":"441a490ad58551132b0ff8dd6f750b0805a21c40"},"path":{"kind":"string","value":"/mockobjlib/row_proxy.py"},"content_id":{"kind":"string","value":"a8fe0b5c08e6ebf9be821adcef8ed851b0f5baa3"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"rhintz42/mockobjlib"},"repo_url":{"kind":"string","value":"https://github.com/rhintz42/mockobjlib"},"snapshot_id":{"kind":"string","value":"2080da4ab31758ea83cbc7becf9a3818a8ed9c78"},"revision_id":{"kind":"string","value":"9c5b3053e623c06d85bdd51512b318655d4efbe2"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2020-05-26T06:08:11.361175","string":"2020-05-26T06:08:11.361175"},"revision_date":{"kind":"timestamp","value":"2014-03-24T06:15:03","string":"2014-03-24T06:15:03"},"committer_date":{"kind":"timestamp","value":"2014-03-24T06:15:03","string":"2014-03-24T06:15:03"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_i
ssues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"\n\nclass RowProxy(object):\n def __init__(self, *args, **kwargs):\n self._row = ()\n self._keys = []\n self._values = []\n self._dict = {}\n for key,val in kwargs.items():\n setattr(self, key, val)\n setattr(self, key.lower(), val)\n setattr(self, key.upper(), val)\n self._keys.append(key)\n self._row = self._row + (val,)\n self._values.append(val)\n self._dict[key] = val\n\n def __repr__(self):\n return str(self._row)\n\n def __str__(self):\n return str(self._row)\n\n def __getitem__(self, attr):\n return self._dict[attr]\n\n def __getattr__(self, attr):\n self._raise_column_error(attr)\n\n def keys(self):\n return self._keys\n\n def values(self):\n return self._values\n\n @property\n def __dict__(self):\n self._raise_column_error('dict')\n\n def _raise_column_error(self, attr):\n raise AttributeError(\"Could not locate column in row for column '%s'\" % (attr))\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41115,"cells":{"__id__":{"kind":"number","value":5703716603244,"string":"5,703,716,603,244"},"blob_id":{"kind":"string","value":"1d4e915019b673796da472079621065214427a95"},"directory_id":{"kind":"string","value":"5604903736cfd20eaf31e64ac29edb9da5be3fed"},"path":{"kind":"string","value":"/app/getter.py"},"content_id":{"kind":"string","value":"c5570d3297b707ed86cc829a3de7ba77413fe209"},"detected_licenses":{"kind":"list 
like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"nervouna/XMLGetter"},"repo_url":{"kind":"string","value":"https://github.com/nervouna/XMLGetter"},"snapshot_id":{"kind":"string","value":"749d6a0aed6ca9d4aab3980521b46dc39221f109"},"revision_id":{"kind":"string","value":"90b7079db5d3ed89da18aa5b5f43ad9fdaae4408"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2016-08-06T15:55:57.143827","string":"2016-08-06T15:55:57.143827"},"revision_date":{"kind":"timestamp","value":"2014-03-05T04:13:20","string":"2014-03-05T04:13:20"},"committer_date":{"kind":"timestamp","value":"2014-03-05T04:13:20","string":"2014-03-05T04:13:20"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"#!/usr/bin/env python\n#-*-coding: utf-8 -*-\n\nimport os\nimport time\nimport requests\nfrom app import app\nfrom datetime import datetime\nfrom xml.dom.minidom import parseString\n\nnewIssueAPI = dict(\n bbwc='http://content.cdn.bb.bbwc.cn/v4/app1/interface/content-getissue-1-3.xml',\n ilady='http://content.cdn.imlady.bbwc.cn/v4/app2/interface/content-getissue-1-3.xml')\n\n\ndef getXML(api):\n raw_xml = requests.get(api).text.encode('utf-8').replace('\\n', '')\n return raw_xml\n\n\ndef parseXML(raw_xml):\n dom = parseString(raw_xml)\n return dom\n\n\ndef saveStuff(dom, dirName):\n try:\n 
os.mkdir(os.path.join(app.config['STUFFDIR'], dirName))\n except OSError:\n pass\n\n # Saving The Source Cover\n covers = dom.getElementsByTagName('news:cover_art_icons')[0]\n for sucker in covers.childNodes[1:]:\n covers.removeChild(sucker)\n sourceCover = covers.firstChild\n # The raw xml has bad filenames.\n sourceCoverURL = sourceCover.getAttribute('src').replace('00.png', '.png')\n sourceCoverFile = os.path.join(\n app.config['STUFFDIR'], dirName, 'source.png')\n with file(sourceCoverFile, 'wb') as c:\n r = requests.get(sourceCoverURL)\n c.write(r.content)\n sourceCover.setAttribute(\n 'src', os.path.join(app.config['HOST'], 'slateXML', dirName, 'source.png'))\n\n # Writing The XML File\n updateTime = dom.getElementsByTagName('updated')[0]\n updateTime.firstChild.data = datetime.strftime(\n datetime.today(), '%Y-%m-%dT%H:%M:%SZ')\n xmlFile = os.path.join(app.config['STUFFDIR'], dirName, 'newIssue.xml')\n with file(xmlFile, 'w') as x:\n x.write(dom.toprettyxml(indent=' ').encode('utf-8'))\n\n\nfor key in newIssueAPI:\n raw_xml = getXML(newIssueAPI[key])\n dom = parseXML(raw_xml)\n saveStuff(dom, key)\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41116,"cells":{"__id__":{"kind":"number","value":1975684958599,"string":"1,975,684,958,599"},"blob_id":{"kind":"string","value":"7db15e650b83c8e42b1e22fae5038016f45f4651"},"directory_id":{"kind":"string","value":"058c2dc9f24ced073968510e9f3c6e9a15894424"},"path":{"kind":"string","value":"/miitus/srv/prep.py"},"content_id":{"kind":"string","value":"c8bc4545fd4f510989b481264e3856f1ff695e5b"},"detected_licenses":{"kind":"list 
like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"AntXlab/miitus"},"repo_url":{"kind":"string","value":"https://github.com/AntXlab/miitus"},"snapshot_id":{"kind":"string","value":"76a88e1e27e11ce168e48fe57002c3ae08734b91"},"revision_id":{"kind":"string","value":"8d7635be68f6670bcbfa7736884ef8283675876f"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-10T20:00:57.626175","string":"2021-01-10T20:00:57.626175"},"revision_date":{"kind":"timestamp","value":"2014-07-29T22:48:25","string":"2014-07-29T22:48:25"},"committer_date":{"kind":"timestamp","value":"2014-07-29T22:48:25","string":"2014-07-29T22:48:25"},"github_id":{"kind":"number","value":21154978,"string":"21,154,978"},"star_events_count":{"kind":"number","value":2,"string":"2"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"from __future__ import absolute_import\nfrom sqlalchemy.ext.declarative import declarative_base\nfrom .utils import Singleton\n\nclass Preparation(Singleton):\n \"\"\"\n everything declarative\n \"\"\"\n def __init__(self):\n super(Preparation, self).__init__()\n\n self.__sql_base = declarative_base()\n\n @property\n def Base(self):\n \"\"\"\n get declarative_base of sqlalchemy\n \"\"\"\n return 
self.__sql_base\n\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41117,"cells":{"__id__":{"kind":"number","value":8306466795309,"string":"8,306,466,795,309"},"blob_id":{"kind":"string","value":"aab18ec1a3de179ced99830e853a5ddc10d850b5"},"directory_id":{"kind":"string","value":"dc151b0d1fbf44e7f069e529a6f76fdb5682b862"},"path":{"kind":"string","value":"/randomGif.py"},"content_id":{"kind":"string","value":"90e367a43d99eba653fc9c91b42831dec4b469dd"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"Friss/random-gif-maker"},"repo_url":{"kind":"string","value":"https://github.com/Friss/random-gif-maker"},"snapshot_id":{"kind":"string","value":"20fd17b7747a34f675cb8461f7db2831848de1ca"},"revision_id":{"kind":"string","value":"54727ca356d9cdbf4c9a53816ec452c6a517e143"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-19T05:03:32.790852","string":"2021-01-19T05:03:32.790852"},"revision_date":{"kind":"timestamp","value":"2014-02-02T17:13:57","string":"2014-02-02T17:13:57"},"committer_date":{"kind":"timestamp","value":"2014-02-02T17:13:57","string":"2014-02-02T17:13:57"},"github_id":{"kind":"number","value":16439205,"string":"16,439,205"},"star_events_count":{"kind":"number","value":2,"string":"2"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":
{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"import os\nimport random\nimport subprocess\nfrom moviepy.editor import *\n\n\ndef scanfolder(root):\n\t\"\"\"\n\tGet all movies with certain file extenstion andd append to list\n\tParams: root - path to root movies Directory\n\tReturns: movies - list of movies found.\n\t\"\"\"\n\tmovies = []\n\tfor path, dirs, files in os.walk(root):\n\t\tfor f in files:\n\t\t\tif f.endswith('.mkv') or f.endswith('.m2ts') or f.endswith('.avi'):\n\t\t\t #print os.path.join(path, f)\n\t\t\t movies.append(os.path.join(path,f))\n\t#print movies\t \n\treturn movies\n\ndef getLength(filename):\n\t\"\"\"\n\tGet information on choosen movie file. \n\tParams: filename - path to movie to check\n\tReturns: list containing the line \"Duration\"\n\tNote: For some reason json output wouldn't show Duration while this call does.\n\t\"\"\"\n\tresult = subprocess.Popen([\"ffprobe\", filename],stdout = subprocess.PIPE, stderr = subprocess.STDOUT)\n\treturn [x for x in result.stdout.readlines() if \"Duration\" in x]\n\n\ndef makeGif(root):\n\tfiles = scanfolder(root) #Get all Movies\n\n\tmoviepath = random.choice(files) #Random Movie from list\n\n\tmoviename = moviepath[root.__len__():].split(\"/\")[2] #Remove Root Directory and Subfolder\n\tmoviename = moviename[:moviename.index(\".\")] #Remove file extenstion \n\tmoviename = moviename.split(\"[\")[0] #Remove any [2013/1080p/720p]\n\tmoviename = moviename.split(\"(\")[0] #Remove any (2013/1080p/720p)\n\tprint \"Movie Chosen: \" + moviename\n\n\tduration = getLength(moviepath)[0].split(\",\") #Get Duration\n\tduration = duration[0].split(\" \")\n\tduration = duration[3].split(\":\") #Break into hour, mins, secs\n\t#print duration\n\t\n\thour = random.randint(0,int(duration[0])) #Random hour\n\tmins = random.randint(0,int(duration[1])+1) #Random min\n\tsecs = float(duration[2]) #Parse secs to 
float\n\n\ttimePassed = round(random.uniform(0, 3),2) #Random seconds to elapse up to 3.\n\t\n\t\"\"\"\n\tprint hour\n\tprint mins\n\tprint secs\n\tprint timePassed\n\t\"\"\"\n\t\n\t#Make GIF 1/3 sized.\n\tVideoFileClip(moviepath).\\\n\t\t\t\tsubclip((hour,mins,secs),(hour,mins,secs+timePassed)).\\\n\t\t\t\tresize(0.3).\\\n\t\t\t\tto_gif('movie.gif')\n\t\n\treturn moviename, hour, mins, secs\n\t\n\nif __name__ == \"__main__\":\n\tvar = raw_input(\"Enter Path to Movie Directory: \")\n\tprint \"Movies Path: \", var\n\tmoviename, hour, mins, secs = makeGif(var)\n\texit()"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41118,"cells":{"__id__":{"kind":"number","value":14001593394290,"string":"14,001,593,394,290"},"blob_id":{"kind":"string","value":"7c900a8e65577a8277f7f8a1a02edee7d727c076"},"directory_id":{"kind":"string","value":"b1751df2f0c0207c82abc957696ca4d9e7ca38fe"},"path":{"kind":"string","value":"/python/keyValueInterface.py"},"content_id":{"kind":"string","value":"63eaa5594402889aae25e73a8e5919a242fcaac5"},"detected_licenses":{"kind":"list 
like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"MichaelMathieu/neuromorphsSLAM"},"repo_url":{"kind":"string","value":"https://github.com/MichaelMathieu/neuromorphsSLAM"},"snapshot_id":{"kind":"string","value":"13ab4d8e2d3a423a0092e932420183ed8132697d"},"revision_id":{"kind":"string","value":"ec3379c98446e5b91432539ddabf21b1c2bf2f3f"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-10T20:32:27.555899","string":"2021-01-10T20:32:27.555899"},"revision_date":{"kind":"timestamp","value":"2013-07-19T04:30:12","string":"2013-07-19T04:30:12"},"committer_date":{"kind":"timestamp","value":"2013-07-19T04:30:12","string":"2013-07-19T04:30:12"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"from streamclient import StreamClient\nimport json\nimport re\n\nclass keyValueInterface(StreamClient):\n def __init__(self, host, port, namespace=\"slam\"):\n super(keyValueInterface, self).__init__(host, port)\n self.namespace = namespace\n self.placeCellPositionKey = self.namespace+\"/placeCellPos\"\n self.placeCellStatusKey = self.namespace+\"/spikes\"\n self.positionKey = self.namespace+\"/position\"\n self.quitKey = self.namespace+\"/quit\"\n \n def getQuitCmd(self):\n return bool(self.get(self.quitKey)) \n\n def setQuitCmd(self, quitCmd):\n self.set(self.quitKey, 
json.dumps(bool(quitCmd)))\n\n def setPosition(self, posX, posY):\n self.set(self.positionKey, \"X=%f Y=%f\" % ( posX, 1 - posY )) \n #print \"Set Position X=%f Y%f\" % (posX, posY)\n \n def setPlaceCellPositions(self, positionMatrix):\n #print \"Set place cell positions \", positionMatrix\n self.set(self.placeCellPositionKey, json.dumps(positionMatrix)) \n def setPlaceCellStatus(self, placeCellStatusRaw):\n placeCellStatus = json.dumps(placeCellStatusRaw)\n #print \"Converted placeCellStatusRaw to \", placeCellStatus\n self.set(self.placeCellStatusKey, placeCellStatus)\n \nif __name__ == \"__main__\":\n import time\n k = keyValueInterface(\"10.1.95.82\", 21567, \"slam\")\n \n k.set('slam/velocity', 'dx=0.1 dy=0.000')\n print \"set velocity\"\n for i in range(1000000):\n time.sleep(0.5)\n j = 0\n if i % 10 == 0:\n if i % 20 == 0:\n j = 1\n else:\n j = 2\n spikes = [ fk * i / 10 for fk in range(j) ]\n t = i % 10 + 1\n spikes = json.dumps(spikes)\n print spikes\n k.set('slam/spikes', spikes)\n\n \n \n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2013,"string":"2,013"}}},{"rowIdx":41119,"cells":{"__id__":{"kind":"number","value":17927193509039,"string":"17,927,193,509,039"},"blob_id":{"kind":"string","value":"db8b17be6f719c2f0303f0a7c05ed02119053302"},"directory_id":{"kind":"string","value":"edefaa8f194215ce2c2062334ed9491b34edd39d"},"path":{"kind":"string","value":"/bin/master.py"},"content_id":{"kind":"string","value":"eda9e7a33477c8aa985a748f659f093c55ed8348"},"detected_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n 
\"Apache-2.0\"\n]"},"license_type":{"kind":"string","value":"permissive"},"repo_name":{"kind":"string","value":"nl5887/upscale"},"repo_url":{"kind":"string","value":"https://github.com/nl5887/upscale"},"snapshot_id":{"kind":"string","value":"f0a697dba1f7ff82b2e5f8fbf38044b26c98588e"},"revision_id":{"kind":"string","value":"efebc08af3355f5ad09a28aa41f99fb734386e6e"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2020-05-17T02:31:56.887538","string":"2020-05-17T02:31:56.887538"},"revision_date":{"kind":"timestamp","value":"2013-06-21T19:47:53","string":"2013-06-21T19:47:53"},"committer_date":{"kind":"timestamp","value":"2013-06-21T19:47:53","string":"2013-06-21T19:47:53"},"github_id":{"kind":"number","value":10342081,"string":"10,342,081"},"star_events_count":{"kind":"number","value":2,"string":"2"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"# run queue, e.g. 
start / shutdown / balance\nimport zmq\n\nimport threading\nimport time\nimport sys\nimport os\n\nfrom threading import Thread\nfrom Queue import Queue\n\nfrom apscheduler.scheduler import Scheduler\n\nPOSSIBLE_TOPDIR = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),\n os.pardir,\n os.pardir))\n\nif os.path.exists(os.path.join(POSSIBLE_TOPDIR, 'upscale', '__init__.py')):\n sys.path.insert(0, POSSIBLE_TOPDIR)\n\nfrom upscale.master import balancer\nfrom upscale.utils.rpc import RemoteClient\n#from upscale.utils.decorators import periodic_task, every, adecorator, Dec\n#from upscale.utils.decorators import periodic_task, every, adecorator, Dec\n\nfrom upscale import log as logging\nLOG = logging.getLogger('upscale.master')\n\nclass Tasks(RemoteClient):\n\tpass\n\nclass Worker(RemoteClient):\n\tpass\n\ndef queue(f):\n\t\"\"\" decorator function that will add function to queue instead of executing them directly \"\"\"\n\tdef wrapper(*args, **kwargs):\n\t\tq.put((f, args, kwargs))\n\treturn wrapper \n\n\nclass Master(object):\n\tdef __init__(self):\n\t\tself.scheduler = Scheduler()\n\t\tself.scheduler.configure({'daemonic': True})\n\t\tself.scheduler.add_interval_job(self._balance, seconds=60)\n\t\tself.scheduler.start()\n\t\tpass\n\n\tdef _balance(self):\n\t\tdef wrapper():\n\t\t\tbalancer.rebalance()\n\t\t\tself.reload_all()\n\n\t\tq.put((wrapper, [], {}))\n\n\t# reconfigure haproxy\n\tdef reload_all(self):\n\t\tfrom upscale.utils.common import get_hosts \n\t\tfor host in get_hosts():\n\t\t\tprint (\"Reloading host {0}.\".format(host.private_dns_name))\n\t\t\twith Tasks(\"tcp://{0}:10000/\".format(host.private_dns_name)) as h:\n\t\t\t\t# should run async and wait for all results to finish\n\t\t\t\th.reload()\n\n\t# start host\n\t@queue\n\tdef start(self, namespace, application):\n\t\tfrom upscale.master.balancer import get_containers\n\n\t\tprint namespace, application,\n\t\t(hosts, containers) = get_containers()\n\n\t\t# also weighted hosts, so one in 
static host, one on spot instance\n\t\tmin_host = None\n\t\tfor host in containers:\n\t\t\tif (not min_host or len(containers[host]) 6:\n fich.write(t)\n fich.write('\\n')\n #print \"chose vues ici{}\".format(items)\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41130,"cells":{"__id__":{"kind":"number","value":11690901003124,"string":"11,690,901,003,124"},"blob_id":{"kind":"string","value":"1a76763c3cc0b94456da40ef09cfcc0082a42b1f"},"directory_id":{"kind":"string","value":"3ff5aab7d6b70715710d81d468e29b9f402ec791"},"path":{"kind":"string","value":"/functions.py"},"content_id":{"kind":"string","value":"007e72b0af4a25d6572862229045a2423a7c8d5c"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"ben18785/malaria-captureMonteCarlo"},"repo_url":{"kind":"string","value":"https://github.com/ben18785/malaria-captureMonteCarlo"},"snapshot_id":{"kind":"string","value":"5de910b2884bb179c69dce55dbd71d7aa25f3fab"},"revision_id":{"kind":"string","value":"f2cc22a47505a0e3f2c5699e6358771d75cf5eb4"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2020-06-08T06:45:48.424942","string":"2020-06-08T06:45:48.424942"},"revision_date":{"kind":"timestamp","value":"2014-10-27T14:48:39","string":"2014-10-27T14:48:39"},"committer_date":{"kind":"timestamp","value":"2014-10-27T14:48:39","string":"2014-10-27T14:48:39"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_u
pdated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"from IBM_functions import basic\nimport random as random\nimport numpy as np\nimport matplotlib.pyplot as plt\n\n# A function which puts male and female mosquitoes at random swarms and houses respectively throughout the domain\ndef initialise(aArea,numMaleMosquitoes,numFemaleMosquitoes,vPInParameters,vPMoveParameters,cPDie):\n\n # Get PIn parameters\n cPInHeterogeneityIndicator = vPInParameters[0]\n cPInMaleAll = vPInParameters[1]\n cPInMaleBetaA = vPInParameters[2]\n cPInMaleBetaB = vPInParameters[3]\n cPInFemaleAll = vPInParameters[4]\n cPInFemaleBetaA = vPInParameters[5]\n cPInFemaleBetaB = vPInParameters[6]\n\n # Get the PMove parameters\n cPMoveHeterogeneityIndicator = vPMoveParameters[0]\n cPMoveMaleAll = vPMoveParameters[1]\n cPMoveMaleBetaA = vPMoveParameters[2]\n cPMoveMaleBetaB = vPMoveParameters[3]\n cPMoveFemaleAll = vPMoveParameters[4]\n cPMoveFemaleBetaA = vPMoveParameters[5]\n cPMoveFemaleBetaB = vPMoveParameters[6]\n\n # First sort males\n cNumMaleMosquitoes = numMaleMosquitoes\n cNumSwarms= aArea.getNumSwarms()\n vSwarmSequence = range(0,cNumSwarms)\n aSwarmList = aArea.getSwarmGroup().getTargetList()\n aMaleMosquitoList = []\n\n # Put male mosquitoes in\n while cNumMaleMosquitoes > 0:\n cRandSwarm = random.choice(vSwarmSequence)\n if cPInHeterogeneityIndicator == 0 and cPMoveHeterogeneityIndicator == 0:\n aPInMale = cPInMaleAll\n aPMoveMale = cPMoveMaleAll\n elif cPInHeterogeneityIndicator == 0:\n aPInMale = cPInMaleAll\n aPMoveMale = random.betavariate(cPMoveMaleBetaA,cPMoveMaleBetaB)\n elif cPMoveHeterogeneityIndicator == 0:\n aPInMale = random.betavariate(cPInMaleBetaA,cPInMaleBetaB)\n aPMoveMale = cPMoveMaleAll\n else:\n 
aPInMale = random.betavariate(cPInMaleBetaA,cPInMaleBetaB)\n aPMoveMale = random.betavariate(cPMoveMaleBetaA,cPMoveMaleBetaB)\n\n aMaleMosquitoList.append(basic.maleMosquito(aSwarmList[cRandSwarm],aPInMale,aPMoveMale,cPDie))\n\n # Move the mosquito inside in relation to probability\n c_randInsideSwarm = random.random()\n if c_randInsideSwarm < aMaleMosquitoList[-1].getPIn():\n aMaleMosquitoList[-1].moveInside()\n\n cNumMaleMosquitoes-=1\n\n # Now sort females\n cNumFemaleMosquitoes = numFemaleMosquitoes\n cNumHouses = aArea.getNumHouses()\n vHouseSequence = range(0,cNumHouses)\n aHouseList = aArea.getHouseGroup().getTargetList()\n aFemaleMosquitoList = []\n\n # Put female mosquitoes in\n while cNumFemaleMosquitoes > 0:\n cRandHouse = random.choice(vHouseSequence)\n if cPInHeterogeneityIndicator == 0 and cPMoveHeterogeneityIndicator == 0:\n aPInFemale = cPInFemaleAll\n aPMoveFemale = cPMoveFemaleAll\n elif cPInHeterogeneityIndicator == 0:\n aPInFemale = cPInFemaleAll\n aPMoveFemale = random.betavariate(cPMoveFemaleBetaA,cPMoveFemaleBetaB)\n elif cPMoveHeterogeneityIndicator == 0:\n aPInFemale = random.betavariate(cPInFemaleBetaA,cPInFemaleBetaB)\n aPMoveFemale = cPMoveFemaleAll\n else:\n aPInFemale = random.betavariate(cPInFemaleBetaA,cPInFemaleBetaB)\n aPMoveFemale = random.betavariate(cPMoveFemaleBetaA,cPMoveFemaleBetaB)\n\n aFemaleMosquitoList.append(basic.femaleMosquito(aHouseList[cRandHouse],aPInFemale,aPMoveFemale,cPDie))\n\n\n # Move the mosquito inside in relation to its probability PIn\n cRandInsideHouse= random.random()\n if cRandInsideHouse < aFemaleMosquitoList[-1].getPIn():\n aFemaleMosquitoList[-1].moveInside()\n\n cNumFemaleMosquitoes-=1\n\n# A function which allows the mosquitoes to move around probabilistically\ndef evolveSystem(aArea,cDays,vReleaseParameters,vPInParameters,vPMoveParameters,aPDie,vSampleParameters):\n\n cNumberMaleReleases = vReleaseParameters[0]\n cReleaseMaleStartTime = vReleaseParameters[1]\n cReleaseMaleTimeGap = 
vReleaseParameters[2]\n cReleaseMaleMosquitoNumber = vReleaseParameters[3]\n cNumberFemaleReleases = vReleaseParameters[4]\n cReleaseFemaleStartTime = vReleaseParameters[5]\n cReleaseFemaleTimeGap = vReleaseParameters[6]\n cReleaseFemaleMosquitoNumber = vReleaseParameters[7]\n cIntroductionNew = vReleaseParameters[8]\n vReleaseMaleTimes = releaseTimeGenerator(cNumberMaleReleases,cReleaseMaleStartTime,cReleaseMaleTimeGap)\n vReleaseFemaleTimes = releaseTimeGenerator(cNumberFemaleReleases,cReleaseFemaleStartTime,cReleaseFemaleTimeGap)\n\n cMaleReleaseIndexCounter = 0\n cFemaleReleaseIndexCounter = 0\n fig = plt.figure()\n for t in range(0,cDays):\n print(t)\n # # print(aArea.getNumMosquitoes())\n # print(sum(aArea.getNumListMarkedTotalFemales()))\n\n vMale = aArea.getMaleMosquitoList()\n vFemale = aArea.getFemaleMosquitoList()\n\n MoveAndInsideMosquitoes(vMale,aArea,1,vPInParameters,vPMoveParameters,aPDie)\n MoveAndInsideMosquitoes(vFemale,aArea,0,vPInParameters,vPMoveParameters,aPDie)\n\n vFemales = aArea.getNumListInsideFemales()\n vMales = aArea.getNumListInsideMales()\n\n cMaleReleaseIndexCounter += releaseMosquitoes(t,vReleaseMaleTimes,cMaleReleaseIndexCounter,cReleaseMaleMosquitoNumber,1,aArea,cIntroductionNew,vPInParameters,vPMoveParameters,aPDie)\n cFemaleReleaseIndexCounter += releaseMosquitoes(t,vReleaseFemaleTimes,cFemaleReleaseIndexCounter,cReleaseFemaleMosquitoNumber,0,aArea,cIntroductionNew,vPInParameters,vPMoveParameters,aPDie)\n\n cSampleMaleTime = vSampleParameters[0]\n cSampleFemaleTime = vSampleParameters[1]\n if t == cSampleMaleTime:\n print(\"Sampling males\")\n [cCountMarked,cCountUnmarked] = sampleTargets(aArea,1,vSampleParameters)\n print(cCountMarked,cCountUnmarked)\n print(lincolnEstimate(cCountMarked,cCountUnmarked,cReleaseMaleMosquitoNumber))\n if t == cSampleFemaleTime:\n print(\"Sampling females\")\n [cCountMarked,cCountUnmarked] = sampleTargets(aArea,0,vSampleParameters)\n print(cCountMarked,cCountUnmarked)\n 
print(lincolnEstimate(cCountMarked,cCountUnmarked,cReleaseMaleMosquitoNumber))\n\n vColourMales = aArea.getMarkedIndicatorMales()\n vColourFemales = aArea.getMarkedIndicatorFemales()\n\n\n ax1 = fig.add_subplot(211)\n ax1.scatter(aArea.getHouseLocations()[:,0],aArea.getHouseLocations()[:,1],s=5*vFemales,c=vColourFemales,label='houses')\n plt.legend(loc='upper left')\n ax1.hold(False)\n\n ax2 = fig.add_subplot(212)\n ax2.scatter(aArea.getSwarmLocations()[:,0],aArea.getSwarmLocations()[:,1],s=5*vMales,c=vColourMales,label = 'swarms',vmin=0,vmax = 1)\n ax2.hold(False)\n plt.legend(loc='upper left')\n plt.draw()\n\n\n fig.show()\n\n\ndef MoveAndInsideMosquitoes(vMosquitoList,aArea,cSex,vPInParameters,vPMoveParameters,aPDie):\n k = 1\n cNumMoved = 0\n for mosquitoes in vMosquitoList:\n\n # First see whether or not mosquito dies\n cRandDie = random.random()\n if cRandDie < mosquitoes.getPDie():\n mosquitoes.die(aArea,vPInParameters,vPMoveParameters,aPDie)\n\n else: # If not dead move\n # Whether or not not move mosquito\n cMoveRand = random.random()\n if cMoveRand < mosquitoes.getPMove():\n moveMosquito(mosquitoes,aArea,cSex)\n cNumMoved += 1\n # Whether or not to move the mosquito inside\n cInRand = random.random()\n if cInRand < mosquitoes.getPIn():\n mosquitoes.moveInside()\n else:\n mosquitoes.moveOutside()\n\n return cNumMoved\n\ndef moveMosquito(mosquitoes,aArea,cSex):\n\n # Get a list of all relevant targets\n if cSex == 1:\n vTargetList = list(aArea.getSwarmList())\n else:\n vTargetList = list(aArea.getHouseList())\n\n # Remove the current target from this list\n vTargetList.remove(mosquitoes.getTarget())\n vMovePropensities = []\n aLocation = mosquitoes.getLocation()\n for targets in vTargetList:\n bLocation = targets.getLocation()\n vMovePropensities.append(1/squareDistance(aLocation,bLocation))\n\n # Normalise the propensities\n vMovePropensities = np.array(vMovePropensities)/sum(vMovePropensities)\n\n # Select a target at random\n targetSwitch = 0\n 
cNumTargets = len(vMovePropensities)\n while targetSwitch == 0:\n cTargetRandIndex = random.randint(0,cNumTargets-1)\n cTargetRand = random.random()\n\n if cTargetRand < vMovePropensities[cTargetRandIndex]:\n mosquitoes.move(vTargetList[cTargetRandIndex])\n targetSwitch = 1\n\n\ndef squareDistance(aLocation,bLocation):\n return (aLocation[0]-bLocation[0])**2 + (aLocation[1]-bLocation[1])**2\n\ndef releaseTimeGenerator(cNumReleases,cReleaseStartTime,cReleaseTimeGap):\n vReleaseTimes = []\n cReleaseTimeTemp = cReleaseStartTime\n for i in range(0,cNumReleases):\n vReleaseTimes.append(cReleaseTimeTemp)\n cReleaseTimeTemp += cReleaseTimeGap\n return vReleaseTimes\n\ndef releaseMosquitoes(t,vReleaseTimes,cReleaseIndexCounter,cReleaseMosquitoNumber,cSex,aArea,cIntroductionNew,vPInParameters,vPMoveParameters,aPDie):\n\n # If not a release time just return 0\n if t > vReleaseTimes[-1]:\n return 0\n if t != vReleaseTimes[cReleaseIndexCounter]:\n return 0\n\n if cSex == 1:\n vTargets = aArea.getSwarmList()\n else:\n vTargets = aArea.getHouseList()\n cLenTargets = len(vTargets)\n\n # Find a target that has a sufficient number of mosquitoes\n if cIntroductionNew == 0:\n switchRelease = 0\n while switchRelease == 0:\n cRandTargetIndex = random.randint(0,cLenTargets-1)\n if vTargets[cRandTargetIndex].getNumUnmarkedInside() > cReleaseMosquitoNumber:\n vUnmarkedInsideMosquitoList = vTargets[cRandTargetIndex].getUnmarkedMosquitoInsideList()\n switchRelease = 1\n\n # Only want to release the correct number, no more\n vUnmarkedInsideMosquitoList = vUnmarkedInsideMosquitoList[0:cReleaseMosquitoNumber]\n for mosquitoes in vUnmarkedInsideMosquitoList:\n mosquitoes.mark()\n\n else: # Just release that number of marked mosquitoes into a random target location\n cRandTargetIndex = random.randint(0,cLenTargets-1)\n cPInHeterogeneityIndicator = vPInParameters[0]\n cPInMaleAll = vPInParameters[1]\n cPInMaleBetaA = vPInParameters[2]\n cPInMaleBetaB = vPInParameters[3]\n cPInFemaleAll = 
vPInParameters[4]\n cPInFemaleBetaA = vPInParameters[5]\n cPInFemaleBetaB = vPInParameters[6]\n\n # Get the PMove parameters\n cPMoveHeterogeneityIndicator = vPMoveParameters[0]\n cPMoveMaleAll = vPMoveParameters[1]\n cPMoveMaleBetaA = vPMoveParameters[2]\n cPMoveMaleBetaB = vPMoveParameters[3]\n cPMoveFemaleAll = vPMoveParameters[4]\n cPMoveFemaleBetaA = vPMoveParameters[5]\n cPMoveFemaleBetaB = vPMoveParameters[6]\n\n for i in range(0,cReleaseMosquitoNumber):\n if cSex == 1:\n if cPInHeterogeneityIndicator == 0:\n aPIn = cPInMaleAll\n else:\n aPIn = random.betavariate(cPInMaleBetaA,cPInMaleBetaB)\n if cPMoveHeterogeneityIndicator == 0:\n aPMove = cPMoveMaleAll\n else:\n aPMove = random.betavariate(cPMoveMaleBetaA,cPMoveMaleBetaB)\n aMosquito = basic.maleMosquito(vTargets[cRandTargetIndex],aPIn,aPMove,aPDie)\n aMosquito.mark()\n else:\n if cPInHeterogeneityIndicator == 0:\n aPIn = cPInFemaleAll\n else:\n aPIn = random.betavariate(cPInFemaleBetaA,cPInFemaleBetaB)\n if cPMoveHeterogeneityIndicator == 0:\n aPMove = cPMoveFemaleAll\n else:\n aPMove = random.betavariate(cPMoveFemaleBetaA,cPMoveFemaleBetaB)\n aMosquito = basic.femaleMosquito(vTargets[cRandTargetIndex],aPIn,aPMove,aPDie)\n aMosquito.mark()\n\n return 1\n\ndef sampleTargets(aArea,cSex,vSampleParameters):\n\n if cSex == 0: # Females - assume we know the location of all houses\n vTargetsSampled = aArea.getHouseList()\n else:\n cKnownSwarmsPercentage = vSampleParameters[2]\n vTargets = aArea.getSwarmList()\n\n # Assume that only a random fraction of male swarms are known\n cNumSwarms = len(vTargets)\n vTargetsShuffled = random.sample(vTargets,cNumSwarms)\n cNumKnownSwarms = int(cKnownSwarmsPercentage*cNumSwarms)\n vTargetsKnown = vTargetsShuffled[0:cNumKnownSwarms]\n vTargetsSampled = vTargetsKnown\n\n cDailyNumTargetsSampled = vSampleParameters[3]\n vTargetsSampled = vTargetsSampled[0:cDailyNumTargetsSampled]\n\n cCountMarked = 0\n cCountUnmarked = 0\n for targets in vTargetsSampled:\n cCountMarked += 
targets.getNumMarkedInside()\n cCountUnmarked += targets.getNumUnmarkedInside()\n\n return [cCountMarked,cCountUnmarked]\n\ndef lincolnEstimate(cCountMarked,cCountUnmarked,cNumReleased):\n if cCountMarked == 0:\n print(\"No marked mosquitoes found\")\n return -1\n\n cCountTotal = cCountMarked + cCountUnmarked\n return cNumReleased*(cCountTotal/cCountMarked)\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41131,"cells":{"__id__":{"kind":"number","value":18047452594593,"string":"18,047,452,594,593"},"blob_id":{"kind":"string","value":"7440a967f5cb1e017ef2be60e92de000b7bf3f78"},"directory_id":{"kind":"string","value":"6c7be3a7c642b1b26cf29c1b2ba3c1f39369e783"},"path":{"kind":"string","value":"/sphericaltrig.py"},"content_id":{"kind":"string","value":"ea9d31acede46ca6b7840787c2feeb51d65694bd"},"detected_licenses":{"kind":"list like","value":["LicenseRef-scancode-warranty-disclaimer"],"string":"[\n 
\"LicenseRef-scancode-warranty-disclaimer\"\n]"},"license_type":{"kind":"string","value":"non_permissive"},"repo_name":{"kind":"string","value":"AndrewSDFoster/AST4700"},"repo_url":{"kind":"string","value":"https://github.com/AndrewSDFoster/AST4700"},"snapshot_id":{"kind":"string","value":"8fe06196d8e39e4083ffd8e65e5021e838b126d1"},"revision_id":{"kind":"string","value":"d01e1cf5ef02c7f2149559e3acbf2e398ead9e7b"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2020-12-30T10:36:57.842274","string":"2020-12-30T10:36:57.842274"},"revision_date":{"kind":"timestamp","value":"2014-02-28T17:40:23","string":"2014-02-28T17:40:23"},"committer_date":{"kind":"timestamp","value":"2014-02-28T17:40:23","string":"2014-02-28T17:40:23"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"#!/usr/bin/env python\n'''\ndms2deg - converst from dms to deg\nhms2deg - converts from hms to deg\ndeg2dms - converts from deg to dms\ndeg2hms - converts from deg to hms\nAngSepReal - uses spherical trig to find angle between two points\nAngSepEucl - estimates angle between two points with a right triangle\nAngSepPole - estimates angle between two points with a polar triangle\nequ2ecl - converts equatorial to ecliptic coordinates \necl2gal - converts ecliptic to galactic coordinates \ngal2equ - converts galactic to equatorial coordinates \nequ2gal - converts equatorial to galactic coordinates 
\ngal2ecl - converts galactic to ecliptic coordinates \necl2equ - converts ecliptic to equatorial coordinates \nfuck horizon coordinates. Seriously. I'm not doing that shit.\nsphericalLawOfCosines - what the name suggests\nsphericalLawOfSines - ^\neuclideanLawOfCosines - ^^\neuclideanLawOfSines - ^^^\nEquinoxToJ2000 - Converts from any equinox to J2000\nEpochWithJ2000equinox - finds a nearby epoch with J2000 equinox\nB1950toJ2000 - converts from the B1950 equinox to the J2000 equinox\nrefractionAnglehor - computes angle of atmospheric refraction\ntrueAltitudehor\t - computes true altitude from apparent\napparentAltitudehor - computes apparent altitude from true\n'''\n\nimport numpy as np\n\ndef dms2deg(DEC):\n '''DEC of form [degree, arcminute, arcsecond], returns as decimal'''\n #distribute the sign of the first nonzero entry to the others\n dec = np.float64(DEC.copy())\n if dec[0] == 0:\n if dec[1] < 0:\n dec[2] *= -1\n elif dec[0] < 0:\n dec[1] *= -1\n dec[2] *= -1\n\n #check input\n if (not ((dec[0] <= 0 and dec[1] <= 0 and dec[2] <= 0) \\\n or (dec[0] >= 0 and dec[1] >= 0 and dec[2] >= 0))\\\n or dec[0] > 90 or dec[0] < -90 \\\n or dec[1] > 60 or dec[1] < -60 \\\n or dec[2] > 60 or dec[2] < -60):\n print(\"ERROR bad input for dms2deg, trying to continue anyway\")\n \n return dec[0] + dec[1]/60. + dec[2]/3600.\n\ndef hms2deg(ra):\n '''ra of form [hour, minute, second], returns as decimal'''\n #type conversion\n ra = np.float64(ra.copy())\n #check input\n if (ra[0] < 0 or ra[0] > 24 \\\n or ra[1] < 0 or ra[1] > 60 \\\n or ra[2] < 0 or ra[2] > 60):\n print(\"ERROR bad input for hms2deg, trying to compute anyway\")\n\n return (ra[0] + ra[1]/60. 
+ ra[2]/3600.)*15\n\ndef deg2dms(c):\n '''deg is decimal degrees, converts to array of [degrees, minutes, seconds]'''\n deg = int( c) \n amn = np.abs(int( (c-deg)*60.))\n asc = ((np.abs((c-deg)*60.))-amn)*60.\n\n if c < 0 and deg == 0:\n if amn == 0:\n asc *= -1\n else:\n amn *= -1\n\n return np.array([deg, amn, asc])\n\ndef deg2hms(c):\n '''c is decimal degrees, converts to an array of [hours, minutes, seconds]'''\n return deg2dms(c/15)\n\ndef AngSepReal(ra1, dec1, ra2, dec2):\n ''' ra1 and ra2 are lists of the form [ hour, minute, second]\n dec1 and dec2 are lists of the form [degree, arcmin, arcsec]\n returns angle between them\n '''\n #A is angle at pt 2, a is side length across from A (from pole to pt 1)\n #B is angle at pt 1, b is side length across from B (from pole to pt 2)\n #C is angle at pole, c is side length across from C (from pt 1 to pt 2)\n\n #Find angle C (difference of RA's)\n C = np.abs(ra1-ra2)\n\n #find sides a and b (90-dec)\n a = 90 - dec1\n b = 90 - dec2\n\n #spherical law of cosines\n c = sphericalLawOfCosines(a=a,b=b,c=None,C=C)\n\n return c\n\ndef AngSepEucl(ra1, dec1, ra2, dec2):\n ''' ra1 and ra2 are lists of the form [ hour, minute, second]\n dec1 and dec2 are lists of the form [degree, arcmin, arcsec]\n returns angle between them estimated from euclidean right triangle\n '''\n\n #convert to degrees\n RA1 = hms2deg( ra1)\n RA2 = hms2deg( ra2)\n DEC1 = dms2deg(dec1)\n DEC2 = dms2deg(dec2)\n\n #find avg dec, and the differences in dec and ra\n aDEC = (DEC1 + DEC2) / 2.\n dDEC = np.abs(DEC1 - DEC2)\n dRA = np.abs( RA1 - RA2)\n\n #find read \"dist\" in RA\n rdRA = dRA*np.cos(aDEC*np.pi/180.)\n\n #compute length of hypotenuse\n dist = np.sqrt(rdRA*rdRA + dDEC*dDEC)\n\n return deg2dms(dist)\n\ndef AngSepPole(ra1, dec1, ra2, dec2):\n ''' ra1 and ra2 are lists of the form [ hour, minute, second] \n dec1 and dec2 are lists of the form [degree, arcmin, arcsec] \n returns angle between them as estimated by a euclidean polar triangle\n ''' \n #get 
degrees of each angle \n RA1 = hms2deg( ra1) \n RA2 = hms2deg( ra2) \n DEC1 = dms2deg(dec1) \n DEC2 = dms2deg(dec2) \n \n #A is angle at pt 2, a is side length across from A (from pole to pt 1)\n #B is angle at pt 1, b is side length across from B (from pole to pt 2)\n #C is angle at pole, c is side length across from C (from pt 1 to pt 2)\n \n #Find angle C (difference of RA's) \n C = deg2dms(np.abs(RA1-RA2))\n \n #find sides a and b (90-dec) \n a = 90 - DEC1 \n b = 90 - DEC2 \n\n #switch to using south pole if it is closer\n if (a + b)/2 > 90:\n a = 180 - a\n b = 180 - b\n \n #euclidean law of cosines \n c = euclideanLawOfCosines(a=a, b=b, c=None, C=C)\n \n return deg2dms(c) \n\ndef equ2ecl((alpha, delta)):\n '''accepts tuple of dms arrays, returns the same.\n converts hms RA and dms DEC into dms beta and lambda'''\n #define constants and get things into decimals/radians\n epsilon = dms2deg([23, 26, 21]) * np.pi/180\n alpha = hms2deg(alpha) * np.pi/180\n delta = dms2deg(delta) * np.pi/180\n\n #calculate beta\n beta = np.arcsin(np.sin(delta)*np.cos(epsilon) - \\\n np.cos(delta)*np.sin(epsilon)*np.sin(alpha))\n\n #calculate cos and sin of lmbda\n coslmbda = np.cos(delta)*np.cos(alpha)/np.cos(beta)\n sinlmbda = (np.sin(delta) - np.cos(epsilon)*np.sin(beta)) / \\\n (np.sin(epsilon)*np.cos(beta))\n #use arctan2 to get the right quadrant\n lmbda = np.arctan2(sinlmbda, coslmbda)\n\n #bring it back to dms\n beta = deg2dms( beta * 180/np.pi)\n lmbda = deg2dms(lmbda * 180/np.pi)\n\n #return a tuple\n return (beta, lmbda)\n\ndef equ2gal((alpha, delta)):\n '''accepts a tuple of dms arrays, returns the same.\n converts hms RA and dms DEC into dms b an l'''\n #define constants and get things into decimals/radians\n deltag = dms2deg([27,07,42]) * np.pi/180\n alphag = hms2deg([12,51,26.3]) * np.pi/180\n lnode = dms2deg([32,55,55]) * np.pi/180\n delta = dms2deg(delta) * np.pi/180\n alpha = hms2deg(alpha) * np.pi/180\n\n #calculate b\n b = np.arcsin(np.sin(deltag)*np.sin(delta) + 
\\\n np.cos(deltag)*np.cos(delta)*np.cos(alpha-alphag))\n\n #calculate cos and sin of l\n cosl = np.cos(delta)*np.sin(alpha-alphag)/np.cos(b)\n sinl = (np.sin(delta)-np.sin(deltag)*np.sin(b))/(np.cos(deltag)*np.cos(b))\n\n #use arctan2 to get the right quadrant\n l = np.arctan2(sinl, cosl)\n\n #readd lnode back in\n l += lnode\n\n #bring it back to dms\n l = deg2dms(l * 180/np.pi)\n b = deg2dms(b * 180/np.pi)\n\n #return a tuple\n return (b,l)\n\ndef ecl2equ((beta, lmbda)):\n '''accepts a tuple of dms arrays, returns the same.\n converts dms beta and lmbda to hms RA and dms DEC'''\n #define constants and get things into decimals/radians\n epsilon = dms2deg([23, 26, 21]) * np.pi/180\n beta = dms2deg(beta) * np.pi/180\n lmbda = dms2deg(lmbda) * np.pi/180\n\n #calculate delta\n delta = np.arcsin(np.sin(beta)*np.cos(epsilon) + \\\n np.cos(beta)*np.sin(epsilon)*np.sin(lmbda))\n\n #calculate cos and sin of alpha\n cosalpha = np.cos(lmbda)*np.cos(beta)/np.cos(delta)\n sinalpha = (np.cos(epsilon)*np.sin(delta) - np.sin(beta)) / \\\n (np.sin(epsilon)*np.cos(delta))\n #use arctan2 to get the right quadrant\n alpha = np.arctan2(sinalpha, cosalpha)\n\n #bring it back to dms\n alpha = deg2hms(alpha * 180/np.pi)\n delta = deg2dms(delta * 180/np.pi)\n\n #return a tuple\n #return (alpha, delta)\n\ndef gal2equ((b, l)):\n '''accepts a tuple of dms arrays, returns the same.\n converts dms b and l to hms RA and dms DEC'''\n #define constants and get things into decimals/radians\n deltag = dms2deg([27,07,42]) * np.pi/180\n alphag = hms2deg([12,51,26.3]) * np.pi/180\n lnode = dms2deg([32,55,55]) * np.pi/180\n b = dms2deg(b) * np.pi/180\n l = hms2deg(l) * np.pi/180\n\n #calculate delta\n delta = np.arcsin(np.sin(deltag)*np.sin(b) + \\\n np.cos(deltag)*np.cos(b)*np.sin(l-lnode))\n\n #calculate cos and sin of alpha\n cosalp = (np.sin(b)-np.sin(deltag)*np.sin(delta))/(np.cos(deltag)*np.cos(delta))\n sinalp = np.cos(l-lnode)*np.cos(b)/np.cos(delta)\n\n #use arctan2 to get the right 
quadrant\n alpha = np.arctan2(sinalp, cosalp)\n\n #add alphag back in\n alpha += alphag\n\n #bring it back to dms\n alpha = deg2hms(alpha * 180/np.pi)\n delta = deg2dms(delta * 180/np.pi)\n\n #return a tuple\n return (alpha, delta)\n\ndef ecl2gal((beta, lmbda)):\n '''accepts a tuple of dms arrays, returns the same.\n converts dms beta and lambda to dms b and l'''\n return equ2gal(ecl2equ((beta, lmbda)))\n\ndef gal2ecl((b, l)):\n '''accepts a tuple of dms arrays, returns the same.\n converts dms b and l to dms beta and lambda'''\n return equ2ecl(gal2equ((b, l)))\n\ndef sphericalLawOfCosines(a, b, c, C=None):\n '''Law of cosines, lowercase variables are side lengths (dms arrays)\n either c or C (opposite side/angle pair) is calculated from the\n other parameters.\n '''\n #if angle must be found\n if C == None:\n #do math, notice the conversion to/from radians\n a = dms2deg(a)*np.pi/180\n b = dms2deg(b)*np.pi/180\n c = dms2deg(c)*np.pi/180\n C = np.arccos((np.cos(c) - np.cos(a)*np.cos(b))/np.sin(a)*np.sin(b))\n\n return deg2dms(C*180/np.pi)\n\n #if side must be found\n if c == None:\n #do math, notice the conversion to/from radians\n a = dms2deg(a)*np.pi/180 \n b = dms2deg(b)*np.pi/180 \n C = dms2deg(C)*np.pi/180\n c = np.arccos(np.cos(a)*np.cos(b) + np.sin(a)*np.sin(b)*np.cos(C))\n\n return deg2dms(c*180/np.pi)\n\n #tell the user they fucked up\n else:\n print(\"Error, law of cosines invalid parameters\")\n return np.array([0,0,0])\n\ndef sphericalLawOfSines(angle1, side1, angle2, side2=None):\n '''Law of sines. 
4 possible inputs, angle1 and side1 must be dms arrays and \n either angle2 or side2 must also be dms arrays, with the other as None\n This function will determing the missing side/angle from the other three\n parameters\n '''\n #if side2 must be found\n if side2 == None:\n #do math, notice the conversion to/from radians\n angle1 = dms2deg(angle1)*np.pi/180\n angle2 = dms2deg(angle2)*np.pi/180\n side1 = dms2deg( side2)*np.pi/180\n side2 = np.arcsin(np.sin(angle2)*np.sin(side1)/np.sin(angle1))*180/np.pi\n\n return deg2dms(side2)\n\n #if angle2 must be found\n if angle2 == None:\n #do math, notice the conversion to/from radians\n angle1 = dms2deg(angle1)*np.pi/180\n side1 = dms2deg( side1)*np.pi/180\n side2 = dms2deg( side2)*np.pi/180\n angle2 = np.arcsin(np.sin(side2)*np.sin(angle1)/np.sin(side1))*180/np.pi\n\n return deg2dms(angle2)\n\n #tell the user that they fucked up\n else:\n print('Error, law of sines overconstrained, returning 0')\n return np.array([0,0,0])\n\ndef euclideanLawOfCosines(a, b, c, C=None):\n ''' euclidean law of cosines\n parameters of the sides of a triangle and one angle,\n when given 3, the fourth is found. 
C and c are opposite each other\n '''\n #if angle must be found\n if C == None:\n #do math (notice degree radian conversion)\n C = np.arccos((c*c - a*a - b*b)/(-2*a*b))*180/np.pi\n return deg2dms(C)\n\n #if side must be found\n if c == None:\n #do math (notice degree radian conversion)\n C = dms2deg(C)*np.pi/180\n c = np.sqrt(a*a + b*b - 2*a*b*np.cos(C))\n return c\n\n #tell the user that they fucked up\n else:\n print(\"invalid parameters for euclidean law of cosines\")\n return 0\n\ndef euclideanLawOfSines(A, a, B, b = None):\n ''' euclidean Law of sines\n parameters of opposite side/angle pairs (A/a and B/b)\n from three values, the fourth is calculated\n '''\n #if side must be found\n if b == None:\n #do math, notice radians/degrees conversions\n A = dms2deg(A)*np.pi/180\n B = dms2deg(B)*np.pi/180\n b = np.sin(B)*a/np.sin(A)\n return b\n\n #if angle must be found\n if B == None:\n #do math, notice radians/degrees conversions\n A = dms2deg(A)*np.pi/180\n B = np.arcsin(b*np.sin(A)/a)*180/np.pi\n return deg2dms(B)\n\n #tell the user that they fucked up\n else:\n print(\"invalid parameters for euclidean law of sines\")\n return 0\n\ndef EquinoxToJ2000(alpha, delta, pmA, pmD, date, BJD=False):\n '''converts Ephemeri from one time to J2000.0\n alpha and delta are the location of the star at start (RA and dec)\n pmA and pmD are the proper motions in alpha and delta in arcsec/yr\n returns alpha and delta in J2000.0\n Can also be done in BJD rather than years\n '''\n #compute time for time standard\n if BJD:\n T = np.float64(date-2451545.0)/36525.\n year = (T*100)+2000.0\n else:\n T = np.float64(date-2000.0)/100.\n year = date\n\n #Compute precession constants for time and get everything into radians\n M = (1.2812323*T + 0.0003879*T*T + 0.0000101*T*T*T)*np.pi/180.\n N = (0.5567530*T - 0.0001185*T*T - 0.0000116*T*T*T)*np.pi/180.\n alpha = hms2deg(alpha)*np.pi/180\n delta = dms2deg(delta)*np.pi/180\n\n #find the mean epoch for each time\n alpham = alpha - 0.5*(M + 
N*np.sin(alpha)*np.tan(delta))\n deltam = delta - 0.5*N*np.cos(alpham)\n\n #find the new location of the star's old position\n alpha0 = alpha - M - N*np.sin(alpham)*np.tan(deltam)\n delta0 = delta - N*np.cos(alpham)\n\n #return to hms and dms\n delta0 = deg2dms(delta0*180/np.pi)\n alpha0 = deg2hms(alpha0*180/np.pi)\n\n\n #Account for proper motions\n (alphaf, deltaf) = EpochWithJ2000equinox(alpha0, delta0, pmA, pmD, year)\n\n #return\n return (alphaf, deltaf)\n\ndef EpochWithJ2000equinox(alpha0, delta0, pmA, pmD, date, BJD=False):\n '''Uses J2000 Equinox and proper motions to find new locations of stars\n at a different epoch. Takes alpha0 and delta0 as the locations at epoch and\n equinox of J2000, and pmA, pmD, the proper motions in alpha and delta and\n calculates the starses positsdjolfjslifjoisjd ljals lksjdlkjk fuck you\n '''\n #years of how many of them go past since yeah\n #wow I was tired when I wrote this, making it more readable\n #get the amount of time in years since 2000.0\n if BJD:\n years = np.abs((date-2451545.0)/365.)\n else:\n years = np.abs(date-2000.0)\n\n delta0 = dms2deg(delta0)\n alpha0 = hms2deg(alpha0)\n\n #correct for proper motions, be sure to correct for cos(dec) factor in RA\n deltaf = delta0 + pmD*years/3600.\n #average delta, converted to radians inside of cosine\n alphaf = alpha0 + (pmA*years/3600.)/np.cos(((delta0+deltaf)/2.)*np.pi/180.)\n\n #back to dms/hms\n deltaf = deg2dms(deltaf)\n alphaf = deg2hms(alphaf)\n\n #return the values\n return (alphaf, deltaf)\n\ndef B1950toJ2000(alpha, delta, pmA, pmD):\n '''Converts from B1950 equinox/epoch to J2000 equinox/epoch\n takes alpha, delta, and proper motions in each for B1950\n returns alpha and delta in J2000\n NOTE: This is just a wrapper for EquinoxToJ2000() with the equinox\n coming from B1950\n '''\n return EquinoxToJ2000(alpha, delta, pmA, pmD, 2433282.423, BJD=True)\n\ndef refractionAnglehor(Aapp):\n ''' Finds the angle of refraction of an object at an apparent altitude of 
Aapp\n Aapp given in typical [deg, min, sec] numpy array\n NOTE: THIS IS AN APPROXIMATION THAT ONLY WORKS CLOSE TO THE HORIZON\n '''\n c0 = 35.338/60\n c1 = -13.059/60\n c2 = 2.765/60\n c3 = -0.244/60\n\n #get to degs\n Aapp = dms2deg(Aapp)\n\n #formula\n theta = deg2dms(c0 + c1*Aapp + c2*Aapp**2 + c3*Aapp**3)\n\n return theta\n\ndef trueAltitudehor(Aapp):\n ''' wrapper that finds true altitude from the apparent altitude, Aapp,\n Aapp given in typical [deg, min, sec] numpy array\n NOTE: THIS IS AN APPROXIMATION THAT ONLY WORKS CLOSE TO THE HORIZON\n '''\n #this is basically just a wrapper function\n a = deg2dms(dms2deg(Aapp) - dms2deg(refractionAnglehor(Aapp)))\n return a\n\ndef apparentAltitudehor(a):\n ''' Finds apparent altitude from true altitude, given in typical [deg,min,sec]\n numpy array\n NOTE: THIS IS AN APPROXIMATION THAT ONLY WORKS CLOSE TO THE HORIZON\n '''\n c0 = 35.338/60\n c1 = -13.059/60\n c2 = 2.765/60\n c3 = -0.244/60\n\n #this is the polynomial\n roots = np.roots((-c3, -c2, 1-c1, -c0-dms2deg(a)))\n\n #only return the real one\n for root in roots:\n if np.imag(root) == 0:\n Aapp = deg2dms(np.real(root))\n\n return Aapp\n\ndef trueAltZen(a):\n '''Finds true altitude from apparent using approximation near zenith\n a in [degrees,minutes,seconds] numpy array\n NOTE: THIS IS AN APPROXIMATION THAT ONLY WORKS CLOSE TO THE HORIZON\n '''\n n = 1.0002923\n return deg2dms(np.arcsin(n*np.sin(dms2deg(a)*np.pi/180))*180/np.pi)\n\ndef apparAltZen(z):\n '''Finds apparent altitude from apparent using approximation near zenith\n z in [degrees,minutes,seconds] numpy array\n NOTE: THIS IS AN APPROXIMATION THAT ONLY WORKS CLOSE TO THE HORIZON\n '''\n n = 1.0002923\n return 
deg2dms(np.arcsin(np.sin(dms2deg(z)*np.pi/180)/n)*180/np.pi)\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41132,"cells":{"__id__":{"kind":"number","value":3685081964635,"string":"3,685,081,964,635"},"blob_id":{"kind":"string","value":"c90ecd3e6611c8efcb74bd7673a32a0b436baf13"},"directory_id":{"kind":"string","value":"c0fa3ffcdcdb6ab92c239abaa77211f1ae387493"},"path":{"kind":"string","value":"/engine/python/fife/extensions/serializers/__init__.py"},"content_id":{"kind":"string","value":"05615060adaad4b190a73f2c5502d0618e35a4f0"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"harendranathvegi9/Zero-Sum-Cascade-Old"},"repo_url":{"kind":"string","value":"https://github.com/harendranathvegi9/Zero-Sum-Cascade-Old"},"snapshot_id":{"kind":"string","value":"cfee79862e5f6dba24b51d0610b986d3051cafc3"},"revision_id":{"kind":"string","value":"4cf70a74d2282d4387e935402a9b94940e34bedc"},"branch_name":{"kind":"string","value":"refs/heads/main"},"visit_date":{"kind":"timestamp","value":"2021-01-17T23:41:41.489875","string":"2021-01-17T23:41:41.489875"},"revision_date":{"kind":"timestamp","value":"2010-09-12T13:18:59","string":"2010-09-12T13:18:59"},"committer_date":{"kind":"timestamp","value":"2010-09-12T13:18:59","string":"2010-09-12T13:18:59"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{
"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"# -*- coding: utf-8 -*-\r\n\r\n# ####################################################################\r\n# Copyright (C) 2005-2009 by the FIFE team\r\n# http://www.fifengine.de\r\n# This file is part of FIFE.\r\n#\r\n# FIFE is free software; you can redistribute it and/or\r\n# modify it under the terms of the GNU Lesser General Public\r\n# License as published by the Free Software Foundation; either\r\n# version 2.1 of the License, or (at your option) any later version.\r\n#\r\n# This library is distributed in the hope that it will be useful,\r\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\r\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU\r\n# Lesser General Public License for more details.\r\n#\r\n# You should have received a copy of the GNU Lesser General Public\r\n# License along with this library; if not, write to the\r\n# Free Software Foundation, Inc.,\r\n# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA\r\n# ####################################################################\r\n\r\nimport fife, sys, os\r\nfrom traceback import print_exc\r\n\r\n__all__ = ('ET', 'SerializerError', 'InvalidFormat', 'WrongFileType', 'NameClash', 'NotFound', 'warn', 'root_subfile', 'reverse_root_subfile')\r\n\r\ntry:\r\n\timport xml.etree.cElementTree as ET\r\nexcept:\r\n\timport xml.etree.ElementTree as ET\r\n\r\nclass SerializerError(Exception):\r\n\tpass\r\n\r\nclass InvalidFormat(SerializerError):\r\n\tpass\r\n\r\nclass WrongFileType(SerializerError):\r\n\tpass\r\n\r\nclass NameClash(SerializerError):\r\n\tpass\r\n\r\nclass NotFound(SerializerError):\r\n\tpass\r\n\r\ndef warn(self, msg):\r\n\tprint 'Warning (%s): %s' % (self.filename, msg)\r\n\r\ndef root_subfile(masterfile, subfile):\r\n\t\"\"\"\r\n\tReturns 
new path for given subfile (path), which is rooted against masterfile\r\n\tE.g. if masterfile is ./../foo/bar.xml and subfile is ./../foo2/subfoo.xml,\r\n\treturned path is ../foo2/subfoo.xml\r\n\tNOTE: masterfile is expected to be *file*, not directory. subfile can be either\r\n\t\"\"\"\r\n\ts = '/'\r\n\r\n\tmasterfile = norm_path(os.path.abspath(masterfile))\r\n\tsubfile = norm_path(os.path.abspath(subfile))\r\n\r\n\tmaster_fragments = masterfile.split(s)\r\n\tsub_fragments = subfile.split(s)\r\n\r\n\tmaster_leftovers = []\r\n\tsub_leftovers = []\r\n\r\n\tfor i in xrange(len(master_fragments)):\r\n\t\ttry:\r\n\t\t\tif master_fragments[i] == sub_fragments[i]:\r\n\t\t\t\tmaster_leftovers = master_fragments[i+1:]\r\n\t\t\t\tsub_leftovers = sub_fragments[i+1:]\r\n\t\texcept IndexError:\r\n\t\t\tbreak\r\n\r\n\tpathstr = ''\r\n\tfor f in master_leftovers[:-1]:\r\n\t\tpathstr += '..' + s\r\n\tpathstr += s.join(sub_leftovers)\r\n\treturn pathstr\r\n\r\ndef reverse_root_subfile(masterfile, subfile):\r\n\t\"\"\"\r\n\tdoes inverse operation to root_subfile. E.g. \r\n\tE.g. if masterfile is ./../foo/bar.xml and subfile is ../foo2/subfoo.xml,\r\n\treturned path ./../foo2/subfoo.xml\r\n\tUsually this function is used to convert saved paths into engine relative paths\r\n\tNOTE: masterfile is expected to be *file*, not directory. subfile can be either\r\n\t\"\"\"\r\n\ts = '/'\r\n\r\n\tmasterfile = norm_path(os.path.abspath(masterfile)).split(s)[:-1]\r\n\tsubfile = norm_path(os.path.abspath( s.join(masterfile) + s + subfile ))\r\n\tmasterfile = norm_path(os.getcwd()) + s + 'foo.bar' # cheat a little to satisfy root_subfile\r\n\treturn root_subfile(masterfile, subfile)\r\n\r\ndef norm_path(path):\r\n\t\"\"\"\r\n\tMakes the path use '/' delimited separators. 
FIFE always uses these delimiters, but some os-related\r\n routines will default to os.path.sep.\r\n\t\"\"\"\r\n\tif os.path.sep == '/':\r\n\t\treturn path\r\n\r\n\treturn '/'.join(path.split(os.path.sep))\t\r\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2010,"string":"2,010"}}},{"rowIdx":41133,"cells":{"__id__":{"kind":"number","value":13469017445357,"string":"13,469,017,445,357"},"blob_id":{"kind":"string","value":"f3cdbdf43f34d2e2e0ec0665a03642b42234db8b"},"directory_id":{"kind":"string","value":"4ae36a0be4d359741c2278a89d1df9f0c7b680fa"},"path":{"kind":"string","value":"/wwatcher.py"},"content_id":{"kind":"string","value":"33d2e4096b2e4a250f0e6983cf8d1638f584c5c6"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"coxlab/wwatcher"},"repo_url":{"kind":"string","value":"https://github.com/coxlab/wwatcher"},"snapshot_id":{"kind":"string","value":"2297b34e4d01a792f647581fb173cf2c4f1a0454"},"revision_id":{"kind":"string","value":"e6efabb1d53f9b96b982cd3b53ec63b7492c1fa7"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-05-26T22:47:26.778609","string":"2021-05-26T22:47:26.778609"},"revision_date":{"kind":"timestamp","value":"2013-08-02T17:25:59","string":"2013-08-02T17:25:59"},"committer_date":{"kind":"timestamp","value":"2013-08-02T17:25:59","string":"2013-08-02T17:25:59"},"github_id":{"kind":"number","value":11372670,"string":"11,372,670"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updat
ed_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"#!/usr/bin/env python\n\nimport gspread\nimport sys\nimport pylab\nimport argparse\nimport getpass\nimport datetime\nimport wwatcher\nfrom matplotlib import pyplot\nimport matplotlib.dates\nfrom matplotlib.dates import DateFormatter, WeekdayLocator, DayLocator, MONDAY\nimport random\nfrom matplotlib import legend\n\ndef main():\n\n\t'''\n\tParse command line options to analyze animal weight data from Google Sheets. Creates a WeightWatcher class and executes\n\tmethods specified by the user on the command line. \n\t'''\n\t#TODO add spreadsheet name and url customizability to command line interface\n\tparser = argparse.ArgumentParser(description=\"A command line tool to analyze animal weights stored in Google Sheets\", \\\n\t\tusage=\"wwatcher.py Username animalName1 animalName2 animalName3 [options] \\n\\\n\t\tor \\n\\\n\t\twwatcher.py [options] Username animalName1 animalName2 animalName3\")\n\tparser.add_argument('username', help=\"Google Docs username, required as first argument (e.g. chapman@coxlab.org)\")\n\tparser.add_argument('animals', help=\"Animal IDs to analyze, separated by spaces. 
At least 1 is required, but you \\\n\t\tcan add as many as you want\", nargs=\"+\")\n\tparser.add_argument('-c', action=\"store_true\", default=False, help=\"Check to make sure each animal weighed at least \\\n\t\t90 percent of its most recent maximum (weekend) value for the last 4 weekdays\")\n\tparser.add_argument('-d', help=\"Specify the number of weekdays to analyze with -c option\")\n\tparser.add_argument('-g', action=\"store_true\", default=False, help=\"Make a graph of each animal's weight over time\")\n\tparser.add_argument('-a', action=\"store_true\", default=False, help=\"Make one graph of every animal's weight over time\")\n\tparser.add_argument('-r', action=\"store_true\", default=False, help=\"Graph a linear regression where x values are max weights \\\n\t\tand y values are the previous week's average daily weight\")\n\n\tparsed = parser.parse_args()\n\n\t#make sure at least 1 specified option calls a WeightWatcher class method, else give the user help and exit\n\tif (parsed.c == False) and (parsed.g == False) and (parsed.a == False) and (parsed.r == False):\n\t\tparser.print_help()\n\t\tsys.exit()\n\n\tusername = parsed.username\n\tanimals = parsed.animals\n\n\t#if the username is weights@coxlab.org, no need to ask for password in terminal. 
It's this crazy string, and we want to run\n\t#the script automatically without stopping for user input every week\n\tif username == \"weights@coxlab.org\":\n\t\tpassword = \"}ONCuD*Xh$LNN8ni;0P_HR_cIy|Q5p\"\n\telse:\n\t\tpassword = getpass.getpass(\"Enter your Google Docs password: \")\n\n\twatcher = wwatcher.WeightWatcher(username, password, animals)\n\t#if the user selects the -c option, check animal weights to make sure they don't go below 90% max\n\tif parsed.c:\n\t\tif parsed.d:\n\t\t\tHeavyEnoughDict = watcher.IsHeavyEnough(days=parsed.d)\n\t\telse:\n\t\t\tHeavyEnoughDict = watcher.IsHeavyEnough()\n\n\t\t#make a list of animals that aren't heavy enough\n\t\tproblem_animals = []\n\t\tfor animal in animals:\n\t\t\tif not HeavyEnoughDict[animal]:\n\t\t\t\tproblem_animals.append(animal)\n\t\t#TODO implement email functionality for alerts when this option is run automatically\n\t\tif len(problem_animals) == 0:\n\t\t\tprint \"Animal weights look fine. Awesome!\\n\"\n\t\telse:\n\t\t\tfor each in problem_animals:\n\t\t\t\tprint \"A stupid algorithm thinks %s is underweight. 
You might want to check on him!\" % each\n\n\tif parsed.g:\n\n\t\t#dict with animals ID strings as keys and a list of lists of the same length [[dates], [weights for those dates], [whether it was a weekend weight Boolean]]\n\t\tdata_for_graph = watcher.format_data_for_graph()\n\t\tfor animal in animals:\n\t\t\tdates = data_for_graph[animal][0]\n\t\t\tweights = data_for_graph[animal][1]\n\t\t\tfig = pyplot.figure(str(datetime.date.today()))\n\t\t\tpyplot.title(\"Animal weight over time\")\n\t\t\tpyplot.ylabel(\"Animal Weight (g)\")\n\t\t\tax = fig.gca()\n\t\t\tmondays = WeekdayLocator(MONDAY, interval=2)\n\t\t\talldays = DayLocator()\n\t\t\tweekFormatter = DateFormatter('%b %d %y')\n\t\t\tax.xaxis.set_major_locator(mondays)\n\t\t\tax.xaxis.set_minor_locator(alldays)\n\t\t\tax.xaxis.set_major_formatter(weekFormatter)\n\t\t\tr = lambda: random.randint(0,255)\n\t\t\tax.plot_date(matplotlib.dates.date2num(dates), weights, '#%02X%02X%02X' % (r(),r(),r()), lw=2, label=str(animal))\n\t\t\tpyplot.axis(ymin=400, ymax=750)\n\t\t\tax.legend(loc='best')\n\t\t\tax.xaxis_date()\n\t\t\tax.autoscale_view()\n\t\t\tpyplot.setp(fig.gca().get_xticklabels(), rotation=35, horizontalalignment='right')\n\t\t\tpyplot.show()\n\n\tif parsed.a:\n\n\t\t#dict with animals ID strings as keys and a list of lists of the same length [[dates], [weights for those dates], [whether it was a weekend weight Boolean]]\n\t\tdata_for_graph = watcher.format_data_for_graph()\n\t\tfor animal in animals:\n\t\t\tdates = data_for_graph[animal][0]\n\t\t\tweights = data_for_graph[animal][1]\n\t\t\tfig = pyplot.figure(str(datetime.date.today()))\n\t\t\tpyplot.title(\"Animal weight over time\")\n\t\t\tpyplot.ylabel(\"Animal Weight (g)\")\n\t\t\tax = fig.gca()\n\t\t\tmondays = WeekdayLocator(MONDAY, interval=2)\n\t\t\talldays = DayLocator()\n\t\t\tweekFormatter = DateFormatter('%b %d 
%y')\n\t\t\tax.xaxis.set_major_locator(mondays)\n\t\t\tax.xaxis.set_minor_locator(alldays)\n\t\t\tax.xaxis.set_major_formatter(weekFormatter)\n\t\t\tr = lambda: random.randint(0,255)\n\t\t\tax.plot_date(matplotlib.dates.date2num(dates), weights, '#%02X%02X%02X' % (r(),r(),r()), lw=2, label=str(animal))\n\t\t\tpyplot.axis(ymin=400, ymax=750)\n\t\t\tax.legend(loc='best')\n\t\t\tax.xaxis_date()\n\t\t\tax.autoscale_view()\n\t\t\tpyplot.setp(fig.gca().get_xticklabels(), rotation=35, horizontalalignment='right')\n\t\tpyplot.show()\n\n\tif parsed.r:\n\n\t\tdata_for_graph = watcher.regression()\n\t\tfitted = pylab.polyfit(data_for_graph[0], data_for_graph[1], 1)\n\t\tline = pylab.polyval(fitted, data_for_graph[0])\n\t\tpylab.plot(data_for_graph[0], line)\n\t\tpylab.scatter(data_for_graph[0], data_for_graph[1])\n\t\tpylab.xlabel('Weekend (max) weight')\n\t\tpylab.ylabel('Avg Weekday Weight')\n\t\tpylab.show()\n\nif __name__ == '__main__':\n\tmain()\n\nclass Spreadsheet(object):\n\n\t'''\n\tAn instance of this class uses the gspread package (https://github.com/burnash/gspread)\n\tto communicate with the Google Docs API. This opens the first worksheet in a spreadsheet \n\tspecified in __init__ (i.e. sheet1 in 'Daily Weights after 7-11-13')\n\n\t'''\n\n\tdef __init__(self, username, password, spreadsheet_name='Daily Weights after 7-11-13', spreadsheet_url=None):\n\n\t\t'''\n\t\tparam username: A string, the user's Google Docs email (e.g. chapman@coxlab.org)\n\n\t\tparam password: A string, the user's password for Google Docs\n\n\t\tparam spreadsheet_name: A string, name of the spreadsheet from which you want data,\n\t\t\tas it appears in Google Docs (e.g. 
\"Daily Weights after 7-11-13\")\n\n\t\tparam spreadsheet_url: A string, the url for a Google Docs spreadsheet if you want to use a different one\n\n\t\t'''\n\t\tprint \"\\nLogging into Google Docs...\"\n\t\tself.login = gspread.login(username, password)\n\t\tprint \"Importing spreadsheet from Google Docs...\"\n\t\tif spreadsheet_url == None:\n\t\t\tself.worksheet_open = self.login.open(spreadsheet_name).sheet1\n\t\telse:\n\t\t\tself.worksheet_open = self.login.open_by_url(spreadsheet_url).sheet1\n\nclass WeightWatcher(object):\n\n\tdef __init__(self, username, password, animals, spreadsheet_name='Daily Weights after 7-11-13', \\\n\t\t\tspreadsheet_url=None):\n\t\t'''\n\t\tAn instance of the WeightWatcher class has a spreadsheet class attribute to \n\t\taccess Google Sheets data with animal weights. The WeightWatcher class \n\t\talso has methods to monitor and analyze animal weights.\n\n\t\tparam username: a string, login email for Google Docs\n\t\tparam password: a string, login password for Google Docs\n\t\tparam animals: a list, where each item in the list is an animal ID (str)\n\t\tparam spreadsheet_name (optional): a string, Name of spreadsheet you want to parse, \n\t\t\tdefault is currently the Cox lab shared sheet 'Daily Weights after 7-11-13'\n\t\tparam spreadsheet_url (optional): a string, url for a spreadsheet if you want to \n\t\t\tuse this instead of a sheet name or the default spreadsheet_name\n\t\t'''\n\t\t\n\t\t#self.data is a list of lists with all the spreadsheet data\n\t\t#e.g. nested list ['date/time', 'username@coxlab.org', 'animal ID', 'weight', 'after water? 
yes or no'] <--one row from spreadsheet\n\t\n\t\tself.data = Spreadsheet(username, password, spreadsheet_name, spreadsheet_url).worksheet_open.get_all_values()\n\t\tprint \"Successfully imported spreadsheet\\n\"\n\t\tself.animals_to_analyze = animals\n\t\tself.data_list_length = len(self.data)\n\t\n\tdef IsHeavyEnough(self, days=4):\n\t\t'''\n\t\t#go through last 4 weekday weights of each aninmal specified by user and make sure each day it weighs at least 90 \n\t\tpercent its most recent max weight\n\n\t\tparam self.animals_to_analyze should be a list of strings\n\n\t\t*Returns a dict with animal names (str) as keys and True as the value iff each of the last 4 weekdays \n\t\tit weighed enough*\n\t\t'''\n\n\t\t#================================================================================================================\n\n\t\t#get latest max weights from backwards spreadsheet (backwards so it starts looking for most recent data)\n\t\t#make dictionary to store animal names as keys and max weights as values\n\t\t#use data_position to remember where you are in the backwards (i.e. 
most recent) weights data during while loop\n\t\tmaxes = {}\n\t\tanimals_copy = self.animals_to_analyze[:]\n\t\tdata_position = 0\n\t\tbackwards_data = self.data[::-1]\n\t\t#do the following until we've gotten every animal's max weight\n\t\t#backwards_data[data_position[4] is overnight h20 column, \"yes\" means the comp has found a max weight\n\t\t#backwards_data[data_position][2] is animal ID in the spreadsheet, so the first boolean makes sure it's an animal \n\t\t#for which the user wants to verify the weight\n\n\t\twhile (len(animals_copy)) > 0 and (data_position < self.data_list_length):\n\t\t\tif (backwards_data[data_position][2] in animals_copy) and (\"yes\" in backwards_data[data_position][4]):\n\t\t\t\t#make sure there's an animal weight (not '-' or 'x' in position backwards_data[data_pos][4]\n\t\t\t\t#by trying to make the string an int; if there's an exception it's not a valid animal weight\n\t\t\t\ttry:\n\t\t\t\t\tanimal_weight = int(backwards_data[data_position][3])\n\t\t\t\t\t#if no exception, add key (animal ID as string) and value (weight as int) to maxes dict\n\t\t\t\t\tmaxes[backwards_data[data_position][2]] = animal_weight\n\t\t\t\t\tanimals_copy.remove(backwards_data[data_position][2])\n\t\t\t\texcept ValueError:\n\t\t\t\t\tpass #print \"ValueError at %s, skipping to next cell\" % data_position (used for testing)\n\t\t\tdata_position += 1\n\t\t\t\n\n\t\tprint '\\nMax weights: ' + str(maxes) + \"\\n\"\n\t\t#make sure all animal max weights were found\n\t\tif len(animals_copy) > 0:\n\t\t\traise Exception(\"Could not find max weight for: \" + str(animals_copy).strip('[]'))\n\n\t\t#================================================================================================================\n\n\t\t#get most recent 4 weekday weights for each animal\n\t\t#make mins dict to store animal ID (str) as keys and 4 weekday weights as values (a list of ints)\n\t\tdef DaysNeeded(animals_copy, days):\n\t\t\t'''\n\t\t\tReturns a dict with a starting value of 
days (4 default) (int) for each animal ID key (str) in animals_copy\n\t\t\tUsed in the while loop below to make it keep looping until each animal has at least 4 weekday weights \n\t\t\t'''\n\t\t\tdays_status = {}\n\t\t\tfor each in animals_copy:\n\t\t\t\tdays_status[each] = days\n\t\t\treturn days_status\n\n\t\tdef AllDaysRetrieved(DaysNeededDic):\n\t\t\t'''\n\t\t\tReturns a boolean to indicate whether EVERY animal has 4 weekday weights recorded, indicated by a value of 0 \n\t\t\tin countdown\n\t\t\t'''\n\t\t\tdict_values = DaysNeededDic.values()\n\t\t\tfor each in dict_values:\n\t\t\t\tif each > 0:\n\t\t\t\t\treturn False\n\t\t\treturn True\n\n\t\tdef MakeDictLists(animals_copy):\n\t\t\t'''\n\t\t\tmake an empty list as the value for each animal (key) in weekday_weights \n\t\t\t'''\n\t\t\tdictionary = {}\n\t\t\tfor each in animals_copy:\n\t\t\t\tdictionary[each] = []\n\t\t\treturn dictionary\n\n\t\tanimals_copy = self.animals_to_analyze[:]\n\t\t#default number of days (4) used below \"DaysNeeded(animals_copy, days) specified in WeightWatcher.IsHeavyEnough attributes\n\t\tcountdown = DaysNeeded(animals_copy, days)\n\t\tweekday_weights = MakeDictLists(animals_copy)\n\t\tdata_position = 0\n\t\t#check to see if every animal has 4 weekday weights before continuing in the while loop\n\t\twhile not (AllDaysRetrieved(countdown)) and (data_position < self.data_list_length):\n\t\t\t#do the following if the data position (row) is for an animal in self.animals_to_analyze and it's \n\t\t\t#a weekday weight (i.e. 
\"no\" in column 5 of the spreadsheet)\n\t\t\tif (backwards_data[data_position][2] in animals_copy) and (\"no\" in backwards_data[data_position][4]):\n\t\t\t\ttry:\n\t\t\t\t\tanimal_weight = int(backwards_data[data_position][3])\n\t\t\t\t\t\n\t\t\t\texcept ValueError:\n\t\t\t\t\tpass #print \"Couldn't get weight at %s, skipping to next cell\" % data_position\n\t\t\t\telse:\n\t\t\t\t\tif countdown[backwards_data[data_position][2]] > 0:\n\t\t\t\t\t\tweekday_weights[backwards_data[data_position][2]].append(animal_weight)\n\t\t\t\t\t\tcountdown[backwards_data[data_position][2]] -= 1\n\t\t\tdata_position += 1\n\n\t\tprint \"Latest weekday weights: \" + str(weekday_weights) + \"\\n\"\n\t\tif not AllDaysRetrieved(countdown):\n\t\t\traise Exception(\"Could not find weekly weight for all animals\")\n\n\t\t#================================================================================================================\n\n\t\t#make a dict with animal ID keys (str) and True or False values if the animal weighed more than 90% of \n\t\t#its max (weekend) weight or less, respectively. Days equal to 90% of its max make\n\t\t#the animal \"false\" in IsHeavyEnoughDict\n\n\t\tIsHeavyEnoughDict = {}\n\t\tfor animal in self.animals_to_analyze:\n\t\t\tfor each in weekday_weights[animal]:\n\t\t\t\tif float(each) > (0.9*(maxes[animal])):\n\t\t\t\t\tIsHeavyEnoughDict[animal] = True\n\t\t\t\telse:\n\t\t\t\t\tIsHeavyEnoughDict[animal] = False\n\t\t\t\t\tbreak\n\t\treturn IsHeavyEnoughDict\n\n\t#====================================================================================================================\n\t#====================================================================================================================\n\n\tdef format_data_for_graph(self):\n\t\t'''\n\t\tReturns a dict with animal IDs (str) as keys and a list of lists [[date objects list], [weights as ints list], \n\t\t\t [is_maxwgt list of Booleans]] as values.\n\t\te.g. 
{\"Q4\":[[dates], [weights]]}\n\t\t'''\n\t\tdef date_string_to_object(date_string):\n\t\t\t'''\n\t\t\tTakes in a date as a string from the spreadsheet (format 'month/day/year hrs:min:secs' or 'month/day/year')\n\t\t\tand returns that date as a date object from the datetime module\n\t\t\t'''\n\t\t\t#make splat, which is a list with date info e.g. ['month', 'day', 'year', 'hrs', 'min', 'sec']\n\t\t\t#makes date_obj, which is a python datetime object\n\t\t\tformatted = date_string.replace(\":\", \"/\").replace(\" \", \"/\")\n\t\t\tsplat = formatted.split(\"/\")\n\t\t\t#splat[2] is year, splat[0] is month, and splat[1] is day. This is the format required by datetime.date\n\t\t\tdate_obj = datetime.date(*(map(int, [splat[2], splat[0], splat[1]])))\n\t\t\treturn date_obj\n\t\t\t\n\t\tdata_copy = self.data[:]\n\t\tanimals = self.animals_to_analyze[:]\n\t\tgraph_dict = {}\n\t\t\n\t\tfor animal in animals:\n\t\t\tprint \"Getting data for %s\" % animal\n\t\t\tdata_position = 0\n\t\t\t#dates is a list of date objects\n\t\t\tdates = []\n\t\t\t#weights is a list of weights corresponding to the date objects above\n\t\t\tweights = []\n\t\t\t#maxweight is a list of true or false for whether each date/weight pair was max weight \"true\"/\"yes\"\n\t\t\t#or a normal weekly weight \"false\"/\"no\" in data_copy[data_position][4]\n\t\t\tis_maxwgt = []\n\t\t\twhile (data_position < self.data_list_length):\n\n\t\t\t\tif (data_copy[data_position][2] == animal):\n\t\t\t\t\ttry:\n\t\t\t\t\t\twgt = int(data_copy[data_position][3])\n\t\t\t\t\t\tweights.append(wgt)\n\t\t\t\t\texcept ValueError:\n\t\t\t\t\t\tpass #print \"Couldn't get weight at %s, skipping to next cell\" % data_position\n\t\t\t\t\t\t\t #used for testing\n\n\t\t\t\t\telse: \n\t\t\t\t\t\tdate = date_string_to_object(data_copy[data_position][0])\n\t\t\t\t\t\tdates.append(date)\n\t\t\t\t\t\tif \"yes\" in 
data_copy[data_position][4]:\n\t\t\t\t\t\t\tis_maxwgt.append(True)\n\t\t\t\t\t\telse:\n\t\t\t\t\t\t\tis_maxwgt.append(False)\n\n\t\t\t\tdata_position += 1\n\n\t\t\t#after it has gotten dates, weights, is_maxwgt for each animal, put that info in graph_dict with \n\t\t\t#animal ID as the key for your list of lists\n\t\t\tgraph_dict[animal] = [dates, weights, is_maxwgt]\n\t\treturn graph_dict\n\n\t#====================================================================================================================\n\t#====================================================================================================================\n\t#TODO test this method better, lots of confusing while loops here\n\tdef regression(self):\n\t\t'''\n\t\tReturns 2 lists in a tuple: a weekend weights list, and a list of average weights from the most recent 4 weekdays (during\n\t\t\twater reprivation) associated with those weekend weights. \n\t\t'''\n\n\t\tclass addAppend(object):\n\t\t\t'''\n\t\t\tA class the counts to 4 items in a list then averages those items, helps in a while loop below\n\t\t\t'''\n\n\t\t\tdef __init__(self):\n\t\t\t\tself.intList = []\n\t\t\t\tself.avg = False\n\n\t\t\tdef addInt(self, num):\n\t\t\t\tif len(self.intList) < 4:\n\t\t\t\t\tself.intList.append(num)\n\t\t\t\telif len(self.intList) == 4:\n\t\t\t\t\tsummed = sum(self.intList)\n\t\t\t\t\tself.avg = summed/4.0\n\t\t\t\telse:\n\t\t\t\t\tpass\n\n\t\tweekend_weights = []\n\t\tweekday_avgs = []\n\n\t\tdata_rev = self.data[::-1]\n\t\tanimals_copy = self.animals_to_analyze[:]\n\n\t\tfor animal in animals_copy:\n\t\t\tdata_position = 0\n\t\t\twhile (data_position < self.data_list_length):\n\t\t\t\tif (data_rev[data_position][2] == animal) and (\"yes\" in data_rev[data_position][4]):\n\t\t\t\t\tnew_position = data_position\n\t\t\t\t\tcount_four = addAppend()\n\t\t\t\t\tweekend_wgt = None\n\t\t\t\t\twhile not count_four.avg and (new_position < self.data_list_length):\n\t\t\t\t\t\tif (data_rev[new_position][2] == 
animal) and (\"no\" in data_rev[new_position][4]):\n\t\t\t\t\t\t\ttry:\n\t\t\t\t\t\t\t\tweekend_wgt = int(data_rev[data_position][3])\n\t\t\t\t\t\t\t\tweekday_wgt = int(data_rev[new_position][3])\n\t\t\t\t\t\t\texcept ValueError:\n\t\t\t\t\t\t\t\tpass\n\t\t\t\t\t\t\telse:\n\t\t\t\t\t\t\t\tcount_four.addInt(weekday_wgt)\n\t\t\t\t\t\tnew_position += 1\n\t\t\t\t\tif type(count_four.avg) is float:\n\t\t\t\t\t\tweekday_avgs.append(count_four.avg)\n\t\t\t\t\t\tweekend_weights.append(weekend_wgt)\n\t\t\t\tdata_position += 1\n\n\t\treturn (weekend_weights, weekday_avgs)\n\n\n\t#====================================================================================================================\n\t#====================================================================================================================\n\n\n\t\n\n\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2013,"string":"2,013"}}},{"rowIdx":41134,"cells":{"__id__":{"kind":"number","value":12309376319443,"string":"12,309,376,319,443"},"blob_id":{"kind":"string","value":"ffec9e1e0998545cbfa0519a33dda2f8e31cc070"},"directory_id":{"kind":"string","value":"3df60d3284be8fb481494a007b91f431efd43d92"},"path":{"kind":"string","value":"/src/adzone/managers.py"},"content_id":{"kind":"string","value":"79e819eabe181c967f9e97d7cf4ace8804f805e6"},"detected_licenses":{"kind":"list 
like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"bmeyer71/django-adzone"},"repo_url":{"kind":"string","value":"https://github.com/bmeyer71/django-adzone"},"snapshot_id":{"kind":"string","value":"445fdbfdd8e1ba019134f56d55ea22dc615a4705"},"revision_id":{"kind":"string","value":"87710dfaa5088587ff30d3d5262c2d1fb25798ea"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-20T23:41:30.020489","string":"2021-01-20T23:41:30.020489"},"revision_date":{"kind":"timestamp","value":"2013-05-05T01:16:59","string":"2013-05-05T01:16:59"},"committer_date":{"kind":"timestamp","value":"2013-05-05T01:16:59","string":"2013-05-05T01:16:59"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"from django.db import models\n\n\nclass AdManager(models.Manager):\n \"\"\" A Custom Manager for ads \"\"\"\n\n def get_random_ad(self, ad_zone, ad_category=None):\n \"\"\"\n Returns a random advert that belongs for the specified ``ad_category``\n and ``ad_zone``.\n If ``ad_category`` is None, the ad will be category independent.\n \"\"\"\n from adzone.models import AdImpression\n ad = None\n try:\n if ad_category:\n ads = self.get_query_set().filter(\n category__slug=ad_category,\n enabled=True,\n zone__slug=ad_zone).order_by('?')\n if ads != []:\n for item in ads:\n if item.impression_limit == 0:\n ad = item\n 
else:\n if AdImpression.objects.filter(ad_id=item.id).count() > item.impression_limit:\n ad = None\n continue\n else:\n ad = item\n break\n else:\n ads = self.get_query_set().filter(\n enabled=True,\n zone__slug=ad_zone).order_by('?')\n if ads != []:\n for item in ads:\n if item.impression_limit == 0:\n ad = item\n else:\n if AdImpression.objects.filter(ad_id=item.id).count() > item.impression_limit:\n ad = None\n continue\n else:\n ad = item\n break\n\n except IndexError:\n return None\n return ad\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2013,"string":"2,013"}}},{"rowIdx":41135,"cells":{"__id__":{"kind":"number","value":12043088329795,"string":"12,043,088,329,795"},"blob_id":{"kind":"string","value":"e00379aa471675f2b6d457f2facbdc6ac0aced20"},"directory_id":{"kind":"string","value":"99746a3d0596510a971fd18f5b79246a7a2c1656"},"path":{"kind":"string","value":"/old/admin.py"},"content_id":{"kind":"string","value":"8adc7da85c17c5d7242281ed80b9af86787b70cb"},"detected_licenses":{"kind":"list 
like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"amfarrell/20q"},"repo_url":{"kind":"string","value":"https://github.com/amfarrell/20q"},"snapshot_id":{"kind":"string","value":"76834df7b610d7984a5757021dc27f7f00b9fdfd"},"revision_id":{"kind":"string","value":"7a1c2cb47cd14ecf5499e64caaba8c255ae6f18e"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2020-05-18T04:19:00.648337","string":"2020-05-18T04:19:00.648337"},"revision_date":{"kind":"timestamp","value":"2010-04-30T19:04:13","string":"2010-04-30T19:04:13"},"committer_date":{"kind":"timestamp","value":"2010-04-30T19:04:13","string":"2010-04-30T19:04:13"},"github_id":{"kind":"number","value":612809,"string":"612,809"},"star_events_count":{"kind":"number","value":2,"string":"2"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"\nfrom django.contrib import admin\nfrom models import SurveyResult, 
SurveyResultLine\n\nadmin.site.register(SurveyResult)\nadmin.site.register(SurveyResultLine)\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2010,"string":"2,010"}}},{"rowIdx":41136,"cells":{"__id__":{"kind":"number","value":7610682060523,"string":"7,610,682,060,523"},"blob_id":{"kind":"string","value":"3c574ed476b853d6cf6c1350d92aea20c5e9ea5f"},"directory_id":{"kind":"string","value":"df6844af35fa8499d22f7917a950073f28dd6f15"},"path":{"kind":"string","value":"/SystemMain/auditec2.py"},"content_id":{"kind":"string","value":"1bc21ff35226a17273fdd80319bdd29a639956d7"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"Ipswitch/AWS_CI_Baseline"},"repo_url":{"kind":"string","value":"https://github.com/Ipswitch/AWS_CI_Baseline"},"snapshot_id":{"kind":"string","value":"bd9fa46666420c8dbf764d88a870cc33a1cc8d5c"},"revision_id":{"kind":"string","value":"0f206d8ed5d202c77603eae93cb24f2ae1ec828b"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2016-08-05T19:16:27.795976","string":"2016-08-05T19:16:27.795976"},"revision_date":{"kind":"timestamp","value":"2013-05-17T10:02:30","string":"2013-05-17T10:02:30"},"committer_date":{"kind":"timestamp","value":"2013-05-17T10:02:30","string":"2013-05-17T10:02:30"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks
_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"'''\nCreated on May 4, 2013\n\n@author: bob\n'''\nfrom audit import AuditBase\nfrom ec2 import EC2\nimport logging\nimport logging.config\n\n\nclass AuditEC2(AuditBase):\n '''\n classdocs\n '''\n def __init__(self, sts_connection, role_to_assume=None, role_session_name=None, \n region_name=None, sourceCM=None, sqliteCM=None, usesqlite_for_masterCM=False, \n backupCMs=None, logging_config=None):\n self._sts_connection = sts_connection\n self._role_to_assume = role_to_assume\n self._role_session_name=role_session_name\n self._region_name =region_name\n self._account=role_to_assume.split(\":\")[4]\n self._sourceCM=sourceCM\n self._backupCMs=backupCMs\n self._sqlite=sqliteCM\n self._usesqlite_for_masterCM=usesqlite_for_masterCM\n self.SetLogging(logging_config)\n self._d = {'aws_access_key_id': self._sts_connection._connection.access_key, \n 'role_assumed':self._role_to_assume, \n 'aws is secure': self._sts_connection._connection.is_secure, \n 'region': self._sts_connection._connection.region,\n 'aws_account':role_to_assume.split(\":\")[4], \n 'hostname':self.hostname,\n 'app_name':self.app_name,\n 'severity':'6'}\n\n\n \n def Start(self, BaselineUnixDate):\n changes=dict()\n #EC2 items\n eC2 = EC2(self._sts_connection, self._role_to_assume, self._role_session_name, self._region_name)\n ConfigurationItemClass = \"Amazon.EC2\"\n self._logger.debug(\"Starting Audit of : %s\" , ConfigurationItemClass, extra=self._d)\n Inst = eC2.ListAllAccountImages(self._account)\n ConfigurationItemName = \"AccountImages\"\n self._logger.debug(\"Starting Audit of %s.%s\",ConfigurationItemClass,ConfigurationItemName, extra=self._d)\n changes[ConfigurationItemClass + \".\" + ConfigurationItemName] = self.CheckCIBaseline(self._account, Inst, ConfigurationItemClass + \".\" + ConfigurationItemName, 
ConfigurationItemName, BaselineUnixDate)\n Inst = eC2.ListInstances()\n ConfigurationItemName = \"Instances\"\n self._logger.debug(\"Starting Audit of %s.%s\",ConfigurationItemClass,ConfigurationItemName, extra=self._d)\n changes[ConfigurationItemClass + \".\" + ConfigurationItemName] = self.CheckCIBaseline(self._account, Inst, ConfigurationItemClass + \".\" + ConfigurationItemName, ConfigurationItemName, BaselineUnixDate)\n Addr = eC2.ListAddresses()\n ConfigurationItemName = \"Addresses\"\n self._logger.debug(\"Starting Audit of %s.%s\",ConfigurationItemClass,ConfigurationItemName, extra=self._d)\n changes[ConfigurationItemClass + \".\" + ConfigurationItemName] = self.CheckCIBaseline(self._account, Addr, ConfigurationItemClass + \".\" + ConfigurationItemName, ConfigurationItemName, BaselineUnixDate)\n SGs = eC2.ListSecurityGroups()\n self._logger.debug(\"Starting Audit of %s.%s\",ConfigurationItemClass,ConfigurationItemName, extra=self._d)\n ConfigurationItemName = \"SecurityGroups\"\n changes[ConfigurationItemClass + \".\" + ConfigurationItemName] = self.CheckCIBaseline(self._account, SGs, ConfigurationItemClass + \".\" + ConfigurationItemName, ConfigurationItemName, BaselineUnixDate)\n NICs = eC2.ListNetworkInterfaces()\n ConfigurationItemName = \"NetworkInterfaces\"\n self._logger.debug(\"Starting Audit of %s.%s\",ConfigurationItemClass,ConfigurationItemName, extra=self._d)\n changes[ConfigurationItemClass + \".\" + ConfigurationItemName] = self.CheckCIBaseline(self._account, NICs, ConfigurationItemClass + \".\" + ConfigurationItemName, ConfigurationItemName, BaselineUnixDate)\n PGs = eC2.ListPlacementGroups()\n ConfigurationItemName = \"PlacementGroups\"\n self._logger.debug(\"Starting Audit of %s.%s\",ConfigurationItemClass,ConfigurationItemName, extra=self._d)\n changes[ConfigurationItemClass + \".\" + ConfigurationItemName] = self.CheckCIBaseline(self._account, PGs, ConfigurationItemClass + \".\" + ConfigurationItemName, ConfigurationItemName, 
BaselineUnixDate)\n RDs = eC2.ListRamdisks()\n ConfigurationItemName = \"Ramdisks\"\n self._logger.debug(\"Starting Audit of %s.%s\",ConfigurationItemClass,ConfigurationItemName, extra=self._d)\n changes[ConfigurationItemClass + \".\" + ConfigurationItemName] = self.CheckCIBaseline(self._account, RDs, ConfigurationItemClass + \".\" + ConfigurationItemName, ConfigurationItemName, BaselineUnixDate)\n Vols = eC2.ListVolumes()\n ConfigurationItemName = \"Volumes\"\n self._logger.debug(\"Starting Audit of %s.%s\",ConfigurationItemClass,ConfigurationItemName, extra=self._d)\n changes[ConfigurationItemClass + \".\" + ConfigurationItemName] = self.CheckCIBaseline(self._account, Vols, ConfigurationItemClass + \".\" + ConfigurationItemName, ConfigurationItemName, BaselineUnixDate)\n return changes\n "},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2013,"string":"2,013"}}},{"rowIdx":41137,"cells":{"__id__":{"kind":"number","value":18408229860347,"string":"18,408,229,860,347"},"blob_id":{"kind":"string","value":"8625b977bf5737293098102de29433ec3f4beb89"},"directory_id":{"kind":"string","value":"1b11cb65d3eee295d3f41d608b0e5dcdf4122d69"},"path":{"kind":"string","value":"/application/pages/validators.py"},"content_id":{"kind":"string","value":"c1cf95a56629599288232d629bacecbdf1896888"},"detected_licenses":{"kind":"list 
like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"mneudert/highball"},"repo_url":{"kind":"string","value":"https://github.com/mneudert/highball"},"snapshot_id":{"kind":"string","value":"582735da4747851b202bd37ba9c995eeef610c53"},"revision_id":{"kind":"string","value":"db9e832227c46c75b9e51c75ca72c2cee13fa00b"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2016-09-05T21:02:55.412142","string":"2016-09-05T21:02:55.412142"},"revision_date":{"kind":"timestamp","value":"2013-03-10T21:25:50","string":"2013-03-10T21:25:50"},"committer_date":{"kind":"timestamp","value":"2013-03-10T21:25:50","string":"2013-03-10T21:25:50"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"import re\n\nfrom django.core.validators import RegexValidator\n\n\nslashslug_re = re.compile(r'^[-a-zA-Z0-9_/]+$')\nvalidate_slashslug = RegexValidator(slashslug_re,\n (\"Enter a valid 'slug' consisting of\"\n \" letters, numbers, underscores, hyphens\"\n \" or slashes.\"),\n 
'invalid')"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2013,"string":"2,013"}}},{"rowIdx":41138,"cells":{"__id__":{"kind":"number","value":850403560285,"string":"850,403,560,285"},"blob_id":{"kind":"string","value":"edac32b6f695135ec2ca9a8fd4a3448b054d663e"},"directory_id":{"kind":"string","value":"80327c03358eea8905805df2158707eaf4454a78"},"path":{"kind":"string","value":"/dialogs/monitoringAdministration/fMenuAuthority_intr.py"},"content_id":{"kind":"string","value":"03c8cc72fa7824652f0fe900bfb8a716b01d8ab9"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"wisnu/BMMEnterprise"},"repo_url":{"kind":"string","value":"https://github.com/wisnu/BMMEnterprise"},"snapshot_id":{"kind":"string","value":"97a3389279ba7831057cb083848215fecd6034d9"},"revision_id":{"kind":"string","value":"e424dd756d21de3a8f7591ee200fca0133aaaf55"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2016-03-24T00:42:06.952722","string":"2016-03-24T00:42:06.952722"},"revision_date":{"kind":"timestamp","value":"2011-06-15T03:33:42","string":"2011-06-15T03:33:42"},"committer_date":{"kind":"timestamp","value":"2011-06-15T03:33:42","string":"2011-06-15T03:33:42"},"github_id":{"kind":"number","value":1898198,"string":"1,898,198"},"star_events_count":{"kind":"number","value":1,"string":"1"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"
gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"class fMenuAuthority:\r\n\r\n def __init__(self, formObj, parentForm):\r\n self.app = formObj.ClientApplication\r\n self.Lokasi_Source = ''\r\n #self.menulist.First()\r\n\r\n def Show(self, Lokasi_Source):\r\n self.Lokasi_Source = Lokasi_Source\r\n self.FormContainer.Show()\r\n\r\n def bSelect_Click (self, button):\r\n formname = 'monitoringAdministration/fMenuAuthority_Edit'\r\n Nama_Menu = self.menulist.menu_name\r\n ph = self.app.CreateValues(['Nama_Menu', Nama_Menu], ['Lokasi_Source', self.Lokasi_Source])\r\n editor = self.app.CreateForm(formname, formname, 0, ph, None)\r\n editor.FormContainer.Show()\r\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2011,"string":"2,011"}}},{"rowIdx":41139,"cells":{"__id__":{"kind":"number","value":15006615774119,"string":"15,006,615,774,119"},"blob_id":{"kind":"string","value":"29a387772cd32a032d2b1c7cd7ad14cb81e1f937"},"directory_id":{"kind":"string","value":"8aae4e3ee7943f9293085fd57b4867e06ab836b2"},"path":{"kind":"string","value":"/ProgrammingLanguage/Python/PP4E/Chapter2/teststreams.py"},"content_id":{"kind":"string","value":"e52c5c40d39046b3248e7843e6bc1a59d4f9cd61"},"detected_licenses":{"kind":"list 
like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"eboladev/Study"},"repo_url":{"kind":"string","value":"https://github.com/eboladev/Study"},"snapshot_id":{"kind":"string","value":"42213e73384788671deacd8d37004a571c95b7bb"},"revision_id":{"kind":"string","value":"388f3629e7651e41d2569be92d4f65c4c94cd512"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2020-12-25T22:47:03.624272","string":"2020-12-25T22:47:03.624272"},"revision_date":{"kind":"timestamp","value":"2014-10-24T12:10:13","string":"2014-10-24T12:10:13"},"committer_date":{"kind":"timestamp","value":"2014-10-24T12:10:13","string":"2014-10-24T12:10:13"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"#! 
/usr/bin/env python\n# -*- coding:utf-8 -*-\n\ndef interact():\n \"\"\"docstring for interact\"\"\"\n print 'Hello stream world'\n while True:\n try:\n reply = raw_input('Enter a number> ')\n except EOFError as e:\n break\n else:\n num = int(reply)\n print \"%d squared is %d\" %(num, num**2)\n print 'Bye'\n \nif __name__ == \"__main__\": interact()"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41140,"cells":{"__id__":{"kind":"number","value":1013612284214,"string":"1,013,612,284,214"},"blob_id":{"kind":"string","value":"3f0129f885dd20a365438f37d622985e3a0b0865"},"directory_id":{"kind":"string","value":"37df673e3501b3ee11bc5427f294855eaf0bd0dd"},"path":{"kind":"string","value":"/oxidizr/crawler.py"},"content_id":{"kind":"string","value":"a85a94c8ce77525e18cea1e007f55d4e41047364"},"detected_licenses":{"kind":"list like","value":["GPL-2.0-only"],"string":"[\n 
\"GPL-2.0-only\"\n]"},"license_type":{"kind":"string","value":"non_permissive"},"repo_name":{"kind":"string","value":"techunits/oxidizr"},"repo_url":{"kind":"string","value":"https://github.com/techunits/oxidizr"},"snapshot_id":{"kind":"string","value":"62250d441b841e3e1f25ffb02a4093f1e7f8e5c3"},"revision_id":{"kind":"string","value":"c7d9bd26ef4f6dbb8efbf175389930b6993b1160"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2020-12-26T05:02:21.484117","string":"2020-12-26T05:02:21.484117"},"revision_date":{"kind":"timestamp","value":"2014-10-30T19:59:27","string":"2014-10-30T19:59:27"},"committer_date":{"kind":"timestamp","value":"2014-10-30T19:59:27","string":"2014-10-30T19:59:27"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"#!/usr/bin/env python\n# -*- coding: utf-8 -*-\nfrom twisted.web.client import getPage\nfrom twisted.python.util import println\nfrom BeautifulSoup import BeautifulSoup\nfrom twisted.python import log\nfrom twisted.internet import defer, task, reactor\nimport re\nfrom urlparse import urlparse\n# Needs : PyOpenSSL and Twisted 12.3+\n \n\ndef sleep(secs):\n d = defer.Deferred()\n reactor.callLater(secs, d.callback, None)\n return d\n\n\ndef parallel(iterable, count, callable, *args, **named):\n coop = task.Cooperator()\n work = (callable(elem, *args, **named) for elem in iterable)\n return defer.DeferredList([coop.coiterate(work) 
for i in xrange(count)])\n\n\ndef union(p, q):\n for url in p:\n parsed = urlparse(url)\n if parsed.netloc and parsed.netloc != 'www.webhostingtalk.com':\n url = 'http://%s/' % parsed.netloc\n if url not in q:\n print url\n # q.append(url)\n\n\ndef extractLinks(html, url):\n print \"URL in extractLinks: \", url\n soup = BeautifulSoup(html)\n soup.prettify()\n return [str(anchor['href'])\n for anchor in soup.findAll('a', attrs={'href': re.compile(\"^http://\")}) if anchor['href']]\n \n\ndef crawlPage(url, urlList):\n sleep(10)\n d = getPage(url)\n d.addCallback(extractLinks, url)\n d.addCallback(union, urlList)\n d.addErrback(log.err)\n return d\n \n \n# def crawler(urls):\n# urls = list(urls)\n\n \ndef main(reactor, *args):\n urls = list(args)\n return parallel(urls, len(urls), crawlPage, urls)\n \n \nif __name__ == '__main__':\n import sys\n task.react(main, [\"http://www.webhostingtalk.com\"]) # Can pass a list of urls\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41141,"cells":{"__id__":{"kind":"number","value":10299331591064,"string":"10,299,331,591,064"},"blob_id":{"kind":"string","value":"8ec9db3e0695ac68cee5dff6616f0318a0e5832f"},"directory_id":{"kind":"string","value":"a24215dd8c072bb55890e8e232100b20d88b0583"},"path":{"kind":"string","value":"/deli/artist/base_artist.py"},"content_id":{"kind":"string","value":"8ae4efd8cf4e3ed71d644dbb1f5085047be50c0c"},"detected_licenses":{"kind":"list like","value":["LicenseRef-scancode-unknown-license-reference","BSD-3-Clause"],"string":"[\n \"LicenseRef-scancode-unknown-license-reference\",\n 
\"BSD-3-Clause\"\n]"},"license_type":{"kind":"string","value":"non_permissive"},"repo_name":{"kind":"string","value":"tonysyu/deli"},"repo_url":{"kind":"string","value":"https://github.com/tonysyu/deli"},"snapshot_id":{"kind":"string","value":"d98f05b3900229e47f0c0185ba40216beb43de83"},"revision_id":{"kind":"string","value":"ea71d49318e40dc7752d435db0ce31b133994c4f"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-10T20:19:22.104751","string":"2021-01-10T20:19:22.104751"},"revision_date":{"kind":"timestamp","value":"2014-10-20T04:17:45","string":"2014-10-20T04:17:45"},"committer_date":{"kind":"timestamp","value":"2014-10-20T04:17:45","string":"2014-10-20T04:17:45"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"\"\"\" Defines the base class for artists.\n\"\"\"\nfrom contextlib import contextmanager\n\nfrom traits.api import Instance, Property, Tuple\n\nfrom ..core.component import Component\nfrom ..layout.bounding_box import BoundingBox\nfrom ..layout.bbox_transform import BboxTransform\n\n\nclass BaseArtist(Component):\n \"\"\" Base class for all artists.\n\n Unlike styluses, artists contain the data that they render. 
Artists are\n simply specific types of plots: For example, line artists, marker artists,\n and bar artists, all operate on the same type of data, but those artist\n will render the data differently.\n\n Artists may use a few different styluses to compose a plot; for example,\n a box-and-whisker artist might have separate styluses to draw rectangles,\n error-bars (whiskers), and points (outliers).\n \"\"\"\n\n # -----------------------------------------------------------------------\n # Data-related traits\n # -----------------------------------------------------------------------\n\n #: The extents of the data (x_min, y_min, x_max, y_max)\n data_extents = Property(Tuple)\n\n #: Styluses associated with this artist.\n styluses = Property(Tuple)\n\n #: Bounding box for data in the graph. Note that this bounding box\n #: does not just describe the data in this artist; it's the currently\n #: displayed limits of the plot in data space.\n data_bbox = Instance(BoundingBox)\n\n #: Transform from data space to screen space.\n data_to_screen = Instance(BboxTransform)\n\n #: Transform from data space to screen space.\n screen_to_data = Property(Instance(BboxTransform),\n depends_on='data_to_screen')\n\n def _data_to_screen_default(self):\n return BboxTransform(self.data_bbox, self.screen_bbox)\n\n def _get_screen_to_data(self):\n return self.data_to_screen.inverted()\n\n # -------------------------------------------------------------------------\n # BaseArtist interface\n # -------------------------------------------------------------------------\n\n def _get_data_extents(self):\n msg = \"`BaseArtist` subclasses must implement `_get_data_extents`\"\n raise NotImplementedError(msg)\n\n def _container_changed(self):\n if self.container is not None:\n self.data_bbox = self.container.data_bbox\n self.screen_bbox = self.container.local_bbox\n\n @contextmanager\n def _clipped_context(self, gc):\n with gc:\n gc.clip_to_rect(*self.screen_bbox.rect)\n 
yield\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41142,"cells":{"__id__":{"kind":"number","value":18743237301253,"string":"18,743,237,301,253"},"blob_id":{"kind":"string","value":"d123429e7149cd1115f41c5989b82d3caa5bffd3"},"directory_id":{"kind":"string","value":"20dfad54026fb3d14630af9d45ffd20043de64f5"},"path":{"kind":"string","value":"/treasure/collection/forms.py"},"content_id":{"kind":"string","value":"879e183b1d040ab6a01514f104c1814e6f327b8e"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"nuty/treasure"},"repo_url":{"kind":"string","value":"https://github.com/nuty/treasure"},"snapshot_id":{"kind":"string","value":"7ad538bd340bb68f96f8efcfc41f87a1b71875d3"},"revision_id":{"kind":"string","value":"65b39f62bd0182afa88d5a7c2d787955a65ff3f0"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2016-04-03T01:49:03.340319","string":"2016-04-03T01:49:03.340319"},"revision_date":{"kind":"timestamp","value":"2014-03-21T06:40:38","string":"2014-03-21T06:40:38"},"committer_date":{"kind":"timestamp","value":"2014-03-21T06:40:38","string":"2014-03-21T06:40:38"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_a
rchived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"# -*- coding: utf-8 -*-\nimport re\nfrom models import Category, Treasure\n\nfrom wtforms.fields import HiddenField, TextField,\\\n IntegerField, SelectField, TextAreaField, BooleanField\nfrom wtforms.form import Form\nfrom wtforms.ext.sqlalchemy.fields import QuerySelectField\nfrom wtforms import validators\n\nfrom flask.ext.uploads import UploadSet, IMAGES\nfrom treasure.utils.tools import CKTextAreaField\n\nfrom flask_wtf.file import FileAllowed, FileRequired, FileField\n\nimages = UploadSet('images', IMAGES)\n\n\nclass GenFormSelect(object):\n STSTUS_DICTS = {\n '0': u\"N\",\n '1': u\"B\",\n '2': u\"M\",\n '3': u\"P\",\n '4': u\"S\",\n '5': u\"D\",\n }\n\n @classmethod\n def categories(cls):\n return Category.query.all()\n\n @classmethod\n def treasures(cls):\n return Treasure.query.all()\n\n @classmethod\n def status(cls):\n return iter(cls.STSTUS_DICTS.items())\n\n\nclass CategoryForm(Form):\n parent = QuerySelectField(\n query_factory=GenFormSelect.categories, label=u\"parent\", allow_blank=True)\n name = TextField(label=u\"name\")\n pic = HiddenField(label=u'pic')\n image_file = FileField(label=u\"image\")\n description = CKTextAreaField(label=u\"dic\")\n\n def validate_cover(form, field):\n if field.data:\n field.data = re.sub(r'[^a-z0-9_.-]', '_', field.data)\n\n\nclass TreasureForm(Form):\n category = QuerySelectField(\n query_factory=GenFormSelect.categories, label=u\"category\")\n name = TextField(label=u\"title\")\n status = SelectField(label=u\"status\", default=\"0\")\n brand = TextField(label=u\"brand\")\n code = TextField(label=u\"code\")\n origin = TextField(label=u\"origin\")\n period = TextField(label=u\"period\")\n viewpoint = TextAreaField(label=u\"viewpoint\")\n description = CKTextAreaField(label=u\"desc\")\n price = TextField(label=u\"price\")\n online = BooleanField(label=u\"line\")\n pick = BooleanField(label=u\"pick\")\n cover = 
HiddenField(label=u\"cover\")\n image_file = FileField(label=u\"img\", validators=[\n # FileRequired(),\n FileAllowed(['jpg', 'png'], 'Images only!')\n ])\n position = IntegerField(\n label=u\"position\", validators=[validators.optional()])\n detail = TextField(label=u\"detail\")\n story = CKTextAreaField(label=u\"story\")\n style = CKTextAreaField(label=u\"style\")\n\n def __init__(self, *args, **kwargs):\n super(TreasureForm, self).__init__(*args, **kwargs)\n self.status.choices = GenFormSelect.status()\n\n def validate_cover(form, field):\n if field.data:\n field.data = re.sub(r'[^a-z0-9_.-]', '_', field.data)\n \n\nclass PhotoForm(Form):\n treasure = QuerySelectField(\n query_factory=GenFormSelect.treasures, allow_blank=True, label=u\"treasure\")\n pic = HiddenField(label=u\"photo\")\n image_file = FileField(label=u\"image\")\n description = CKTextAreaField(label=u\"desc\")\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41143,"cells":{"__id__":{"kind":"number","value":14645838521534,"string":"14,645,838,521,534"},"blob_id":{"kind":"string","value":"fe67040be8cc086b6bf3df928c5cef4e9f6e5702"},"directory_id":{"kind":"string","value":"068f358de35f7e57ad969c635aff95a4b1d3b670"},"path":{"kind":"string","value":"/core/baseHeaders.py"},"content_id":{"kind":"string","value":"feb128576b056367e16c078de9a60490bc9c5ecf"},"detected_licenses":{"kind":"list like","value":["GPL-1.0-or-later","GPL-2.0-only"],"string":"[\n \"GPL-1.0-or-later\",\n 
\"GPL-2.0-only\"\n]"},"license_type":{"kind":"string","value":"non_permissive"},"repo_name":{"kind":"string","value":"BackupTheBerlios/pyhttpd-svn"},"repo_url":{"kind":"string","value":"https://github.com/BackupTheBerlios/pyhttpd-svn"},"snapshot_id":{"kind":"string","value":"39de2ac95ab22ff9c97a0ffdb879879c4ce213fb"},"revision_id":{"kind":"string","value":"7e9b81b4ceae67fc653f4c7083c3657ac295a841"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-22T19:13:51.006813","string":"2021-01-22T19:13:51.006813"},"revision_date":{"kind":"timestamp","value":"2006-03-22T08:56:44","string":"2006-03-22T08:56:44"},"committer_date":{"kind":"timestamp","value":"2006-03-22T08:56:44","string":"2006-03-22T08:56:44"},"github_id":{"kind":"number","value":40803887,"string":"40,803,887"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"# -*- coding: utf-8 -*-\n\n##################################################################\n#\tpyHTTPd\n#\t$Id$\n#\t(c) 2006 by Tim Taubert\n##################################################################\n\n'''\ndef parseHeaders(httpd):\n\tparseCookies(httpd)\n\t\ndef parseCookies(httpd):\n\tcookies = httpd.headers.getheader(\"cookie\")\n\tif cookies:\n\t\tcookies = cookies.split(\";\")\n\t\tfor cookie in cookies:\n\t\t\tname, value = cookie.strip().split(\"=\")\n\t\t\thttpd.cookies[name] = value\n\ndef parsePOSTData(httpd):\n\tclen = 
httpd.headers.getheader(\"content-length\")\n\thttpd.posttype = httpd.headers.getheader(\"content-type\")\n\tif clen:\n\t\thttpd.postdata = httpd.rfile.read(int(clen))\n\ndef parseGETData(httpd, data):\n\tparseValues(httpd, data)\n\ndef parseValues(data):\n\tpostdata = {}\n\tfields = data.split(\"&\")\n\tfor field in fields:\n\t\tprint field\n\t\tname, value = field.split(\"=\")\n\t\tpostdata[name] = value\n\treturn postdata\n'''"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2006,"string":"2,006"}}},{"rowIdx":41144,"cells":{"__id__":{"kind":"number","value":8203387545178,"string":"8,203,387,545,178"},"blob_id":{"kind":"string","value":"ab0e0fdeab16ac4a2bcf67ae1535744e842b4fc3"},"directory_id":{"kind":"string","value":"e867517068ade1572691ac86c6f2ad6596c0d559"},"path":{"kind":"string","value":"/film20/messages/models.py"},"content_id":{"kind":"string","value":"b80ccf10f14b6308f8439074778b17320221aedc"},"detected_licenses":{"kind":"list 
like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"manlan2/filmaster"},"repo_url":{"kind":"string","value":"https://github.com/manlan2/filmaster"},"snapshot_id":{"kind":"string","value":"044ec124d91da0b6dcf2eb5b8af5aec6f0fffd53"},"revision_id":{"kind":"string","value":"90b2bb72c2bab9dfea0c0837971a625bc6880630"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-05-26T22:24:55.012908","string":"2021-05-26T22:24:55.012908"},"revision_date":{"kind":"timestamp","value":"2012-05-27T09:30:37","string":"2012-05-27T09:30:37"},"committer_date":{"kind":"timestamp","value":"2012-05-27T09:30:37","string":"2012-05-27T09:30:37"},"github_id":{"kind":"number","value":107661541,"string":"107,661,541"},"star_events_count":{"kind":"number","value":1,"string":"1"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"bool","value":true,"string":"true"},"gha_event_created_at":{"kind":"timestamp","value":"2017-10-20T09:51:53","string":"2017-10-20T09:51:53"},"gha_created_at":{"kind":"timestamp","value":"2017-10-20T09:51:53","string":"2017-10-20T09:51:53"},"gha_updated_at":{"kind":"timestamp","value":"2017-10-20T09:51:53","string":"2017-10-20T09:51:53"},"gha_pushed_at":{"kind":"timestamp","value":"2012-05-27T09:38:13","string":"2012-05-27T09:38:13"},"gha_size":{"kind":"number","value":9548,"string":"9,548"},"gha_stargazers_count":{"kind":"number","value":0,"string":"0"},"gha_forks_count":{"kind":"number","value":0,"string":"0"},"gha_open_issues_count":{"kind":"number","value":0,"string":"0"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"#-------------------------------------------------------------------------------\n# Filmaster - a social web network and recommendation engine\n# Copyright (c) 2009 Filmaster (Borys 
Musielak, Adam Zielinski).\n# \n# This program is free software: you can redistribute it and/or modify\n# it under the terms of the GNU Affero General Public License as\n# published by the Free Software Foundation, either version 3 of the\n# License, or (at your option) any later version.\n# \n# This program is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU Affero General Public License for more details.\n# \n# You should have received a copy of the GNU Affero General Public License\n# along with this program. If not, see .\n#-------------------------------------------------------------------------------\nimport datetime\nfrom django.db import models\nfrom django.conf import settings\nfrom django.db.models import signals, get_app\nfrom django.core.exceptions import ImproperlyConfigured\nfrom django.contrib.auth.models import User\nfrom django.utils.translation import ugettext_lazy as _\nfrom django.core.urlresolvers import reverse\n\nfrom film20.utils import cache_helper as cache\nfrom film20.utils.db import QuerySet\n\nimport logging\nlogger = logging.getLogger(__name__)\n\nclass MessageManager(models.Manager):\n\n def inbox_for(self, user):\n \"\"\"\n Returns all messages that were received by the given user and are not\n marked as deleted.\n \"\"\"\n return self.filter(\n recipient=user,\n recipient_deleted_at__isnull=True,\n )\n\n def outbox_for(self, user):\n \"\"\"\n Returns all messages that were sent by the given user and are not\n marked as deleted.\n \"\"\"\n return self.filter(\n sender=user,\n sender_deleted_at__isnull=True,\n )\n\n def trash_for(self, user):\n \"\"\"\n Returns all messages that were either received or sent by the given\n user and are marked as deleted.\n \"\"\"\n return self.filter(\n recipient=user,\n recipient_deleted_at__isnull=False,\n ) | self.filter(\n sender=user,\n 
sender_deleted_at__isnull=False,\n )\n \nclass Message(models.Model):\n \"\"\"\n A private message from user to user\n \"\"\"\n subject = models.CharField(_(\"Subject\"), max_length=120)\n body = models.TextField(_(\"Body\"))\n sender = models.ForeignKey(User, related_name='sent_messages', verbose_name=_(\"Sender\"))\n recipient = models.ForeignKey(User, related_name='received_messages', null=True, blank=True, verbose_name=_(\"Recipient\"))\n parent_msg = models.ForeignKey('self', related_name='next_messages', null=True, blank=True, verbose_name=_(\"Parent message\"))\n conversation = models.ForeignKey('Conversation', related_name='messages', null=True, blank=True, verbose_name=_(\"Conversation\"))\n sent_at = models.DateTimeField(_(\"sent at\"), null=True, blank=True, auto_now_add=True)\n read_at = models.DateTimeField(_(\"read at\"), null=True, blank=True)\n replied_at = models.DateTimeField(_(\"replied at\"), null=True, blank=True)\n sender_deleted_at = models.DateTimeField(_(\"Sender deleted at\"), null=True, blank=True)\n recipient_deleted_at = models.DateTimeField(_(\"Recipient deleted at\"), null=True, blank=True)\n \n objects = MessageManager()\n \n def mark_as_read(self):\n if self.read_at is None:\n self.read_at = datetime.datetime.now()\n self.save()\n if self.conversation:\n # decrement conversation unread cnt for message recipient\n if self.recipient_id == self.conversation.recipient_id:\n self.conversation.recipient_unread_cnt = max(self.conversation.recipient_unread_cnt - 1, 0)\n if self.recipient_id == self.conversation.sender_id:\n self.conversation.sender_unread_cnt = max(self.conversation.sender_unread_cnt - 1, 0)\n self.conversation.save()\n\n def new(self):\n \"\"\"returns whether the recipient has read the message or not\"\"\"\n return self.read_at is None\n \n def replied(self):\n \"\"\"returns whether the recipient has written a reply to this message\"\"\"\n if self.replied_at is not None:\n return True\n return False\n \n def 
__unicode__(self):\n return self.subject\n \n def get_subject(self):\n return (self.subject or '').strip() or _('no subject')\n\n def get_absolute_url(self):\n return reverse('messages_view_conversation', args=[self.conversation_id]) + '#message_%s' % self.id\n\n def delete_by(self, user, update_conversation=True):\n now = datetime.datetime.now()\n c1 = user == self.sender and not self.sender_deleted_at\n c2 = user == self.recipient and not self.recipient_deleted_at\n if c1 or c2:\n if c1:\n self.sender_deleted_at = now\n if c2:\n self.recipient_deleted_at = now\n self.save()\n if update_conversation and self.conversation:\n self.conversation.inc_msg_cnt(user, -1)\n if not self.read_at:\n self.conversation.inc_unread_cnt(user, -1)\n self.conversation.save()\n return True\n\n def undelete_by(self, user, update_conversation=True):\n c1 = user == self.sender and self.sender_deleted_at\n c2 = user == self.recipient and self.recipient_deleted_at\n if c1 or c2:\n if c1:\n self.sender_deleted_at = None\n if c2:\n self.recipient_deleted_at = None\n self.save()\n if update_conversation and self.conversation:\n self.conversation.inc_msg_cnt(user, 1)\n if not self.read_at:\n self.conversation.inc_unread_cnt(user, 1)\n self.conversation.save()\n return True\n \n def fix(self, level=0):\n if not self.conversation_id:\n if self.parent_msg:\n # make sure parent is fixed\n self.parent_msg.fix(level+1)\n conversation = self.parent_msg.conversation\n conversation.sender_cnt += 1\n conversation.recipient_cnt += 1\n else:\n conversation = Conversation()\n conversation.sender_cnt = 1\n conversation.recipient_cnt = 1\n conversation.sender = self.sender\n conversation.recipient = self.recipient\n\n if self.read_at is None and self.recipient_deleted_at is None:\n conversation.inc_unread_cnt(self.recipient, 1)\n if self.replied_at is not None:\n conversation.is_replied = True\n\n conversation.subject = self.subject\n conversation.body = self.body\n conversation.last_sender = self.sender\n 
conversation.updated_at = self.sent_at\n conversation.save()\n\n self.conversation=conversation\n self.save()\n \n print ' '*level, \"FIXED\"\n\n @classmethod\n def fix_all(cls):\n while True:\n q=cls.objects.filter(conversation__isnull=True).order_by('sent_at')\n item = list(q[0:1])\n item = item and item[0]\n if not item:\n break\n item.fix()\n\n total = Conversation.objects.count()\n cnt = 0\n\n for c in Conversation.objects.all():\n last_msg = c.messages.order_by('-sent_at')\n last_msg = last_msg and last_msg[0]\n if last_msg:\n c.updated_at = last_msg.sent_at\n c.is_replied = bool(c.messages.filter(replied_at__isnull=False))\n c.save()\n cnt += 1\n print cnt, '/', total\n\n def save(self, force_insert=False, force_update=False):\n if not self.id:\n if self.parent_msg:\n conversation = self.parent_msg.conversation\n # parent_msg.conversation is not None only for new conversations\n if conversation:\n conversation.sender_cnt += 1\n conversation.recipient_cnt += 1\n if self.recipient_id == self.parent_msg.sender_id:\n self.parent_msg.replied_at = datetime.datetime.now()\n self.parent_msg.save()\n conversation.is_replied = True\n else:\n conversation = Conversation()\n conversation.sender_cnt = 1\n conversation.recipient_cnt = 1\n conversation.sender = self.sender\n conversation.recipient = self.recipient\n\n if conversation:\n conversation.inc_unread_cnt(self.recipient, 1)\n conversation.subject = self.subject\n conversation.body = self.body\n conversation.last_sender = self.sender\n conversation.updated_at = self.sent_at or datetime.datetime.now()\n conversation.save()\n self.conversation = conversation\n \n super(Message, self).save(force_insert, force_update) \n \n @classmethod\n def send(cls, sender, recipients, subject, body, parent_msg=None):\n message_list = []\n for r in recipients:\n msg = cls(\n sender = sender,\n recipient = r,\n subject = subject,\n body = body,\n parent_msg = parent_msg,\n sent_at = datetime.datetime.now()\n )\n msg.save()\n 
message_list.append(msg)\n if notification:\n replied = msg.parent_msg and (msg.recipient_id == msg.parent_msg.sender_id)\n if replied:\n notification.send([sender], \"messages_replied\", {'message': msg,})\n notification.send([r], \"messages_reply_received\", {'message': msg,}, priority=notification.PRIORITY_REALTIME)\n else:\n notification.send([sender], \"messages_sent\", {'message': msg,})\n notification.send([r], \"messages_received\", {'message': msg,}, priority=notification.PRIORITY_REALTIME)\n return message_list\n\n class Meta:\n ordering = ['-sent_at']\n verbose_name = _(\"Message\")\n verbose_name_plural = _(\"Messages\")\n\nclass ConversationQuerySet(QuerySet):\n def default_filter(self):\n return self.order_by('-updated_at')\n\n def user_conversations(self, user, replied=False):\n extra = replied and {'is_replied':True} or {}\n query = self.filter(models.Q(sender=user, sender_cnt__gt=0, **extra) | \\\n models.Q(recipient=user, recipient_cnt__gt=0))\n return query._clone(user=user)\n\n def unread_counter(self, user):\n key = cache.Key(\"conversation_unread_counter\", user.id)\n cnt = cache.get(key)\n if cnt is None:\n query = self.filter(models.Q(sender=user, sender_cnt__gt=0, sender_unread_cnt__gt=0) | \\\n models.Q(recipient=user, recipient_cnt__gt=0, recipient_unread_cnt__gt=0))\n cnt = query.distinct().count()\n cache.set(key, cnt)\n return cnt\n\n def iterator(self):\n items = super(ConversationQuerySet, self).iterator()\n def _fix(self, item):\n if hasattr(self, 'user'):\n item.user = self.user\n return item\n return (_fix(self, i) for i in items)\n \n def _clone(self, *args, **kw):\n ret = super(ConversationQuerySet, self)._clone(*args, **kw)\n if not hasattr(ret, 'user') and hasattr(self, 'user'):\n ret.user = self.user\n return ret\n \n\nclass Conversation(models.Model):\n sender = models.ForeignKey(User, related_name=\"sent_conversations\")\n recipient = models.ForeignKey(User, related_name=\"received_conversations\")\n\n last_sender = 
models.ForeignKey(User)\n subject = models.CharField(_(\"Subject\"), max_length=120)\n body = models.TextField(null=True, blank=True)\n \n created_at = models.DateTimeField(_(\"created at\"), auto_now_add=True)\n updated_at = models.DateTimeField(_(\"sent at\"), auto_now_add=True)\n\n sender_cnt = models.IntegerField(default=0, null=False, blank=False)\n recipient_cnt =models.IntegerField(default=0, null=False, blank=False)\n\n sender_unread_cnt = models.IntegerField(default=0, null=False, blank=False)\n recipient_unread_cnt =models.IntegerField(default=0, null=False, blank=False)\n \n is_replied = models.BooleanField(default=False)\n \n objects = ConversationQuerySet.as_manager()\n \n class Meta:\n ordering = ('-updated_at',)\n verbose_name = _(\"Conversation\")\n verbose_name_plural = _(\"Conversations\")\n\n def user_messages(self, user):\n return self.messages.exclude(sender_deleted_at__isnull=False, sender=user)\\\n .exclude(recipient_deleted_at__isnull=False, recipient=user)\\\n .order_by('sent_at')\n\n def delete_by(self, user):\n for msg in self.messages.all():\n msg.delete_by(user, update_conversation=False)\n if user == self.sender:\n self.sender_cnt = 0\n self.sender_unread_cnt = 0\n if user == self.recipient:\n self.recipient_cnt = 0\n self.recipient_unread_cnt = 0\n self.save()\n\n def undelete_by(self, user):\n cnt = 0\n unread_cnt = 0\n for msg in self.messages.all():\n undeleted = msg.undelete_by(user, update_conversation=False)\n cnt += 1\n unread_cnt += bool(undeleted and not msg.read_at)\n \n if user == self.sender:\n self.sender_cnt = cnt\n self.sender_unread_cnt += unread_cnt\n if user == self.recipient:\n self.recipient_cnt = cnt\n self.recipient_unread_cnt += unread_cnt\n self.save()\n \n def inc_msg_cnt(self, user, delta):\n if user == self.sender:\n self.sender_cnt = max(0, self.sender_cnt + delta)\n if user == self.recipient:\n self.recipient_cnt = max(0, self.recipient_cnt + delta)\n \n def inc_unread_cnt(self, user, delta):\n if user == 
self.sender:\n self.sender_unread_cnt = max(0, self.sender_unread_cnt + delta)\n if user == self.recipient:\n self.recipient_unread_cnt = max(0, self.recipient_unread_cnt + delta)\n\n def threaded_messages(self, user):\n cache_key = \"conversation_thread_%s\" % self.pk\n thread = cache.get(cache_key)\n if thread is not None:\n return thread\n query = self.messages.order_by('id')\n messages = {}\n root = None\n for msg in query:\n msg.children = []\n messages[msg.id] = msg \n if msg.parent_msg_id:\n parent = messages.get(msg.parent_msg_id)\n if parent:\n msg.level = parent.level + 1\n parent.children.append(msg)\n else:\n msg.level = 0\n root = msg\n\n\tdef traverse(root):\n\t yield root\n\t for c in root.children:\n\t for i in traverse(c):\n\t yield i\n\n def not_deleted(msg):\n return msg.sender_id == user.id and msg.sender_deleted_at is None or \\\n msg.recipient_id == user.id and msg.recipient_deleted_at is None\n \n thread = root and list(traverse(root)) or ()\n thread = filter(not_deleted, thread)\n cache.set(cache_key, thread)\n return thread\n \n @classmethod\n def invalidate_cache(cls, sender, instance, created, *args, **kw):\n cache.delete(\"conversation_thread_%s\" % instance.pk)\n key1 = cache.Key(\"conversation_unread_counter\", instance.sender_id)\n key2 = cache.Key(\"conversation_unread_counter\", instance.recipient_id)\n cache.delete(key1)\n cache.delete(key2)\n \n def is_read(self):\n assert self.user\n if self.user == self.sender:\n return not bool(self.sender_unread_cnt)\n else:\n return not bool(self.recipient_unread_cnt)\n \n def mark_read(self):\n assert self.user\n if self.user == self.sender:\n self.sender_unread_cnt = 0\n if self.user == self.recipient:\n self.recipient_unread_cnt = 0\n \n def cnt(self):\n assert self.user\n return self.sender_cnt if self.user == self.sender else self.recipient_cnt\n\n @models.permalink\n def get_absolute_url(self):\n return ('messages_view_conversation', [self.id])\n 
\nsignals.post_save.connect(Conversation.invalidate_cache, sender=Conversation)\n\nclass LazyUnreadCnt(object):\n def __get__(self, user, obj_type=None):\n if user.is_authenticated():\n if not hasattr(user, '_unread_cnt'):\n user._unread_cnt = Conversation.objects.unread_counter(user)\n return user._unread_cnt\n\nUser.add_to_class('unread_conversation_counter', LazyUnreadCnt())\n\n# fallback for email notification if django-notification could not be found\ntry:\n notification = get_app('notification')\nexcept ImproperlyConfigured:\n notification = None\n from messages.utils import new_message_email\n signals.post_save.connect(new_message_email, sender=Message)\n\ndef inbox_count_for(user):\n \"\"\"\n returns the number of unread messages for the given user but does not\n mark them seen\n \"\"\"\n return Message.objects.filter(recipient=user, read_at__isnull=True, recipient_deleted_at__isnull=True).count()\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2012,"string":"2,012"}}},{"rowIdx":41145,"cells":{"__id__":{"kind":"number","value":4423816337459,"string":"4,423,816,337,459"},"blob_id":{"kind":"string","value":"821c67341cf90aefffd5b690519e11cd8364eb50"},"directory_id":{"kind":"string","value":"5499d1dab4d2f0594384ce1a19213dbb8d0d7d43"},"path":{"kind":"string","value":"/sickbeard/providers/fanzub.py"},"content_id":{"kind":"string","value":"ee43e96651414d3fb07d14be2e5c5480ca3a8c32"},"detected_licenses":{"kind":"list like","value":["GPL-1.0-or-later","LicenseRef-scancode-warranty-disclaimer","GPL-3.0-only","GPL-3.0-or-later","LGPL-2.0-or-later","LGPL-2.1-or-later"],"string":"[\n \"GPL-1.0-or-later\",\n \"LicenseRef-scancode-warranty-disclaimer\",\n \"GPL-3.0-only\",\n \"GPL-3.0-or-later\",\n \"LGPL-2.0-or-later\",\n 
\"LGPL-2.1-or-later\"\n]"},"license_type":{"kind":"string","value":"non_permissive"},"repo_name":{"kind":"string","value":"keithbarrett/Sick-Beard-Animes"},"repo_url":{"kind":"string","value":"https://github.com/keithbarrett/Sick-Beard-Animes"},"snapshot_id":{"kind":"string","value":"7d912fd72c03f7d6464725ff2b11be50ff8ac337"},"revision_id":{"kind":"string","value":"732fe4f33121e8f04deaede9a4ac7d99547b1915"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-16T22:31:30.050481","string":"2021-01-16T22:31:30.050481"},"revision_date":{"kind":"timestamp","value":"2014-12-23T18:00:32","string":"2014-12-23T18:00:32"},"committer_date":{"kind":"timestamp","value":"2014-12-23T18:00:32","string":"2014-12-23T18:00:32"},"github_id":{"kind":"number","value":36258020,"string":"36,258,020"},"star_events_count":{"kind":"number","value":1,"string":"1"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"bool","value":true,"string":"true"},"gha_event_created_at":{"kind":"timestamp","value":"2015-05-25T22:32:29","string":"2015-05-25T22:32:29"},"gha_created_at":{"kind":"timestamp","value":"2015-05-25T22:32:29","string":"2015-05-25T22:32:29"},"gha_updated_at":{"kind":"timestamp","value":"2014-12-23T18:00:44","string":"2014-12-23T18:00:44"},"gha_pushed_at":{"kind":"timestamp","value":"2015-05-22T17:07:27","string":"2015-05-22T17:07:27"},"gha_size":{"kind":"number","value":12220,"string":"12,220"},"gha_stargazers_count":{"kind":"number","value":0,"string":"0"},"gha_forks_count":{"kind":"number","value":0,"string":"0"},"gha_open_issues_count":{"kind":"number","value":0,"string":"0"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"# Author: Nic Wolfe \r\n# URL: http://code.google.com/p/sickbeard/\r\n#\r\n# This file is part of Sick Beard.\r\n#\r\n# Sick Beard is free software: you can 
redistribute it and/or modify\r\n# it under the terms of the GNU General Public License as published by\r\n# the Free Software Foundation, either version 3 of the License, or\r\n# (at your option) any later version.\r\n#\r\n# Sick Beard is distributed in the hope that it will be useful,\r\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\r\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\r\n# GNU General Public License for more details.\r\n#\r\n# You should have received a copy of the GNU General Public License\r\n# along with Sick Beard. If not, see .\r\n\r\n\r\n\r\nimport urllib\r\nimport datetime\r\nimport time\r\n\r\nfrom xml.dom.minidom import parseString\r\n\r\nimport sickbeard\r\nimport generic\r\n\r\nfrom sickbeard import classes, show_name_helpers, helpers\r\n\r\nfrom sickbeard import exceptions, logger, db\r\nfrom sickbeard.common import *\r\nfrom sickbeard import tvcache\r\nfrom lib.dateutil.parser import parse as parseDate\r\n\r\nclass Fanzub(generic.NZBProvider):\r\n\r\n def __init__(self):\r\n\r\n generic.NZBProvider.__init__(self, \"Fanzub\")\r\n\r\n self.supportsBacklog = False\r\n self.description = u\"Only useful for anime.
Pseudo backlog support.\"\r\n self.supportsAbsoluteNumbering = True\r\n\r\n self.cache = FanzubCache(self)\r\n\r\n self.url = 'http://fanzub.com/'\r\n\r\n def isEnabled(self):\r\n return sickbeard.FANZUB\r\n\r\n def _checkAuth(self):\r\n return True\r\n\r\n def _get_season_search_strings(self, show, season, scene=False):\r\n names = []\r\n if season is -1:\r\n names = [show.name.encode('utf-8')]\r\n names.extend(show_name_helpers.makeSceneSeasonSearchString(show, season, scene=scene))\r\n return names\r\n\r\n def _get_episode_search_strings(self, ep_obj):\r\n # names = [(ep_obj.show.name + \" \" + str(ep_obj.absolute_number)).encode('utf-8')]\r\n names = show_name_helpers.makeSceneSearchString(ep_obj)\r\n return names\r\n\r\n def _doSearch(self, search_string, show=None):\r\n if show and not show.is_anime:\r\n logger.log(u\"\" + str(show.name) + \" is not an anime skiping \" + str(self.name))\r\n return []\r\n\r\n params = {\r\n \"cat\": \"anime\",\r\n \"q\": search_string.encode('utf-8'),\r\n \"max\": \"100\"\r\n }\r\n\r\n searchURL = self.url + \"rss?\" + urllib.urlencode(params)\r\n\r\n logger.log(u\"Search string: \" + searchURL, logger.DEBUG)\r\n\r\n searchResult = self.getURL(searchURL)\r\n\r\n # Pause to avoid 503's\r\n time.sleep(5)\r\n\r\n if searchResult == None:\r\n return []\r\n\r\n try:\r\n parsedXML = parseString(searchResult)\r\n items = parsedXML.getElementsByTagName('item')\r\n except Exception, e:\r\n logger.log(u\"Error trying to load FANZUB RSS feed: \" + str(e).decode('utf-8'), logger.ERROR)\r\n return []\r\n\r\n results = []\r\n\r\n for curItem in items:\r\n (title, url) = self._get_title_and_url(curItem)\r\n\r\n if not title or not url:\r\n logger.log(u\"The XML returned from the FANZUB RSS feed is incomplete, this result is unusable: \" + searchResult, logger.ERROR)\r\n continue\r\n\r\n url = url.replace('&amp;', '&')\r\n\r\n results.append(curItem)\r\n\r\n return results\r\n\r\nclass FanzubCache(tvcache.TVCache):\r\n\r\n def __init__(self, 
provider):\r\n\r\n tvcache.TVCache.__init__(self, provider)\r\n\r\n # only poll Fanzub every 20 minutes max\r\n # we get 100 post each call !\r\n self.minTime = 20\r\n\r\n def _getRSSData(self):\r\n url = self.provider.url + 'rss?'\r\n urlArgs = {\"cat\": \"anime\".encode('utf-8'),\r\n \"max\": \"100\".encode('utf-8')\r\n }\r\n\r\n url += urllib.urlencode(urlArgs)\r\n\r\n logger.log(u\"FANZUB cache update URL: \" + url, logger.DEBUG)\r\n\r\n data = self.provider.getURL(url)\r\n\r\n return data\r\n\r\n def _checkItemAuth(self, title, url):\r\n return True\r\n\r\n\r\nprovider = Fanzub()\r\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41146,"cells":{"__id__":{"kind":"number","value":10995116302941,"string":"10,995,116,302,941"},"blob_id":{"kind":"string","value":"d262d18ab6c3d1fc02cf4e76edb666edb399fc3f"},"directory_id":{"kind":"string","value":"4683fba0f2f6a2d5065a7de1b11a8a53ae6c46a0"},"path":{"kind":"string","value":"/velo-monitor/tests/utils.py"},"content_id":{"kind":"string","value":"6232b049df5c6698e7cee1362381943edeb77cf1"},"detected_licenses":{"kind":"list 
like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"suvayu/LHCbVeloView"},"repo_url":{"kind":"string","value":"https://github.com/suvayu/LHCbVeloView"},"snapshot_id":{"kind":"string","value":"bc42d1a76f56fe4f395a61880e206596bec2dfa8"},"revision_id":{"kind":"string","value":"c7fb34a7a6c81baa32cb33589fd6eb3931f0dd85"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2020-04-06T07:09:30.173842","string":"2020-04-06T07:09:30.173842"},"revision_date":{"kind":"timestamp","value":"2014-12-02T15:10:49","string":"2014-12-02T15:10:49"},"committer_date":{"kind":"timestamp","value":"2014-12-02T15:10:49","string":"2014-12-02T15:10:49"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"\"\"\"Utilities for testing the VELO monitor.\"\"\"\nfrom veloview.core import config as veloview_config\n\nRUNS = sorted(range(123987, 123995) + range(123960, 123975), reverse=True)\n\n\ndef set_up_run_list():\n \"\"\"Create a dummy run list file filled with RUNS.\"\"\"\n prlf = '/tmp/runList.txt'\n veloview_config.old_prlf = veloview_config.processed_run_list_file\n veloview_config.processed_run_list_file = prlf\n with open(prlf, \"w\") as f:\n for r in RUNS:\n f.write(\"{0}\\n\".format(r))\n\n\ndef tear_down_run_list():\n veloview_config.processed_run_list_file = 
veloview_config.old_prlf\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41147,"cells":{"__id__":{"kind":"number","value":19559281089660,"string":"19,559,281,089,660"},"blob_id":{"kind":"string","value":"8aae06e175b2a013b3852e13374aaed4b1ef5d88"},"directory_id":{"kind":"string","value":"f0199a19b273238d73080282d80654f0e57a9e3e"},"path":{"kind":"string","value":"/ovobot/production.py"},"content_id":{"kind":"string","value":"62e93c971aee1b86187bc3904e6a49d22480b134"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"mordaha/ovobot"},"repo_url":{"kind":"string","value":"https://github.com/mordaha/ovobot"},"snapshot_id":{"kind":"string","value":"38c29e8c8fd043bed229f9a9429a475d9cfe00dd"},"revision_id":{"kind":"string","value":"e197c6b523666b56770be731a689ad8a15b6b182"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2020-05-16T13:42:23.731142","string":"2020-05-16T13:42:23.731142"},"revision_date":{"kind":"timestamp","value":"2011-12-04T16:41:50","string":"2011-12-04T16:41:50"},"committer_date":{"kind":"timestamp","value":"2011-12-04T16:41:50","string":"2011-12-04T16:41:50"},"github_id":{"kind":"number","value":1776536,"string":"1,776,536"},"star_events_count":{"kind":"number","value":2,"string":"2"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{
"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"\nfrom ovobot.settings import *\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2011,"string":"2,011"}}},{"rowIdx":41148,"cells":{"__id__":{"kind":"number","value":5549097753820,"string":"5,549,097,753,820"},"blob_id":{"kind":"string","value":"4d5eaebfb51ec08e39faad3bc95cb0b4b78169b7"},"directory_id":{"kind":"string","value":"bfc874767de27c84f3b61b7b5d0b6a4ee1fefb7f"},"path":{"kind":"string","value":"/core/data/SpiderPendingResponsesDataModel.py"},"content_id":{"kind":"string","value":"ee88914eb6e087b42033ac300f2743446c2758c4"},"detected_licenses":{"kind":"list like","value":["GPL-3.0-only"],"string":"[\n \"GPL-3.0-only\"\n]"},"license_type":{"kind":"string","value":"non_permissive"},"repo_name":{"kind":"string","value":"pombreda/raft"},"repo_url":{"kind":"string","value":"https://github.com/pombreda/raft"},"snapshot_id":{"kind":"string","value":"294774b70d07fb4b7d57fac3ddb92e2681fb6a7f"},"revision_id":{"kind":"string","value":"c81c5778a8113e3c7095334ed91dc68352e5da5d"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-01T19:07:04.417738","string":"2021-01-01T19:07:04.417738"},"revision_date":{"kind":"timestamp","value":"2014-08-12T21:17:50","string":"2014-08-12T21:17:50"},"committer_date":{"kind":"timestamp","value":"2014-08-12T21:17:50","string":"2014-08-12T21:17:50"},"github_id":{"kind":"number","value":32209251,"string":"32,209,251"},"star_events_count":{"kind":"number","value":1,"string":"1"},"fork_events_count":{"kind":"number","value":1,"string":"1"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_create
d_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"#\n# This module supports the data model for the spider results\n#\n# Author: Gregory Fleischer (gfleischer@gmail.com)\n#\n# Copyright (c) 2011 RAFT Team\n#\n# This file is part of RAFT.\n#\n# RAFT is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n# \n# RAFT is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with RAFT. 
If not, see .\n#\n\nfrom core.data.DataTableDataModel import DataTableDataModel\nfrom core.database.constants import SpiderPendingResponsesTable\n\nclass SpiderPendingResponsesDataModel(DataTableDataModel):\n\n ITEM_DEFINITION = (\n ('#', SpiderPendingResponsesTable.RESPONSE_ID),\n ('Type', SpiderPendingResponsesTable.REQUEST_TYPE),\n ('Depth', SpiderPendingResponsesTable.DEPTH),\n ('Status', SpiderPendingResponsesTable.STATUS),\n )\n\n def __init__(self, framework, parent = None):\n DataTableDataModel.__init__(self, framework, SpiderPendingResponsesDataModel.ITEM_DEFINITION, parent)\n\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41149,"cells":{"__id__":{"kind":"number","value":8203387540094,"string":"8,203,387,540,094"},"blob_id":{"kind":"string","value":"bc994962d9c36a73a3f3a400ec54a3f72e8036c9"},"directory_id":{"kind":"string","value":"0d2c482f24e6770626f071871adb416231160670"},"path":{"kind":"string","value":"/myapp/mp4_parser.py"},"content_id":{"kind":"string","value":"e1b47a90bbcb681fe11b3dbb02db0f7b3a5ecdc5"},"detected_licenses":{"kind":"list 
like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"tomerf8/AdInsertionPy"},"repo_url":{"kind":"string","value":"https://github.com/tomerf8/AdInsertionPy"},"snapshot_id":{"kind":"string","value":"c0c956cc48b6cd505138d9648d5c72e4f7247984"},"revision_id":{"kind":"string","value":"617cb13c3abd06d7ea97052510e2f852819b1910"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-10T02:44:39.639851","string":"2021-01-10T02:44:39.639851"},"revision_date":{"kind":"timestamp","value":"2014-01-17T23:11:22","string":"2014-01-17T23:11:22"},"committer_date":{"kind":"timestamp","value":"2014-01-17T23:11:22","string":"2014-01-17T23:11:22"},"github_id":{"kind":"number","value":50796127,"string":"50,796,127"},"star_events_count":{"kind":"number","value":3,"string":"3"},"fork_events_count":{"kind":"number","value":1,"string":"1"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"\n'''\nCreated on Apr 26, 2013\n\n@author: mrklin\n'''\nfrom myutils import utils_class\nfrom mytools import tools_class\nimport os\nimport re\n\ntools = tools_class()\nutils = utils_class()\nad_path = '/home/mrklsin/Temp/mayo/'\nmedia_path = '/home/mrklin/Temp/car/' \n\n\ndef scan_mp4_files(path): \n # check for list in path\n output = commands.getoutput('ls '+path)\n file_list = output.split('\\n')\n \n # Scan files\n file_dict = {}\n for file_name in file_list:\n # make sure files are m4s\n if '.m4s' not in file_name:\n continue\n \n res = tools.get_mp4_data(path + 
file_name)\n # check file search of TFDT ok\n if res != {}:\n # Try to get segment number\n match = re.search(\".*?([0-9]+).m4s\",file_name)\n if match:\n segmnet_num = match.group(1)\n res['full_path'] = path + file_name\n res['file_name'] = file_name\n file_dict[int(segmnet_num)] = res \n \n return file_dict\n\ndef copy_tfdt_src_to_dst(src_data,dst_data): \n utils.debug_print('Updating TFDT from '+src_data['full_path']+' to '+dst_data['full_path'],'log')\n tools.change_mp4_data(src_data['full_path'],src_data['tfdt1'],dst_data['tfdt1'])\n tools.change_mp4_data(src_data['full_path'],src_data['tfdt2'],dst_data['tfdt2'])\n utils.debug_print('Done updating TFDT','log')\n\ndef insert_comercial_to(ad_dict, all_media_dict, segmnet_num):\n # Insert ad\n utils.debug_print('Updating TFDT of ad','log')\n if segmnet_num in all_media_dict:\n copy_tfdt_src_to_dst(ad_dict,all_media_dict[segmnet_num])\n \n while (segmnet_num in all_media_dict) :\n curr_file = all_media_dict[segmnet_num]\n # Calculate new offset of TFDT1/2\n new_tfdt1 = tools.make_tfdt_header(curr_file['tfdt1'],ad_dict['tfdt1_offset'])\n new_tfdt2 = tools.make_tfdt_header(curr_file['tfdt2'],ad_dict['tfdt2_offset'])\n # Update the files\n tools.change_mp4_data(curr_file['full_path'],curr_file['tfdt1'],new_tfdt1)\n tools.change_mp4_data(curr_file['full_path'],curr_file['tfdt2'],new_tfdt2)\n segmnet_num += 1 \n \n \n \n \n '''# update all following\n utils.debug_print('Updating TFDT of files after ad')\n while (segmnet_num in all_media_dict) and ((segmnet_num+1) in all_media_dict):\n copy_tfdt_src_to_dst(all_media_dict[segmnet_num], all_media_dict[segmnet_num+1])\n segmnet_num += 1\n \n utils.debug_print('Updating TFDT of last file') \n # Fix last segmnet'''\n \n\n\nimport commands\nif __name__ == '__main__':\n\n # Scan both dirs\n ad_dict = scan_mp4_files(ad_path)\n ad_dict[1]['tfdt1_offset'] = '192512'\n ad_dict[1]['tfdt2_offset'] = '109000'\n media_dict = scan_mp4_files(media_path)\n import pdb;pdb.set_trace()\n # 
Update the rest of the files\n insert_comercial_to(ad_dict[1], media_dict, 4)\n \n \n \n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41150,"cells":{"__id__":{"kind":"number","value":14104672625066,"string":"14,104,672,625,066"},"blob_id":{"kind":"string","value":"b59902498c1f08fbb4d548886e569e6e590466fe"},"directory_id":{"kind":"string","value":"099c4154c0d54c9687c4c8de38d63755568c2451"},"path":{"kind":"string","value":"/exercises/ex30.py"},"content_id":{"kind":"string","value":"2de8270de4b0d3bd19153f382173ab2cc5f2be49"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"suptaphilip/python_tutorials"},"repo_url":{"kind":"string","value":"https://github.com/suptaphilip/python_tutorials"},"snapshot_id":{"kind":"string","value":"c7cd57f575d76b0d5d56c10014241b628ff2b70e"},"revision_id":{"kind":"string","value":"2582a1eb52a48641f0ecf259d6e42680b591b776"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2020-04-05T19:04:08.626860","string":"2020-04-05T19:04:08.626860"},"revision_date":{"kind":"timestamp","value":"2013-11-14T23:54:25","string":"2013-11-14T23:54:25"},"committer_date":{"kind":"timestamp","value":"2013-11-14T23:54:25","string":"2013-11-14T23:54:25"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_
count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"#! usr/bin/env python\n\n#exercise 30: else and if\n\npeople = 30\ncars = 30\nbuses = 5\n\n#if the number of cars are greater than the number of people than print ...\nif cars > people:\n print \"We should take the cars.\"\n#other wise if the number of cars are less than people print...\nelif cars < people:\n print \"We should not take the cars.\"\n#other wise print ...\n# *notice the else statement is immediately followed by a colon.\nelse:\n print \"We can't decide.\"\n\nif buses > cars:\n print \"That's too many buses.\"\nelif buses < cars:\n print \"Maybe we could take the buses.\"\nelse:\n print \"We still can't decide.\"\n\nif people > buses:\n print \"Alright, let's just take the buses.\"\nelse:\n print \"Fine, let's stay home then.\"\n\n#extra credit:\nif cars > buses and people < cars:\n print \"There's no reason to drive.\"\nelif cars < buses and people > buses:\n print \"So maybe driving would be good?\"\nelif people <= cars and people > buses:\n print \"Let's take the bus!\"\nelse:\n print \"Driving is a last resort!\""},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2013,"string":"2,013"}}},{"rowIdx":41151,"cells":{"__id__":{"kind":"number","value":15599321260629,"string":"15,599,321,260,629"},"blob_id":{"kind":"string","value":"c63a15192d520e223c58d7a706f735504bf23429"},"directory_id":{"kind":"string","value":"06283ebd66870627aecc230c4aa8b9606ff7c4f5"},"path":{"kind":"string","value":"/hitranlbl/vss_query.py"},"content_id":{"kind":"string","value":"ec5ac9005d52cfed607539fdee6fd3f88983fe91"},"detected_licenses":{"kind":"list 
like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"xnx/www_hitran"},"repo_url":{"kind":"string","value":"https://github.com/xnx/www_hitran"},"snapshot_id":{"kind":"string","value":"c0e26a87134b7e2aac435008dde39ea4713ef633"},"revision_id":{"kind":"string","value":"023eb6aa75541b530330d13601be27aceed926de"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2015-08-01T23:56:22","string":"2015-08-01T23:56:22"},"revision_date":{"kind":"timestamp","value":"2013-01-18T12:39:20","string":"2013-01-18T12:39:20"},"committer_date":{"kind":"timestamp","value":"2013-01-18T12:39:20","string":"2013-01-18T12:39:20"},"github_id":{"kind":"number","value":5718406,"string":"5,718,406"},"star_events_count":{"kind":"number","value":1,"string":"1"},"fork_events_count":{"kind":"number","value":1,"string":"1"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"# -*- coding: utf-8 -*-\n# vss_query.py\n# Defines the class VSSQuery, representing a query made of the database\n# in the VSS query language, a subset of SQL.\n\nfrom caseless_dict import CaselessDict\nfrom string import lower\nfrom datetime import date\nimport sqlparse\nimport logging\nlog = logging.getLogger('vamdc.hitran_node')\nfrom tap_utils import get_base_URL, dquote\nfrom vamdc_standards import REQUESTABLES\nfrom dictionaries import restrictable_types\nfrom hitranmeta.models import Iso\nfrom xsams_queries import get_xsams_src_query, get_xsams_states_query,\\\n get_xsams_trans_query, 
get_xsams_trans_count_query,\\\n get_xsams_isos_count_query\n\nclass VSSQuery(object):\n \"\"\"\n A class representing the VSS query, with methods to parse and validate it.\n\n \"\"\"\n\n def __init__(self, request):\n self.is_valid = True\n self.error_message = ''\n try:\n self.request = CaselessDict(dict(request.REQUEST))\n except Exception, e:\n self.is_valid = False\n self.error_message = 'Failed to read argument dictionary: %s' % e\n log.error(self.error_message)\n if self.is_valid:\n self.parse_query()\n self.full_url = '%ssync?%s' % (get_base_URL(request),\n request.META.get('QUERY_STRING'))\n\n def parse_query(self):\n \"\"\" Parse and validate the query as VSS2. \"\"\"\n\n error_list = []\n\n # check LANG=VSS2\n try:\n self.lang = lower(self.request['LANG'])\n except:\n error_list.append('Couldn\\'t find LANG in request')\n else:\n if self.lang != 'vss2':\n error_list.append('Only LANG=VSS2 is supported')\n # get the QUERY string\n try:\n self.query = self.request['QUERY']\n except:\n error_list.append('Couldn\\'t find QUERY in request')\n # get the FORMAT\n try:\n self.format = lower(self.request['FORMAT'])\n except:\n error_list.append('Couldn\\'t find FORMAT in request')\n else:\n if self.format not in ('xsams', 'par'):\n error_list.append('Only XSAMS and PAR formats are supported')\n # parse the query\n try:\n self.parsed_sql = sqlparse.SQL.parseString(self.query,\n parseAll=True)\n except:\n # we failed to parse the query: bail with extreme prejudice\n error_list.append('Couldn\\'t parse the QUERY string: %s'\n % self.query)\n self.error_message = '\\n'.join(error_list)\n self.is_valid = False\n return\n\n self.requestables = set()\n self.where = self.parsed_sql.where\n if self.parsed_sql.columns not in ('*', 'ALL'):\n for requested in self.parsed_sql.columns:\n requested = lower(requested)\n if requested not in REQUESTABLES:\n self.error_list.append(\n 'Unsupported or unknown REQUESTABLE: %s' % requested)\n else:\n 
self.requestables.add(requested)\n\n if 'processes' in self.requestables:\n self.requestables.add('radiativetransitions')\n # always return sources\n self.requestables.add('sources')\n\n if error_list:\n # validation failed\n self.error_message = '\\n'.join(error_list)\n self.is_valid = False\n\n def __str__(self):\n \"\"\" Return a string representation of the query. \"\"\"\n return self.query\n\n def make_sql_queries(self):\n \"\"\"\n Turn the VSS query into a series of SQL queries on the database.\n The returned queries are in a dictionary, keyed by 'src_query',\n 'st_query', 't_query' for the sources query, the states query, and\n the transitions query respectively.\n\n \"\"\"\n\n if not self.where:\n return {}\n\n # parse the where clause into restrictions, joined by logic:\n logic, restrictions, count = sqlparse.splitWhere(self.where)\n # logic is e.g. ['r0', 'and', 'r1', 'and', '(', 'r2', 'or', 'r3', ')']\n # restrictions is a dictionary, keyed by '0', '1', ..., e.g.\n # {'1': ['RadTransWavenumber', '<', '6100.'],\n # '0': ['RadTransWavenumber', '>', '5000.'],\n # '2': ['MoleculeChemicalName', 'in', '(', \"'H2O'\", \"'Ammonia'\", ')']\n # ... 
}\n node_restrictions = {}\n for ri in restrictions:\n restrictable, op, s_rvals = (restrictions[ri][0],\n restrictions[ri][1],\n restrictions[ri][2:])\n # refer to all restrictables in lower case from here\n restrictable = restrictable.lower()\n if op not in sqlparse.OPTRANS.keys():\n raise Exception('Illegal or unsupported operator in'\n ' restriction: %s' % op)\n try:\n restrictable_type = restrictable_types[restrictable]\n except KeyError:\n raise Exception('Unknown RESTRICTABLE: %s' % restrictable)\n try:\n self.check_rvals_type(s_rvals, restrictable_type)\n except:\n raise Exception('Invalid value for restrictable %s: %s'\n % (restrictable, s_rvals))\n\n # translate the VAMDC restrictable keywords into the\n # appropriate of the hitranlbl_trans table in the HITRAN database\n # the hitranlbl_trans table must be aliased to 't'. Note that\n # node_restrictions[2] is *always a list*, unlike\n # restrictions[ri][2]\n if restrictable == 'radtranswavenumber':\n node_restrictions['r%s' % ri] = ['t.nu', op] + [s_rvals,]\n elif restrictable == 'radtranswavelength':\n op, s_nus = self.lambda_to_nu(op, s_rvals)\n node_restrictions['r%s' % ri] = ['t.nu', op] + [s_nus,]\n elif restrictable == 'radtransprobability':\n node_restrictions['r%s' % ri] = ['t.A', op] + [s_rvals,]\n elif restrictable in ('inchikey', 'moleculeinchikey'):\n op, s_iso_ids = self.get_isos_from_other(op, s_rvals,\n self.iso_from_inchikey)\n node_restrictions['r%s' % ri] = 't.iso_id', op, s_iso_ids\n elif restrictable == 'moleculestoichiometricformula':\n op, s_iso_ids = self.get_isos_from_other(op, s_rvals,\n self.iso_from_molec_stoich)\n node_restrictions['r%s' % ri] = 't.iso_id', op, s_iso_ids\n elif restrictable == 'moleculechemicalname':\n op, s_iso_ids = self.get_isos_from_other(op, s_rvals,\n self.iso_from_molec_name)\n node_restrictions['r%s' % ri] = 't.iso_id', op, s_iso_ids\n else:\n raise Exception('Unsupported or invalid restrictable keyword:'\n ' %s' % restrictable)\n\n # add restrictions 
on valid_to, valid_from dates:\n # XXX Hard-code these to the current date, because there's currently\n # no keyword (is there?) for valid_on date in the VAMDC standards.\n today = date.today().strftime('%Y-%m-%d')\n logic.extend(['and', 'r_valid_from', 'and', 'r_valid_to'])\n node_restrictions['r_valid_from'] = ['t.valid_from','<=',\n [dquote(today),]]\n node_restrictions['r_valid_to'] = ['t.valid_to','>',\n [dquote(today),]]\n\n q_where = []\n for x in logic:\n if x in node_restrictions.keys():\n q_where.append(self.make_sql_restriction(node_restrictions[x]))\n else:\n q_where.append(x)\n q_where = ' '.join(q_where)\n\n queries = {'src_query': get_xsams_src_query(q_where),\n 'st_query': get_xsams_states_query(q_where),\n 't_query': get_xsams_trans_query(q_where),\n 'tc_query': get_xsams_trans_count_query(q_where),\n 'ic_query': get_xsams_isos_count_query(q_where),\n }\n return queries\n\n def make_sql_restriction(self, node_restriction):\n \"\"\"\n Turn the node_restriction, a tuple of (field, operator, values) into\n the string representation of a valid SQL restriction.\n\n \"\"\"\n\n name, op, args = node_restriction\n\n if len(args) > 1:\n s_val = '(%s)' % ', '.join(args)\n else:\n s_val = args[0]\n return '%s %s %s' % (name, op, s_val)\n \n def lambda_to_nu(self, op, lambdas):\n \"\"\"\n Convert the arguments of a selection of wavelength (in Å) to the\n corresponding selection on wavenumber (in cm-1). Adjust the operator\n in the query fragment accordingly (e.g. '<' to '>').\n\n Arguments:\n op: the operator in the query fragment (e.g. '<', 'in', '>=', etc.)\n lambdas: a list of arguments (wavelengths) to the query fragment.\n Typically, just one value (for operators '<', '>', '=', '>=', '<=',\n etc.), but could be a list (e.g. 
for 'in' operator).\n\n Returns:\n a tuple (op, ret_list) of op, the new operator applying to the query\n fragment on wavenumber and ret_list, a list of wavenumber values\n corresponding to the query.\n\n \"\"\"\n\n nu_list = []\n has_parentheses = False\n if lambdas[0] == '(' and lambdas[-1] == ')':\n has_parentheses = True\n for lamda in lambdas:\n if lamda in ('(', ')'):\n continue\n try:\n # lambda in Å to nu in cm-1\n nu = 1.e8/float(lamda)\n print '%f A = %f cm-1' % (float(lamda), nu)\n except ZeroDivisionError:\n # set nu to something huge if lambda = 0\n nu = 1.e20\n nu_list.append(str(nu))\n \n op = sqlparse.reverse_op(op)\n\n if not has_parentheses:\n if len(nu_list) > 1:\n raise Exception('Invalid argument to RadTransWavelength: %s'\n % lambdas)\n else:\n return op, nu_list\n #return op, '(%s)' % (', '.join(nu_list),)\n ret_list = ['(',]\n ret_list.extend(nu_list)\n ret_list.append(')')\n return op, ret_list\n\n def iso_from_inchikey(self, inchikey):\n \"\"\"\n Return a list of isotopologue IDs matching the provided InChIKey.\n\n \"\"\"\n return Iso.objects.filter(InChIKey=\n inchikey).values_list('id', flat=True)\n\n def iso_from_molec_stoich(self, stoichiometric_formula):\n \"\"\"\n Return a list of isotopologue IDs matching the provided \n molecular (ie isotope-independent) stoichiometric formula.\n\n \"\"\"\n return Iso.objects.filter(molecule__stoichiometric_formula=\n stoichiometric_formula).values_list('id', flat=True)\n\n def iso_from_molec_name(self, name):\n \"\"\"\n Return a list of isotopologue IDs matching the provided \n molecule name (ie common chemical name).\n\n \"\"\"\n return Iso.objects.filter(molecule__moleculename__name=\n name).values_list('id', flat=True)\n\n def get_isos_from_other(self, op, s_rvals, isos_get_method):\n \"\"\"\n Return a string of requested isotopologue IDs corresponding to the\n requested list of s_rvals, using the method specified by\n isos_get_method.\n\n \"\"\"\n iso_ids = []\n has_parentheses = False\n if 
s_rvals[0] == '(' and s_rvals[-1] == ')':\n has_parentheses = True\n for s_rval in s_rvals:\n s_rval = s_rval.strip('\"\\'') # strip all outside quotes, \" and '\n if s_rval in ('(', ')'):\n continue\n iso_id_list = isos_get_method(s_rval)\n iso_ids.extend([str(iso_id) for iso_id in iso_id_list])\n if not iso_ids:\n # we didn't find any isotopologues matching the requested InChIKeys\n return op, ['(-1)',]\n if not has_parentheses:\n if len(iso_ids) > 1:\n # a single e.g. molecular stoichiometric formula maps to more\n # than one isotopologue, so generalise the operator\n if op == '=':\n op = 'in'\n elif op == '<>':\n op = 'not in'\n else:\n return op, iso_ids\n return op, iso_ids\n\n def check_rvals_type(self, s_rvals, rtype):\n \"\"\"\n Check that s_rvals corresponds to a list of strings which can be\n legitimately cast into their correct types.\n\n \"\"\"\n\n if rtype == str:\n # s_rvals is already a list of strings!\n return\n for s_rval in s_rvals:\n if s_rval in ('(', ')'):\n # skip the parentheses\n continue\n try:\n rval = rtype(s_rval)\n except:\n raise\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2013,"string":"2,013"}}},{"rowIdx":41152,"cells":{"__id__":{"kind":"number","value":14645838518007,"string":"14,645,838,518,007"},"blob_id":{"kind":"string","value":"efbfaaba1496f55df82627f0d32d7954501a2c5f"},"directory_id":{"kind":"string","value":"cf991f5b59a1ac384bf98f599a4e0e836ee58a26"},"path":{"kind":"string","value":"/fabfile.py"},"content_id":{"kind":"string","value":"2a205afabfbddbd7a2707d1416395ab4f3b30aac"},"detected_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"license_type":{"kind":"string","value":"permissive"},"repo_name":{"kind":"string","value":"DjangoLover/gitality"},"repo_url":{"kind":"string","value":"https://github.com/DjangoLover/gitality"},"snapshot_id":{"kind":"string","value":"ba932c2966dda6cf8352059c3ce3de11197420d8"},"revision_id":{"kind":"string","value":"8cb063582c113c147ad04f83899c9ee3686a93ab"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2020-04-09T01:55:46.515025","string":"2020-04-09T01:55:46.515025"},"revision_date":{"kind":"timestamp","value":"2013-09-29T22:12:10","string":"2013-09-29T22:12:10"},"committer_date":{"kind":"timestamp","value":"2013-09-29T22:12:10","string":"2013-09-29T22:12:10"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"from ConfigParser import RawConfigParser\n\nfrom fabric.api import cd, env, run, shell_env\nfrom fabric.colors import green\nfrom fabric.contrib.files import exists\nfrom fabric.utils import puts\n\n\nconfig = RawConfigParser()\n\nwith open('deploy/config.ini') as f:\n config.readfp(f)\n\nenv.host_string = config.get('fabric_env', 'host_string')\n\nenv.project_name = config.get('fabric_env', 'project_name')\nenv.project_db_name = env.project_name\n\nenv.project_home = config.get('fabric_env', 'project_home')\nenv.project_root_dirname = config.get('fabric_env', 'project_root_dirname')\nenv.project_root = 
'{0.project_home}/{0.project_root_dirname}'.format(env)\n\nenv.virtualenv_home = config.get('fabric_env', 'virtualenv_home')\nenv.virtualenv_root = '{0.virtualenv_home}/{0.project_name}'.format(env)\nenv.virtualenv_activate_command = 'source {.virtualenv_root}/bin/activate'.format(env)\n\nenv.site_down_file = '.down'\nenv.touch_reload_file = '.reload'\n\nenv.git_repository = config.get('fabric_env', 'git_repository')\n\n\ndef prun(command):\n \"\"\"\n Runs command from project root directoy.\n \"\"\"\n with cd(env.project_root), shell_env(WORKON_HOME=env.virtualenv_home):\n run(command)\n\n\ndef make(target):\n \"\"\"\n Invokes Makefile target.\n \"\"\"\n prun('make {}'.format(target))\n\n\ndef supervisorctl(action, program='', options=''):\n run('supervisorctl {0} {1} {2}'.format(action, options, program))\n\n\ndef site_down():\n prun('touch {}'.format(env.site_down_file))\n puts(green('Site is down for maintenance'))\n\n\ndef site_up():\n prun('rm {}'.format(env.site_down_file))\n puts(green('Site is up and running'))\n\n\ndef touch_reload():\n \"\"\"\n uWSGI touch reload\n \"\"\"\n prun('touch {}'.format(env.touch_reload_file))\n\n\ndef git_clone():\n with cd(env.project_home):\n if exists(env.project_root_dirname):\n run('rm -rf {.project_root_dirname}'.format(env))\n run('git clone -q {0.git_repository} {0.project_root_dirname}'.format(env))\n\n\ndef git_pull():\n prun('git pull -q')\n\n\ndef bootstrap():\n \"\"\"\n Bootstraps project for the first time.\n \"\"\"\n\n git_clone()\n\n make('bootstrap')\n make('settings_production')\n make('requirements')\n make('db_production')\n make('collectstatic')\n\n\ndef deploy():\n \"\"\"\n Deploys updated project.\n \"\"\"\n\n site_down()\n\n git_pull()\n\n make('requirements')\n make('syncdb')\n make('migrate')\n make('seed_production')\n make('collectstatic')\n\n touch_reload()\n\n supervisorctl('restart', 'celery')\n\n 
site_up()\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":true,"string":"true"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2013,"string":"2,013"}}},{"rowIdx":41153,"cells":{"__id__":{"kind":"number","value":6897717515379,"string":"6,897,717,515,379"},"blob_id":{"kind":"string","value":"265294fb22dc321c31b2862bf4a4f737a4db2417"},"directory_id":{"kind":"string","value":"ec7f01918700ea39115446a323cd76ad23708a29"},"path":{"kind":"string","value":"/src/Server/controllers/basecontroller.py"},"content_id":{"kind":"string","value":"cd1af7a3a2b46bdbbd542bc734c28261ff9a9344"},"detected_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"license_type":{"kind":"string","value":"permissive"},"repo_name":{"kind":"string","value":"SkyLapse/DMS"},"repo_url":{"kind":"string","value":"https://github.com/SkyLapse/DMS"},"snapshot_id":{"kind":"string","value":"f218f591059c13596e34b2e9f45c81f4f51e5741"},"revision_id":{"kind":"string","value":"a8906498e2801f7f022d7d2d72d90ba2be6f0d21"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-04T14:06:48.640996","string":"2021-01-04T14:06:48.640996"},"revision_date":{"kind":"timestamp","value":"2014-05-26T16:46:17","string":"2014-05-26T16:46:17"},"committer_date":{"kind":"timestamp","value":"2014-05-26T16:46:17","string":"2014-05-26T16:46:17"},"github_id":{"kind":"number","value":16044209,"string":"16,044,209"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":1,"string":"1"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open
_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"__author__ = 'SkyLapse'\n\nfrom abc import abstractmethod\nfrom flask import current_app\nfrom flask.ext import restful\n\n\nclass BaseController(restful.Resource):\n def __init__(self):\n self.app = current_app\n pass\n\n @abstractmethod\n def get(self, id=None):\n pass"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41154,"cells":{"__id__":{"kind":"number","value":2671469663261,"string":"2,671,469,663,261"},"blob_id":{"kind":"string","value":"f767e7f6546ae461e58f43c48d5bf0438b8e6dd7"},"directory_id":{"kind":"string","value":"e147f9ff39e19d4d3bb035ce0b466fea3e247477"},"path":{"kind":"string","value":"/src/edge.py"},"content_id":{"kind":"string","value":"eb80fc6d597df5db7f580b11553faa862d0e9fd1"},"detected_licenses":{"kind":"list 
like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"yatsek/mindmaps-qt"},"repo_url":{"kind":"string","value":"https://github.com/yatsek/mindmaps-qt"},"snapshot_id":{"kind":"string","value":"e737e417ca88b48df763e2cd345e983eeab05c6d"},"revision_id":{"kind":"string","value":"bf93dc2731c26bab2782aba62ab2ec4ff2b927c0"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-01T16:39:47.162525","string":"2021-01-01T16:39:47.162525"},"revision_date":{"kind":"timestamp","value":"2011-02-06T23:51:41","string":"2011-02-06T23:51:41"},"committer_date":{"kind":"timestamp","value":"2011-02-06T23:51:41","string":"2011-02-06T23:51:41"},"github_id":{"kind":"number","value":37589605,"string":"37,589,605"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"from PyQt4.QtCore import *\nfrom PyQt4.QtGui import *\nfrom math import sin,cos,pi,acos\n\nTwoPi = pi*2\n\nclass Edge(QGraphicsItem):\n\t\"\"\"Overrid of QGraphicsItem to handle\n\t drawing of edge connecting nodes\"\"\"\n\tdef __init__(self,sourceNode, destNode,visible=True):\n\t\t\"\"\"Constructor which connects nodes \n\t\t 
together\"\"\"\n\t\tsuper(Edge,self).__init__()\n\t\tself.sourcePoint=None\n\t\tself.destPoint=None\t\n\t\tself.setFlags(self.ItemIsSelectable)\n\t\tself.setAcceptedMouseButtons(Qt.NoButton)\n\t\tself.source=sourceNode\n\t\tself.dest=destNode\n\t\tself.source.addEdge(self)\n\t\tself.dest.addEdge(self)\n\t\tself.visible=visible\n\t\tself.adjust()\n\t\tself.setZValue(-1)\n\n\tdef sourceNode(self):\n\t\t\"\"\"returns the source node\"\"\"\n\t\treturn self.source\n\n\tdef setSourceNode(self,node):\n\t\t\"\"\" sets the source node\"\"\"\n\t\tself.source=node\n\t\tadjust()\n\n\tdef destNode(self):\n\t\t\"\"\"returns destination node\"\"\"\n\t\treturn self.dest\n\n\tdef setDestNode(self,node):\n\t\t\"\"\"sets destination node\"\"\"\n\t\tself.dest=node\n\t\tadjust()\n\n\tdef adjust(self):\n\t\t\"\"\"Calculates new position of end points\n\t\t based on node positions\"\"\"\n\t\tif not self.source or not self.dest:\n\t\t\treturn\n\t\tsrcCenter=self.source.ellipsisCenter()\n\t\tdstCenter=self.dest.ellipsisCenter()\n\t\tline=QLineF(self.mapFromItem(self.source,srcCenter.x(),srcCenter.y()), \\\n\t\t\t\tself.mapFromItem(self.dest,dstCenter.x(),dstCenter.y()))\n\t\tlength = line.length()\n\n\t\tself.prepareGeometryChange()\n\n\t\tif length > 20.0:\n\t\t\tedgeOffset=QPointF((line.dx()*10)/length,(line.dy()*10)/length)\n\t\t\tself.sourcePoint = line.p1() + edgeOffset\n\t\t\tself.destPoint = line.p2() - edgeOffset\n\t\telse:\n\t\t\tself.sourcePoint = self.destPoint = line.p1()\n\n\tdef boundingRect(self):\n\t\t\"\"\"Sets bounding rectangle of a scene\"\"\"\n\t\tif not self.source or not self.dest:\n\t\t\treturn QRectF()\n\t\treturn QRectF(self.sourcePoint, QSizeF(self.destPoint.x() - self.sourcePoint.x(),self.destPoint.y() - self.sourcePoint.y())).normalized()\n\n\tdef paint(self,painter, option=None, widget=None):\n\t\t\"\"\"Paint edge on a scene\"\"\"\n\t\tif not self.source or not self.dest:\n\t\t\treturn\n\t\tline=QLineF(self.sourcePoint,self.destPoint)\n\t\tif line.length() == 
0.0:\n\t\t\treturn\n\t\t#draw the line itself\n\t\tcolor=QColor(Qt.black)\n\t\tif not self.visible: color=QColor(Qt.green)\n\t\tpainter.setPen(QPen(color,1,Qt.SolidLine,Qt.RoundCap,Qt.RoundJoin))\n\t\tpainter.drawLine(line)\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2011,"string":"2,011"}}},{"rowIdx":41155,"cells":{"__id__":{"kind":"number","value":5858335434780,"string":"5,858,335,434,780"},"blob_id":{"kind":"string","value":"9b7c2abad406df41fd7926bc94e32aec97fbdc1b"},"directory_id":{"kind":"string","value":"dc13636c35adefbf1579c93705a155781c071d5c"},"path":{"kind":"string","value":"/app/managers.py"},"content_id":{"kind":"string","value":"a02d12c7f9098f01a351e5dc812c4a6234c5c8df"},"detected_licenses":{"kind":"list like","value":["GPL-3.0-only","GPL-3.0-or-later"],"string":"[\n \"GPL-3.0-only\",\n 
\"GPL-3.0-or-later\"\n]"},"license_type":{"kind":"string","value":"non_permissive"},"repo_name":{"kind":"string","value":"rosix-ru/barbaris"},"repo_url":{"kind":"string","value":"https://github.com/rosix-ru/barbaris"},"snapshot_id":{"kind":"string","value":"289047d19a6712d54210190498958425f5de94f0"},"revision_id":{"kind":"string","value":"1d300a65ef62285c54e748a8fec8cef32a5848ba"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-10T04:26:21.554920","string":"2021-01-10T04:26:21.554920"},"revision_date":{"kind":"timestamp","value":"2014-06-07T01:46:31","string":"2014-06-07T01:46:31"},"committer_date":{"kind":"timestamp","value":"2014-06-07T01:46:31","string":"2014-06-07T01:46:31"},"github_id":{"kind":"number","value":44802688,"string":"44,802,688"},"star_events_count":{"kind":"number","value":1,"string":"1"},"fork_events_count":{"kind":"number","value":2,"string":"2"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"# -*- coding: utf-8 -*-\n\"\"\"\n###############################################################################\n# Copyright 2012 Grigoriy Kramarenko.\n###############################################################################\n# This file is part of Barbaris.\n#\n# Barbaris is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# Barbaris is distributed in the hope that it will be useful,\n# but 
WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with Barbaris. If not, see .\n#\n# Этот файл — часть Barbaris.\n#\n# Barbaris - свободная программа: вы можете перераспространять ее и/или\n# изменять ее на условиях Стандартной общественной лицензии GNU в том виде,\n# в каком она была опубликована Фондом свободного программного обеспечения;\n# либо версии 3 лицензии, либо (по вашему выбору) любой более поздней\n# версии.\n#\n# Barbaris распространяется в надежде, что она будет полезной,\n# но БЕЗО ВСЯКИХ ГАРАНТИЙ; даже без неявной гарантии ТОВАРНОГО ВИДА\n# или ПРИГОДНОСТИ ДЛЯ ОПРЕДЕЛЕННЫХ ЦЕЛЕЙ. Подробнее см. в Стандартной\n# общественной лицензии GNU.\n#\n# Вы должны были получить копию Стандартной общественной лицензии GNU\n# вместе с этой программой. Если это не так, см.\n# .\n###############################################################################\n\"\"\"\nfrom django.db import models\nfrom django.conf import settings\n\nimport datetime\n\nclass OrgManager(models.Manager):\n use_for_related_fields = True\n def get_query_set(self):\n return super(OrgManager, self).get_query_set().filter(\n client__isnull=False,\n )\n\nclass PersonManager(models.Manager):\n use_for_related_fields = True\n def get_query_set(self):\n return super(PersonManager, self).get_query_set().filter(\n client__isnull=False,\n )\n\nclass ActivePriceManager(models.Manager):\n use_for_related_fields = True\n def get_query_set(self):\n return super(ActivePriceManager, self).get_query_set().filter(\n is_active=True,\n start_date__lte=datetime.date.today()\n )\n\nclass CreateOrderManager(models.Manager):\n use_for_related_fields = True\n def get_query_set(self):\n return super(CreateOrderManager, self).get_query_set().filter(\n state=settings.STATE_ORDER_CREATE\n )\n\nclass 
AcceptOrderManager(models.Manager):\n use_for_related_fields = True\n def get_query_set(self):\n return super(AcceptOrderManager, self).get_query_set().filter(\n state=settings.STATE_ORDER_ACCEPT\n )\n\nclass CloseOrderManager(models.Manager):\n use_for_related_fields = True\n def get_query_set(self):\n return super(CloseOrderManager, self).get_query_set().filter(\n state=settings.STATE_ORDER_CLOSE\n )\n\nclass CancelOrderManager(models.Manager):\n use_for_related_fields = True\n def get_query_set(self):\n return super(CancelOrderManager, self).get_query_set().filter(\n state=settings.STATE_ORDER_CANCEL\n )\n\nclass ListOrderManager(models.Manager):\n use_for_related_fields = True\n def get_query_set(self):\n return super(ListOrderManager, self).get_query_set().filter(\n state__in=settings.SELECT_LIST_ORDERS\n )\n\nclass WorkOrderManager(models.Manager):\n use_for_related_fields = True\n def get_query_set(self):\n return super(WorkOrderManager, self).get_query_set().filter(\n state__in=settings.SELECT_WORK_ORDERS\n )\n\nclass WorkSpecificationManager(models.Manager):\n use_for_related_fields = True\n def get_query_set(self):\n return super(WorkSpecificationManager, self).get_query_set().filter(\n order__state__in=settings.SELECT_WORK_ORDERS\n )\n\nclass CreateInvoiceManager(models.Manager):\n use_for_related_fields = True\n def get_query_set(self):\n return super(CreateInvoiceManager, self).get_query_set().filter(\n state=settings.STATE_INVOICE_CREATE\n )\n\nclass PaymentInvoiceManager(models.Manager):\n use_for_related_fields = True\n def get_query_set(self):\n return super(PaymentInvoiceManager, self).get_query_set().filter(\n state=settings.STATE_INVOICE_PAYMENT\n )\n\nclass AvanceInvoiceManager(models.Manager):\n use_for_related_fields = True\n def get_query_set(self):\n return super(AvanceInvoiceManager, self).get_query_set().filter(\n state=settings.STATE_INVOICE_AVANCE\n )\n\nclass CashInvoiceManager(models.Manager):\n use_for_related_fields = True\n def 
get_query_set(self):\n return super(CashInvoiceManager, self).get_query_set().filter(\n state__in=settings.SELECT_CASH_INVOICES\n )\n\nclass WorkInvoiceManager(models.Manager):\n use_for_related_fields = True\n def get_query_set(self):\n return super(WorkInvoiceManager, self).get_query_set().filter(\n state__in=settings.SELECT_WORK_INVOICES\n )\n\nclass PrivatePersonManager(models.Manager):\n use_for_related_fields = True\n def get_query_set(self):\n return super(PrivatePersonManager, self).get_query_set().filter(\n org=None, client__isnull=False,\n )\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41156,"cells":{"__id__":{"kind":"number","value":8031588869741,"string":"8,031,588,869,741"},"blob_id":{"kind":"string","value":"68d8ead2f38f13777545ea79960de5996fc3b7b5"},"directory_id":{"kind":"string","value":"14f85485b3115c3cc86de3fe86a6735eb8fbddea"},"path":{"kind":"string","value":"/playgroundApp/urls.py"},"content_id":{"kind":"string","value":"0208931a9bcae335e736e5d17fa50a0e66ae4ac8"},"detected_licenses":{"kind":"list 
like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"josieh/playground-finder"},"repo_url":{"kind":"string","value":"https://github.com/josieh/playground-finder"},"snapshot_id":{"kind":"string","value":"3edac25a798ad834c68abd7b646521fa1e4cc48f"},"revision_id":{"kind":"string","value":"5685fa8fec4aecbf7df025f4b5907f4e0f888843"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2020-04-01T17:32:48.337324","string":"2020-04-01T17:32:48.337324"},"revision_date":{"kind":"timestamp","value":"2014-04-23T02:53:20","string":"2014-04-23T02:53:20"},"committer_date":{"kind":"timestamp","value":"2014-04-23T02:53:20","string":"2014-04-23T02:53:20"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"from django.conf.urls import patterns, url\n\nfrom playgroundApp import views\n\nurlpatterns=patterns('',\n\turl(r'^$', views.Playground_List, name='playgroundapp_home'),\n url(r'^list$', views.testFilter),\n\turl(r'^playgroundapp/playground_info/(?P\\d+)$', views.playgroundDetail, name='playground_info'),\n url(r'^playgroundapp/playground_suggest$', views.suggestPlayground, name='userSuggest'),\n\turl(r'^playgroundapp/user_profile$', views.userProfile, name='userProfile'),\n\turl(r'^playgroundapp/user_suggest$', views.userSuggest, name='userSuggest'),\n\turl(r'^playgroundapp/user_signup$', views.userSignUp, 
name='userSignUp'),\n\turl(r'^playgroundapp/user_login$', views.userLogin, name='userLogin'),\n\n #urls for the suggest a playground page\n url(r'^playgroundapp/map$', views.map, name='map'),\n url(r'^playgroundapp/add-playground$', views.formSuggest, name='form_suggest'),\n \n #url to test form created by following youTube video\n url(r'^playgroundapp/testForm', views.testCreate, name='testForm'),\n)"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41157,"cells":{"__id__":{"kind":"number","value":1228360663344,"string":"1,228,360,663,344"},"blob_id":{"kind":"string","value":"1de2e50e7e1aa9bc574c1fc3730f42916dddaf61"},"directory_id":{"kind":"string","value":"1256539b405370d21c8f11cab73eb2d6f39a2c23"},"path":{"kind":"string","value":"/src/git-got"},"content_id":{"kind":"string","value":"4ee672ab27a6879c9c027a90683c985b4c979d8d"},"detected_licenses":{"kind":"list 
like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"jhj125/git-got"},"repo_url":{"kind":"string","value":"https://github.com/jhj125/git-got"},"snapshot_id":{"kind":"string","value":"3387dc5ac80ba5e68e099ffea3c239e2cb9b8c34"},"revision_id":{"kind":"string","value":"1a6d6f526243c5b99d708dde8198c1bed499a7ce"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-14T11:19:59.891515","string":"2021-01-14T11:19:59.891515"},"revision_date":{"kind":"timestamp","value":"2014-12-17T16:19:29","string":"2014-12-17T16:19:29"},"committer_date":{"kind":"timestamp","value":"2014-12-17T16:19:29","string":"2014-12-17T16:19:29"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"#!/usr/bin/python\n\nimport sys\nimport subprocess\nimport os.path\nimport hashlib\nimport json\nimport fnmatch\nimport hashlib\n\n# For SRR support\nimport httplib\nimport re\n\nDEFAULT_LOG_LEVELS=['INFO', 'WARN', 'ERROR']\n\nLOG_LEVELS = DEFAULT_LOG_LEVELS\n\ndef load_scp(filename, checksum):\n remote = get_remote()\n subprocess.check_call(['scp', '%s/%s.got' % (remote, checksum), filename])\n\ndef store_scp(filename, checksum):\n remote = get_remote()\n subprocess.check_call(['scp', filename, '%s/%s.got' % (remote, checksum)])\n\ndef store_srr(filename, checksum):\n (server_name, parent_id) = get_location_info_srr()\n 
local_path = filename\n target_id = ''\n remote_path = '%s' % checksum\n description = 'Got storage for %s/%s @ TBD hashtag' % (get_root(), filename)\n\n # \"body\" includes everything up to (but not including) the file\n boundary = \"-------------iRobot-Multipart-Boundary-------------\"\n body = '--' + boundary + '\\r\\n'\n body += 'Content-Disposition: form-data; name=\"parent_id\"\\r\\n\\r\\n'\n body += str(parent_id) + '\\r\\n' + '--' + boundary + '\\r\\n'\n body += 'Content-Disposition: form-data; name=\"target_id\"\\r\\n\\r\\n'\n body += str(target_id) + '\\r\\n' + '--' + boundary + '\\r\\n'\n body += 'Content-Disposition: form-data; name=\"description\"\\r\\n\\r\\n'\n body += description + '\\r\\n' + '--' + boundary + '\\r\\n'\n body += 'Content-Disposition: form-data; name=\"file\"; filename=\"%s\"\\r\\n'\\\n % remote_path\n body += 'Content-Transfer-Encoding: binary\\r\\n'\n body += 'MIME-Version: 1.0\\r\\n\\r\\n'\n tail = '\\r\\n--' + boundary + '--\\r\\n\\r\\n'\n content_length = len(body) + os.path.getsize(local_path) + len(tail)\n # Upload form and file to server\n # @todo Exception Handling\n http_c = httplib.HTTPConnection(server_name)\n http_c.putrequest('POST', '/srr/api/add_file')\n http_c.putheader('Content-Type',\n 'multipart/form-data; boundary=%s' % boundary)\n http_c.putheader('Content-Length', content_length)\n http_c.endheaders()\n http_c.send(body)\n # Send file in reasonably-sized blocks\n fp = open(local_path, 'rb')\n data_block = fp.read(4096)\n bytes_sent = 0\n while data_block:\n http_c.send(data_block)\n bytes_sent += len(data_block)\n data_block = fp.read(4096)\n fp.close()\n http_c.send(tail)\n response = http_c.getresponse()\n result = response.read()\n http_c.close()\n if response.status != 200:\n raise SRRError(\"%s: %s\" % (response.reason, result))\n new_id_re = re.compile(r' file_id=(\\d+)\\s*$')\n m = new_id_re.search(result)\n if m:\n return int(m.group(1))\n else:\n raise SRRError(\"Unexpected result from SRR: %s\" % 
result)\n\ndef parse_path_srr(path):\n result_re = re.compile('http://(.*)/.*/(\\d+)$')\n matches = result_re.match(path)\n return (matches.group(1), matches.group(2))\n \ndef get_location_info_srr():\n return parse_path_srr(get_remote())\n\ndef load_srr(filename, checksum):\n (server, parent_id) = get_location_info_srr()\n\n path = get_remote_path_srr(server, parent_id)\n subprocess.check_call(\n ['curl', '-o' , filename , '-#', '%s/%s' % (path, checksum)])\n\nremote_store_file = store_srr\nremote_load_file = load_srr\n\ndef init_backing_store():\n configuration = load_configuration()\n if configuration['remote_type'] == 'srr':\n remote_load_file = load_srr\n remote_store_file = store_srr\n else:\n remote_load_file = load_scp\n remote_store_file = store_scp\n\ndef usage():\n print 'git got []'\n print\n print 'The most commonly used git got commands are:'\n print ' init Initialize the remote to be used with the repository'\n print ' '\n print ' get Retrieve all remote files to the local working area'\n print ' add Add a file to the remote repository'\n print ' status Request the status of a got tracked file'\n print ' reset Ovewrite a gotted file with the remote copy'\n\nroot_valid = False\nroot_path = \"\"\n\ndef get_root():\n global root_valid\n global root_path\n if not root_valid:\n root_path = subprocess.check_output(['git', 'rev-parse', '--show-toplevel']).rstrip()\n root_valid = True\n return root_path\n\nconfiguration_loaded = False\nconfiguration = []\n\ndef file_hash(filename):\n hasher = hashlib.sha1()\n file = open(filename, 'rb')\n try:\n while True:\n data = file.read(8192)\n if not data:\n break\n hasher.update(data)\n except Exception as e:\n raise\n finally:\n file.close()\n return hasher.hexdigest()\n\ndef load_configuration():\n global configuration_loaded\n global configuration\n if not configuration_loaded:\n file = open('%s/.got/storage' % get_root(), 'r')\n configuration = json.load(file)\n file.close()\n configuration_loaded = True\n return 
configuration\n\ndef get_remote():\n configuration = load_configuration()\n return configuration['remote']\n\ndef get_local_got_filename(fully_qualified_filename):\n (base, filename) = os.path.split(fully_qualified_filename)\n return os.path.join(base, '.%s.got' % filename)\n\ndef get_real_filename(fully_qualified_filename):\n (root, filename) = os.path.split(fully_qualified_filename)\n return os.path.join(root, filename[1:-4])\n\ndef get_cb(filename):\n try:\n real_filename = get_real_filename(filename)\n log_debug('get_cb: Using %s for local file' % real_filename)\n sum = open(filename).read().rstrip()\n remote_load_file(rel_filename, sum)\n except Exception as e:\n log_error('Failed to retrieve file %s' % filename, e)\n\ndef reset_cb(filename):\n try:\n log_debug('reset_cb: Reseting %s' % filename)\n got_filename = get_local_got_filename(filename)\n log_debug('reset_cb: Using %s for local got file' % got_filename)\n sum = open(got_filename).read().rstrip()\n remote_load_file(filename, sum)\n except Exception as e:\n log_error('Failed to reset %s' % filename, e)\n\ndef add_cb(filename):\n try:\n log_debug('add_cb: Adding %s' % filename)\n sum = file_hash(filename)\n remote_store_file(filename, sum)\n got_filename = get_local_got_filename(filename)\n hash_file = open(got_filename, 'w')\n hash_file.write('%s' % sum)\n hash_file.close()\n subprocess.check_call(['git', 'add', got_filename])\n file = open('%s/.gitignore' % get_root(), 'w')\n file.write('%s\\n' % filename)\n file.close()\n except Exception as e:\n print sys.exc_traceback.tb_lineno\n log_error('Failed to add %s' % filename, e)\n\ndef status_cb(filename):\n try:\n actual_filename = get_real_filename(filename)\n log_debug('Actual %s' % actual_filename)\n if not os.path.exists(actual_filename):\n return 'Remote: %s' % actual_filename\n sum1 = file_hash(actual_filename)\n sum2 = open(filename).read().rstrip()\n if sum1 != sum2:\n return 'Modified: %s' % actual_filename\n except Exception as e:\n 
log_error('Failed to status %s' % filename, e)\n\ndef start_transaction():\n pass\n\ndef end_transaction():\n pass\n\ndef log(level, message, exception):\n if None is exception:\n print '%s : %s' % (level, message)\n else:\n print '%s:%s:%s' % (level, message, exception)\n\ndef log_error(message, exception = None):\n if 'ERROR' in LOG_LEVELS:\n log('ERROR', message, exception)\n\ndef log_warn(message, exception = None):\n if 'WARN' in LOG_LEVELS:\n log('WARN', message, exception)\n\ndef log_info(message, exception = None):\n if 'INFO' in LOG_LEVELS:\n log('INFO', message, exception)\n\ndef log_debug(message, exception = None):\n if 'DEBUG' in LOG_LEVELS:\n log('DEBUG', message, exception)\n\ndef upgrade_cb(new):\n pass\n\ndef walker(function, args):\n output = []\n for arg in args:\n log_debug('walker: processing argument %s' % arg)\n if os.path.isfile(arg):\n log_debug('walker: processing file %s' % arg)\n output.append(function(arg))\n else:\n for base, dirs, filenames in os.walk(arg):\n if '.git' in dirs:\n dirs.remove('.git')\n if '.got' in dirs:\n dirs.remove('.got')\n for filename in fnmatch.filter(filenames, '.*.got'):\n log_debug('walker: processing file %s/%s' % (base, filename))\n output.append(function('%s/%s' % (base, filename)))\n return output\n\ndef check_initialized():\n if os.path.isfile('%s/.got/storage' % get_root()):\n init_backing_store()\n return True\n return False\n\ndef check_version(version):\n configuration = load_configuration()\n if VERSION != configuration['version']:\n return False\n return True\n\nVERSION = 1\n\nnum_args = len(sys.argv)\n\nif num_args < 2:\n usage()\n exit()\n\ncommand = sys.argv[1]\n\nif command == 'init':\n start_transaction()\n try:\n os.mkdir('%s/.got' % get_root())\n except OSError:\n # This means the directory already existed according to\n # the python documentation\n pass\n print sys.argv\n type = sys.argv[2]\n remote = sys.argv[3]\n configuration = { 'remote' : remote , 'remote_type' : type , 'version' : 
VERSION }\n file = open('%s/.got/storage' % get_root(), 'w')\n json.dump(configuration, file)\n file.close()\n subprocess.check_call(['git', 'add', '%s/.got' % get_root()])\n file = open('%s/.gitignore' % get_root(), 'w')\n file.close()\n subprocess.check_call(['git', 'add', '%s/.gitignore' % get_root()])\n end_transaction()\nelif not check_initialized():\n print 'Got not initialized\\n'\n usage()\n exit()\nelif command == 'upgrade':\n upgrade_cb(VERSION)\nelif not check_version(VERSION):\n print 'Version of got repository requires upgrading, run upgrade command'\n usage()\n exit()\nelif command == 'add':\n start_transaction()\n log_debug('main: Add command %s' % sys.argv[2:])\n walker(add_cb, sys.argv[2:])\n end_transaction()\nelif command == 'reset':\n walker(reset_cb, sys.argv[2:])\nelif command == 'get':\n walker(get_cb, [get_root()])\nelif command == 'status':\n changes = walker(status_cb, [get_root()])\n print '# Changes',\n for change in changes:\n if None != change:\n print '\\n# %s' % change,\n print '\\n',\nelse:\n usage()\n\n# vim: set filetype=python :\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41158,"cells":{"__id__":{"kind":"number","value":7095286006136,"string":"7,095,286,006,136"},"blob_id":{"kind":"string","value":"94607dc10ece7936d9bd0d00c295e0e64f1d6a47"},"directory_id":{"kind":"string","value":"7bafa3568fc321abfe8fc5a4a0c3982116cfa65f"},"path":{"kind":"string","value":"/paypal/signatures.py"},"content_id":{"kind":"string","value":"ed4d5f64bdb731eeb3ba3bc13570edaf3dc80d9e"},"detected_licenses":{"kind":"list 
like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"softak/webfaction_demo"},"repo_url":{"kind":"string","value":"https://github.com/softak/webfaction_demo"},"snapshot_id":{"kind":"string","value":"f8ea504e16609d7198f98333ff1472e0f23528d0"},"revision_id":{"kind":"string","value":"cc4b308ce964b04907f4c23777178ff900e0ad8b"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2016-09-06T03:19:04.919531","string":"2016-09-06T03:19:04.919531"},"revision_date":{"kind":"timestamp","value":"2013-01-23T17:33:52","string":"2013-01-23T17:33:52"},"committer_date":{"kind":"timestamp","value":"2013-01-23T17:33:52","string":"2013-01-23T17:33:52"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"import os\nimport re\nimport urlparse\nimport urllib\nimport httplib\nimport hmac\nimport base64\nimport hashlib\n\n\nppencode_re = re.compile(r'([A-Za-z0-9_]+)')\n\ndef ppencode(string):\n global ppencode_re\n result = ''\n for char in string:\n if re.match(ppencode_re, char) is None:\n result += '%' + hex(ord(char))[2:]\n elif char == ' ':\n result += '+'\n else:\n result += char\n return result\n\n\nclass Uri(object):\n scheme = None\n host = None\n port = None\n path = None\n\n def __init__(self, scheme=None, host=None, port=None,\n path=None, query=None):\n self.query = query or {}\n if scheme is not None:\n 
self.scheme = scheme\n if host is not None:\n self.host = host\n if port is not None:\n self.port = port\n if path:\n self.path = path\n\n @staticmethod\n def parse_uri(uri_string):\n parts = urlparse.urlparse(uri_string)\n uri = Uri()\n if parts[0]:\n uri.scheme = parts[0]\n if parts[1]:\n host_parts = parts[1].split(':')\n if host_parts[0]:\n uri.host = host_parts[0]\n if len(host_parts) > 1:\n uri.port = int(host_parts[1])\n if parts[2]:\n uri.path = parts[2]\n\n if parts[4]:\n param_pairs = parts[4].split('&')\n for pair in param_pairs:\n pair_parts = pair.split('=')\n if len(pair_parts) > 1:\n uri.query[urllib.unquote_plus(pair_parts[0])] = \\\n urllib.unquote_plus(pair_parts[1])\n elif len(pair_parts) == 1:\n uri.query[urllib.unquote_plus(pair_parts[0])] = None\n return uri\n\n\nclass HttpRequest(object):\n method = None\n uri = None\n\n def __init__(self, uri=None, method=None, headers=None):\n self.headers = headers or {}\n self._body_parts = []\n if method is not None:\n self.method = method\n if isinstance(uri, (str, unicode)):\n uri = Uri.parse_uri(uri)\n self.uri = uri or Uri()\n\n\ndef build_oauth_base_string(http_request, consumer_key, signature_type,\n timestamp, version, token):\n params = {}\n params['oauth_consumer_key'] = consumer_key\n params['oauth_signature_method'] = signature_type\n params['oauth_timestamp'] = str(timestamp)\n params['oauth_token'] = token\n params['oauth_version'] = version\n \n sorted_keys = sorted(params.keys())\n \n pairs = []\n for key in sorted_keys:\n pairs.append('%s=%s' % (key, params[key]))\n \n all_parameters = '&'.join(pairs)\n normalized_host = http_request.uri.host.lower()\n normalized_scheme = (http_request.uri.scheme or 'http').lower()\n non_default_port = None\n\n if (http_request.uri.port is not None\n and ((normalized_scheme == 'https' and http_request.uri.port != 443)\n or (normalized_scheme == 'http' and http_request.uri.port != 80))):\n non_default_port = http_request.uri.port\n \n path = 
http_request.uri.path or '/'\n request_path = None\n\n if not path.startswith('/'):\n path = '/%s' % path\n if non_default_port is not None:\n request_path = '%s://%s:%s%s' % (normalized_scheme, normalized_host,\n non_default_port, path)\n else:\n request_path = '%s://%s%s' % (normalized_scheme, normalized_host,\n path)\n \n base_string = '&'.join(\n (http_request.method.upper(),\n ppencode(request_path),\n ppencode(all_parameters)))\n return base_string\n\n\ndef generate_hmac_signature(http_request, consumer_key, consumer_secret,\n timestamp, version, token, token_secret):\n base_string = build_oauth_base_string(\n http_request, consumer_key, 'HMAC-SHA1', timestamp, version, token)\n hash_key = '%s&%s' % (ppencode(consumer_secret), ppencode(token_secret))\n hashed = hmac.new(hash_key, base_string, hashlib.sha1)\n return base64.b64encode(hashed.digest())\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2013,"string":"2,013"}}},{"rowIdx":41159,"cells":{"__id__":{"kind":"number","value":10462540356015,"string":"10,462,540,356,015"},"blob_id":{"kind":"string","value":"88c06a907a9815582e4a199f2aa6a53b7b4e6c25"},"directory_id":{"kind":"string","value":"364249d5c7e9af7a7fd5d6122d4489fa5250919c"},"path":{"kind":"string","value":"/salest/cart/middleware.py"},"content_id":{"kind":"string","value":"0042d91c147c8c26d01dad8692571e154b189799"},"detected_licenses":{"kind":"list like","value":["GPL-3.0-or-later"],"string":"[\n 
\"GPL-3.0-or-later\"\n]"},"license_type":{"kind":"string","value":"non_permissive"},"repo_name":{"kind":"string","value":"anvil8/salest"},"repo_url":{"kind":"string","value":"https://github.com/anvil8/salest"},"snapshot_id":{"kind":"string","value":"b67cbaee6edf4cdfae77bd31191f5bd05ace213b"},"revision_id":{"kind":"string","value":"a25b9ab5ff2fab309b5d8b85b4c46d0e60f71410"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2020-05-30T14:23:47.923820","string":"2020-05-30T14:23:47.923820"},"revision_date":{"kind":"timestamp","value":"2012-08-07T11:29:27","string":"2012-08-07T11:29:27"},"committer_date":{"kind":"timestamp","value":"2012-08-07T11:29:27","string":"2012-08-07T11:29:27"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"from django.utils.functional import SimpleLazyObject\nfrom salest.cart.models import Cart\n\n\ndef get_cart(request):\n if not hasattr(request, '_cached_cart'):\n request._cached_cart = Cart.objects.get_or_create_from_request(request)\n return request._cached_cart\n\n\nclass ShopingCartMiddleware(object):\n\n def process_request(self, request):\n assert hasattr(request, 'session'), \"The ShopingCartMiddleware requires session middleware to be installed. 
Edit your MIDDLEWARE_CLASSES setting to insert 'django.contrib.sessions.middleware.SessionMiddleware'.\"\n request.cart = SimpleLazyObject(lambda: get_cart(request))\n\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2012,"string":"2,012"}}},{"rowIdx":41160,"cells":{"__id__":{"kind":"number","value":764504187249,"string":"764,504,187,249"},"blob_id":{"kind":"string","value":"1f1a04f6321918c0eb82f2d26790028b1a92b3b2"},"directory_id":{"kind":"string","value":"60130678bae6eaa3abb3b0336720d506173aeba1"},"path":{"kind":"string","value":"/routes.py"},"content_id":{"kind":"string","value":"cf9b04fb3a7357a0fd6f61b147a66ef809cd8553"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"zrenx/pp4gae"},"repo_url":{"kind":"string","value":"https://github.com/zrenx/pp4gae"},"snapshot_id":{"kind":"string","value":"b2ca8340bbfd170cc0b65661e29ddb32700db18f"},"revision_id":{"kind":"string","value":"9da14062ce449e999167faa313adc25df82816cd"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-13T01:49:10.739165","string":"2021-01-13T01:49:10.739165"},"revision_date":{"kind":"timestamp","value":"2010-09-06T04:29:08","string":"2010-09-06T04:29:08"},"committer_date":{"kind":"timestamp","value":"2010-09-06T04:29:08","string":"2010-09-06T04:29:08"},"github_id":{"kind":"number","value":394886,"string":"394,886"},"star_events_count":{"kind":"number","value":7,"string":"7"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_
size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"#!/usr/bin/python\n# -*- coding: utf-8 -*-\n# routes_in is a tuple of tuples. The first item in each is a regexp that will\n# be used to match the incoming request URL. The second item in the tuple is\n# what it will be replaced with. This mechanism allows you to redirect incoming\n# routes to different web2py locations\n#\n# Example: If you wish for your entire website to use init's static directory:\n#\n# routes_in=( ('/static/(?P[\\w\\./_-]+)','/init/static/\\g') )\n#\n\nroutes_in = (\n #('.*:/','/pp4gae/'), # rewrite to pp4gae\n ('/static/(?P[\\w\\./_-]+)','/pp4gae/static/\\g'),\n ('/(?P.*)', '/pp4gae/default/\\g'),\n #'.*:/robots.txt', '/pp4gae/static/robots.txt'))\n)\n\n# routes_out, like routes_in translates URL paths created with the web2py URL()\n# function in the same manner that route_in translates inbound URL paths.\n#\n\nroutes_out = (\n ('/pp4gae/default/(?P.*)', '/\\g'),\n)\n\n# Error-handling redirects all HTTP errors (status codes >= 400) to a specified\n# path. If you wish to use error-handling redirects, uncomment the tuple\n# below. You can customize responses by adding a tuple entry with the first\n# value in 'appName/HTTPstatusCode' format. ( Only HTTP codes >= 400 are\n# routed. ) and the value as a path to redirect the user to. You may also use\n# '*' as a wildcard.\n#\n# The error handling page is also passed the error code and ticket as\n# variables. 
Traceback information will be stored in the ticket.\n#\n# routes_onerror = [\n# ('init/400', '/init/default/login')\n# ,('init/*', '/init/static/fail.html')\n# ,('*/404', '/init/static/cantfind.html')\n# ,('*/*', '/init/error/index')\n# ]\n\n# specify action in charge of error handling\n#\n# error_handler = dict(application='error',\n# controller='default',\n# function='index')\n\n# In the event that the error-handling page itself returns an error, web2py will\n# fall back to its old static responses. You can customize them here.\n# ErrorMessageTicket takes a string format dictionary containing (only) the\n# \"ticket\" key.\n\n# error_message = '

Invalid request

'\n# error_message_ticket = '

Internal error

Ticket issued: %(ticket)s'\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2010,"string":"2,010"}}},{"rowIdx":41161,"cells":{"__id__":{"kind":"number","value":4217657905551,"string":"4,217,657,905,551"},"blob_id":{"kind":"string","value":"43dd4b75c9396ed4d52ab512d29adbdfa6a3818c"},"directory_id":{"kind":"string","value":"c5747e63f7bfef9bee2010ac85c7db7abd2423ef"},"path":{"kind":"string","value":"/instasend.py"},"content_id":{"kind":"string","value":"b9a450b105e55a86956ce97237a8bdd4d65bbde5"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"mountaindude/instasend"},"repo_url":{"kind":"string","value":"https://github.com/mountaindude/instasend"},"snapshot_id":{"kind":"string","value":"016abc281233dd4bb9097a1793418e8d597a3c04"},"revision_id":{"kind":"string","value":"1a1dae83cde3e96970e5e90300277a5453108f34"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-19T08:16:33.082799","string":"2021-01-19T08:16:33.082799"},"revision_date":{"kind":"timestamp","value":"2011-09-09T22:23:52","string":"2011-09-09T22:23:52"},"committer_date":{"kind":"timestamp","value":"2011-09-09T22:23:52","string":"2011-09-09T22:23:52"},"github_id":{"kind":"number","value":2358461,"string":"2,358,461"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_
count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"# Sending commands to Insta RX/TX radio module\n# Goran Sander \n# Based on miniterm.py by Chris Liechti \n\n# python instasend.py --port /dev/ttyUSB2 -c a4on\n\n\nimport sys, os, serial, threading, array, time\n\nEXITCHARCTER = '\\x1d' # GS/CTRL+]\nMENUCHARACTER = '\\x14' # Menu: CTRL+T\n\n\ndef key_description(character):\n \"\"\"generate a readable description for a key\"\"\"\n ascii_code = ord(character)\n if ascii_code < 32:\n return 'Ctrl+%c' % (ord('@') + ascii_code)\n else:\n return repr(character)\n\n# help text, starts with blank line! it's a function so that the current values\n# for the shortcut keys is used and not the value at program start\ndef get_help_text():\n return \"\"\"\n--- pySerial - miniterm - help\n---\n--- %(exit)-8s Exit program\n--- %(menu)-8s Menu escape key, followed by:\n--- Menu keys:\n--- %(itself)-8s Send the menu character itself to remote\n--- %(exchar)-8s Send the exit character to remote\n--- %(info)-8s Show info\n--- %(upload)-8s Upload file (prompt will be shown)\n--- Toggles:\n--- %(rts)s RTS %(echo)s local echo\n--- %(dtr)s DTR %(break)s BREAK\n--- %(lfm)s line feed %(repr)s Cycle repr mode\n---\n--- Port settings (%(menu)s followed by the following):\n--- 7 8 set data bits\n--- n e o s m change parity (None, Even, Odd, Space, Mark)\n--- 1 2 3 set stop bits (1, 2, 1.5)\n--- b change baud rate\n--- x X disable/enable software flow control\n--- r R disable/enable hardware flow control\n\"\"\" % {\n 'exit': key_description(EXITCHARCTER),\n 'menu': key_description(MENUCHARACTER),\n 'rts': key_description('\\x12'),\n 'repr': key_description('\\x01'),\n 'dtr': key_description('\\x04'),\n 'lfm': key_description('\\x0c'),\n 'break': key_description('\\x02'),\n 'echo': key_description('\\x05'),\n 'info': key_description('\\x09'),\n 'upload': key_description('\\x15'),\n 'itself': 
key_description(MENUCHARACTER),\n 'exchar': key_description(EXITCHARCTER),\n}\n\n# first choose a platform dependant way to read single characters from the console\nglobal console\n\nif os.name == 'nt':\n import msvcrt\n class Console:\n def __init__(self):\n pass\n\n def setup(self):\n pass # Do nothing for 'nt'\n\n def cleanup(self):\n pass # Do nothing for 'nt'\n\n def getkey(self):\n while 1:\n z = msvcrt.getch()\n if z == '\\0' or z == '\\xe0': #functions keys\n msvcrt.getch()\n else:\n if z == '\\r':\n return '\\n'\n return z\n\n console = Console()\n\nelif os.name == 'posix':\n import termios, sys, os\n class Console:\n def __init__(self):\n self.fd = sys.stdin.fileno()\n\n def setup(self):\n self.old = termios.tcgetattr(self.fd)\n new = termios.tcgetattr(self.fd)\n new[3] = new[3] & ~termios.ICANON & ~termios.ECHO & ~termios.ISIG\n new[6][termios.VMIN] = 1\n new[6][termios.VTIME] = 0\n termios.tcsetattr(self.fd, termios.TCSANOW, new)\n #s = '' # We'll save the characters typed and add them to the pool.\n\n def getkey(self):\n c = os.read(self.fd, 1)\n return c\n\n def cleanup(self):\n termios.tcsetattr(self.fd, termios.TCSAFLUSH, self.old)\n\n console = Console()\n\n def cleanup_console():\n console.cleanup()\n\n console.setup()\n sys.exitfunc = cleanup_console #terminal modes have to be restored on exit...\n\nelse:\n raise \"Sorry no implementation for your platform (%s) available.\" % sys.platform\n\n\nCONVERT_CRLF = 2\nCONVERT_CR = 1\nCONVERT_LF = 0\nNEWLINE_CONVERISON_MAP = ('\\n', '\\r', '\\r\\n')\nLF_MODES = ('LF', 'CR', 'CR/LF')\n\nREPR_MODES = ('raw', 'some control', 'all control', 'hex')\n\nINQ = 0xfa\nACK = 0x05\nCRLF = '\\r\\n' \n\nclass Miniterm:\n def __init__(self, port, baudrate, cmd, echo=False, convert_outgoing=CONVERT_CRLF, repr_mode=0):\n self.serial = serial.Serial(port, baudrate, parity='N', rtscts=False, xonxoff=False, timeout=0.7)\n\n self.echo = echo\n self.repr_mode = repr_mode\n self.convert_outgoing = convert_outgoing\n 
self.newline = NEWLINE_CONVERISON_MAP[self.convert_outgoing]\n self.cmd = cmd\n self.break_state = False\n\n\n def start(self):\n self.alive = True\n\n # enter keyboard handling loop\n self.keyboard_thread = threading.Thread(target=self.keyb)\n self.keyboard_thread.setDaemon(1)\n self.keyboard_thread.start()\n\n # Send INSTA command \n cmd = self.cmd.lower()\n if self.echo:\n sys.stdout.write(\"cmd:%s\\n\" % cmd)\n sys.stdout.write(\"cmd len:%d\\n\" % len(cmd))\n\n telegram = '\\x55\\x16\\x00'\n device = 0;\n \n # add group #\n if cmd[0] == 'a':\n device = 0x00\n elif cmd[0] == 'b':\n device = 0x08\n elif cmd[0] == 'c':\n device = 0x10\n\n # Add channel #\n channel = int(cmd[1])\n # sys.stdout.write(\"channel:%d\\n\" % channel)\n device = device | (channel - 1)\n\n # Add on/off\n if cmd[2:4] == \"on\":\n device = device | 0x40\n elif cmd [2:5] == \"off\":\n device = device | 0x80\n\n # sys.stdout.write(\"device:%x\\n\" % device)\n \n \n telegram = telegram + chr(device) + '\\x01\\x00\\x00\\x00\\x00\\x00'\n \n sum = 0\n for i in range(0, len(telegram)):\n sum = sum + ord(telegram[i])\n# sys.stdout.write(\"i=%d, %x\\n\" % (i, ord(telegram[i])))\n \n # sys.stdout.write(\"sum is:%x\\n\" % sum)\n sum = sum & 0xff\n # sys.stdout.write(\"sum2 is:%x\\n\" % sum)\n crc = 2**8 - sum\n # sys.stdout.write(\"CRC is:%x\\n\" % crc)\n\n telegram = telegram + chr(crc) + '\\xaa'\n\n \n if self.echo:\n sys.stdout.write(\"INQ\\n\")\n self.serial.write(chr(INQ))\n while self.serial.inWaiting() == 0:\n pass\n \n if self.serial.inWaiting() > 0:\n data = self.serial.read(1)\n\n# sys.stdout.write(\"\\\\x%s \" % data.encode('hex'))\n if data == chr(ACK):\n try:\n if self.echo:\n sys.stdout.write(\"ACK\\n\")\n# telegram = '\\x55\\x16\\x00\\x42\\x01\\x00\\x00\\x00\\x00\\x00\\x52\\xaa'\n# version query\n# telegram = \"\\x55\\x32\\xcd\\xf1\\xfa\\x00\\x00\\x00\\x00\\x00\\xc1\\xaa\"\n self.serial.write(telegram)\n self.serial.flush()\n\n if self.echo:\n for i in range(0, len(telegram)):\n 
sys.stdout.write(\"i=%d, \\\\x%x\\n\" % (i, ord(telegram[i])))\n sys.stdout.flush()\n\n# while self.serial.inWaiting() == 0:\n# pass\n\n# sys.stdout.write(\"Response:\")\n# while self.serial.inWaiting() > 0:\n# data = self.serial.read(1)\n# sys.stdout.write(\"\\\\x%s \" % data.encode('hex'))\n# sys.stdout.write(\"\\n\\n\")\n# if data == chr(INQ):\n# sys.stdout.write(\"Received INQ\\n\")\n# self.serial.write(chr(ACK))\n# self.serial.flush()\n# sys.stdout.write(\"Sending ACK\\n\")\n \n# i = 1\n# while self.serial.inWaiting() == 0:\n# pass\n# while self.serial.inWaiting() > 0:\n# data = self.serial.read(1)\n# sys.stdout.write(\"%d:\\\\x%s \" % (i, data.encode('hex')))\n# i=i+1\n# sys.stdout.write(\"\\n\")\n# sys.stdout.write(\"\\n\")\n\n except:\n print \"Serial write exception\"\n raise\n \n \n \n \n def stop(self):\n self.alive = False\n\n def join(self, transmit_only=False):\n pass\n# self.transmitter_thread.join()\n# if not transmit_only:\n# self.receiver_thread.join()\n# self.keyboard_thread.join()\n\n\n def keyb(self):\n \"\"\"loop and copy console->serial until EXITCHARCTER character is\n found. 
when MENUCHARACTER is found, interpret the next key\n locally.\n \"\"\"\n try:\n while self.alive:\n# print \"bbb\" \n try:\n c = console.getkey()\n except KeyboardInterrupt:\n c = '\\x03'\n\n if c == EXITCHARCTER: \n self.stop()\n break # exit app\n elif c == 's':\n# if self.echo:\n sys.stdout.write(\"Sending INQ\\r\\n\")\n elif c == '\\n':\n self.serial.write(self.newline) # send newline character(s)\n if self.echo:\n sys.stdout.write(c) # local echo is a real newline in any case\n# sys.stdout.flush()\n else:\n self.serial.write(c) # send character\n if self.echo:\n sys.stdout.write(c)\n sys.stdout.flush()\n except:\n self.alive = False\n raise\n \n\n\ndef main():\n import optparse\n\n parser = optparse.OptionParser(\n usage = \"%prog [options] [port [baudrate]]\",\n description = \"Miniterm - A simple terminal program for the serial port.\"\n )\n\n parser.add_option(\"-c\", \"--cmd\",\n dest = \"cmd\",\n help =\"command to send to INSTA transciever\",\n default = \"\"\n )\n\n parser.add_option(\"-p\", \"--port\",\n dest = \"port\",\n help = \"port, a number (default 0) or a device name (deprecated option)\",\n default = \"COM2\"\n )\n\n parser.add_option(\"-b\", \"--baud\",\n dest = \"baudrate\",\n action = \"store\",\n type = 'int',\n help = \"set baud rate, default %default\",\n default = 9600\n )\n\n parser.add_option(\"-e\", \"--echo\",\n dest = \"echo\",\n action = \"store_true\",\n help = \"enable local echo (default off)\",\n default = False\n )\n\n parser.add_option(\"--cr\",\n dest = \"cr\",\n action = \"store_true\",\n help = \"do not send CR+LF, send CR only\",\n default = False\n )\n\n parser.add_option(\"--lf\",\n dest = \"lf\",\n action = \"store_true\",\n help = \"do not send CR+LF, send LF only\",\n default = False\n )\n\n parser.add_option(\"-D\", \"--debug\",\n dest = \"repr_mode\",\n action = \"count\",\n help = \"\"\"debug received data (escape non-printable chars)\n--debug can be given multiple times:\n0: just print what is received\n1: 
escape non-printable characters, do newlines as unusual\n2: escape non-printable characters, newlines too\n3: hex dump everything\"\"\",\n default = 0 \n )\n\n parser.add_option(\"-q\", \"--quiet\",\n dest = \"quiet\",\n action = \"store_true\",\n help = \"suppress non error messages\",\n default = False\n )\n\n parser.add_option(\"--exit-char\",\n dest = \"exit_char\",\n action = \"store\",\n type = 'int',\n help = \"ASCII code of special character that is used to exit the application\",\n default = 0x20 # Default 0x1d\n )\n\n parser.add_option(\"--menu-char\",\n dest = \"menu_char\",\n action = \"store\",\n type = 'int',\n help = \"ASCII code of special character that is used to control miniterm (menu)\",\n default = 0x14\n )\n\n (options, args) = parser.parse_args()\n\n if options.cr and options.lf:\n parser.error(\"only one of --cr or --lf can be specified\")\n\n if options.cmd is \"\":\n parser.error('Must provide command')\n\n global EXITCHARCTER, MENUCHARACTER\n EXITCHARCTER = chr(options.exit_char)\n MENUCHARACTER = chr(options.menu_char)\n\n port = options.port\n baudrate = options.baudrate\n if args:\n if options.port is not None:\n parser.error(\"no arguments are allowed, options only when --port is given\")\n port = args.pop(0)\n if args:\n try:\n baudrate = int(args[0])\n except ValueError:\n parser.error(\"baud rate must be a number, not %r\" % args[0])\n args.pop(0)\n if args:\n parser.error(\"too many arguments\")\n else:\n if port is None: port = 0\n\n convert_outgoing = CONVERT_CRLF\n if options.cr:\n convert_outgoing = CONVERT_CR\n elif options.lf:\n convert_outgoing = CONVERT_LF\n\n try:\n miniterm = Miniterm(\n port,\n baudrate,\n cmd=options.cmd,\n echo=options.echo,\n convert_outgoing=convert_outgoing,\n repr_mode=options.repr_mode\n )\n except serial.SerialException:\n sys.stderr.write(\"could not open port %r\\n\" % port)\n sys.exit(1)\n\n if not options.quiet:\n sys.stderr.write('--- InstaSend on %s: %d,%s,%s,%s ---\\n' % (\n 
miniterm.serial.portstr,\n miniterm.serial.baudrate,\n miniterm.serial.bytesize,\n miniterm.serial.parity,\n miniterm.serial.stopbits,\n ))\n# sys.stderr.write('--- Quit: %s | Menu: %s | Help: %s followed by %s ---\\n' % (\n# key_description(EXITCHARCTER),\n# key_description(MENUCHARACTER),\n# key_description(MENUCHARACTER),\n# key_description('\\x08'),\n# ))\n\n miniterm.start()\n miniterm.join(True)\n if not options.quiet:\n sys.stderr.write(\"\\n--- exit ---\\n\")\n miniterm.join()\n \n\n\nif __name__ == '__main__':\n main()\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2011,"string":"2,011"}}},{"rowIdx":41162,"cells":{"__id__":{"kind":"number","value":3238405386727,"string":"3,238,405,386,727"},"blob_id":{"kind":"string","value":"ce328ab4c6642f5225135a7425e3290fb08ac920"},"directory_id":{"kind":"string","value":"7a488c1f0657c8fe8856e8ca9c34acd2b8c74ce3"},"path":{"kind":"string","value":"/presentation/synthetic/invert.py"},"content_id":{"kind":"string","value":"81d5cd0384eae77395085e1de2bff6f06d60b4f3"},"detected_licenses":{"kind":"list 
like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"whigg/seg2012"},"repo_url":{"kind":"string","value":"https://github.com/whigg/seg2012"},"snapshot_id":{"kind":"string","value":"3aeaa9c9d265aade8fe16ce8c5653fe275509d75"},"revision_id":{"kind":"string","value":"33a09bdad1531231deb5fffc2928c7032fc8f277"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-05-28T10:49:09.682200","string":"2021-05-28T10:49:09.682200"},"revision_date":{"kind":"timestamp","value":"2014-01-15T16:30:28","string":"2014-01-15T16:30:28"},"committer_date":{"kind":"timestamp","value":"2014-01-15T16:30:28","string":"2014-01-15T16:30:28"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"import sys\nimport cPickle as pickle\nimport fatiando as ft\nimport numpy as np\n\nlog = ft.log.get()\nlog.info(ft.log.header())\n\ndef setview1(scene):\n scene.scene.camera.position = [-2267.5718325185544, 516.89047192363171, 325.41328402454576]\n scene.scene.camera.focal_point = [486.1565293791673, 491.11737744276104, 577.28350756789393]\n scene.scene.camera.view_angle = 30.0\n scene.scene.camera.view_up = [0.091053683141375921, -0.0033155163338156627, -0.99584046620823252]\n scene.scene.camera.clipping_range = [1653.5666619996091, 4186.5783724965167]\n scene.scene.camera.compute_view_plane_normal()\n scene.scene.render()\n\ndef 
setview2(scene):\n scene.scene.camera.position = [-2083.5891203179367, 2196.562816405461, -698.77837411337339]\n scene.scene.camera.focal_point = [467.08916568702983, 471.73128900160287, 610.85856263017729]\n scene.scene.camera.view_angle = 30.0\n scene.scene.camera.view_up = [0.33814580712856857, -0.1984415469462483, -0.91993389195471487]\n scene.scene.camera.clipping_range = [1619.2366959256301, 5453.5227454505075]\n scene.scene.camera.compute_view_plane_normal()\n scene.scene.render()\n\nseedfile = sys.argv[1]\nmu = float(sys.argv[2])\ndelta = float(sys.argv[3])\nif sys.argv[4] == 'classic':\n useshape = False\nelif sys.argv[4] == 'shape':\n useshape = True\nelse:\n print \"invalid argument\"\n sys.exit()\n\nxp, yp, zp, gxx, gxy, gxz, gyy, gyz, gzz = np.loadtxt('data.txt', unpack=True)\nwith open('model.pickle') as f:\n model = pickle.load(f)\n\nbounds = [0, 1000, 0, 1000, 0, 1000]\nmesh = ft.msh.ddd.PrismMesh(bounds, (30, 30, 30))\ndms = ft.pot.harvester.wrapdata(mesh, xp, yp, zp, gxx=gxx, gxy=gxy, gxz=gxz,\n gyy=gyy, gyz=gyz, gzz=gzz)\nseeds = ft.pot.harvester.sow(ft.pot.harvester.loadseeds(seedfile), mesh,\n mu=mu, delta=delta, useshape=useshape)\n\nscene = ft.vis.figure3d(size=(1000, 1000))\nft.vis.prisms(model, 'density', style='wireframe', linewidth=5)\nft.vis.prisms([s.get_prism() for s in seeds], 'density', vmin=0, vmax=1000)\nft.vis.axes3d(ft.vis.outline3d(bounds), ranges=[b*0.001 for b in bounds],\n fmt='%0.1f', nlabels=3)\nft.vis.wall_bottom(bounds)\nft.vis.wall_north(bounds)\nsetview1(scene)\nft.vis.savefig3d('seeds-%s-%s1.png' % (seedfile, sys.argv[4]))\nsetview2(scene)\nft.vis.savefig3d('seeds-%s-%s2.png' % (seedfile, sys.argv[4]))\n#ft.vis.show3d()\n\nestimate, goals, misfits = ft.pot.harvester.harvest(dms, seeds)\nmesh.addprop('density', estimate['density'])\nresult = ft.msh.ddd.vremove(0, 'density', mesh)\n\nwith open('results-%s-%s.pickle' % (seedfile, sys.argv[4]), 'w') as f:\n pickle.dump({'estimate':result, 'predicted':dms[-1].predicted,\n 
'seeds':[s.get_prism() for s in seeds]}, f)\n\nshape = [51, 51]\nft.vis.figure(figsize=(3.33,4))\nft.vis.axis('scaled')\nlevels = ft.vis.contourf(yp, xp, gzz, shape, 6)\nft.vis.colorbar(orientation='horizontal', shrink=0.8)\nft.vis.contour(yp, xp, dms[-1].predicted, shape, levels, color='k',\n linewidth=1.5)\nft.vis.xlabel('y (km)')\nft.vis.ylabel('x (km)')\nft.vis.m2km()\nft.vis.savefig('fit-%s-%s.png' % (seedfile, sys.argv[4]), dpi=300)\n#ft.vis.show()\n\nscene = ft.vis.figure3d(size=(1000, 1000))\nft.vis.prisms(model, 'density', style='wireframe', linewidth=8)\nft.vis.prisms(result, 'density', vmin=0, vmax=1000)\nft.vis.axes3d(ft.vis.outline3d(bounds), ranges=[b*0.001 for b in bounds],\n fmt='%0.1f', nlabels=3)\nft.vis.wall_bottom(bounds)\nft.vis.wall_north(bounds)\nsetview1(scene)\nft.vis.savefig3d('result-%s-%s1.png' % (seedfile, sys.argv[4]))\nsetview2(scene)\nft.vis.savefig3d('result-%s-%s2.png' % (seedfile, sys.argv[4]))\nft.vis.show3d()\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41163,"cells":{"__id__":{"kind":"number","value":11596411742801,"string":"11,596,411,742,801"},"blob_id":{"kind":"string","value":"dc241d2323075856a8a0138e1052b1593aaf51ae"},"directory_id":{"kind":"string","value":"7fefbf96f1fb509ad2213d59ec33fdf1ef4ab9fe"},"path":{"kind":"string","value":"/examples/basic_usages.py"},"content_id":{"kind":"string","value":"e060dd064dda98af9c5775c21637d6a8f12bf1c9"},"detected_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"license_type":{"kind":"string","value":"permissive"},"repo_name":{"kind":"string","value":"kaixiang-li/askme"},"repo_url":{"kind":"string","value":"https://github.com/kaixiang-li/askme"},"snapshot_id":{"kind":"string","value":"1a6157296b222113f7e1e23e49f2d6039d42768e"},"revision_id":{"kind":"string","value":"a53cf7affa6f6d16e47e5d05482ff046be384da0"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2022-11-05T20:01:18.150746","string":"2022-11-05T20:01:18.150746"},"revision_date":{"kind":"timestamp","value":"2013-04-05T14:02:07","string":"2013-04-05T14:02:07"},"committer_date":{"kind":"timestamp","value":"2013-04-06T16:06:34","string":"2013-04-06T16:06:34"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"from datetime import date\nfrom askme import Askme\n\nterminal = Askme()\n\nname = terminal.ask(\"<% cprint('Hello, World!', 'green', 'on_red') %>\")\n\ncondition = terminal.ask(\"hi?where are you from: \", default = \"so nice\", uppercase = True\n ,validate=\"^[A-Z]{2}$\")\n\nbirthday = terminal.ask(\"birthday?(year,month,day): \", date)\n\npassword = terminal.ask(\"password: \", echo = False)\n\n\n\nprint birthday\nprint password\nprint \"the man %(name)s is from %(condition)s\" % 
locals()\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2013,"string":"2,013"}}},{"rowIdx":41164,"cells":{"__id__":{"kind":"number","value":6957847050812,"string":"6,957,847,050,812"},"blob_id":{"kind":"string","value":"9e1890b9e4e70becb07f301bb69cae48eaf54927"},"directory_id":{"kind":"string","value":"a15ed3d4e5351e174c8afde0bf0f7cc9c592e2d2"},"path":{"kind":"string","value":"/googleappengine/electionsurvey/bin/load_freshdata.py"},"content_id":{"kind":"string","value":"5a1b59e1c2cb288775d4e67a01cff5f7244f33de"},"detected_licenses":{"kind":"list like","value":["BSD-3-Clause","LicenseRef-scancode-proprietary-license"],"string":"[\n \"BSD-3-Clause\",\n \"LicenseRef-scancode-proprietary-license\"\n]"},"license_type":{"kind":"string","value":"non_permissive"},"repo_name":{"kind":"string","value":"sebbacon/theyworkforyou"},"repo_url":{"kind":"string","value":"https://github.com/sebbacon/theyworkforyou"},"snapshot_id":{"kind":"string","value":"9d2c0a57711fb43b5ec84fefa6576f037b656803"},"revision_id":{"kind":"string","value":"5a95c73c0be78f9a677bd5861d855942bd0475d4"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-15T20:17:59.071755","string":"2021-01-15T20:17:59.071755"},"revision_date":{"kind":"timestamp","value":"2010-04-24T21:05:54","string":"2010-04-24T21:05:54"},"committer_date":{"kind":"timestamp","value":"2010-04-24T21:05:54","string":"2010-04-24T21:05:54"},"github_id":{"kind":"number","value":628088,"string":"628,088"},"star_events_count":{"kind":"number","value":1,"string":"1"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},
"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"#!/usr/bin/python2.5\n# coding=utf-8\n\n#\n# load_freshdata.py:\n# Loads data from YourNextMP and DemocracyClub into GAE. Call this script as\n# main.\n#\n# Copyright (c) 2010 UK Citizens Online Democracy. All rights reserved.\n# Email: francis@mysociety.org; WWW: http://www.mysociety.org/\n#\n\nimport sys\nimport csv\nimport os\nimport getpass\nimport datetime\nimport optparse\nimport re\nimport urllib2\nimport gzip\n\nsys.path = [\"../\", \"../google_appengine/\"] + sys.path\nimport django.utils.simplejson as json\nfrom google.appengine.ext import db\nfrom google.appengine.ext.remote_api import remote_api_stub\nfrom google.appengine.api.datastore_types import Key\n\nimport settings\nfrom models import Party, Candidate, Seat, Candidacy, RefinedIssue\n\n# Parameters\nDEMOCLUB_URL=\"http://www.democracyclub.org.uk/issues/refined_csv\"\nYOURNEXTMP_URL=\"http://www.yournextmp.com/data/%s/latest/json_main\"\n\nparser = optparse.OptionParser()\n\nparser.set_usage('''Load or update data in TheyWorkForYou election, from YourNextMP and Democracy Club. Arguments are JSON files from YourNextMP or CSV files from Democracy Club to load. You must specify *all* the files, as other entries in the database will be marked as deleted.''')\nparser.add_option('--host', type='string', dest=\"host\", help='domain:port of application, e.g. 
localhost:8080, election.theyworkforyou.com', default=\"localhost:8080\")\nparser.add_option('--email', type='string', dest=\"email\", help='email address for authentication to application', default=\"francis@flourish.org\")\nparser.add_option('--fetch', action='store_true', dest='fetch', help='as well as command line arguments, also retrieve latest full dumps from YourNextMP and DemocracyClub and use this', default=False)\n\n(options, args) = parser.parse_args()\n\nfor arg in args:\n if not re.search(\"(\\.json|\\.csv)$\", arg):\n raise Exception(\"Please only .json or .csv files: \" + arg)\n\n######################################################################\n# Helpers\n\ndef convdate(d):\n return datetime.datetime.strptime(d, \"%Y-%m-%dT%H:%M:%S\")\n\ndef int_or_null(i):\n if i is None:\n return i\n return int(i)\n\nseats_by_name = {}\ndef find_seat(seat_name):\n if seat_name not in seats_by_name and '&' in seat_name:\n seat_name = seat_name.replace(\"&\", \"and\")\n\n if seat_name not in seats_by_name:\n raise Exception(\"Could not find seat \" + seat_name)\n\n return seats_by_name[seat_name]\n\ndef log(msg):\n print datetime.datetime.now(), msg\n\ndef put_in_batches(models, limit = 250):\n tot = len(models)\n c = 0\n while len(models) > 0:\n put_models = models[0:limit]\n log(\" db.put batch \" + str(c) + \", size \" + str(len(put_models)) + \", total \" + str(tot))\n db.put(put_models)\n models = models[limit:]\n c += 1\n\n######################################################################\n# Load from YourNextMP\n\n# Find out which constituencies (seats) do not allow updates to local issues\n# any more (i.e. 
because a survey has already been sent out)\ndef get_frozen_local_issues_seats():\n log(\"Getting seats which are frozen to local issues changes\")\n frozen_seats = {}\n fs = Seat.all().filter(\"frozen_local_issues =\", True).fetch(100)\n while fs:\n for f in fs:\n log(\" Seat is frozen to local issues changes: \" + f.name)\n frozen_seats[f.key().name()] = f\n fs = Seat.all().filter(\"frozen_local_issues =\",True).filter('__key__ >', fs[-1].key()).fetch(100)\n\n return frozen_seats\n\ndef load_from_ynmp(ynmp, frozen_seats):\n # Put parties in datastore - don't worry about deleted ones, they just\n # won't be referenced by other tables.\n parties_by_key = {}\n for party_id, party_data in ynmp[\"Party\"].iteritems():\n key_name = party_id\n party = Party(\n ynmp_id = int(party_id),\n name = party_data[\"name\"],\n code = party_data[\"code\"],\n image_id = int_or_null(party_data[\"image_id\"]),\n created = convdate(party_data[\"created\"]),\n updated = convdate(party_data[\"updated\"]),\n key_name = key_name\n )\n log(\" Storing party \" + party.name)\n parties_by_key[key_name] = party\n log(\"Putting all parties\")\n put_in_batches(parties_by_key.values())\n\n # Put candidates in datastore - don't worry about deleted ones, they\n # just won't be referenced by a candidacy\n candidates_by_key = {}\n for candidate_id, candidate_data in ynmp[\"Candidate\"].iteritems():\n if \"status\" not in candidate_data:\n raise Exception(\"No status entry for \" + str(candidate_data))\n key_name = candidate_id\n candidate = Candidate(\n ynmp_id = int(candidate_id),\n name = candidate_data[\"name\"],\n code = candidate_data[\"code\"],\n status = candidate_data[\"status\"],\n email = candidate_data[\"email\"],\n party = parties_by_key[candidate_data[\"party_id\"]],\n image_id = int_or_null(candidate_data[\"image_id\"]),\n created = convdate(candidate_data[\"created\"]),\n updated = convdate(candidate_data[\"updated\"]),\n key_name = key_name\n )\n log(\" Storing candidate \" + 
candidate.name)\n candidates_by_key[key_name] = candidate\n log(\"Putting all candidates\")\n put_in_batches(candidates_by_key.values())\n\n # Put seats in datastore - don't worry about deleted ones, they\n # just won't be referenced by a candidacy\n seats_by_key = {}\n for seat_id, seat_data in ynmp[\"Seat\"].iteritems():\n key_name = seat_id\n seat = Seat(\n ynmp_id = int(seat_id),\n name = seat_data[\"name\"],\n code = seat_data[\"code\"],\n created = convdate(seat_data[\"created\"]),\n updated = convdate(seat_data[\"updated\"]),\n key_name = key_name\n )\n if key_name in frozen_seats:\n seat.frozen_local_issues = True\n log(\" Storing seat \" + seat.name)\n seats_by_key[key_name] = seat\n seats_by_name[seat.name] = seat\n log(\"Putting all seats\")\n put_in_batches(seats_by_key.values())\n\n # Get list of existing candiacies in remote datastore\n # in batches due to 1000 entity at a time limit, as per http://code.google.com/appengine/articles/remote_api.html\n log(\"Getting list of Candidacies\")\n candidacies = Candidacy.all().filter(\"deleted =\", False).fetch(100)\n to_be_marked_deleted = {}\n while candidacies:\n for candidacy in candidacies:\n key_name = candidacy.key().name()\n log(\"Marking before have candidacy key \" + key_name)\n to_be_marked_deleted[key_name] = candidacy\n candidacies = Candidacy.all().filter(\"deleted =\", False).filter('__key__ >', candidacies[-1].key()).fetch(100)\n\n # Loop through new dump of candidacies from YourNextMP, adding new ones\n candidacies_by_key = {}\n for candidacy_id, candidacy_data in ynmp[\"Candidacy\"].iteritems():\n candidate = candidates_by_key[candidacy_data[\"candidate_id\"]]\n assert candidate.status in ['standing', 'standing_down', 'not-standing']\n if candidate.status == 'standing_down' or candidate.status == 'not-standing':\n continue\n\n key_name = candidacy_data[\"seat_id\"] + \"-\" + candidacy_data[\"candidate_id\"]\n\n # find existing entry if there is one, or else make new one\n if key_name in 
to_be_marked_deleted:\n candidacy = to_be_marked_deleted[key_name]\n else:\n candidacy = Candidacy(key_name = key_name)\n\n # fill in values\n candidacy.ynmp_id = int(candidacy_id)\n candidacy.seat = seats_by_key[candidacy_data[\"seat_id\"]]\n candidacy.candidate = candidate\n candidacy.created = convdate(candidacy_data[\"created\"])\n candidacy.updated = convdate(candidacy_data[\"updated\"])\n candidacy.deleted = False\n # make sure it has a survey token\n if not candidacy.survey_token:\n log(\"Generating survey token for \" + candidacy.seat.name + \" \" + candidacy.candidate.name)\n candidacy.generate_survey_token() # this does save too, since it logs\n log(\"Storing candidacy \" + candidacy.seat.name + \" \" + candidacy.candidate.name)\n candidacies_by_key[key_name] = candidacy\n\n # record we still have this candidacy\n if key_name in to_be_marked_deleted:\n del to_be_marked_deleted[key_name]\n log(\"Putting all candidacies\")\n put_in_batches(candidacies_by_key.values())\n\n # See which candidacies are left, i.e. 
are deleted\n for key_name, candidacy in to_be_marked_deleted.iteritems():\n log(\"Marking deleted \" + candidacy.seat.name + \" \" + candidacy.candidate.name)\n candidacy.deleted = True\n log(\"Putting marked deleted candidacies\")\n put_in_batches(to_be_marked_deleted.values())\n\n######################################################################\n# Load from DemocracyClub\n\n fs = Seat.all().filter(\"frozen_local_issues =\", True).fetch(100)\n while fs:\n for f in fs:\n log(\" Seat is frozen to local issues changes: \" + f.name)\n frozen_seats[f.key().name()] = f\n fs = Seat.all().filter(\"frozen_local_issues =\",True).filter('__key__ >', fs[-1].key()).fetch(100)\n\n\ndef load_from_democlub(csv_files, frozen_seats):\n # Get list of existing refined issues in remote datastore, so can track what to delete\n log(\"Getting list of refined issues\")\n refined_issues = RefinedIssue.all().filter(\"deleted =\", False).fetch(100)\n to_be_marked_deleted = {}\n while refined_issues:\n for refined_issue in refined_issues:\n key_name = refined_issue.key().name()\n log(\" Marking before have refined issue key \" + key_name)\n to_be_marked_deleted[key_name] = refined_issue\n refined_issues = RefinedIssue.all().filter(\"deleted =\", False).filter('__key__ >', refined_issues[-1].key()).fetch(100)\n\n # Load in CSV file and create/update all the issues\n refined_issues_by_key = {}\n for csv_file in csv_files:\n log(\"Reading CSV file \" + csv_file)\n reader = csv.reader(open(csv_file, \"rb\"))\n for row in reader:\n\n if len(row) == 6:\n row.append(None)\n (democlub_id, question, reference_url, seat_name, created, updated, short_name) = row\n key_name = democlub_id\n\n # DemocracyClub has this constituency without its accent, YourNextMP has it with it.\n seat_name = seat_name.replace(\"Ynys Mon\", \"Ynys Môn\")\n seat = find_seat(seat_name.decode('utf-8'))\n\n if seat.key().name() in frozen_seats:\n log(\" Frozen seat \" + seat_name + \", not storing issue: \" + question)\n 
else:\n refined_issue = RefinedIssue(\n democlub_id = int(democlub_id),\n question = question.decode('utf-8'),\n reference_url = reference_url.decode('utf-8'),\n short_name = short_name and short_name.decode('utf-8') or None,\n national = (seat.name == 'National'),\n seat = seat,\n created = convdate(created),\n updated = convdate(updated),\n key_name = key_name\n )\n log(\" Storing local issue for \" + seat_name + \": \" + question)\n refined_issues_by_key[key_name] = refined_issue\n\n # record we still have this issue\n if key_name in to_be_marked_deleted:\n del to_be_marked_deleted[key_name]\n log(\"Putting all refined issues\")\n put_in_batches(refined_issues_by_key.values())\n\n # See which refined issues are left, i.e. are deleted\n for key_name, refined_issue in to_be_marked_deleted.iteritems():\n log(\" Marking deleted issue for \" + refined_issue.seat.name + \":\" + refined_issue.question)\n refined_issue.deleted = True\n log(\"Putting marked deleted refined issues\")\n put_in_batches(to_be_marked_deleted.values())\n\n\n######################################################################\n# Main\n\n# Configure connection via remote_api to datastore - after this\n# data store calls are remote\nlog(\"Connecting to \" + options.host)\ndef auth_func():\n return (options.email, getpass.getpass('Password:'))\nremote_api_stub.ConfigureRemoteDatastore('theyworkforyouelection', '/remote_api', auth_func, servername=options.host)\n\n# Load in extra files\nif options.fetch:\n log(\"Fetching latest Democracy Club CSV file\")\n democlub_file = \"/tmp/load_freshdata_democracy_club.csv\"\n democlub_h = open(democlub_file, 'w')\n democlub_h.write(urllib2.urlopen(DEMOCLUB_URL).read())\n democlub_h.close()\n args.append(democlub_file)\n \n log(\"Fetching latest YourNextMP JSON file\")\n ynmp_url = YOURNEXTMP_URL % (settings.YOURNEXTMP_API_TOKEN)\n ynmp_file = \"/tmp/load_freshdata_yournextmp.json\"\n ynmp_h = open(ynmp_file + \".gz\", 'w')\n 
ynmp_h.write(urllib2.urlopen(ynmp_url).read())\n ynmp_h.close()\n ynmp_h = open(ynmp_file, 'w')\n ynmp_h.write(gzip.GzipFile(ynmp_file + \".gz\").read())\n ynmp_h.close()\n args.append(ynmp_file)\nlog(\"File list: \" + str(args))\n\n# Which seats are frozen to changes in local issues?\nfrozen_seats = get_frozen_local_issues_seats()\n\n# Load in JSON files, merging as we go\nynmp = {}\nfor arg in args:\n if re.search(\"(\\.json)$\", arg):\n content = open(arg).read()\n json_load = json.loads(content)\n \n for k, v in json_load.iteritems():\n if k in ynmp:\n ynmp[k].update(json_load[k])\n else:\n ynmp[k] = json_load[k]\nload_from_ynmp(ynmp, frozen_seats)\n\n# Get list of CSV files\ncsv_files = []\nfor arg in args:\n if re.search(\"(\\.csv)$\", arg):\n csv_files.append(arg)\nload_from_democlub(csv_files, frozen_seats)\n\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2010,"string":"2,010"}}},{"rowIdx":41165,"cells":{"__id__":{"kind":"number","value":3865470577782,"string":"3,865,470,577,782"},"blob_id":{"kind":"string","value":"ed36eff765d5260da2c27481e4fcbb010d38defd"},"directory_id":{"kind":"string","value":"0d5b73ef3f531fb8a5cacf71548309b30835c1aa"},"path":{"kind":"string","value":"/lib/prompt.py"},"content_id":{"kind":"string","value":"06fcaf73d18514be0f06b975af1a5e2291d57252"},"detected_licenses":{"kind":"list like","value":["GPL-3.0-only","GPL-3.0-or-later","GPL-1.0-or-later"],"string":"[\n \"GPL-3.0-only\",\n \"GPL-3.0-or-later\",\n 
\"GPL-1.0-or-later\"\n]"},"license_type":{"kind":"string","value":"non_permissive"},"repo_name":{"kind":"string","value":"nsubiron/nscmd"},"repo_url":{"kind":"string","value":"https://github.com/nsubiron/nscmd"},"snapshot_id":{"kind":"string","value":"82dacb779d58bbf16d773f118be67881f275e22a"},"revision_id":{"kind":"string","value":"a3775f53d748638dd61b57c1d242124debf13694"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2016-09-05T15:53:43.668841","string":"2016-09-05T15:53:43.668841"},"revision_date":{"kind":"timestamp","value":"2013-06-04T18:57:25","string":"2013-06-04T18:57:25"},"committer_date":{"kind":"timestamp","value":"2013-06-04T18:57:25","string":"2013-06-04T18:57:25"},"github_id":{"kind":"number","value":9841521,"string":"9,841,521"},"star_events_count":{"kind":"number","value":1,"string":"1"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"import platform\n\n# Ask a yes/no question via input() and return their answer.\n# 'question' is a string that is presented to the user.\n# 'default' is the presumed answer if the user just hits .\n# It must be 'yes' (the default), 'no' or None (meaning an answer is required of\n# the user). 
The 'answer' return value is one of 'yes' or 'no'.\ndef yes_no(question, default='yes'):\n valid = {'yes': True, 'y': True, 'no': False, 'n': False}\n prompts = {True: '[Y/n]', False: '[y/N]', None: '[y/n]'}\n try:\n if default is not None:\n default = valid[default.lower()]\n prompt = prompts[default]\n except KeyError:\n raise ValueError('Invalid default answer \\'%s\\'.' % default)\n while True:\n choice = raw_input('%s %s: ' % (question, prompt)).lower()\n if default is not None and choice == '':\n return default\n elif choice in valid:\n return valid[choice]\n else:\n print('Please answer \\'%s\\'.' % '\\' or \\''.join(valid.keys()))\n\nif platform.system() == 'Windows':\n def color_string(string, *attributes):\n return string\nelse:\n def color_string(string, *attributes):\n \"\"\" If 'bold', it must be last attribute.\"\"\"\n attrmap = {\n 'bold': '1',\n 'black': '0;30',\n 'blue': '0;34',\n 'brown': '0;33',\n 'cyan': '0;36',\n 'dark gray': '1;30',\n 'green': '0;32',\n 'light blue': '1;34',\n 'light cyan': '1;36',\n 'light gray': '0;37',\n 'light green': '1;32',\n 'light purple': '1;35',\n 'light red': '1;31',\n 'purple': '0;35',\n 'red': '0;31',\n 'white': '1;37',\n 'yellow': '1;33'}\n attrstr = ';'.join(a for a in map(attrmap.get, attributes) if a is not None)\n return '\\x1b[%sm%s\\x1b[0m' % (attrstr, 
string)\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2013,"string":"2,013"}}},{"rowIdx":41166,"cells":{"__id__":{"kind":"number","value":5566277653166,"string":"5,566,277,653,166"},"blob_id":{"kind":"string","value":"9f287a8ae5f99998c273670d6229e0ca4f14db44"},"directory_id":{"kind":"string","value":"a40897e5221f837a7c270ceb3a625197b64402e2"},"path":{"kind":"string","value":"/plugins/plugin_help.py"},"content_id":{"kind":"string","value":"e8063ee903614db9d5d400e995d86e65a1f09734"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"rogovvladimir/xmpp-bot"},"repo_url":{"kind":"string","value":"https://github.com/rogovvladimir/xmpp-bot"},"snapshot_id":{"kind":"string","value":"02a677bee6aba77d6e021c4e893b078d37193224"},"revision_id":{"kind":"string","value":"bd0bda7985295cfa5fcbef92256dfbd07c6a1b59"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-21T12:39:29.256985","string":"2021-01-21T12:39:29.256985"},"revision_date":{"kind":"timestamp","value":"2012-07-19T15:23:46","string":"2012-07-19T15:23:46"},"committer_date":{"kind":"timestamp","value":"2012-07-19T15:23:46","string":"2012-07-19T15:23:46"},"github_id":{"kind":"number","value":2190618,"string":"2,190,618"},"star_events_count":{"kind":"number","value":2,"string":"2"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"
kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"import re\n\nfrom . import BaseCommand, commands\n\nfrom twilix.base.myelement import BreakStanza\n\nclass helpCommand(BaseCommand):\n \n COMMAND = u'help'\n HELP = u'get help'\n COMMAND_REGEX = re.compile(ur'^(?:help)?(.*)$')\n \n def chatHandler(self):\n res = u''\n cmnd = self.cmdpars.group(1)\n if cmnd:\n res = u'[%s] is a bad command for me' % self.cmdpars.group()\n helpdict = {}\n for cmd in commands:\n helpdict[cmd.COMMAND] = getattr(cmd, 'HELP', \n u\"(haven't help for this command)\")\n res += u'\\nThere are :\\n\\t%s\\nlist of \\\ncommands, supported by this bot' % \\\n '\\n\\t'.join(['[%s] -- %s;' % \\\n (cmd, helpdict[cmd]) \\\n for cmd in sorted(helpdict)])\n reply = self.get_reply()\n reply.body = res\n return (reply, BreakStanza())\n \n def groupchatHandler(self):\n res = u''\n cmnd = self.cmdpars.group(1)\n if cmnd:\n return BreakStanza()\n helpdict = {}\n for cmd in commands:\n helpdict[cmd.COMMAND] = getattr(cmd, 'HELP', \n u\"(haven't help for this command)\")\n res += u'\\nThere are :\\n\\t%s\\nlist of \\\ncommands, supported by this bot' % \\\n '\\n\\t'.join(['[%s] -- %s;' % \\\n (cmd, helpdict[cmd]) \\\n for cmd in sorted(helpdict)])\n reply = self.get_reply()\n reply.body = u'%s: %s' %(reply.to.resource, res)\n reply.to = reply.to.bare()\n return (reply, 
BreakStanza())\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2012,"string":"2,012"}}},{"rowIdx":41167,"cells":{"__id__":{"kind":"number","value":16939351019605,"string":"16,939,351,019,605"},"blob_id":{"kind":"string","value":"b502ae178207d72543ed7894228158204793ce74"},"directory_id":{"kind":"string","value":"a1101dd9d2a37c38254e365e0723bfb06101f3c0"},"path":{"kind":"string","value":"/CSSE1001Assignment3/circuit_editor_csse1k/circuit_gui/objectwidgets.py"},"content_id":{"kind":"string","value":"c7b1944bf383c1d4be338bce6d8ee4acf5927d0e"},"detected_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"license_type":{"kind":"string","value":"permissive"},"repo_name":{"kind":"string","value":"joeandersen/CSSE7030"},"repo_url":{"kind":"string","value":"https://github.com/joeandersen/CSSE7030"},"snapshot_id":{"kind":"string","value":"a7712c749838a9ed9a19ed59fa955fd086360af5"},"revision_id":{"kind":"string","value":"bd6a49a754c28de14cdcb8de20e33bf50b5331c0"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2020-04-23T08:18:19.165696","string":"2020-04-23T08:18:19.165696"},"revision_date":{"kind":"timestamp","value":"2014-10-14T07:35:39","string":"2014-10-14T07:35:39"},"committer_date":{"kind":"timestamp","value":"2014-10-14T07:35:39","string":"2014-10-14T07:35:39"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"nu
ll"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"\"\"\"\nThis file contains the main classes of the GUI system, comprising the actual\ncircuit editing widget and its components. \n\nIt also contains a set of 'Sleepy' classes, serialization and analysis adapter\nclasses used in tandem with the corresponding GUI widgets for saving/loading\nof layouts, and for performing circuit analysis.\n\"\"\"\n\n# import that barge, tote that bale!\nfrom PyQt4 import *\nfrom PyQt4.QtCore import *\nfrom PyQt4.QtGui import *\n#from py.magic import greenlet\nimport circuit as analysis\nfrom dialogs import AttributeDialog\n\nclass ObjectAttributeModel(QAbstractItemModel):\n \"\"\"A Qt item model representing the attributes of a circuit component.\n \n C'tor: ObjectAttributeModel(ObjectWidget)\n \"\"\"\n def __init__(self, object):\n QAbstractItemModel.__init__(self)\n self._object = object\n \n # all these methods' signatures and function are documented extensively in the Qt documentation\n # so there is no real need to duplicate that effort here in a subclass\n \n def index(self, row, column, parent):\n return self.createIndex(row, column)\n \n def flags(self, index):\n # only make the values column editable\n if index.column() == 0:\n return Qt.ItemIsEnabled | Qt.ItemIsSelectable\n elif index.column() == 1:\n return Qt.ItemIsEnabled | Qt.ItemIsSelectable | Qt.ItemIsEditable\n \n def headerData(self, section, orientation, role):\n if orientation == Qt.Horizontal:\n if role == Qt.DisplayRole:\n if section == 0:\n return QVariant(\"Attribute\")\n elif section == 1:\n return QVariant(\"Value\")\n return QVariant()\n \n def data(self, index, role):\n if role in [Qt.DisplayRole, Qt.EditRole]:\n if index.column() == 0:\n return QVariant(self._object.attributes()[index.row()])\n elif index.column() == 1:\n return 
QVariant(self._object[self._object.attributes()[index.row()]])\n else:\n return QVariant()\n \n def rowCount(self, index):\n return len(self._object.attributes())\n \n def columnCount(self, index):\n return 2\n \n def setData(self, index, value, role):\n if role == Qt.EditRole:\n if index.column() == 1:\n # set the attribute on the object\n self._object[self._object.attributes()[index.row()]] = value.toString()\n #self.emit(SIGNAL(\"dataChanged(QModelIndex,QModelIndex)\"), index, index)\n return True\n return False\n \nclass ObjectWidget(QWidget):\n \"\"\"The base class for all circuit object widgets. It implements a lot of basic\n functionality, including selecting, dragging, connection/disconnection and attribute storage.\n \n Each widget has a number of anchor points where connections can be made. \n Subclasses must set these up in their constructors, by changing the properties\n self._anchorPoints, self._connections (set it to empty lists initially)\n and self._maxConnections. See NodeWidget and friends below for examples of\n subclasses.\n \n SIGNALS:\n selected() - emitted when this object is selected\n unselected()\n connectionsChanged()\n clicked(int, QPoint) - emitted when the object is clicked,\n with distance from nearest anchor and which\n anchor it is.\n moved(QPoint) - emitted when the object moves.\n flipped() - emitted when the object flips orientation\n \"\"\"\n def __init__(self, parent=None):\n QWidget.__init__(self, parent)\n self._orientation = 'h'\n self._isSelected = False\n self._isDragging = False\n self._anchorPoints = [(0,0)]\n self._connections = {self._anchorPoints[0]: []}\n self._maxConnections = {self._anchorPoints[0]: 0}\n self._attributes = {'nickname': ''}\n \n # attribute model\n self._attrmodel = ObjectAttributeModel(self)\n \n # slot connections\n QObject.connect(self, SIGNAL(\"selected()\"), self, SLOT(\"update()\"))\n QObject.connect(self, SIGNAL(\"unselected()\"), self, SLOT(\"update()\"))\n \n def __getitem__(self, attr):\n 
\"\"\"Get an attribute of the object.\"\"\"\n return self._attributes[attr]\n \n def __setitem__(self, attr, value):\n \"\"\"Set an attribute of the object.\"\"\"\n self._attributes[attr] = value\n \n def attributes(self):\n \"\"\"Returns the list of available attributes.\n \n attributes() -> list\n \"\"\"\n return self._attributes.keys()\n \n def anchorPoints(self):\n \"\"\"Returns the list of available anchorpoints.\n \n anchorPoints() -> list\n \"\"\"\n return self._anchorPoints\n \n def connections(self, ancPoint=None):\n \"\"\"If given an anchor point, this functionr returns a list of \n connections associated with that anchor point. Otherwise, it will\n return all connections associated with this object.\n \n connections are of the form: list, tuple>\n \n connections([tup]) -> list\n \"\"\"\n if ancPoint:\n return [[(self, ancPoint), (other, other.anchorOf(self))] for other in self._connections[ancPoint]]\n else:\n cons = []\n for ap in self._anchorPoints:\n cons = cons + self.connections(ap)\n return cons\n \n def anchorOf(self, other):\n \"\"\"Gets the anchor point that connects this object to other.\n anchorOf(object) -> QPoint\n \"\"\"\n for x in self._connections:\n if other in self._connections[x]:\n return x\n return None\n \n def canConnect(self, ancPoint):\n \"\"\"Is there space for another connection on the given anchor point?\n \n canConnect(QPoint) -> bool\n \"\"\"\n return (len(self._connections[ancPoint]) < self._maxConnections[ancPoint] or self._maxConnections[ancPoint] == -1)\n \n def connectTo(self, ancPoint, object):\n \"\"\"Connects this object to another via the given anchor point, if possible,\n in a one-way fashion.\n \n connectTo(QPoint, object) -> void\n \"\"\"\n if self.canConnect(ancPoint):\n self._connections[ancPoint].append(object)\n self.emit(SIGNAL(\"connectionsChanged()\"))\n \n def connect(self, ancPoint, object, otherAncPoint):\n \"\"\"Connects this object to another via the two given anchors, if possible,\n in a two-way 
fashion.\n \n connect(QPoint, object ,QPoint) -> void\n \"\"\"\n if self.canConnect(ancPoint) and object.canConnect(otherAncPoint):\n self.connectTo(ancPoint, object)\n object.connectTo(otherAncPoint, self)\n \n def disconnectFrom(self, ancPoint, object):\n \"\"\"Destroys one end of a connection towards object via anchor point.\"\"\"\n if object in self._connections[ancPoint]:\n self._connections[ancPoint].remove(object)\n self.emit(SIGNAL(\"connectionsChanged()\"))\n \n def disconnect(self, ancPoint, object, otherAncPoint):\n \"\"\"Destroys an entire connection towards object's otherAncPoint \n via this object's ancPoint.\"\"\"\n self.disconnectFrom(ancPoint, object)\n object.disconnectFrom(otherAncPoint, self)\n \n def mouseDoubleClickEvent(self, event):\n \"\"\"Handles a mouse double click by opening an attribute dialog\"\"\"\n self._attrdlg = AttributeDialog(self._attrmodel)\n self._attrdlg.show()\n \n def mousePressEvent(self, event):\n \"\"\"Handles a mouse press event on the object widget. Prepares to\n start the drag-movement process, and attempts to select the widget.\"\"\"\n\t\tself._isDragging = True\n\t\tself._dragLastPos = QPoint(event.globalX(), event.globalY())\n\t\tself._dragStartPos = QPoint(self.x(), self.y())\n \n # find nearest anchor point to where the user clicked\n anchorPoints = [QPoint(x,y) for x,y in self._anchorPoints]\n dragPoint = QPoint(event.x(), event.y())\n leastDist = (dragPoint - anchorPoints[0]).manhattanLength()\n leastPoint = anchorPoints[0]\n for p in anchorPoints:\n dist = (dragPoint - p).manhattanLength()\n if dist < leastDist:\n leastDist = dist\n leastPoint = p\n self._dragClosestAnchor = leastPoint\n \n # emit the Qt signal\n self.emit(SIGNAL(\"clicked(int,QPoint)\"), leastDist, leastPoint)\n\t\t#self.parent().selectMe(self)\n \n def mouseMoveEvent(self, event):\n \"\"\"Handles mouse movement on the widget. 
If we're in drag mode, ie\n a button is held down, the widget will move by snapped intervals.\"\"\"\n\t\tif self._isDragging:\n\t\t\tcurPos = QPoint(self.x(), self.y())\n\t\t\tnewPos = self._dragStartPos + (event.globalPos() - self._dragLastPos)\n snapPos = self.parent().snap(newPos)\n\t\t\tself.move(snapPos)\n\t\t\tself.emit(SIGNAL(\"moved(QPoint)\"), snapPos)\n\t\t\t#self.lastPos = event.globalPos()\n \n def mouseReleaseEvent(self, event):\n\t\tself._isDragging = False\n \n def flip(self):\n \"\"\"Flips the object's orientation over.\"\"\"\n self._orientation = {'h': 'v', 'v': 'h'}[self._orientation]\n self.update()\n self.emit(SIGNAL(\"flipped()\"))\n \n def orientation(self):\n \"\"\"Returns the object's present orientation.\n \n orientation() -> char (one of 'h' or 'v')\n \"\"\"\n return self._orientation\n \n def selected(self):\n \"\"\"Returns True if the object is selected, otherwise False.\"\"\"\n return self._isSelected\n \n def select(self):\n \"\"\"Sets this object as selected.\"\"\"\n self._isSelected = True\n self.emit(SIGNAL(\"selected()\"))\n \n def unselect(self):\n \"\"\"Sets this object as unselected.\"\"\"\n self._isSelected = False\n self.emit(SIGNAL(\"unselected()\"))\n \n def drawSelectedOutline(self, painter):\n \"\"\"A convenience function for subclasses to paint a red box around\n the widget when it is selected.\n \n drawSelectedOutline(QPainter) -> void\n \"\"\"\n \n if self._isSelected:\n\t\t\tpainter.setPen(Qt.red)\n\t\t\tpainter.drawRect(QRect(0,0,self.width(), self.height()))\n\nclass CircuitWidget(QWidget):\n \"\"\"The parent grid widget that holds circuit component widgets.\"\"\"\n \n def __init__(self, parent=None):\n QWidget.__init__(self, parent)\n self.resize(600,600)\n self._grid = 30\n self.setAcceptDrops(True)\n \n def addWidget(self, widget):\n \"\"\"Connects a child widget's signals appropriately to make sure\n everything works as expected (redrawing of connections etc)\"\"\"\n # connect signals on the child\n 
QObject.connect(widget, SIGNAL(\"flipped()\"), self, SLOT(\"update()\"))\n QObject.connect(widget, SIGNAL(\"clicked(int,QPoint)\"), self._childClickedLambda(widget))\n QObject.connect(widget, SIGNAL(\"connectionsChanged()\"), self, SLOT(\"update()\"))\n QObject.connect(widget, SIGNAL(\"moved(QPoint)\"), self, SLOT(\"update()\"))\n \n def _childClickedLambda(self, child):\n \"\"\"Generates a lambda for calling the _childClicked function for a given\n child.\n \n _childClickedLambda(object) -> lambda\n \"\"\"\n return (lambda dist, point: self._childClicked(child, dist, (point.x(),point.y())))\n \n def _childClicked(self, child, dist, point):\n \"\"\"Handles a child click event.\"\"\"\n for c in self.children():\n c.unselect()\n child.select()\n self.emit(SIGNAL(\"childClicked\"), child, dist, point)\n \n def dragEnterEvent(self, event):\n event.acceptProposedAction()\n \n def dropEvent(self, event):\n \"\"\"Circuit widget accepts drag-drops from a list widget\n with items carrying particular texts.\"\"\"\n try:\n wtype = event.source().selectedItems()[0].text(0)\n except:\n print \"Not from a source list, can't drop this.\"\n return\n \n if wtype == 'Normal node':\n n = NodeWidget(self)\n n.move(self.snap(event.pos()))\n n.show()\n self.addWidget(n)\n elif wtype == 'Voltage source':\n vs = VoltageSourceWidget(self)\n vs.move(self.snap(event.pos()))\n vs.show()\n self.addWidget(vs)\n elif wtype == 'Current source':\n cs = CurrentSourceWidget(self)\n cs.move(self.snap(event.pos()))\n cs.show()\n self.addWidget(cs)\n elif wtype == 'Shockley diode':\n d = ShockleyWidget(self)\n d.move(self.snap(event.pos()))\n d.show()\n self.addWidget(d)\n elif wtype == 'Resistor':\n r = ResistorWidget(self)\n r.move(self.snap(event.pos()))\n r.show()\n self.addWidget(r)\n else:\n print \"dropped mimedata:\", wtype\n return\n event.setDropAction(Qt.CopyAction)\n event.accept()\n self.update()\n \n def snap(self, point):\n \"\"\"Takes a given point, and snaps it based on the grid 
configuration\n for this widget.\n \n snap(QPoint) -> QPoint\n \"\"\"\n x = round(float(point.x()) / self._grid) * self._grid\n y = round(float(point.y()) / self._grid) * self._grid\n return QPoint(x,y)\n \n def grid(self):\n \"\"\"Returns the grid unit size.\n \n grid() -> int\n \"\"\"\n return self._grid\n \n def setGrid(self, size):\n \"\"\"Sets the grid unit size.\n \n setGrid(int) -> void\n \"\"\"\n self._grid = size\n \n def mousePressEvent(self, event):\n\t\t# when we see a mouse click event it's not hitting one of our children\n\t\t# so deselect anything selected\n\t\tfor c in self.children():\n\t\t\tc.unselect()\n\t\t\tc.update()\n\t\t\n\t\tself.update()\n \n def findConnections(self):\n \"\"\"Finds all unique connections between all child widgets of this\n circuit widget. Used in painting to determine which links should be\n drawn.\n \n findConnections() -> list\n (for the signature of a connection, see ObjectWidget.connections()\n \"\"\"\n found = []\n for c in self.children():\n for cn in c.connections():\n if len(cn)>0:\n if [cn[0],cn[1]] not in found and [cn[1],cn[0]] not in found:\n found.append(cn)\n return found\n \n def makeSleepy(self):\n sleepies,cons = self.makeSleepyWithHash()\n return (sleepies.values(), cons)\n \n def makeSleepyWithHash(self):\n sleepies = {}\n for c in self.children():\n sleepies[c] = c.makeSleepy()\n \n cons = []\n \n for c in self.findConnections():\n nc = [(sleepies[c[0][0]], c[0][1]), \\\n (sleepies[c[1][0]], c[1][1])]\n cons.append(nc)\n \n return (sleepies, cons)\n \n def killChildren(self):\n for c in self.children():\n c.deleteLater()\n self.update()\n \n def wakeUp(self, sleeptup):\n sleepies,cons = sleeptup\n \n wakies = {}\n for s in sleepies:\n wakies[s] = s.create(self)\n \n for c in cons:\n firstwidget = wakies[c[0][0]]\n firstpoint = c[0][1]\n secondwidget = wakies[c[1][0]]\n secondpoint = c[1][1]\n firstwidget.connect(firstpoint, secondwidget, secondpoint)\n \n self.update()\n # now we are awake\n \n\tdef 
paintEvent(self, event):\n\t\tpainter = QPainter(self)\n\t\tpainter.setRenderHint(QPainter.Antialiasing)\n \n # draw grid\n painter.setPen(Qt.gray)\n for y in range(0, self.height(), self._grid):\n painter.drawLine(QPoint(0, y), QPoint(self.width(), y))\n \n for x in range(0, self.width(), self._grid):\n painter.drawLine(QPoint(x, 0), QPoint(x, self.height()))\n \n # draw connections\n painter.setPen(Qt.black)\n cnx = self.findConnections()\n for c in cnx:\n point_a = QPoint(c[0][1][0],c[0][1][1]) + c[0][0].pos()\n point_b = QPoint(c[1][1][0],c[1][1][1])+c[1][0].pos()\n diff = point_a - point_b\n \n if abs(diff.x()) <= abs(diff.y()):\n painter.drawLine(point_a, QPoint(point_b.x(), point_a.y()))\n painter.drawLine(QPoint(point_b.x(), point_a.y()), point_b)\n else:\n painter.drawLine(point_a, QPoint(point_a.x(), point_b.y()))\n painter.drawLine(QPoint(point_a.x(), point_b.y()), point_b)\n \n \n \nclass SleepyNodeWidget():\n \"\"\"A 'sleepy' partner to the node widget. This partner contains all of the \n necessary information needed to recreate the node widget, minus connections,\n it is serializable, and also knows how to turn itself into an analysis object.\n \n C'tor: SleepyNodeWidget(QPoint, dict)\n \"\"\"\n\n def __init__(self, location, attributes):\n self._location = (location.x(), location.y())\n self._attributes = attributes\n \n def create(self, parent):\n n = NodeWidget(parent)\n parent.addWidget(n)\n n.show()\n n.move(QPoint(self._location[0], self._location[1]))\n for a in self._attributes:\n n[a] = self._attributes[a]\n return n\n \n def analyse(self, circuit):\n if not self._attributes['reference']:\n ref = None\n else:\n ref = float(self._attributes['reference'])\n \n n = analysis.Node(nickname=self._attributes['nickname'], reference=ref)\n circuit.nodes.append(n)\n return n\n \n \nclass NodeWidget(ObjectWidget):\n \"\"\"ObjectWidget subclass representing a simple circuit node.\"\"\"\n \n def __init__(self, parent=None):\n ObjectWidget.__init__(self, 
parent)\n self._midPoint = (15,15)\n self._anchorPoints = [self._midPoint]\n self._connections = {self._midPoint: []}\n self._maxConnections = {self._midPoint: -1}\n \n self['nickname'] = 'n'\n self['reference'] = ''\n \n self.resize(30,30)\n \n def makeSleepy(self):\n return SleepyNodeWidget(self.pos(), self._attributes)\n \n def paintEvent(self, pevent):\n painter = QPainter(self)\n\t\tpainter.setRenderHint(QPainter.Antialiasing)\n\t\tpainter.setPen(Qt.black)\n \n if self._orientation == 'v':\n painter.rotate(90.0)\n painter.translate(0.0, -30.0)\n\t\t\n\t\tpainter.drawEllipse(QRect(self.width()/2 - 10,self.height()/2 - 10,20,20))\n \n path = QPainterPath()\n path.addEllipse(QRectF(self.width()/2 - 10,self.height()/2 - 10,20,20))\n painter.fillPath(path, QColor(40,40,40))\n\t\t\n\t\tpainter.setPen(Qt.white)\n\t\tpainter.drawText(QRect(0,0,self.width(),self.height()), Qt.AlignCenter | Qt.AlignTop, self['nickname'])\n\t\t\n\t\tself.drawSelectedOutline(painter)\n\nclass SleepyVoltageSourceWidget():\n def __init__(self, location, attributes, orientation):\n self._location = (location.x(), location.y())\n self._attributes = attributes\n self._orientation = orientation\n \n def create(self, parent):\n vs = VoltageSourceWidget(parent)\n parent.addWidget(vs)\n vs.move(QPoint(self._location[0], self._location[1]))\n for a in self._attributes:\n vs[a] = self._attributes[a]\n if vs.orientation() != self._orientation: vs.flip()\n vs.show()\n return vs\n \n def analyse(self, circuit):\n n = analysis.IdealVoltageSource(nickname=self._attributes['nickname'], voltage=float(self._attributes['voltage']))\n circuit.components.append(n)\n return n\n\nclass VoltageSourceWidget(ObjectWidget):\n \"\"\"ObjectWidget subclass representing a simple independent voltage source.\n \n Attributes:\n voltage\n \n *Widget changes size based on orientation.\n \"\"\"\n def __init__(self, parent=None):\n ObjectWidget.__init__(self, parent)\n self.resize(90,30)\n self._posTerminal = (90,15)\n 
self._negTerminal = (0,15)\n \n self['nickname'] = 'vs'\n self['voltage'] = '0'\n \n self._anchorPoints = [self._posTerminal, self._negTerminal]\n self._connections = {self._posTerminal: [], self._negTerminal: []}\n self._maxConnections = {self._posTerminal: 1, self._negTerminal: 1}\n \n # event handlers\n QObject.connect(self, SIGNAL(\"flipped()\"), self._flipped)\n \n def makeSleepy(self):\n return SleepyVoltageSourceWidget(self.pos(), self._attributes, self._orientation)\n \n def paintEvent(self, pevent):\n \"\"\"Paints the widget.\"\"\"\n painter = QPainter(self)\n painter.setRenderHint(QPainter.Antialiasing)\n\t\tpainter.setPen(Qt.black)\n \n if self._orientation == 'v':\n painter.rotate(90.0)\n painter.translate(0.0, -30.0)\n \n painter.drawEllipse(QRect(34, 4, 22, 22))\n painter.setFont(QFont('Sans serif', 7))\n painter.drawText(QRect(0,0,90,30), Qt.AlignCenter | Qt.AlignTop, \"%s\\n%sV\" % (self['nickname'], self['voltage']))\n painter.drawLine(QPoint(0,15), QPoint(34,15))\n painter.drawLine(QPoint(56, 15), QPoint(90,15))\n painter.setFont(QFont('Sans serif', 11))\n painter.drawText(QRect(3,15,30,15), Qt.AlignLeft | Qt.AlignTop, \"-\")\n painter.drawText(QRect(56,15,30,15), Qt.AlignRight | Qt.AlignTop, \"+\")\n \n painter.resetMatrix()\n \n self.drawSelectedOutline(painter)\n \n def _flipped(self):\n \"\"\"Called after the object is flipped, to resize and reorient its terminals.\"\"\"\n if self._orientation == 'h':\n self.resize(90,30)\n newpos = (90,15)\n newneg = (0,15)\n self._anchorPoints = [newpos, newneg]\n self._connections = {newpos: self._connections[self._posTerminal], \\\n newneg: self._connections[self._negTerminal]}\n self._maxConnections = {newpos: 1, newneg: 1}\n self._posTerminal = newpos\n self._negTerminal = newneg\n \n elif self._orientation == 'v':\n self.resize(30,90)\n \n newneg = (15,0)\n newpos = (15,90)\n self._anchorPoints = [newpos, newneg]\n self._connections = {newpos: self._connections[self._posTerminal], \\\n newneg: 
self._connections[self._negTerminal]}\n self._maxConnections = {newpos: 1, newneg: 1}\n self._posTerminal = newpos\n self._negTerminal = newneg\n \n # emit the moved signal, this forces our parent to redraw connections\n # kind of a hack, but it works.\n self.emit(SIGNAL(\"moved(QPoint)\"), self.pos())\n self.update()\n \nclass SleepyCurrentSourceWidget():\n \"\"\"A 'sleepy' partner to the current source widget. This partner contains all of the \n necessary information needed to recreate the current source widget, minus connections, and\n it is serializable, and also knows how to turn itself into an analysis object.\n \n C'tor: SleepyCurrentSourceWidget(QPoint, dict, char)\n \"\"\"\n \n def __init__(self, location, attributes, orientation):\n self._location = (location.x(), location.y())\n self._attributes = attributes\n self._orientation = orientation\n \n def create(self, parent):\n vs = CurrentSourceWidget(parent)\n parent.addWidget(vs)\n vs.move(QPoint(self._location[0], self._location[1]))\n for a in self._attributes:\n vs[a] = self._attributes[a]\n if vs.orientation() != self._orientation: vs.flip()\n vs.show()\n return vs\n \n def analyse(self, circuit):\n n = analysis.IdealCurrentSource(nickname=self._attributes['nickname'], current=float(self._attributes['current']))\n circuit.components.append(n)\n return n\n\nclass CurrentSourceWidget(ObjectWidget):\n \"\"\"ObjectWidget subclass representing a simple independent current source.\n \n Attributes:\n current\n \n *Widget changes size based on orientation.\n \"\"\"\n def __init__(self, parent=None):\n ObjectWidget.__init__(self, parent)\n self.resize(90,30)\n self._posTerminal = (90,15)\n self._negTerminal = (0,15)\n \n self['nickname'] = 'cs'\n self['current'] = '0'\n \n self._anchorPoints = [self._posTerminal, self._negTerminal]\n self._connections = {self._posTerminal: [], self._negTerminal: []}\n self._maxConnections = {self._posTerminal: 1, self._negTerminal: 1}\n \n # event handlers\n 
QObject.connect(self, SIGNAL(\"flipped()\"), self._flipped)\n \n def makeSleepy(self):\n return SleepyCurrentSourceWidget(self.pos(), self._attributes, self._orientation)\n \n def paintEvent(self, pevent):\n \"\"\"Paints the widget.\"\"\"\n painter = QPainter(self)\n painter.setRenderHint(QPainter.Antialiasing)\n\t\tpainter.setPen(Qt.black)\n \n if self._orientation == 'v':\n painter.rotate(90.0)\n painter.translate(0.0, -30.0)\n \n painter.drawEllipse(QRect(34, 4, 22, 22))\n painter.setFont(QFont('Sans serif', 7))\n painter.drawText(QRect(0,0,90,30), Qt.AlignCenter | Qt.AlignTop, \"%s\\n%sA\" % (self['nickname'], self['current']))\n painter.drawLine(QPoint(0,15), QPoint(34,15))\n painter.drawLine(QPoint(56, 15), QPoint(90,15))\n \n path = QPainterPath()\n path.moveTo(38, 15)\n path.lineTo(53, 15)\n path.lineTo(48, 10)\n path.moveTo(53, 15)\n path.lineTo(48, 20)\n painter.drawPath(path)\n \n painter.resetMatrix()\n \n self.drawSelectedOutline(painter)\n \n def _flipped(self):\n \"\"\"Called after the object is flipped, to resize and reorient its terminals.\"\"\"\n if self._orientation == 'h':\n self.resize(90,30)\n newpos = (90,15)\n newneg = (0,15)\n self._anchorPoints = [newpos, newneg]\n self._connections = {newpos: self._connections[self._posTerminal], \\\n newneg: self._connections[self._negTerminal]}\n self._maxConnections = {newpos: 1, newneg: 1}\n self._posTerminal = newpos\n self._negTerminal = newneg\n \n elif self._orientation == 'v':\n self.resize(30,90)\n \n newneg = (15,0)\n newpos = (15,90)\n self._anchorPoints = [newpos, newneg]\n self._connections = {newpos: self._connections[self._posTerminal], \\\n newneg: self._connections[self._negTerminal]}\n self._maxConnections = {newpos: 1, newneg: 1}\n self._posTerminal = newpos\n self._negTerminal = newneg\n \n # emit the moved signal, this forces our parent to redraw connections\n # kind of a hack, but it works.\n self.emit(SIGNAL(\"moved(QPoint)\"), self.pos())\n self.update()\n \nclass 
SleepyResistorWidget():\n def __init__(self, location, attributes, orientation):\n self._location = (location.x(), location.y())\n self._attributes = attributes\n self._orientation = orientation\n \n def create(self, parent):\n r = ResistorWidget(parent)\n parent.addWidget(r)\n r.move(QPoint(self._location[0], self._location[1]))\n for a in self._attributes:\n r[a] = self._attributes[a]\n if r.orientation() != self._orientation: r.flip()\n r.show()\n return r\n \n def analyse(self, circuit):\n n = analysis.IdealResistor(nickname=self._attributes['nickname'], resistance=int(self._attributes['resistance']))\n circuit.components.append(n)\n return n\n\nclass ResistorWidget(ObjectWidget):\n def __init__(self, parent=None):\n ObjectWidget.__init__(self, parent)\n self.resize(90,30)\n self._firstTerminal = (90,15)\n self._secondTerminal = (0,15)\n \n self['nickname'] = 'r'\n self['resistance'] = '0'\n \n self._anchorPoints = [self._firstTerminal, self._secondTerminal]\n self._connections = {self._firstTerminal: [], self._secondTerminal: []}\n self._maxConnections = {self._firstTerminal: 1, self._secondTerminal: 1}\n \n # event handlers\n QObject.connect(self, SIGNAL(\"flipped()\"), self._flipped)\n \n def makeSleepy(self):\n return SleepyResistorWidget(self.pos(), self._attributes, self._orientation)\n \n def paintEvent(self, pevent):\n \"\"\"Paints the widget.\"\"\"\n painter = QPainter(self)\n painter.setRenderHint(QPainter.Antialiasing)\n\t\tpainter.setPen(Qt.black)\n \n if self._orientation == 'v':\n painter.rotate(90.0)\n painter.translate(0.0, -30.0)\n \n path = QPainterPath()\n path.moveTo(0,15)\n path.lineTo(30,15)\n path.lineTo(34,5)\n path.lineTo(38,20)\n path.lineTo(42,5)\n path.lineTo(46,20)\n path.lineTo(50,5)\n path.lineTo(54,20)\n path.lineTo(58,5)\n path.lineTo(60,15)\n path.lineTo(90,15)\n painter.drawPath(path)\n \n painter.setFont(QFont('Sans serif', 7))\n painter.drawText(QRect(0,0,90,30), Qt.AlignCenter | Qt.AlignBottom, \"%s: %s\" % 
(self['nickname'], self['resistance']))\n \n painter.resetMatrix()\n \n self.drawSelectedOutline(painter)\n \n def _flipped(self):\n \"\"\"Called after the object is flipped, to resize and reorient its terminals.\"\"\"\n if self._orientation == 'h':\n self.resize(90,30)\n newfirst = (90,15)\n newsecond = (0,15)\n self._anchorPoints = [newsecond, newfirst]\n self._connections = {newsecond: self._connections[self._secondTerminal], \\\n newfirst: self._connections[self._firstTerminal]}\n self._maxConnections = {newsecond: 1, newfirst: 1}\n self._secondTerminal = newsecond\n self._firstTerminal = newfirst\n \n elif self._orientation == 'v':\n self.resize(30,90)\n \n newsecond = (15,0)\n newfirst = (15,90)\n self._anchorPoints = [newsecond, newfirst]\n self._connections = {newsecond: self._connections[self._secondTerminal], \\\n newfirst: self._connections[self._firstTerminal]}\n self._maxConnections = {newsecond: 1, newfirst: 1}\n self._secondTerminal = newsecond\n self._firstTerminal = newfirst\n \n # emit the moved signal, this forces our parent to redraw connections\n # kind of a hack, but it works.\n self.emit(SIGNAL(\"moved(QPoint)\"), self.pos())\n self.update()\n \nclass SleepyShockleyWidget():\n def __init__(self, location, attributes, orientation):\n self._location = (location.x(), location.y())\n self._attributes = attributes\n self._orientation = orientation\n \n def create(self, parent):\n vs = ShockleyWidget(parent)\n parent.addWidget(vs)\n vs.move(QPoint(self._location[0], self._location[1]))\n for a in self._attributes:\n vs[a] = self._attributes[a]\n if vs.orientation() != self._orientation: vs.flip()\n vs.show()\n return vs\n \n def analyse(self, circuit):\n n = analysis.ShockleyDiode(nickname=self._attributes['nickname'], \\\n saturation_current=float(self._attributes['saturation_current']), \\\n thermal_voltage=float(self._attributes['thermal_voltage']), \\\n e_coeff=float(self._attributes['e_coeff']))\n circuit.components.append(n)\n return 
n\n\nclass ShockleyWidget(ObjectWidget):\n \"\"\"ObjectWidget subclass representing a shockley diode.\n \n Attributes:\n saturation_current=1e-12, thermal_voltage=25.85e-03, e_coeff=1.00\n \n *Widget changes size based on orientation.\n \"\"\"\n def __init__(self, parent=None):\n ObjectWidget.__init__(self, parent)\n self.resize(90,30)\n self._posTerminal = (90,15)\n self._negTerminal = (0,15)\n \n self['nickname'] = 'n'\n self['saturation_current'] = '1e-12'\n self['thermal_voltage'] = '25.85e-3'\n self['e_coeff'] = '1.00'\n \n self._anchorPoints = [self._posTerminal, self._negTerminal]\n self._connections = {self._posTerminal: [], self._negTerminal: []}\n self._maxConnections = {self._posTerminal: 1, self._negTerminal: 1}\n \n # event handlers\n QObject.connect(self, SIGNAL(\"flipped()\"), self._flipped)\n \n def makeSleepy(self):\n return SleepyShockleyWidget(self.pos(), self._attributes, self._orientation)\n \n def paintEvent(self, pevent):\n \"\"\"Paints the widget.\"\"\"\n painter = QPainter(self)\n painter.setRenderHint(QPainter.Antialiasing)\n\t\tpainter.setPen(Qt.black)\n \n if self._orientation == 'v':\n painter.rotate(90.0)\n painter.translate(0.0, -30.0)\n \n painter.setFont(QFont('Sans serif', 7))\n painter.drawText(QRect(0,0,90,30), Qt.AlignCenter | Qt.AlignTop, \"%s\" % (self['nickname']))\n \n path = QPainterPath()\n path.moveTo(0,15)\n path.lineTo(33,15)\n path.moveTo(33,5)\n path.lineTo(33,25)\n path.lineTo(55,15)\n path.lineTo(33,5)\n path.moveTo(55,5)\n path.lineTo(55,25)\n path.moveTo(55,15)\n path.lineTo(90,15)\n painter.drawPath(path)\n \n painter.resetMatrix()\n \n self.drawSelectedOutline(painter)\n \n def _flipped(self):\n \"\"\"Called after the object is flipped, to resize and reorient its terminals.\"\"\"\n if self._orientation == 'h':\n self.resize(90,30)\n newpos = (90,15)\n newneg = (0,15)\n self._anchorPoints = [newpos, newneg]\n self._connections = {newpos: self._connections[self._posTerminal], \\\n newneg: 
self._connections[self._negTerminal]}\n self._maxConnections = {newpos: 1, newneg: 1}\n self._posTerminal = newpos\n self._negTerminal = newneg\n \n elif self._orientation == 'v':\n self.resize(30,90)\n \n newneg = (15,0)\n newpos = (15,90)\n self._anchorPoints = [newpos, newneg]\n self._connections = {newpos: self._connections[self._posTerminal], \\\n newneg: self._connections[self._negTerminal]}\n self._maxConnections = {newpos: 1, newneg: 1}\n self._posTerminal = newpos\n self._negTerminal = newneg\n \n # emit the moved signal, this forces our parent to redraw connections\n # kind of a hack, but it works.\n self.emit(SIGNAL(\"moved(QPoint)\"), self.pos())\n self.update()\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41168,"cells":{"__id__":{"kind":"number","value":8211977505630,"string":"8,211,977,505,630"},"blob_id":{"kind":"string","value":"1fbf8b284d585bd9582ee9b001f8b76e9cdf80cd"},"directory_id":{"kind":"string","value":"ebf7306bba2bdac746454a4b5099ab54c0aeba41"},"path":{"kind":"string","value":"/server/app.py"},"content_id":{"kind":"string","value":"e35eadf2f17e625982c1f4576231f3991cf4782d"},"detected_licenses":{"kind":"list 
like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"vladimir-myskov/web-music-switcher"},"repo_url":{"kind":"string","value":"https://github.com/vladimir-myskov/web-music-switcher"},"snapshot_id":{"kind":"string","value":"d84877a55b90d2ea4a4284911dfc5ba2e999b3ad"},"revision_id":{"kind":"string","value":"83326f6e4d44d7e544a77b707faeeac52c83e0e1"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2020-05-07T08:53:24.170317","string":"2020-05-07T08:53:24.170317"},"revision_date":{"kind":"timestamp","value":"2012-10-01T09:27:15","string":"2012-10-01T09:27:15"},"committer_date":{"kind":"timestamp","value":"2012-10-01T09:27:15","string":"2012-10-01T09:27:15"},"github_id":{"kind":"number","value":6031462,"string":"6,031,462"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":1,"string":"1"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"import os\nimport json\nimport tornado.ioloop\nimport tornado.web\nimport tornado.websocket\nfrom sockjs.tornado import SockJSRouter, SockJSConnection\n\npages = {}\n\ndef register_audio_page(key, page, handler):\n if not key in pages:\n page[\"handler\"] = handler\n pages[key] = page\n return page\n\ndef unregister_audio_page(key):\n del pages[key]\n\n\nclass PagesHandler(tornado.web.RequestHandler):\n def get(self):\n self.write(str(pages))\n\nclass SeniorHandler(tornado.web.RequestHandler):\n def get(self, key):\n self.render(\"senior.html\", 
key=key)\n\n\nclass AudioPageHandler(SockJSConnection):\n def open(self):\n print \"OPEN\"\n pass\n\n def on_message(self, message):\n print message\n message = json.loads(message)\n method, data = \"on_\"+ message[\"event\"], message[\"data\"]\n try:\n getattr(self, method)(data)\n except:\n pass\n\n def on_close(self):\n unregister_audio_page(self.page[\"key\"])\n\n def on_register(self, data):\n print data\n key = data[\"key\"]\n self.page = register_audio_page(key, data, self)\n\n def on_page_settings(self, data):\n for key,value in data.iteritems():\n self.page[key] = value\n\n def on_senior_prev(self, data):\n pages[data[\"key\"]][\"handler\"].send({\n \"event\":\"audio_prev\",\n \"data\": {}\n })\n\n def on_senior_next(self, data):\n pages[data[\"key\"]][\"handler\"].send({\n \"event\":\"audio_next\",\n \"data\": {}\n })\n\nsettings = {\n \"static_path\": os.path.join(os.path.dirname(__file__), \"static\")\n}\n\nAudioPageRouter = SockJSRouter(AudioPageHandler, '/websocket')\n\napplication = tornado.web.Application([\n (r\"/pages\", PagesHandler),\n #(r\"/websocket\",AudioPageHandler),\n (r\"/senior/(.*)\",SeniorHandler),\n ]+AudioPageRouter.urls,\n debug=True, **settings)\n\nif __name__ == \"__main__\":\n\n application.listen(8888)\n 
tornado.ioloop.IOLoop.instance().start()\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2012,"string":"2,012"}}},{"rowIdx":41169,"cells":{"__id__":{"kind":"number","value":4483945889638,"string":"4,483,945,889,638"},"blob_id":{"kind":"string","value":"3a587b6716f7bb074dd428a52960dbe9a1303d5a"},"directory_id":{"kind":"string","value":"e59db7b595e3797ed212868f38d6d0a77395b776"},"path":{"kind":"string","value":"/decommission.py"},"content_id":{"kind":"string","value":"52580c59ce272c7e03f37ca6a563a7782c2c52d3"},"detected_licenses":{"kind":"list like","value":["GPL-3.0-only","GPL-1.0-or-later","GPL-3.0-or-later"],"string":"[\n \"GPL-3.0-only\",\n \"GPL-1.0-or-later\",\n \"GPL-3.0-or-later\"\n]"},"license_type":{"kind":"string","value":"non_permissive"},"repo_name":{"kind":"string","value":"9apps/ReDiS"},"repo_url":{"kind":"string","value":"https://github.com/9apps/ReDiS"},"snapshot_id":{"kind":"string","value":"a157a7fba02fe58c0c431c010a471103e3ca4fb1"},"revision_id":{"kind":"string","value":"770b3f2bff71ab09a4ecb766235efbc853b11051"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2016-09-09T12:22:11.585879","string":"2016-09-09T12:22:11.585879"},"revision_date":{"kind":"timestamp","value":"2012-07-26T14:35:09","string":"2012-07-26T14:35:09"},"committer_date":{"kind":"timestamp","value":"2012-07-26T14:35:09","string":"2012-07-26T14:35:09"},"github_id":{"kind":"number","value":3152308,"string":"3,152,308"},"star_events_count":{"kind":"number","value":2,"string":"2"},"fork_events_count":{"kind":"number","value":2,"string":"2"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"bool","value":false,"string":"false"},"gha_event_created_at":{"kind":"timestamp","value":"2012-07-25T07:40:20","string":"2012-07-25T07:40:20"
},"gha_created_at":{"kind":"timestamp","value":"2012-01-11T08:49:33","string":"2012-01-11T08:49:33"},"gha_updated_at":{"kind":"timestamp","value":"2012-07-25T07:40:19","string":"2012-07-25T07:40:19"},"gha_pushed_at":{"kind":"timestamp","value":"2012-07-25T07:40:18","string":"2012-07-25T07:40:18"},"gha_size":{"kind":"number","value":164,"string":"164"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"string","value":"Python"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"# Copyright (C) 2011, 2012 9apps B.V.\n# \n# This file is part of Redis for AWS.\n# \n# Redis for AWS is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n# \n# Redis for AWS is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n# \n# You should have received a copy of the GNU General Public License\n# along with Redis for AWS. 
If not, see .\n\nimport os, sys\nimport json, urllib2\n\nfrom boto.ec2.connection import EC2Connection\nfrom boto.ec2.regioninfo import RegionInfo\n\nimport administration, backup\nfrom host import Host\nfrom events import Events\n\ntry:\n\turl = \"http://169.254.169.254/latest/\"\n\n\tuserdata = json.load(urllib2.urlopen(url + \"user-data\"))\nexcept Exception as e:\n\tprint e\n\texit( \"We couldn't get user-data or other meta-data...\")\n\n# we are going to work with local files, we need our path\npath = os.path.dirname(os.path.abspath(__file__))\n\ndef delete_monitor():\n\tos.system( \"rm {0}/etc/monit/data\".format(path))\n\ndef decommission(key, access, cluster, persistence=\"no\"):\n\tevents = Events(key, access, cluster)\n\tnode = Host(cluster, events).get_node()\n\tdef log(message, logging='warning'):\n\t\tevents.log(node, 'Decommission', message, logging)\n\n\tlog('start dommissioning', 'info')\n\t# make a last backup\n\tif \"no\" != persistence:\n\t\tlog('make last backups, first RDB', 'info')\n\t\t# take the latest RDB and move it to S3\n\t\trdb = backup.put_RDB(key, access, cluster, 'monthly')\n\t\tadministration.set_RDB(key, access, cluster, rdb)\n\n\t\t# make a last snapshot\n\t\tlog('and now a snapshot', 'info')\n\t\tsnapshot = backup.make_snapshot(key, access, cluster, 'monthly')\n\t\tadministration.add_snapshot(key, access, cluster, snapshot)\n\n\t\tdelete_monitor()\n\n\t# we don't have to get rid any the volume, it is deleted on termination\n\n\t# change to the default (no persistence)\n\tlog('remove redis.conf', 'info')\n\tos.system(\"/bin/rm -f /etc/redis/redis.conf\")\n\t# and empty the cron as well\n\t#log('empty the cron', 'info')\n\t#os.system(\"/bin/echo | /usr/bin/crontab\")\n\n\t# make sure we make a clean AMI, with all monit checks monitored\n\tlog(\"finally, monitor all (monit), but 'redis' and slave\", 'info')\n\tos.system(\"/usr/bin/monit unmonitor redis\")\n\tos.system(\"/usr/bin/monit unmonitor slave\")\n\nif __name__ == 
'__main__':\n\timport os, sys\n\n\ttry:\n\t\tpersistence = userdata['persistence']\n\texcept:\n\t\tpersistence = None\n\n\t# what is the domain to work with\n\tname = os.environ['REDIS_NAME'].strip()\n\tzone = os.environ['HOSTED_ZONE_NAME'].rstrip('.')\n\n\t# the name (and identity) of the cluster (the master)\n\tcluster = \"{0}.{1}\".format(name, zone)\n\n\tdecommission(sys.argv[1], sys.argv[2], cluster, persistence=persistence)\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2012,"string":"2,012"}}},{"rowIdx":41170,"cells":{"__id__":{"kind":"number","value":10780367945729,"string":"10,780,367,945,729"},"blob_id":{"kind":"string","value":"a10736dba74f60f3a7a23a9ede547f40de6a2a5e"},"directory_id":{"kind":"string","value":"648893482140747100f9efc5067fb7d9d72d1c11"},"path":{"kind":"string","value":"/mtget.py"},"content_id":{"kind":"string","value":"86f56bd297fd4b2f94e8ec497f2e28c248f173c8"},"detected_licenses":{"kind":"list 
like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"4poc/mtget"},"repo_url":{"kind":"string","value":"https://github.com/4poc/mtget"},"snapshot_id":{"kind":"string","value":"349bbf77d9d2987d3f6e7071e1b427814587325a"},"revision_id":{"kind":"string","value":"625409aeb58bc20f28410f8ea12dfc42be52555c"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-01T19:01:05.594489","string":"2021-01-01T19:01:05.594489"},"revision_date":{"kind":"timestamp","value":"2012-11-07T22:13:22","string":"2012-11-07T22:13:22"},"committer_date":{"kind":"timestamp","value":"2012-11-07T22:13:22","string":"2012-11-07T22:13:22"},"github_id":{"kind":"number","value":499285,"string":"499,285"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n# ZDF Mediathek Download/Streaming Skript\n# v0.5.3 http://apoc.sixserv.org/\n# Stand: 2009-12-22\n# Artikel: http://sixserv.org/2009/12/21/mtgetzdf-mediathek-downloadstream/\n# Sollte auf jeder standard Python installation laufen, wenn nicht mailt mir bitte :)\n#\n# This program is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# This program is distributed in the 
hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with this program. If not, see .\n\n##\n# \"Pseudo\" constants for download and streaming commands\n#\n# %URL% and %OUTFILE% will be replaced. If you notice buffering lags in \n# streaming mode increase the cache size.\n#\n# alternativly: CMD_DOWNLOAD='mmsrip \"--output=%OUTFILE%\" \"%URL%\"'\nCMD_DOWNLOAD = 'mplayer -prefer-ipv4 -noframedrop -dumpfile \"%OUTFILE%\" -dumpstream -playlist \"%URL%\"'\nCMD_STREAM = 'mplayer -fs -zoom -display :0.0 -prefer-ipv4 -cache 2000 -playlist \"%URL%\"'\n\n# used for url constructing\nURL_BASE = \"http://www.zdf.de\"\n\n# fixxed enums for mode\nDOWNLOAD = 0\nSTREAM = 1\n\n# default settings that can change via the options\nquality = 2 # DSL X000 (1k or 2k is currently supported)\nmode = STREAM # streaming the videos per default\nsearch = None\nmaxr = 10 # maximum results to proceed\ninteractive = False # interactive video and channel selection\nverbose = False\ndirectory = \"./\"\ntitle_filename = False\nignore_channel = False # ignoriert kanaele in suchergebnissen\ncolors = True # aktiviert kursiv und fettschrift in select_entries()\n\nimport getopt\nimport sys\nimport string\nimport re\nimport urllib\nimport os\nimport htmlentitydefs\n\n##\n# thanks to Fredrik Lundh for this function:\n# http://effbot.org/zone/re-sub.htm#unescape-html\n##\n# Removes HTML or XML character references and entities from a text string.\n#\n# @param text The HTML (or XML) source text.\n# @return The plain text, as a Unicode string, if necessary.\ndef unescape(text):\n def fixup(m):\n text = m.group(0)\n if text[:2] == \"&#\":\n # character reference\n try:\n if text[:3] == \"&#x\":\n return unichr(int(text[3:-1], 16))\n else:\n return unichr(int(text[2:-1]))\n except 
ValueError:\n pass\n else:\n # named entity\n try:\n text = unichr(htmlentitydefs.name2codepoint[text[1:-1]])\n except KeyError:\n pass\n return text # leave as is\n return re.sub(\"&#?\\w+;\", fixup, text)\n\n##\n# Gather all entries in url\n#\n# Load url, parses for videos or channels including metadata. Proceed with\n# next page if necessary until entries are found or end is reeched.\n#\n# The returning list includes dictionary entries for each found video or channel,\n# in the following format:\n# {'id': ID, 'type': TYPE, 'url': URL, 'info': list(infoA, infoB, ...)}\n#\n# @param string mediathek url\n# @return list\ndef gather_entries(url):\n global verbose, URL_BASE, maxr, ignore_channel\n \n entry_count = maxr\n\n entries = []\n \n while True:\n \n # laden der url inhalte => contents\n if verbose: print \" [+] Gathering url: \"+url\n try:\n url = re.sub('&amp;', '&', url)\n site = urllib.urlopen(url)\n contents = site.read()\n except:\n print \"Error in retriving url(%s)\" % url\n sys.exit(2)\n \n if verbose: print \" [+] Searching page for videos/kanaele\"\n\n # die beiden regex's matchen auf Videos _und_ Kanaele\n # 1ter ist url und titel 2ter ist url und untertitel url sollte gleich sein\n found=[]\n\n matches = re.findall('

([^<]+)([^<]+)<\\/a><\\/p>', contents)\n\n for match in matches:\n found_url = match[0]\n found_type = ''\n found_id = None\n\n # je nach format der gefundenen url wird type gesetzt zu video...\n video_match = re.match('/ZDFmediathek/beitrag/video/([0-9]+)/', found_url)\n if video_match:\n found_type = 'video'\n found_id = video_match.group(1)\n\n # oder 'kanaluebersicht'\n if not ignore_channel:\n kanaluebersicht_match = re.match('/ZDFmediathek/kanaluebersicht/aktuellste/([0-9]+)', found_url)\n if kanaluebersicht_match:\n found_type = 'kanal'\n found_id = kanaluebersicht_match.group(1)\n\n # nur videos oder kanal urls werden berücksichtigt, bilderstrecken \n # und interaktive inhalte werden ignoriert\n if found_id:\n try:\n found_info = [ match[1] ]\n # search for duplicate entry in found\n for item in found: # if found just add found data to str data\n\n if item['id'] == found_id:\n item['info'] += found_info\n break\n\n else: # yeah finally, first time using this python feature :)\n\n # wurde die id _nicht_ gefunden baue neues dict\n found += [{'id': found_id, 'type': found_type, 'url': found_url, 'info': found_info}]\n\n except IndexError: # sollte beim debugging gut helfen\n if verbose: print \" [+] IndexError in parsing! 
Payload: \"+match+\"/\"+id_match\n\n if verbose: print \" [+] FOUND: %d entries\" % len(found)\n \n # verschiebt \"max result\" gefundene einträge nach entries\n for item in found:\n entries += [item]\n entry_count -= 1\n if entry_count <= 0: break\n\n # break if no next pages\n if not 'Nutzen Sie unsere Suchfilter' in contents:\n break\n\n #\n # proceed with next page\n #\n next_match = re.findall('Weiter', contents)\n if not next_match:\n next_match = re.findall('Weitere Beitr&auml;ge laden.<\\/a>', contents)\n\n if entry_count > 0 and len(next_match) > 0:\n if verbose: print \" [+] Found Next Link!\"\n url = next_match[0]\n if not 'http://' in url:\n url = URL_BASE+url\n else:\n entry_count = 0\n\n if entry_count <= 0:\n break\n \n if verbose: print \" [+] entry_count: %d\" % entry_count\n\n return entries\n\n##\n# Print and makes user selection, return url list\n#\n# The parameter entries format is the same returned from gather_entries(),\n# the function prints the entries and if the interactive setting is True the \n# the user can enter a selection of entries, the method generates a list\n# of all selected entries and returns a url list. [\"\", \"\", ...]\n#\n# @see gather_entries\n# @param list including dictionaries in gather_entries format\n# @return list\ndef select_entries(entries):\n global URL_BASE, verbose, interactive, colors\n\n # print numeric list, create selected list with urls\n selected = []\n i = 1\n for item in entries:\n if len(item) != 4:\n if verbose: print \" [+] Video Item Error! 
(Wrong List structure!)\"\n next\n\n url = item['url']\n\n if verbose: print \" == > %s\" % url\n\n print \"%d : (%s)\" % (i, string.capitalize(item['type']))\n\n # vertausche \"kategorie\" mit titel, das alles mit info is ein wenig\n # unstrukturiert vll mal neu schreiben\n if len(item['info']) >= 2:\n (item['info'][0], item['info'][1]) = (item['info'][1], item['info'][0])\n\n for idx, info in enumerate(item['info']):\n info = unescape(info)\n if colors and idx == 0:\n print \"\\t\\x1B[3m%s\\x1B[0m\" % info\n elif colors and idx == 1:\n print \"\\t\\x1B[1m%s\\x1B[0m\" % info\n elif colors and idx == 2:\n print \"\\t\\x1B[3m(%s)\\x1B[0m\" % info\n else: # no colors:\n print \"\\t%s\" % info\n print\n\n if not item['info'][0]:\n title = None\n else:\n title = item['info'][0]\n\n selected += [URL_BASE+url]\n i+=1\n\n if interactive:\n print \"Select Videos to play(space seperated list):\"\n print \" ===> \",\n sel = sys.stdin.readline()[:-1]\n sel_idx = sel.split(' ')\n new_selected=[]\n for idx, t in enumerate(selected):\n if str(idx+1) in sel_idx:\n new_selected += [t]\n selected = new_selected\n print\n print \"+----------------------------------------------------------+\"\n print\n\n return selected\n\n##\n# Gather video link, parses for asx and execute cmd\n#\n# The function loads the given link, parses for a asx link in the given quality \n# setting(DSL 1000 / DSL 2000) and execute stream or download command according\n# to the mode setting.\n#\n# @param string\ndef proceed_video(url):\n global mode, verbose, directory\n if verbose: print \" [+] Proceed Video URL: Gathering video url: \"+url\n try:\n url = re.sub('&amp;', '&', url)\n site = urllib.urlopen(url)\n contents = site.read()\n except:\n print \"Error in retriving url(%s)\" % url\n sys.exit(2)\n asx_match = re.findall('DSL %d000 ([^<]+)', contents)\n if title_match:\n title = title_match[0]\n # convert space\n title = re.sub(' ', \"-\", title)\n # strip all not alpha\n title = re.sub('[^a-zA-Z0-9-]', 
'', title)\n title = re.sub('[-]+', '_', title)\n filename = title + '.wmv'\n\n cmd = re.sub('%OUTFILE%', directory+filename, CMD_DOWNLOAD)\n cmd = re.sub('%URL%', asx, cmd)\n if verbose: print \" [+] Execute Shell Command: \"+cmd\n os.system(cmd)\n\n##\n# print usage screen and exit\ndef usage():\n print \"\"\"ZDF Mediathek Download/Streaming Skript\nv0.5 http://apoc.sixserv.org/\nStand: 2009-12-20\n\nSyntax: %s [OPTIONS]\n\n mediathek video/kanal url oder id \n -1 qualitaet DSL 1000\n -2 qualitaet DSL 2000 (Standard)\n -m, --mode download(d) oder streaming(s)\n -d, --dir das verzeichnis wohin gespeichert werden soll(.)\n -t, --title benutzt nicht den stream dateinamen sondern titel\n -s, --search suche in der mediathek\n -l, --maxr wieviele ergebnisse verarbeiten(suche/kategorie)\n -c, --ignore-channel ignoriert kanaele\n --no-colors deaktiviert die kursiv und fettschrift\n -i interaktiv, auswahl der zu spielenden videos\n -v erweiterte ausgabe, zu debugging zwecken\n -h, --help zeigt diese hilfe\n\"\"\" % sys.argv[0]\n\n#\n# Parsing command line arguments\n#\ntry:\n opts, args = getopt.getopt(sys.argv[1:], \"12m:d:ts:l:civh\", \n [\"mode=\", \"dir=\", \"title\", \"search=\", \"maxr=\", \"ignore-channel\", \"no-colors\", \"help\"])\nexcept getopt.GetoptError, err:\n print str(err)\n usage()\n sys.exit(2)\n\n#\n# Change default settings according to the parameters\n#\ntry:\n for o, a in opts:\n if o in (\"-h\", \"--help\"):\n usage()\n sys.exit()\n elif o in (\"-1\"):\n quality = 1\n elif o in (\"-2\"):\n quality = 2\n elif o in (\"-m\", \"--mode\"):\n if a != \"d\" and a != \"s\":\n print \"mode d or s!\"\n sys.exit()\n if a == \"d\":\n mode = DOWNLOAD\n else:\n mode = STREAM\n elif o in (\"-d\", \"--dir\"):\n directory = a\n if not os.path.isdir(directory):\n print \"Error: No Directory!\"\n sys.exit()\n # missing / ?\n if directory[-1:] != '/':\n directory += '/'\n elif o in (\"-t\", \"--title\"):\n title_filename = True\n elif o in (\"-s\", \"--search\"):\n 
search = a\n elif o in (\"-l\", \"--maxr\"):\n maxr = int(a)\n elif o in (\"-c\", \"--ignore-channel\"):\n ignore_channel = True\n elif o in (\"--no-colors\"):\n colors = False\n elif o in (\"-i\"):\n interactive = True\n elif o in (\"-v\"):\n verbose = True\n else:\n assert False, \"unhandled option\"\nexcept ValueError:\n print \"Error in parsing parameter types.\"\n sys.exit(2)\n\n#\n# Print usage screen if url is missing\n#\nif len(sys.argv) <= 1:\n usage()\n exit\n\n\n#\n# Assign url or id variable\n#\nurl_id = sys.argv[-1]\n\n#\n# Replace url_id with search url if seach option is given\n#\nif search:\n print \"Searching... \"+search\n url_id = \"http://www.zdf.de/ZDFmediathek/suche?sucheText=%s&offset=0&flash=off\" % urllib.quote_plus(search)\n if verbose: print \" [+] Search URL: %s\" % url_id\n\n#\n# Handling video ID\n#\nif re.match(\"^[0-9]+$\", url_id):\n if verbose: print \" [+] Proceed with Id: %s\" % url_id\n proceed_video(URL_BASE+\"/ZDFmediathek/beitrag/video/%s/?flash=off\" % url_id)\n\n#\n# Handling video or channel url\n#\nelif re.match(\"^http:\", url_id):\n if verbose: print \" [+] Proceed with URL: %s\" % url_id\n\n if \"#\" in url_id:\n url_id = url_id.replace('#', '')\n\n # make sure flash is off:\n if \"flash=\" in url_id:\n url_id = re.sub('flash=on', 'flash=off', url_id)\n else:\n if \"?\" in url_id:\n url_id += \"&flash=off\"\n else:\n url_id += \"?flash=off\"\n\n #\n # Handle Video URL:\n #\n if re.findall(\"/video/\", url_id):\n proceed_video(url_id)\n\n #\n # Handling Channel or Search URL:\n #\n else: # kategorie url z.B. 
zeige liste/auswahl und abspielen\n url = url_id\n proceed_urls = []\n while True:\n entries = gather_entries(url)\n selection = select_entries(entries)\n for select_url in selection:\n if 'kanal' in select_url:\n if verbose: print \" [+] Follow Kanal entry!\"\n url = select_url\n break\n else:\n if verbose: print \" [+] Proceed with Video:\"\n proceed_video(select_url)\n else:\n break\n\n#EOF\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2012,"string":"2,012"}}},{"rowIdx":41171,"cells":{"__id__":{"kind":"number","value":7172595385757,"string":"7,172,595,385,757"},"blob_id":{"kind":"string","value":"dd8e72b32576eb50eaf589eef705cbac0601b3b6"},"directory_id":{"kind":"string","value":"e263d74a2ada7b9bdd9f3adcb8953418e1ee21bf"},"path":{"kind":"string","value":"/gui.py"},"content_id":{"kind":"string","value":"817e0b89299337213537eaa18ede39a6ee3ca563"},"detected_licenses":{"kind":"list 
like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"Sebelino/kexjobb"},"repo_url":{"kind":"string","value":"https://github.com/Sebelino/kexjobb"},"snapshot_id":{"kind":"string","value":"5aa47849c05c4a52fe27205d43c5bdef4ba6f829"},"revision_id":{"kind":"string","value":"7c06cda0143b292780194006bc60303b33b9a6f9"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-16T19:33:25.918792","string":"2021-01-16T19:33:25.918792"},"revision_date":{"kind":"timestamp","value":"2013-04-12T16:53:08","string":"2013-04-12T16:53:08"},"committer_date":{"kind":"timestamp","value":"2013-04-12T16:53:08","string":"2013-04-12T16:53:08"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n# namnlös.py\n# \n# Copyright 2013 Jonatan Åkesson \n# \n# This program is free software; you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation; either version 2 of the License, or\n# (at your option) any later version.\n# \n# This program is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the\n# GNU General Public License for more details.\n# \n# You should have received a copy of the GNU General Public License\n# along with this program; if not, write to the Free Software\n# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,\n# MA 02110-1301, USA.\n# \n# \nimport time\nimport os\nimport subprocess\nimport re\nfrom threading import Thread\n\ndef kod():\n\ts = ''\n\tbl = '3000'\n\ts = get_active_window_title(\"\")\n\n\tprint(\"\" + s)\n\n\tif s.find('Zenia') > 0:\n\t\tbl= '3500'\n\telif s.find('Hej') > 0:\n\t\tprint(\"Hittade Chrome!\")\n\t\tbl= '100'\n\telif s.find('Kate') > 0:\n\t\tbl='5'\n\telif s.find('Chrom') > 0:\n\t\tbl='2000'\n\telif s.find('no') > 0:\n\t\tbl='500'\n\t\t\n\t#sätter brightness\n\tp1 = subprocess.Popen(['echo', bl], stdout=subprocess.PIPE)\n\t#p2 = subprocess.Popen(['tee', '/sys/class/backlight/intel_backlight/brightness'], stdin=p1.stdout)\n\tp2 = subprocess.Popen(['tee', '/sys/class/backlight/acpi_video0/brightness'], stdin=p1.stdout)\n\tp1.stdout.close() \n\toutput = p2.communicate()\n\t\n\t\n\t\n\t#läser actual brightness\n\tactual_bright = ''\n\tp3 = subprocess.Popen(['cat', '/sys/class/backlight/acpi_video0/brightness'], stdout=subprocess.PIPE)\n\t#p3 = subprocess.Popen(['cat', '/sys/class/backlight/intel_backlight/brightness'], stdout=subprocess.PIPE)\n\tfor line in p3.stdout:\n\t actual_bright = line.rstrip()\n\t print(\"actual brightness: \" + line)\n\t \n\t\n\t#read keypresses\n\t#\n\tkey_presses = \"33\"\n\t#\n\t\n\t\n\t\n\t# skriver till fil klassifieringsfil\n\t\n\tskriv = \"\" + key_presses + \",?,\" + actual_bright + \",?,?,lower.\" \n\tf = open('power.test','w')\n\tf.write(\"\")\n\tf.write(skriv)\n\tf.close()\n\t\n\t\n\t\n\t\n\t#Klassifierar med adaboost\n\tlista = boost()\n\tprint(lista)\n\t\n\t\n\t\n\t#höjer eller sänker\n\t#sätter brightness\n\t#p1 = subprocess.Popen(['echo', bl], stdout=subprocess.PIPE)\n\t#p2 = subprocess.Popen(['tee', '/sys/class/backlight/intel_backlight/brightness'], 
stdin=p1.stdout)\n\t#p2 = subprocess.Popen(['tee', '/sys/class/backlight/acpi_video0/brightness'], stdin=p1.stdout)\n\t#p1.stdout.close() \n\t#output = p2.communicate()\n\t\n\t\n\ttime.sleep(0.5)\n\n\n\n\ndef get_active_window_title(self):\n\troot = subprocess.Popen(['xprop', '-root', '_NET_ACTIVE_WINDOW'], stdout=subprocess.PIPE)\n\n\tfor line in root.stdout:\n\t\tm = re.search('^_NET_ACTIVE_WINDOW.* ([\\w]+)$', line)\n\t\tif m != None:\n\t\t\tid_ = m.group(1)\n\t\t\tid_w = subprocess.Popen(['xprop', '-id', id_, 'WM_NAME'], stdout=subprocess.PIPE)\n\t\t\tbreak\n\n\tif id_w != None:\n\t\tfor line in id_w.stdout:\n\t\t\tmatch = re.match(\"WM_NAME\\(\\w+\\) = (?P.+)$\", line)\n\t\t\tif match != None:\n\t\t\t\treturn match.group(\"name\")\n\n\treturn \"active win no\"\n\ndef boost():\n\tlista = []\n\tread = os.popen(\"icsiboost -S power -C < power.test; echo $?\")\n\trad = read.readline()\n\tlista = rad.split(\" \")\n\treturn lista\n\t\n\t\nwhile 1==1:\n kod()\n \nroot.mainloop()\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2013,"string":"2,013"}}},{"rowIdx":41172,"cells":{"__id__":{"kind":"number","value":7490423011246,"string":"7,490,423,011,246"},"blob_id":{"kind":"string","value":"b5635ba0b0397820b43a45f549a027a9d4586c38"},"directory_id":{"kind":"string","value":"525821586d35422fadad2f4460dd227235884dbb"},"path":{"kind":"string","value":"/djangomako.py"},"content_id":{"kind":"string","value":"4162846fac7d7b08895b50889d4a308d9f4e68cf"},"detected_licenses":{"kind":"list 
like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"kk71/djangomako"},"repo_url":{"kind":"string","value":"https://github.com/kk71/djangomako"},"snapshot_id":{"kind":"string","value":"30eb0495a760a79149c1762730582ce075db1f7b"},"revision_id":{"kind":"string","value":"90f59bdc32a316e62e8e32c948dc0aa630f7558e"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2016-08-05T04:27:09.877746","string":"2016-08-05T04:27:09.877746"},"revision_date":{"kind":"timestamp","value":"2013-11-27T13:54:41","string":"2013-11-27T13:54:41"},"committer_date":{"kind":"timestamp","value":"2013-11-27T13:54:41","string":"2013-11-27T13:54:41"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"# -*- coding: utf-8 -*-\n'''\ndjango-mako template connection module\nversion 0.1, for django1.5+\n\n'''\n#python import\nfrom glob import glob\n\n#django import\nfrom django.http import HttpResponse\nfrom django.core.context_processors import csrf\nfrom django.conf import settings\n\n#mako import\nfrom mako.lookup import TemplateLookup\ndjlookup=TemplateLookup(directories=settings.TEMPLATE_DIRS,input_encoding=\"utf-8\")\n\n\n\ndef render_to_string(template_name,\n\t\tdictionary=None,\n\t\trequest=None):\n\t'''\nrender a template to a string(like render_to_string 
django.template.loader)\n'''\n\tt=djlookup.get_template(template_name)\n\tif request!=None:\n\t\tdictionary.update(csrf(request))\n\tpage=t.render(**dictionary)\n\treturn page\n\t\n\n\ndef render_to_response(template_name,\n\t\tdictionary={},\n\t\tcontent_type=\"text/html\",\n\t\trequest=None,\n\t\tstatus=200):\n\t'''\na simple http response method just like django's\nfor easy alternativity\n'''\n\tpage=render_to_string(template_name,dictionary,request)\n\treturn HttpResponse(content=page,content_type=content_type,status=status)\n\n\n\ndef tmpldebug(request,tmpl=\"\"):\n\t'''\nargument:\n\ttmpl:specific template file name.\n'''\n\tif tmpl==\"\":\n\t\tt='''\n\n\n\ndjangomako template design mode\n\n\n'''\n\t\tfor tmpldir in settings.TEMPLATE_DIRS:\n\t\t\tif tmpldir[-1]!=\"/\":tmpldir+=\"/\"\n\t\t\tt+=\"

\"+tmpldir+\"

\"\n\t\t\tfor s in glob(tmpldir+\"*\"):\n\t\t\t\tif s[-1:]==\"~\":continue\n\t\t\t\ts=s[len(tmpldir):]\n\t\t\t\tt+='

'+s+\"

\"\n\t\t\ts+=\"
\"\n\t\tt+='''\n \n\n'''\n\t\treturn HttpResponse(t)\n\n\telse:\n\t\treturn render_to_response(tmpl,{})\n\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2013,"string":"2,013"}}},{"rowIdx":41173,"cells":{"__id__":{"kind":"number","value":3444563793200,"string":"3,444,563,793,200"},"blob_id":{"kind":"string","value":"d79983c0e1a7bbc4b4e3aaffc3f940c6aac7ba17"},"directory_id":{"kind":"string","value":"731f30b6c3a012618b5d5b57dd4af3d65fdd601c"},"path":{"kind":"string","value":"/ROOT/Old Code/Early Testing - Testing of Various Modules/random test code/curve.py"},"content_id":{"kind":"string","value":"23d2527a8e837faa946b458d0ab925b57518d878"},"detected_licenses":{"kind":"list like","value":["Apache-2.0","LicenseRef-scancode-unknown-license-reference"],"string":"[\n \"Apache-2.0\",\n \"LicenseRef-scancode-unknown-license-reference\"\n]"},"license_type":{"kind":"string","value":"non_permissive"},"repo_name":{"kind":"string","value":"PhilipToddCoppola/Honours-Project"},"repo_url":{"kind":"string","value":"https://github.com/PhilipToddCoppola/Honours-Project"},"snapshot_id":{"kind":"string","value":"4d921653de47a0d623e590654f347f573441ceb0"},"revision_id":{"kind":"string","value":"fe226a197eb2d3448b6ecebb70597cbe097f0f26"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2020-06-26T08:15:53.487096","string":"2020-06-26T08:15:53.487096"},"revision_date":{"kind":"timestamp","value":"2014-12-16T13:55:17","string":"2014-12-16T13:55:17"},"committer_date":{"kind":"timestamp","value":"2014-12-16T13:55:17","string":"2014-12-16T13:55:17"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"
kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"from visual import*\n\ndef R(x):\n y = -(1.0/4.0)*x**2 + 4\n return y\n\ndx = 0.5\n\na = 0.0\n\nb = 3.0\n\nx_axis = curve(pos=[(-10,0,0),(10,0,0)])\n\ny_axis = curve(pos=[(0,-10,0),(0,10,0)])\n\nz_axis = curve(pos=[(0,0,-10),(0,0,10)])\n\nline = curve(x=arange(0,3,.1))\nline.color=color.cyan\nline.radius = .1\nline.y = -(1.0/4.0) * (line.x**2) + 4\n\n#scene.background = color.white\n\nfor i in range(-10, 11):\n\n curve(pos=[(-0.5,i),(0.5,i)])\n curve(pos=[(i,-0.5),(i,0.5)])\n\nVT = 0\n\n\nfor x in arange(a + dx,b + dx,dx):\n\n V = pi * R(x)**2 * dx\n\n disk = cylinder(pos=(x,0,0),radius=R(x),axis=(-dx,0,0), color = color.yellow)\n\n VT = V + VT\n\n print V\n\nprint \"Volume =\", VT\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41174,"cells":{"__id__":{"kind":"number","value":11940009112460,"string":"11,940,009,112,460"},"blob_id":{"kind":"string","value":"c9fc48d1c33612692ed61dbea6561c5995c7a0a0"},"directory_id":{"kind":"string","value":"1d4669fc5788aaf98b1db585687c9365d4c7a3ac"},"path":{"kind":"string","value":"/analyze/imgmix/imgmix.py"},"content_id":{"kind":"string","value":"f2914c8d63f7fb85ac1fdaf688cd6bf9c0cc0059"},"detected_licenses":{"kind":"list 
like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"dmpots/hobbes"},"repo_url":{"kind":"string","value":"https://github.com/dmpots/hobbes"},"snapshot_id":{"kind":"string","value":"fa6787834b261565e14c6b9b34a62f277ae9e10b"},"revision_id":{"kind":"string","value":"cde59d45f2779ed2d363e5f5014b8ccaac1e0121"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2016-09-05T17:39:54.405454","string":"2016-09-05T17:39:54.405454"},"revision_date":{"kind":"timestamp","value":"2012-02-28T22:56:11","string":"2012-02-28T22:56:11"},"committer_date":{"kind":"timestamp","value":"2012-02-28T22:56:11","string":"2012-02-28T22:56:11"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"#!/usr/bin/env python3\n\nimport os\nimport re\nimport sys\n\n\ndef find_logs(root):\n logs = []\n for (dirpath, _, filenames) in os.walk(root):\n for filename in filenames:\n if re.match(r\".*[.]imgmix[.]\\d+[.]LOG\", filename):\n logs.append(os.path.join(dirpath, filename))\n return logs\n\ndef read_logs(logs):\n def clean(l):\n return os.path.basename(l.strip())\n libs = set()\n for log in logs:\n with open(log) as f:\n libs.update(map(clean, f.readlines()))\n return libs\n\ndef filter_libs(libs):\n for lib in libs:\n # Get rid of anything not a lib\n if not lib.startswith(\"lib\"):\n continue\n\n # Get rid of Haskell runtime\n 
if lib.startswith(\"libHSrts\"):\n continue\n\n # Allow libquantum for SPEC\n if lib.startswith(\"libquantum\"):\n yield lib\n\n # Get rid of non-Haskell libraries\n if not lib.startswith(\"libHS\"):\n continue\n\n yield lib\n\ndef normalize_libs(libs):\n def normalize(lib):\n dot = lib.find(\".\")\n dash= lib.find(\"-\")\n\n if dot == -1 and dash == -1:\n return lib\n elif dot == -1:\n return lib[:dash]\n\n elif dash == -1:\n return lib[:dot]\n else:\n return lib[:min(dot,dash)]\n \n return map(normalize, libs)\n \n \n\ndef main(argv):\n if len(argv) != 1:\n print(\"usage: imgmix.py \")\n sys.exit(1)\n\n root = argv[0]\n logs = find_logs(root)\n libs = read_logs(logs)\n libs = filter_libs(libs)\n libs = normalize_libs(libs)\n\n final_set = sorted(set(libs))\n \n print(\" const char* AllowedNamesList[] = {\", end=\"\\n \")\n print(\",\\n \".join(map(lambda l: '\"'+l+'\"', final_set)))\n print(\" };\")\n\nif __name__ == \"__main__\":\n main(sys.argv[1:])\n \n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2012,"string":"2,012"}}},{"rowIdx":41175,"cells":{"__id__":{"kind":"number","value":18657337935490,"string":"18,657,337,935,490"},"blob_id":{"kind":"string","value":"721fa3d476c9d062eacf1552e1f849855c4d30f7"},"directory_id":{"kind":"string","value":"d0646ba7b8deef191bd89469836d78e6f65b1180"},"path":{"kind":"string","value":"/sfa/util/threadmanager.py"},"content_id":{"kind":"string","value":"b47b8186e12d1e9b86a1b0bd892650c73d52977c"},"detected_licenses":{"kind":"list like","value":["LicenseRef-scancode-unknown-license-reference"],"string":"[\n 
\"LicenseRef-scancode-unknown-license-reference\"\n]"},"license_type":{"kind":"string","value":"non_permissive"},"repo_name":{"kind":"string","value":"planetlab/sfa"},"repo_url":{"kind":"string","value":"https://github.com/planetlab/sfa"},"snapshot_id":{"kind":"string","value":"5b6ca773bf0bbb66d0b0b1f9288a511844abea56"},"revision_id":{"kind":"string","value":"d0f743e245e0bb24d7ed1016bcc6e61d1e558a95"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-23T20:13:24.722855","string":"2021-01-23T20:13:24.722855"},"revision_date":{"kind":"timestamp","value":"2011-10-17T19:48:52","string":"2011-10-17T19:48:52"},"committer_date":{"kind":"timestamp","value":"2011-10-17T19:48:52","string":"2011-10-17T19:48:52"},"github_id":{"kind":"number","value":1352164,"string":"1,352,164"},"star_events_count":{"kind":"number","value":1,"string":"1"},"fork_events_count":{"kind":"number","value":1,"string":"1"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"import threading\nimport traceback\nimport time\nfrom Queue import Queue\nfrom sfa.util.sfalogging import logger\n\ndef ThreadedMethod(callable, results, errors):\n \"\"\"\n A function decorator that returns a running thread. 
The thread\n runs the specified callable and stores the result in the specified\n results queue\n \"\"\"\n def wrapper(args, kwds):\n class ThreadInstance(threading.Thread): \n def run(self):\n try:\n results.put(callable(*args, **kwds))\n except Exception, e:\n logger.log_exc('ThreadManager: Error in thread: ')\n errors.put(traceback.format_exc())\n \n thread = ThreadInstance()\n thread.start()\n return thread\n return wrapper\n\n \n\nclass ThreadManager:\n \"\"\"\n ThreadManager executes a callable in a thread and stores the result\n in a thread safe queue. \n \"\"\"\n\n def __init__(self):\n self.results = Queue()\n self.errors = Queue()\n self.threads = []\n\n def run (self, method, *args, **kwds):\n \"\"\"\n Execute a callable in a separate thread. \n \"\"\"\n method = ThreadedMethod(method, self.results, self.errors)\n thread = method(args, kwds)\n self.threads.append(thread)\n\n start = run\n\n def join(self):\n \"\"\"\n Wait for all threads to complete \n \"\"\"\n for thread in self.threads:\n thread.join()\n\n def get_results(self, lenient=True):\n \"\"\"\n Return a list of all the results so far. Blocks until \n all threads are finished. \n If lienent is set to false the error queue will be checked before \n the response is returned. If there are errors in the queue an SFA Fault will \n be raised. \n \"\"\"\n self.join()\n results = []\n if not lenient:\n errors = self.get_errors()\n if errors: \n raise Exception(errors[0])\n\n while not self.results.empty():\n results.append(self.results.get()) \n return results\n\n def get_errors(self):\n \"\"\"\n Return a list of all errors. Blocks untill all threads are finished\n \"\"\"\n self.join()\n errors = []\n while not self.errors.empty():\n errors.append(self.errors.get())\n return errors\n\n def get_return_value(self):\n \"\"\"\n Get the value that should be returuned to the client. If there are errors then the\n first error is returned. 
If there are no errors, then the first result is returned \n \"\"\"\n \n \nif __name__ == '__main__':\n\n def f(name, n, sleep=1):\n nums = []\n for i in range(n, n+5):\n print \"%s: %s\" % (name, i)\n nums.append(i)\n time.sleep(sleep)\n return nums\n def e(name, n, sleep=1):\n nums = []\n for i in range(n, n+3) + ['n', 'b']:\n print \"%s: 1 + %s:\" % (name, i)\n nums.append(i + 1)\n time.sleep(sleep)\n return nums \n\n threads = ThreadManager()\n threads.run(f, \"Thread1\", 10, 2)\n threads.run(f, \"Thread2\", -10, 1)\n threads.run(e, \"Thread3\", 19, 1)\n\n #results = threads.get_results()\n #errors = threads.get_errors()\n #print \"Results:\", results\n #print \"Errors:\", errors\n results_xlenient = threads.get_results(lenient=False)\n\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2011,"string":"2,011"}}},{"rowIdx":41176,"cells":{"__id__":{"kind":"number","value":7361573977300,"string":"7,361,573,977,300"},"blob_id":{"kind":"string","value":"e2df286d83369a6994fcc6842aa04ff304e5249b"},"directory_id":{"kind":"string","value":"905a226b397698b528f867ce945a4605195dd81e"},"path":{"kind":"string","value":"/CMGTools/H2TauTau/python/proto/analyzers/WHMMTAnalyzer.py"},"content_id":{"kind":"string","value":"65b367aa97cd6ec04dcd6c152244707d634617f0"},"detected_licenses":{"kind":"list 
like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"gitytakahas/cmg-cmssw"},"repo_url":{"kind":"string","value":"https://github.com/gitytakahas/cmg-cmssw"},"snapshot_id":{"kind":"string","value":"f6dfff8827427676e08e1d6d098d72cc9bc01022"},"revision_id":{"kind":"string","value":"34b51fff47a11ad8a51b3949aaa433ac1ea88b55"},"branch_name":{"kind":"string","value":"refs/heads/CMG_PAT_V5_18_from-CMSSW_5_3_14"},"visit_date":{"kind":"timestamp","value":"2021-07-07T05:30:24.122663","string":"2021-07-07T05:30:24.122663"},"revision_date":{"kind":"timestamp","value":"2014-12-16T21:22:34","string":"2014-12-16T21:22:34"},"committer_date":{"kind":"timestamp","value":"2014-12-16T21:22:34","string":"2014-12-16T21:22:34"},"github_id":{"kind":"number","value":16833702,"string":"16,833,702"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":1,"string":"1"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"bool","value":true,"string":"true"},"gha_event_created_at":{"kind":"timestamp","value":"2015-03-10T17:07:19","string":"2015-03-10T17:07:19"},"gha_created_at":{"kind":"timestamp","value":"2014-02-14T10:05:16","string":"2014-02-14T10:05:16"},"gha_updated_at":{"kind":"timestamp","value":"2014-12-16T21:23:00","string":"2014-12-16T21:23:00"},"gha_pushed_at":{"kind":"timestamp","value":"2015-03-10T17:07:19","string":"2015-03-10T17:07:19"},"gha_size":{"kind":"number","value":594741,"string":"594,741"},"gha_stargazers_count":{"kind":"number","value":0,"string":"0"},"gha_forks_count":{"kind":"number","value":1,"string":"1"},"gha_open_issues_count":{"kind":"number","value":0,"string":"0"},"gha_language":{"kind":"string","value":"C++"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"import operator\nimport math\nfrom ROOT import TLorentzVector, Double\nfrom CMGTools.RootTools.fwlite.Analyzer import Analyzer\nfrom 
CMGTools.RootTools.analyzers.DiLeptonAnalyzer import DiLeptonAnalyzer\nfrom CMGTools.RootTools.fwlite.AutoHandle import AutoHandle\nfrom CMGTools.RootTools.statistics.Counter import Counter, Counters\nfrom CMGTools.RootTools.physicsobjects.PhysicsObjects import Muon, Tau, GenParticle, Jet\nfrom CMGTools.RootTools.physicsobjects.HTauTauElectron import HTauTauElectron as Electron\nfrom CMGTools.RootTools.utils.DeltaR import cleanObjectCollection, matchObjectCollection, bestMatch\nfrom CMGTools.RootTools.utils.TriggerMatching import triggerMatched\n\n\n####################################################################3\n#\n# 11 Nov 2013 Y.Takahashi\n# This analyzer is for WH, EMuTau-channel\n#\n####################################################################3\n\nclass WHMMTAnalyzer(Analyzer):\n\n # Class needed for the object selections\n LeptonClass = Muon\n OtherLeptonClass = Electron\n TauClass = Tau\n\n\n # Init\n def __init__(self, cfg_ana, cfg_comp, looperName):\n# print 'Init for the WHMMTAnalyzer'\n super(WHMMTAnalyzer,self).__init__(cfg_ana, cfg_comp, looperName)\n\n\n # beginLoop\n def beginLoop(self):\n# print 'Init for the beginLoop'\n super(WHMMTAnalyzer, self).beginLoop()\n self.counters.addCounter('MMT')\n count = self.counters.counter('MMT')\n count.register('all events')\n count.register('step1')\n count.register('step2')\n count.register('step3')\n \n def declareHandles(self):\n super(WHMMTAnalyzer, self).declareHandles()\n\n self.handles['electrons'] = AutoHandle(\n ('cmgElectronSel','','PAT'), 'std::vector')\n\n\n self.handles['muons'] = AutoHandle(\n ('cmgMuonSel','','PAT'), 'std::vector')\n\n\n self.handles['jets'] = AutoHandle( 'cmgPFJetSel',\n 'std::vector' )\n\n self.handles['taus'] = AutoHandle(\n# ('cmgTauSel','','PAT'), 'std::vector')\n# ('cmgTauSel','','MUTAUTAU'), 'std::vector')\n# ('cmgTauSel','','DIMUTAU'), 'std::vector')\n ('cmgTauSel','','DIMUTAU'), 'std::vector', fallbackLabel=('cmgTauSel','','MUTAUTAU'))\n\n\n\n\n\n\n # Muon\n 
#################################################\n \n def buildLooseLeptons(self, cmgLeptons, event):\n '''Build loose muons'''\n\n leptons = []\n \n for index, lep in enumerate(cmgLeptons):\n\n pyl = self.__class__.LeptonClass(lep)\n pyl.associatedVertex = event.goodVertices[0]\n pyl.flag_id = False\n pyl.flag_iso = False\n pyl.trig_match = False\n\n if pyl.pt() > 10. and abs(pyl.eta()) < 2.4 and \\\n pyl.looseId() and abs(pyl.dz()) < 0.2 and \\\n pyl.sourcePtr().innerTrack().hitPattern().numberOfValidPixelHits()>0:\n\n leptons.append( pyl )\n \n return leptons\n\n def muid(self, pyl):\n '''check muon ID'''\n return pyl.tightId()\n\n\n def muiso(self, pyl):\n '''check muon isolation'''\n\n relIso = False\n if abs(pyl.eta()) < 1.479 and self.testLeg2Iso(pyl, 0.15):\n relIso = True\n if abs(pyl.eta()) > 1.479 and self.testLeg2Iso(pyl, 0.1):\n relIso = True\n\n return relIso\n\n\n def buildVetoLeptons(self, cmgLeptons, event):\n '''Build muons'''\n\n leptons = []\n for index, lep in enumerate(cmgLeptons):\n pyl = self.__class__.LeptonClass(lep)\n pyl.associatedVertex = event.goodVertices[0]\n\n if pyl.pt() > 5. and abs(pyl.eta()) < 2.3 and \\\n self.muid(pyl) and abs(pyl.dz()) < 0.2 and self.testLeg2Iso(pyl, 0.15) and abs(pyl.dB3D()) < 0.2:\n\n leptons.append( pyl )\n \n return leptons\n\n\n\n\n # Electron\n #################################################\n\n def buildLooseOtherLeptons(self, cmgOtherLeptons, event):\n '''Build loose electrons'''\n\n otherLeptons = []\n\n for index, lep in enumerate(cmgOtherLeptons):\n pyl = self.__class__.OtherLeptonClass(lep)\n pyl.associatedVertex = event.goodVertices[0]\n pyl.flag_id = False\n pyl.flag_iso = False\n pyl.trig_match = False\n \n if pyl.pt() > 10. 
and abs(pyl.eta()) < 2.5 and \\\n pyl.loosestIdForTriLeptonVeto() and abs(pyl.dz()) < 0.2 and pyl.sourcePtr().isGsfCtfScPixChargeConsistent():\n \n otherLeptons.append( pyl )\n\n return otherLeptons\n\n def eid(self, pyl):\n '''check electron ID'''\n return pyl.mvaForLeptonVeto()\n\n\n def eiso(self, pyl):\n '''check electron ID'''\n\n relIso = False\n if abs(pyl.eta()) < 1.479 and self.testLeg2Iso(pyl, 0.15):\n relIso = True\n if abs(pyl.eta()) > 1.479 and self.testLeg2Iso(pyl, 0.1):\n relIso = True\n \n return relIso\n\n\n def buildVetoOtherLeptons(self, cmgOtherLeptons, event):\n '''Build electrons for third lepton veto, associate best vertex.\n '''\n otherLeptons = []\n for index, lep in enumerate(cmgOtherLeptons):\n pyl = self.__class__.OtherLeptonClass(lep)\n pyl.associatedVertex = event.goodVertices[0]\n\n if pyl.pt() > 10. and abs(pyl.eta()) < 2.5 and \\\n pyl.mvaForLeptonVeto() and abs(pyl.dz()) < 0.2 and self.testLeg2Iso(pyl, 0.3):\n\n otherLeptons.append( pyl )\n\n return otherLeptons\n\n\n\n\n # Tau\n #################################################\n\n def buildLooseTau(self, cmgLeptons, event):\n '''Build taus.'''\n leptons = []\n \n for index, lep in enumerate(cmgLeptons):\n pyl = self.__class__.TauClass(lep)\n pyl.associatedVertex = event.goodVertices[0]\n pyl.flag_id = False\n pyl.flag_iso = False\n pyl.decaymode = -999\n pyl.ep = -999\n pyl.againstELooseArmin = False\n pyl.againstETight = False\n pyl.againstELoose = False\n pyl.againstEMedium = False\n pyl.againstE2Loose = False\n pyl.againstE2Medium = False\n# pyl.againstE0Loose = False\n# pyl.againstE0Medium = False\n pyl.againstERaw = -999\n pyl.againstE2Raw = -999\n pyl.againstE0Raw = -999\n pyl.againstECat = -999\n pyl.againstE2Cat = -999\n# pyl.againstE0Cat = -999\n pyl.againstMuLoose = False\n pyl.againstMuTight = False\n pyl.mvaisolation = -999\n pyl.mvaisolation_loose = False\n pyl.dBisolation = -999\n\n\n ### new tau ID ###\n\n pyl.byLooseCombinedIsolationDeltaBetaCorr3Hits = False\n 
pyl.byMediumCombinedIsolationDeltaBetaCorr3Hits = False\n pyl.byTightCombinedIsolationDeltaBetaCorr3Hits = False\n pyl.byCombinedIsolationDeltaBetaCorrRaw3Hits = -999\n pyl.againstMuonLoose2 = False\n pyl.againstMuonMedium2 = False\n pyl.againstMuonTight2 = False\n pyl.againstElectronMVA5category = False\n pyl.againstElectronLooseMVA5 = False\n pyl.againstElectronMediumMVA5 = False\n pyl.againstElectronTightMVA5 = False\n pyl.againstElectronVTightMVA5 = False\n pyl.againstMuonLoose3 = False\n pyl.againstMuonTight3 = False\n pyl.againstMuonMVALoose = False\n pyl.againstMuonMVAMedium = False\n pyl.againstMuonMVATight = False\n pyl.againstMuonMVARaw = -999\n pyl.byIsolationMVA3oldDMwoLTraw = -999\n pyl.byLooseIsolationMVA3oldDMwoLT = False\n pyl.byMediumIsolationMVA3oldDMwoLT = False\n pyl.byTightIsolationMVA3oldDMwoLT = False\n pyl.byVTightIsolationMVA3oldDMwoLT = False\n pyl.byVVTightIsolationMVA3oldDMwoLT = False\n pyl.byIsolationMVA3oldDMwLTraw = -999\n pyl.byLooseIsolationMVA3oldDMwLT = False\n pyl.byMediumIsolationMVA3oldDMwLT = False\n pyl.byTightIsolationMVA3oldDMwLT = False\n pyl.byVTightIsolationMVA3oldDMwLT = False\n pyl.byVVTightIsolationMVA3oldDMwLT = False\n pyl.byIsolationMVA3newDMwoLTraw = -999\n pyl.byLooseIsolationMVA3newDMwoLT = False\n pyl.byMediumIsolationMVA3newDMwoLT = False\n pyl.byTightIsolationMVA3newDMwoLT = False\n pyl.byVTightIsolationMVA3newDMwoLT = False\n pyl.byVVTightIsolationMVA3newDMwoLT = False\n pyl.byIsolationMVA3newDMwLTraw = -999\n pyl.byLooseIsolationMVA3newDMwLT = False\n pyl.byMediumIsolationMVA3newDMwLT = False\n pyl.byTightIsolationMVA3newDMwLT = False\n pyl.byVTightIsolationMVA3newDMwLT = False\n pyl.byVVTightIsolationMVA3newDMwLT = False\n\n\n # old tau ID\n \n pyl.decayModeFinding = False\n pyl.byVLooseCombinedIsolationDeltaBetaCorr = False\n pyl.byLooseCombinedIsolationDeltaBetaCorr = False\n pyl.byMediumCombinedIsolationDeltaBetaCorr = False\n pyl.byTightCombinedIsolationDeltaBetaCorr = False\n pyl.againstElectronLoose 
= False\n pyl.againstElectronMedium = False\n pyl.againstElectronTight = False\n pyl.againstElectronDeadECAL = False\n pyl.againstMuonLoose = False\n pyl.againstMuonMedium = False\n pyl.againstMuonTight = False\n\n\n \n\n if pyl.pt() > 20 and abs(pyl.eta()) < 2.3 and \\\n pyl.tauID(\"decayModeFinding\") and abs(pyl.dz()) < 0.2:\n \n leptons.append( pyl ) \n\n return leptons\n\n\n def tauid(self, pyl):\n '''check tau ID.'''\n\n# print 'inside_check', pyl.tauID(\"againstMuonLoose\"), pyl.tauID(\"againstElectronLooseMVA3\")\n if pyl.tauID(\"againstMuonLoose\") > 0.5 and pyl.tauID(\"againstElectronLooseMVA3\"):\n# print 'This becomes true !!'\n return True\n else: \n return False\n\n\n def tauiso(self, pyl):\n '''check tau isolation.'''\n\n return self.testLeg1Iso(pyl, None)\n\n\n \n def buildVetoTau(self, cmgLeptons, event):\n '''Build taus.'''\n leptons = []\n\n for index, lep in enumerate(cmgLeptons):\n pyl = self.__class__.TauClass(lep)\n pyl.associatedVertex = event.goodVertices[0]\n\n if pyl.pt() > 20 and abs(pyl.eta()) < 2.5 and \\\n pyl.tauID(\"decayModeFinding\") and self.testLeg1Iso(pyl, None) and abs(pyl.dz()) < 0.2:\n\n leptons.append( pyl )\n\n return leptons\n\n\n\n\n # process\n #####################################################\n\n def process(self, iEvent, event):\n\n# print 'process ongoing!'\n# import pdb; pdb.set_trace()\n\t\n#\timport pdb; pdb.set_trace()\n self.readCollections(iEvent)\n self.counters.counter('MMT').inc('all events')\n \n event.muoncand = self.buildLooseLeptons(self.handles['muons'].product(), event)\n event.electroncand = self.buildLooseOtherLeptons(self.handles['electrons'].product(), event)\n event.taucand = self.buildLooseTau(self.handles['taus'].product(), event)\n\n cmgJets = self.handles['jets'].product()\n\n event.CSVjet = []\n\n for cmgJet in cmgJets:\n jet = Jet( cmgJet )\n if self.testVetoBJet(jet):\n event.CSVjet.append(jet)\n\n\n event.electroncand, dummpy = cleanObjectCollection(event.electroncand,\n masks = 
event.muoncand,\n deltaRMin = 0.5)\n\n \n \n # CSV veto\n electroncand_removebjet = []\n muoncand_removebjet = []\n \n for ielectron in event.electroncand:\n bm, dr2min = bestMatch(ielectron, event.CSVjet)\n if dr2min > 0.25:\n electroncand_removebjet.append(ielectron)\n\n for imuon in event.muoncand:\n bm, dr2min = bestMatch(imuon, event.CSVjet)\n if dr2min > 0.25:\n muoncand_removebjet.append(imuon)\n\n event.electroncand = electroncand_removebjet\n event.muoncand = muoncand_removebjet\n \n \n# event.flag_trigmatched = False\n# \n\n\n# if not event.flag_trigmatched:\n# return False\n\n \n# event.cleanelectron = []\n# event.cleanmuon = []\n\n\n\n for ii in event.electroncand: \n ii.flag_id = self.eid(ii)\n ii.flag_iso = self.eiso(ii)\n\n# ii.trig_match = True\n\n if hasattr(event, 'hltPath'):\n if self.triggerCheck(event, event.hltPath, ii):\n ii.trig_match = True\n# if hasattr(event, 'hltPaths'):\n# if self.triggerCheck(event, event.hltPaths, ii):\n# ii.trig_match = True\n\n\n#\n# for jj in event.muoncand:\n# if self.returnMass(jj, ii) > 20. and \\\n# ii.charge()*jj.charge()==1. and \\\n# self.returnDR(ii, jj) > 0.5:\n#\n# flag_add = True\n#\n# if flag_add:\n# ii.flag_id = self.eid(ii)\n# ii.flag_iso = self.eiso(ii)\n# event.cleanelectron.append(ii)\n#\n#\n#\n for ii in event.muoncand:\n ii.flag_id = self.muid(ii)\n ii.flag_iso = self.muiso(ii)\n\n\n# ii.trig_match = True\n# if hasattr(event, 'hltPaths'):\n# if self.triggerCheck(event, event.hltPaths, ii):\n# ii.trig_match = True\n\n if hasattr(event, 'hltPath'):\n if self.triggerCheck(event, event.hltPath, ii):\n ii.trig_match = True\n\n\n# continue\n# \n# for jj in event.electroncand:\n# if self.returnMass(jj, ii) > 20. and \\\n# ii.charge()*jj.charge()==1. 
and \\\n# self.returnDR(ii, jj) > 0.5:\n#\n# flag_add = True\n#\n# if flag_add:\n# ii.flag_id = self.muid(ii)\n# ii.flag_iso = self.muiso(ii)\n# event.cleanmuon.append(ii)\n\n\n\n# event.electroncand = event.cleanelectron\n# event.muoncand = event.cleanmuon\n\n \n\n\n# idiso_electron = [ie for ie in event.electroncand if self.eid(ie) and self.eiso(ie)]\n# idiso_muon = [im for im in event.muoncand if self.muid(im) and self.muiso(im)]\n\n# if idiso_electron[0].pt() > idiso_muon[0].pt():\n \n\n\n# if not (len(event.muoncand)>=1 and len(event.electroncand)>=1 and len(event.taucand)>=1):\n# print 'YCheck : (m,e,t) = ', len(event.muoncand), len(event.electroncand), len(event.taucand)\n# return False\n\n \n# lepton1 = [] # Leading lepton\n# lepton2 = [] # 2nd leading lepton\n\n\n# if not (len(id_electron)>=1 and len(id_muon)>=1):\n# return False\n \n\n# lepton_type = ''\n#\n# if id_electron[0].pt() > id_muon[0].pt(): #e-mu\n# lepton1 = [ie for ie in id_electron if ie.pt() > 20.]\n# lepton2 = [im for im in id_muon if im.pt() > 10.]\n# lepton_type = 'electron'\n# elif id_electron[0].pt() < id_muon[0].pt():\n# lepton1 = [im for im in id_muon if im.pt() > 20.]\n# lepton2 = [ie for ie in id_electron if ie.pt() > 10.]\n# lepton_type = 'muon'\n\n\n\n# import pdb; pdb.set_trace() \n# if not (len(lepton1)==1 and len(lepton2)==1):\n# return False\n\n# self.counters.counter('MMT').inc('1mu + 1e')\n# \n#\n# event.muon = ''\n# event.electron = ''\n# \n# if lepton_type=='muon':\n# event.muon = lepton1[0]\n# event.electron = lepton2[0]\n# elif lepton_type=='electron':\n# event.electron = lepton1[0]\n# event.muon = lepton2[0]\n\n\n\n\n event.loosetau = []\n\n for itau in event.taucand:\n\n itau.decaymode = itau.decayMode()\n itau.ep = itau.calcEOverP()\n itau.flag_iso = self.tauiso(itau)\n itau.flag_id = self.tauid(itau)\n\n itau.againstERaw = itau.tauID('againstElectronMVA3raw')\n itau.againstE2Raw = itau.tauID('againstElectronMVA2raw')\n itau.againstE0Raw = 
itau.tauID('againstElectronMVA')\n itau.againstECat = int(round(itau.tauID('againstElectronMVA3category')))\n itau.againstE2Cat = int(round(itau.tauID('againstElectronMVA2category')))\n# itau.againstE0Cat = int(round(itau.tauID('againstElectronMVAcategory')))\n itau.againstELooseArmin = itau.tauID(\"againstElectronLoose\")\n itau.againstETight = itau.tauID(\"againstElectronTightMVA3\")\n itau.againstELoose = itau.tauID(\"againstElectronLooseMVA3\")\n itau.againstEMedium = itau.tauID(\"againstElectronMediumMVA3\")\n itau.againstE2Loose = itau.tauID(\"againstElectronLooseMVA2\")\n itau.againstE2Medium = itau.tauID(\"againstElectronMediumMVA2\")\n# itau.againstE0Loose = itau.tauID(\"againstElectronLooseMVA\")\n# itau.againstE0Medium = itau.tauID(\"againstElectronMediumMVA\")\n itau.againstMuLoose = itau.tauID(\"againstMuonLoose\")\n itau.againstMuTight = itau.tauID(\"againstMuonTight\")\n itau.dBisolation = itau.tauID(\"byCombinedIsolationDeltaBetaCorrRaw3Hits\")\n itau.mvaisolation = itau.tauID(\"byRawIsoMVA\")\n itau.mvaisolation_loose = itau.tauID('byLooseIsoMVA')\n\n # new tau ID \n itau.byLooseCombinedIsolationDeltaBetaCorr3Hits = itau.tauID(\"byLooseCombinedIsolationDeltaBetaCorr3Hits\")\n itau.byMediumCombinedIsolationDeltaBetaCorr3Hits = itau.tauID(\"byMediumCombinedIsolationDeltaBetaCorr3Hits\")\n itau.byTightCombinedIsolationDeltaBetaCorr3Hits = itau.tauID(\"byTightCombinedIsolationDeltaBetaCorr3Hits\")\n itau.byCombinedIsolationDeltaBetaCorrRaw3Hits = itau.tauID(\"byCombinedIsolationDeltaBetaCorrRaw3Hits\")\n itau.againstMuonLoose2 = itau.tauID(\"againstMuonLoose2\")\n itau.againstMuonMedium2 = itau.tauID(\"againstMuonMedium2\")\n itau.againstMuonTight2 = itau.tauID(\"againstMuonTight2\")\n itau.againstElectronMVA5category = itau.tauID(\"againstElectronMVA5category\")\n itau.againstElectronLooseMVA5 = itau.tauID(\"againstElectronLooseMVA5\")\n itau.againstElectronMediumMVA5 = itau.tauID(\"againstElectronMediumMVA5\")\n itau.againstElectronTightMVA5 = 
itau.tauID(\"againstElectronTightMVA5\")\n itau.againstElectronVTightMVA5 = itau.tauID(\"againstElectronVTightMVA5\")\n itau.againstMuonLoose3 = itau.tauID(\"againstMuonLoose3\")\n itau.againstMuonTight3 = itau.tauID(\"againstMuonTight3\")\n itau.againstMuonMVALoose = itau.tauID(\"againstMuonMVALoose\")\n itau.againstMuonMVAMedium = itau.tauID(\"againstMuonMVAMedium\")\n itau.againstMuonMVATight = itau.tauID(\"againstMuonMVATight\")\n itau.againstMuonMVARaw = itau.tauID(\"againstMuonMVARaw\")\n itau.byIsolationMVA3oldDMwoLTraw = itau.tauID(\"byIsolationMVA3oldDMwoLTraw\")\n itau.byLooseIsolationMVA3oldDMwoLT = itau.tauID(\"byLooseIsolationMVA3oldDMwoLT\")\n itau.byMediumIsolationMVA3oldDMwoLT = itau.tauID(\"byMediumIsolationMVA3oldDMwoLT\")\n itau.byTightIsolationMVA3oldDMwoLT = itau.tauID(\"byTightIsolationMVA3oldDMwoLT\")\n itau.byVTightIsolationMVA3oldDMwoLT = itau.tauID(\"byVTightIsolationMVA3oldDMwoLT\")\n itau.byVVTightIsolationMVA3oldDMwoLT = itau.tauID(\"byVVTightIsolationMVA3oldDMwoLT\")\n itau.byIsolationMVA3oldDMwLTraw = itau.tauID(\"byIsolationMVA3oldDMwLTraw\")\n itau.byLooseIsolationMVA3oldDMwLT = itau.tauID(\"byLooseIsolationMVA3oldDMwLT\")\n itau.byMediumIsolationMVA3oldDMwLT = itau.tauID(\"byMediumIsolationMVA3oldDMwLT\")\n itau.byTightIsolationMVA3oldDMwLT = itau.tauID(\"byTightIsolationMVA3oldDMwLT\")\n itau.byVTightIsolationMVA3oldDMwLT = itau.tauID(\"byVTightIsolationMVA3oldDMwLT\")\n itau.byVVTightIsolationMVA3oldDMwLT = itau.tauID(\"byVVTightIsolationMVA3oldDMwLT\")\n itau.byIsolationMVA3newDMwoLTraw = itau.tauID(\"byIsolationMVA3newDMwoLTraw\")\n itau.byLooseIsolationMVA3newDMwoLT = itau.tauID(\"byLooseIsolationMVA3newDMwoLT\")\n itau.byMediumIsolationMVA3newDMwoLT = itau.tauID(\"byMediumIsolationMVA3newDMwoLT\")\n itau.byTightIsolationMVA3newDMwoLT = itau.tauID(\"byTightIsolationMVA3newDMwoLT\")\n itau.byVTightIsolationMVA3newDMwoLT = itau.tauID(\"byVTightIsolationMVA3newDMwoLT\")\n itau.byVVTightIsolationMVA3newDMwoLT = 
itau.tauID(\"byVVTightIsolationMVA3newDMwoLT\")\n itau.byIsolationMVA3newDMwLTraw = itau.tauID(\"byIsolationMVA3newDMwLTraw\")\n itau.byLooseIsolationMVA3newDMwLT = itau.tauID(\"byLooseIsolationMVA3newDMwLT\")\n itau.byMediumIsolationMVA3newDMwLT = itau.tauID(\"byMediumIsolationMVA3newDMwLT\")\n itau.byTightIsolationMVA3newDMwLT = itau.tauID(\"byTightIsolationMVA3newDMwLT\")\n itau.byVTightIsolationMVA3newDMwLT = itau.tauID(\"byVTightIsolationMVA3newDMwLT\")\n itau.byVVTightIsolationMVA3newDMwLT = itau.tauID(\"byVVTightIsolationMVA3newDMwLT\")\n\n # old tau ID\n\n itau.decayModeFinding = itau.tauID(\"decayModeFinding\")\n itau.byVLooseCombinedIsolationDeltaBetaCorr = itau.tauID(\"byVLooseCombinedIsolationDeltaBetaCorr\")\n itau.byLooseCombinedIsolationDeltaBetaCorr = itau.tauID(\"byLooseCombinedIsolationDeltaBetaCorr\")\n itau.byMediumCombinedIsolationDeltaBetaCorr = itau.tauID(\"byMediumCombinedIsolationDeltaBetaCorr\")\n itau.byTightCombinedIsolationDeltaBetaCorr = itau.tauID(\"byTightCombinedIsolationDeltaBetaCorr\")\n itau.againstElectronLoose = itau.tauID(\"againstElectronLoose\")\n itau.againstElectronMedium = itau.tauID(\"againstElectronMedium\")\n itau.againstElectronTight = itau.tauID(\"againstElectronTight\")\n itau.againstElectronDeadECAL = itau.tauID(\"againstElectronDeadECAL\")\n itau.againstMuonLoose = itau.tauID(\"againstMuonLoose\")\n itau.againstMuonMedium = itau.tauID(\"againstMuonMedium\")\n itau.againstMuonTight = itau.tauID(\"againstMuonTight\")\n\n\n\n\n# print 'dB, raw, loose', itau.tauID(\"byCombinedIsolationDeltaBetaCorrRaw3Hits\"), itau.tauID(\"byRawIsoMVA\"), itau.tauID('byLooseIsoMVA')\n# print 'ID_check', itau.tauID(\"againstMuonLoose\"), itau.tauID(\"againstElectronLooseMVA3\")\n# print 'mu_loose, e_loose, e_medium', itau.tauID(\"againstMuonLoose\"), itau.tauID(\"againstElectronLooseMVA3\"), itau.tauID(\"againstElectronMediumMVA3\"), itau.flag_id\n \n# if flag_mu_mass and and \\\n# ((itau.decayMode()==0 and itau.calcEOverP() > 0.2) or 
(itau.decayMode()!=0)):\n# itau.flag_id = True\n#\n# \n# if flag_e_mass==False and flag_mu_mass==False and self.tauid(itau):\n# itau.flag_id = True\n\n\n\n\n\n# flag_e_overlap = False\n# flag_e_mass = False\n#\n# for ii in idiso_electron:\n# mass_et = self.returnMass(ii, itau)\n# if mass_et > 71.2 and mass_et < 111.2:\n# flag_e_mass = True\n# \n# if self.returnDR(itau, ii) < 0.5:\n# flag_e_overlap = True\n#\n# if flag_e_overlap:\n# continue\n# \n#\n# flag_mu_overlap = False\n# flag_mu_mass = False\n#\n# for ii in idiso_muon:\n# mass_mt = self.returnMass(ii, itau)\n# if mass_mt > 71.2 and mass_mt < 111.2:\n# flag_mu_mass = True\n# \n# if self.returnDR(itau, ii) < 0.5:\n# flag_mu_overlap = True\n#\n# if flag_mu_overlap:\n# continue\n\n\n# if self.tauiso(itau):\n# itau.flag_iso = True\n#\n# \n# if flag_e_mass and itau.tauID(\"againstElectronMediumMVA3\"):\n# itau.flag_id = True\n#\n#\n# if flag_mu_mass and itau.tauID(\"againstMuonTight\") and \\\n# ((itau.decayMode()==0 and itau.calcEOverP() > 0.2) or (itau.decayMode()!=0)):\n# itau.flag_id = True\n#\n# \n# if flag_e_mass==False and flag_mu_mass==False and self.tauid(itau):\n# itau.flag_id = True\n\n\n event.loosetau.append(itau)\n\n\n event.taucand = event.loosetau\n\n # Additional tau veto\n event.vetotaucand = self.buildVetoTau(self.handles['taus'].product(), event)\n event.vetomuoncand = self.buildVetoLeptons(self.handles['muons'].product(), event)\n event.vetoelectroncand = self.buildVetoOtherLeptons(self.handles['electrons'].product(), event)\n\n flag_plus = 0\n flag_minus = 0\n \n for im in event.muoncand:\n if im.charge()==1:\n flag_plus +=1\n else:\n flag_minus +=1\n\n\n self.counters.counter('MMT').inc('step1')\n \n if not (flag_plus >= 2 or flag_minus >= 2):\n return False\n\n self.counters.counter('MMT').inc('step2')\n\n if not (len(event.muoncand)>=2 and len(event.taucand)>=1):\n# if not (len(event.taucand)>=1 and len(event.muoncand)>=1 and len(event.electroncand)>=1):\n return False\n\n 
self.counters.counter('MMT').inc('step3')\n\n# idiso_tau = [it for it in event.taucand if (it.flag_id and it.flag_iso)]\n# if not len(idiso_tau)>=1 :\n# return False\n\n\n\n\n\n# if not len(lepton3) == 1:\n# return False\n\n# self.counters.counter('MMT').inc('1 e/mu/tau')\n# event.tau = lepton3[0]\n\n\n\n\n# event.M_l2t = self.returnMass(lepton2[0], event.tau)\n\n# if self.returnMass(event.muon, event.electron) < 20.:\n# return False\n# if self.returnMass(lepton2[0], event.tau) < 20.:\n# return False\n\n \n\n # charge requirement\n # SS for two light leptons\n\n# if event.electron.charge()*event.muon.charge()==-1.:\n# return False\n\n\n# if event.tau.charge()*event.muon.charge()!=-1.:\n# return False\n\n\n # dR separation \n# if self.returnDR(event.tau, event.muon) < 0.5:\n# return False\n# if self.returnDR(event.tau, event.electron) < 0.5:\n# return False\n# if self.returnDR(event.electron, event.muon) < 0.5:\n# return False\n\n \n\n# event.loosetaucand, dummpy = cleanObjectCollection(event.loosetaucand,\n# masks = [event.muon],\n## masks = event.muoncand,\n# deltaRMin = 0.4)\n#\n# event.loosetaucand, dummpy = cleanObjectCollection(event.loosetaucand,\n# masks = [event.electron],\n## masks = event.electroncand,\n# deltaRMin = 0.4)\n#\n# event.loosetaucand, dummpy = cleanObjectCollection(event.loosetaucand,\n# masks = [event.tau],\n# deltaRMin = 0.4)\n#\n#\n# event.loosemuoncand, dummpy = cleanObjectCollection(event.loosemuoncand,\n# masks = [event.muon],\n# deltaRMin = 0.4)\n#\n# event.loosemuoncand, dummpy = cleanObjectCollection(event.loosemuoncand,\n# masks = [event.electron],\n# deltaRMin = 0.4)\n#\n# event.loosemuoncand, dummpy = cleanObjectCollection(event.loosemuoncand,\n# masks = [event.tau],\n# deltaRMin = 0.4)\n#\n# event.looseelectroncand, dummpy = cleanObjectCollection(event.looseelectroncand,\n# masks = [event.muon],\n# deltaRMin = 0.4)\n#\n# event.looseelectroncand, dummpy = cleanObjectCollection(event.looseelectroncand,\n# masks = 
[event.electron],\n# deltaRMin = 0.4)\n#\n# event.looseelectroncand, dummpy = cleanObjectCollection(event.looseelectroncand,\n# masks = [event.tau],\n# deltaRMin = 0.4)\n\n \n \n# NadditionalLepton = len(event.loosetaucand) + len(event.loosemuoncand) + len(event.looseelectroncand)\n# if NadditionalLepton>=1:\n# return False\n\n\n\n# print 'All events passed : ', event.run, event.lumi, event.eventId\n \n return True\n\n \n\n def returnMass(self, obj1, obj2):\n\n e4 = TLorentzVector()\n t4 = TLorentzVector()\n\n e4.SetPtEtaPhiM(Double(obj1.pt()),\n Double(obj1.eta()),\n Double(obj1.phi()),\n Double(obj1.mass()))\n\n t4.SetPtEtaPhiM(Double(obj2.pt()),\n Double(obj2.eta()),\n Double(obj2.phi()),\n Double(obj2.mass()))\n\n return (e4 + t4).M()\n \n def returnDR(self, obj1, obj2):\n deta = obj1.eta() - obj2.eta()\n dphi = obj1.phi() - obj2.phi()\n dr2 = deta*deta + dphi*dphi\n return math.sqrt(dr2)\n\n def triggerCheck(self, event, hltPath, leg):\n\n flag_pass = False\n \n# for itrig in hltPaths:\n \n# if self.trigMatched(event, itrig, leg):\n# flag_pass = True\n\n if self.trigMatched(event, hltPath, leg):\n flag_pass = True\n\n return flag_pass\n\n\n def testLeg1Iso(self, tau, isocut):\n '''if isocut is None, returns true if three-hit iso cut is passed.\n Otherwise, returns true if iso MVA > isocut.'''\n if isocut is None:\n# print 'check tau ID ', tau.tauID('byCombinedIsolationDeltaBetaCorrRaw3Hits')\n# return tau.tauID(\"byCombinedIsolationDeltaBetaCorrRaw3Hits\") < 1.5\n# return tau.tauID(\"byMediumCombinedIsolationDeltaBetaCorr3Hits\")\n return tau.tauID(\"byLooseCombinedIsolationDeltaBetaCorr3Hits\")\n else:\n return tau.tauID(\"byRawIsoMVA\")>isocut\n\n\n def testVertex(self, lepton):\n '''Tests vertex constraints, for mu and tau'''\n return abs(lepton.dxy()) < 0.045 and \\\n abs(lepton.dz()) < 0.2 \n\n\n def testLeg2ID(self, muon):\n '''Tight muon selection, no isolation requirement'''\n return muon.tightId() and \\\n self.testVertex( muon )\n \n\n def 
testLeg2Iso(self, muon, isocut):\n '''Tight muon selection, with isolation requirement'''\n if isocut is None:\n isocut = self.cfg_ana.iso2\n# print muon.relIsoAllChargedDB05, isocut\n# return muon.relIsoAllChargedDB05() 2 leptons (e or mu).'''\n vleptons = [lep for lep in leptons if\n self.testLegKine(lep, ptcut=ptcut, etacut=2.4) and \n self.testLeg2ID(lep) and\n self.testLeg2Iso(lep, isocut) ]\n # count electrons\n votherLeptons = [olep for olep in otherLeptons if \n self.testLegKine(olep, ptcut=ptcut, etacut=2.5) and \\\n olep.looseIdForTriLeptonVeto() and \\\n self.testVertex( olep ) and \\\n olep.relIsoAllChargedDB05() < isocut\n ]\n if len(vleptons) + len(votherLeptons)> 1:\n return False\n else:\n return True\n\n\n def leptonAccept(self, leptons):\n '''The di-lepton veto, returns false if > one lepton.\n e.g. > 1 mu in the mu tau channel'''\n looseLeptons = [muon for muon in leptons if\n self.testLegKine(muon, ptcut=15, etacut=2.4) and\n muon.isGlobalMuon() and\n muon.isTrackerMuon() and\n muon.sourcePtr().userFloat('isPFMuon') and\n #COLIN Not sure this vertex cut is ok... check emu overlap\n #self.testVertex(muon) and\n # JAN: no dxy cut\n abs(muon.dz()) < 0.2 and\n self.testLeg2Iso(muon, 0.3)\n ]\n isPlus = False\n isMinus = False\n # import pdb; pdb.set_trace()\n for lepton in looseLeptons:\n if lepton.charge()<0: isMinus=True\n elif lepton.charge()>0: isPlus=True\n else:\n raise ValueError('Impossible!')\n veto = isMinus and isPlus\n return not veto\n\n def testVetoBJet(self, jet):\n # medium csv working point\n # https://twiki.cern.ch/twiki/bin/viewauth/CMS/BTagPerformanceOP#B_tagging_Operating_Points_for_3\n\n jet.btagMVA = jet.btag(\"combinedSecondaryVertexBJetTags\")\n\n return jet.pt()>12. 
and \\\n abs( jet.eta() ) < 2.4 and \\\n jet.btagMVA > 0.8\n\n\n\n# def testBJet(self, jet):\n# # medium csv working point\n# # https://twiki.cern.ch/twiki/bin/viewauth/CMS/BTagPerformanceOP#B_tagging_Operating_Points_for_3\n# jet.btagMVA = jet.btag(\"combinedSecondaryVertexBJetTags\")\n#\n# return jet.pt()>20. and \\\n# abs( jet.eta() ) < 2.4 and \\\n# jet.btagMVA > 0.898 and \\\n# self.testJetID(jet)\n#\n#\n# def testJetID(self, jet):\n# jet.puJetIdPassed = jet.puJetId(wp53x=True)\n# jet.pfJetIdPassed = jet.looseJetId()\n#\n# return jet.puJetIdPassed and jet.pfJetIdPassed\n\n\n### def trigMatched(self, event, leg, legName):\n### '''Returns true if the leg is matched to a trigger object as defined in the\n### triggerMap parameter'''\n### if not hasattr( self.cfg_ana, 'triggerMap'):\n### return True\n#### else:\n#### print 'Trigger OK'\n###\n###\n### path = event.hltPath\n### print 'path = ', path\n### \n### triggerObjects = event.triggerObjects\n### print 'triggerObjects = ', triggerObjects\n###\n### filters = self.cfg_ana.triggerMap[ path ]\n### print 'filters = ', filters\n### \n### filter = None\n### print 'filter = ', filter\n###\n###\n#### import pdb; pdb.set_trace()\n### \n### if legName == 'leg1':\n### filter = filters[0]\n### elif legName == 'leg2':\n### filter = filters[1]\n### else:\n### raise ValueError( 'legName should be leg1 or leg2, not {leg}'.format(\n### leg=legName ) )\n###\n### # JAN: Need a hack for the embedded samples: No trigger matching in that case\n### if filter == '':\n#### print 'Jan filter'\n### return True\n###\n### for it in triggerObjects:\n### print '(path, filter, obj, hasPath, hasSelection = ', path, filter, it, it.hasPath(path), it.hasSelection(filter)\n###\n###\n### # the dR2Max value is 0.3^2\n### pdgIds = None\n### if len(filter) == 2:\n### filter, pdgIds = filter[0], filter[1]\n### return triggerMatched(leg, triggerObjects, path, filter,\n### dR2Max=0.089999,\n#### dR2Max=0.25,\n### pdgIds=pdgIds )\n\n\n\n\n\n\n# def 
trigMatched(self, event, trigpath, leg1, leg2):\n# '''Returns true if the leg is matched to a trigger object as defined in the\n# triggerMap parameter'''\n# if not hasattr( self.cfg_ana, 'triggerMap'):\n# return True\n#\n#\n#\n# triggerObjects = event.triggerObjects\n# filters = self.cfg_ana.triggerMap[ trigpath ]\n# filter = filters[0]\n# pdgIds = None\n#\n#\n## print 'trigger path = ', trigpath\n## for it in triggerObjects:\n## print '(filter, obj, hasPath, hasSelection = ', filter, it.hasPath(path), it.hasSelection(filter), it\n#\n# \n#\n# triggerMatched1 = triggerMatched(leg1, triggerObjects, trigpath, filter,\n# dR2Max=0.089999,\n# pdgIds=pdgIds )\n#\n#\n# triggerMatched2 = triggerMatched(leg2, triggerObjects, trigpath, filter,\n# dR2Max=0.089999,\n# pdgIds=pdgIds )\n#\n#\n## import pdb; pdb.set_trace();\n#\n#\n# if filter.find('Mu8_Ele17')!=-1:\n# return triggerMatched1 and triggerMatched2 and leg1.pt() > 10. and leg2.pt() > 20.\n# elif filter.find('Mu17_Ele8')!=-1:\n# return triggerMatched1 and triggerMatched2 and leg1.pt() > 20. 
and leg2.pt() > 10.\n# else:\n# print 'Unexpected Trigger !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!1'\n# return False\n\n\n\n\n def trigMatched(self, event, trigpath, leg):\n '''Returns true if the leg is matched to a trigger object'''\n if not hasattr( self.cfg_ana, 'triggerMap'):\n return True\n\n# print trigpath\n\n triggerObjects = event.triggerObjects\n filters = self.cfg_ana.triggerMap[ trigpath ]\n filter = filters[0]\n pdgIds = None\n\n\n flag = triggerMatched(leg, triggerObjects, trigpath, filter,\n dR2Max=0.089999,\n pdgIds=pdgIds )\n\n\n if filter.find('Mu17_Mu8')!=-1 or filter.find('Mu17_TkMu8')!=-1:\n return flag\n else:\n return False\n\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41177,"cells":{"__id__":{"kind":"number","value":7258494777165,"string":"7,258,494,777,165"},"blob_id":{"kind":"string","value":"cba1f546d7978d2d7a1f7285789027c60acb45fa"},"directory_id":{"kind":"string","value":"73e02ee1e3537247f51781ce6d2bd4b0aa8f2e93"},"path":{"kind":"string","value":"/dynamodb/batch.py"},"content_id":{"kind":"string","value":"5da32e8b025af9df052a48699624ff8c3833b4ba"},"detected_licenses":{"kind":"list 
like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"mulka/boto_mock"},"repo_url":{"kind":"string","value":"https://github.com/mulka/boto_mock"},"snapshot_id":{"kind":"string","value":"0f946b0844e4e4e81428abd5b2bf3b43cb7680d5"},"revision_id":{"kind":"string","value":"1c22b58fc52485c56a941764b0e1c460e35eb2f5"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2020-06-06T11:29:56.164071","string":"2020-06-06T11:29:56.164071"},"revision_date":{"kind":"timestamp","value":"2012-06-07T03:31:29","string":"2012-06-07T03:31:29"},"committer_date":{"kind":"timestamp","value":"2012-06-07T03:31:29","string":"2012-06-07T03:31:29"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"class BatchWrite(object):\n def __init__(self, table, puts=None):\n self.table = table\n self.puts = puts or []\n\nclass BatchWriteList(list):\n def __init__(self, layer2):\n list.__init__(self)\n self.layer2 = layer2\n\n def add_batch(self, table, puts=None):\n self.append(BatchWrite(table, puts))\n\n def submit(self):\n return 
self.layer2.batch_write_item(self)\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2012,"string":"2,012"}}},{"rowIdx":41178,"cells":{"__id__":{"kind":"number","value":4767413732841,"string":"4,767,413,732,841"},"blob_id":{"kind":"string","value":"4111e3cb7176ac354cf914c24cd0a13fb4cfdf85"},"directory_id":{"kind":"string","value":"ba0ddfca9381ce1a55dabb248128c0f0342c85f1"},"path":{"kind":"string","value":"/scripts/webui/config_tab_tests.py"},"content_id":{"kind":"string","value":"a676d79e266a64a2e479158538550b1c4a195ec2"},"detected_licenses":{"kind":"list like","value":["LicenseRef-scancode-warranty-disclaimer","Apache-2.0"],"string":"[\n \"LicenseRef-scancode-warranty-disclaimer\",\n \"Apache-2.0\"\n]"},"license_type":{"kind":"string","value":"non_permissive"},"repo_name":{"kind":"string","value":"mloni/contrail-test"},"repo_url":{"kind":"string","value":"https://github.com/mloni/contrail-test"},"snapshot_id":{"kind":"string","value":"0237a247d9de41ab838763fde3c4da89e1a409ed"},"revision_id":{"kind":"string","value":"db27c1abae084a22ba5a0f5a6a3212f5480d68e4"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2020-12-26T01:37:25.284010","string":"2020-12-26T01:37:25.284010"},"revision_date":{"kind":"timestamp","value":"2014-11-06T04:20:18","string":"2014-11-06T04:20:18"},"committer_date":{"kind":"timestamp","value":"2014-11-06T04:20:18","string":"2014-11-06T04:20:18"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size
":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"# Need to import path to test/fixtures and test/scripts/\n# Ex : export PYTHONPATH='$PATH:/root/test/fixtures/:/root/test/scripts/'\n#\n# To run tests, you can do 'python -m testtools.run tests'. To run specific tests,\n# You can do 'python -m testtools.run -l tests'\n# Set the env variable PARAMS_FILE to point to your ini file. Else it will try to pick params.ini in PWD\n#\nimport os\nimport fixtures\nimport testtools\n\nfrom contrail_test_init import *\nfrom vn_test import *\nfrom vm_test import *\nfrom connections import ContrailConnections\nfrom floating_ip import *\nfrom policy_test import *\nfrom contrail_fixtures import *\nfrom tcutils.wrappers import preposttest_wrapper\nfrom testresources import ResourcedTestCase\nfrom .webui_sanity_resource import SolnSetupResource\nfrom selenium import webdriver\nfrom pyvirtualdisplay import Display\nfrom selenium.webdriver.common.keys import Keys\nimport time\nimport random\nfrom webui_test import *\nfrom selenium.webdriver.support.ui import WebDriverWait\n\n\nclass ConfigTab(\n testtools.TestCase,\n ResourcedTestCase,\n fixtures.TestWithFixtures):\n\n resources = [('base_setup', SolnSetupResource)]\n\n def __init__(self, *args, **kwargs):\n testtools.TestCase.__init__(self, *args, **kwargs)\n self.res = SolnSetupResource.getResource()\n self.inputs = self.res.inputs\n self.connections = self.res.connections\n self.logger = self.res.logger\n self.nova_fixture = self.res.nova_fixture\n self.analytics_obj = self.connections.analytics_obj\n self.vnc_lib = self.connections.vnc_lib\n self.quantum_fixture = self.connections.quantum_fixture\n self.cn_inspect = self.connections.cn_inspect\n if self.inputs.webui_verification_flag:\n self.browser = self.connections.browser\n 
self.browser_openstack = self.connections.browser_openstack\n self.delay = 10\n self.webui = WebuiTest(self.connections, self.inputs)\n self.webui_common = WebuiCommon(self.webui)\n\n def __del__(self):\n print \"Deleting test_with_setup now\"\n SolnSetupResource.finishedWith(self.res)\n\n def setUp(self):\n super(ConfigTab, self).setUp()\n if 'PARAMS_FILE' in os.environ:\n self.ini_file = os.environ.get('PARAMS_FILE')\n else:\n self.ini_file = 'params.ini'\n\n def tearDown(self):\n print \"Tearing down test\"\n super(ConfigTab, self).tearDown()\n SolnSetupResource.finishedWith(self.res)\n\n def runTest(self):\n pass\n # end runTest\n\n @preposttest_wrapper\n def test_floating_ips(self):\n '''Test floating ips on config->Networking->Manage Floating IPs page\n '''\n assert self.webui.verify_floating_ip_api_data()\n return True\n # end test_floating_ips\n\n @preposttest_wrapper\n def test_networks(self):\n '''Test networks on config->Networking->Networks page\n '''\n assert self.webui.verify_vn_api_data()\n return True\n # end test_networks\n\n @preposttest_wrapper\n def test_ipams(self):\n '''Test ipams on config->Networking->IP Adress Management page\n '''\n assert self.webui.verify_ipam_api_data()\n return True\n # end test_ipams\n\n @preposttest_wrapper\n def test_policies(self):\n '''Test polcies on config->Networking->Policies page\n '''\n assert self.webui.verify_policy_api_data()\n return True\n # end test_policies\n\n @preposttest_wrapper\n def test_service_templates(self):\n '''Test svc templates on config->Services->Service Templates page\n '''\n assert self.webui.verify_service_template_api_basic_data()\n return True\n # end test_service_templates\n\n @preposttest_wrapper\n def test_service_instances(self):\n '''Test svc instances on config->Services->Service Instances page\n '''\n assert self.webui.verify_service_instance_api_basic_data()\n return True\n # end test_service_instances\n\n @preposttest_wrapper\n def test_project_quotas(self):\n '''Test 
project quotas on config->Networking->Project Quotas page\n '''\n assert self.webui.verify_project_quotas()\n return True\n # end test_project_quotas\n# end ConfigTab\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41179,"cells":{"__id__":{"kind":"number","value":16990890624808,"string":"16,990,890,624,808"},"blob_id":{"kind":"string","value":"7e3f7e8007abca5301f65d6fd9150c6178366e6d"},"directory_id":{"kind":"string","value":"098463adcfcc4ab71f09f7b389377dbad6884e8e"},"path":{"kind":"string","value":"/src/base/loader/code/user_code_range.py"},"content_id":{"kind":"string","value":"ec9321448ad830d2e2c49231e65c990fd505558a"},"detected_licenses":{"kind":"list like","value":["BSD-3-Clause","LicenseRef-scancode-proprietary-license","LGPL-2.0-or-later","MIT"],"string":"[\n \"BSD-3-Clause\",\n \"LicenseRef-scancode-proprietary-license\",\n \"LGPL-2.0-or-later\",\n 
\"MIT\"\n]"},"license_type":{"kind":"string","value":"non_permissive"},"repo_name":{"kind":"string","value":"jcai19/ncc_gem5"},"repo_url":{"kind":"string","value":"https://github.com/jcai19/ncc_gem5"},"snapshot_id":{"kind":"string","value":"46c58172106adb01fdb9f863174b337ca6305352"},"revision_id":{"kind":"string","value":"3ac9d7f9e9eb89faf44e52cce3bb2bc321885f6f"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2016-09-06T12:03:57.794510","string":"2016-09-06T12:03:57.794510"},"revision_date":{"kind":"timestamp","value":"2014-08-09T01:48:58","string":"2014-08-09T01:48:58"},"committer_date":{"kind":"timestamp","value":"2014-08-09T01:48:58","string":"2014-08-09T01:48:58"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"import subprocess\nimport sys\nimport getopt\n\ndef getUserCodeRange(argv):\n\tinputfile = ''\n\toutputfile = ''\n\ttry:\n\t\topts, args = getopt.getopt(argv,\"hi:o:\",[\"ifile=\",\"ofile=\"])\n\texcept getopt.GetoptError:\n\t\tprint 'user_code_range.py -i -o '\n\t\tsys.exit(2)\n\tfor opt, arg in opts:\n\t\tif opt == '-h':\n\t\t\tprint 'user_code_range.py -i -o '\n\t\t\tsys.exit()\n\t\telif opt in (\"-i\", \"--ifile\"):\n\t\t\tinputfile = arg\n\t\telif opt in (\"-o\", \"--ofile\"):\n\t\t\toutputfile = arg\n\tcommand = \"llvm-nm --print-size \" + inputfile +\" > \" + inputfile + \".syms\"\n\tsubprocess.call(command, shell=True)\n\tfin = 
open(inputfile + \".syms\", 'r')\n\taddrRanges = []\n\tfor line in fin:\n\t\tlineSplit = line.split()\n\t\tif len(lineSplit) == 4 and (lineSplit[2] == \"T\" or lineSplit[2] == \"t\") and (lineSplit[3][0:2] == \"_Z\" or lineSplit[3] == \"main\"):\n\t\t\tstart = int(lineSplit[0], 16)\n\t\t\tend = start + int(lineSplit[1], 16)\n\t\t\t#addrRanges.append((start, end, int(lineSplit[1], 16), lineSplit[3]))\n\t\t\taddrRanges.append((start, end))\n\taddrRanges = sorted(addrRanges, key=lambda addrRange: addrRange[0])\n\tretList = []\n\tretList.append(addrRanges[0][0])\n\tretList.append(addrRanges[len(addrRanges)-1][1])\n\tfout = open(outputfile, 'w')\n\tfout.write(str(addrRanges[0][0]) + \" \" + str(addrRanges[len(addrRanges)-1][1]))\n\tfin.close()\n\tfout.close()\n\tcommand = \"rm \" + inputfile + \".syms\"\n\tsubprocess.call(command, shell=True)\n\treturn retList\n\nif __name__ == \"__main__\":\n\tgetUserCodeRange(sys.argv[1:])\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41180,"cells":{"__id__":{"kind":"number","value":6004364300235,"string":"6,004,364,300,235"},"blob_id":{"kind":"string","value":"09544a13989d7e9fd230a1a0d88c4bcad5d38cc3"},"directory_id":{"kind":"string","value":"0302d051b460c3b803d0c609b8dd04c76aed1841"},"path":{"kind":"string","value":"/nerd/configuration.py"},"content_id":{"kind":"string","value":"c058b0799ac7610ee779dc506cc71f5347761c9c"},"detected_licenses":{"kind":"list 
like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"GendoIkari/nerdcommander"},"repo_url":{"kind":"string","value":"https://github.com/GendoIkari/nerdcommander"},"snapshot_id":{"kind":"string","value":"204a09e66d610407d7412afcfb29ed4dc9957583"},"revision_id":{"kind":"string","value":"b9f606aeae2e6404f658f9965ab67102ed0c7bb8"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2015-08-05T09:43:16.971907","string":"2015-08-05T09:43:16.971907"},"revision_date":{"kind":"timestamp","value":"2013-02-05T23:27:33","string":"2013-02-05T23:27:33"},"committer_date":{"kind":"timestamp","value":"2013-02-05T23:27:33","string":"2013-02-05T23:27:33"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"import os\n\ndef preferiteFolders():\n return [(\"Home\", os.path.expanduser(\"~\"), 
\"HOME\")]\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2013,"string":"2,013"}}},{"rowIdx":41181,"cells":{"__id__":{"kind":"number","value":2267742752352,"string":"2,267,742,752,352"},"blob_id":{"kind":"string","value":"df7aa56b4b5cfdb6c58e009e17100d2c803c096d"},"directory_id":{"kind":"string","value":"c7d2acaf53b11cb552036d994bf409279d5831d7"},"path":{"kind":"string","value":"/example/tclient.py"},"content_id":{"kind":"string","value":"101ba21a52d7c40820bd75bed65a95ddbff20707"},"detected_licenses":{"kind":"list like","value":["BSD-3-Clause"],"string":"[\n \"BSD-3-Clause\"\n]"},"license_type":{"kind":"string","value":"permissive"},"repo_name":{"kind":"string","value":"jamiesun/pyrad"},"repo_url":{"kind":"string","value":"https://github.com/jamiesun/pyrad"},"snapshot_id":{"kind":"string","value":"ff665b3464efc8c9926f1dabd9cc27fd5bccc2c8"},"revision_id":{"kind":"string","value":"b97843ec5314ef4cbd408dc13bf0a113f6206337"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2020-12-25T10:35:46.802080","string":"2020-12-25T10:35:46.802080"},"revision_date":{"kind":"timestamp","value":"2012-09-05T09:26:10","string":"2012-09-05T09:26:10"},"committer_date":{"kind":"timestamp","value":"2012-09-05T09:26:10","string":"2012-09-05T09:26:10"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_la
nguage":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"#!/usr/bin/env python\n#coding:utf-8\nfrom twisted.internet import protocol\nfrom twisted.internet import reactor\nfrom twisted.python import log\nimport sys,socket\nfrom pyrad import dictionary\nfrom pyrad import host\nimport pyrad\nimport time\n\nclass RadiusTestClient(host.Host, protocol.DatagramProtocol):\n def __init__(self, server, authport=1812, acctport=1813,\n secret=\"secret\", dict=dictionary.Dictionary(\"dictionary\")):\n host.Host.__init__(self, dict=dict)\n self.server = server\n self.authport = authport\n self.acctport = acctport\n self.secret = secret\n self.reply = 0\n \n def startProtocol(self):\n self.transport.socket.setsockopt(socket.SOL_SOCKET,socket.SO_RCVBUF,1024*10*20)\n self.transport.connect(self.server, self.authport)\n self.sendAuth()\n reactor.callLater(60,self.done)\n\n def done(self):\n times = self.lasttime - self.starttime\n percount = self.reply /times\n log.msg(\"reply:%s\"%self.reply)\n log.msg(\"reply per second:%s\"%percount)\n reactor.stop()\n\n def sendAuth(self):\n self.starttime = time.time()\n for i in xrange(1000):\n req=self.CreateAuthPacket(code=pyrad.packet.AccessRequest,\n User_Name=\"test01\",secret=self.secret)\n req[\"User-Password\"] = req.PwCrypt(\"888888\")\n req[\"NAS-IP-Address\"] = \"198.168.8.139\" \n self.transport.write(req.RequestPacket()) \n sendtimes = time.time() - self.starttime\n log.msg(\"sends per second:%s\"%(1000/sendtimes))\n\n def datagramReceived(self, datagram, (host, port)):\n self.reply += 1\n self.lasttime = time.time()\n\ndef main():\n log.startLogging(sys.stdout, 0)\n protocol = RadiusTestClient(\"198.168.8.8\",secret=\"secret\")\n reactor.listenUDP(0, protocol)\n reactor.run()\n\nif __name__ == '__main__':\n 
main()"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2012,"string":"2,012"}}},{"rowIdx":41182,"cells":{"__id__":{"kind":"number","value":7206955123144,"string":"7,206,955,123,144"},"blob_id":{"kind":"string","value":"4405531269689d0cb302f594693699e346826da8"},"directory_id":{"kind":"string","value":"42f97265c1e384860c5ea2e3c36172c3695cf069"},"path":{"kind":"string","value":"/CardData.py"},"content_id":{"kind":"string","value":"62fa91e37184cf19cc92e38a89315074554f2e66"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"pmwheatley/HollywoodStudios"},"repo_url":{"kind":"string","value":"https://github.com/pmwheatley/HollywoodStudios"},"snapshot_id":{"kind":"string","value":"1a04abf81be939393818345847b766d88a904663"},"revision_id":{"kind":"string","value":"7f21203f3e703b62fa9cfc57c040c7e892471daa"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-25T10:34:27.340443","string":"2021-01-25T10:34:27.340443"},"revision_date":{"kind":"timestamp","value":"2013-11-02T17:09:04","string":"2013-11-02T17:09:04"},"committer_date":{"kind":"timestamp","value":"2013-11-02T17:09:04","string":"2013-11-02T17:09:04"},"github_id":{"kind":"number","value":13828142,"string":"13,828,142"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"ki
nd":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"from Cards import *\nfrom Constants import *\n\nACTORSDECK\t= Deck([\tActorCard(0,\t'Charlie Chaplin',\tC,\t[3000, 6000],\t[[0, 0],\t[4, 4]]),\n\t\t\t\tActorCard(1,\t'Buster Keaton',\tC,\t[2000, 4000],\t[[0, -1],\t[2, 0]]),\n\t\t\t\tActorCard(2,\t'Mack Sennett',\t\tC,\t[3000, 4000],\t[[0, 0],\t[2, 1]]),\n\t\t\t\tActorCard(3,\t'Marx Brothers',\tC,\t[2000, 4000],\t[[0, -1],\t[2, 0]]),\n\t\t\t\tActorCard(4,\t'Laurel & Hardy',\tC,\t[2000, 5000],\t[[0, 0],\t[2, 3]]),\n\t\t\t\tActorCard(5,\t'Harold Lloyd',\t\tC,\t[1000, 3000],\t[[0, 0],\t[1, 1]]),\n\t\t\t\tActorCard(6,\t'Will Rogers',\t\tC,\t[1000, 3000],\t[[0, 0],\t[1, 2]]),\n\t\t\t\tActorCard(7,\t'Karl Dane',\t\tC,\t[1000, 4000],\t[[0, -1],\t[2, 0]]),\n\t\t\t\tActorCard(8,\t'Boris Karloff',\tC,\t[2000, 5000],\t[[0, 0],\t[2, 4]]),\n\t\t\t\tActorCard(9,\t'Bela Lugosi',\t\tC,\t[2000, 5000],\t[[0, -1],\t[4, 1]]),\n\t\t\t\tActorCard(11,\t'Gene Kelly',\t\tBD,\t[2000, 4000],\t[[0, 0],\t[1, 3]]),\n\t\t\t\tActorCard(11,\t'Fred Astaire',\t\tBD,\t[2000, 5000],\t[[0, 0],\t[3, 4]]),\n\t\t\t\tActorCard(12,\t'Rex Harrison',\t\tBD,\t[1000, 4000],\t[[0, 0],\t[2, 3]]),\n\t\t\t\tActorCard(13,\t'Colin Clive',\t\tBD,\t[1000, 3000],\t[[0, -1],\t[2, 0]]),\n\t\t\t\tActorCard(14,\t'Tyrone Power',\t\tBD,\t[1000, 3000],\t[[0, 0],\t[1, 1]]),\n\t\t\t\tActorCard(15,\t'John Gilbert',\t\tBD,\t[3000, 5000],\t[[0, -1],\t[3, 1]]),\n\t\t\t\tActorCard(16,\t'Rudolph Valentino',\tBD,\t[3000, 5000],\t[[0, -1],\t[3, 0]]),\n\t\t\t\tActorCard(17,\t'Cary Grant',\t\tBD,\t[3000, 6000],\t[[0, 0],\t[4, 4]]),\n\t\t\t\tActorCard(18,\t'Milton Sills',\t\tBD,\t[1000, 3000],\t[[0, -1],\t[1, 0]]),\n\t\t\t\tActorCard(19,\t'James Stewart',\tBD,\t[2000, 5000],\t[[0, 0],\t[3, 3]]),\n\t\t\t\tActorCard(20,\t'Humphrey Bogart',\tDM,\t[3000, 6000],\t[[0, 0],\t[3, 5]]),\n\t\t\t\tActorCard(21,\t'James Cagney',\t\tDM,\t[2000, 5000],\t[[0, 
0],\t[3, 4]]),\n\t\t\t\tActorCard(22,\t'Edward G. Robinson',\tDM,\t[1000, 3000],\t[[0, -1],\t[1, 0]]),\n\t\t\t\tActorCard(23,\t'Gary Cooper',\t\tDM,\t[2000, 4000],\t[[0, 0],\t[2, 1]]),\n\t\t\t\tActorCard(24,\t'Charles Laughton',\tDM,\t[3000, 5000],\t[[0, 0],\t[3, 3]]),\n\t\t\t\tActorCard(25,\t'Peter Lorre',\t\tDM,\t[1000, 4000],\t[[0, 0],\t[2, 2]]),\n\t\t\t\tActorCard(26,\t'Charles Boyer',\tDM,\t[3000, 4000],\t[[0, -1],\t[2, 0]]),\n\t\t\t\tActorCard(27,\t'Sterling Hayden',\tDM,\t[2000, 4000],\t[[0, 0],\t[2, 3]]),\n\t\t\t\tActorCard(28,\t'Fred MacMurray',\tDM,\t[1000, 3000],\t[[0, -1],\t[2, 0]]),\n\t\t\t\tActorCard(29,\t'Lon Chaney',\t\tDM,\t[2000, 5000],\t[[0, -1],\t[4, 1]]),\n\t\t\t\tActorCard(30,\t'Charlton Heston',\tFH,\t[3000, 5000],\t[[0, 0],\t[2, 4]]),\n\t\t\t\tActorCard(31,\t'John Gielgud',\t\tFH,\t[1000, 3000],\t[[0, 0],\t[1, 1]]),\n\t\t\t\tActorCard(32,\t'Henry Fonda',\t\tFH,\t[1000, 4000],\t[[0, 0],\t[2, 3]]),\n\t\t\t\tActorCard(33,\t'Spencer Tracy',\tFH,\t[1000, 3000],\t[[0, -1],\t[1, 1]]),\n\t\t\t\tActorCard(34,\t'Errol Flynn',\t\tFH,\t[2000, 5000],\t[[0, -1],\t[4, 0]]),\n\t\t\t\tActorCard(35,\t'Tom Mix',\t\tFH,\t[2000, 4000],\t[[0, -1],\t[2, 0]]),\n\t\t\t\tActorCard(36,\t'Douglas Fairbanks',\tFH,\t[3000, 6000],\t[[0, -1],\t[5, 2]]),\n\t\t\t\tActorCard(37,\t'Laurence Olivier',\tFH,\t[2000, 4000],\t[[0, 0],\t[2, 3]]),\n\t\t\t\tActorCard(38,\t'William S. 
Hart',\tFH,\t[1000, 3000],\t[[0, 0],\t[1, 0]]),\n\t\t\t\tActorCard(39,\t'John Barrymore',\tFH,\t[3000, 5000],\t[[0, 0],\t[3, 3]]),\n\t\t\t\tActorCard(40,\t'Mary Pickford',\tS,\t[3000, 6000],\t[[0, -1],\t[5, 2]]),\n\t\t\t\tActorCard(41,\t'Alice Faye',\t\tS,\t[2000, 4000],\t[[0, 0],\t[2, 1]]),\n\t\t\t\tActorCard(42,\t'Ava Gardner',\t\tS,\t[3000, 5000],\t[[0, -1],\t[3, 3]]),\n\t\t\t\tActorCard(43,\t'Judy Garland',\t\tS,\t[3000, 5000],\t[[0, 0],\t[2, 4]]),\n\t\t\t\tActorCard(44,\t'Shirley Temple',\tS,\t[1000, 5000],\t[[0, 0],\t[4, 1]]),\n\t\t\t\tActorCard(45,\t'Lillian Gish',\t\tS,\t[2000, 3000],\t[[0, 0],\t[1, 1]]),\n\t\t\t\tActorCard(46,\t'Janet Gaynor',\t\tS,\t[2000, 4000],\t[[0, -1],\t[2, 0]]),\n\t\t\t\tActorCard(47,\t'Ginger Rogers',\tS,\t[2000, 4000],\t[[0, 0],\t[2, 3]]),\n\t\t\t\tActorCard(48,\t'Claudette Colbert',\tS,\t[1000, 3000],\t[[0, 0],\t[1, 2]]),\n\t\t\t\tActorCard(49,\t'Jeanette MacDonald',\tS,\t[1000, 3000],\t[[0, -1],\t[1, 0]]),\n\t\t\t\tActorCard(50,\t'Mae West',\t\tSW,\t[3000, 5000],\t[[0, -1],\t[3, 1]]),\n\t\t\t\tActorCard(51,\t'Marlene Dietrich',\tSW,\t[3000, 5000],\t[[0, 0],\t[3, 2]]),\n\t\t\t\tActorCard(52,\t'Carole Lombard',\tSW,\t[2000, 5000],\t[[0, 0],\t[4, 1]]),\n\t\t\t\tActorCard(53,\t'Bette Davis',\t\tSW,\t[2000, 4000],\t[[0, 0],\t[2, 3]]),\n\t\t\t\tActorCard(54,\t'Jean Harlow',\t\tSW,\t[3000, 6000],\t[[0, 0],\t[3, 5]]),\n\t\t\t\tActorCard(55,\t'Rita Hayworth',\tSW,\t[2000, 4000],\t[[0, 0],\t[2, 2]]),\n\t\t\t\tActorCard(56,\t'Theda Bara',\t\tSW,\t[2000, 4000],\t[[0, -1],\t[2, 0]]),\n\t\t\t\tActorCard(57,\t'Mary Astor',\t\tSW,\t[1000, 3000],\t[[0, -1],\t[1, 2]]),\n\t\t\t\tActorCard(58,\t'Nita Naldi',\t\tSW,\t[1000, 3000],\t[[0, -1],\t[1, 0]]),\n\t\t\t\tActorCard(59,\t'Barbara Stanwyck',\tSW,\t[1000, 3000],\t[[0, 0],\t[1, 1]]),\n\t\t\t\tActorCard(60,\t'Greta Garbo',\t\tQL,\t[3000, 6000],\t[[1, 0],\t[5, 2]]),\n\t\t\t\tActorCard(61,\t'Ingrid Bergman',\tQL,\t[3000, 6000],\t[[1, 1],\t[4, 4]]),\n\t\t\t\tActorCard(62,\t'Mabel 
Normand',\tQL,\t[2000, 4000],\t[[1, 0],\t[2, 0]]),\n\t\t\t\tActorCard(63,\t'Irene Dunne',\t\tQL,\t[2000, 4000],\t[[1, 0],\t[2, 1]]),\n\t\t\t\tActorCard(64,\t'Joan Crawford',\tQL,\t[2000, 4000],\t[[1, 1],\t[2, 3]]),\n\t\t\t\tActorCard(65,\t'Vivien Leigh',\t\tQL,\t[3000, 5000],\t[[1, 1],\t[2, 4]]),\n\t\t\t\tActorCard(66,\t'Katherine Hepburn',\tQL,\t[3000, 6000],\t[[1, 1],\t[4, 4]]),\n\t\t\t\tActorCard(67,\t'Norma Shearer',\tQL,\t[3000, 5000],\t[[1, 0],\t[3, 1]]),\n\t\t\t\tActorCard(68,\t'Gloria Swanson',\tQL,\t[3000, 5000],\t[[1, 1],\t[3, 0]]),\n\t\t\t\tActorCard(69,\t'Lauren Bacall',\tQL,\t[3000, 6000],\t[[1, 1],\t[4, 5]])])\nDIRECTORSDECK\t= Deck([\tDirectorCard(0,\t\t'Allan Smithee',\t1000,\t{FILMNOIR: 0,\tROMANCE: 0,\tHORROR: 0,\tCOMEDY: 0,\tEPIC: 0,\tSWORDS: 0},\t{}),\n\t\t\t\tDirectorCard(1,\t\t'Georges Cochrane',\t1000,\t{FILMNOIR: 0,\tROMANCE: 1,\tHORROR: 0,\tCOMEDY: 0,\tEPIC: 0,\tSWORDS: 0},\t{}),\n\t\t\t\tDirectorCard(2,\t\t'Beaumont Smith',\t1000,\t{FILMNOIR: 0,\tROMANCE: 0,\tHORROR: 0,\tCOMEDY: 0,\tEPIC: 0,\tSWORDS: 1},\t{}),\n\t\t\t\tDirectorCard(3,\t\t'Cecil M. Hepworth',\t1000,\t{FILMNOIR: 1,\tROMANCE: 0,\tHORROR: 0,\tCOMEDY: 0,\tEPIC: 0,\tSWORDS: 0},\t{}),\n\t\t\t\tDirectorCard(4,\t\t'Sinclair Hill',\t1000,\t{FILMNOIR: 0,\tROMANCE: 0,\tHORROR: 1,\tCOMEDY: 0,\tEPIC: 0,\tSWORDS: 0},\t{}),\n\t\t\t\tDirectorCard(5,\t\t'Harry Edwards',\t1000,\t{FILMNOIR: 0,\tROMANCE: 0,\tHORROR: 0,\tCOMEDY: 1,\tEPIC: 0,\tSWORDS: 0},\t{}),\n\t\t\t\tDirectorCard(6,\t\t'Raoul Walsh',\t\t2000,\t{FILMNOIR: 1,\tROMANCE: 0,\tHORROR: 0,\tCOMEDY: 0,\tEPIC: 0,\tSWORDS: 1},\t{}),\n\t\t\t\tDirectorCard(7,\t\t'Frank Wilson',\t\t2000,\t{FILMNOIR: 1,\tROMANCE: 0,\tHORROR: 0,\tCOMEDY: 0,\tEPIC: 1,\tSWORDS: 0},\t{}),\n\t\t\t\tDirectorCard(8,\t\t'John K. 
Wells',\t2000,\t{FILMNOIR: 0,\tROMANCE: 0,\tHORROR: 1,\tCOMEDY: 0,\tEPIC: 0,\tSWORDS: 1},\t{}),\n\t\t\t\tDirectorCard(9,\t\t'Joseph Henabery',\t2000,\t{FILMNOIR: 0,\tROMANCE: 1,\tHORROR: 0,\tCOMEDY: 1,\tEPIC: 0,\tSWORDS: 0},\t{}),\n\t\t\t\tDirectorCard(10,\t'Edgar Lewis',\t\t2000,\t{FILMNOIR: 0,\tROMANCE: 1,\tHORROR: 1,\tCOMEDY: 1,\tEPIC: 0,\tSWORDS: 0},\t{}),\n\t\t\t\tDirectorCard(11,\t'Clarence Brown',\t2000,\t{FILMNOIR: 0,\tROMANCE: 1,\tHORROR: 0,\tCOMEDY: 0,\tEPIC: 1,\tSWORDS: 0},\t{}),\n\t\t\t\tDirectorCard(12,\t'Franklyn Barrett',\t2000,\t{FILMNOIR: 1,\tROMANCE: 0,\tHORROR: 1,\tCOMEDY: 0,\tEPIC: 0,\tSWORDS: 0},\t{}),\n\t\t\t\tDirectorCard(13,\t'Busby Berkeley',\t3000,\t{FILMNOIR: 0,\tROMANCE: 1,\tHORROR: 1,\tCOMEDY: 1,\tEPIC: 0,\tSWORDS: 0},\t{}),\n\t\t\t\tDirectorCard(14,\t'D.W. Griffith',\t3000,\t{FILMNOIR: 1,\tROMANCE: 0,\tHORROR: 0,\tCOMEDY: 0,\tEPIC: 1,\tSWORDS: 1},\t{}),\n\t\t\t\tDirectorCard(15,\t'Rouben Mamoulian',\t3000,\t{FILMNOIR: 0,\tROMANCE: 1,\tHORROR: 1,\tCOMEDY: 1,\tEPIC: 0,\tSWORDS: 0},\t{}),\n\t\t\t\tDirectorCard(16,\t'Erich Von Stroheim',\t3000,\t{FILMNOIR: 0,\tROMANCE: 0,\tHORROR: 0,\tCOMEDY: 0,\tEPIC: 3,\tSWORDS: 0},\t{}),\n\t\t\t\tDirectorCard(17,\t'Tod Browning',\t\t3000,\t{FILMNOIR: 0,\tROMANCE: 0,\tHORROR: 0,\tCOMEDY: 1,\tEPIC: 0,\tSWORDS: 2},\t{}),\n\t\t\t\tDirectorCard(18,\t'Edgar Jones',\t\t3000,\t{FILMNOIR: 1,\tROMANCE: 0,\tHORROR: 0,\tCOMEDY: 0,\tEPIC: 0,\tSWORDS: 1},\t{}),\n\t\t\t\tDirectorCard(19,\t'King Vidor',\t\t4000,\t{FILMNOIR: 1,\tROMANCE: 0,\tHORROR: 1,\tCOMEDY: 0,\tEPIC: 1,\tSWORDS: 1},\t{}),\n\t\t\t\tDirectorCard(20,\t'F.W. 
Murneau',\t\t4000,\t{FILMNOIR: 1,\tROMANCE: 1,\tHORROR: 1,\tCOMEDY: 0,\tEPIC: 1,\tSWORDS: 0},\t{}),\n\t\t\t\tDirectorCard(21,\t'Elia Kazan',\t\t4000,\t{FILMNOIR: 1,\tROMANCE: 1,\tHORROR: 0,\tCOMEDY: 0,\tEPIC: 1,\tSWORDS: 1},\t{}),\n\t\t\t\tDirectorCard(22,\t'Leslie Goodwins',\t4000,\t{FILMNOIR: 0,\tROMANCE: 1,\tHORROR: 0,\tCOMEDY: 1,\tEPIC: 0,\tSWORDS: 1},\t{BMOVIEBONUS: 1}),\n\t\t\t\tDirectorCard(23,\t'Ernst Lubitsch',\t4000,\t{FILMNOIR: 0,\tROMANCE: 1,\tHORROR: 0,\tCOMEDY: 2,\tEPIC: 0,\tSWORDS: 0},\t{ALSOWRITER: True}),\n\t\t\t\tDirectorCard(24,\t'Alfred Hitchcock',\t4000,\t{FILMNOIR: 2,\tROMANCE: 0,\tHORROR: 2,\tCOMEDY: 0,\tEPIC: 0,\tSWORDS: 0},\t{AMOVIEEXTRACOST: -2000}),\n\t\t\t\tDirectorCard(25,\t'Cecil B. DeMille',\t5000,\t{FILMNOIR: 0,\tROMANCE: 1,\tHORROR: 0,\tCOMEDY: 0,\tEPIC: 3,\tSWORDS: 0},\t{AMOVIEBONUS: 1}),\n\t\t\t\tDirectorCard(26,\t'Frank Capra',\t\t5000,\t{FILMNOIR: 0,\tROMANCE: 1,\tHORROR: 0,\tCOMEDY: 2,\tEPIC: 1,\tSWORDS: 0},\t{ROLLOSCAR: 2}),\n\t\t\t\tDirectorCard(27,\t'James Whale',\t\t5000,\t{FILMNOIR: 1,\tROMANCE: 0,\tHORROR: 3,\tCOMEDY: 0,\tEPIC: 0,\tSWORDS: 0},\t{FILMNOIRROLLCLASSIC: 2,\tHORRORROLLCLASSIC: 2}),\n\t\t\t\tDirectorCard(28,\t'John Ford',\t\t5000,\t{FILMNOIR: 0,\tROMANCE: 1,\tHORROR: 0,\tCOMEDY: 0,\tEPIC: 0,\tSWORDS: 3},\t{BMOVIEBONUS: 1}),\n\t\t\t\tDirectorCard(29,\t'Victor Fleming',\t5000,\t{FILMNOIR: 2,\tROMANCE: 0,\tHORROR: 1,\tCOMEDY: 2,\tEPIC: 0,\tSWORDS: 1},\t{AMOVIEEXTRACOST: 1000,\tBMOVIEEXTRACOST:1000})])\n\nFILMNOIRDECK\t= Deck([ScriptCard(0,\t'The Big Sleep',\t\t\t\tFILMNOIR,\t[DM, SW, C],\t4000,\t1000,\t2,\t0,\tFalse,\tFalse),\n\t\t\t\t\t\tScriptCard(1,\t'Rififi',\t\t\t\t\t\tFILMNOIR,\t[BD, DM],\t\t4000,\t1000,\t2,\t0,\tFalse,\tFalse),\n\t\t\t\t\t\tScriptCard(2,\t'The Asphalt Jungle',\t\t\tFILMNOIR,\t[DM, S],\t\t5000,\t2000,\t3,\t1,\tFalse,\tFalse),\n\t\t\t\t\t\tScriptCard(3,\t'Touch of Evil',\t\t\t\tFILMNOIR,\t[BD, DM],\t\t5000,\t2000,\t3,\t1,\tFalse,\tFalse),\n\t\t\t\t\t\tScriptCard(4,\t'The 
Killing',\t\t\t\t\tFILMNOIR,\t[DM, C],\t\t6000,\t3000,\t3,\t1,\tFalse,\tFalse),\n\t\t\t\t\t\tScriptCard(5,\t'The Third Man',\t\t\t\tFILMNOIR,\t[DM, SW],\t\t5000,\t1000,\t3,\t0,\tFalse,\tFalse),\n\t\t\t\t\t\tScriptCard(6,\t'Double Indemnity',\t\t\t\tFILMNOIR,\t[DM, SW],\t\t5000,\t2000,\t3,\t1,\tFalse,\tTrue),\n\t\t\t\t\t\tScriptCard(7,\t'Sunset Boulevard',\t\t\t\tFILMNOIR,\t[BD, SW, S],\t7000,\t4000,\t4,\t2,\tTrue,\tFalse),\n\t\t\t\t\t\tScriptCard(8,\t'Strangers on a Train',\t\t\tFILMNOIR,\t[BD, C],\t\t4000,\t1000,\t2,\t0,\tFalse,\tFalse),\n\t\t\t\t\t\tScriptCard(9,\t'Casablanca',\t\t\t\t\tFILMNOIR,\t[DM, BD, S],\t7000,\t4000,\t5,\t2,\tTrue,\tTrue),\n\t\t\t\t\t\tScriptCard(10,\t'Scarface',\t\t\t\t\t\tFILMNOIR,\t[DM, SW],\t\t5000,\t2000,\t2,\t0,\tFalse,\tTrue),\n\t\t\t\t\t\tScriptCard(11,\t'The Maltese Falcon',\t\t\tFILMNOIR,\t[DM, SW, DM],\t6000,\t3000,\t4,\t2,\tTrue,\tFalse),\n\t\t\t\t\t\tScriptCard(12,\t'The Blue Angel',\t\t\t\tFILMNOIR,\t[SW, DM],\t\t6000,\t3000,\t4,\t2,\tFalse,\tFalse),\n\t\t\t\t\t\tScriptCard(13,\t'Notorious',\t\t\t\t\tFILMNOIR,\t[DM, SW],\t\t5000,\t2000,\t3,\t1,\tFalse,\tFalse),\n\t\t\t\t\t\tScriptCard(14,\t'The Public Enemy',\t\t\t\tFILMNOIR,\t[DM],\t\t\t5000,\t2000,\t3,\t1,\tFalse,\tFalse),\n\t\t\t\t\t\tScriptCard(15,\t'Key Largo',\t\t\t\t\tFILMNOIR,\t[BD, SW, DM],\t5000,\t2000,\t3,\t1,\tFalse,\tFalse)])\nROMANCEDECK\t= Deck([\tScriptCard(16,\t'Golden Earrings',\t\t\t\tROMANCE,\t[SW, BD],\t\t6000,\t2000,\t3,\t0,\tFalse,\tFalse),\n\t\t\t\t\t\tScriptCard(17,\t'Romeo and Juliet',\t\t\t\tROMANCE,\t[S, BD],\t\t7000,\t3000,\t4,\t2,\tFalse,\tFalse),\n\t\t\t\t\t\tScriptCard(18,\t'The Love Trap',\t\t\t\tROMANCE,\t[S, FH],\t\t5000,\t3000,\t2,\t2,\tFalse,\tFalse),\n\t\t\t\t\t\tScriptCard(19,\t\"It's a Wonderful Life\",\t\tROMANCE,\t[BD, C, S],\t\t9000,\t5000,\t4,\t2,\tTrue,\tFalse),\n\t\t\t\t\t\tScriptCard(20,\t'Now, Voyager',\t\t\t\t\tROMANCE,\t[SW, FH],\t\t5000,\t2000,\t3,\t1,\tFalse,\tFalse),\n\t\t\t\t\t\tScriptCard(21,\t\"Heart o' the 
Hills\",\t\t\tROMANCE,\t[QL, BD],\t\t4000,\t2000,\t2,\t1,\tFalse,\tFalse),\n\t\t\t\t\t\tScriptCard(22,\t'My Best Girl',\t\t\t\t\tROMANCE,\t[BD, QL],\t\t6000,\t3000,\t3,\t2,\tFalse,\tFalse),\n\t\t\t\t\t\tScriptCard(23,\t'Grand Hotel',\t\t\t\t\tROMANCE,\t[SW, BD, S],\t7000,\t3000,\t4,\t2,\tTrue,\tFalse),\n\t\t\t\t\t\tScriptCard(24,\t'Ninotchka',\t\t\t\t\tROMANCE,\t[QL, BD],\t\t5000,\t2000,\t3,\t1,\tTrue,\tFalse),\n\t\t\t\t\t\tScriptCard(25,\t'Small Town Girl',\t\t\t\tROMANCE,\t[S, C],\t\t\t4000,\t1000,\t2,\t0,\tFalse,\tFalse),\n\t\t\t\t\t\tScriptCard(26,\t'The Eagle',\t\t\t\t\tROMANCE,\t[BD, SW],\t\t4000,\t2000,\t2,\t1,\tFalse,\tFalse),\n\t\t\t\t\t\tScriptCard(27,\t'Morocco',\t\t\t\t\t\tROMANCE,\t[SW, BD],\t\t5000,\t2000,\t3,\t1,\tFalse,\tTrue),\n\t\t\t\t\t\tScriptCard(28,\t'The Shop Around the Corner',\tROMANCE,\t[S, BD],\t\t6000,\t3000,\t3,\t2,\tFalse,\tTrue),\n\t\t\t\t\t\tScriptCard(29,\t'The Gilded Lily',\t\t\t\tROMANCE,\t[S, BD, BD],\t4000,\t1000,\t2,\t0,\tFalse,\tFalse),\n\t\t\t\t\t\tScriptCard(30,\t'No Time for Love',\t\t\t\tROMANCE,\t[S, BD],\t\t6000,\t2000,\t3,\t1,\tFalse,\tTrue),\n\t\t\t\t\t\tScriptCard(31,\t'The Philadelphia Story',\t\tROMANCE,\t[FH, S, BD],\t4000,\t2000,\t2,\t1,\tFalse,\tFalse)])\nHORRORDECK\t= Deck([\tScriptCard(32,\t'Bride of Frankenstein',\t\tHORROR,\t\t[C, SW],\t\t7000,\t3000,\t3,\t2,\tFalse,\tTrue),\n\t\t\t\t\t\tScriptCard(33,\t'Faust',\t\t\t\t\t\tHORROR,\t\t[DM],\t\t \t5000,\t3000,\t2,\t2,\tFalse,\tFalse),\n\t\t\t\t\t\tScriptCard(34,\t'Freaks',\t\t\t\t\t\tHORROR,\t\t[DM, C],\t\t5000,\t2000,\t2,\t0,\tFalse,\tFalse),\n\t\t\t\t\t\tScriptCard(35,\t'The Mummy',\t\t\t\t\tHORROR,\t\t[C],\t\t\t7000,\t4000,\t3,\t2,\tFalse,\tFalse),\n\t\t\t\t\t\tScriptCard(36,\t'The Night of the Hunter',\t\tHORROR,\t\t[DM, S],\t\t5000,\t3000,\t2,\t2,\tFalse,\tFalse),\n\t\t\t\t\t\tScriptCard(37,\t'Dead of Night',\t\t\t\tHORROR,\t\t[C],\t\t\t8000,\t4000,\t3,\t1,\tFalse,\tFalse),\n\t\t\t\t\t\tScriptCard(38,\t'Nosferatu',\t\t\t\t\tHORROR,\t\t[C, 
FH],\t\t7000,\t4000,\t3,\t1,\tFalse,\tFalse),\n\t\t\t\t\t\tScriptCard(39,\t'The Unknown',\t\t\t\t\tHORROR,\t\t[C, DM],\t\t5000,\t3000,\t2,\t2,\tFalse,\tFalse),\n\t\t\t\t\t\tScriptCard(40,\t'The Hunchback of Notre Dame',\tHORROR,\t\t[S, C],\t\t\t7000,\t4000,\t3,\t2,\tFalse,\tTrue),\n\t\t\t\t\t\tScriptCard(41,\t'The Invisible Man',\t\t\tHORROR,\t\t[DM, S],\t\t5000,\t2000,\t3,\t1,\tFalse,\tFalse),\n\t\t\t\t\t\tScriptCard(42,\t'The Raven',\t\t\t\t\tHORROR,\t\t[DM, C],\t\t6000,\t3000,\t3,\t2,\tFalse,\tFalse),\n\t\t\t\t\t\tScriptCard(43,\t'Dracula',\t\t\t\t\t\tHORROR,\t\t[C, FH, SW],\t6000,\t4000,\t4,\t2,\tTrue,\tFalse),\n\t\t\t\t\t\tScriptCard(44,\t'Frankenstein',\t\t\t\t\tHORROR,\t\t[C, FH, S],\t\t6000,\t3000,\t3,\t2,\tTrue,\tFalse),\n\t\t\t\t\t\tScriptCard(45,\t'King Kong',\t\t\t\t\tHORROR,\t\t[S, FH],\t\t8000,\t4000,\t4,\t2,\tTrue,\tFalse),\n\t\t\t\t\t\tScriptCard(46,\t'The Invisible Ray',\t\t\tHORROR,\t\t[FH, DM],\t\t5000,\t2000,\t3,\t1,\tFalse,\tFalse),\n\t\t\t\t\t\tScriptCard(47,\t'The Phantom of the Opera',\t\tHORROR,\t\t[S, DM, FH],\t6000,\t3000,\t3,\t2,\tFalse,\tTrue)])\nCOMEDYDECK\t= Deck([\tScriptCard(48,\t'The Electric House',\t\tCOMEDY,\t\t[S, C],\t\t6000,\t3000,\t2,\t2,\tFalse,\tFalse),\n\t\t\t\t\t\tScriptCard(49,\t'The Music Box',\t\tCOMEDY,\t\t[C, C],\t\t5000,\t2000,\t2,\t0,\tFalse,\tFalse),\n\t\t\t\t\t\tScriptCard(50,\t'Sherlock, Jr.',\t\tCOMEDY,\t\t[C],\t\t6000,\t3000,\t3,\t2,\tFalse,\tFalse),\n\t\t\t\t\t\tScriptCard(51,\t'Limelight',\t\t\tCOMEDY,\t\t[C, S],\t\t7000,\t3000,\t4,\t2,\tFalse,\tTrue),\n\t\t\t\t\t\tScriptCard(52,\t'Arsenic and Old Lace',\t\tCOMEDY,\t\t[C, SW],\t6000,\t4000,\t3,\t3,\tTrue,\tFalse),\n\t\t\t\t\t\tScriptCard(53,\t'Modern Times',\t\t\tCOMEDY,\t\t[C, S],\t\t6000,\t3000,\t3,\t2,\tFalse,\tTrue),\n\t\t\t\t\t\tScriptCard(54,\t\"Singin' in the Rain\",\t\tCOMEDY,\t\t[C, BD, S],\t8000,\t4000,\t4,\t1,\tTrue,\tFalse),\n\t\t\t\t\t\tScriptCard(55,\t'The General',\t\t\tCOMEDY,\t\t[C, 
S],\t\t6000,\t4000,\t3,\t3,\tFalse,\tFalse),\n\t\t\t\t\t\tScriptCard(56,\t'The Circus',\t\t\tCOMEDY,\t\t[C, C],\t\t5000,\t2000,\t3,\t1,\tFalse,\tFalse),\n\t\t\t\t\t\tScriptCard(57,\t'The Great Dictator',\t\tCOMEDY,\t\t[C, C, S],\t6000,\t3000,\t3,\t2,\tFalse,\tTrue),\n\t\t\t\t\t\tScriptCard(58,\t'Mr. Smith Goes to Washington',\tCOMEDY,\t\t[BD, S],\t5000,\t2000,\t3,\t1,\tTrue,\tFalse),\n\t\t\t\t\t\tScriptCard(59,\t'City Lights',\t\t\tCOMEDY,\t\t[C, SW, BD],\t6000,\t2000,\t3,\t0,\tFalse,\tFalse),\n\t\t\t\t\t\tScriptCard(60,\t\"I'm No Angel\",\t\t\tCOMEDY,\t\t[SW, C],\t6000,\t3000,\t3,\t2,\tFalse,\tFalse),\n\t\t\t\t\t\tScriptCard(61,\t'Cops',\t\t\t\tCOMEDY,\t\t[C, S],\t\t7000,\t3000,\t4,\t2,\tFalse,\tFalse),\n\t\t\t\t\t\tScriptCard(62,\t'The Tramp',\t\t\tCOMEDY,\t\t[C],\t\t5000,\t2000,\t3,\t1,\tFalse,\tFalse),\n\t\t\t\t\t\tScriptCard(63,\t'The Playhouse',\t\tCOMEDY,\t\t[S, C],\t\t6000,\t3000,\t2,\t2,\tFalse,\tFalse)])\nEPICDECK\t= Deck([\tScriptCard(64,\t'Ben-Hur: A Tale of the Christ',EPIC,\t[FH, DM, SW, QL],\t8000,\t3000,\t4,\t2,\tTrue,\tTrue),\n\t\t\t\t\t\tScriptCard(65,\t'Cleopatra',\t\t\tEPIC,\t[QL, BD, SW],\t\t7000,\t3000,\t4,\t2,\tTrue,\tFalse),\n\t\t\t\t\t\tScriptCard(66,\t\"Hell's Angels\",\t\tEPIC,\t[FH, SW, BD],\t\t12000,\t7000,\t4,\t3,\tTrue,\tFalse),\n\t\t\t\t\t\tScriptCard(67,\t'David Copperfield',\t\tEPIC,\t[BD, SW, S, FH],\t7000,\t4000,\t3,\t1,\tTrue,\tFalse),\n\t\t\t\t\t\tScriptCard(68,\t'Paths of Glory',\t\tEPIC,\t[BD, DM, FH],\t\t6000,\t3000,\t3,\t2,\tTrue,\tFalse),\n\t\t\t\t\t\tScriptCard(69,\t'The Ten Commandments',\t\tEPIC,\t[FH, S, QL, DM],\t9000,\t5000,\t5,\t3,\tTrue,\tTrue),\n\t\t\t\t\t\tScriptCard(70,\t'The Wizard of Oz',\t\tEPIC,\t[S, C, QL, C],\t\t8000,\t4000,\t4,\t2,\tTrue,\tTrue),\n\t\t\t\t\t\tScriptCard(71,\t'Citizen Kane',\t\t\tEPIC,\t[BD, SW, FH],\t\t7000,\t4000,\t4,\t2,\tTrue,\tFalse),\n\t\t\t\t\t\tScriptCard(72,\t'The King of Kings',\t\tEPIC,\t[FH, S, DM],\t\t6000,\t3000,\t3,\t2,\tTrue,\tFalse),\n\t\t\t\t\t\tScriptCard(73,\t'The Great 
Bank Robbery',\tEPIC,\t[DM, BD, FH],\t\t5000,\t3000,\t2,\t2,\tTrue,\tFalse),\n\t\t\t\t\t\tScriptCard(74,\t'Birth of a Nation',\t\tEPIC,\t[BD, S, DM],\t\t6000,\t3000,\t3,\t2,\tTrue,\tFalse),\n\t\t\t\t\t\tScriptCard(75,\t'The Last Days of Pompei',\tEPIC,\t[FH, DM, SW, S],\t8000,\t5000,\t3,\t3,\tTrue,\tFalse),\n\t\t\t\t\t\tScriptCard(76,\t'The Grapes of Wrath',\t\tEPIC,\t[FH, FH, S],\t\t6000,\t3000,\t3,\t2,\tTrue,\tTrue),\n\t\t\t\t\t\tScriptCard(77,\t'The Crusades',\t\t\tEPIC,\t[FH, DM, SW],\t\t7000,\t4000,\t3,\t3,\tTrue,\tFalse),\n\t\t\t\t\t\tScriptCard(78,\t'The African Queen',\t\tEPIC,\t[DM, QL, BD],\t\t6000,\t3000,\t3,\t2,\tTrue,\tTrue),\n\t\t\t\t\t\tScriptCard(79,\t'Gone with the Wind',\t\tEPIC,\t[BD, QL, SW, FH],\t10000,\t6000,\t5,\t3,\tTrue,\tTrue)])\nSWORDSDECK\t= Deck([\tScriptCard(80,\t'Red Dusk',\t\t\t\tSWORDS,\t[SW, DM],\t6000,\t3000,\t3,\t2,\tFalse,\tFalse),\n\t\t\t\t\t\tScriptCard(81,\t'Fighting Caravans',\t\t\tSWORDS,\t[FH],\t\t5000,\t2000,\t3,\t1,\tFalse,\tFalse),\n\t\t\t\t\t\tScriptCard(82,\t'The Four Horsemen of the Apocalypse',\tSWORDS,\t[FH, DM, S],\t6000,\t4000,\t3,\t3,\tFalse,\tTrue),\n\t\t\t\t\t\tScriptCard(83,\t'The Black Pirate',\t\t\tSWORDS,\t[DM, SW, FH],\t4000,\t2000,\t2,\t1,\tFalse,\tFalse),\n\t\t\t\t\t\tScriptCard(84,\t'The Sheik',\t\t\t\tSWORDS,\t[FH, S],\t8000,\t3000,\t4,\t2,\tTrue,\tFalse),\n\t\t\t\t\t\tScriptCard(85,\t'The Prince and the Pauper',\t\tSWORDS,\t[FH, SW],\t5000,\t2000,\t3,\t1,\tFalse,\tTrue),\n\t\t\t\t\t\tScriptCard(86,\t'The Quest of Life',\t\t\tSWORDS,\t[FH, BD],\t4000,\t1000,\t2,\t0,\tFalse,\tFalse),\n\t\t\t\t\t\tScriptCard(87,\t'The Adventures of Robin Hood',\t\tSWORDS,\t[FH],\t\t7000,\t4000,\t4,\t3,\tFalse,\tFalse),\n\t\t\t\t\t\tScriptCard(88,\t'The Falcon',\t\t\t\tSWORDS,\t[BD, FH],\t6000,\t3000,\t3,\t2,\tFalse,\tFalse),\n\t\t\t\t\t\tScriptCard(89,\t'This Gun for Hire',\t\t\tSWORDS,\t[SW, FH, DM],\t6000,\t2000,\t3,\t0,\tFalse,\tFalse),\n\t\t\t\t\t\tScriptCard(90,\t'Jesse James',\t\t\t\tSWORDS,\t[FH, 
BD],\t6000,\t3000,\t3,\t2,\tFalse,\tFalse),\n\t\t\t\t\t\tScriptCard(91,\t\"A Rogue's Romance\",\t\t\tSWORDS,\t[FH, SW],\t4000,\t1000,\t2,\t0,\tFalse,\tFalse),\n\t\t\t\t\t\tScriptCard(92,\t'The Avenging Sword',\t\t\tSWORDS,\t[FH, SW],\t9000,\t5000,\t4,\t3,\tFalse,\tFalse),\n\t\t\t\t\t\tScriptCard(93,\t'The Soldier and the Lady',\t\tSWORDS,\t[FH, S],\t7000,\t4000,\t3,\t2,\tTrue,\tFalse),\n\t\t\t\t\t\tScriptCard(94,\t'The Count of Monte Cristo',\t\tSWORDS,\t[FH, BD, C],\t5000,\t2000,\t3,\t1,\tFalse,\tTrue),\n\t\t\t\t\t\tScriptCard(95,\t'Adventures of Don Juan',\t\tSWORDS,\t[FH, S, SW],\t4000,\t2000,\t2,\t1,\tFalse,\tFalse),\n\t\t\t\t\t\tScriptCard(96,\t'The Thief of Bagdad',\t\t\tSWORDS,\t[FH, C, DM],\t5000,\t3000,\t2,\t2,\tTrue,\tFalse)])\n\nCREWDECK\t= Deck([\tCrewCard(ORDINARYCREW)] * 5 + [CrewCard(GOODCREW)] * 7 + [CrewCard(EXCELLENTCREW)] * 5)\nWRITERSDECK\t= Deck([\tWriterCard(ORDINARYWRITER)] * 10 + [WriterCard(EXCELLENTWRITER)] * 8)\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2013,"string":"2,013"}}},{"rowIdx":41183,"cells":{"__id__":{"kind":"number","value":8993661568168,"string":"8,993,661,568,168"},"blob_id":{"kind":"string","value":"98b06fff79cbc39dde40f6c9dd5cc0a8b928b9bb"},"directory_id":{"kind":"string","value":"ee53c87481baea8d3184230a59b9ff3317622b20"},"path":{"kind":"string","value":"/my/parse.py"},"content_id":{"kind":"string","value":"59a4fa63db3c14672f29d49ccdd822a6a7edd4bf"},"detected_licenses":{"kind":"list 
like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"zbhknight/pscripts"},"repo_url":{"kind":"string","value":"https://github.com/zbhknight/pscripts"},"snapshot_id":{"kind":"string","value":"b549ad670cf4732fdf9dea427ffa916f9df96cee"},"revision_id":{"kind":"string","value":"2f2ea0c77b0338746bbc21453375cb454a225860"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2016-08-06T03:48:27.132204","string":"2016-08-06T03:48:27.132204"},"revision_date":{"kind":"timestamp","value":"2013-08-12T03:17:43","string":"2013-08-12T03:17:43"},"committer_date":{"kind":"timestamp","value":"2013-08-12T03:17:43","string":"2013-08-12T03:17:43"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"#!/usr/bin/python\n\nimport sys\nimport re\nimport datetime\nimport shlex\nimport subprocess\nimport MySQLdb\nfrom os import path\n\nexIP = ['222.200.191.253', '1.3.6.1', '172.18.41.178']\n\ndef getData(filename):\n\ttry:\n\t\tcomm = 'grep -n ========== ' + filename\n\t\targs = shlex.split(comm)\n\t\tlineNum = int(subprocess.check_output(args).split('\\n')[-2].split(':')[0])\n\t\twcNum = int(subprocess.check_output(['wc', filename]).split()[0])\n\t\tnumber = wcNum - lineNum\n\t\tcomm = 'tail -n ' + str(number) + ' ' + filename\n\t\targs = shlex.split(comm)\n\t\tdata = 
subprocess.check_output(args).split('\\n')\n\texcept:\n\t\tf = open(filename, 'rb')\n\t\tdata = [ line for line in f ]\n\t\tf.close()\n\t\n\tf = open(filename, 'ab')\n\tf.write('='*80+'\\n')\n\tf.close()\n\t\n\tresult = []\n\tfor line in data:\n\t\tunit = parseLine(line)\n\t\tif unit:\n\t\t\tresult.append(unit)\n\n\treturn result\n\ndef parseLine(line):\n\ttimeP = r'([a-zA-Z]{3}\\s\\d{1,2}\\s(\\d{2}:){2}\\d{2})'\n\tipP = r'((\\d{1,3}\\.){3}\\d{1,3})'\n\tpattern = timeP + r'\\s' + ipP + r'.*?' + ipP + r'(.*)'\n\tm = re.match(pattern, line)\n\tif m:\n\t\treturn (m.group(1), m.group(5), m.group(7))\n\ndef getTime(string):\n\tyear = datetime.datetime.today().year\n\ttime = datetime.datetime.strptime(string+' '+str(year), \"%b %d %H:%M:%S %Y\")\n\treturn time\n\ndef packup(data):\n\tfinal = {}\n\tfor item in data:\n\t\tif not item[1] in exIP:\n\t\t\tif final.has_key(item[1]):\n\t\t\t\tfinal[item[1]][1] = getTime(item[0])\n\t\t\t\tfinal[item[1]][2].append(item[2])\n\t\t\telse:\n\t\t\t\tfinal[item[1]] = [getTime(item[0]), 0, []]\n\t\n\treturn final\n\ndef insertDB(final, filename):\n\tdb = MySQLdb.connect('localhost', 'root', '8817793', 'payroll')\n\tc = db.cursor()\n\tfor key, argv in final.items():\n\t\tdestIP = path.basename(filename)\t\n\t\tsourceIP = key\n\t\tstartTime = argv[0]\n\t\tendTime = argv[1]\n\t\tcomms = argv[2]\n\t\tc.execute('insert into watch_login (sourceIP, destIP, startTime, endTime) values (%s,%s,%s,%s)', (sourceIP, destIP, startTime, endTime))\n\n\t\tlastId = c.lastrowid\n\t\tcommList = [ (lastId, comm) for comm in comms ]\n\t\tc.executemany('insert into watch_command (ip_id, comm) values (%s, %s)', commList)\n\nif __name__ == '__main__':\n\targv = sys.argv[1:]\n\tfor a in argv:\n\t\tdata = getData(a)\n\t\tfinal = packup(data)\n\t\tinsertDB(final, 
a)\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2013,"string":"2,013"}}},{"rowIdx":41184,"cells":{"__id__":{"kind":"number","value":15212774165963,"string":"15,212,774,165,963"},"blob_id":{"kind":"string","value":"42760d3111b43066192f01cf6f2640be2ced904b"},"directory_id":{"kind":"string","value":"d210fa2dfc4ac8a917219e6e9e3632ebce4b2763"},"path":{"kind":"string","value":"/blaze/compute/tests/test_bcolz_compute.py"},"content_id":{"kind":"string","value":"acd084b3be5cb0652a56d1455b86b131723fbcbe"},"detected_licenses":{"kind":"list like","value":["BSD-3-Clause","LicenseRef-scancode-unknown-license-reference"],"string":"[\n \"BSD-3-Clause\",\n \"LicenseRef-scancode-unknown-license-reference\"\n]"},"license_type":{"kind":"string","value":"non_permissive"},"repo_name":{"kind":"string","value":"chdoig/blaze"},"repo_url":{"kind":"string","value":"https://github.com/chdoig/blaze"},"snapshot_id":{"kind":"string","value":"53e74cbb31378185ad8385d4ca33c7c772033e22"},"revision_id":{"kind":"string","value":"caa5a497e1ca1ceb1cf585483312ff4cd74d0bda"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2020-12-24T17:09:00.218435","string":"2020-12-24T17:09:00.218435"},"revision_date":{"kind":"timestamp","value":"2014-08-28T18:54:01","string":"2014-08-28T18:54:01"},"committer_date":{"kind":"timestamp","value":"2014-08-28T18:54:01","string":"2014-08-28T18:54:01"},"github_id":{"kind":"number","value":21960748,"string":"21,960,748"},"star_events_count":{"kind":"number","value":1,"string":"1"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"ki
nd":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"from __future__ import absolute_import, division, print_function\n\nimport pytest\nbcolz = pytest.importorskip('bcolz')\n\nimport numpy as np\nfrom pandas import DataFrame\n\nfrom blaze.bcolz import into, chunks\nfrom blaze.expr import *\nfrom blaze.compute.core import compute\n\n\nb = bcolz.ctable([[1, 2, 3],\n [1., 2., 3.]],\n names=['a', 'b'])\n\nt = TableSymbol('t', '{a: int32, b: float64}')\n\n\ndef test_chunks():\n assert len(list(chunks(b, chunksize=2))) == 2\n assert (next(chunks(b, chunksize=2)) == into(np.array(0), b)[:2]).all()\n\n\ndef test_reductions():\n assert compute(t.a.sum(), b) == 6\n assert compute(t.a.min(), b) == 1\n assert compute(t.a.max(), b) == 3\n assert compute(t.a.mean(), b) == 2.\n assert abs(compute(t.a.std(), b) - np.std([1, 2, 3])) < 1e-5\n assert abs(compute(t.a.var(), b) - np.var([1, 2, 3])) < 1e-5\n assert compute(t.a.nunique(), b) == 3\n assert compute(t.nunique(), b) == 3\n assert len(list(compute(t.distinct(), b))) == 3\n assert len(list(compute(t.a.distinct(), b))) == 3\n\n\ndef test_selection_head():\n b = into(bcolz.ctable,\n ((i, i + 1, float(i)**2) for i in range(10000)),\n names=['a', 'b', 'c'])\n t = TableSymbol('t', '{a: int32, b: int32, c: float64}')\n\n assert compute((t.a < t.b).all(), b) == True\n assert list(compute(t[t.a < t.b].a.head(10), b)) == list(range(10))\n assert list(compute(t[t.a > t.b].a.head(10), b)) == []\n\n assert into([], compute(t[t.a + t.b > t.c], b)) == [(0, 1, 0),\n (1, 2, 1),\n (2, 3, 4)]\n assert len(compute(t[t.a + t.b > t.c].head(10), b)) # non-empty\n assert len(compute(t[t.a + t.b < t.c].head(10), b)) # non-empty\n\n\ndef test_selection_isnan():\n assert compute(t[t.a.isnan()].count(), b) == 0\n assert 
compute(t[~(t.a.isnan())].count(), b) == 3\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41185,"cells":{"__id__":{"kind":"number","value":11544872137282,"string":"11,544,872,137,282"},"blob_id":{"kind":"string","value":"e760a22e92b2061a57f28550b4d96630ca71dee4"},"directory_id":{"kind":"string","value":"cfd547b2cf7812d2534a1992e633fcf4a54d5fa6"},"path":{"kind":"string","value":"/TriblerCode/Tribler/Core/API.py"},"content_id":{"kind":"string","value":"4f75dde9ee35e2d203ffcd37ec7aaa8beb4e62a3"},"detected_licenses":{"kind":"list like","value":["LicenseRef-scancode-unknown-license-reference","OpenSSL","LGPL-2.1-only","LGPL-2.0-or-later","Python-2.0","MIT","LicenseRef-scancode-python-cwi","LicenseRef-scancode-other-copyleft","WxWindows-exception-3.1","LGPL-2.1-or-later","LicenseRef-scancode-openssl","LicenseRef-scancode-warranty-disclaimer","GPL-1.0-or-later","LicenseRef-scancode-free-unknown","LicenseRef-scancode-ssleay-windows","LicenseRef-scancode-mit-old-style","BitTorrent-1.1","GPL-2.0-only"],"string":"[\n \"LicenseRef-scancode-unknown-license-reference\",\n \"OpenSSL\",\n \"LGPL-2.1-only\",\n \"LGPL-2.0-or-later\",\n \"Python-2.0\",\n \"MIT\",\n \"LicenseRef-scancode-python-cwi\",\n \"LicenseRef-scancode-other-copyleft\",\n \"WxWindows-exception-3.1\",\n \"LGPL-2.1-or-later\",\n \"LicenseRef-scancode-openssl\",\n \"LicenseRef-scancode-warranty-disclaimer\",\n \"GPL-1.0-or-later\",\n \"LicenseRef-scancode-free-unknown\",\n \"LicenseRef-scancode-ssleay-windows\",\n \"LicenseRef-scancode-mit-old-style\",\n \"BitTorrent-1.1\",\n 
\"GPL-2.0-only\"\n]"},"license_type":{"kind":"string","value":"non_permissive"},"repo_name":{"kind":"string","value":"thejosh223/cs198mojo"},"repo_url":{"kind":"string","value":"https://github.com/thejosh223/cs198mojo"},"snapshot_id":{"kind":"string","value":"14f359a8d55a24904aed7381a485f79774bb32dc"},"revision_id":{"kind":"string","value":"4d8d698f28e265ac91c0b1467ef3766cb33a854a"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-22T05:28:11.558733","string":"2021-01-22T05:28:11.558733"},"revision_date":{"kind":"timestamp","value":"2014-04-02T11:44:00","string":"2014-04-02T11:44:00"},"committer_date":{"kind":"timestamp","value":"2014-04-02T11:44:00","string":"2014-04-02T11:44:00"},"github_id":{"kind":"number","value":10900361,"string":"10,900,361"},"star_events_count":{"kind":"number","value":1,"string":"1"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"# Written by Arno Bakker\n# see LICENSE.txt for license information\n#\n# To use the Tribler Core just do:\n# from Tribler.Core.API import *\n#\n\"\"\" Tribler Core API v1.0.0rc5, July 14 2008. 
Import this to use the API \"\"\"\n\n# History:\n# 1.0.0rc5 : Added option to define auxiliary seeding servers for live stream\n# (=these servers are always unchoked at the source server).\n#\n# 1.0.0rc4 : Changed DownloadConfig.set_vod_start_callback() to a generic \n# event-driven interface.\n\n\nfrom Tribler.Core.simpledefs import *\nfrom Tribler.Core.Base import *\nfrom Tribler.Core.Session import *\nfrom Tribler.Core.SessionConfig import *\nfrom Tribler.Core.Download import *\nfrom Tribler.Core.DownloadConfig import *\nfrom Tribler.Core.DownloadState import *\nfrom Tribler.Core.exceptions import *\nfrom Tribler.Core.RequestPolicy import *\nfrom Tribler.Core.TorrentDef import *\nfrom Tribler.Core.LiveSourceAuthConfig import *\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41186,"cells":{"__id__":{"kind":"number","value":7799660630506,"string":"7,799,660,630,506"},"blob_id":{"kind":"string","value":"6c9c62c808cb5a52773619403105ddc37b2c6fd6"},"directory_id":{"kind":"string","value":"682f5783b2b00ecb7973220f7163f3b55517fdcd"},"path":{"kind":"string","value":"/application/screenlymanager/urls.py"},"content_id":{"kind":"string","value":"0b8283b4b85aa877a1946f01cbeff1a416ee4300"},"detected_licenses":{"kind":"list 
like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"wandercampos/screenly-manager"},"repo_url":{"kind":"string","value":"https://github.com/wandercampos/screenly-manager"},"snapshot_id":{"kind":"string","value":"205f223f8e56c59d3aecaab04087b3a2000ab221"},"revision_id":{"kind":"string","value":"17b8fa70f474aeb82f68298ff52f2fbc8105e37b"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-20T16:41:59.029541","string":"2021-01-20T16:41:59.029541"},"revision_date":{"kind":"timestamp","value":"2014-01-16T15:06:21","string":"2014-01-16T15:06:21"},"committer_date":{"kind":"timestamp","value":"2014-01-16T15:06:21","string":"2014-01-16T15:06:21"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"from django.conf.urls import patterns, include, url\nfrom django.contrib import admin\nfrom django.conf.urls.static import static\nadmin.autodiscover()\n\nurlpatterns = patterns('',\n url(r'^$', 'screenlymanager.views.index', name='index'),\n url(r'^clients/?$', 'screenlymanager.views.clients', name='clients'),\n url(r'^client/(?P[0-9]+)/?$', 'screenlymanager.views.client', name='client-detail'),\n url(r'^admin/', 
include(admin.site.urls)),\n)\n\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41187,"cells":{"__id__":{"kind":"number","value":6399501281723,"string":"6,399,501,281,723"},"blob_id":{"kind":"string","value":"78f5d02998de0554f7a0ad16bab3b20643e3152b"},"directory_id":{"kind":"string","value":"d2915c3783bfd4720b56d503b07e9f73d67087eb"},"path":{"kind":"string","value":"/JariSandbox/Tokenization/ExtractBioInferSentences.py"},"content_id":{"kind":"string","value":"4b530f169e5ed64be2a76f3ff7dae622579a5a32"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"arururu/Tdevel"},"repo_url":{"kind":"string","value":"https://github.com/arururu/Tdevel"},"snapshot_id":{"kind":"string","value":"36f0b3eaea9f689d31aeaf0f572b0f5eb433253d"},"revision_id":{"kind":"string","value":"362a0f3923b2dfdc5ed78a4f18c2478f67d81629"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-05-28T01:12:41.319428","string":"2021-05-28T01:12:41.319428"},"revision_date":{"kind":"timestamp","value":"2012-06-26T12:25:48","string":"2012-06-26T12:25:48"},"committer_date":{"kind":"timestamp","value":"2012-06-26T12:25:48","string":"2012-06-26T12:25:48"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"k
ind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"import cElementTree as ElementTree\nimport cElementTreeUtils as ETUtils\n\nsentenceFile = None\ntokenizationFile = None\n\ndef processDocument(documentElement):\n global sentenceFile, tokenizationFile\n sentenceElement = documentElement.find(\"sentence\")\n sentenceFile.write(sentenceElement.get(\"text\")+\"\\n\")\n \n tokenElements = sentenceElement.getiterator(\"token\")\n isFirst = True\n for tokenElement in tokenElements:\n if not isFirst:\n tokenizationFile.write(\" \")\n tokenizationFile.write( tokenElement.get(\"text\") )\n isFirst = False\n tokenizationFile.write(\"\\n\")\n\nif __name__==\"__main__\":\n # Import Psyco if available\n try:\n import psyco\n psyco.full()\n print \"Found Psyco, using\"\n except ImportError:\n print \"Psyco not installed\"\n \n sentenceFile = open(\"BioInferSentences.txt\", \"wt\")\n tokenizationFile = open(\"BioInferMedpostTokenization.txt\", \"wt\")\n \n filename = \"/usr/share/biotext/Tampere_project/PPI_Learning/Data/BioInferAnalysis.xml/BioInferAnalysis.xml\"\n print \"Processing documents\"\n ETUtils.iterparse(filename, \"document\", processDocument)\n \n sentenceFile.close()\n 
tokenizationFile.close()"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2012,"string":"2,012"}}},{"rowIdx":41188,"cells":{"__id__":{"kind":"number","value":11768210411937,"string":"11,768,210,411,937"},"blob_id":{"kind":"string","value":"64ecd020a41b639fa7ff21d9609b605dc500adb0"},"directory_id":{"kind":"string","value":"424fd2e60f747ba908c87449b0fbf2206f08c4e3"},"path":{"kind":"string","value":"/tools/wordlist.py"},"content_id":{"kind":"string","value":"a61de815dc7f29bc4bf60918b055eb615bfa7986"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"Vishwanath17/android-target"},"repo_url":{"kind":"string","value":"https://github.com/Vishwanath17/android-target"},"snapshot_id":{"kind":"string","value":"7a5ea1bdb8e54136afeab02fb9e27b8a39d1b90f"},"revision_id":{"kind":"string","value":"a4e7eeed36931ae37b24bfbf9c21390f02619022"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2016-08-12T10:05:33.214220","string":"2016-08-12T10:05:33.214220"},"revision_date":{"kind":"timestamp","value":"2009-09-01T08:25:01","string":"2009-09-01T08:25:01"},"committer_date":{"kind":"timestamp","value":"2009-09-01T08:25:01","string":"2009-09-01T08:25:01"},"github_id":{"kind":"number","value":45437932,"string":"45,437,932"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"g
ha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"#!/usr/bin/python2.4\n\n\"\"\"Representation of a dictionary (list) of words.\"\"\"\n\nimport re\nimport string\n\nclass Dict(list):\n \"\"\"A dictionary.\n \n Attributes:\n filename: Original filename of dictionary\n name: str, Arbitrary name of the list\n \"\"\"\n def __init__(self, filename=None, name=''):\n super(Dict, self).__init__(self)\n self.filename = filename\n self.name = name\n if filename is not None:\n words = file(filename).readlines()\n # words = map(string.upper, words)\n words = map(string.strip, words)\n self.extend(words)\n\n def append(self, item):\n \"\"\" Append only non-empty values.\"\"\"\n if len(item):\n super(Dict, self).append(item)\n \n def extend(self, newlist):\n clean_list = []\n for item in newlist:\n if len(item):\n clean_list.append(item)\n super(Dict, self).extend(clean_list)\n \n def replace(self, wordlist):\n del self[:]\n self.extend(wordlist)\n\n def getWordsByLetter(self):\n word_dict = {}\n for word in self:\n first_letter = word[0]\n if first_letter not in word_dict:\n word_dict[first_letter] = []\n word_dict[first_letter].append(word)\n return word_dict\n\n def getWordsByLength(self, length):\n wordlist = []\n for word in self:\n if len(word) == length:\n wordlist.append(word)\n return wordlist\n\n def wordsNotInList(self, wordlist):\n \"\"\"Fetch words from 'self' that arent also in 'wordlist'.\"\"\"\n final_list = []\n for word in self:\n if word not in wordlist:\n final_list.append(word)\n return final_list\n\n def wordsAlsoInList(self, wordlist):\n \"\"\"Fetch words from 'self' that are also in 'wordlist'.\"\"\"\n final_list = []\n for word in self:\n if word in wordlist:\n final_list.append(word)\n return final_list\n\n def filterByRegex(self, regex_str):\n \"\"\"Filter list by the supplied regex string.\"\"\"\n new_list = []\n regex = 
re.compile(regex_str, re.I)\n for word in self:\n if regex.match(word):\n new_list.append(word)\n del self[:]\n self.extend(new_list)\n \n def toUpper(self):\n newlist = []\n for word in self:\n newlist.append(word.upper())\n self.replace(newlist)\n \n def filterByWordsNotInList(self, wordlist):\n \"\"\"Filter list to words not in the given list.\"\"\"\n newlist = []\n for word in self:\n if word not in wordlist:\n newlist.append(word)\n self.replace(newlist);\n\n def writeToFile(self, filename=None):\n \"\"\"Write all words to 'filename'.\"\"\"\n if filename is None:\n filename = self.name\n fd = file(filename, 'w')\n for word in self:\n fd.write(word + '\\n')\n fd.close()\n\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2009,"string":"2,009"}}},{"rowIdx":41189,"cells":{"__id__":{"kind":"number","value":19043884999184,"string":"19,043,884,999,184"},"blob_id":{"kind":"string","value":"a83c53720b9bbf54eb742848a6da92697731ac41"},"directory_id":{"kind":"string","value":"88e03e66109adb6325ccace96f37b31e15c5e86c"},"path":{"kind":"string","value":"/docopt/required.py"},"content_id":{"kind":"string","value":"a64ff10588f2dd1f58d91125fa4e6001486217c6"},"detected_licenses":{"kind":"list 
like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"abevieiramota/learning-python"},"repo_url":{"kind":"string","value":"https://github.com/abevieiramota/learning-python"},"snapshot_id":{"kind":"string","value":"53ee5d158af33f627c65a7d3960083a1242713ed"},"revision_id":{"kind":"string","value":"c9dfa37e5dd547ab03d1ff67932ff28be70bfbeb"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-10T21:05:48.021976","string":"2021-01-10T21:05:48.021976"},"revision_date":{"kind":"timestamp","value":"2014-07-11T20:28:03","string":"2014-07-11T20:28:03"},"committer_date":{"kind":"timestamp","value":"2014-07-11T20:28:03","string":"2014-07-11T20:28:03"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"\"\"\"Required.\n\nUsage:\n required.py (--parametro1=) [--parametro2=]\n\"\"\"\n\nfrom docopt import docopt\n\nargs = docopt(__doc__, version=\"oi\")\n\nprint 
args\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41190,"cells":{"__id__":{"kind":"number","value":5059471493131,"string":"5,059,471,493,131"},"blob_id":{"kind":"string","value":"dd9cb1d68fae68dd62c376b0bcff46f3f2b2ca68"},"directory_id":{"kind":"string","value":"6da35308c55fa8192b6c88a2c50f932647ee4229"},"path":{"kind":"string","value":"/py/signal_protocol.py"},"content_id":{"kind":"string","value":"2b32a01127d4cf5a0ae02abc5ddd4501f760ce45"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"billghad/p2p"},"repo_url":{"kind":"string","value":"https://github.com/billghad/p2p"},"snapshot_id":{"kind":"string","value":"326781048267861237bf0c7f441557fcce72e19c"},"revision_id":{"kind":"string","value":"4808966552dfe2ae668ebc84c9a7ed42492ac9b3"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-10T14:09:43.542212","string":"2021-01-10T14:09:43.542212"},"revision_date":{"kind":"timestamp","value":"2011-03-17T22:08:50","string":"2011-03-17T22:08:50"},"committer_date":{"kind":"timestamp","value":"2011-03-17T22:08:50","string":"2011-03-17T22:08:50"},"github_id":{"kind":"number","value":47732874,"string":"47,732,874"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_lan
guage":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"\"\"\" Network protocol module. Converts python messages from/to JSON objects\"\"\"\nimport json\nfrom collections import namedtuple\nfrom config import logs # pylint: disable=E0611\n\n\nclass ProtocolError(Exception): # pylint: disable=C0111\n pass\n\n\nclass Messages(object):\n \"\"\" Provides common messages for communication and\n a message to/from JSON converter.\"\"\"\n TERMINATOR = '\\0'\n errors = [ProtocolError.__name__]\n messages = ['chat', 'error']\n\n Message = namedtuple('Message', 'name args')\n\n @staticmethod\n def chat(message):\n \"\"\" converts chat message to an JSON object.\n JSON.name = \"chat\"\n json.args = message \"\"\"\n return Messages.serialize(\"chat\", message)\n \n\n @staticmethod\n def error(spec, reason=None):\n \"\"\" converts an error to an JSON object\n JSON.name = \"error\"\n JSON.args = \"spec, reason(s)\" list\n \"\"\"\n logs.logger.debug(\"Message error: %s, reason: %s\" % (spec, reason))\n try:\n if spec.__name__ in Messages.errors:\n return Messages.serialize(\"error\", spec.__name__, reason)\n else:\n raise ProtocolError(spec)\n except AttributeError, error:\n logs.logger.critical(\n \"Messages.error exception, reason: %s\" % error)\n raise ProtocolError(error)\n\n @staticmethod\n def serialize(name, *args):\n \"\"\" converts input to JSON objects:\n JSON.name = name\n JSON.args = args\n \"\"\"\n logs.logger.debug(\"serialize: %s, %s, %s\", name, args)\n try:\n return json.dumps({'name': name, 'args': args})\n except (ValueError, TypeError), reason:\n logs.logger.critical(\"serializer exception, reason %s\" % reason)\n raise ProtocolError(name)\n\n @staticmethod\n def deserialize(data):\n \"\"\" converts from JSON objects to python dictionary \"\"\"\n logs.logger.debug(\"deserialize: %s\" % data)\n try:\n result = json.loads(data)\n if result['name'] in Messages.messages:\n return 
Messages.Message(result['name'], result['args'])\n else:\n raise ProtocolError(result['name'])\n except (ValueError, TypeError, KeyError):\n raise ProtocolError(data)\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2011,"string":"2,011"}}},{"rowIdx":41191,"cells":{"__id__":{"kind":"number","value":77309438121,"string":"77,309,438,121"},"blob_id":{"kind":"string","value":"504dd9c166d52d80e046f8de198c6eca809b2efa"},"directory_id":{"kind":"string","value":"a6e571edef5fdda9adedbd2abcbcd2dd5e4c7c9c"},"path":{"kind":"string","value":"/Prototype/Miscellaneous/forms.py"},"content_id":{"kind":"string","value":"b70b31735a5ad091fa2cd54811ad95dab939cecf"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"1101811b/DIM3-Team-Q-Fish"},"repo_url":{"kind":"string","value":"https://github.com/1101811b/DIM3-Team-Q-Fish"},"snapshot_id":{"kind":"string","value":"cd5ce885db3513873ce412e219aebe62c53e2561"},"revision_id":{"kind":"string","value":"b9791c649599a0069024f37ab6dac64e97d209a2"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2016-09-05T22:42:22.553807","string":"2016-09-05T22:42:22.553807"},"revision_date":{"kind":"timestamp","value":"2014-03-21T09:40:44","string":"2014-03-21T09:40:44"},"committer_date":{"kind":"timestamp","value":"2014-03-21T09:40:44","string":"2014-03-21T09:40:44"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"nu
ll"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"from django import forms\nfrom Miscellaneous.models import *\n\nclass EmailForm(forms.ModelForm):\n class Meta: \n model = Contact\n\nclass ComplaintForm(forms.ModelForm):\n class Meta:\n model = Complaint\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41192,"cells":{"__id__":{"kind":"number","value":17145509477237,"string":"17,145,509,477,237"},"blob_id":{"kind":"string","value":"db6cb41e9e9bd4c537972be3b756a1de88f7191a"},"directory_id":{"kind":"string","value":"7359acef1cbed3e94d992979ebcb87544feafc1d"},"path":{"kind":"string","value":"/qikify/controllers/GaussianProcess.py"},"content_id":{"kind":"string","value":"2cac8f65eda07fe98c60f53dae90d60f03009606"},"detected_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"license_type":{"kind":"string","value":"permissive"},"repo_name":{"kind":"string","value":"abhishek-basu-git/qikify"},"repo_url":{"kind":"string","value":"https://github.com/abhishek-basu-git/qikify"},"snapshot_id":{"kind":"string","value":"de803b7313f94508d0b384fccd298b87a015ad9f"},"revision_id":{"kind":"string","value":"ed1384a1cbaf57fe4d570937e8a5859ab0858fde"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2022-11-13T19:52:35.696803","string":"2022-11-13T19:52:35.696803"},"revision_date":{"kind":"timestamp","value":"2012-05-23T18:53:47","string":"2012-05-23T18:53:47"},"committer_date":{"kind":"timestamp","value":"2012-05-23T18:53:47","string":"2012-05-23T18:53:47"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"import numpy as np\nfrom sklearn.gaussian_process import GaussianProcess\n\n\nclass GaussianProcess(object):\n def __init__(self, nugget=0.1):\n self.nugget = nugget\n \n def fit(self, chips):\n X = pandas.DataFrame([[chip.X, chip.Y] for chip in chips])\n y = [chip.gnd for chip in chips] \n self.gp = GaussianProcess(nugget=self.nugget)\n self.gp.fit(X, y)\n \n def predict(self, chip):\n return self.gp.predict([chip.X, chip.Y])\n 
"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2012,"string":"2,012"}}},{"rowIdx":41193,"cells":{"__id__":{"kind":"number","value":18880676242140,"string":"18,880,676,242,140"},"blob_id":{"kind":"string","value":"fe7dddcc7282559894ea93db48e912aad04242d7"},"directory_id":{"kind":"string","value":"5df550c720cd63cd1bae8b7cddccac7f1e0d420d"},"path":{"kind":"string","value":"/sorbic/stor/mpack.py"},"content_id":{"kind":"string","value":"940bac8e463537da373685c71ce51b8433ceb991"},"detected_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"license_type":{"kind":"string","value":"permissive"},"repo_name":{"kind":"string","value":"SmithSamuelM/sorbic"},"repo_url":{"kind":"string","value":"https://github.com/SmithSamuelM/sorbic"},"snapshot_id":{"kind":"string","value":"936a88936478938893f597bf5d7f7998c06597d9"},"revision_id":{"kind":"string","value":"bf0b8d28f75c35c15f81a6f16b47755202be3fff"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-18T05:32:07.731795","string":"2021-01-18T05:32:07.731795"},"revision_date":{"kind":"timestamp","value":"2014-12-07T07:13:47","string":"2014-12-07T07:13:47"},"committer_date":{"kind":"timestamp","value":"2014-12-07T07:13:47","string":"2014-12-07T07:13:47"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_lang
uage":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"# -*- coding: utf-8 -*-\n'''\nStorage using msgpack for serialization\n'''\n# Import third party libs\nimport msgpack\n\n\nclass Mpack(object):\n '''\n msgpack!\n '''\n def __init__(self, root):\n self.root = root\n\n def dump(self, data):\n '''\n prep the data for storage\n '''\n return msgpack.dumps(data)\n\n def load(self, raw_data):\n '''\n load data into serialized form\n '''\n return msgpack.loads(raw_data)\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41194,"cells":{"__id__":{"kind":"number","value":4432406272138,"string":"4,432,406,272,138"},"blob_id":{"kind":"string","value":"659e4aaf0ef2cdc4f04a3b5fcf238ad76f5a0d76"},"directory_id":{"kind":"string","value":"d8e79e8fe894c64a1a1e6e89ff869e88d5f722c1"},"path":{"kind":"string","value":"/core/model.py"},"content_id":{"kind":"string","value":"43db96f2a3baf207905428601522313994aeba57"},"detected_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n 
\"Apache-2.0\"\n]"},"license_type":{"kind":"string","value":"permissive"},"repo_name":{"kind":"string","value":"justasabc/double_ball"},"repo_url":{"kind":"string","value":"https://github.com/justasabc/double_ball"},"snapshot_id":{"kind":"string","value":"ffb2e1244c603945c1bacdf997a35b5aaaf6685d"},"revision_id":{"kind":"string","value":"955231b477c2be6f86afbb7e3707396fc4bc0f1f"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-10T19:46:58.104500","string":"2021-01-10T19:46:58.104500"},"revision_date":{"kind":"timestamp","value":"2014-12-06T06:36:31","string":"2014-12-06T06:36:31"},"committer_date":{"kind":"timestamp","value":"2014-12-06T06:36:31","string":"2014-12-06T06:36:31"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"#!usr/bin/python\n# encoding: utf-8\n\n__all__ = ['Record','RecordCollection','FileReader','Stats']\n\nfrom constants import *\n\n# 03001 10 11 12 13 26 28 11 10307806 0 0 898744 1 2003-2-20 2003-2-23\nclass Record: \n\t\n\tdef __init__(self,parts):\n\t\tself.__init(parts)\n\t\tself.__stats()\n\t\t#print self.str()\n\t\t#print self.stats_str()\n\n\tdef __init(self,parts):\n\t\tself.id = parts[0]\n\t\tself.n1 = int(parts[1])\n\t\tself.n2 = int(parts[2])\n\t\tself.n3 = int(parts[3])\n\t\tself.n4 = int(parts[4])\n\t\tself.n5 = int(parts[5])\n\t\tself.n6 = int(parts[6])\n\t\tself.n7 = int(parts[7])\n\t\tself.total_money = 
int(parts[8])\n\t\tself.one_money = int(parts[9])\n\t\tself.one_count = int(parts[10])\n\t\tself.two_money = int(parts[11])\n\t\tself.two_count = int(parts[12])\n\t\tself.start_date = parts[13]\n\t\tself.end_date = parts[14]\n\n\tdef __stats(self):\n\t\tself.__stats_red_blue()\n\t\tself.__stats_red()\n\n\tdef __stats_red_blue(self):\n\t\tself.red_sum = 0\n\t\tself.blue_sum = 0\n\t\tself.red_01_str = \"\"\n\t\tself.blue_01_str = \"\"\n\t\tself.red_01_count = (0,0)\n\t\tself.blue_01_count = (0,0)\n\t\tself.red_prim_count = 0\n\t\tself.blue_prim_count = 0\n\t\t# 1-11, 12-22,23-33\n\t\tself.red_3zone_count = (0,0,0)\n\t\t# 1-8, 9-16\n\t\tself.blue_2zone_count = (0,0)\n\n\t\t# get stats\n\t\t# red\n\t\tred_list = [self.n1,self.n2,self.n3,self.n4,self.n5,self.n6]\n\t\tc0 = 0\n\t\tc1 = 0\n\t\tprim = 0\n\t\tfor n in red_list:\n\t\t\tself.red_sum += n \n\t\t\tif n%2==0:\n\t\t\t\tself.red_01_str += '0'\n\t\t\t\tc0 += 1\n\t\t\telse:\n\t\t\t\tself.red_01_str += '1'\n\t\t\t\tc1 += 1\n\t\t\t# red prim\n\t\t\tif n in RED_PRIM_LIST:\n\t\t\t\tprim += 1\n\t\tself.red_01_count = (c0,c1)\n\t\tself.red_prim_count = prim\n\n\t\t# blue\n\t\tblue_list = [self.n7]\n\t\tc0 = 0\n\t\tc1 = 0\n\t\tprim = 0\n\t\tfor n in blue_list:\n\t\t\tself.blue_sum += n \n\t\t\tif n%2==0:\n\t\t\t\tself.blue_01_str += '0'\n\t\t\t\tc0 += 1\n\t\t\telse:\n\t\t\t\tself.blue_01_str += '1'\n\t\t\t\tc1 += 1\n\t\t\t# blue prim\n\t\t\tif n in BLUE_PRIM_LIST:\n\t\t\t\tprim += 1\n\t\tself.blue_01_count = (c0,c1)\n\t\tself.blue_prim_count = prim\n\n\t\t# zone stats\n\t\t# red\n\t\tzone1 = 0\n\t\tzone2 = 0\n\t\tzone3 = 0\n\t\tfor n in red_list:\n\t\t\tif n>=RED_ZONE1[0] and n<=RED_ZONE1[1]:\n\t\t\t\tzone1 +=1\n\t\t\telif n>=RED_ZONE2[0] and n<=RED_ZONE2[1]:\n\t\t\t\tzone2 +=1\n\t\t\telif n>=RED_ZONE3[0] and n<=RED_ZONE3[1]:\n\t\t\t\tzone3 +=1\n\t\tself.red_3zone_count = (zone1,zone2,zone3)\n\n\t\t# blue\n\t\tzone1 = 0\n\t\tzone2 = 0\n\t\tfor n in blue_list:\n\t\t\tif n>=BLUE_ZONE1[0] and n<=BLUE_ZONE1[1]:\n\t\t\t\tzone1 
+=1\n\t\t\telif n>=BLUE_ZONE2[0] and n<=BLUE_ZONE2[1]:\n\t\t\t\tzone2 +=1\n\t\tself.blue_2zone_count = (zone1,zone2)\n\n\tdef __stats_red(self):\n\t\tred_list = [self.n1,self.n2,self.n3,self.n4,self.n5,self.n6]\n\t\t# (1) red shift to base\n\t\t# 2,13,17,20,25,33===>11,15,18,23,31\n\t\tself.red_shift_to_base = []\n\t\tfor n in red_list[1:]:\n\t\t\tself.red_shift_to_base.append(n-red_list[0])\n\n\t\t# (2) red head-tail width \n\t\tself.red_width = self.n6-self.n1\n\t\t# (3) red delta \n\t\t# 2,13,17,20,25,33===> 11,4,3,5,8\n\n\tdef stats_str(self):\n\t\treturn \"\\n [red] sum={0} 01_str={1} 01_count={2} prim={3}\\n [blue] sum={4} 01_str={5} 01_count={6} prim={7}\\n 3zone = {8}\".format(self.red_sum,self.red_01_str,self.red_01_count,self.red_prim_count,\n\t\t\tself.blue_sum,self.blue_01_str,self.blue_01_count,self.blue_prim_count,\n\t\t\tself.red_3zone_count)\n\n\tdef long_str(self):\n\t\treturn \"{0} [{1} {2} {3} {4} {5} {6} {7}] {8} {9} {10} {11} {12} {13} {14}\".format(self.id,self.n1,self.n2,self.n3,self.n4,self.n5,self.n6,self.n7,self.total_money,self.one_money,self.one_count,self.two_money,self.two_count,self.start_date,self.end_date)\n\n\tdef short_str(self):\n\t\treturn \"{0} [{1} {2} {3} {4} {5} {6} {7}] {8}/{9}\".format(self.id,self.n1,self.n2,self.n3,self.n4,self.n5,self.n6,self.n7,self.start_date,self.end_date)\n\n\tdef str(self):\n\t\treturn \"%s [%02d %02d %02d %02d %02d %02d %02d] %s/%s\" % (self.id,self.n1,self.n2,self.n3,self.n4,self.n5,self.n6,self.n7,self.start_date,self.end_date)\n\n\tdef __str__(self):\n\t\treturn self.long_str()\n\nclass RecordCollection:\n\n\tdef __init__(self):\n\t\tself.records = []\n\n\tdef get_records(self):\n\t\treturn self.records\n\n\tdef get_record_count(self):\n\t\treturn len(self.records)\n\n\tdef add_record(self,record):\n\t\tself.records.append(record)\n\n\tdef get_record_by_id(self,id):\n\t\t# 03088 \n\t\tif(len(id)!=5):\n\t\t\tprint \"Error. 
invalid id %s\".format(id)\n\t\t\treturn None\n\t\tfor record in self.records:\n\t\t\tif id == record.id:\n\t\t\t\treturn record\n\t\tprint \"Warning. can not find record id =%s\".format(id)\n\t\treturn None\n\n\t\"\"\"\n\tquerying methods:\n\treturn a list of record\n\t\"\"\"\n\tdef query_by_year(self,year):\n\t\t# 2003,2009--->03,09\n\t\t# 2010,2011--->10,11\t\n\t\tif(yearMONTH):\n\t\t\tprint \"Error. invalid month {0}\".format(month)\n\t\t\treturn None\n\t\tresult = []\n\t\tfor record in self.records:\n\t\t\tparts = record.end_date.split(\"-\")\n\t\t\tif year==int(parts[0]) and month == int(parts[1]) :\n\t\t\t\tresult.append(record)\n\t\treturn result\t\n\n\tdef __x_query_by_date(self,date):\n\t\t# '2003-9-4'\n\t\tfor record in self.records:\n\t\t\tif (date == record.end_date):\n\t\t\t\treturn [record]\n\t\tprint \"Warning. can not find record id =%s\".format(id)\n\t\treturn None\n\n\tdef query_by_year_month_day(self,year,month,day):\n\t\t# 2003,2,23\n\t\t# '2003-2-23' \n\t\tif(yearMONTH):\n\t\t\tprint \"Error. invalid month {0}\".format(month)\n\t\t\treturn None\n\t\tif(day<1 or day>31):\n\t\t\tprint \"Error. 
invalid day {0}\".format(day)\n\t\t\treturn None\n\t\tdate = '{0}-{1}-{2}'.format(year,month,day)\n\t\treturn self.__x_query_by_date(date)\n\n\tdef __x_query_by_number_pos_1(self,n):\n\t\tresult = []\n\t\tfor record in self.records:\n\t\t\tif n == record.n1 :\n\t\t\t\tresult.append(record)\t\n\t\treturn result\n\tdef __x_query_by_number_pos_2(self,n):\n\t\tresult = []\n\t\tfor record in self.records:\n\t\t\tif n == record.n2 :\n\t\t\t\tresult.append(record)\t\n\t\treturn result\n\tdef __x_query_by_number_pos_3(self,n):\n\t\tresult = []\n\t\tfor record in self.records:\n\t\t\tif n == record.n3 :\n\t\t\t\tresult.append(record)\t\n\t\treturn result\n\tdef __x_query_by_number_pos_4(self,n):\n\t\tresult = []\n\t\tfor record in self.records:\n\t\t\tif n == record.n4 :\n\t\t\t\tresult.append(record)\t\n\t\treturn result\n\tdef __x_query_by_number_pos_5(self,n):\n\t\tresult = []\n\t\tfor record in self.records:\n\t\t\tif n == record.n5 :\n\t\t\t\tresult.append(record)\t\n\t\treturn result\n\tdef __x_query_by_number_pos_6(self,n):\n\t\tresult = []\n\t\tfor record in self.records:\n\t\t\tif n == record.n6 :\n\t\t\t\tresult.append(record)\t\n\t\treturn result\n\tdef __x_query_by_number_pos_7(self,n):\n\t\tresult = []\n\t\tfor record in self.records:\n\t\t\tif n == record.n7 :\n\t\t\t\tresult.append(record)\t\n\t\treturn result\n\n\tdef query_by_number_pos(self,n,pos):\n\t\t# pos = 1,2,3,4,5,6,7\n\t\tif(pos<1 or pos>7):\n\t\t\tprint 'Error. valid pos is 1-7.'\n\t\t\treturn None\n\n\t\tif(pos==7):\n\t\t\tif (n>BLUE_MAX_NUMBER):\n\t\t\t\tprint \"Error. blue number >=%s\" % BLUE_MAX_NUMBER\n\t\t\t\treturn None\n\t\telse:\n\t\t\tif (n>RED_MAX_NUMBER):\n\t\t\t\tprint \"Error. 
red number >=%s\" % RED_MAX_NUMBER\n\t\t\t\treturn None\n\n\t\tmethods = {\n\t\t\t1:self.__x_query_by_number_pos_1,\n\t\t\t2:self.__x_query_by_number_pos_2,\n\t\t\t3:self.__x_query_by_number_pos_3,\n\t\t\t4:self.__x_query_by_number_pos_4,\n\t\t\t5:self.__x_query_by_number_pos_5,\n\t\t\t6:self.__x_query_by_number_pos_6,\n\t\t\t7:self.__x_query_by_number_pos_7\n\t\t\t}\t\t\n\t\treturn methods[pos](n)\n\n\tdef save(self,filepath):\n\t\twith open(filepath,'w') as f:\n\t\t\tfor record in self.records:\n\t\t\t\tline = \"%02d %02d %02d %02d %02d %02d %02d\\n\" % (record.n1,record.n2,record.n3,record.n4,record.n5,record.n6,record.n7)\n\t\t\t\tf.write(line)\n\t\tprint \"generated {0}.\".format(filepath)\n\n\tdef query_by_number_list(self,number_list):\n\t\tif(len(number_list)>7):\n\t\t\tprint \"Error. number list count>7\"\n\t\t\treturn None\n\t\tresult = []\n\t\tfor record in self.records:\n\t\t\tlist7 = [record.n1,record.n2,record.n3,record.n4,record.n5,record.n6,record.n7]\n\t\t\tbase_set = set(list7)\n\t\t\tquery_set = set(number_list)\n\t\t\tif query_set.issubset(base_set):\n\t\t\t\tresult.append(record)\n\t\treturn result\n\n\tdef test_number(self,n1,n2,n3,n4,n5,n6,n7):\n\t\t#03056 08 17 21 26 28 29 07 32664536 5000000 1 557563 3 2003-8-31 2003-9-4\n\t\t#result = query_by_number_list([n1,n2,n3,n4,n5,n6,n7])\n\t\tfor record in self.records:\n\t\t\tif (n1==record.n1 and n2==record.n2 and n3==record.n3 and n4==record.n4\n\t\t\t\tand n5==record.n5 and n6==record.n6 and n7==record.n7):\n\t\t\t\tprint 'Hit. [{0} {1} {2} {3} {4} {5} {6}] at {7} on {8}'.format(n1,n2,n3,n4,n5,n6,n7,record.id,record.end_date)\n\t\t\t\treturn True\n\t\tprint 'NO Hit. 
[{0} {1} {2} {3} {4} {5} {6}]'.format(n1,n2,n3,n4,n5,n6,n7)\n\t\treturn False\n\nclass FileReader:\n\n\tdef __init__(self):\n\t\tself.sep = ' '\n\n\tdef process(self,filepath):\n\t\trc = RecordCollection()\n\t\tfor line in open(filepath,'r'):\n\t\t\tparts = line.strip('\\n').split(self.sep)\n\t\t\tif(len(parts)!=RECORD_FIELD):\n\t\t\t\tprint \"ERROR. record field %d!\" % len(parts)\n\t\t\t\treturn None\n\t\t\trecord = Record(parts)\n\t\t\trc.add_record(record)\n\t\treturn rc\n\nclass Stats:\n\n\tdef __init__(self,rc):\n\t\tself.__init(rc)\n\n\tdef __init(self,rc):\n\t\tself.rc = rc\n\n\t\t# red stats\n\t\tself.red_sum_list = []\n\t\tself.red_01_str_list = []\n\t\tself.red_01_count_list = []\n\t\tself.red_prim_count_list = []\n\t\tself.red_3zone_count_list = []\n\t\tself.__get_red_xxx_list()\n\n\t\t# blue stats\n\t\tself.blue_sum_list = []\n\t\tself.blue_01_str_list = []\n\t\tself.blue_01_count_list = []\n\t\tself.blue_prim_count_list = []\n\t\tself.blue_2zone_count_list = []\n\t\tself.__get_blue_xxx_list()\n\n\t\t# avg avg_e\n\t\tself.red_sum_avg = self.__avg_list(self.red_sum_list)\n\t\tself.blue_sum_avg = self.__avg_list(self.blue_sum_list)\n\t\tself.red_sum_avg_e = (RED_MIN_NUMBER + RED_MAX_NUMBER)*RED_COUNT/2.0\n\t\tself.blue_sum_avg_e = (BLUE_MIN_NUMBER + BLUE_MAX_NUMBER)*BLUE_COUNT/2.0\n\n\t\t# red only\n\t\t# red shift base\n\t\tself.red_shift_to_base_list = self.__get_red_shift_to_base_list()\n\t\t# red width\n\t\tself.red_width_list = self.__get_red_width_list()\n\n\t\t# red/blue prim pair\n\t\tself.prim_count_list = zip(self.red_prim_count_list,self.blue_prim_count_list)\n\n\tdef __avg_list(self,list):\n\t\tcount = len(list)\n\t\tif(count==0):\n\t\t\treturn 0.0\n\t\ttotal_sum = 0\n\t\tfor n in list:\n\t\t\ttotal_sum += n\n\t\treturn total_sum*1.0/count\n\n\tdef __inf(self,filename):\n\t\treturn \"{0}{1}\".format(INPUT_FOLDER,filename)\n\n\tdef __outf(self,filename):\n\t\treturn \"{0}{1}\".format(OUTPUT_FOLDER,filename)\n\n\tdef 
__save_list(self,filename,list):\n\t\tfilepath = self.__outf(filename)\n\t\twith open(filepath,'w') as f:\n\t\t\tfor item in list:\n\t\t\t\tline = str(item)+\"\\n\"\n\t\t\t\tf.write(line)\n\t\tprint \"saved {0}.\".format(filepath)\n\n\tdef save(self):\n\t\tself.__save_list('red_sum_list',self.red_sum_list)\n\t\tself.__save_list('blue_sum_list',self.blue_sum_list)\n\t\tself.__save_list('red_01_str_list',self.red_01_str_list)\n\t\tself.__save_list('blue_01_str_list',self.blue_01_str_list)\n\t\tself.__save_list('red_01_count_list',self.red_01_count_list)\n\t\tself.__save_list('blue_01_count_list',self.blue_01_count_list)\n\t\tself.__save_list('red_prim_count_list',self.red_prim_count_list)\n\t\tself.__save_list('blue_prim_count_list',self.blue_prim_count_list)\n\t\tself.__save_list('prim_count_list',self.prim_count_list)\n\t\tself.__save_list('red_3zone_count_list',self.red_3zone_count_list)\n\t\tself.__save_list('blue_2zone_count_list',self.blue_2zone_count_list)\n\n\t\t# red only\n\t\tself.__save_list('red_shift_to_base_list',self.red_shift_to_base_list)\n\t\tself.__save_list('red_width_list',self.red_width_list)\n\n\t\"\"\"\n\tget red xxx list\n\t\"\"\"\n\tdef __get_red_xxx_list(self):\n\t\tfor record in self.rc.get_records():\n\t\t\tself.red_sum_list.append(record.red_sum)\n\t\t\tself.red_01_str_list.append(record.red_01_str)\n\t\t\tself.red_01_count_list.append(record.red_01_count)\n\t\t\tself.red_prim_count_list.append(record.red_prim_count)\n\t\t\tself.red_3zone_count_list.append(record.red_3zone_count)\n\n\t\"\"\"\n\tget blue xxx list\n\t\"\"\"\n\tdef __get_blue_xxx_list(self):\n\t\tfor record in self.rc.get_records():\n\t\t\tself.blue_sum_list.append(record.blue_sum)\n\t\t\tself.blue_01_str_list.append(record.blue_01_str)\n\t\t\tself.blue_01_count_list.append(record.blue_01_count)\n\t\t\tself.blue_prim_count_list.append(record.blue_prim_count)\n\t\t\tself.blue_2zone_count_list.append(record.blue_2zone_count)\n\n\tdef get_red_sum_avg_e(self):\n\t\treturn 
self.red_sum_avg_e\n\n\tdef get_red_sum_avg(self):\n\t\treturn self.red_sum_avg\n\n\tdef get_blue_sum_avg_e(self):\n\t\treturn self.blue_sum_avg_e\n\n\tdef get_blue_sum_avg(self):\n\t\treturn self.blue_sum_avg\n\n\t\"\"\"\n\tred related methods\n\t\"\"\"\n\t# red shift to base\n\tdef __get_red_shift_to_base_list(self):\n\t\tself.red_shift_to_base_list = []\n\t\tfor record in self.rc.get_records():\n\t\t\tself.red_shift_to_base_list.append(record.red_shift_to_base)\n\t\treturn self.red_shift_to_base_list\n\t# red width \n\tdef __get_red_width_list(self):\n\t\tself.red_width_list = []\n\t\tfor record in self.rc.get_records():\n\t\t\tself.red_width_list.append(record.red_width)\n\t\treturn self.red_width_list\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41195,"cells":{"__id__":{"kind":"number","value":1357209695915,"string":"1,357,209,695,915"},"blob_id":{"kind":"string","value":"f6f4e75c439d9144a71a7f61e7e73c94d5c2a55a"},"directory_id":{"kind":"string","value":"82d3b8250984c97e2bde35bf48637a9a8ed15a2c"},"path":{"kind":"string","value":"/taiga/domains/models.py"},"content_id":{"kind":"string","value":"13e5effe3910ef3c9ae3039b5f31638f856575bf"},"detected_licenses":{"kind":"list like","value":["LicenseRef-scancode-unknown-license-reference","AGPL-3.0-only"],"string":"[\n \"LicenseRef-scancode-unknown-license-reference\",\n 
\"AGPL-3.0-only\"\n]"},"license_type":{"kind":"string","value":"non_permissive"},"repo_name":{"kind":"string","value":"anler/taiga-back"},"repo_url":{"kind":"string","value":"https://github.com/anler/taiga-back"},"snapshot_id":{"kind":"string","value":"75ab8578826e8d5d4b965916caaa60bfb43df9e3"},"revision_id":{"kind":"string","value":"552a9d83f88bca5b113b5255bc636243671aecd2"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-18T11:30:46.822563","string":"2021-01-18T11:30:46.822563"},"revision_date":{"kind":"timestamp","value":"2014-05-13T15:33:31","string":"2014-05-13T15:33:31"},"committer_date":{"kind":"timestamp","value":"2014-05-14T08:32:14","string":"2014-05-14T08:32:14"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"# Copyright (C) 2014 Andrey Antukh \n# This program is free software: you can redistribute it and/or modify\n# it under the terms of the GNU Affero General Public License as\n# published by the Free Software Foundation, either version 3 of the\n# License, or (at your option) any later version.\n#\n# This program is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the\n# GNU Affero General Public License for more details.\n#\n# You should have received a copy of the GNU Affero General Public License\n# along with this program. If not, see .\n\nimport string\n\nfrom django.db import models\nfrom django.db.models.signals import pre_save, pre_delete\nfrom django.dispatch import receiver\nfrom django.utils.translation import ugettext_lazy as _\nfrom django.core.exceptions import ValidationError\n\nfrom .base import clear_domain_cache\n\n\ndef _simple_domain_name_validator(value):\n \"\"\"\n Validates that the given value contains no whitespaces to prevent common\n typos.\n \"\"\"\n if not value:\n return\n\n checks = ((s in value) for s in string.whitespace)\n if any(checks):\n raise ValidationError(\n _(\"The domain name cannot contain any spaces or tabs.\"),\n code='invalid',\n )\n\n\nclass Domain(models.Model):\n domain = models.CharField(_('domain name'), max_length=255, unique=True,\n validators=[_simple_domain_name_validator])\n name = models.CharField(_('display name'), max_length=255)\n scheme = models.CharField(_('scheme'), max_length=60, null=True, default=None)\n\n # Site Metadata\n public_register = models.BooleanField(default=False)\n default_language = models.CharField(max_length=20, null=False, blank=True, default=\"\",\n verbose_name=_(\"default language\"))\n\n alias_of = models.ForeignKey(\"self\", null=True, default=None, blank=True,\n verbose_name=_(\"Mark as alias of\"), related_name=\"+\")\n\n class Meta:\n verbose_name = _('domain')\n verbose_name_plural = _('domain')\n ordering = ('domain',)\n\n def __str__(self):\n return self.domain\n\n def user_is_owner(self, user):\n return self.members.filter(user_id=user.id, is_owner=True).exists()\n\n def user_is_staff(self, user):\n return self.members.filter(user_id=user.id, is_staff=True).exists()\n\n def user_is_normal_user(self, user):\n return self.members.filter(user_id=user.id, is_owner=False, is_staff=False).exists()\n\n\nclass 
DomainMember(models.Model):\n domain = models.ForeignKey(\"Domain\", related_name=\"members\", null=True)\n user = models.ForeignKey(\"users.User\", related_name=\"+\", null=True)\n\n email = models.EmailField(max_length=255)\n is_owner = models.BooleanField(default=False)\n is_staff = models.BooleanField(default=False)\n\n class Meta:\n ordering = [\"email\"]\n verbose_name = \"Domain Member\"\n verbose_name_plural = \"Domain Members\"\n unique_together = (\"domain\", \"user\")\n\n def __str__(self):\n return \"DomainMember: {0}:{1}\".format(self.domain, self.user)\n\n\npre_save.connect(clear_domain_cache, sender=Domain)\npre_delete.connect(clear_domain_cache, sender=Domain)\n\n@receiver(pre_delete, sender=DomainMember, dispatch_uid=\"domain_member_pre_delete\")\ndef domain_member_pre_delete(sender, instance, *args, **kwargs):\n for domain_project in instance.domain.projects.all():\n domain_project.memberships.filter(user=instance.user).delete()\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41196,"cells":{"__id__":{"kind":"number","value":17557826310194,"string":"17,557,826,310,194"},"blob_id":{"kind":"string","value":"2344515785ec67a1838719e726e814a4a43b4ff4"},"directory_id":{"kind":"string","value":"ce7e01e55644b423d9dca279bfe41762cdc6462a"},"path":{"kind":"string","value":"/studio/config/installer.py"},"content_id":{"kind":"string","value":"b664d9bcd00c6fa85ce053e12ac76430d698b12b"},"detected_licenses":{"kind":"list like","value":["GPL-3.0-or-later","LGPL-2.0-or-later","LicenseRef-scancode-warranty-disclaimer","GPL-1.0-or-later","LGPL-2.1-or-later","GPL-3.0-only","AGPL-3.0-or-later","LicenseRef-scancode-other-copyleft","LicenseRef-scancode-unknown-license-reference","LicenseRef-scancode-free-unknown"],"string":"[\n 
\"GPL-3.0-or-later\",\n \"LGPL-2.0-or-later\",\n \"LicenseRef-scancode-warranty-disclaimer\",\n \"GPL-1.0-or-later\",\n \"LGPL-2.1-or-later\",\n \"GPL-3.0-only\",\n \"AGPL-3.0-or-later\",\n \"LicenseRef-scancode-other-copyleft\",\n \"LicenseRef-scancode-unknown-license-reference\",\n \"LicenseRef-scancode-free-unknown\"\n]"},"license_type":{"kind":"string","value":"non_permissive"},"repo_name":{"kind":"string","value":"Chirag19/Studio"},"repo_url":{"kind":"string","value":"https://github.com/Chirag19/Studio"},"snapshot_id":{"kind":"string","value":"830256697b745466c5ae4ec91d53bd2506fe8845"},"revision_id":{"kind":"string","value":"43cb7298434fb606b15136801b79b03571a2f27e"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2020-05-09T13:18:16.518211","string":"2020-05-09T13:18:16.518211"},"revision_date":{"kind":"timestamp","value":"2011-03-17T08:51:32","string":"2011-03-17T08:51:32"},"committer_date":{"kind":"timestamp","value":"2011-03-17T08:51:32","string":"2011-03-17T08:51:32"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"#\n# Copyright (C) 2010 Camptocamp\n#\n# This file is part of Studio\n#\n# Studio is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# Studio is 
distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with Studio. If not, see .\n#\n\nimport getpass\n\nfrom pylons.util import PylonsInstaller\nfrom paste.script.templates import var\nfrom paste.script.util import secret\n\n\nclass StudioInstaller(PylonsInstaller):\n\n def config_content(self, command, vars):\n \"\"\"\n Called by ``self.write_config``, this returns the text content\n for the config file, given the provided variables.\n \"\"\"\n settable_vars = [\n var('db_url', 'Database url for sqlite, postgres or mysql', \n default='sqlite:///%(here)s/studio.db'),\n var('ms_url','Url to the mapserv CGI',\n default='http://localhost/cgi-bin/mapserv'),\n var('admin_password','Password for default admin user',\n default=secret.secret_string(length=8))\n ]\n\n for svar in settable_vars:\n if command.interactive:\n prompt = 'Enter %s' % svar.full_description()\n response = command.challenge(prompt, svar.default, svar.should_echo)\n vars[svar.name] = response\n else:\n if not vars.has_key(svar.name):\n vars[svar.name] = svar.default\n\n vars['cookie_secret'] = secret.secret_string()\n\n # call default pylons install\n return super(StudioInstaller, self).config_content(command, 
vars)\n\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2011,"string":"2,011"}}},{"rowIdx":41197,"cells":{"__id__":{"kind":"number","value":8306466767290,"string":"8,306,466,767,290"},"blob_id":{"kind":"string","value":"0024a68ed4e25e52011fba01735bf617f71a6cf5"},"directory_id":{"kind":"string","value":"5773af03f49d4950f8b1d07f941591a5f9813f0f"},"path":{"kind":"string","value":"/aprs2tracker.py"},"content_id":{"kind":"string","value":"6ee85a9a3ddd9e42f31f5434991a7e8923c26bd5"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"chuckhacker/APRS2Tracker"},"repo_url":{"kind":"string","value":"https://github.com/chuckhacker/APRS2Tracker"},"snapshot_id":{"kind":"string","value":"416e88e2213c645c5ab500f8f3ad2ec211db60c0"},"revision_id":{"kind":"string","value":"53348be31fd0665b0b683ad26837d2c337feb92c"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-05-27T02:44:39.266613","string":"2021-05-27T02:44:39.266613"},"revision_date":{"kind":"timestamp","value":"2011-08-13T18:55:59","string":"2011-08-13T18:55:59"},"committer_date":{"kind":"timestamp","value":"2011-08-13T18:55:59","string":"2011-08-13T18:55:59"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null
"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"# APRS servers list: http://www.aprs-is.net/APRSServers.aspx\n# Tier 2 servers: http://www.aprs2.net/serverstats.php\n\nfrom aprs_client import APRSClient\nfrom aprs_handler import APRSPacket\nfrom tracker import Tracker\nfrom optparse import OptionParser, Option\n\nclass Main:\n def __init__(self, trackerUrl, trackerPass):\n self.tracker = Tracker(trackerUrl, trackerPass)\n \n def packetHandler(self, aprsString):\n print 'APRS String: %s' % aprsString\n packet = APRSPacket()\n if packet.parse(aprsString):\n print '%s -> %s' % (packet.source, packet.dest)\n print 'Report type: %s' % packet.reportType\n if packet.hasLocation:\n print 'Time: %sZ' % packet.time\n print 'Coordinates: %f, %f, Altitude: %d ft' % (packet.latitude, packet.longitude, packet.altitude)\n print 'Course: %d, Speed: %d kn, Bearing: %d' % (packet.course, packet.speed, packet.bearing)\n print 'Comment: %s' % packet.comment\n \n print 'Uploading to tracker'\n self.tracker.track(packet)\n \n print ''\n\nclass ExtendOption(Option):\n\n ACTIONS = Option.ACTIONS + (\"extend\",)\n STORE_ACTIONS = Option.STORE_ACTIONS + (\"extend\",)\n TYPED_ACTIONS = Option.TYPED_ACTIONS + (\"extend\",)\n ALWAYS_TYPED_ACTIONS = Option.ALWAYS_TYPED_ACTIONS + (\"extend\",)\n\n def take_action(self, action, dest, opt, value, values, parser):\n if action == \"extend\":\n lvalue = value.split(\",\")\n values.ensure_value(dest, []).extend(lvalue)\n else:\n Option.take_action(\n self, action, dest, opt, value, values, parser)\n\ndef defaultOpt(value, default):\n if value:\n return value\n else:\n return default\n\ndef run():\n parser = OptionParser(option_class=ExtendOption)\n parser.add_option(\"-u\", \"--url\", dest=\"url\", help=\"Tracker URL including track.php\")\n parser.add_option(\"-w\", \"--password\", dest=\"password\", help=\"Tracker password\")\n parser.add_option(\"-a\", \"--host\", dest=\"host\", help=\"APRS 
server host name\")\n parser.add_option(\"-p\", \"--port\", dest=\"port\", type=\"int\", help=\"APRS server port\")\n parser.add_option(\"-c\", \"--callsigns\", dest=\"callsigns\", action=\"extend\", help=\"Comma delimeted callsigns to monitor (you can use *)\")\n parser.add_option(\"-j\", \"--adjunct\", dest=\"adjunct\", help=\"APRS adjunct string\")\n (options, args) = parser.parse_args()\n \n if options.callsigns:\n adjunct = 'filter b/' + '/'.join(options.callsigns)\n if options.adjunct:\n adjunct += ' ' + options.adjunct\n else:\n adjunct = defaultOpt(options.adjunct, '')\n\n main = Main(defaultOpt(options.url, 'http://spacenear.us/tracker/track.php'),\n defaultOpt(options.password, 'aurora'))\n \n client = APRSClient(main.packetHandler,\n defaultOpt(options.host, 'ontario.aprs2.net'),\n adjunct,\n defaultOpt(options.port, 14580))\n client.start()\n \n #main.packetHandler('KE7MK-9>APOTC1,WIDE1-1,WIDE2-1,qAR,WT7T-6:/280229z4448.85N/10656.63Wv195/018/A=003888KE7MK Mobile Monitoring 146.820')\n\nif __name__=='__main__':\n run()"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2011,"string":"2,011"}}},{"rowIdx":41198,"cells":{"__id__":{"kind":"number","value":6399501279523,"string":"6,399,501,279,523"},"blob_id":{"kind":"string","value":"153f2d6b3f1f40f7bdb8419780a5ac29b9dd1a75"},"directory_id":{"kind":"string","value":"08867249c328e6da73a8b52bb01954050e8c360a"},"path":{"kind":"string","value":"/controllers/test_util.py"},"content_id":{"kind":"string","value":"f63e13e3608508412400d51c0f4d99aa557ae7d8"},"detected_licenses":{"kind":"list 
like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"pombredanne/tinyclassified"},"repo_url":{"kind":"string","value":"https://github.com/pombredanne/tinyclassified"},"snapshot_id":{"kind":"string","value":"b054537a04210a6d6646906029c966d4ca300f6d"},"revision_id":{"kind":"string","value":"c8f11fc61b89a40c686c4783220b95476c76bd86"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2017-12-04T16:06:23.908925","string":"2017-12-04T16:06:23.908925"},"revision_date":{"kind":"timestamp","value":"2014-09-02T00:50:22","string":"2014-09-02T00:50:22"},"committer_date":{"kind":"timestamp","value":"2014-09-02T00:50:22","string":"2014-09-02T00:50:22"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"\"\"\"Utility functions / classes for controller testing.\n\n@author: Rory Olsen (rolsen, Gleap LLC 2014)\n@license: GNU GPLv3\n\"\"\"\n\n\nclass TestCursor:\n def __init__(self, results):\n self.results = results\n self.index = 0\n self.distinct_param = ''\n\n def count(self):\n return len(self.results)\n\n def __iter__(self):\n return self\n\n def next(self):\n if not self.index < len(self.results):\n raise StopIteration\n ret = self.results[self.index]\n self.index += 1\n return ret\n\n def distinct(self, distinct):\n self.distinct_param = distinct\n ret = []\n for result in self.results:\n 
ret.append(result[distinct])\n return ret\n\n def __getitem__(self, trash):\n return self.results[0]\n\n\nclass TestCollection():\n find_hash = None\n find_result = None\n deleted = []\n\n def find_one(self, find_hash):\n self.find_hash = find_hash\n return self.find_result\n\n def remove(self, remove):\n self.deleted.append(remove)\n\n\nclass TestDBAdapter():\n collection = None\n\n def get_listings_collection(self):\n return self.collection\n\n\ndef check_dict(expected_dict, test_dict):\n \"\"\"Check that two dictionaries are the same for each key in the first dict.\n\n Check that each key value pair in expected_dict is also in test dictionary\n but do not check that the relationship is bidirectional (test_dict may\n contain keys / values not in expected_dict).\n\n @return: True if everything in expected_dict is in test_dict. False\n otherwise.\n @rtype: bool\n \"\"\"\n for (key, value) in expected_dict.items():\n if test_dict[key] != value:\n return False\n return True\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":41199,"cells":{"__id__":{"kind":"number","value":1803886300718,"string":"1,803,886,300,718"},"blob_id":{"kind":"string","value":"8411157785a8fa8d6dd13f776a7bdf6a454f71dc"},"directory_id":{"kind":"string","value":"0fa6012b852d78c849e1385d9a926e35a07516df"},"path":{"kind":"string","value":"/nengo/tests/test_node.py"},"content_id":{"kind":"string","value":"2b39c76634550ee65186b77b0e3d23ab0b14d886"},"detected_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"license_type":{"kind":"string","value":"permissive"},"repo_name":{"kind":"string","value":"jaberg/nengo"},"repo_url":{"kind":"string","value":"https://github.com/jaberg/nengo"},"snapshot_id":{"kind":"string","value":"b87a5408f77c19cd3b44c40aa0f4af55cc08c148"},"revision_id":{"kind":"string","value":"1e882c8ee684aac757a6f181d8b8498aad9d330d"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2020-04-01T19:10:26.717474","string":"2020-04-01T19:10:26.717474"},"revision_date":{"kind":"timestamp","value":"2013-08-29T14:27:24","string":"2013-08-29T14:27:24"},"committer_date":{"kind":"timestamp","value":"2013-08-29T14:27:24","string":"2013-08-29T14:27:24"},"github_id":{"kind":"number","value":10359867,"string":"10,359,867"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":1,"string":"1"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"bool","value":true,"string":"true"},"gha_event_created_at":{"kind":"timestamp","value":"2013-11-19T00:03:38","string":"2013-11-19T00:03:38"},"gha_created_at":{"kind":"timestamp","value":"2013-05-29T13:10:28","string":"2013-05-29T13:10:28"},"gha_updated_at":{"kind":"timestamp","value":"2013-11-19T00:03:38","string":"2013-11-19T00:03:38"},"gha_pushed_at":{"kind":"timestamp","value":"2013-11-19T00:03:38","string":"2013-11-19T00:03:38"},"gha_size":{"kind":"number","value":5422,"string":"5,422"},"gha_stargazers_count":{"kind":"number","value":0,"string":"0"},"gha_forks_count":{"kind":"number","value":0,"string":"0"},"gha_open_issues_count":{"kind":"number","value":0,"string":"0"},"gha_language":{"kind":"string","value":"Python"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"import numpy as np\n\nimport nengo\nimport nengo.old_api as nef\nfrom nengo.tests.helpers import SimulatorTestCase, unittest\n\n\nclass TestNode(SimulatorTestCase):\n\n def test_simple(self):\n params 
= dict(simulator=self.Simulator, seed=123, dt=0.001)\n\n # Old API\n net = nef.Network('test_simple', **params)\n net.make_input('in', value=np.sin)\n p = net.make_probe('in', dt_sample=0.001, pstc=0.0)\n rawp = net._raw_probe(net.inputs['in'], dt_sample=.001)\n st_probe = net._raw_probe(net.model.simtime, dt_sample=.001)\n net.run(0.01)\n\n data = p.get_data()\n raw_data = rawp.get_data()\n st_data = st_probe.get_data()\n self.assertTrue(np.allclose(st_data.ravel(),\n np.arange(0.001, 0.0105, .001)))\n self.assertTrue(np.allclose(raw_data.ravel(),\n np.sin(np.arange(0, 0.0095, .001))))\n # -- the make_probe call induces a one-step delay\n # on readout even when the pstc is really small.\n self.assertTrue(np.allclose(data.ravel()[1:],\n np.sin(np.arange(0, 0.0085, .001))))\n\n # New API\n m = nengo.Model('test_simple', **params)\n node = m.make_node('in', output=np.sin)\n m.probe('in')\n m.run(0.01)\n self.assertTrue(np.allclose(m.data[m.simtime].ravel(),\n np.arange(0.001, 0.0105, .001)))\n self.assertTrue(np.allclose(m.data['in'].ravel(),\n np.sin(np.arange(0, 0.0095, .001))))\n\n\nif __name__ == \"__main__\":\n nengo.log_to_file('log.txt', debug=True)\n 
unittest.main()\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2013,"string":"2,013"}}}],"truncated":false,"partial":false},"paginationData":{"pageIndex":411,"numItemsPerPage":100,"numTotalItems":42509,"offset":41100,"length":100}},"jwt":"eyJhbGciOiJFZERTQSJ9.eyJyZWFkIjp0cnVlLCJwZXJtaXNzaW9ucyI6eyJyZXBvLmNvbnRlbnQucmVhZCI6dHJ1ZX0sImlhdCI6MTc1NjY4MTcyMCwic3ViIjoiL2RhdGFzZXRzL2xvdWJuYWJubC9vbGRfcHl0aG9uIiwiZXhwIjoxNzU2Njg1MzIwLCJpc3MiOiJodHRwczovL2h1Z2dpbmdmYWNlLmNvIn0.bSE5dDid-GOh58xzpmw0tDRL-pegLwpU5O676zFGA4CK9UTHconPkR-8oE1cGGTiEx8Lk0EPESPZYsVwEojQDA","displayUrls":true},"discussionsStats":{"closed":0,"open":1,"total":1},"fullWidth":true,"hasGatedAccess":true,"hasFullAccess":true,"isEmbedded":false,"savedQueries":{"community":[],"user":[]}}">
__id__
int64
3.09k
19,722B
blob_id
stringlengths
40
40
directory_id
stringlengths
40
40
path
stringlengths
2
256
content_id
stringlengths
40
40
detected_licenses
list
license_type
stringclasses
3 values
repo_name
stringlengths
5
109
repo_url
stringlengths
24
128
snapshot_id
stringlengths
40
40
revision_id
stringlengths
40
40
branch_name
stringlengths
4
42
visit_date
timestamp[ns]
revision_date
timestamp[ns]
committer_date
timestamp[ns]
github_id
int64
6.65k
581M
star_events_count
int64
0
1.17k
fork_events_count
int64
0
154
gha_license_id
stringclasses
16 values
gha_fork
bool
2 classes
gha_event_created_at
timestamp[ns]
gha_created_at
timestamp[ns]
gha_updated_at
timestamp[ns]
gha_pushed_at
timestamp[ns]
gha_size
int64
0
5.76M
gha_stargazers_count
int32
0
407
gha_forks_count
int32
0
119
gha_open_issues_count
int32
0
640
gha_language
stringlengths
1
16
gha_archived
bool
2 classes
gha_disabled
bool
1 class
content
stringlengths
9
4.53M
src_encoding
stringclasses
18 values
language
stringclasses
1 value
is_vendor
bool
2 classes
is_generated
bool
2 classes
year
int64
1.97k
2.01k
19,370,302,521,373
47ddbc8fd5e0c09ee746ef11705d8f82350df7c2
3d19e1a316de4d6d96471c64332fff7acfaf1308
/Users/D/dataland/imo_2.py
644f83705c58128d6fa42895b400bde0cfe9eaeb
[]
no_license
BerilBBJ/scraperwiki-scraper-vault
https://github.com/BerilBBJ/scraperwiki-scraper-vault
4e98837ac3b1cc3a3edb01b8954ed00f341c8fcc
65ea6a943cc348a9caf3782b900b36446f7e137d
refs/heads/master
2021-12-02T23:55:58.481210
2013-09-30T17:02:59
2013-09-30T17:02:59
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import scraperwiki import lxml.html import urllib2 import datetime import json import string import random from BeautifulSoup import BeautifulSoup import random # Token can be a capital letter or a number 0-9 def urlfortoken(token,pagenum): return 'http://www.imonumber.com/ships/{token}?Vessel_page={pagenum}'.format(token=token,pagenum=pagenum) # Build array of capital letters (A:Z) let_list = map(chr, range(65, 91)) # Add to array let_list.append("9") # Step 1: Build list of total page counts for each letter/number partition of data, # and build the detail urls based on a loop that counts to that limit def urlformain(pagenum): return "http://www.imonumber.com/ships/{pagenum}".format(pagenum=pagenum) try: curr_index = scraperwiki.sqlite.get_var("index_next_run",0) except NameError: curr_index = 0 url = urlformain(let_list[curr_index]) text = urllib2.urlopen(url).read() soup = BeautifulSoup(text) # Get row that contains the count of records for letter data = soup.findAll('li',attrs={'class':'last'}) o = str(data) letterpos = (o.index("?")-1) letter = o[34] pagestart = (o.index("_page=")+6) pagefinish = (o.index(">Last")-1) last = int(o[pagestart:pagefinish]) for p in range(1,last+1): pageid = urlfortoken(letter,p) text2 = urllib2.urlopen(pageid).read() soup2 = BeautifulSoup(text2) data2 = soup2.findAll('a',attrs={'class':'imo-block'}) for n in data2: raw = str(n) linkstart = (raw.index("href=")+6) linkfinish = (raw.index("<img")-3) link = raw[linkstart:linkfinish] now = datetime.datetime.now() data3 = {"tmsp_scraped":str(now), "detail_page":link, "list_page":pageid, "index":let_list[curr_index]} scraperwiki.sqlite.save(unique_keys=["detail_page"], data=data3, table_name = "smtb_links") print "Finished Index "+letter+ " Page "+str(p) if curr_index < 36: next_index = curr_index + 1 else: next_index = 0 scraperwiki.sqlite.save_var("index_next_run",next_index) print "" print "Index value for next run will be "+str(let_list[next_index]) import scraperwiki import 
lxml.html import urllib2 import datetime import json import string import random from BeautifulSoup import BeautifulSoup import random # Token can be a capital letter or a number 0-9 def urlfortoken(token,pagenum): return 'http://www.imonumber.com/ships/{token}?Vessel_page={pagenum}'.format(token=token,pagenum=pagenum) # Build array of capital letters (A:Z) let_list = map(chr, range(65, 91)) # Add to array let_list.append("9") # Step 1: Build list of total page counts for each letter/number partition of data, # and build the detail urls based on a loop that counts to that limit def urlformain(pagenum): return "http://www.imonumber.com/ships/{pagenum}".format(pagenum=pagenum) try: curr_index = scraperwiki.sqlite.get_var("index_next_run",0) except NameError: curr_index = 0 url = urlformain(let_list[curr_index]) text = urllib2.urlopen(url).read() soup = BeautifulSoup(text) # Get row that contains the count of records for letter data = soup.findAll('li',attrs={'class':'last'}) o = str(data) letterpos = (o.index("?")-1) letter = o[34] pagestart = (o.index("_page=")+6) pagefinish = (o.index(">Last")-1) last = int(o[pagestart:pagefinish]) for p in range(1,last+1): pageid = urlfortoken(letter,p) text2 = urllib2.urlopen(pageid).read() soup2 = BeautifulSoup(text2) data2 = soup2.findAll('a',attrs={'class':'imo-block'}) for n in data2: raw = str(n) linkstart = (raw.index("href=")+6) linkfinish = (raw.index("<img")-3) link = raw[linkstart:linkfinish] now = datetime.datetime.now() data3 = {"tmsp_scraped":str(now), "detail_page":link, "list_page":pageid, "index":let_list[curr_index]} scraperwiki.sqlite.save(unique_keys=["detail_page"], data=data3, table_name = "smtb_links") print "Finished Index "+letter+ " Page "+str(p) if curr_index < 36: next_index = curr_index + 1 else: next_index = 0 scraperwiki.sqlite.save_var("index_next_run",next_index) print "" print "Index value for next run will be "+str(let_list[next_index])
UTF-8
Python
false
false
2,013
16,621,523,437,336
0f6a89f440f9bb44baeee87398f4d0e0a56dd1cc
c026068cf0522391f5defe763a9ecf1aeb416c17
/test_NoseXUnitLite/unit/test_tools/test_create.py
6decb07a227c625b1449465c0469994254a8f1bd
[ "LGPL-2.1-only" ]
non_permissive
kassoulet/NoseXUnitLite
https://github.com/kassoulet/NoseXUnitLite
20555244842b80fd633bd6041d3f54fc41f91794
dca07c79737b256cbf85e8428ffdc2bbba464808
refs/heads/master
2021-01-19T15:27:42.161534
2013-07-18T12:24:26
2013-07-18T12:24:26
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#-*- coding: utf-8 -*- import os import test_NoseXUnitLite import nosexunitlite.tools as ntools import nosexunitlite.excepts as nexcepts class TestCreate(test_NoseXUnitLite.TestCase): def test_not_exists(self): folder = os.path.join(self.work, 'foo') ntools.create(folder) self.assertTrue(os.path.isdir(folder)) def test_exists_but_file(self): folder = os.path.join(self.work, 'foo') open(folder, 'w').close() self.assertRaises(nexcepts.ToolError, ntools.create, folder) if __name__=="__main__": test_NoseXUnitLite.main()
UTF-8
Python
false
false
2,013
1,073,741,841,433
51b80a1f31170abd8ea3cecd0bb900e1454bf9f7
163b84b3d9d151d10e0f2a25d86dbe28265b9ae9
/src/controller/root.py
ad743b0306809f096801f9a78c58c38f1ec0e551
[]
no_license
umfoida5/comp4350group5
https://github.com/umfoida5/comp4350group5
83d9427396e18b20e990a23c401dfeec17a9848d
6408d479046a96d32b3d157552c4e1321da26497
refs/heads/master
2021-01-20T09:32:32.445368
2013-03-26T22:55:18
2013-03-26T22:55:18
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import cherrypy, httplib from modules.template import env from activities import Activities from events import Events from stats import Stats from profiles import Profiles from achievements import Achievements from goals import Goals from login import Login from health import HealthController class Root: activities = Activities() events = Events() stats = Stats() profiles = Profiles() achievements = Achievements() goals = Goals() login = Login() health = HealthController() @cherrypy.expose def index(self): tmpl = env.get_template('index.html') return tmpl.render() @cherrypy.expose def about(self): tmpl = env.get_template('about.html') return tmpl.render() @cherrypy.expose def get_current_username(self): if cherrypy.session.has_key('username'): return cherrypy.session.get('username') else: return ""
UTF-8
Python
false
false
2,013
19,301,583,053,780
a7037d966c58eccea54151ec2db2d90ce36e694b
2b40af4679d3f7258a35d183565b002bc8865c82
/demp.py
ef7be35789902c333f080753df1e0f08115f095f
[]
no_license
arvind-python-corner/sub-class-or-inherit-thread-class
https://github.com/arvind-python-corner/sub-class-or-inherit-thread-class
2b8ad8fb24806497c4e536f958a35c1b90d01ee5
28dde3aad65fe54eb28b3d673a712862029d175d
refs/heads/master
2016-04-04T10:50:32.150547
2014-12-15T13:03:54
2014-12-15T13:03:54
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from threading import Thread class Runner(Thread): def __init__(self, count): super(Runner, self).__init__() self.count = count print 'runner init, count = %d' % self.count def run(self): while self.count: print 'runner run, count = %d' % self.count self.count -= 1 Runner(3).start()
UTF-8
Python
false
false
2,014
12,223,476,967,039
4d3fe39d48faa674c0b89f07ac439308b0ed637c
1e60239e9e7abc95121a04da4ea4b1926509571e
/bot.py
428681b2bcd4d17eab37becade4673ad2dcc5b0e
[]
no_license
JJJollyjim/bad-ball-ban-bot
https://github.com/JJJollyjim/bad-ball-ban-bot
a312cbf1be5c12dc64323d6ba7e97b3f7338c797
a66736970199f5847f82286bafe65dd95036a33e
refs/heads/master
2020-05-17T08:53:14.745041
2014-02-12T03:48:05
2014-02-12T03:48:05
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import time import praw import re import os from pprint import pprint re_ban = re.compile(r'ban', re.IGNORECASE) r = praw.Reddit(user_agent="BadBallBanBot/0.1 by /u/JJJollyjim") if not os.path.exists('.password'): open('.password', 'w').close() if not os.path.exists('.place_holder'): open('.place_holder', 'w').close() f = open(".password", "r") password = f.read(100) f.close() r.login("bad_ball_ban_bot", password) subreddit = r.get_subreddit("BadBallBanBot") def set_place_holder(new_ph): f = open(".place_holder", "w") f.write(new_ph) f.close() def get_place_holder(): f = open(".place_holder", "r") ph = f.read(16) f.close() return ph print(" - Current placeholder: {0}".format(get_place_holder())) def handle_ratelimit(func, *args, **kwargs): while True: try: return func(*args, **kwargs) except praw.errors.RateLimitExceeded as error: print ' - Sleeping for %d seconds due for rate limiting' % error.sleep_time time.sleep(error.sleep_time) while True: if get_place_holder() != "": posts = list(subreddit.get_new(place_holder=get_place_holder())) else: posts = list(subreddit.get_new()) posts.pop() for post in reversed(posts): # Decide if the post is a ban appeal is_ban_appeal = re.search(re_ban, post.title) and post.is_self if is_ban_appeal: # Make sure we haven't already posted on it found_bbbb = False for c in post.comments: if str(c.author) == "bad_ball_ban_bot": found_bbbb = True break if found_bbbb: print("BBBB| {0}".format(post.title.encode('unicode-escape'))) set_place_holder(post.id) break print("BAN | {0}".format(post.title.encode('unicode-escape'))) # Comment on the post comment = handle_ratelimit(post.add_comment, ("Hi there!\n\n" "This bot has detected that you have posted a ban appeal. 
\n\n" "If this is correct, here are a few things you should know: \n\n" "* You have to be reported by 8 separate people within 24 hours to get banned, meaning you definitely did *something* wrong \n" "* Trolls succeeding in getting someone banned is __*very* rare__ \n" "* Bans are only temporary. The first one lasts 1 hour, with 4 hours added for each subsequent ban (1, 5, 9, etc...) \n" "* Here are the reasons people may have reported you, along with some helpful suggestions:\n" " - Offensive or spammy chat: don't be mean to the other team or your teammates \n" " - Offensive username: visit your profile page and change your name away from \"NiggerFagBall\" \n" " - AFK too much: close the game if you need to do something IRL \n" " - Working against own team: if you make a mistake which hurts your team, say 'sorry' or 'mb' (stands for 'my bad') in team chat. \n" "* Bans are based on IP addresses, meaning that if somebody at your school, house, or workplace gets banned then everyone else there will too \n\n" "--- \n\n" "Don't like words? [Here's a friendly infographic!](http://i.imgur.com/EgZ24UK.png) \n\n" "--- \n\n" "__Note:__ this bot is in beta. It may have decided that this post is a ban appeal by mistake. If so, please downvote this comment.\n\n" "Click [here](http://www.reddit.com/message/compose/?to=JJJollyjim) to message the developer \n\n" )) print(" - Comment succeeded!") else: print(" | {0}".format(post.title.encode('unicode-escape'))) set_place_holder(post.id) time.sleep(5)
UTF-8
Python
false
false
2,014
15,083,925,161,286
f5be04a98d3bea696ddaaa84bc43a960b8d707f5
b224c7413b7e6a1cb78dad60b4899674fefe8269
/openforce_default/product/product.py
dff9785b87c8b99da7271e39ab88bc0ac972b298
[]
no_license
alessandrocamilli/7-openforce-addons
https://github.com/alessandrocamilli/7-openforce-addons
2ee00b712538a8eb433d0ce0c63cd12a861548e6
78fc164679b690bcf84866987266838de134bc2f
refs/heads/master
2016-08-03T11:58:12.730337
2014-07-03T10:29:56
2014-07-03T10:29:56
21,004,298
1
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# -*- coding: utf-8 -*- ############################################################################## # # Author: Alessandro Camilli ([email protected]) # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published # by the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import time from osv import fields, orm class product_product(orm.Model): _inherit = "product.product" def default_get(self, cr, uid, fields, context=None): res = super(product_product, self).default_get(cr, uid, fields, context=context) res.update({'type': 'product'}) res.update({'procure_method': 'make_to_order'}) res.update({'supply_method': 'buy'}) res.update({'sale_delay': 2}) return res def create(self, cr, uid, vals, *args, **kwargs): if 'default_code' in vals and not vals['default_code']: vals['default_code'] = self.pool.get('ir.sequence').get(cr, uid, 'product.product.default_code') res_id = super(product_product,self).create(cr, uid, vals, *args, **kwargs) return res_id
UTF-8
Python
false
false
2,014
9,801,115,411,942
c8cf51063e930e0be86e0a5e9edd702b7173049a
b39d9ef9175077ac6f03b66d97b073d85b6bc4d0
/Divisun_tablet_SmPC.py
5728a77c70e64d0513e6a20fd01e28acc87385d8
[]
no_license
urudaro/data-ue
https://github.com/urudaro/data-ue
2d840fdce8ba7e759b5551cb3ee277d046464fe0
176c57533b66754ee05a96a7429c3e610188e4aa
refs/heads/master
2021-01-22T12:02:16.931087
2013-07-16T14:05:41
2013-07-16T14:05:41
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
{'_data': [['Uncommon', [['Metabolism', u'Hyperkalcemi och hyperkalciuri']]], ['Rare', [['Skin', u'Pruritus, hudutslag och urtikaria.']]], ['Unknown', [['Immune system', u'\xd6verk\xe4nslighetsreaktioner som angio\xf6dem eller laryngeal\xf6dem.']]]], '_pages': [3, 3], u'_rank': 3, u'_type': u'LSFU'}
UTF-8
Python
false
false
2,013
7,206,955,151,816
26299410c81ca59e90f77fb8ee190a83104c742e
c6ac9ba554b8eab8f285c7e6b8b743706a33733a
/postu/practise/views.py
a6be1ebb81b3135027fc2aee1123d7c8e564d326
[]
no_license
feefk/postu
https://github.com/feefk/postu
bc3b55c28bd521eb68909c39f43bf6ad1b1ef2dd
12fd2705d6f603257edc18fc855f14cf52e38f54
refs/heads/master
2021-01-10T18:54:21.893209
2014-08-28T11:12:29
2014-08-28T11:12:29
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# Create your views here. # -*- coding: utf-8 -*- from django.shortcuts import render_to_response from django.http import HttpResponse from wrapper import * import Image import os.path def grid(request): return render_response(request, "practise/gridTest.html") def bootStrap(request): return render_response(request, "practise/bootstrap.html") def _resized(image, new_w, new_h): img_w , img_h = image.size target_rate = new_w / new_h img_rate = img_w / img_h if ( img_rate > target_rate ): thumb_w = new_w thumb_h = new_w / img_rate else: thumb_h = new_h thumb_w = new_h * img_rate print thumb_w, thumb_h return image.resize((thumb_w, thumb_h), Image.BILINEAR) def upload_img(request): return render_response(request, "practise/upload_img.html") def upload(request): reqfile = request.FILES['file'] new_w = 600 new_h = 400 image = Image.open(reqfile) img_w , img_h = image.size #print img_w , img_h #image.thumbnail((128,128),Image.ANTIALIAS) thumb = _resized(image, new_w, new_h) projectRoot = os.path.abspath('.').replace('\\','/') thumb.save(projectRoot + "/static/thumb/1.jpeg","JPEG") return render_response(request, "practise/upload_img.html")
UTF-8
Python
false
false
2,014
15,006,615,758,848
03650f44dd24d47beb81f99130b674aed8156406
0bbbada73cc640ba0691b9d6cedbbb97d938eeb1
/gui-qt/MainView.py
88a37eb4df30c312bdf4e068dca0eea728b7ce33
[]
no_license
autoscatto/Disasma
https://github.com/autoscatto/Disasma
7600b904d35fbf94781f4ebe5ab4270bdf8f9f68
2feb67dafaa4fb2e8b5a73954f272e941bed4509
refs/heads/master
2021-01-21T08:02:02.658801
2013-08-24T13:06:15
2013-08-24T13:06:15
6,725,722
1
1
null
null
null
null
null
null
null
null
null
null
null
null
null
from PyQt4 import QtGui, QtCore, QtWebKit
import sys, os
import resources

# Make the project root (the parent directory) importable, then drop the
# temporary name so it does not leak into the module namespace.
os.chdir(os.path.dirname(__file__))
path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
if not path in sys.path:
    sys.path.insert(1, path)
del path

from util import *
from process import *


class ANALyzer(QtCore.QObject):
    """Renders an HTML overview page listing every section of a process.

    The page is stored in ``self.stuff``; ``contentChanged`` is emitted
    when a fresh page is ready for display.
    """

    contentChanged = QtCore.pyqtSignal()

    def __init__(self, process):
        # BUG FIX: the original called QtCore.QThread.__init__(self), but
        # this class derives from QObject, not QThread — initialise the
        # actual base class instead.
        QtCore.QObject.__init__(self)
        self.process = process

    def setMemoryView(self, vmv):
        # vmv: VirtualMemoryView rendering the memory strip atop each page.
        self.vmv = vmv

    def show(self):
        """Build the section-overview HTML and emit ``contentChanged``."""
        process = self.process
        preinizzio = '<!DOCTYPE HTML>\n'
        inizzio = '<html><head>' + \
                  '<link rel="stylesheet" type="text/css" href="style.css"/>\n' + \
                  '</head><body>'
        self.stuff = preinizzio + inizzio
        self.stuff += self.vmv.getHTML()
        self.stuff += """ <div class="content" style='-moz-user-select: none; -webkit-user-select: none; -ms-user-select:none; user-select:none;' unselectable='on' onselectstart='return false;' onmousedown='return false;'> """
        # One clickable row per section; the onclick handler calls into the
        # SectionViewer exposed to page JavaScript as "sv".
        self.stuff += ''.join(
            '<div class="sect_row" onclick="sv.setAddr(%d)"><div class="section">0x%08x-0x%08x: %s section</div></div><br/>\n' %
            (r[0], r[0], r[1], s.name) for (r, s) in process.sections.items())
        self.stuff += '</div></body></html>'
        self.contentChanged.emit()


class SectionViewer(QtCore.QObject):
    """Shows a single process section and lets JavaScript switch sections
    or reinterpret address ranges as code/data.

    The rendered page is stored in ``self.stuff``; ``contentReady`` is
    emitted when it can be displayed.
    """

    contentReady = QtCore.pyqtSignal()

    def __init__(self, proc, addr = 0):
        QtCore.QObject.__init__(self)
        self.proc = proc
        self.sect = None
        # addr == 0 means "no section selected yet".
        if addr != 0:
            self.sect = self.proc.sections[addr]

    def setMemoryView(self, vmv):
        self.vmv = vmv

    @QtCore.pyqtSlot(str, int, int)
    def viewAs(self, mode, start, end):
        """Mark [start, end) of the current section as data ('D') or code.

        NOTE(review): only mode == 'D' selects the data view; the
        commented-out fragment code suggests 'code'/'data' strings were once
        expected — confirm what the page JavaScript actually passes.
        """
        viewType = 'D' if mode == 'D' else 'C'
        self.sect.cdmap[start-self.sect.start:end-self.sect.start] = viewType
        self.show()

    @QtCore.pyqtSlot(int)
    def setAddr(self, addr):
        # Switch to the section starting at `addr` and re-render.
        self.sect = self.proc.sections[addr]
        self.show()

    def show(self):
        """Build the HTML page for the current section and emit ``contentReady``."""
        out = []
        out.append('<!DOCTYPE HTML>\n')
        out.append('<html>\n')
        out.append(' <head>\n')
        out.append(' <link rel="stylesheet" href="style.css" />\n')
        out.append(' </head>\n')
        out.append(' <body>\n')
        out.append(' <div class="content">\n')
        out.append(self.vmv.getHTML())
        out.append(self.sect.getHTML())
        # end content
        out.append('</body>\n')
        out.append('</html>')
        self.stuff = ''.join(out)
        self.contentReady.emit()


class VirtualMemoryView(object):
    """Computes and renders a horizontal strip visualising the process's
    virtual memory layout: one coloured box per section, plus grey boxes
    for the gaps between sections.
    """

    def __init__(self, process):
        tmp = []
        self.min = -1
        self.max = -1
        # (name, width-in-pixels, start-address) triples, filled below.
        self.sizes = []
        self.pixelWidth = 600.0
        last = -1
        for (interval, section) in process.sections.items():
            # just to avoid elf .strtab and .symtab, clearly it
            # has to be fixed
            if self.min == -1 and interval[0] > 2000:
                self.min = interval[0]
            self.max = interval[1]
            if last != -1:
                # Record the hole between the previous section's end and
                # this section's start.
                if last != interval[0] and last > 2000:
                    tmp.append(('gap', interval[0]-last, interval[0]))
            last = interval[1]
            if interval[0] > 2000:
                tmp.append((section.name, interval[1]-interval[0], interval[0]))
        # Scale every extent so the whole range maps onto pixelWidth pixels.
        rangetot = self.max-self.min
        for (name, size, start) in tmp:
            self.sizes.append((name, int((float(size)/float(rangetot))*self.pixelWidth), start))

    def getHTML(self):
        """Return the memory-strip markup; section boxes are clickable."""
        ret = ''
        colors = {
            'data': '#3333bb',
            'text': '#bb3333',
            'gap': '#111111'
        }
        ret += '<div id="virtual-memory">\n'
        for (name, size, start) in self.sizes:
            onclick = "sv.setAddr(%d)" % start
            if name == '.text' or ('__TEXT' in name):
                x = 'text'
            elif name == 'gap':
                x = 'gap'
                onclick = ""          # gaps are not clickable
            else:
                x = 'data'
            ret += '<div onclick="%s" class="vmview" style="background-color: %s; width: %spx;"></div>\n' % \
                (onclick, colors[x], size)
        ret += '</div>\n'
        ret += '<div style="height: 50px; display: block; clear: both"></div>\n'
        return ret


class MainView(QtGui.QMainWindow):
    """Main application window: toolbar, central web view, CPU-status dock."""

    def __init__(self):
        super(MainView, self).__init__()
        self.initUI()

    def showLoading(self):
        self.webView.setHtml('<h1>Loading...</h1>', QtCore.QUrl('qrc:/'))

    def showHtml(self):
        # Slot for ANALyzer.contentChanged: display the overview page and
        # re-expose the SectionViewer to page JavaScript (setHtml resets
        # the JS window object).
        stuff = self.anal.stuff
        self.webView.setHtml(stuff, QtCore.QUrl('qrc:/'))
        self.webView.page().mainFrame().addToJavaScriptWindowObject("sv", self.sectView)

    def showView(self):
        # Slot for SectionViewer.contentReady: display the section page.
        stuff = self.sectView.stuff
        self.webView.setHtml(stuff, QtCore.QUrl('qrc:/'))
        self.webView.page().mainFrame().addToJavaScriptWindowObject("sv", self.sectView)

    def showBackground(self):
        # Empty styled page shown before any file is opened.
        self.webView.setHtml('<html><head><link rel="stylesheet" href="style.css"/></head><body></body></html>',
                             QtCore.QUrl('qrc:/'))

    def viewSections(self):
        self.anal.show()

    def openStuff(self):
        """Prompt for a file, load it, and wire up the viewer objects."""
        fileName = QtGui.QFileDialog.getOpenFileName(
            self, 'Open File', '', 'Files (*)')
        if fileName:
            self.showLoading()
            self.process = loader.SPUTA_FUORI_IL_MOSTO(fileName)
            self.virtualMemoryView = VirtualMemoryView(self.process)
            self.anal = ANALyzer(self.process)
            self.anal.setMemoryView(self.virtualMemoryView)
            self.anal.contentChanged.connect(self.showHtml)
            self.sectView = SectionViewer(self.process)
            self.sectView.setMemoryView(self.virtualMemoryView)
            self.sectView.contentReady.connect(self.showView)
            self.viewSections()

    def cpuStatus(self):
        # Toggle the CPU-status dock widget.
        if self.cpustatus.isHidden():
            self.cpustatus.show()
        else:
            self.cpustatus.hide()

    def _actions(self):
        # (label, icon, shortcut, handler); a row of Nones is a separator.
        ret = [
            ('Open', ':icons/open.png', 'Ctrl+O', self.openStuff),
            (None, None, None, None),
            ('View Sections', ':icons/sections.png', 'Ctrl+Alt+S', self.viewSections),
            ('View CPU status', ':icons/cpu.png', 'Ctrl+Q', self.cpuStatus)
        ]
        return ret

    def _setupToolbar(self, toolbar):
        alist = self._actions()
        for (name, icon, shortcut, triggered) in alist:
            if name == None:
                toolbar.addSeparator()
            else:
                a = QtGui.QAction(QtGui.QIcon(icon), name, self)
                a.setShortcut(shortcut)
                a.triggered.connect(triggered)
                toolbar.addAction(a)

    def initUI(self):
        """Create the toolbar, web view and (hidden) CPU-status dock."""
        self.toolbar = self.addToolBar('Stuff')
        self._setupToolbar(self.toolbar)
        self.setGeometry(100, 100, 800, 600)
        self.setWindowTitle('QTDisassa!')
        self.webView = QtWebKit.QWebView(self)
        self.setCentralWidget(self.webView)
        self.cpustatus = QtGui.QDockWidget("CPU Status", self)
        self.cpustatus.setAllowedAreas(QtCore.Qt.BottomDockWidgetArea | QtCore.Qt.LeftDockWidgetArea)
        self.addDockWidget(QtCore.Qt.BottomDockWidgetArea, self.cpustatus)
        self.cpustatus.hide()
        self.showBackground()
        self.show()


def main():
    app = QtGui.QApplication(sys.argv)
    mainView = MainView()
    sys.exit(app.exec_())

if __name__ == '__main__':
    main()
UTF-8
Python
false
false
2,013
4,183,298,148,073
03a1b7f38e7b53a0ae564440d73626b78bdb6b07
a373e287fbbbf78ebd7187c3a958926d6af15d9b
/sage/combinat/sf/homogeneous.py
3a6b423f7df3a16c7b9c5acdbf9efb7ad9ee3d49
[]
no_license
thalespaiva/sagelib
https://github.com/thalespaiva/sagelib
a1b9f57869b61128476eb8dda6e62558ff2eefbc
fd0c7c46e6a2da4b84df582e0da0333ce5cf79d9
refs/heads/master
2021-01-20T03:23:02.550743
2012-07-09T11:36:20
2012-07-09T11:36:20
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
""" Homogenous symmetric functions """ #***************************************************************************** # Copyright (C) 2007 Mike Hansen <[email protected]>, # # Distributed under the terms of the GNU General Public License (GPL) # # This code is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # The full text of the GPL is available at: # # http://www.gnu.org/licenses/ #***************************************************************************** #################################### # # # Homogeneous Symmetric Functions # # # #################################### import multiplicative, sfa, classical class SymmetricFunctionAlgebra_homogeneous(multiplicative.SymmetricFunctionAlgebra_multiplicative): def __init__(self, R): """ TESTS:: sage: h = SFAHomogeneous(QQ) sage: h == loads(dumps(h)) True """ classical.SymmetricFunctionAlgebra_classical.__init__(self, R, "homogeneous", 'h') def dual_basis(self, scalar=None, prefix=None): """ The dual basis of the homogeneous basis with respect to the standard scalar product is the monomial basis. EXAMPLES:: sage: m = SFAMonomial(QQ) sage: h = SFAHomogeneous(QQ) sage: h.dual_basis() == m True """ if scalar is None: return sfa.SFAMonomial(self.base_ring()) else: return sfa.SymmetricFunctionAlgebra(self, scalar, prefix=prefix) class Element(classical.SymmetricFunctionAlgebra_classical.Element): def omega(self): """ Returns the image of self under the Frobenius / omega automorphism. 
EXAMPLES:: sage: h = SFAHomogeneous(QQ) sage: a = h([2,1]); a h[2, 1] sage: a.omega() h[1, 1, 1] - h[2, 1] sage: e = SFAElementary(QQ) sage: e(h([2,1]).omega()) e[2, 1] """ e = sfa.SFAElementary(self.parent().base_ring()) return self.parent()(e._from_element(self)) def expand(self, n, alphabet='x'): """ Expands the symmetric function as a symmetric polynomial in n variables. EXAMPLES:: sage: h = SFAHomogeneous(QQ) sage: h([3]).expand(2) x0^3 + x0^2*x1 + x0*x1^2 + x1^3 sage: h([1,1,1]).expand(2) x0^3 + 3*x0^2*x1 + 3*x0*x1^2 + x1^3 sage: h([2,1]).expand(3) x0^3 + 2*x0^2*x1 + 2*x0*x1^2 + x1^3 + 2*x0^2*x2 + 3*x0*x1*x2 + 2*x1^2*x2 + 2*x0*x2^2 + 2*x1*x2^2 + x2^3 sage: h([3]).expand(2,alphabet='y') y0^3 + y0^2*y1 + y0*y1^2 + y1^3 sage: h([3]).expand(2,alphabet='x,y') x^3 + x^2*y + x*y^2 + y^3 sage: h([3]).expand(3,alphabet='x,y,z') x^3 + x^2*y + x*y^2 + y^3 + x^2*z + x*y*z + y^2*z + x*z^2 + y*z^2 + z^3 """ condition = lambda part: False return self._expand(condition, n, alphabet) # Backward compatibility for unpickling from sage.structure.sage_object import register_unpickle_override register_unpickle_override('sage.combinat.sf.homogeneous', 'SymmetricFunctionAlgebraElement_homogeneous', SymmetricFunctionAlgebra_homogeneous.Element)
UTF-8
Python
false
false
2,012
12,489,764,914,902
944da320203ffea97b7c6c7235deafdd25444738
9592dea816fdbf84992b031ee09eaf87419d0810
/recursion_SumTo.py
13597571748975a25a1a826e4305ddc45861ee61
[]
no_license
MattAnderson16/Algorithms
https://github.com/MattAnderson16/Algorithms
5300afd3adf9bfd19237b435828bf6493f7808a5
57b355e0a7f9db5433c4f6b7ffed39400d1b7c21
refs/heads/master
2020-06-01T14:08:36.317254
2014-10-20T15:06:58
2014-10-20T15:06:58
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
def SumTo(n):
    """Return the sum 1 + 2 + ... + n, computed recursively.

    :param n: a positive integer
    :return: the n-th triangular number
    :raises ValueError: if ``n`` is less than 1.  (The original had no
        guard, so any ``n <= 0`` recursed past the base case until the
        interpreter raised ``RecursionError``.)
    """
    if n < 1:
        raise ValueError("n must be a positive integer")
    if n == 1:
        return 1
    return n + SumTo(n - 1)


# Guard the interactive prompt so importing this module (e.g. for tests)
# does not block on input().
if __name__ == "__main__":
    n = int(input("Please enter a positive integer value >> "))
    result = SumTo(n)
    print(result)
UTF-8
Python
false
false
2,014
17,248,588,663,249
fa23fb72695501c5f39e627e7b61dafbe46f68b5
1a554fcb0bdb08a58b90bba6e14c7710fd48a26a
/uis_r_us/tests.py
a8b5cae1af6c4e52c33cfae9b01b226a66f18a77
[]
no_license
mvpdev/nmis
https://github.com/mvpdev/nmis
68c304d8657e587e4225231b20b276ffec3bf584
c85c0b4996b50329dd078845ccc0ff8804ae9760
refs/heads/master
2020-05-20T00:50:37.654729
2011-08-19T19:46:12
2011-08-19T19:46:12
1,102,934
2
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from django.test import TestCase
from uis_r_us.views import get_nav_zones, get_nav_zones_inefficient


class TestLgaList(TestCase):
    """Verifies the efficient and inefficient nav-zone builders agree."""

    fixtures = ['districts.json']

    def test_nav_zones(self):
        # The districts fixture defines 6 zones.
        nav_zones = get_nav_zones()
        self.assertEqual(len(nav_zones), 6)

    def test_nav_zones2(self):
        nav_zones2 = get_nav_zones_inefficient()
        self.assertEqual(len(nav_zones2), 6)

    def test_nav_zone_equality(self):
        """Both implementations must produce identical zone/state/LGA data."""
        nzs = [get_nav_zones(), get_nav_zones_inefficient()]

        def get_names(z):
            # BUG FIX: the original omitted `return`, so both sides of the
            # assertion were None and it passed vacuously.
            return [n['name'] for n in z]

        self.assertEqual(*[get_names(nz) for nz in nzs])

        def state_names(z):
            return [s['name'] for s in z['states']]

        # BUG FIX: the original iterated `range(0, len(nz))`, relying on
        # `nz` leaking out of the list comprehension above (Python 2
        # scoping).  Use the first result's zone count explicitly.
        for nzi in range(len(nzs[0])):
            self.assertEqual(*[state_names(nz[nzi]) for nz in nzs])

        def ordered_lga_slugs(z):
            lga_slugs = []
            for s in z['states']:
                for lga in s['lgas']:
                    lga_slugs.append(lga['unique_slug'])
            return lga_slugs

        for nzi in range(len(nzs[0])):
            self.assertEqual(*[ordered_lga_slugs(nz[nzi]) for nz in nzs])
UTF-8
Python
false
false
2,011
4,028,679,345,171
be6f603b87db51fde408bbd0e4d2fe8578f9c8a0
4a2bfc2cce097eaf3147ebb39e89a70f4fc7312f
/bundle/systemTests/moduleSuites/moduleEndToEndSuites/src/main/resources/scripts/testOISearch.py
74cc884dd27327c781eebc9f6612cc8a28249b74
[]
no_license
piyush76/EMS
https://github.com/piyush76/EMS
24da7f9caf5611bec80a095dc62c5bd6c7c41ea4
12320744e1cb6c492caba6d766056eaef2ade096
refs/heads/master
2021-01-25T03:54:27.575584
2014-05-08T20:37:37
2014-05-08T20:41:17
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# -*- coding: utf-8 -*- import os,sys,time,socket,requests from com.m1.ems.mgmt import ManagementContainer from com.m1.ems.mgmt import SearchConstraint from com.m1.util.mgmt import SearchConstraintOperator from com.m1.ems.search import IndexSearchConstraint from com.m1.ems.search.solr import SolrQueryBuilder from com.m1.ems.search import UserQueryBuilder from com.m1.ems.search.IIndexSearchManager import CallerApp from com.m1.ems.mgmt import ICustomerManager from com.m1.ems.mgmt import Capabilities from com.m1.util.ender import Service from testUtils import * from com.m1.ems.mgmt.activemailbox import IReviewerGroupManager from com.m1.ems.mgmt import SavedUserSet from com.m1.ems.mgmt.activemailbox import ReviewerGroup from com.m1.ems.mgmt import InternalUserSets # wait until messages have been indexed... def waitForindexing(mc,custid,count): print 'waitForindexing()' retries = 30 while retries > 0: found = searchCount(mc,custid,'','','','','','any',None) print 'found',found if found > count: print 'failing...' 
return False; if found == count: print 'success - ending wait' return True print 'sleeping 30 seconds' time.sleep(30) retries = retries - 1 print 'Failed to find correct number of messages' print 'found',found,'expected',count raise Exception('Failed to find messages in time') return False # run a search and report the count def searchCount(mc,custid,keywords,sender,receiver,subject,attachment,language,attach_scope,rg = None, msgMode = False): global highpass global highfail sm = mc.getIndexSearchManager() isc = IndexSearchConstraint(custid,None) isc.constrainByLanguage(language) if rg is not None : isc.constrainByReviewerGroup(rg) if msgMode : isc.queryMessages(msgMode) qb = SolrQueryBuilder(custid); qb.applyLanguage(language) qb.applyAttachmentScope(attach_scope) qb.applyDefaultSearch(keywords,False,False) qb.applyFileName(attachment,None) qb.applyRecipients(receiver,False) qb.applyFrom(sender) qb.applySubject(subject) print 'generatedQuery=',qb.getQuery() sr = sm.search(qb.getQuery(),isc,None,CallerApp.REVIEWER_SEARCH) for doc in sr.documents(): if True != msgMode : if '' != sender : if 0 <= doc.getSender().find('<b>') : highpass = highpass + 1 print 'highlight passed' else : highfail = highfail +1 print 'highlight failed' print '\t',doc.getSender() if '' != receiver : recip = '' r1 = doc.getString('mailto') if r1 is not None : recip = recip + r1 r2 = doc.getString('mailcc') if r1 is not None : recip = recip + r1 if 0 <= recip.find('<b>'): highpass = highpass + 1 print 'highlight passed' else : highfail = highfail +1 print 'highlight failed' print '\t', recip if '' != subject : if 0 <= doc.getSubject().find('<b>') : highpass = highpass + 1 print 'highlight passed' else : highfail = highfail +1 print 'highlight failed' print '\t',doc.getSubject() if '' != attachment : att = doc.getString("attachedfiles") if att is None: att = doc.getString("filename") if att is not None : if 0 <= att.find('<b>') : highpass = highpass + 1 print 'highlight passed' else : highfail = 
highfail +1 print 'highlight failed' print '\t',att return sr.getDocCount() def searcher(mc,custid,keywords,sender,receiver,subject,attachment,language, attach_scope): sm = mc.getIndexSearchManager() isc = IndexSearchConstraint(custid,None) isc.constrainByLanguage(language) qb = SolrQueryBuilder(custid); qb.applyLanguage(language) qb.applyAttachmentScope(attach_scope) qb.applyDefaultSearch(keywords,False,False) qb.applyFileName(attachment,None) qb.applyRecipients(receiver,False) qb.applyFrom(sender) qb.applySubject(subject) sr = sm.search(qb.getQuery(),isc,None,CallerApp.REVIEWER_SEARCH) return sr def testPartial(expected,mc,custid,keywords,sender,receiver,subject,attachment,language, attach_scope): global failed_count global ok_count print 'custid="'+str(custid)+'"' print 'keywords="'+keywords+'"' print 'sender="'+sender+'"' print 'receivers="'+receiver+'"' print 'subject="'+subject+'"' print 'attachment="'+attachment+'"' print 'language="'+language+'"' print 'attachment_scope=',attach_scope sr = searcher(mc,custid,keywords,sender,receiver,subject,attachment,language, attach_scope) print 'expected =',expected,'\t found =',sr.isPartial() if sr.isPartial() != expected: print 'FAILED' failed_count = failed_count + 1 else: print 'PASSED' ok_count = ok_count + 1 # run a test and count passes and failures def test(expected,mc,custid,keywords,sender,receiver,subject,attachment,language,attach_scope,rg = None, msgMode = False): global failed_count global ok_count print 'custid="'+str(custid)+'"' print 'keywords="'+repr(keywords)+'"' print 'sender="'+sender+'"' print 'receivers="'+receiver+'"' print 'subject="'+subject+'"' print 'attachment="'+attachment+'"' print 'language="'+language+'"' print 'attachment_scope=',attach_scope if rg is not None : print 'reviewerGroup ='+rg.toString() print 'message_mode=',msgMode c = searchCount(mc,custid,keywords,sender,receiver,subject,attachment,language,attach_scope,rg, msgMode) print 'expected =',expected,'\t found =',c if c != 
expected: print 'FAILED' failed_count = failed_count + 1 else: print 'PASSED' ok_count = ok_count + 1 # run a test in both message and document mode and record successes and failures def testAllModes(expected,mc,custid,keywords,sender,receiver,subject,attachment,language,attach_scope,rg = None, messageCount = None): global failed_count global ok_count messageExpected = expected if messageCount is not None : messageExpected = messageCount test(expected,mc,custid,keywords,sender,receiver,subject,attachment,language,attach_scope,rg, False) test(messageExpected,mc,custid,keywords,sender,receiver,subject,attachment,language,attach_scope,rg, True) def partialTest(mc,custid): global ok_count global failed_count svc = Service("ems-solr") try: languages = ['en', 'any'] for language in languages: attach_scope = None # find keywords in body testPartial(False,mc,custid,'natural','','','','',language, attach_scope) im = mc.getIslandManager() cm = mc.getCustomerManager() parms = im.getIsland(cm.getCustomer(int(custid)).getSearchIslandId()).getSearchParms() print 'search parms',parms # this should no effect on search svc.invoke('stop','archive-7') print 'archive-7 stopped' languages = ['en', 'any'] for language in languages: attach_scope = None # find keywords in body testPartial(False,mc,custid,'natural','','','','',language, attach_scope) svc.invoke('start','archive-7') print 'archive-7 started' # this should cause search URL to change svc.invoke('stop','archive-8') print 'archive-8 stopped' parms2 = im.getIsland(cm.getCustomer(int(custid)).getSearchIslandId()).getSearchParms() tries = 10 while ((-1 == parms2.find('archive-7')) or (-1 == parms2.find('isPartial=0'))) and tries > 0: print 'waiting for search parms to change...',parms2 time.sleep(30) parms2 = im.getIsland(cm.getCustomer(int(custid)).getSearchIslandId()).getSearchParms() tries = tries - 1 if (-1 == parms2.find('archive-7')) or (-1 == parms2.find('isPartial=0')): print 'FAILED because distributed search parms are 
incorrect:',parms2 failed_count = failed_count + 1 return else: print 'distributed search parms changed to',parms2 languages = ['en', 'any'] for language in languages: attach_scope = None # find keywords in body testPartial(False,mc,custid,'natural','','','','',language, attach_scope) parms = parms2 # search URL should change and isPartial should be set and detected svc.invoke('stop','archive-7') print 'archive-7 and archive-8 stopped' parms2 = im.getIsland(cm.getCustomer(int(custid)).getSearchIslandId()).getSearchParms() tries = 10 while -1 == parms2.find('isPartial=1') and tries > 0: # wait for DS manager to notice archive-7,archive-8 is no longer there print 'waiting for search parms to change...',parms2 time.sleep(30) parms2 = im.getIsland(cm.getCustomer(int(custid)).getSearchIslandId()).getSearchParms() tries = tries - 1 if -1 == parms2.find('isPartial=1'): print 'FAILED because distributed search parms are incorrect:',parms failed_count = failed_count + 1 return else: print 'distributed search parms changed to',parms2 languages = ['en', 'any'] for language in languages: attach_scope = None # find keywords in body testPartial(True,mc,custid,'natural','','','','',language, attach_scope) finally: svc.invoke('start',['archive-5','archive-6','archive-7','archive-8']) def extractTest(mc,custid): global ok_count global failed_count # in the right languages languages = ['en', 'any'] for language in languages: attach_scope = None # find keywords in body test(1,mc,custid,'"if you have queries that contain these words"','','','','sign.zip',language, attach_scope) test(2,mc,custid,'','','','','sign.zip',language, attach_scope) def extraScopeTest(mc, custid): global ok_count global failed_count rgm = None rg = None try: rgm = mc.getReviewerGroupManager() mailboxScope = InternalUserSets.getAllUsersSet(custid) mc.getUserManager().saveUserSet(mailboxScope) reviewers = InternalUserSets.getAllUsersSet(custid) mc.getUserManager().saveUserSet(reviewers) rg = 
rgm.getReviewerGroup(custid, "dummyGroup99") if rg is not None: print 'removing pre-existing reviewergroup' rgm.deleteReviewerGroup(custid,rg.getGroupID()) rg = rgm.createReviewerGroup(custid, "dummyGroup99", reviewers, mailboxScope, 'mailfrom:string("irfan")') if rg is not None : print 'created rg='+rg.toString() rg1 = rgm.getReviewerGroup(custid, "dummyGroup99") if 'mailfrom:string("irfan")' != rg1.getExtraScope() : print 'FAIL - extrascope not correctly set for rg' else: print 'new reviewerGroup has extrascope =' + rg1.getExtraScope() rg = rg1 else : print 'failed to create reviewerGroup' test(2,mc,custid,'','','','','','any',None,rg) test(1,mc,custid,'','','','','','any',True,rg) test(1,mc,custid,'','','','','','any',False,rg) test(2,mc,custid,'','','','','','en',None,rg) test(1,mc,custid,'','','','','','en',True,rg) test(1,mc,custid,'','','','','','en',False,rg) test(2,mc,custid,'','','','','','ja',None,rg) test(1,mc,custid,'','','','','','ja',True,rg) test(1,mc,custid,'','','','','','ja',False,rg) rg.setExtraScope('mailsubject:"character map"') rgm.updateReviewerGroup(rg) test(2,mc,custid,'','','','','','any',None,rg) test(0,mc,custid,'','','','','','any',True,rg) test(2,mc,custid,'','','','','','any',False,rg) rg.setExtraScope('content:"koala"') rgm.updateReviewerGroup(rg) test(2,mc,custid,'','','','','','any',None,rg) test(1,mc,custid,'','','','','','any',True,rg) test(1,mc,custid,'','','','','','any',False,rg) rg.setExtraScope('emaildate:>2012-06-11') rgm.updateReviewerGroup(rg) test(6,mc,custid,'','','','','','any',None,rg) test(1,mc,custid,'','','','','','any',True,rg) test(5,mc,custid,'','','','','','any',False,rg) rg.setExtraScope('recipients:"[email protected]"') rgm.updateReviewerGroup(rg) test(8,mc,custid,'','','','','','any',None,rg) test(1,mc,custid,'','','','','','any',True,rg) test(7,mc,custid,'','','','','','any',False,rg) rg.setExtraScope('senders:"irfan"') rgm.updateReviewerGroup(rg) test(2,mc,custid,'','','','','','any',None,rg) 
test(1,mc,custid,'','','','','','any',True,rg) test(1,mc,custid,'','','','','','any',False,rg) finally: if rgm is not None: if rg is not None : print 'removing reviewergroup' rgm.deleteReviewerGroup(custid,rg.getGroupID()) else : print 'no reviewer group to clean up' else : print ' no reviewer group manager available' def getLoginAuthToken(mc, custid, emailAddr): um = mc.getUserManager() ua = um.getUser(emailAddr) oam = mc.getOutlookAccessManager() cis = oam.generateUserTokens(custid,[ua.getUserID()]) cisToken = cis[0].getAuthToken() print 'cisToken=',cisToken return cisToken def runOISQuery(cisToken, requestedItems, query = '', columns = None, sort = None, host = 'as-1.ems.labmanager.net'): cmd = 'curl -v -k https://'+host+"?authToken="+cisToken+' -H "Content-Type: text/xml" ' if requestedItems is not None : cmd += '-H "x-m1-ems-requested-items: ' + requestedItems + '" ' if columns is not None : cmd += '-H "x-m1-ems-search-columns: ' + columns + '" ' if sort is not None : cmd += '-H "x-m1-ems-sort-order: ' + sort + '" ' cmd += ' -d "" + query + ""' ############################################### # 'curl -v -k https://as-1.ems.labmanager.net/wfe/searchArchive?authToken='+cistoken+' -H "x-m1-ems-requested-items: 1 2 3 4 5 6 7 8 9" -H "x-m1-ems-search-columns: 0x0E1B" -H "x-m1-ems-sort-order:" -d "<subrestriction></subrestriction>"' import commands cmd = 'curl -s -v -k https://as-1.ems.labmanager.net/wfe/searchArchive?authToken=923-gmcxfootlrglnvgkhtlugwmpiwnsikpl -H "x-m1-ems-requested-items: 1 2 3 4 5 6" -H "Content-Type: text/xml" -d "<SubRestriction></SubRestriction>"' r = commands.getstatusoutput(cmd); output = r[1] #print output; lines = output.splitlines() outputStarted = False; contentlines = [] outputHeaders = {} for line in lines: if line.startswith('<'): # this is an output header line1 = line.split(': ',1) outputHeaders[line1[0].strip('< ')] = line1[1] outputStarted = True elif line.startswith('*') or line.startswith('>'): # input or info - troll for RC # 
look for HTTP/1.1 200 OK like response at end of a line line2 = line else : if outputStarted: # output content print line print '\n' print outputHeaders ######################################### def mainTest(mc,custid,isEdiscovery): global ok_count global failed_count loginAuthToken = getLoginAuthToken(mc, custid, "[email protected]") # in the right languages languages = ['en', 'any'] for language in languages: attach_scope = None # find keywords in body testAllModes(1,mc,custid,'natural','','','','',language, attach_scope) testAllModes(1,mc,custid,'"natural"','','','','',language, attach_scope) testAllModes(1,mc,custid,'habitat','','','','',language, attach_scope) testAllModes(1,mc,custid,'"habitat"','','','','',language, attach_scope) testAllModes(1,mc,custid,'natural habitat','','','','',language, attach_scope) if isEdiscovery: testAllModes(1,mc,custid,'"natural habitat"','','','','',language, attach_scope) testAllModes(0,mc,custid,'"naturally habitat"','','','','',language, attach_scope) testAllModes(0,mc,custid,'"natural habitats"','','','','',language, attach_scope) else: testAllModes(1,mc,custid,'"natural habitat"','','','','',language, attach_scope) testAllModes(1,mc,custid,'"naturally habitat"','','','','',language, attach_scope) testAllModes(1,mc,custid,'"natural habitats"','','','','',language, attach_scope) testAllModes(1,mc,custid,'habitat natural','','','','',language, attach_scope) testAllModes(0,mc,custid,'"habitat natural"','','','','',language, attach_scope) # test EMSDEV-9804 testAllModes(3,mc,custid,'"the"','','','','',language, attach_scope, None, 2) testAllModes(3,mc,custid,'the','','','','',language, attach_scope, None, 2) # test EMSDEV-9843 testAllModes(1,mc,custid,'"their natural habitat is in australia"','','','','',language, attach_scope) if isEdiscovery: testAllModes(0,mc,custid,'"their natural habitat are in australia"','','','','',language, attach_scope) else: testAllModes(1,mc,custid,'"their natural habitat are in 
australia"','','','','',language, attach_scope) testAllModes(1,mc,custid,'their natural habitat is in australia','','','','',language, attach_scope) # find keywords in other fields testAllModes(0,mc,custid,'ziggurat','','','','',language, attach_scope) testAllModes(3,mc,custid,'irfan','','','','',language, attach_scope, None, 2) testAllModes(2,mc,custid,'tester','','','','',language, attach_scope, None, 1) testAllModes(2,mc,custid,'australia','','','','',language, attach_scope, None, 1) testAllModes(2,mc,custid,'100','','','','',language, attach_scope, None, 1) testAllModes(2,mc,custid,'koala','','','','',language, attach_scope, None, 1) testAllModes(2,mc,custid,'jpg','','','','',language, attach_scope, None, 1) testAllModes(2,mc,custid,'.jpg','','','','',language, attach_scope, None, 1) testAllModes(2,mc,custid,'*.jpg','','','','',language, attach_scope, None, 1) testAllModes(0,mc,custid,'800','','','','',language, attach_scope) # find keywords in sender testAllModes(0,mc,custid,'','koala','','','',language, attach_scope) testAllModes(2,mc,custid,'','irfan','','','',language, attach_scope, None, 1) testAllModes(2,mc,custid,'','jabbar','','','',language, attach_scope, None, 1) # find keywords in recipient testAllModes(0,mc,custid,'','','koala','','',language, attach_scope) testAllModes(2,mc,custid,'','','tester','','',language, attach_scope, None, 1) testAllModes(16,mc,custid,'','','lab062','','',language, attach_scope, None, 11) # find keywords in subject testAllModes(0,mc,custid,'','','','koala','',language, attach_scope) testAllModes(2,mc,custid,'','','','australia','',language, attach_scope, None, 1) testAllModes(2,mc,custid,'','','','100','',language, attach_scope, None, 1) testAllModes(0,mc,custid,'','','','800','',language, attach_scope) # verify EMSDEV-10004 testAllModes(2,mc,custid,'','','','australia 100','',language, attach_scope, None, 1) testAllModes(0,mc,custid,'','','','australia habitat','',language, attach_scope) # find keywords in attachment 
testAllModes(0,mc,custid,'','','','','australia',language, attach_scope) testAllModes(2,mc,custid,'','','','','koala',language, attach_scope, None, 1) testAllModes(2,mc,custid,'','','','','jpg',language, attach_scope, None, 1) testAllModes(2,mc,custid,'','','','','.jpg',language, attach_scope, None, 1) testAllModes(2,mc,custid,'','','','','*.jpg',language, attach_scope, None, 1) attach_scope = True # find keywords in body testAllModes(0,mc,custid,'natural','','','','',language, attach_scope) testAllModes(0,mc,custid,'"natural"','','','','',language, attach_scope) testAllModes(0,mc,custid,'habitat','','','','',language, attach_scope) testAllModes(0,mc,custid,'"habitat"','','','','',language, attach_scope) testAllModes(0,mc,custid,'natural habitat','','','','',language, attach_scope) testAllModes(0,mc,custid,'"natural habitat"','','','','',language, attach_scope) testAllModes(0,mc,custid,'habitat natural','','','','',language, attach_scope) testAllModes(0,mc,custid,'"habitat natural"','','','','',language, attach_scope) # find keywords in other fields testAllModes(0,mc,custid,'ziggurat','','','','',language, attach_scope) testAllModes(2,mc,custid,'irfan','','','','',language, attach_scope) testAllModes(1,mc,custid,'tester','','','','',language, attach_scope) testAllModes(1,mc,custid,'australia','','','','',language, attach_scope) testAllModes(1,mc,custid,'100','','','','',language, attach_scope) testAllModes(0,mc,custid,'800','','','','',language, attach_scope) testAllModes(1,mc,custid,'koala','','','','',language, attach_scope) testAllModes(1,mc,custid,'jpg','','','','',language, attach_scope) testAllModes(1,mc,custid,'.jpg','','','','',language, attach_scope) testAllModes(1,mc,custid,'*.jpg','','','','',language, attach_scope) # find keywords in sender testAllModes(0,mc,custid,'','koala','','','',language, attach_scope) testAllModes(1,mc,custid,'','irfan','','','',language, attach_scope) testAllModes(1,mc,custid,'','jabbar','','','',language, attach_scope) # find 
keywords in recipient testAllModes(0,mc,custid,'','','koala','','',language, attach_scope) testAllModes(1,mc,custid,'','','tester','','',language, attach_scope) testAllModes(5,mc,custid,'','','lab062','','',language, attach_scope) # find keywords in subject testAllModes(0,mc,custid,'','','','koala','',language, attach_scope) testAllModes(1,mc,custid,'','','','australia','',language, attach_scope) if isEdiscovery: testAllModes(0,mc,custid,'','','','"australias"','',language, attach_scope) else: testAllModes(1,mc,custid,'','','','"australias"','',language, attach_scope) if isEdiscovery: testAllModes(1,mc,custid,'','','','"but sjgy6343468"','',language, attach_scope) testAllModes(0,mc,custid,'','','','"and sjgy6343468"','',language, attach_scope) else: testAllModes(1,mc,custid,'','','','"but sjgy6343468"','',language, attach_scope) testAllModes(1,mc,custid,'','','','"and sjgy6343468"','',language, attach_scope) testAllModes(1,mc,custid,'','','','100','',language, attach_scope) testAllModes(0,mc,custid,'','','','800','',language, attach_scope) # verify EMSDEV-10004 testAllModes(1,mc,custid,'','','','australia 100','',language, attach_scope) testAllModes(0,mc,custid,'','','','australia habitat','',language, attach_scope) # test EMSDEV-9804 testAllModes(2,mc,custid,'"the"','','','','',language, attach_scope) testAllModes(2,mc,custid,'the','','','','',language, attach_scope) # test EMSDEV-9843 testAllModes(0,mc,custid,'"their natural habitat is in australia"','','','','',language, attach_scope) testAllModes(0,mc,custid,'their natural habitat is in australia','','','','',language, attach_scope) # find keywords in attachment testAllModes(0,mc,custid,'','','','','australia',language, attach_scope) testAllModes(1,mc,custid,'','','','','koala',language, attach_scope) testAllModes(1,mc,custid,'','','','','jpg',language, attach_scope) testAllModes(1,mc,custid,'','','','','.jpg',language, attach_scope) testAllModes(1,mc,custid,'','','','','*.jpg',language, attach_scope) 
attach_scope = False # find keywords in body testAllModes(1,mc,custid,'natural','','','','',language, attach_scope) testAllModes(1,mc,custid,'"natural"','','','','',language, attach_scope) testAllModes(1,mc,custid,'habitat','','','','',language, attach_scope) testAllModes(1,mc,custid,'"habitat"','','','','',language, attach_scope) testAllModes(1,mc,custid,'natural habitat','','','','',language, attach_scope) testAllModes(1,mc,custid,'"natural habitat"','','','','',language, attach_scope) testAllModes(1,mc,custid,'habitat natural','','','','',language, attach_scope) testAllModes(0,mc,custid,'"habitat natural"','','','','',language, attach_scope) # find keywords in other fields testAllModes(0,mc,custid,'ziggurat','','','','',language, attach_scope) testAllModes(1,mc,custid,'irfan','','','','',language, attach_scope) testAllModes(1,mc,custid,'tester','','','','',language, attach_scope) testAllModes(1,mc,custid,'australia','','','','',language, attach_scope) testAllModes(1,mc,custid,'100','','','','',language, attach_scope) testAllModes(0,mc,custid,'800','','','','',language, attach_scope) testAllModes(1,mc,custid,'koala','','','','',language, attach_scope) testAllModes(1,mc,custid,'jpg','','','','',language, attach_scope) testAllModes(1,mc,custid,'.jpg','','','','',language, attach_scope) testAllModes(1,mc,custid,'*.jpg','','','','',language, attach_scope) # find keywords in sender testAllModes(0,mc,custid,'','koala','','','',language, attach_scope) testAllModes(1,mc,custid,'','irfan','','','',language, attach_scope) testAllModes(1,mc,custid,'','jabbar','','','',language, attach_scope) # find keywords in recipient testAllModes(0,mc,custid,'','','koala','','',language, attach_scope) testAllModes(1,mc,custid,'','','tester','','',language, attach_scope) testAllModes(11,mc,custid,'','','lab062','','',language, attach_scope) # find keywords in subject testAllModes(0,mc,custid,'','','','koala','',language, attach_scope) 
testAllModes(1,mc,custid,'','','','australia','',language, attach_scope) testAllModes(1,mc,custid,'','','','100','',language, attach_scope) testAllModes(0,mc,custid,'','','','800','',language, attach_scope) # verify EMSDEV-10004 testAllModes(1,mc,custid,'','','','australia 100','',language, attach_scope) testAllModes(0,mc,custid,'','','','australia habitat','',language, attach_scope) # test EMSDEV-9804 testAllModes(1,mc,custid,'"the"','','','','',language, attach_scope) testAllModes(1,mc,custid,'the','','','','',language, attach_scope) # test EMSDEV-9843 testAllModes(1,mc,custid,'"their natural habitat is in australia"','','','','',language, attach_scope) testAllModes(1,mc,custid,'their natural habitat is in australia','','','','',language, attach_scope) # find keywords in attachment testAllModes(0,mc,custid,'','','','','australia',language, attach_scope) testAllModes(1,mc,custid,'','','','','koala',language, attach_scope) testAllModes(1,mc,custid,'','','','','jpg',language, attach_scope) testAllModes(1,mc,custid,'','','','','.jpg',language, attach_scope) testAllModes(1,mc,custid,'','','','','*.jpg',language, attach_scope) # in "wrong" language languages = ['ru','zh-tw','zh-cn','fr','de','nl','sv','ja','pt','ar','he'] for language in languages: attach_scope = None # find keywords in body testAllModes(0,mc,custid,'natural','','','','',language, attach_scope) testAllModes(0,mc,custid,'"natural"','','','','',language, attach_scope) testAllModes(0,mc,custid,'habitat','','','','',language, attach_scope) testAllModes(0,mc,custid,'"habitat"','','','','',language, attach_scope) testAllModes(0,mc,custid,'natural habitat','','','','',language, attach_scope) testAllModes(0,mc,custid,'"natural habitat"','','','','',language, attach_scope) testAllModes(0,mc,custid,'habitat natural','','','','',language, attach_scope) testAllModes(0,mc,custid,'"habitat natural"','','','','',language, attach_scope) # find keywords in other fields 
testAllModes(0,mc,custid,'ziggurat','','','','',language, attach_scope) testAllModes(2,mc,custid,'irfan','','','','',language, attach_scope, None, 1) testAllModes(2,mc,custid,'tester','','','','',language, attach_scope, None, 1) testAllModes(2,mc,custid,'koala','','','','',language, attach_scope, None, 1) testAllModes(2,mc,custid,'jpg','','','','',language, attach_scope, None, 1) testAllModes(2,mc,custid,'.jpg','','','','',language, attach_scope, None, 1) testAllModes(2,mc,custid,'*.jpg','','','','',language, attach_scope, None, 1) testAllModes(0,mc,custid,'australia','','','','',language, attach_scope) testAllModes(0,mc,custid,'100','','','','',language, attach_scope) testAllModes(0,mc,custid,'800','','','','',language, attach_scope) # find keywords in sender testAllModes(0,mc,custid,'','koala','','','',language, attach_scope) testAllModes(2,mc,custid,'','irfan','','','',language, attach_scope, None, 1) testAllModes(2,mc,custid,'','jabbar','','','',language, attach_scope, None, 1) # find keywords in recipient testAllModes(0,mc,custid,'','','koala','','',language, attach_scope) testAllModes(2,mc,custid,'','','tester','','',language, attach_scope, None, 1) testAllModes(16,mc,custid,'','','lab062','','',language, attach_scope, None, 11) # find keywords in subject testAllModes(0,mc,custid,'','','','koala','',language, attach_scope) testAllModes(0,mc,custid,'','','','australia','',language, attach_scope) testAllModes(0,mc,custid,'','','','100','',language, attach_scope) testAllModes(0,mc,custid,'','','','800','',language, attach_scope) # verify EMSDEV-10004 testAllModes(0,mc,custid,'','','','australia 100','',language, attach_scope) testAllModes(0,mc,custid,'','','','australia habitat','',language, attach_scope) # test EMSDEV-9804 testAllModes(0,mc,custid,'"the"','','','','',language, attach_scope) testAllModes(0,mc,custid,'the','','','','',language, attach_scope) # test EMSDEV-9843 testAllModes(0,mc,custid,'"their natural habitat is in 
australia"','','','','',language, attach_scope) testAllModes(0,mc,custid,'their natural habitat is in australia','','','','',language, attach_scope) # find keywords in attachment testAllModes(0,mc,custid,'','','','','australia',language, attach_scope) testAllModes(2,mc,custid,'','','','','koala',language, attach_scope, None, 1) testAllModes(2,mc,custid,'','','','','jpg',language, attach_scope, None, 1) testAllModes(2,mc,custid,'','','','','.jpg',language, attach_scope, None, 1) testAllModes(2,mc,custid,'','','','','*.jpg',language, attach_scope, None, 1) attach_scope = True # find keywords in body testAllModes(0,mc,custid,'natural','','','','',language, attach_scope) testAllModes(0,mc,custid,'"natural"','','','','',language, attach_scope) testAllModes(0,mc,custid,'habitat','','','','',language, attach_scope) testAllModes(0,mc,custid,'"habitat"','','','','',language, attach_scope) testAllModes(0,mc,custid,'natural habitat','','','','',language, attach_scope) testAllModes(0,mc,custid,'"natural habitat"','','','','',language, attach_scope) testAllModes(0,mc,custid,'habitat natural','','','','',language, attach_scope) testAllModes(0,mc,custid,'"habitat natural"','','','','',language, attach_scope) # find keywords in other fields testAllModes(0,mc,custid,'ziggurat','','','','',language, attach_scope) testAllModes(1,mc,custid,'irfan','','','','',language, attach_scope) testAllModes(1,mc,custid,'tester','','','','',language, attach_scope) testAllModes(0,mc,custid,'australia','','','','',language, attach_scope) testAllModes(0,mc,custid,'100','','','','',language, attach_scope) testAllModes(0,mc,custid,'800','','','','',language, attach_scope) testAllModes(1,mc,custid,'koala','','','','',language, attach_scope) testAllModes(1,mc,custid,'jpg','','','','',language, attach_scope) testAllModes(1,mc,custid,'.jpg','','','','',language, attach_scope) testAllModes(1,mc,custid,'*.jpg','','','','',language, attach_scope) # find keywords in sender 
testAllModes(0,mc,custid,'','koala','','','',language, attach_scope) testAllModes(1,mc,custid,'','irfan','','','',language, attach_scope) testAllModes(1,mc,custid,'','jabbar','','','',language, attach_scope) # find keywords in recipient testAllModes(0,mc,custid,'','','koala','','',language, attach_scope) testAllModes(1,mc,custid,'','','tester','','',language, attach_scope) testAllModes(5,mc,custid,'','','lab062','','',language, attach_scope) # find keywords in subject testAllModes(0,mc,custid,'','','','koala','',language, attach_scope) testAllModes(0,mc,custid,'','','','australia','',language, attach_scope) testAllModes(0,mc,custid,'','','','100','',language, attach_scope) testAllModes(0,mc,custid,'','','','800','',language, attach_scope) # verify EMSDEV-10004 testAllModes(0,mc,custid,'','','','australia 100','',language, attach_scope) testAllModes(0,mc,custid,'','','','australia habitat','',language, attach_scope) # find keywords in attachment testAllModes(0,mc,custid,'','','','','australia',language, attach_scope) testAllModes(1,mc,custid,'','','','','koala',language, attach_scope) testAllModes(1,mc,custid,'','','','','jpg',language, attach_scope) testAllModes(1,mc,custid,'','','','','.jpg',language, attach_scope) testAllModes(1,mc,custid,'','','','','*.jpg',language, attach_scope) attach_scope = False # find keywords in body testAllModes(0,mc,custid,'natural','','','','',language, attach_scope) testAllModes(0,mc,custid,'"natural"','','','','',language, attach_scope) testAllModes(0,mc,custid,'habitat','','','','',language, attach_scope) testAllModes(0,mc,custid,'"habitat"','','','','',language, attach_scope) testAllModes(0,mc,custid,'natural habitat','','','','',language, attach_scope) testAllModes(0,mc,custid,'"natural habitat"','','','','',language, attach_scope) testAllModes(0,mc,custid,'habitat natural','','','','',language, attach_scope) testAllModes(0,mc,custid,'"habitat natural"','','','','',language, attach_scope) # find keywords in other fields 
testAllModes(0,mc,custid,'ziggurat','','','','',language, attach_scope) testAllModes(1,mc,custid,'irfan','','','','',language, attach_scope) testAllModes(1,mc,custid,'tester','','','','',language, attach_scope) testAllModes(0,mc,custid,'australia','','','','',language, attach_scope) testAllModes(0,mc,custid,'100','','','','',language, attach_scope) testAllModes(0,mc,custid,'800','','','','',language, attach_scope) testAllModes(1,mc,custid,'koala','','','','',language, attach_scope) testAllModes(1,mc,custid,'jpg','','','','',language, attach_scope) testAllModes(1,mc,custid,'.jpg','','','','',language, attach_scope) testAllModes(1,mc,custid,'*.jpg','','','','',language, attach_scope) # find keywords in sender testAllModes(0,mc,custid,'','koala','','','',language, attach_scope) testAllModes(1,mc,custid,'','irfan','','','',language, attach_scope) testAllModes(1,mc,custid,'','jabbar','','','',language, attach_scope) # find keywords in recipient testAllModes(0,mc,custid,'','','koala','','',language, attach_scope) testAllModes(1,mc,custid,'','','tester','','',language, attach_scope) testAllModes(11,mc,custid,'','','lab062','','',language, attach_scope) # find keywords in subject testAllModes(0,mc,custid,'','','','koala','',language, attach_scope) testAllModes(0,mc,custid,'','','','australia','',language, attach_scope) testAllModes(0,mc,custid,'','','','100','',language, attach_scope) testAllModes(0,mc,custid,'','','','800','',language, attach_scope) # verify EMSDEV-10004 testAllModes(0,mc,custid,'','','','australia 100','',language, attach_scope) testAllModes(0,mc,custid,'','','','australia habitat','',language, attach_scope) # find keywords in attachment testAllModes(0,mc,custid,'','','','','australia',language, attach_scope) testAllModes(1,mc,custid,'','','','','koala',language, attach_scope) testAllModes(1,mc,custid,'','','','','jpg',language, attach_scope) testAllModes(1,mc,custid,'','','','','.jpg',language, attach_scope) 
testAllModes(1,mc,custid,'','','','','*.jpg',language, attach_scope) # specialized tests for EMSDEV-10255, EMSDEV-10288 # verify searchability for various dbcs encodings # EMSDEV-10288 encoded ja msg as attachment and main body both searchable string1 = unicode('同市安田のテレトラック横手第2駐車場で','utf-8') string2 = unicode('業者が正確な数値を算出するが','utf-8') testAllModes(1,mc,custid,string1,'','','','','any',None) testAllModes(0,mc,custid,string1,'','','','','any',True) testAllModes(1,mc,custid,string1,'','','','','any',False) testAllModes(1,mc,custid,string2,'','','','','any',None) testAllModes(1,mc,custid,string2,'','','','','any',True) testAllModes(0,mc,custid,string2,'','','','','any',False) testAllModes(1,mc,custid,string1,'','','','','ja',None) testAllModes(0,mc,custid,string1,'','','','','ja',True) testAllModes(1,mc,custid,string1,'','','','','ja',False) testAllModes(1,mc,custid,string2,'','','','','ja',None) testAllModes(1,mc,custid,string2,'','','','','ja',True) testAllModes(0,mc,custid,string2,'','','','','ja',False) string3 = unicode('んおやゆよににんかはきくまりねくりれ','utf-8') testAllModes(1,mc,custid,string3,'','','','','any',None) testAllModes(0,mc,custid,string3,'','','','','any',True) testAllModes(1,mc,custid,string3,'','','','','any',False) testAllModes(1,mc,custid,string3,'','','','','ja',None) testAllModes(0,mc,custid,string3,'','','','','ja',True) testAllModes(1,mc,custid,string3,'','','','','ja',False) testAllModes(0,mc,custid,string3,'','','','','en',None) testAllModes(0,mc,custid,string3,'','','','','en',True) testAllModes(0,mc,custid,string3,'','','','','en',False) string3 = unicode('きくまのりれけむらにかてふくこは','utf-8') testAllModes(1,mc,custid,string3,'','','','','any',None) testAllModes(0,mc,custid,string3,'','','','','any',True) testAllModes(1,mc,custid,string3,'','','','','any',False) testAllModes(1,mc,custid,string3,'','','','','ja',None) testAllModes(0,mc,custid,string3,'','','','','ja',True) testAllModes(1,mc,custid,string3,'','','','','ja',False) 
testAllModes(0,mc,custid,string3,'','','','','en',None) testAllModes(0,mc,custid,string3,'','','','','en',True) testAllModes(0,mc,custid,string3,'','','','','en',False) string3 = unicode('うそらガッぢクゅぽニずガぱれぽろよめざ','utf-8') testAllModes(2,mc,custid,string3,'','','','','any',None) testAllModes(0,mc,custid,string3,'','','','','any',True) testAllModes(2,mc,custid,string3,'','','','','any',False) testAllModes(2,mc,custid,string3,'','','','','ja',None) testAllModes(0,mc,custid,string3,'','','','','ja',True) testAllModes(2,mc,custid,string3,'','','','','ja',False) testAllModes(0,mc,custid,string3,'','','','','en',None) testAllModes(0,mc,custid,string3,'','','','','en',True) testAllModes(0,mc,custid,string3,'','','','','en',False) # arabic string3 = unicode('يؤكد السعي لمنع حرب أهلية','utf-8') testAllModes(3,mc,custid,string3,'','','','','any',None) testAllModes(1,mc,custid,string3,'','','','','any',True) testAllModes(2,mc,custid,string3,'','','','','any',False) testAllModes(3,mc,custid,string3,'','','','','ar',None) testAllModes(1,mc,custid,string3,'','','','','ar',True) testAllModes(2,mc,custid,string3,'','','','','ar',False) testAllModes(1,mc,custid,string3,'','','','','en',None) testAllModes(0,mc,custid,string3,'','','','','en',True) testAllModes(1,mc,custid,string3,'','','','','en',False) string3 = unicode('من عناصر قوة حفظ السلام الدولية','utf-8') testAllModes(1,mc,custid,string3,'','','','','any',None) testAllModes(1,mc,custid,string3,'','','','','any',True) testAllModes(0,mc,custid,string3,'','','','','any',False) testAllModes(1,mc,custid,string3,'','','','','ar',None) testAllModes(1,mc,custid,string3,'','','','','ar',True) testAllModes(0,mc,custid,string3,'','','','','ar',False) testAllModes(0,mc,custid,string3,'','','','','en',None) testAllModes(0,mc,custid,string3,'','','','','en',True) testAllModes(0,mc,custid,string3,'','','','','en',False) # spanish string3 = unicode('la motivación de nuestro rival será más grande pero hay','utf-8') 
testAllModes(1,mc,custid,string3,'','','','','any',None) testAllModes(1,mc,custid,string3,'','','','','any',True) testAllModes(0,mc,custid,string3,'','','','','any',False) testAllModes(1,mc,custid,string3,'','','','','es',None) testAllModes(1,mc,custid,string3,'','','','','es',True) testAllModes(0,mc,custid,string3,'','','','','es',False) testAllModes(0,mc,custid,string3,'','','','','en',None) testAllModes(0,mc,custid,string3,'','','','','en',True) testAllModes(0,mc,custid,string3,'','','','','en',False) string3 = unicode('o primeros con un punto más','utf-8') testAllModes(1,mc,custid,string3,'','','','','any',None) testAllModes(0,mc,custid,string3,'','','','','any',True) testAllModes(1,mc,custid,string3,'','','','','any',False) testAllModes(1,mc,custid,string3,'','','','','es',None) testAllModes(0,mc,custid,string3,'','','','','es',True) testAllModes(1,mc,custid,string3,'','','','','es',False) testAllModes(0,mc,custid,string3,'','','','','en',None) testAllModes(0,mc,custid,string3,'','','','','en',True) testAllModes(0,mc,custid,string3,'','','','','en',False) def searchCountQL(mc,custid,query,language,attach_scope,msgMode=False): sm = mc.getIndexSearchManager() isc = IndexSearchConstraint(custid,None) isc.constrainByLanguage(language) isc.queryMessages(msgMode) qb = SolrQueryBuilder(custid) qb.applyLanguage(language) qb.applyAttachmentScope(attach_scope) qb.applyDefaultSearch(query,True,True) print 'generated query =',qb.getQuery() sr = sm.search(qb.getQuery(),isc,None,CallerApp.REVIEWER_SEARCH) return sr.getDocCount() def searchCountUQL(mc,custid,query,language,attach_scope,msgMode=False): sm = mc.getIndexSearchManager() isc = IndexSearchConstraint(custid,None) isc.constrainByLanguage(language) isc.queryMessages(msgMode) qb = UserQueryBuilder(custid); qb.applyLanguage(language) qb.applyAttachmentScope(attach_scope) qb.applyDefaultSearch(query,True,True) print 'generated UQL query =',qb.getQuery() qbs = SolrQueryBuilder(custid); qbs.applyLanguage(language) 
qbs.applyAttachmentScope(attach_scope) qbs.applyDefaultSearch(qb.getQuery(),True,True) print 'generated query =',qbs.getQuery() sr = sm.search(qbs.getQuery(),isc,None,CallerApp.REVIEWER_SEARCH) return sr.getDocCount() def testQLAllModes(expected,mc,custid,query,language,attach_scope,rg = None, msgCount = None): global failed_count global ok_count msgExpected = expected if msgCount is not None : msgExpected = msgCount testQL(expected,mc,custid,query,language,attach_scope,rg, False) testQL(msgExpected,mc,custid,query,language,attach_scope,rg, True) def testQL(expected,mc,custid,query,language,attach_scope,rg = None, msgMode=False): global failed_count global ok_count print 'custid="'+str(custid)+'"' print 'query="'+query+'"' print 'language="'+language+'"' print 'attachment_scope=',attach_scope print 'message_mode=',msgMode c = searchCountQL(mc,custid,query,language,attach_scope,msgMode) print 'expected =',expected,'\t found =',c if c != expected: print 'FAILED' failed_count = failed_count + 1 else: print 'PASSED' ok_count = ok_count + 1 c = searchCountUQL(mc,custid,query,language,attach_scope,msgMode) print 'expected =',expected,'\t found =',c if c != expected: print 'FAILED' failed_count = failed_count + 1 else: print 'PASSED' ok_count = ok_count + 1 def mainTestQL(mc,custid): global ok_count global failed_count # in the right languages languages = ['en', 'any'] for language in languages: attach_scope = None # find keywords in body testQLAllModes(1,mc,custid,'natural',language, attach_scope) testQLAllModes(1,mc,custid,'"natural"',language, attach_scope) testQLAllModes(1,mc,custid,'habitat',language, attach_scope) testQLAllModes(1,mc,custid,'"habitat"',language, attach_scope) testQLAllModes(1,mc,custid,'natural habitat',language, attach_scope) testQLAllModes(1,mc,custid,'"natural habitat"',language, attach_scope) testQLAllModes(1,mc,custid,'habitat natural',language, attach_scope) testQLAllModes(0,mc,custid,'"habitat natural"',language, attach_scope) # find keywords 
in other fields testQLAllModes(0,mc,custid,'ziggurat',language, attach_scope) testQLAllModes(3,mc,custid,'irfan',language, attach_scope, None, 2) testQLAllModes(2,mc,custid,'tester',language, attach_scope, None, 1) testQLAllModes(2,mc,custid,'australia',language, attach_scope, None, 1) testQLAllModes(2,mc,custid,'koala',language, attach_scope, None, 1) testQLAllModes(2,mc,custid,'jpg',language, attach_scope, None, 1) testQLAllModes(2,mc,custid,'.jpg',language, attach_scope, None,1 ) # testQLAllModes(2,mc,custid,'*.jpg',language, attach_scope, None, 1) # find keywords in sender testQLAllModes(0,mc,custid,'mailfrom:koala',language, attach_scope) testQLAllModes(2,mc,custid,'mailfrom:irfan',language, attach_scope, None, 1) testQLAllModes(2,mc,custid,'mailfrom:jabbar',language, attach_scope, None, 1) # find keywords in recipient testQLAllModes(0,mc,custid,'mailto:koala',language, attach_scope) testQLAllModes(2,mc,custid,'mailto:tester',language, attach_scope, None, 1) testQLAllModes(16,mc,custid,'mailto:lab062',language, attach_scope, None, 11) # find keywords in subject testQLAllModes(0,mc,custid,'mailsubject:koala',language, attach_scope) testQLAllModes(2,mc,custid,'mailsubject:australia',language, attach_scope, None, 1) # find keywords in attachments testQLAllModes(0,mc,custid,'attachedfiles:australia',language, attach_scope) testQLAllModes(1,mc,custid,'attachedfiles:koala',language, attach_scope) testQLAllModes(1,mc,custid,'attachedfiles:jpg',language, attach_scope) testQLAllModes(1,mc,custid,'attachedfiles:.jpg',language, attach_scope) # testQLAllModes(1,mc,custid,'attachedfiles:*.jpg',language, attach_scope) # find keywords in filename testQLAllModes(0,mc,custid,'filename:australia',language, attach_scope) testQLAllModes(1,mc,custid,'filename:koala',language, attach_scope) testQLAllModes(1,mc,custid,'filename:jpg',language, attach_scope) testQLAllModes(1,mc,custid,'filename:.jpg',language, attach_scope) # testQLAllModes(1,mc,custid,'filename:*.jpg',language, 
attach_scope) # test EMSDEV-10026 testQLAllModes(2,mc,custid,'recipients:tester',language, attach_scope, None, 1) attach_scope = True # find keywords in body testQLAllModes(0,mc,custid,'natural',language, attach_scope) testQLAllModes(0,mc,custid,'"natural"',language, attach_scope) testQLAllModes(0,mc,custid,'habitat',language, attach_scope) testQLAllModes(0,mc,custid,'"habitat"',language, attach_scope) testQLAllModes(0,mc,custid,'natural habitat',language, attach_scope) testQLAllModes(0,mc,custid,'"natural habitat"',language, attach_scope) testQLAllModes(0,mc,custid,'habitat natural',language, attach_scope) testQLAllModes(0,mc,custid,'"habitat natural"',language, attach_scope) # find keywords in other fields testQLAllModes(0,mc,custid,'ziggurat',language, attach_scope) testQLAllModes(2,mc,custid,'irfan',language, attach_scope) testQLAllModes(1,mc,custid,'tester',language, attach_scope) testQLAllModes(1,mc,custid,'australia',language, attach_scope) testQLAllModes(1,mc,custid,'koala',language, attach_scope) testQLAllModes(1,mc,custid,'jpg',language, attach_scope) testQLAllModes(1,mc,custid,'.jpg',language, attach_scope) # testQLAllModes(1,mc,custid,'*.jpg',language, attach_scope) # find keywords in sender testQLAllModes(0,mc,custid,'mailfrom:koala',language, attach_scope) testQLAllModes(1,mc,custid,'mailfrom:irfan',language, attach_scope) testQLAllModes(1,mc,custid,'mailfrom:jabbar',language, attach_scope) # find keywords in recipient testQLAllModes(0,mc,custid,'mailto:koala',language, attach_scope) testQLAllModes(1,mc,custid,'mailto:tester',language, attach_scope) testQLAllModes(5,mc,custid,'mailto:lab062',language, attach_scope) # find keywords in subject testQLAllModes(0,mc,custid,'mailsubject:koala',language, attach_scope) testQLAllModes(1,mc,custid,'mailsubject:australia',language, attach_scope) # find keywords in attachments testQLAllModes(0,mc,custid,'attachedfiles:australia',language, attach_scope) testQLAllModes(0,mc,custid,'attachedfiles:koala',language, 
attach_scope) testQLAllModes(0,mc,custid,'attachedfiles:jpg',language, attach_scope) testQLAllModes(0,mc,custid,'attachedfiles:.jpg',language, attach_scope) # testQLAllModes(0,mc,custid,'attachedfiles:*.jpg',language, attach_scope) # find keywords in filename testQLAllModes(0,mc,custid,'filename:australia',language, attach_scope) testQLAllModes(1,mc,custid,'filename:koala',language, attach_scope) testQLAllModes(1,mc,custid,'filename:jpg',language, attach_scope) testQLAllModes(1,mc,custid,'filename:.jpg',language, attach_scope) # testQLAllModes(1,mc,custid,'filename:*.jpg',language, attach_scope) attach_scope = False # find keywords in body # find keywords in body testQLAllModes(1,mc,custid,'natural',language, attach_scope) testQLAllModes(1,mc,custid,'"natural"',language, attach_scope) testQLAllModes(1,mc,custid,'habitat',language, attach_scope) testQLAllModes(1,mc,custid,'"habitat"',language, attach_scope) testQLAllModes(1,mc,custid,'natural habitat',language, attach_scope) testQLAllModes(1,mc,custid,'"natural habitat"',language, attach_scope) testQLAllModes(1,mc,custid,'habitat natural',language, attach_scope) testQLAllModes(0,mc,custid,'"habitat natural"',language, attach_scope) # find keywords in other fields testQLAllModes(0,mc,custid,'ziggurat',language, attach_scope) testQLAllModes(1,mc,custid,'irfan',language, attach_scope) testQLAllModes(1,mc,custid,'tester',language, attach_scope) testQLAllModes(1,mc,custid,'australia',language, attach_scope) testQLAllModes(1,mc,custid,'koala',language, attach_scope) testQLAllModes(1,mc,custid,'jpg',language, attach_scope) testQLAllModes(1,mc,custid,'.jpg',language, attach_scope) # testQLAllModes(1,mc,custid,'*.jpg',language, attach_scope) # find keywords in sender testQLAllModes(0,mc,custid,'mailfrom:koala',language, attach_scope) testQLAllModes(1,mc,custid,'mailfrom:irfan',language, attach_scope) testQLAllModes(1,mc,custid,'mailfrom:jabbar',language, attach_scope) # find keywords in recipient 
testQLAllModes(0,mc,custid,'mailto:koala',language, attach_scope) testQLAllModes(1,mc,custid,'mailto:tester',language, attach_scope) testQLAllModes(11,mc,custid,'mailto:lab062',language, attach_scope) # find keywords in subject testQLAllModes(0,mc,custid,'mailsubject:koala',language, attach_scope) testQLAllModes(1,mc,custid,'mailsubject:australia',language, attach_scope) # find keywords in attachments testQLAllModes(0,mc,custid,'attachedfiles:australia',language, attach_scope) testQLAllModes(1,mc,custid,'attachedfiles:koala',language, attach_scope) testQLAllModes(1,mc,custid,'attachedfiles:jpg',language, attach_scope) testQLAllModes(1,mc,custid,'attachedfiles:.jpg',language, attach_scope) # testQLAllModes(1,mc,custid,'attachedfiles:*.jpg',language, attach_scope) # find keywords in filename testQLAllModes(0,mc,custid,'filename:australia',language, attach_scope) testQLAllModes(0,mc,custid,'filename:koala',language, attach_scope) testQLAllModes(0,mc,custid,'filename:jpg',language, attach_scope) testQLAllModes(0,mc,custid,'filename:.jpg',language, attach_scope) # testQLAllModes(0,mc,custid,'filename:*.jpg',language, attach_scope) # in "wrong" language languages = ['ru','zh-tw','zh-cn','fr','de','nl','sv','ja','pt','ar','he'] for language in languages: attach_scope = None # find keywords in body testQLAllModes(0,mc,custid,'natural',language, attach_scope) testQLAllModes(0,mc,custid,'"natural"',language, attach_scope) testQLAllModes(0,mc,custid,'habitat',language, attach_scope) testQLAllModes(0,mc,custid,'"habitat"',language, attach_scope) testQLAllModes(0,mc,custid,'natural habitat',language, attach_scope) testQLAllModes(0,mc,custid,'"natural habitat"',language, attach_scope) testQLAllModes(0,mc,custid,'habitat natural',language, attach_scope) testQLAllModes(0,mc,custid,'"habitat natural"',language, attach_scope) # find keywords in other fields testQLAllModes(0,mc,custid,'ziggurat',language, attach_scope) testQLAllModes(2,mc,custid,'irfan',language, attach_scope, None, 
1) testQLAllModes(2,mc,custid,'tester',language, attach_scope, None, 1) # weird, but attachment is "generic" testQLAllModes(0,mc,custid,'australia',language, attach_scope) testQLAllModes(2,mc,custid,'koala',language, attach_scope, None, 1) testQLAllModes(2,mc,custid,'jpg',language, attach_scope, None, 1) testQLAllModes(2,mc,custid,'.jpg',language, attach_scope, None, 1) # testQLAllModes(2,mc,custid,'*.jpg',language, attach_scope) # find keywords in sender testQLAllModes(0,mc,custid,'mailfrom:koala',language, attach_scope) testQLAllModes(2,mc,custid,'mailfrom:irfan',language, attach_scope, None, 1) testQLAllModes(2,mc,custid,'mailfrom:jabbar',language, attach_scope, None, 1) # find keywords in recipient testQLAllModes(0,mc,custid,'mailto:koala',language, attach_scope) testQLAllModes(2,mc,custid,'mailto:tester',language, attach_scope, None, 1) testQLAllModes(16,mc,custid,'mailto:lab062',language, attach_scope, None, 11) # find keywords in subject testQLAllModes(0,mc,custid,'mailsubject:koala',language, attach_scope) # weird, but attachment is "generic" testQLAllModes(0,mc,custid,'mailsubject:australia',language, attach_scope) # find keywords in attachments testQLAllModes(0,mc,custid,'attachedfiles:australia',language, attach_scope) testQLAllModes(1,mc,custid,'attachedfiles:koala',language, attach_scope) testQLAllModes(1,mc,custid,'attachedfiles:jpg',language, attach_scope) testQLAllModes(1,mc,custid,'attachedfiles:.jpg',language, attach_scope) # testQLAllModes(1,mc,custid,'attachedfiles:*.jpg',language, attach_scope) # find keywords in filename testQLAllModes(0,mc,custid,'filename:australia',language, attach_scope) testQLAllModes(1,mc,custid,'filename:koala',language, attach_scope) testQLAllModes(1,mc,custid,'filename:jpg',language, attach_scope) testQLAllModes(1,mc,custid,'filename:.jpg',language, attach_scope) # testQLAllModes(1,mc,custid,'filename:*.jpg',language, attach_scope) attach_scope = True # find keywords in body 
testQLAllModes(0,mc,custid,'natural',language, attach_scope) testQLAllModes(0,mc,custid,'"natural"',language, attach_scope) testQLAllModes(0,mc,custid,'habitat',language, attach_scope) testQLAllModes(0,mc,custid,'"habitat"',language, attach_scope) testQLAllModes(0,mc,custid,'natural habitat',language, attach_scope) testQLAllModes(0,mc,custid,'"natural habitat"',language, attach_scope) testQLAllModes(0,mc,custid,'habitat natural',language, attach_scope) testQLAllModes(0,mc,custid,'"habitat natural"',language, attach_scope) # find keywords in other fields testQLAllModes(0,mc,custid,'ziggurat',language, attach_scope) testQLAllModes(1,mc,custid,'irfan',language, attach_scope) testQLAllModes(1,mc,custid,'tester',language, attach_scope) testQLAllModes(0,mc,custid,'australia',language, attach_scope) testQLAllModes(1,mc,custid,'koala',language, attach_scope) testQLAllModes(1,mc,custid,'jpg',language, attach_scope) testQLAllModes(1,mc,custid,'.jpg',language, attach_scope) # testQLAllModes(1,mc,custid,'*.jpg',language, attach_scope) # find keywords in sender testQLAllModes(0,mc,custid,'mailfrom:koala',language, attach_scope) testQLAllModes(1,mc,custid,'mailfrom:irfan',language, attach_scope) testQLAllModes(1,mc,custid,'mailfrom:jabbar',language, attach_scope) # find keywords in recipient testQLAllModes(0,mc,custid,'mailto:koala',language, attach_scope) testQLAllModes(1,mc,custid,'mailto:tester',language, attach_scope) testQLAllModes(5,mc,custid,'mailto:lab062',language, attach_scope) # find keywords in subject testQLAllModes(0,mc,custid,'mailsubject:koala',language, attach_scope) testQLAllModes(0,mc,custid,'mailsubject:australia',language, attach_scope) # find keywords in attachments testQLAllModes(0,mc,custid,'attachedfiles:australia',language, attach_scope) testQLAllModes(0,mc,custid,'attachedfiles:koala',language, attach_scope) testQLAllModes(0,mc,custid,'attachedfiles:jpg',language, attach_scope) testQLAllModes(0,mc,custid,'attachedfiles:.jpg',language, attach_scope) # 
testQLAllModes(0,mc,custid,'attachedfiles:*.jpg',language, attach_scope) # find keywords in filename testQLAllModes(0,mc,custid,'filename:australia',language, attach_scope) testQLAllModes(1,mc,custid,'filename:koala',language, attach_scope) testQLAllModes(1,mc,custid,'filename:jpg',language, attach_scope) testQLAllModes(1,mc,custid,'filename:.jpg',language, attach_scope) # testQLAllModes(1,mc,custid,'filename:*.jpg',language, attach_scope) attach_scope = False # find keywords in body testQLAllModes(0,mc,custid,'natural',language, attach_scope) testQLAllModes(0,mc,custid,'"natural"',language, attach_scope) testQLAllModes(0,mc,custid,'habitat',language, attach_scope) testQLAllModes(0,mc,custid,'"habitat"',language, attach_scope) testQLAllModes(0,mc,custid,'natural habitat',language, attach_scope) testQLAllModes(0,mc,custid,'"natural habitat"',language, attach_scope) testQLAllModes(0,mc,custid,'habitat natural',language, attach_scope) testQLAllModes(0,mc,custid,'"habitat natural"',language, attach_scope) # find keywords in other fields testQLAllModes(0,mc,custid,'ziggurat',language, attach_scope) testQLAllModes(1,mc,custid,'irfan',language, attach_scope) testQLAllModes(1,mc,custid,'tester',language, attach_scope) testQLAllModes(0,mc,custid,'australia',language, attach_scope) testQLAllModes(1,mc,custid,'koala',language, attach_scope) testQLAllModes(1,mc,custid,'jpg',language, attach_scope) testQLAllModes(1,mc,custid,'.jpg',language, attach_scope) # testQLAllModes(1,mc,custid,'*.jpg',language, attach_scope) # find keywords in sender testQLAllModes(0,mc,custid,'mailfrom:koala',language, attach_scope) testQLAllModes(1,mc,custid,'mailfrom:irfan',language, attach_scope) testQLAllModes(1,mc,custid,'mailfrom:jabbar',language, attach_scope) # find keywords in recipient testQLAllModes(0,mc,custid,'mailto:koala',language, attach_scope) testQLAllModes(1,mc,custid,'mailto:tester',language, attach_scope) testQLAllModes(11,mc,custid,'mailto:lab062',language, attach_scope) # find 
keywords in subject testQLAllModes(0,mc,custid,'mailsubject:koala',language, attach_scope) testQLAllModes(0,mc,custid,'mailsubject:australia',language, attach_scope) # find keywords in attachments testQLAllModes(0,mc,custid,'attachedfiles:australia',language, attach_scope) testQLAllModes(1,mc,custid,'attachedfiles:koala',language, attach_scope) testQLAllModes(1,mc,custid,'attachedfiles:jpg',language, attach_scope) testQLAllModes(1,mc,custid,'attachedfiles:.jpg',language, attach_scope) # testQLAllModes(1,mc,custid,'attachedfiles:*.jpg',language, attach_scope) # find keywords in filename testQLAllModes(0,mc,custid,'filename:australia',language, attach_scope) testQLAllModes(0,mc,custid,'filename:koala',language, attach_scope) testQLAllModes(0,mc,custid,'filename:jpg',language, attach_scope) testQLAllModes(0,mc,custid,'filename:.jpg',language, attach_scope) # testQLAllModes(0,mc,custid,'filename:*.jpg',language, attach_scope) if __name__ == '__main__': if len(sys.argv) < 2 or len(sys.argv) > 3: print sys.argv[0],'islandId [custId | - ]' sys.exit(-1) inCustId = None if len(sys.argv) != 2 : inCustId = sys.argv[2] ok_count = 0 failed_count = 0 highpass = 0 highfail = 0 custid = None if inCustId is not None and inCustId != '-': custid = int(inCustId) island = None edMode = False proxy = Service('solrproxy-Island102Cluster1') try: mc = ManagementContainer.getInstance() island = mc.getIslandManager().getIsland(int(sys.argv[1])) edMode = island.isEdiscoveryEnabled() if not island.isEdiscoveryEnabled(): island.setEdiscoveryEnabled(True) mc.getIslandManager().updateIsland(island) print 'restarting proxy to ensure that island capability cache is in synch' proxy.invoke('restart','work-3') print 'proxy restarted' if custid is None : custid = setupCustomer(mc,sys.argv[1],'/tmp/searchcorpus','lab062.m1dev.com') waitForindexing(mc,custid,16) elif mc.getCustomerManager().getCustomer(custid) is None : msg = 'customer with id '+ str(custid)+' does not exist.' 
print msg custid = None raise Exception(msg) caps = mc.getCustomerManager().getCustomerCapabilities(int(custid)) caps.setBooleanCapability(Capabilities.CAP_ALLOW_ARCHIVE_EDISCOVERY,True) mc.getCustomerManager().saveCustomerCapabilities(caps) caps = mc.getCustomerManager().getCustomerCapabilities(int(custid)) print custid,'ediscovery is',caps.getBooleanCapability(Capabilities.CAP_ALLOW_ARCHIVE_EDISCOVERY) mainTest(mc,custid,True) # turn off ediscovery for customer caps = mc.getCustomerManager().getCustomerCapabilities(int(custid)) caps.setBooleanCapability(Capabilities.CAP_ALLOW_ARCHIVE_EDISCOVERY,False) mc.getCustomerManager().saveCustomerCapabilities(caps) caps = mc.getCustomerManager().getCustomerCapabilities(int(custid)) print custid,'ediscovery is',caps.getBooleanCapability(Capabilities.CAP_ALLOW_ARCHIVE_EDISCOVERY) mainTest(mc,custid,False) mainTestQL(mc,custid) extraScopeTest(mc,custid) extractTest(mc,custid) partialTest(mc,custid) print highpass,'highlight tests passed' print highfail,'highlight failures' print ok_count,'succeeded' print failed_count,'failures' sys.exit(failed_count + highfail) finally: #delete the customer if custid is not None and inCustId is None: print 'deleting customer',custid mc.getCustomerManager().deleteCustomers([custid]) if island is not None and edMode != island.isEdiscoveryEnabled(): island.setEdiscoveryEnabled(edMode) mc.getIslandManager().updateIsland(island) print 'restarting proxy to ensure that island capability cache is in synch' proxy.invoke('restart','work-3') print 'proxy restarted' sys.exit(-1)
UTF-8
Python
false
false
2,014
6,605,659,743,829
450d13e77a021a22760d9ad65645a56f6e8ada31
090324db0c04d8c30ad6688547cfea47858bf3af
/utils/prof.py
6b8271252d128d7a7c169f71f2bdca0423042324
[]
no_license
fidlej/sokobot
https://github.com/fidlej/sokobot
b82c4c36d73e224d0d0e1635021ca04485da589e
d3d04753a5043e6a22dafd132fa633d8bc66b9ea
refs/heads/master
2021-01-21T13:14:29.523501
2011-06-12T07:34:14
2011-06-12T07:34:14
32,650,745
1
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/env python
"""Profile the sokoban solver and print the hottest call sites."""
import sys
import sokopath
from solve import main as command

# Default file that freshly collected profile data is written to.
PROF_FILENAME = "stats.prof"


def _collect_profile(filename):
    """Run the solver under cProfile and dump raw stats into `filename`."""
    import cProfile as profile
    profile.run("command(use_psyco=False)", filename)


def _view_profile(filename):
    """Print the 20 most expensive entries of `filename`, by cumulative time."""
    import pstats
    stats = pstats.Stats(filename)
    # strip_dirs()/sort_stats() return the Stats object, so the calls chain.
    stats.strip_dirs().sort_stats('cumulative').print_stats(20)


def main():
    """Collect a new profile, or just display one named on the command line."""
    args = sys.argv[1:]
    have_existing = len(args) == 1 and args[0].endswith(".prof")
    if have_existing:
        filename = args[0]
    else:
        filename = PROF_FILENAME
        _collect_profile(filename)
    _view_profile(filename)


if __name__ == "__main__":
    main()
UTF-8
Python
false
false
2,011
14,276,471,300,349
f4d615ff970d4630f565a2417a41b6866d74eea6
441a490ad58551132b0ff8dd6f750b0805a21c40
/mockobjlib/row_proxy.py
a8fe0b5c08e6ebf9be821adcef8ed851b0f5baa3
[]
no_license
rhintz42/mockobjlib
https://github.com/rhintz42/mockobjlib
2080da4ab31758ea83cbc7becf9a3818a8ed9c78
9c5b3053e623c06d85bdd51512b318655d4efbe2
refs/heads/master
2020-05-26T06:08:11.361175
2014-03-24T06:15:03
2014-03-24T06:15:03
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
class RowProxy(object):
    """Mock of a database row object for tests.

    Column values passed as keyword arguments become readable three ways:
    as attributes (in the original, lowercased and uppercased spellings),
    by key via ``row[name]``, and positionally via the internal tuple used
    for ``repr``/``str``.  Accessing a column that was never supplied
    raises ``AttributeError`` with a "could not locate column" message,
    mimicking the real row object's behaviour.
    """

    def __init__(self, *args, **kwargs):
        # Keep parallel views of the same data: a positional tuple, key and
        # value lists for keys()/values(), and a dict for __getitem__.
        self._row = ()
        self._keys = []
        self._values = []
        self._dict = {}
        for key,val in kwargs.items():
            # Expose each column under its given, lowercase and uppercase
            # names so tests can use any casing.
            setattr(self, key, val)
            setattr(self, key.lower(), val)
            setattr(self, key.upper(), val)
            self._keys.append(key)
            self._row = self._row + (val,)
            self._values.append(val)
            self._dict[key] = val

    def __repr__(self):
        # Rows print as their positional tuple.
        return str(self._row)

    def __str__(self):
        return str(self._row)

    def __getitem__(self, attr):
        # Key lookup; raises KeyError (not AttributeError) for unknown keys.
        return self._dict[attr]

    def __getattr__(self, attr):
        # Only called for attributes NOT set in __init__, i.e. missing
        # columns — always raises.
        self._raise_column_error(attr)

    def keys(self):
        return self._keys

    def values(self):
        return self._values

    @property
    def __dict__(self):
        # Make direct __dict__ access fail like a missing column.
        # NOTE(review): this relies on CPython still writing instance
        # attributes through the C-level dict slot in __init__ even though
        # the class-level __dict__ descriptor is overridden — confirm on
        # the targeted interpreter.
        self._raise_column_error('dict')

    def _raise_column_error(self, attr):
        # Single place producing the mocked error message.
        raise AttributeError("Could not locate column in row for column '%s'" % (attr))
UTF-8
Python
false
false
2,014
5,703,716,603,244
1d4e915019b673796da472079621065214427a95
5604903736cfd20eaf31e64ac29edb9da5be3fed
/app/getter.py
c5570d3297b707ed86cc829a3de7ba77413fe209
[]
no_license
nervouna/XMLGetter
https://github.com/nervouna/XMLGetter
749d6a0aed6ca9d4aab3980521b46dc39221f109
90b7079db5d3ed89da18aa5b5f43ad9fdaae4408
refs/heads/master
2016-08-06T15:55:57.143827
2014-03-05T04:13:20
2014-03-05T04:13:20
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/env python #-*-coding: utf-8 -*- import os import time import requests from app import app from datetime import datetime from xml.dom.minidom import parseString newIssueAPI = dict( bbwc='http://content.cdn.bb.bbwc.cn/v4/app1/interface/content-getissue-1-3.xml', ilady='http://content.cdn.imlady.bbwc.cn/v4/app2/interface/content-getissue-1-3.xml') def getXML(api): raw_xml = requests.get(api).text.encode('utf-8').replace('\n', '') return raw_xml def parseXML(raw_xml): dom = parseString(raw_xml) return dom def saveStuff(dom, dirName): try: os.mkdir(os.path.join(app.config['STUFFDIR'], dirName)) except OSError: pass # Saving The Source Cover covers = dom.getElementsByTagName('news:cover_art_icons')[0] for sucker in covers.childNodes[1:]: covers.removeChild(sucker) sourceCover = covers.firstChild # The raw xml has bad filenames. sourceCoverURL = sourceCover.getAttribute('src').replace('00.png', '.png') sourceCoverFile = os.path.join( app.config['STUFFDIR'], dirName, 'source.png') with file(sourceCoverFile, 'wb') as c: r = requests.get(sourceCoverURL) c.write(r.content) sourceCover.setAttribute( 'src', os.path.join(app.config['HOST'], 'slateXML', dirName, 'source.png')) # Writing The XML File updateTime = dom.getElementsByTagName('updated')[0] updateTime.firstChild.data = datetime.strftime( datetime.today(), '%Y-%m-%dT%H:%M:%SZ') xmlFile = os.path.join(app.config['STUFFDIR'], dirName, 'newIssue.xml') with file(xmlFile, 'w') as x: x.write(dom.toprettyxml(indent=' ').encode('utf-8')) for key in newIssueAPI: raw_xml = getXML(newIssueAPI[key]) dom = parseXML(raw_xml) saveStuff(dom, key)
UTF-8
Python
false
false
2,014
1,975,684,958,599
7db15e650b83c8e42b1e22fae5038016f45f4651
058c2dc9f24ced073968510e9f3c6e9a15894424
/miitus/srv/prep.py
c8bc4545fd4f510989b481264e3856f1ff695e5b
[]
no_license
AntXlab/miitus
https://github.com/AntXlab/miitus
76a88e1e27e11ce168e48fe57002c3ae08734b91
8d7635be68f6670bcbfa7736884ef8283675876f
refs/heads/master
2021-01-10T20:00:57.626175
2014-07-29T22:48:25
2014-07-29T22:48:25
21,154,978
2
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from __future__ import absolute_import

from sqlalchemy.ext.declarative import declarative_base

from .utils import Singleton


class Preparation(Singleton):
    """Process-wide holder for SQLAlchemy's declarative machinery.

    Being a Singleton, every caller shares the same declarative base,
    so all models end up registered on one metadata object.
    """

    def __init__(self):
        """Build the shared declarative base once."""
        super(Preparation, self).__init__()
        self.__declarative_base = declarative_base()

    @property
    def Base(self):
        """The SQLAlchemy declarative base class models should inherit from."""
        return self.__declarative_base
UTF-8
Python
false
false
2,014
8,306,466,795,309
aab18ec1a3de179ced99830e853a5ddc10d850b5
dc151b0d1fbf44e7f069e529a6f76fdb5682b862
/randomGif.py
90e367a43d99eba653fc9c91b42831dec4b469dd
[]
no_license
Friss/random-gif-maker
https://github.com/Friss/random-gif-maker
20fd17b7747a34f675cb8461f7db2831848de1ca
54727ca356d9cdbf4c9a53816ec452c6a517e143
refs/heads/master
2021-01-19T05:03:32.790852
2014-02-02T17:13:57
2014-02-02T17:13:57
16,439,205
2
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# Pick a random movie from a directory tree and cut a random short GIF
# from it.  Python 2 only (print statements, raw_input).
import os
import random
import subprocess
from moviepy.editor import *

def scanfolder(root):
    """ Collect all movies with recognised file extensions under `root`.
    Params:
        root - path to root movies Directory
    Returns:
        movies - list of full paths to movies found.
    """
    movies = []
    for path, dirs, files in os.walk(root):
        for f in files:
            # Only these three container formats are considered movies.
            if f.endswith('.mkv') or f.endswith('.m2ts') or f.endswith('.avi'):
                #print os.path.join(path, f)
                movies.append(os.path.join(path,f))
    #print movies
    return movies

def getLength(filename):
    """ Get information on chosen movie file.
    Params:
        filename - path to movie to check
    Returns:
        list containing the line "Duration" from ffprobe's output.
    Note: For some reason json output wouldn't show Duration while this
    call does.
    """
    # ffprobe writes its report to stderr; merge it into stdout to grep it.
    result = subprocess.Popen(["ffprobe", filename],stdout = subprocess.PIPE, stderr = subprocess.STDOUT)
    return [x for x in result.stdout.readlines() if "Duration" in x]

def makeGif(root):
    """Choose a random movie under `root`, pick a random timestamp inside
    it, and write a short GIF ('movie.gif') of up to 3 seconds at 1/3 size.
    Returns (moviename, hour, mins, secs) of the clip start."""
    files = scanfolder(root) #Get all Movies
    moviepath = random.choice(files) #Random Movie from list
    # NOTE(review): the cleanup below assumes a layout like
    # root/<subfolder>/<title>/file.ext — confirm against the library layout.
    moviename = moviepath[root.__len__():].split("/")[2] #Remove Root Directory and Subfolder
    moviename = moviename[:moviename.index(".")] #Remove file extenstion
    moviename = moviename.split("[")[0] #Remove any [2013/1080p/720p]
    moviename = moviename.split("(")[0] #Remove any (2013/1080p/720p)
    print "Movie Chosen: " + moviename
    # Parse "Duration: HH:MM:SS.ss," out of the ffprobe line.
    duration = getLength(moviepath)[0].split(",") #Get Duration
    duration = duration[0].split(" ")
    duration = duration[3].split(":") #Break into hour, mins, secs
    #print duration
    hour = random.randint(0,int(duration[0])) #Random hour
    mins = random.randint(0,int(duration[1])+1) #Random min
    secs = float(duration[2]) #Parse secs to float
    timePassed = round(random.uniform(0, 3),2) #Random seconds to elapse up to 3.
    """
    print hour
    print mins
    print secs
    print timePassed
    """
    #Make GIF 1/3 sized.
    VideoFileClip(moviepath).\
        subclip((hour,mins,secs),(hour,mins,secs+timePassed)).\
        resize(0.3).\
        to_gif('movie.gif')
    return moviename, hour, mins, secs

if __name__ == "__main__":
    var = raw_input("Enter Path to Movie Directory: ")
    print "Movies Path: ", var
    moviename, hour, mins, secs = makeGif(var)
    exit()
UTF-8
Python
false
false
2,014
14,001,593,394,290
7c900a8e65577a8277f7f8a1a02edee7d727c076
b1751df2f0c0207c82abc957696ca4d9e7ca38fe
/python/keyValueInterface.py
63eaa5594402889aae25e73a8e5919a242fcaac5
[]
no_license
MichaelMathieu/neuromorphsSLAM
https://github.com/MichaelMathieu/neuromorphsSLAM
13ab4d8e2d3a423a0092e932420183ed8132697d
ec3379c98446e5b91432539ddabf21b1c2bf2f3f
refs/heads/master
2021-01-10T20:32:27.555899
2013-07-19T04:30:12
2013-07-19T04:30:12
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# Key/value bridge between the SLAM code and a stream server.  Python 2
# (print statements in the demo loop).
from streamclient import StreamClient
import json
import re

class keyValueInterface(StreamClient):
    """Thin wrapper over StreamClient that namespaces the keys used by the
    SLAM pipeline (place-cell positions, spikes, position, quit flag)."""

    def __init__(self, host, port, namespace="slam"):
        super(keyValueInterface, self).__init__(host, port)
        self.namespace = namespace
        # Pre-compute the fully-qualified key names once.
        self.placeCellPositionKey = self.namespace+"/placeCellPos"
        self.placeCellStatusKey = self.namespace+"/spikes"
        self.positionKey = self.namespace+"/position"
        self.quitKey = self.namespace+"/quit"

    def getQuitCmd(self):
        # Truthiness of whatever the server stored under the quit key.
        return bool(self.get(self.quitKey))

    def setQuitCmd(self, quitCmd):
        self.set(self.quitKey, json.dumps(bool(quitCmd)))

    def setPosition(self, posX, posY):
        # Y axis is flipped (1 - posY) before publishing.
        self.set(self.positionKey, "X=%f Y=%f" % ( posX, 1 - posY ))
        #print "Set Position X=%f Y%f" % (posX, posY)

    def setPlaceCellPositions(self, positionMatrix):
        #print "Set place cell positions ", positionMatrix
        self.set(self.placeCellPositionKey, json.dumps(positionMatrix))

    def setPlaceCellStatus(self, placeCellStatusRaw):
        placeCellStatus = json.dumps(placeCellStatusRaw)
        #print "Converted placeCellStatusRaw to ", placeCellStatus
        self.set(self.placeCellStatusKey, placeCellStatus)

# Demo/manual test: publish a fixed velocity, then emit synthetic spike
# trains every half second.
if __name__ == "__main__":
    import time
    k = keyValueInterface("10.1.95.82", 21567, "slam")
    k.set('slam/velocity', 'dx=0.1 dy=0.000')
    print "set velocity"
    for i in range(1000000):
        time.sleep(0.5)
        j = 0
        # Every 10th iteration emits 1 or 2 synthetic spikes.
        if i % 10 == 0:
            if i % 20 == 0:
                j = 1
            else:
                j = 2
        spikes = [ fk * i / 10 for fk in range(j) ]
        t = i % 10 + 1
        spikes = json.dumps(spikes)
        print spikes
        k.set('slam/spikes', spikes)
UTF-8
Python
false
false
2,013
17,927,193,509,039
db8b17be6f719c2f0303f0a7c05ed02119053302
edefaa8f194215ce2c2062334ed9491b34edd39d
/bin/master.py
eda9e7a33477c8aa985a748f659f093c55ed8348
[ "Apache-2.0" ]
permissive
nl5887/upscale
https://github.com/nl5887/upscale
f0a697dba1f7ff82b2e5f8fbf38044b26c98588e
efebc08af3355f5ad09a28aa41f99fb734386e6e
refs/heads/master
2020-05-17T02:31:56.887538
2013-06-21T19:47:53
2013-06-21T19:47:53
10,342,081
2
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# run queue, e.g. start / shutdown / balance import zmq import threading import time import sys import os from threading import Thread from Queue import Queue from apscheduler.scheduler import Scheduler POSSIBLE_TOPDIR = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]), os.pardir, os.pardir)) if os.path.exists(os.path.join(POSSIBLE_TOPDIR, 'upscale', '__init__.py')): sys.path.insert(0, POSSIBLE_TOPDIR) from upscale.master import balancer from upscale.utils.rpc import RemoteClient #from upscale.utils.decorators import periodic_task, every, adecorator, Dec #from upscale.utils.decorators import periodic_task, every, adecorator, Dec from upscale import log as logging LOG = logging.getLogger('upscale.master') class Tasks(RemoteClient): pass class Worker(RemoteClient): pass def queue(f): """ decorator function that will add function to queue instead of executing them directly """ def wrapper(*args, **kwargs): q.put((f, args, kwargs)) return wrapper class Master(object): def __init__(self): self.scheduler = Scheduler() self.scheduler.configure({'daemonic': True}) self.scheduler.add_interval_job(self._balance, seconds=60) self.scheduler.start() pass def _balance(self): def wrapper(): balancer.rebalance() self.reload_all() q.put((wrapper, [], {})) # reconfigure haproxy def reload_all(self): from upscale.utils.common import get_hosts for host in get_hosts(): print ("Reloading host {0}.".format(host.private_dns_name)) with Tasks("tcp://{0}:10000/".format(host.private_dns_name)) as h: # should run async and wait for all results to finish h.reload() # start host @queue def start(self, namespace, application): from upscale.master.balancer import get_containers print namespace, application, (hosts, containers) = get_containers() # also weighted hosts, so one in static host, one on spot instance min_host = None for host in containers: if (not min_host or len(containers[host])<len(containers[min_host])): # check if it already contains project min_host_applications = 
set([(b.split('_')[0], b.split('_')[1]) for b in containers[host] if len(b.split('_'))==3]) if ((namespace, application) in min_host_applications): continue min_host=host if not min_host: raise Exception('No host available') print 'Starting on host {0}.'.format(min_host) # start container on min host # check minhost with Worker("tcp://{0}:10000/".format(hosts[min_host])) as h: #h.start(namespace, application).get(timeout=5) print ('Starting new container') h.start(namespace, application) self.reload_all() # health checks, does namespace, application exist #enqueue(wrapper, ) return (True) @queue def destroy(self, namespace, website): # get all containers for project and destroy them print namespace, application, (hosts, containers) = get_containers() for host in containers: for container in containers[host]: pass @queue def upgrade(self, namespace, website): # rolling upgrade, first start new instances with new version, # then shutdown old ones # get containers and host of old version # start new containers with new version # shutdown old versions pass def worker(): """ Worker runs a queue of operations on the upscale cluster. """ while True: (func, args, kwargs) = q.get() print func try: func(*args, **kwargs) except Exception, e: print e logging.exception('Worker') from upscale.utils.rpc import Server import time import sys import traceback from upscale.worker.worker import Worker q = Queue() t = Thread(target=worker) t.daemon = True t.start() if __name__ == '__main__': from upscale.worker import tasks with Server("tcp://0.0.0.0:5867", {'Master': Master()}) as s: s.run()
UTF-8
Python
false
false
2,013
11,407,433,142,582
9cc239e9bf983f20276478a7a06a5dc6fc56f232
165635bbacb7d5d1ff9f5af1c2d99f5c5e4266ec
/internals/levelbuilder/tilesets/field.py
39e980e128dfbb31b9fc4296539b3ceb09be32fd
[]
no_license
imclab/Legend-of-Adventure
https://github.com/imclab/Legend-of-Adventure
f414d6fadc4cb6d09a93eeea8f7e3a253ce6baf4
dacfc4a36dd2a0497690bd870f6259661a53c6b0
refs/heads/master
2020-12-28T20:19:33.033560
2013-08-26T03:39:00
2013-08-26T03:39:00
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# Auto-tiling lookup table for the "field" tileset.
#
# Each key is a 2x2 neighbourhood of terrain levels (top-left, top-right,
# bottom-left, bottom-right); each value is the index of the transition
# tile to draw.  The table is five copies of one 15-entry transition
# pattern — one copy per adjacent pair of terrain levels — so it is
# generated instead of written out by hand.

def _build_tileset():
    """Expand the shared 15-tile transition pattern for every level pair."""
    # Corner pattern: 1 marks the higher terrain level, 0 the lower one.
    pattern = [
        (1, 1, 1, 0), (1, 1, 0, 0), (1, 1, 0, 1), (1, 0, 0, 0), (0, 1, 0, 0),
        (1, 0, 1, 0), (0, 0, 0, 0), (0, 1, 0, 1), (0, 0, 1, 0), (0, 0, 0, 1),
        (1, 0, 1, 1), (0, 0, 1, 1), (0, 1, 1, 1), (0, 1, 1, 0), (1, 0, 0, 1),
    ]
    # (higher level, lower level, base tile index) for each 15-tile band.
    bands = [(5, 4, 0), (6, 5, 15), (7, 6, 30), (4, 3, 45), (8, 7, 60)]
    tileset = {}
    for high, low, base in bands:
        for offset, corners in enumerate(pattern):
            key = tuple(high if bit else low for bit in corners)
            tileset[key] = base + offset
    return tileset

TILESET = _build_tileset()
UTF-8
Python
false
false
2,013
9,929,964,390,989
9745f65b196be1f0b2d92301568d048da13dee23
52e7d42fb2a21235fa6df0a6e5ac391128b30593
/gui/system/nav.py
fa9311e635ac11de637929680c7d3e709b338aa6
[]
no_license
jceel/freenas
https://github.com/jceel/freenas
ceef3a4705abdfd85c7cde43bbe480e9d186c6e7
6735c07be236e03f8a6a3f0dfddf9fa2232fc078
refs/heads/master
2021-01-17T22:45:07.641335
2014-09-15T22:03:17
2014-09-15T22:03:17
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# Navigation tree entries for the System section of the FreeNAS admin GUI.
from freenasUI.freeadmin.tree import TreeNode
from django.utils.translation import ugettext_lazy as _

# Node gnames excluded from automatic tree generation (they are declared
# explicitly below or hidden from this menu).
BLACKLIST = [
    'Advanced',
    'Email',
    'NTPServer',
    'Settings',
    'SSL',
    'SystemDataset',
    'Registration',
]
# Section metadata: display name, icon, and sort position.
NAME = _('System')
ICON = u'SystemIcon'
ORDER = 1


class Advanced(TreeNode):
    # "Advanced" settings page; negative order floats it near the top.
    gname = 'Advanced'
    name = _(u'Advanced')
    icon = u"SettingsIcon"
    type = 'opensystem'
    order = -90


class Email(TreeNode):
    gname = 'Email'
    name = _(u'Email')
    icon = 'EmailIcon'
    type = 'opensystem'
    order = -85


class General(TreeNode):
    gname = 'General'
    name = _(u'General')
    icon = u"SettingsIcon"
    type = 'opensystem'
    order = -95


class Info(TreeNode):
    # System information page; lowest order, shown first.
    gname = 'SysInfo'
    name = _(u'Information')
    icon = u"InfoIcon"
    type = 'opensystem'
    order = -100


class SystemDataset(TreeNode):
    gname = 'SystemDataset'
    name = _(u'System Dataset')
    icon = u"SysDatasetIcon"
    type = 'opensystem'
    order = -80


class TunableView(TreeNode):
    # Attached under the existing system.Tunable node rather than the root.
    gname = 'View'
    type = 'opensystem'
    append_to = 'system.Tunable'
UTF-8
Python
false
false
2,014
7,722,351,213,657
f2b7fb4d83725538e5052f72590094b8cc5be0ce
a84d3e3393a3056a5d96f499994f2394bf617ce5
/korovic/__main__.py
c20da37f4d96a962fb956b981fd7e19435c2c7f9
[]
no_license
lordmauve/korovic
https://github.com/lordmauve/korovic
d261fcfccdc6a4bdcfd060e7b55dd4e1bbfd7827
c92aa5954a6b3546dc0b9554dcc7faa155ecf051
refs/heads/master
2020-07-23T12:33:20.597600
2012-05-28T06:22:15
2012-05-28T06:22:15
207,557,528
1
0
null
null
null
null
null
null
null
null
null
null
null
null
null
"""Command-line entry point for the game."""
from .game import Game
from optparse import OptionParser


def main():
    """Parse the command line and launch the game at the requested level."""
    parser = OptionParser()
    parser.add_option('-l', '--level', type='int', help='Initial level', default=1)
    opts, args = parser.parse_args()

    game = Game()
    game.start(level=opts.level)


if __name__ == '__main__':
    main()
UTF-8
Python
false
false
2,012
19,310,172,967,123
23a060707b2a2cbb8d5c88d25a5379fc1cf3a1cd
0f761e1a715fade24d8c7fc2aa8817d7739e84c8
/exercises/02_7.py
de8b7f51d99b4498a2c6b483904e6e7b79e4ccbc
[ "LicenseRef-scancode-public-domain" ]
non_permissive
hillbs/calculus-made-easy-python
https://github.com/hillbs/calculus-made-easy-python
f7df651b274e7e3a786313ff1b42737f13ffaa49
dd9e2a55998cff8605f80e6b9231a145cb5ae841
refs/heads/master
2021-01-22T00:32:23.599820
2013-07-29T03:13:07
2013-07-29T03:13:07
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from sympy import *

""" PDF Page 32, Chapter V, Example 7
If lt and l0 be the lengths of a rod of iron at the temperatures t Centigrades. and 0 Centigrades. respectively, then
lt = l0 (1+0.000012t). Find the change of length of the rod per degree Centigrade.
"""

# Symbols: l0 = length at 0 C, lt = length at t C, t = temperature.
l0 = Symbol('l0')
lt = Symbol('lt')
t = Symbol('t')

# Linear thermal-expansion model for the rod.
expr = l0*(1+Float(0.000012)*t)

# Change of length per degree is d(lt)/dt.
result = expr.diff(t)
# 1.2e-5*l0
UTF-8
Python
false
false
2,013
2,516,850,840,303
d8d0d51400e03101fb9969f5c18763a4f343bcc8
d531ea0d025cbe18f70d78df4355b7901835685a
/helenus.py
fdd76f193a2658b7a4d2232d7e1294f131b5bb5b
[]
no_license
hongweiwang/helenus
https://github.com/hongweiwang/helenus
2074164cd0e5ef9da2247ea03a76a0c7fdb0cca0
26d18e044c940975ac7aa842f7cd629f9fc4adb0
refs/heads/master
2016-09-02T03:15:00.870523
2013-12-18T19:36:22
2013-12-18T19:36:22
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# Benchmark client that shards keys across Cassandra nodes with one-level
# replication (each key lives on its hash node and the next one).
# Python 2 only (print statements, md5 module, integer division).
from cassandra import ConsistencyLevel
from cassandra.cluster import Cluster
from cassandra.query import SimpleStatement
import random, time, md5, threading, sys, socket
from cassandraNode import CassandraNode

'''
if len(sys.argv) < 4:
    print 'usage: python helenus.py [log] [1|2] [nqueries]'
    sys.exit(1)
host = socket.gethostname()
host = host[0:host.find('.')]
log = host + '_' + sys.argv[1]
option = sys.argv[2]
nqueries = int(sys.argv[3])
f = open(log,'w')
'''

class Helenus():
    """Client-side shard layer over a list of Cassandra nodes.

    Keys are md5-hashed into [0, 100 * num_nodes) and owned by node
    hash/100 plus the following node (wrapping), so every key is stored
    twice.  `f` is a file-like object that per-query latencies (ms) are
    appended to.
    """

    def __init__(self, IPs, f):
        # One CassandraNode per IP; self.max sizes the hash ring.
        self.nodes = []
        self.max = 100 * len(IPs)
        self.f = f
        for ip in IPs:
            machines = [ip]
            node = CassandraNode(machines)
            self.nodes.append(node)

    def create_keyspace(self, keyspace):
        # Broadcast to every node.
        for node in self.nodes:
            node.create_keyspace(keyspace)

    def set_keyspace(self, keyspace):
        for node in self.nodes:
            node.set_keyspace(keyspace)

    def create_table(self, table):
        for node in self.nodes:
            node.create_table(table)

    def drop_keyspace(self, keyspace):
        for node in self.nodes:
            node.drop_keyspace(keyspace)

    def get_nodes(self, key):
        """Return (primary, replica) node indices for `key`."""
        number = self.hash(key)
        # print number
        node_index = number / 100
        next_node_index = node_index + 1
        # Replica wraps around to node 0 from the last node.
        if node_index == len(self.nodes) - 1:
            next_node_index = 0
        # print 'node_index: ' + str(node_index)
        # print 'next_node_index: ' + str(next_node_index)
        return node_index, next_node_index

    def insert(self, table, key, text):
        # Write to both the primary and the replica node.
        node_index, next_node_index = self.get_nodes(key)
        self.nodes[node_index].insert(table, key, text)
        self.nodes[next_node_index].insert(table, key, text)

    def query_one_node(self, table, key):
        """Query one of the two owning nodes, picked at random."""
        #print 'query one node: ' + key[-6:]
        node_index, next_node_index = self.get_nodes(key)
        r = random.randint(0, 1)
        if r == 0:
            self.do_query(node_index, table, key)
        else:
            self.do_query(next_node_index, table, key)
        #print 'end query one node: ' + key[-6:]

    def query_two_node(self, table, key):
        """Query both owning nodes concurrently and wait for both."""
        #print 'query two node: ' + key[-6:]
        node_index, next_node_index = self.get_nodes(key)
        thread1 = threading.Thread(target=self.do_query, args=(node_index, table, key))
        thread2 = threading.Thread(target=self.do_query, args=(next_node_index, table, key))
        thread1.start()
        thread2.start()
        thread1.join()
        thread2.join()
        #print 'end query two node: ' + key[-6:]

    def do_query(self, node_index, table, key):
        # Time one query and append the latency in ms to the log file.
        start = time.time()
        self.nodes[node_index].query(table, key)
        end = time.time()
        elaps = (end - start) * 1000
        #print 'time: ' + str(elaps) + ' ms'
        self.f.write(str(elaps) + '\n')

    def show(self, table):
        for node in self.nodes:
            node.show(table)

    def show_count(self, table):
        for node in self.nodes:
            node.show_count(table)

    def hash(self, str):
        # md5 digest of the key reduced into the ring [0, self.max).
        # NOTE(review): parameter shadows the builtin `str`.
        digest = md5.new(str).hexdigest()
        number = int(digest, 16)
        return number % self.max

def test():
    # NOTE(review): Helenus.__init__ requires a second `f` argument, so
    # this one-argument call raises TypeError as written — confirm whether
    # a log file handle should be passed here.
    IPs = ['155.98.39.126', '155.98.39.57', '155.98.39.93', '155.98.39.142']
    # IPs = ['155.98.39.126']
    helenus = Helenus(IPs)
    keyspace = 'mykeyspace'
    #helenus.drop_keyspace(keyspace)
    #helenus.create_keyspace(keyspace)
    helenus.set_keyspace(keyspace)
    table = 'mytable'
    #helenus.create_table(table)
    key_size = 40
    value_size = 1000
    value = 'a' * value_size
    #for i in range(100000):
    #    key = 'a' * 35 + str(i).zfill(5)
    #    helenus.insert(table, key, value)
    #print 'insert complete!!!'
    # helenus.insert(table, 'mykey', 'myvalue')
    # helenus.query_one_node(table, 'mykey')
    # helenus.query_two_node(table, 'mykey')
    helenus.show_count(table)

def test_query(option):
    """Issue one random-key query against one node ('1') or both ('2')."""
    # NOTE(review): same missing-`f` issue as in test() above.
    IPs = ['155.98.39.126', '155.98.39.57', '155.98.39.93', '155.98.39.142']
    helenus = Helenus(IPs)
    keyspace = 'mykeyspace'
    helenus.set_keyspace(keyspace)
    table = 'mytable'
    key_list = []
    for i in range(1):
        rnum = random.randint(0, 250000)
        key = 'a' * 35 + str(rnum).zfill(5)
        key_list.append(key)
    if option == '1':
        for key in key_list:
            helenus.query_one_node(table, key)
    if option == '2':
        for key in key_list:
            helenus.query_two_node(table, key)

#test_query('2')
#f.close()
UTF-8
Python
false
false
2,013
15,092,515,090,848
6011a863ae07a63c2e708bf2f2abbc920a4d6f7a
05ef1951e7e8bb91e72903805e1f9c8bfdb441e7
/dashdb/views.py
8c4603a5e15aa3537d7ef1d523b57387d84fc1b1
[]
no_license
imagreenplant/Scrum-Dashboard
https://github.com/imagreenplant/Scrum-Dashboard
fc0556e2f8255f0ab31d625e44659c34d1471549
895bdbf39ddb4532f1072504aafa82c72c1903c5
refs/heads/master
2016-09-05T17:47:08.224948
2013-10-07T03:41:40
2013-10-07T03:41:40
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# Create your views here.
import dashdb.models


class ConfigViews:
    """Query helpers for the dashboard's configuration screens."""

    def getBacklogsForTeam(self, given_team):
        """All backlogs belonging to the named team."""
        return dashdb.models.Backlog.objects.filter(team__name=given_team)

    def getMainBacklogsForTeam(self, given_team):
        """The team's backlogs, excluding supplemental ones."""
        team_backlogs = dashdb.models.Backlog.objects.filter(team__name=given_team)
        return team_backlogs.filter(supplemental=False)

    def getIterationsForBacklog(self, given_backlog):
        """Iterations attached to the given backlog."""
        return dashdb.models.Iteration.objects.filter(backlog__backlog=given_backlog)

    def getAllTeams(self):
        """Every team in the database."""
        return dashdb.models.Team.objects.all()
UTF-8
Python
false
false
2,013
16,724,602,681,566
4d87c8190be4e821b32b9ea253ac68b7f8fde764
391785b56dfe18bf8c8f6c6d3a4be1b3cfa004de
/class.py
cc5002d8167b1a969da2696b6c7cf46bb47d910b
[]
no_license
sammanthp007/trial
https://github.com/sammanthp007/trial
eb1eb29202930b20fe1b408c57688ab6f6682868
136ecb44728d9d13dbb0310bcbb812cdb4f56d7a
refs/heads/master
2016-08-07T01:03:19.113061
2014-10-25T21:01:27
2014-10-25T21:01:27
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# Tiny Python 2 practice script: store a string and print it.
sport = "soccer(but actually football)"
print "my favorite sport is", sport
UTF-8
Python
false
false
2,014
14,826,227,108,647
172ba2658b71f10418c298bb54fc1b0669741c79
b5e1d508d0a0bb538c5c1e09f7ee4a78a093792f
/quickstart.py~
c2b2799d99aff7d59bfca5c50b3643b47c2ed4ca
[]
no_license
harishv93/OnlineStorageManager
https://github.com/harishv93/OnlineStorageManager
2ee9ea33b278771c93b60de86e1a9fcc170d3256
09e6fd989f679c8ecf668ae80f16e813f3d51e31
refs/heads/master
2020-05-19T09:38:38.003570
2013-01-30T11:21:50
2013-01-30T11:21:50
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/python import httplib2 import pprint from apiclient.discovery import build from apiclient.http import MediaFileUpload from oauth2client.client import OAuth2WebServerFlow CLIENT_ID = '988728566209.apps.googleusercontent.com' CLIENT_SECRET = 'CwblTNS_mS6gCmwyYLrJ6uOx' OAUTH_SCOPE = 'https://www.googleapis.com/oauth/drive' REDIRECT_URI = 'urn:ietf:wg:oauth:2.0:oob' FILENAME = 'document.txt' flow = OAuth2WebServerFlow(CLIENT_ID,CLIENT_SECRET,OAUTH_SCOPE,REDIRECT_URI) authorize_url = flow.step1_get_authorize_url() print 'Go to the following link in your browser: ' + authorize_url code = raw_input('Enter Verification Code: ').strip() credentials = flow.step2_exchange(code) http = httplib2.Http() http = credentials.authorize(http) drive_service = build('drive','v2',http=http) media_body = MediaFileUpload(FILENAME,mimetype='text/plain',resumable=True) body = { 'title' : 'My document', 'description' : 'A test document', 'mimeType' : 'text/plain' } file = drive_service.files().insert(body=body,media_body=media_body).execute() pprint.pprint(file)
UTF-8
Python
false
false
2,013
515,396,086,374
89a61899b031aa6928e04d5d5d40e9df360ef645
93720fa8240ed31835d53480a1db31519e5f22ea
/src/test_scripts/syntheticTest/__init__.py
64568c49870abbe158b9a0f7453051c40512fcc9
[]
no_license
riccitensor/contest-py
https://github.com/riccitensor/contest-py
788075916bbc6d78c8280977d542f78446151bef
c32f0321bd5819df9658cbeeb368aa70f3245af2
refs/heads/master
2021-01-25T08:55:23.822311
2012-06-11T19:44:05
2012-06-11T19:44:05
9,649,980
1
0
null
null
null
null
null
null
null
null
null
null
null
null
null
'''Test package exercising Cassandra features.'''
UTF-8
Python
false
false
2,012
3,917,010,202,083
6d3a5ed918625a62cbf113ada0a837de40e42f19
d7d6bed7dd23fd0386f369c8a51725977dbe0c4a
/questiontree/crawler/wikiSpider/wikiSpider/spiders/spider.py
d21bc632bfb39ed937a2255dab95a987628785ab
[]
no_license
clement91190/coheal-questiontree
https://github.com/clement91190/coheal-questiontree
881c9372dde747b76a86c23679aa55de24e00915
a21f326f3086c93d9f6e9206fcee1fa70393062e
refs/heads/master
2020-06-05T10:47:52.489659
2014-01-02T18:11:37
2014-01-02T18:11:37
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# -*-coding:Utf-8 -* from scrapy.contrib.spiders import CrawlSpider, Rule from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor from scrapy.selector import HtmlXPathSelector from scrapy.http import Request import nltk class wikiSpider(CrawlSpider): name = "wikiSpider" allowed_domains = ['wikipedia.org'] symptome_list = ['cyanose', 'attaque de panique'] #start_urls = ["http://fr.wikipedia.org/wiki/Medecine"] start_urls = [] for s in symptome_list: start_urls.append('https://www.google.fr/search?sclient=psy&hl=fr&source=hp&q=' + s.encode('UTF-8') + '&btnG=Rechercher' +'#q='+ s.encode('UTF-8')) """ rules = ( Rule(SgmlLinkExtractor(restrict_xpaths=('//div[@class="mw-body"]//a/@href'))), Rule(SgmlLinkExtractor( allow=("http://fr.wikipedia.org/wiki/",)), callback='parse_item'), ) """ rules = ( Rule(SgmlLinkExtractor(restrict_xpaths=('//div[@id="center_col"]/li[@class="g"]/h3[@class="r"]/a/@href'))), Rule(SgmlLinkExtractor( allow=("",)), callback='parse_item'), ) def parse_item(self, response): hxs = HtmlXPathSelector(response) items = hxs.select('//div[@id="bodyContent"]//text()').extract() with open('data.txt', 'w+') as fich: for i,v in enumerate(items): #items[i] = unicodedata.normalize('NFKC', v) tag = v.encode('UTF-8') tokens = nltk.word_tokenize(tag) for t in tokens: if len(t) > 6: fich.write(t) fich.write('\n') #print "chose vues ici{}".format(items)
UTF-8
Python
false
false
2,014
11,690,901,003,124
1a76763c3cc0b94456da40ef09cfcc0082a42b1f
3ff5aab7d6b70715710d81d468e29b9f402ec791
/functions.py
007e72b0af4a25d6572862229045a2423a7c8d5c
[]
no_license
ben18785/malaria-captureMonteCarlo
https://github.com/ben18785/malaria-captureMonteCarlo
5de910b2884bb179c69dce55dbd71d7aa25f3fab
f2cc22a47505a0e3f2c5699e6358771d75cf5eb4
refs/heads/master
2020-06-08T06:45:48.424942
2014-10-27T14:48:39
2014-10-27T14:48:39
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from IBM_functions import basic import random as random import numpy as np import matplotlib.pyplot as plt # A function which puts male and female mosquitoes at random swarms and houses respectively throughout the domain def initialise(aArea,numMaleMosquitoes,numFemaleMosquitoes,vPInParameters,vPMoveParameters,cPDie): # Get PIn parameters cPInHeterogeneityIndicator = vPInParameters[0] cPInMaleAll = vPInParameters[1] cPInMaleBetaA = vPInParameters[2] cPInMaleBetaB = vPInParameters[3] cPInFemaleAll = vPInParameters[4] cPInFemaleBetaA = vPInParameters[5] cPInFemaleBetaB = vPInParameters[6] # Get the PMove parameters cPMoveHeterogeneityIndicator = vPMoveParameters[0] cPMoveMaleAll = vPMoveParameters[1] cPMoveMaleBetaA = vPMoveParameters[2] cPMoveMaleBetaB = vPMoveParameters[3] cPMoveFemaleAll = vPMoveParameters[4] cPMoveFemaleBetaA = vPMoveParameters[5] cPMoveFemaleBetaB = vPMoveParameters[6] # First sort males cNumMaleMosquitoes = numMaleMosquitoes cNumSwarms= aArea.getNumSwarms() vSwarmSequence = range(0,cNumSwarms) aSwarmList = aArea.getSwarmGroup().getTargetList() aMaleMosquitoList = [] # Put male mosquitoes in while cNumMaleMosquitoes > 0: cRandSwarm = random.choice(vSwarmSequence) if cPInHeterogeneityIndicator == 0 and cPMoveHeterogeneityIndicator == 0: aPInMale = cPInMaleAll aPMoveMale = cPMoveMaleAll elif cPInHeterogeneityIndicator == 0: aPInMale = cPInMaleAll aPMoveMale = random.betavariate(cPMoveMaleBetaA,cPMoveMaleBetaB) elif cPMoveHeterogeneityIndicator == 0: aPInMale = random.betavariate(cPInMaleBetaA,cPInMaleBetaB) aPMoveMale = cPMoveMaleAll else: aPInMale = random.betavariate(cPInMaleBetaA,cPInMaleBetaB) aPMoveMale = random.betavariate(cPMoveMaleBetaA,cPMoveMaleBetaB) aMaleMosquitoList.append(basic.maleMosquito(aSwarmList[cRandSwarm],aPInMale,aPMoveMale,cPDie)) # Move the mosquito inside in relation to probability c_randInsideSwarm = random.random() if c_randInsideSwarm < aMaleMosquitoList[-1].getPIn(): aMaleMosquitoList[-1].moveInside() 
cNumMaleMosquitoes-=1 # Now sort females cNumFemaleMosquitoes = numFemaleMosquitoes cNumHouses = aArea.getNumHouses() vHouseSequence = range(0,cNumHouses) aHouseList = aArea.getHouseGroup().getTargetList() aFemaleMosquitoList = [] # Put female mosquitoes in while cNumFemaleMosquitoes > 0: cRandHouse = random.choice(vHouseSequence) if cPInHeterogeneityIndicator == 0 and cPMoveHeterogeneityIndicator == 0: aPInFemale = cPInFemaleAll aPMoveFemale = cPMoveFemaleAll elif cPInHeterogeneityIndicator == 0: aPInFemale = cPInFemaleAll aPMoveFemale = random.betavariate(cPMoveFemaleBetaA,cPMoveFemaleBetaB) elif cPMoveHeterogeneityIndicator == 0: aPInFemale = random.betavariate(cPInFemaleBetaA,cPInFemaleBetaB) aPMoveFemale = cPMoveFemaleAll else: aPInFemale = random.betavariate(cPInFemaleBetaA,cPInFemaleBetaB) aPMoveFemale = random.betavariate(cPMoveFemaleBetaA,cPMoveFemaleBetaB) aFemaleMosquitoList.append(basic.femaleMosquito(aHouseList[cRandHouse],aPInFemale,aPMoveFemale,cPDie)) # Move the mosquito inside in relation to its probability PIn cRandInsideHouse= random.random() if cRandInsideHouse < aFemaleMosquitoList[-1].getPIn(): aFemaleMosquitoList[-1].moveInside() cNumFemaleMosquitoes-=1 # A function which allows the mosquitoes to move around probabilistically def evolveSystem(aArea,cDays,vReleaseParameters,vPInParameters,vPMoveParameters,aPDie,vSampleParameters): cNumberMaleReleases = vReleaseParameters[0] cReleaseMaleStartTime = vReleaseParameters[1] cReleaseMaleTimeGap = vReleaseParameters[2] cReleaseMaleMosquitoNumber = vReleaseParameters[3] cNumberFemaleReleases = vReleaseParameters[4] cReleaseFemaleStartTime = vReleaseParameters[5] cReleaseFemaleTimeGap = vReleaseParameters[6] cReleaseFemaleMosquitoNumber = vReleaseParameters[7] cIntroductionNew = vReleaseParameters[8] vReleaseMaleTimes = releaseTimeGenerator(cNumberMaleReleases,cReleaseMaleStartTime,cReleaseMaleTimeGap) vReleaseFemaleTimes = 
releaseTimeGenerator(cNumberFemaleReleases,cReleaseFemaleStartTime,cReleaseFemaleTimeGap) cMaleReleaseIndexCounter = 0 cFemaleReleaseIndexCounter = 0 fig = plt.figure() for t in range(0,cDays): print(t) # # print(aArea.getNumMosquitoes()) # print(sum(aArea.getNumListMarkedTotalFemales())) vMale = aArea.getMaleMosquitoList() vFemale = aArea.getFemaleMosquitoList() MoveAndInsideMosquitoes(vMale,aArea,1,vPInParameters,vPMoveParameters,aPDie) MoveAndInsideMosquitoes(vFemale,aArea,0,vPInParameters,vPMoveParameters,aPDie) vFemales = aArea.getNumListInsideFemales() vMales = aArea.getNumListInsideMales() cMaleReleaseIndexCounter += releaseMosquitoes(t,vReleaseMaleTimes,cMaleReleaseIndexCounter,cReleaseMaleMosquitoNumber,1,aArea,cIntroductionNew,vPInParameters,vPMoveParameters,aPDie) cFemaleReleaseIndexCounter += releaseMosquitoes(t,vReleaseFemaleTimes,cFemaleReleaseIndexCounter,cReleaseFemaleMosquitoNumber,0,aArea,cIntroductionNew,vPInParameters,vPMoveParameters,aPDie) cSampleMaleTime = vSampleParameters[0] cSampleFemaleTime = vSampleParameters[1] if t == cSampleMaleTime: print("Sampling males") [cCountMarked,cCountUnmarked] = sampleTargets(aArea,1,vSampleParameters) print(cCountMarked,cCountUnmarked) print(lincolnEstimate(cCountMarked,cCountUnmarked,cReleaseMaleMosquitoNumber)) if t == cSampleFemaleTime: print("Sampling females") [cCountMarked,cCountUnmarked] = sampleTargets(aArea,0,vSampleParameters) print(cCountMarked,cCountUnmarked) print(lincolnEstimate(cCountMarked,cCountUnmarked,cReleaseMaleMosquitoNumber)) vColourMales = aArea.getMarkedIndicatorMales() vColourFemales = aArea.getMarkedIndicatorFemales() ax1 = fig.add_subplot(211) ax1.scatter(aArea.getHouseLocations()[:,0],aArea.getHouseLocations()[:,1],s=5*vFemales,c=vColourFemales,label='houses') plt.legend(loc='upper left') ax1.hold(False) ax2 = fig.add_subplot(212) ax2.scatter(aArea.getSwarmLocations()[:,0],aArea.getSwarmLocations()[:,1],s=5*vMales,c=vColourMales,label = 'swarms',vmin=0,vmax = 1) ax2.hold(False) 
plt.legend(loc='upper left') plt.draw() fig.show() def MoveAndInsideMosquitoes(vMosquitoList,aArea,cSex,vPInParameters,vPMoveParameters,aPDie): k = 1 cNumMoved = 0 for mosquitoes in vMosquitoList: # First see whether or not mosquito dies cRandDie = random.random() if cRandDie < mosquitoes.getPDie(): mosquitoes.die(aArea,vPInParameters,vPMoveParameters,aPDie) else: # If not dead move # Whether or not not move mosquito cMoveRand = random.random() if cMoveRand < mosquitoes.getPMove(): moveMosquito(mosquitoes,aArea,cSex) cNumMoved += 1 # Whether or not to move the mosquito inside cInRand = random.random() if cInRand < mosquitoes.getPIn(): mosquitoes.moveInside() else: mosquitoes.moveOutside() return cNumMoved def moveMosquito(mosquitoes,aArea,cSex): # Get a list of all relevant targets if cSex == 1: vTargetList = list(aArea.getSwarmList()) else: vTargetList = list(aArea.getHouseList()) # Remove the current target from this list vTargetList.remove(mosquitoes.getTarget()) vMovePropensities = [] aLocation = mosquitoes.getLocation() for targets in vTargetList: bLocation = targets.getLocation() vMovePropensities.append(1/squareDistance(aLocation,bLocation)) # Normalise the propensities vMovePropensities = np.array(vMovePropensities)/sum(vMovePropensities) # Select a target at random targetSwitch = 0 cNumTargets = len(vMovePropensities) while targetSwitch == 0: cTargetRandIndex = random.randint(0,cNumTargets-1) cTargetRand = random.random() if cTargetRand < vMovePropensities[cTargetRandIndex]: mosquitoes.move(vTargetList[cTargetRandIndex]) targetSwitch = 1 def squareDistance(aLocation,bLocation): return (aLocation[0]-bLocation[0])**2 + (aLocation[1]-bLocation[1])**2 def releaseTimeGenerator(cNumReleases,cReleaseStartTime,cReleaseTimeGap): vReleaseTimes = [] cReleaseTimeTemp = cReleaseStartTime for i in range(0,cNumReleases): vReleaseTimes.append(cReleaseTimeTemp) cReleaseTimeTemp += cReleaseTimeGap return vReleaseTimes def 
releaseMosquitoes(t,vReleaseTimes,cReleaseIndexCounter,cReleaseMosquitoNumber,cSex,aArea,cIntroductionNew,vPInParameters,vPMoveParameters,aPDie): # If not a release time just return 0 if t > vReleaseTimes[-1]: return 0 if t != vReleaseTimes[cReleaseIndexCounter]: return 0 if cSex == 1: vTargets = aArea.getSwarmList() else: vTargets = aArea.getHouseList() cLenTargets = len(vTargets) # Find a target that has a sufficient number of mosquitoes if cIntroductionNew == 0: switchRelease = 0 while switchRelease == 0: cRandTargetIndex = random.randint(0,cLenTargets-1) if vTargets[cRandTargetIndex].getNumUnmarkedInside() > cReleaseMosquitoNumber: vUnmarkedInsideMosquitoList = vTargets[cRandTargetIndex].getUnmarkedMosquitoInsideList() switchRelease = 1 # Only want to release the correct number, no more vUnmarkedInsideMosquitoList = vUnmarkedInsideMosquitoList[0:cReleaseMosquitoNumber] for mosquitoes in vUnmarkedInsideMosquitoList: mosquitoes.mark() else: # Just release that number of marked mosquitoes into a random target location cRandTargetIndex = random.randint(0,cLenTargets-1) cPInHeterogeneityIndicator = vPInParameters[0] cPInMaleAll = vPInParameters[1] cPInMaleBetaA = vPInParameters[2] cPInMaleBetaB = vPInParameters[3] cPInFemaleAll = vPInParameters[4] cPInFemaleBetaA = vPInParameters[5] cPInFemaleBetaB = vPInParameters[6] # Get the PMove parameters cPMoveHeterogeneityIndicator = vPMoveParameters[0] cPMoveMaleAll = vPMoveParameters[1] cPMoveMaleBetaA = vPMoveParameters[2] cPMoveMaleBetaB = vPMoveParameters[3] cPMoveFemaleAll = vPMoveParameters[4] cPMoveFemaleBetaA = vPMoveParameters[5] cPMoveFemaleBetaB = vPMoveParameters[6] for i in range(0,cReleaseMosquitoNumber): if cSex == 1: if cPInHeterogeneityIndicator == 0: aPIn = cPInMaleAll else: aPIn = random.betavariate(cPInMaleBetaA,cPInMaleBetaB) if cPMoveHeterogeneityIndicator == 0: aPMove = cPMoveMaleAll else: aPMove = random.betavariate(cPMoveMaleBetaA,cPMoveMaleBetaB) aMosquito = 
basic.maleMosquito(vTargets[cRandTargetIndex],aPIn,aPMove,aPDie) aMosquito.mark() else: if cPInHeterogeneityIndicator == 0: aPIn = cPInFemaleAll else: aPIn = random.betavariate(cPInFemaleBetaA,cPInFemaleBetaB) if cPMoveHeterogeneityIndicator == 0: aPMove = cPMoveFemaleAll else: aPMove = random.betavariate(cPMoveFemaleBetaA,cPMoveFemaleBetaB) aMosquito = basic.femaleMosquito(vTargets[cRandTargetIndex],aPIn,aPMove,aPDie) aMosquito.mark() return 1 def sampleTargets(aArea,cSex,vSampleParameters): if cSex == 0: # Females - assume we know the location of all houses vTargetsSampled = aArea.getHouseList() else: cKnownSwarmsPercentage = vSampleParameters[2] vTargets = aArea.getSwarmList() # Assume that only a random fraction of male swarms are known cNumSwarms = len(vTargets) vTargetsShuffled = random.sample(vTargets,cNumSwarms) cNumKnownSwarms = int(cKnownSwarmsPercentage*cNumSwarms) vTargetsKnown = vTargetsShuffled[0:cNumKnownSwarms] vTargetsSampled = vTargetsKnown cDailyNumTargetsSampled = vSampleParameters[3] vTargetsSampled = vTargetsSampled[0:cDailyNumTargetsSampled] cCountMarked = 0 cCountUnmarked = 0 for targets in vTargetsSampled: cCountMarked += targets.getNumMarkedInside() cCountUnmarked += targets.getNumUnmarkedInside() return [cCountMarked,cCountUnmarked] def lincolnEstimate(cCountMarked,cCountUnmarked,cNumReleased): if cCountMarked == 0: print("No marked mosquitoes found") return -1 cCountTotal = cCountMarked + cCountUnmarked return cNumReleased*(cCountTotal/cCountMarked)
UTF-8
Python
false
false
2,014
18,047,452,594,593
7440a967f5cb1e017ef2be60e92de000b7bf3f78
6c7be3a7c642b1b26cf29c1b2ba3c1f39369e783
/sphericaltrig.py
ea9d31acede46ca6b7840787c2feeb51d65694bd
[ "LicenseRef-scancode-warranty-disclaimer" ]
non_permissive
AndrewSDFoster/AST4700
https://github.com/AndrewSDFoster/AST4700
8fe06196d8e39e4083ffd8e65e5021e838b126d1
d01e1cf5ef02c7f2149559e3acbf2e398ead9e7b
refs/heads/master
2020-12-30T10:36:57.842274
2014-02-28T17:40:23
2014-02-28T17:40:23
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/env python ''' dms2deg - converst from dms to deg hms2deg - converts from hms to deg deg2dms - converts from deg to dms deg2hms - converts from deg to hms AngSepReal - uses spherical trig to find angle between two points AngSepEucl - estimates angle between two points with a right triangle AngSepPole - estimates angle between two points with a polar triangle equ2ecl - converts equatorial to ecliptic coordinates ecl2gal - converts ecliptic to galactic coordinates gal2equ - converts galactic to equatorial coordinates equ2gal - converts equatorial to galactic coordinates gal2ecl - converts galactic to ecliptic coordinates ecl2equ - converts ecliptic to equatorial coordinates fuck horizon coordinates. Seriously. I'm not doing that shit. sphericalLawOfCosines - what the name suggests sphericalLawOfSines - ^ euclideanLawOfCosines - ^^ euclideanLawOfSines - ^^^ EquinoxToJ2000 - Converts from any equinox to J2000 EpochWithJ2000equinox - finds a nearby epoch with J2000 equinox B1950toJ2000 - converts from the B1950 equinox to the J2000 equinox refractionAnglehor - computes angle of atmospheric refraction trueAltitudehor - computes true altitude from apparent apparentAltitudehor - computes apparent altitude from true ''' import numpy as np def dms2deg(DEC): '''DEC of form [degree, arcminute, arcsecond], returns as decimal''' #distribute the sign of the first nonzero entry to the others dec = np.float64(DEC.copy()) if dec[0] == 0: if dec[1] < 0: dec[2] *= -1 elif dec[0] < 0: dec[1] *= -1 dec[2] *= -1 #check input if (not ((dec[0] <= 0 and dec[1] <= 0 and dec[2] <= 0) \ or (dec[0] >= 0 and dec[1] >= 0 and dec[2] >= 0))\ or dec[0] > 90 or dec[0] < -90 \ or dec[1] > 60 or dec[1] < -60 \ or dec[2] > 60 or dec[2] < -60): print("ERROR bad input for dms2deg, trying to continue anyway") return dec[0] + dec[1]/60. + dec[2]/3600. 
def hms2deg(ra): '''ra of form [hour, minute, second], returns as decimal''' #type conversion ra = np.float64(ra.copy()) #check input if (ra[0] < 0 or ra[0] > 24 \ or ra[1] < 0 or ra[1] > 60 \ or ra[2] < 0 or ra[2] > 60): print("ERROR bad input for hms2deg, trying to compute anyway") return (ra[0] + ra[1]/60. + ra[2]/3600.)*15 def deg2dms(c): '''deg is decimal degrees, converts to array of [degrees, minutes, seconds]''' deg = int( c) amn = np.abs(int( (c-deg)*60.)) asc = ((np.abs((c-deg)*60.))-amn)*60. if c < 0 and deg == 0: if amn == 0: asc *= -1 else: amn *= -1 return np.array([deg, amn, asc]) def deg2hms(c): '''c is decimal degrees, converts to an array of [hours, minutes, seconds]''' return deg2dms(c/15) def AngSepReal(ra1, dec1, ra2, dec2): ''' ra1 and ra2 are lists of the form [ hour, minute, second] dec1 and dec2 are lists of the form [degree, arcmin, arcsec] returns angle between them ''' #A is angle at pt 2, a is side length across from A (from pole to pt 1) #B is angle at pt 1, b is side length across from B (from pole to pt 2) #C is angle at pole, c is side length across from C (from pt 1 to pt 2) #Find angle C (difference of RA's) C = np.abs(ra1-ra2) #find sides a and b (90-dec) a = 90 - dec1 b = 90 - dec2 #spherical law of cosines c = sphericalLawOfCosines(a=a,b=b,c=None,C=C) return c def AngSepEucl(ra1, dec1, ra2, dec2): ''' ra1 and ra2 are lists of the form [ hour, minute, second] dec1 and dec2 are lists of the form [degree, arcmin, arcsec] returns angle between them estimated from euclidean right triangle ''' #convert to degrees RA1 = hms2deg( ra1) RA2 = hms2deg( ra2) DEC1 = dms2deg(dec1) DEC2 = dms2deg(dec2) #find avg dec, and the differences in dec and ra aDEC = (DEC1 + DEC2) / 2. dDEC = np.abs(DEC1 - DEC2) dRA = np.abs( RA1 - RA2) #find read "dist" in RA rdRA = dRA*np.cos(aDEC*np.pi/180.) 
#compute length of hypotenuse dist = np.sqrt(rdRA*rdRA + dDEC*dDEC) return deg2dms(dist) def AngSepPole(ra1, dec1, ra2, dec2): ''' ra1 and ra2 are lists of the form [ hour, minute, second] dec1 and dec2 are lists of the form [degree, arcmin, arcsec] returns angle between them as estimated by a euclidean polar triangle ''' #get degrees of each angle RA1 = hms2deg( ra1) RA2 = hms2deg( ra2) DEC1 = dms2deg(dec1) DEC2 = dms2deg(dec2) #A is angle at pt 2, a is side length across from A (from pole to pt 1) #B is angle at pt 1, b is side length across from B (from pole to pt 2) #C is angle at pole, c is side length across from C (from pt 1 to pt 2) #Find angle C (difference of RA's) C = deg2dms(np.abs(RA1-RA2)) #find sides a and b (90-dec) a = 90 - DEC1 b = 90 - DEC2 #switch to using south pole if it is closer if (a + b)/2 > 90: a = 180 - a b = 180 - b #euclidean law of cosines c = euclideanLawOfCosines(a=a, b=b, c=None, C=C) return deg2dms(c) def equ2ecl((alpha, delta)): '''accepts tuple of dms arrays, returns the same. converts hms RA and dms DEC into dms beta and lambda''' #define constants and get things into decimals/radians epsilon = dms2deg([23, 26, 21]) * np.pi/180 alpha = hms2deg(alpha) * np.pi/180 delta = dms2deg(delta) * np.pi/180 #calculate beta beta = np.arcsin(np.sin(delta)*np.cos(epsilon) - \ np.cos(delta)*np.sin(epsilon)*np.sin(alpha)) #calculate cos and sin of lmbda coslmbda = np.cos(delta)*np.cos(alpha)/np.cos(beta) sinlmbda = (np.sin(delta) - np.cos(epsilon)*np.sin(beta)) / \ (np.sin(epsilon)*np.cos(beta)) #use arctan2 to get the right quadrant lmbda = np.arctan2(sinlmbda, coslmbda) #bring it back to dms beta = deg2dms( beta * 180/np.pi) lmbda = deg2dms(lmbda * 180/np.pi) #return a tuple return (beta, lmbda) def equ2gal((alpha, delta)): '''accepts a tuple of dms arrays, returns the same. 
converts hms RA and dms DEC into dms b an l''' #define constants and get things into decimals/radians deltag = dms2deg([27,07,42]) * np.pi/180 alphag = hms2deg([12,51,26.3]) * np.pi/180 lnode = dms2deg([32,55,55]) * np.pi/180 delta = dms2deg(delta) * np.pi/180 alpha = hms2deg(alpha) * np.pi/180 #calculate b b = np.arcsin(np.sin(deltag)*np.sin(delta) + \ np.cos(deltag)*np.cos(delta)*np.cos(alpha-alphag)) #calculate cos and sin of l cosl = np.cos(delta)*np.sin(alpha-alphag)/np.cos(b) sinl = (np.sin(delta)-np.sin(deltag)*np.sin(b))/(np.cos(deltag)*np.cos(b)) #use arctan2 to get the right quadrant l = np.arctan2(sinl, cosl) #readd lnode back in l += lnode #bring it back to dms l = deg2dms(l * 180/np.pi) b = deg2dms(b * 180/np.pi) #return a tuple return (b,l) def ecl2equ((beta, lmbda)): '''accepts a tuple of dms arrays, returns the same. converts dms beta and lmbda to hms RA and dms DEC''' #define constants and get things into decimals/radians epsilon = dms2deg([23, 26, 21]) * np.pi/180 beta = dms2deg(beta) * np.pi/180 lmbda = dms2deg(lmbda) * np.pi/180 #calculate delta delta = np.arcsin(np.sin(beta)*np.cos(epsilon) + \ np.cos(beta)*np.sin(epsilon)*np.sin(lmbda)) #calculate cos and sin of alpha cosalpha = np.cos(lmbda)*np.cos(beta)/np.cos(delta) sinalpha = (np.cos(epsilon)*np.sin(delta) - np.sin(beta)) / \ (np.sin(epsilon)*np.cos(delta)) #use arctan2 to get the right quadrant alpha = np.arctan2(sinalpha, cosalpha) #bring it back to dms alpha = deg2hms(alpha * 180/np.pi) delta = deg2dms(delta * 180/np.pi) #return a tuple #return (alpha, delta) def gal2equ((b, l)): '''accepts a tuple of dms arrays, returns the same. 
converts dms b and l to hms RA and dms DEC''' #define constants and get things into decimals/radians deltag = dms2deg([27,07,42]) * np.pi/180 alphag = hms2deg([12,51,26.3]) * np.pi/180 lnode = dms2deg([32,55,55]) * np.pi/180 b = dms2deg(b) * np.pi/180 l = hms2deg(l) * np.pi/180 #calculate delta delta = np.arcsin(np.sin(deltag)*np.sin(b) + \ np.cos(deltag)*np.cos(b)*np.sin(l-lnode)) #calculate cos and sin of alpha cosalp = (np.sin(b)-np.sin(deltag)*np.sin(delta))/(np.cos(deltag)*np.cos(delta)) sinalp = np.cos(l-lnode)*np.cos(b)/np.cos(delta) #use arctan2 to get the right quadrant alpha = np.arctan2(sinalp, cosalp) #add alphag back in alpha += alphag #bring it back to dms alpha = deg2hms(alpha * 180/np.pi) delta = deg2dms(delta * 180/np.pi) #return a tuple return (alpha, delta) def ecl2gal((beta, lmbda)): '''accepts a tuple of dms arrays, returns the same. converts dms beta and lambda to dms b and l''' return equ2gal(ecl2equ((beta, lmbda))) def gal2ecl((b, l)): '''accepts a tuple of dms arrays, returns the same. converts dms b and l to dms beta and lambda''' return equ2ecl(gal2equ((b, l))) def sphericalLawOfCosines(a, b, c, C=None): '''Law of cosines, lowercase variables are side lengths (dms arrays) either c or C (opposite side/angle pair) is calculated from the other parameters. 
''' #if angle must be found if C == None: #do math, notice the conversion to/from radians a = dms2deg(a)*np.pi/180 b = dms2deg(b)*np.pi/180 c = dms2deg(c)*np.pi/180 C = np.arccos((np.cos(c) - np.cos(a)*np.cos(b))/np.sin(a)*np.sin(b)) return deg2dms(C*180/np.pi) #if side must be found if c == None: #do math, notice the conversion to/from radians a = dms2deg(a)*np.pi/180 b = dms2deg(b)*np.pi/180 C = dms2deg(C)*np.pi/180 c = np.arccos(np.cos(a)*np.cos(b) + np.sin(a)*np.sin(b)*np.cos(C)) return deg2dms(c*180/np.pi) #tell the user they fucked up else: print("Error, law of cosines invalid parameters") return np.array([0,0,0]) def sphericalLawOfSines(angle1, side1, angle2, side2=None): '''Law of sines. 4 possible inputs, angle1 and side1 must be dms arrays and either angle2 or side2 must also be dms arrays, with the other as None This function will determing the missing side/angle from the other three parameters ''' #if side2 must be found if side2 == None: #do math, notice the conversion to/from radians angle1 = dms2deg(angle1)*np.pi/180 angle2 = dms2deg(angle2)*np.pi/180 side1 = dms2deg( side2)*np.pi/180 side2 = np.arcsin(np.sin(angle2)*np.sin(side1)/np.sin(angle1))*180/np.pi return deg2dms(side2) #if angle2 must be found if angle2 == None: #do math, notice the conversion to/from radians angle1 = dms2deg(angle1)*np.pi/180 side1 = dms2deg( side1)*np.pi/180 side2 = dms2deg( side2)*np.pi/180 angle2 = np.arcsin(np.sin(side2)*np.sin(angle1)/np.sin(side1))*180/np.pi return deg2dms(angle2) #tell the user that they fucked up else: print('Error, law of sines overconstrained, returning 0') return np.array([0,0,0]) def euclideanLawOfCosines(a, b, c, C=None): ''' euclidean law of cosines parameters of the sides of a triangle and one angle, when given 3, the fourth is found. 
C and c are opposite each other ''' #if angle must be found if C == None: #do math (notice degree radian conversion) C = np.arccos((c*c - a*a - b*b)/(-2*a*b))*180/np.pi return deg2dms(C) #if side must be found if c == None: #do math (notice degree radian conversion) C = dms2deg(C)*np.pi/180 c = np.sqrt(a*a + b*b - 2*a*b*np.cos(C)) return c #tell the user that they fucked up else: print("invalid parameters for euclidean law of cosines") return 0 def euclideanLawOfSines(A, a, B, b = None): ''' euclidean Law of sines parameters of opposite side/angle pairs (A/a and B/b) from three values, the fourth is calculated ''' #if side must be found if b == None: #do math, notice radians/degrees conversions A = dms2deg(A)*np.pi/180 B = dms2deg(B)*np.pi/180 b = np.sin(B)*a/np.sin(A) return b #if angle must be found if B == None: #do math, notice radians/degrees conversions A = dms2deg(A)*np.pi/180 B = np.arcsin(b*np.sin(A)/a)*180/np.pi return deg2dms(B) #tell the user that they fucked up else: print("invalid parameters for euclidean law of sines") return 0 def EquinoxToJ2000(alpha, delta, pmA, pmD, date, BJD=False): '''converts Ephemeri from one time to J2000.0 alpha and delta are the location of the star at start (RA and dec) pmA and pmD are the proper motions in alpha and delta in arcsec/yr returns alpha and delta in J2000.0 Can also be done in BJD rather than years ''' #compute time for time standard if BJD: T = np.float64(date-2451545.0)/36525. year = (T*100)+2000.0 else: T = np.float64(date-2000.0)/100. year = date #Compute precession constants for time and get everything into radians M = (1.2812323*T + 0.0003879*T*T + 0.0000101*T*T*T)*np.pi/180. N = (0.5567530*T - 0.0001185*T*T - 0.0000116*T*T*T)*np.pi/180. 
alpha = hms2deg(alpha)*np.pi/180 delta = dms2deg(delta)*np.pi/180 #find the mean epoch for each time alpham = alpha - 0.5*(M + N*np.sin(alpha)*np.tan(delta)) deltam = delta - 0.5*N*np.cos(alpham) #find the new location of the star's old position alpha0 = alpha - M - N*np.sin(alpham)*np.tan(deltam) delta0 = delta - N*np.cos(alpham) #return to hms and dms delta0 = deg2dms(delta0*180/np.pi) alpha0 = deg2hms(alpha0*180/np.pi) #Account for proper motions (alphaf, deltaf) = EpochWithJ2000equinox(alpha0, delta0, pmA, pmD, year) #return return (alphaf, deltaf) def EpochWithJ2000equinox(alpha0, delta0, pmA, pmD, date, BJD=False): '''Uses J2000 Equinox and proper motions to find new locations of stars at a different epoch. Takes alpha0 and delta0 as the locations at epoch and equinox of J2000, and pmA, pmD, the proper motions in alpha and delta and calculates the starses positsdjolfjslifjoisjd ljals lksjdlkjk fuck you ''' #years of how many of them go past since yeah #wow I was tired when I wrote this, making it more readable #get the amount of time in years since 2000.0 if BJD: years = np.abs((date-2451545.0)/365.) else: years = np.abs(date-2000.0) delta0 = dms2deg(delta0) alpha0 = hms2deg(alpha0) #correct for proper motions, be sure to correct for cos(dec) factor in RA deltaf = delta0 + pmD*years/3600. #average delta, converted to radians inside of cosine alphaf = alpha0 + (pmA*years/3600.)/np.cos(((delta0+deltaf)/2.)*np.pi/180.) 
#back to dms/hms deltaf = deg2dms(deltaf) alphaf = deg2hms(alphaf) #return the values return (alphaf, deltaf) def B1950toJ2000(alpha, delta, pmA, pmD): '''Converts from B1950 equinox/epoch to J2000 equinox/epoch takes alpha, delta, and proper motions in each for B1950 returns alpha and delta in J2000 NOTE: This is just a wrapper for EquinoxToJ2000() with the equinox coming from B1950 ''' return EquinoxToJ2000(alpha, delta, pmA, pmD, 2433282.423, BJD=True) def refractionAnglehor(Aapp): ''' Finds the angle of refraction of an object at an apparent altitude of Aapp Aapp given in typical [deg, min, sec] numpy array NOTE: THIS IS AN APPROXIMATION THAT ONLY WORKS CLOSE TO THE HORIZON ''' c0 = 35.338/60 c1 = -13.059/60 c2 = 2.765/60 c3 = -0.244/60 #get to degs Aapp = dms2deg(Aapp) #formula theta = deg2dms(c0 + c1*Aapp + c2*Aapp**2 + c3*Aapp**3) return theta def trueAltitudehor(Aapp): ''' wrapper that finds true altitude from the apparent altitude, Aapp, Aapp given in typical [deg, min, sec] numpy array NOTE: THIS IS AN APPROXIMATION THAT ONLY WORKS CLOSE TO THE HORIZON ''' #this is basically just a wrapper function a = deg2dms(dms2deg(Aapp) - dms2deg(refractionAnglehor(Aapp))) return a def apparentAltitudehor(a): ''' Finds apparent altitude from true altitude, given in typical [deg,min,sec] numpy array NOTE: THIS IS AN APPROXIMATION THAT ONLY WORKS CLOSE TO THE HORIZON ''' c0 = 35.338/60 c1 = -13.059/60 c2 = 2.765/60 c3 = -0.244/60 #this is the polynomial roots = np.roots((-c3, -c2, 1-c1, -c0-dms2deg(a))) #only return the real one for root in roots: if np.imag(root) == 0: Aapp = deg2dms(np.real(root)) return Aapp def trueAltZen(a): '''Finds true altitude from apparent using approximation near zenith a in [degrees,minutes,seconds] numpy array NOTE: THIS IS AN APPROXIMATION THAT ONLY WORKS CLOSE TO THE HORIZON ''' n = 1.0002923 return deg2dms(np.arcsin(n*np.sin(dms2deg(a)*np.pi/180))*180/np.pi) def apparAltZen(z): '''Finds apparent altitude from apparent using approximation 
near zenith z in [degrees,minutes,seconds] numpy array NOTE: THIS IS AN APPROXIMATION THAT ONLY WORKS CLOSE TO THE HORIZON ''' n = 1.0002923 return deg2dms(np.arcsin(np.sin(dms2deg(z)*np.pi/180)/n)*180/np.pi)
UTF-8
Python
false
false
2,014
3,685,081,964,635
c90ecd3e6611c8efcb74bd7673a32a0b436baf13
c0fa3ffcdcdb6ab92c239abaa77211f1ae387493
/engine/python/fife/extensions/serializers/__init__.py
05615060adaad4b190a73f2c5502d0618e35a4f0
[]
no_license
harendranathvegi9/Zero-Sum-Cascade-Old
https://github.com/harendranathvegi9/Zero-Sum-Cascade-Old
cfee79862e5f6dba24b51d0610b986d3051cafc3
4cf70a74d2282d4387e935402a9b94940e34bedc
refs/heads/main
2021-01-17T23:41:41.489875
2010-09-12T13:18:59
2010-09-12T13:18:59
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# -*- coding: utf-8 -*- # #################################################################### # Copyright (C) 2005-2009 by the FIFE team # http://www.fifengine.de # This file is part of FIFE. # # FIFE is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the # Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA # #################################################################### import fife, sys, os from traceback import print_exc __all__ = ('ET', 'SerializerError', 'InvalidFormat', 'WrongFileType', 'NameClash', 'NotFound', 'warn', 'root_subfile', 'reverse_root_subfile') try: import xml.etree.cElementTree as ET except: import xml.etree.ElementTree as ET class SerializerError(Exception): pass class InvalidFormat(SerializerError): pass class WrongFileType(SerializerError): pass class NameClash(SerializerError): pass class NotFound(SerializerError): pass def warn(self, msg): print 'Warning (%s): %s' % (self.filename, msg) def root_subfile(masterfile, subfile): """ Returns new path for given subfile (path), which is rooted against masterfile E.g. if masterfile is ./../foo/bar.xml and subfile is ./../foo2/subfoo.xml, returned path is ../foo2/subfoo.xml NOTE: masterfile is expected to be *file*, not directory. 
subfile can be either """ s = '/' masterfile = norm_path(os.path.abspath(masterfile)) subfile = norm_path(os.path.abspath(subfile)) master_fragments = masterfile.split(s) sub_fragments = subfile.split(s) master_leftovers = [] sub_leftovers = [] for i in xrange(len(master_fragments)): try: if master_fragments[i] == sub_fragments[i]: master_leftovers = master_fragments[i+1:] sub_leftovers = sub_fragments[i+1:] except IndexError: break pathstr = '' for f in master_leftovers[:-1]: pathstr += '..' + s pathstr += s.join(sub_leftovers) return pathstr def reverse_root_subfile(masterfile, subfile): """ does inverse operation to root_subfile. E.g. E.g. if masterfile is ./../foo/bar.xml and subfile is ../foo2/subfoo.xml, returned path ./../foo2/subfoo.xml Usually this function is used to convert saved paths into engine relative paths NOTE: masterfile is expected to be *file*, not directory. subfile can be either """ s = '/' masterfile = norm_path(os.path.abspath(masterfile)).split(s)[:-1] subfile = norm_path(os.path.abspath( s.join(masterfile) + s + subfile )) masterfile = norm_path(os.getcwd()) + s + 'foo.bar' # cheat a little to satisfy root_subfile return root_subfile(masterfile, subfile) def norm_path(path): """ Makes the path use '/' delimited separators. FIFE always uses these delimiters, but some os-related routines will default to os.path.sep. """ if os.path.sep == '/': return path return '/'.join(path.split(os.path.sep))
UTF-8
Python
false
false
2,010
13,469,017,445,357
f3cdbdf43f34d2e2e0ec0665a03642b42234db8b
4ae36a0be4d359741c2278a89d1df9f0c7b680fa
/wwatcher.py
33d2e4096b2e4a250f0e6983cf8d1638f584c5c6
[]
no_license
coxlab/wwatcher
https://github.com/coxlab/wwatcher
2297b34e4d01a792f647581fb173cf2c4f1a0454
e6efabb1d53f9b96b982cd3b53ec63b7492c1fa7
refs/heads/master
2021-05-26T22:47:26.778609
2013-08-02T17:25:59
2013-08-02T17:25:59
11,372,670
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/env python
# Command line tool to analyze animal weights stored in a Google Sheets
# spreadsheet (Python 2; uses gspread + matplotlib).

import gspread
import sys
import pylab
import argparse
import getpass
import datetime
import wwatcher
from matplotlib import pyplot
import matplotlib.dates
from matplotlib.dates import DateFormatter, WeekdayLocator, DayLocator, MONDAY
import random
from matplotlib import legend  # NOTE(review): appears unused — confirm before removing


def main():
    '''
    Parse command line options to analyze animal weight data from Google Sheets.
    Creates a WeightWatcher class and executes methods specified by the user
    on the command line.
    '''
    #TODO add spreadsheet name and url customizability to command line interface
    parser = argparse.ArgumentParser(description="A command line tool to analyze animal weights stored in Google Sheets", \
        usage="wwatcher.py Username animalName1 animalName2 animalName3 [options] \n\
or \n\
wwatcher.py [options] Username animalName1 animalName2 animalName3")
    parser.add_argument('username', help="Google Docs username, required as first argument (e.g. [email protected])")
    parser.add_argument('animals', help="Animal IDs to analyze, separated by spaces. At least 1 is required, but you \
can add as many as you want", nargs="+")
    parser.add_argument('-c', action="store_true", default=False, help="Check to make sure each animal weighed at least \
90 percent of its most recent maximum (weekend) value for the last 4 weekdays")
    # NOTE(review): parsed.d arrives as a str; IsHeavyEnough's countdown logic
    # decrements it as an int — consider type=int here. TODO confirm.
    parser.add_argument('-d', help="Specify the number of weekdays to analyze with -c option")
    parser.add_argument('-g', action="store_true", default=False, help="Make a graph of each animal's weight over time")
    parser.add_argument('-a', action="store_true", default=False, help="Make one graph of every animal's weight over time")
    parser.add_argument('-r', action="store_true", default=False, help="Graph a linear regression where x values are max weights \
and y values are the previous week's average daily weight")
    parsed = parser.parse_args()
    #make sure at least 1 specified option calls a WeightWatcher class method, else give the user help and exit
    if (parsed.c == False) and (parsed.g == False) and (parsed.a == False) and (parsed.r == False):
        parser.print_help()
        sys.exit()
    username = parsed.username
    animals = parsed.animals
    #if the username is [email protected], no need to ask for password in terminal. It's this crazy string, and we want to run
    #the script automatically without stopping for user input every week
    # NOTE(review): hard-coded credential committed to source control — rotate
    # this password and load it from an environment variable or config file.
    if username == "[email protected]":
        password = "}ONCuD*Xh$LNN8ni;0P_HR_cIy|Q5p"
    else:
        password = getpass.getpass("Enter your Google Docs password: ")
    watcher = wwatcher.WeightWatcher(username, password, animals)
    #if the user selects the -c option, check animal weights to make sure they don't go below 90% max
    if parsed.c:
        if parsed.d:
            HeavyEnoughDict = watcher.IsHeavyEnough(days=parsed.d)
        else:
            HeavyEnoughDict = watcher.IsHeavyEnough()
        #make a list of animals that aren't heavy enough
        problem_animals = []
        for animal in animals:
            if not HeavyEnoughDict[animal]:
                problem_animals.append(animal)
        #TODO implement email functionality for alerts when this option is run automatically
        if len(problem_animals) == 0:
            print "Animal weights look fine. Awesome!\n"
        else:
            for each in problem_animals:
                print "A stupid algorithm thinks %s is underweight. You might want to check on him!" % each
    if parsed.g:
        #dict with animals ID strings as keys and a list of lists of the same length [[dates], [weights for those dates], [whether it was a weekend weight Boolean]]
        data_for_graph = watcher.format_data_for_graph()
        for animal in animals:
            dates = data_for_graph[animal][0]
            weights = data_for_graph[animal][1]
            fig = pyplot.figure(str(datetime.date.today()))
            pyplot.title("Animal weight over time")
            pyplot.ylabel("Animal Weight (g)")
            ax = fig.gca()
            # X axis: major ticks every other Monday, minor ticks daily.
            mondays = WeekdayLocator(MONDAY, interval=2)
            alldays = DayLocator()
            weekFormatter = DateFormatter('%b %d %y')
            ax.xaxis.set_major_locator(mondays)
            ax.xaxis.set_minor_locator(alldays)
            ax.xaxis.set_major_formatter(weekFormatter)
            # Random hex color so each animal's line is distinguishable.
            r = lambda: random.randint(0,255)
            ax.plot_date(matplotlib.dates.date2num(dates), weights, '#%02X%02X%02X' % (r(),r(),r()), lw=2, label=str(animal))
            pyplot.axis(ymin=400, ymax=750)
            ax.legend(loc='best')
            ax.xaxis_date()
            ax.autoscale_view()
            pyplot.setp(fig.gca().get_xticklabels(), rotation=35, horizontalalignment='right')
            pyplot.show()
    # NOTE(review): the -a branch below is a verbatim copy of the -g branch;
    # consider extracting a shared plotting helper. TODO confirm intended
    # difference ("one graph of every animal") actually holds.
    if parsed.a:
        #dict with animals ID strings as keys and a list of lists of the same length [[dates], [weights for those dates], [whether it was a weekend weight Boolean]]
        data_for_graph = watcher.format_data_for_graph()
        for animal in animals:
            dates = data_for_graph[animal][0]
            weights = data_for_graph[animal][1]
            fig = pyplot.figure(str(datetime.date.today()))
            pyplot.title("Animal weight over time")
            pyplot.ylabel("Animal Weight (g)")
            ax = fig.gca()
            mondays = WeekdayLocator(MONDAY, interval=2)
            alldays = DayLocator()
            weekFormatter = DateFormatter('%b %d %y')
            ax.xaxis.set_major_locator(mondays)
            ax.xaxis.set_minor_locator(alldays)
            ax.xaxis.set_major_formatter(weekFormatter)
            r = lambda: random.randint(0,255)
            ax.plot_date(matplotlib.dates.date2num(dates), weights, '#%02X%02X%02X' % (r(),r(),r()), lw=2, label=str(animal))
            pyplot.axis(ymin=400, ymax=750)
            ax.legend(loc='best')
            ax.xaxis_date()
            ax.autoscale_view()
            pyplot.setp(fig.gca().get_xticklabels(), rotation=35, horizontalalignment='right')
            pyplot.show()
    if parsed.r:
        # Linear regression: x = weekend (max) weights, y = preceding week's
        # average weekday weight.
        data_for_graph = watcher.regression()
        fitted = pylab.polyfit(data_for_graph[0], data_for_graph[1], 1)
        line = pylab.polyval(fitted, data_for_graph[0])
        pylab.plot(data_for_graph[0], line)
        pylab.scatter(data_for_graph[0], data_for_graph[1])
        pylab.xlabel('Weekend (max) weight')
        pylab.ylabel('Avg Weekday Weight')
        pylab.show()


if __name__ == '__main__':
    main()


class Spreadsheet(object):
    '''
    An instance of this class uses the gspread package
    (https://github.com/burnash/gspread) to communicate with the Google Docs
    API. This opens the first worksheet in a spreadsheet specified in __init__
    (i.e. sheet1 in 'Daily Weights after 7-11-13')
    '''
    def __init__(self, username, password, spreadsheet_name='Daily Weights after 7-11-13', spreadsheet_url=None):
        '''
        param username: A string, the user's Google Docs email (e.g. [email protected])
        param password: A string, the user's password for Google Docs
        param spreadsheet_name: A string, name of the spreadsheet from which you
        want data, as it appears in Google Docs (e.g. "Daily Weights after 7-11-13")
        param spreadsheet_url: A string, the url for a Google Docs spreadsheet
        if you want to use a different one
        '''
        print "\nLogging into Google Docs..."
        self.login = gspread.login(username, password)
        print "Importing spreadsheet from Google Docs..."
        if spreadsheet_url == None:
            self.worksheet_open = self.login.open(spreadsheet_name).sheet1
        else:
            self.worksheet_open = self.login.open_by_url(spreadsheet_url).sheet1


class WeightWatcher(object):
    def __init__(self, username, password, animals, spreadsheet_name='Daily Weights after 7-11-13', \
                 spreadsheet_url=None):
        '''
        An instance of the WeightWatcher class has a spreadsheet class attribute
        to access Google Sheets data with animal weights. The WeightWatcher
        class also has methods to monitor and analyze animal weights.

        param username: a string, login email for Google Docs
        param password: a string, login password for Google Docs
        param animals: a list, where each item in the list is an animal ID (str)
        param spreadsheet_name (optional): a string, Name of spreadsheet you
        want to parse, default is currently the Cox lab shared sheet
        'Daily Weights after 7-11-13'
        param spreadsheet_url (optional): a string, url for a spreadsheet if you
        want to use this instead of a sheet name or the default spreadsheet_name
        '''
        #self.data is a list of lists with all the spreadsheet data
        #e.g. nested list ['date/time', '[email protected]', 'animal ID', 'weight', 'after water? yes or no'] <--one row from spreadsheet
        self.data = Spreadsheet(username, password, spreadsheet_name, spreadsheet_url).worksheet_open.get_all_values()
        print "Successfully imported spreadsheet\n"
        # animal IDs (list of str) the methods below operate on
        self.animals_to_analyze = animals
        # number of spreadsheet rows; used as the scan bound in every loop below
        self.data_list_length = len(self.data)

    def IsHeavyEnough(self, days=4):
        '''
        #go through last 4 weekday weights of each aninmal specified by user and make sure each day it weighs at least 90 percent its most recent max weight
        param self.animals_to_analyze should be a list of strings
        *Returns a dict with animal names (str) as keys and True as the value iff each of the last 4 weekdays it weighed enough*
        '''
        #================================================================================================================
        #get latest max weights from backwards spreadsheet (backwards so it starts looking for most recent data)
        #make dictionary to store animal names as keys and max weights as values
        #use data_position to remember where you are in the backwards (i.e. most recent) weights data during while loop
        maxes = {}
        animals_copy = self.animals_to_analyze[:]
        data_position = 0
        backwards_data = self.data[::-1]
        #do the following until we've gotten every animal's max weight
        #backwards_data[data_position[4] is overnight h20 column, "yes" means the comp has found a max weight
        #backwards_data[data_position][2] is animal ID in the spreadsheet, so the first boolean makes sure it's an animal
        #for which the user wants to verify the weight
        while (len(animals_copy)) > 0 and (data_position < self.data_list_length):
            if (backwards_data[data_position][2] in animals_copy) and ("yes" in backwards_data[data_position][4]):
                #make sure there's an animal weight (not '-' or 'x' in position backwards_data[data_pos][4]
                #by trying to make the string an int; if there's an exception it's not a valid animal weight
                try:
                    animal_weight = int(backwards_data[data_position][3])
                    #if no exception, add key (animal ID as string) and value (weight as int) to maxes dict
                    maxes[backwards_data[data_position][2]] = animal_weight
                    animals_copy.remove(backwards_data[data_position][2])
                except ValueError:
                    pass
                    #print "ValueError at %s, skipping to next cell" % data_position (used for testing)
            data_position += 1
        print '\nMax weights: ' + str(maxes) + "\n"
        #make sure all animal max weights were found
        if len(animals_copy) > 0:
            raise Exception("Could not find max weight for: " + str(animals_copy).strip('[]'))
        #================================================================================================================
        #get most recent 4 weekday weights for each animal
        #make mins dict to store animal ID (str) as keys and 4 weekday weights as values (a list of ints)
        def DaysNeeded(animals_copy, days):
            '''
            Returns a dict with a starting value of days (4 default) (int) for each animal ID key (str) in animals_copy
            Used in the while loop below to make it keep looping until each animal has at least 4 weekday weights
            '''
            days_status = {}
            for each in animals_copy:
                days_status[each] = days
            return days_status

        def AllDaysRetrieved(DaysNeededDic):
            '''
            Returns a boolean to indicate whether EVERY animal has 4 weekday weights recorded,
            indicated by a value of 0 in countdown
            '''
            dict_values = DaysNeededDic.values()
            for each in dict_values:
                if each > 0:
                    return False
            return True

        def MakeDictLists(animals_copy):
            '''
            make an empty list as the value for each animal (key) in weekday_weights
            '''
            dictionary = {}
            for each in animals_copy:
                dictionary[each] = []
            return dictionary

        animals_copy = self.animals_to_analyze[:]
        #default number of days (4) used below "DaysNeeded(animals_copy, days) specified in WeightWatcher.IsHeavyEnough attributes
        countdown = DaysNeeded(animals_copy, days)
        weekday_weights = MakeDictLists(animals_copy)
        data_position = 0
        #check to see if every animal has 4 weekday weights before continuing in the while loop
        while not (AllDaysRetrieved(countdown)) and (data_position < self.data_list_length):
            #do the following if the data position (row) is for an animal in self.animals_to_analyze and it's
            #a weekday weight (i.e. "no" in column 5 of the spreadsheet)
            if (backwards_data[data_position][2] in animals_copy) and ("no" in backwards_data[data_position][4]):
                try:
                    animal_weight = int(backwards_data[data_position][3])
                except ValueError:
                    pass
                    #print "Couldn't get weight at %s, skipping to next cell" % data_position
                else:
                    if countdown[backwards_data[data_position][2]] > 0:
                        weekday_weights[backwards_data[data_position][2]].append(animal_weight)
                        countdown[backwards_data[data_position][2]] -= 1
            data_position += 1
        print "Latest weekday weights: " + str(weekday_weights) + "\n"
        if not AllDaysRetrieved(countdown):
            raise Exception("Could not find weekly weight for all animals")
        #================================================================================================================
        #make a dict with animal ID keys (str) and True or False values if the animal weighed more than 90% of
        #its max (weekend) weight or less, respectively. Days equal to 90% of its max make
        #the animal "false" in IsHeavyEnoughDict
        IsHeavyEnoughDict = {}
        for animal in self.animals_to_analyze:
            for each in weekday_weights[animal]:
                if float(each) > (0.9*(maxes[animal])):
                    IsHeavyEnoughDict[animal] = True
                else:
                    IsHeavyEnoughDict[animal] = False
                    break
        return IsHeavyEnoughDict
    #====================================================================================================================

    #====================================================================================================================
    def format_data_for_graph(self):
        '''
        Returns a dict with animal IDs (str) as keys and a list of lists
        [[date objects list], [weights as ints list], [is_maxwgt list of Booleans]] as values.
        e.g. {"Q4":[[dates], [weights]]}
        '''
        def date_string_to_object(date_string):
            '''
            Takes in a date as a string from the spreadsheet (format 'month/day/year hrs:min:secs' or 'month/day/year')
            and returns that date as a date object from the datetime module
            '''
            #make splat, which is a list with date info e.g. ['month', 'day', 'year', 'hrs', 'min', 'sec']
            #makes date_obj, which is a python datetime object
            formatted = date_string.replace(":", "/").replace(" ", "/")
            splat = formatted.split("/")
            #splat[2] is year, splat[0] is month, and splat[1] is day. This is the format required by datetime.date
            date_obj = datetime.date(*(map(int, [splat[2], splat[0], splat[1]])))
            return date_obj

        data_copy = self.data[:]
        animals = self.animals_to_analyze[:]
        graph_dict = {}
        for animal in animals:
            print "Getting data for %s" % animal
            data_position = 0
            #dates is a list of date objects
            dates = []
            #weights is a list of weights corresponding to the date objects above
            weights = []
            #maxweight is a list of true or false for whether each date/weight pair was max weight "true"/"yes"
            #or a normal weekly weight "false"/"no" in data_copy[data_position][4]
            is_maxwgt = []
            while (data_position < self.data_list_length):
                if (data_copy[data_position][2] == animal):
                    try:
                        wgt = int(data_copy[data_position][3])
                        weights.append(wgt)
                    except ValueError:
                        pass
                        #print "Couldn't get weight at %s, skipping to next cell" % data_position #used for testing
                    else:
                        date = date_string_to_object(data_copy[data_position][0])
                        dates.append(date)
                        if "yes" in data_copy[data_position][4]:
                            is_maxwgt.append(True)
                        else:
                            is_maxwgt.append(False)
                data_position += 1
            #after it has gotten dates, weights, is_maxwgt for each animal, put that info in graph_dict with
            #animal ID as the key for your list of lists
            graph_dict[animal] = [dates, weights, is_maxwgt]
        return graph_dict
    #====================================================================================================================

    #====================================================================================================================
    #TODO test this method better, lots of confusing while loops here
    def regression(self):
        '''
        Returns 2 lists in a tuple: a weekend weights list, and a list of average weights from the most recent
        4 weekdays (during water reprivation) associated with those weekend weights.
        '''
        class addAppend(object):
            '''
            A class the counts to 4 items in a list then averages those items, helps in a while loop below
            '''
            def __init__(self):
                self.intList = []
                # False until 4 weights are collected, then the float average
                self.avg = False

            def addInt(self, num):
                if len(self.intList) < 4:
                    self.intList.append(num)
                elif len(self.intList) == 4:
                    summed = sum(self.intList)
                    self.avg = summed/4.0
                else:
                    pass

        weekend_weights = []
        weekday_avgs = []
        data_rev = self.data[::-1]
        animals_copy = self.animals_to_analyze[:]
        for animal in animals_copy:
            data_position = 0
            while (data_position < self.data_list_length):
                # Each weekend ("yes") row anchors an inner scan that collects
                # the next 4 weekday ("no") weights for the same animal.
                if (data_rev[data_position][2] == animal) and ("yes" in data_rev[data_position][4]):
                    new_position = data_position
                    count_four = addAppend()
                    weekend_wgt = None
                    while not count_four.avg and (new_position < self.data_list_length):
                        if (data_rev[new_position][2] == animal) and ("no" in data_rev[new_position][4]):
                            try:
                                weekend_wgt = int(data_rev[data_position][3])
                                weekday_wgt = int(data_rev[new_position][3])
                            except ValueError:
                                pass
                            else:
                                count_four.addInt(weekday_wgt)
                        new_position += 1
                    #only record the pair when a full 4-day average was reached
                    if type(count_four.avg) is float:
                        weekday_avgs.append(count_four.avg)
                        weekend_weights.append(weekend_wgt)
                data_position += 1
        return (weekend_weights, weekday_avgs)
    #====================================================================================================================

    #====================================================================================================================
UTF-8
Python
false
false
2,013
12,309,376,319,443
ffec9e1e0998545cbfa0519a33dda2f8e31cc070
3df60d3284be8fb481494a007b91f431efd43d92
/src/adzone/managers.py
79e819eabe181c967f9e97d7cf4ace8804f805e6
[]
no_license
bmeyer71/django-adzone
https://github.com/bmeyer71/django-adzone
445fdbfdd8e1ba019134f56d55ea22dc615a4705
87710dfaa5088587ff30d3d5262c2d1fb25798ea
refs/heads/master
2021-01-20T23:41:30.020489
2013-05-05T01:16:59
2013-05-05T01:16:59
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from django.db import models class AdManager(models.Manager): """ A Custom Manager for ads """ def get_random_ad(self, ad_zone, ad_category=None): """ Returns a random advert that belongs for the specified ``ad_category`` and ``ad_zone``. If ``ad_category`` is None, the ad will be category independent. """ from adzone.models import AdImpression ad = None try: if ad_category: ads = self.get_query_set().filter( category__slug=ad_category, enabled=True, zone__slug=ad_zone).order_by('?') if ads != []: for item in ads: if item.impression_limit == 0: ad = item else: if AdImpression.objects.filter(ad_id=item.id).count() > item.impression_limit: ad = None continue else: ad = item break else: ads = self.get_query_set().filter( enabled=True, zone__slug=ad_zone).order_by('?') if ads != []: for item in ads: if item.impression_limit == 0: ad = item else: if AdImpression.objects.filter(ad_id=item.id).count() > item.impression_limit: ad = None continue else: ad = item break except IndexError: return None return ad
UTF-8
Python
false
false
2,013
12,043,088,329,795
e00379aa471675f2b6d457f2facbdc6ac0aced20
99746a3d0596510a971fd18f5b79246a7a2c1656
/old/admin.py
8adc7da85c17c5d7242281ed80b9af86787b70cb
[]
no_license
amfarrell/20q
https://github.com/amfarrell/20q
76834df7b610d7984a5757021dc27f7f00b9fdfd
7a1c2cb47cd14ecf5499e64caaba8c255ae6f18e
refs/heads/master
2020-05-18T04:19:00.648337
2010-04-30T19:04:13
2010-04-30T19:04:13
612,809
2
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from django.contrib import admin from models import SurveyResult, SurveyResultLine admin.site.register(SurveyResult) admin.site.register(SurveyResultLine)
UTF-8
Python
false
false
2,010
7,610,682,060,523
3c574ed476b853d6cf6c1350d92aea20c5e9ea5f
df6844af35fa8499d22f7917a950073f28dd6f15
/SystemMain/auditec2.py
1bc21ff35226a17273fdd80319bdd29a639956d7
[]
no_license
Ipswitch/AWS_CI_Baseline
https://github.com/Ipswitch/AWS_CI_Baseline
bd9fa46666420c8dbf764d88a870cc33a1cc8d5c
0f206d8ed5d202c77603eae93cb24f2ae1ec828b
refs/heads/master
2016-08-05T19:16:27.795976
2013-05-17T10:02:30
2013-05-17T10:02:30
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
'''
Created on May 4, 2013

@author: bob
'''
from audit import AuditBase
from ec2 import EC2
import logging
import logging.config


class AuditEC2(AuditBase):
    '''
    Audits AWS EC2 configuration items (images, instances, addresses,
    security groups, NICs, placement groups, ramdisks, volumes) against
    a stored baseline via AuditBase.CheckCIBaseline.
    '''

    def __init__(self, sts_connection, role_to_assume=None, role_session_name=None, region_name=None,
                 sourceCM=None, sqliteCM=None, usesqlite_for_masterCM=False, backupCMs=None, logging_config=None):
        '''
        param sts_connection: STS connection wrapper used to assume the audit role
        param role_to_assume: ARN of the role to assume (account id is field 4)
        param role_session_name: session name for the assumed role
        param region_name: AWS region to audit
        param sourceCM / sqliteCM / backupCMs: configuration-management stores
        param usesqlite_for_masterCM: whether sqlite is the master CM store
        param logging_config: config passed to AuditBase.SetLogging
        '''
        self._sts_connection = sts_connection
        self._role_to_assume = role_to_assume
        self._role_session_name = role_session_name
        self._region_name = region_name
        # ARN format arn:aws:iam::<account-id>:... -> field 4 is the account id.
        self._account = role_to_assume.split(":")[4]
        self._sourceCM = sourceCM
        self._backupCMs = backupCMs
        self._sqlite = sqliteCM
        self._usesqlite_for_masterCM = usesqlite_for_masterCM
        self.SetLogging(logging_config)
        # Structured context attached to every log record (extra=self._d).
        self._d = {'aws_access_key_id': self._sts_connection._connection.access_key,
                   'role_assumed': self._role_to_assume,
                   'aws is secure': self._sts_connection._connection.is_secure,
                   'region': self._sts_connection._connection.region,
                   'aws_account': role_to_assume.split(":")[4],
                   'hostname': self.hostname,
                   'app_name': self.app_name,
                   'severity': '6'}

    def Start(self, BaselineUnixDate):
        '''
        Audit every EC2 configuration item type against the baseline.

        param BaselineUnixDate: baseline timestamp passed to CheckCIBaseline
        Returns a dict mapping "Amazon.EC2.<ItemName>" to the CheckCIBaseline
        result for that item type.
        '''
        changes = dict()
        eC2 = EC2(self._sts_connection, self._role_to_assume, self._role_session_name, self._region_name)
        ConfigurationItemClass = "Amazon.EC2"
        self._logger.debug("Starting Audit of : %s", ConfigurationItemClass, extra=self._d)
        # (item name, fetcher) pairs replacing eight copy-pasted stanzas.
        # ListAllAccountImages is the only call that needs the account id.
        audits = [
            ("AccountImages", lambda: eC2.ListAllAccountImages(self._account)),
            ("Instances", eC2.ListInstances),
            ("Addresses", eC2.ListAddresses),
            ("SecurityGroups", eC2.ListSecurityGroups),
            ("NetworkInterfaces", eC2.ListNetworkInterfaces),
            ("PlacementGroups", eC2.ListPlacementGroups),
            ("Ramdisks", eC2.ListRamdisks),
            ("Volumes", eC2.ListVolumes),
        ]
        for ConfigurationItemName, fetch in audits:
            # Fix: log after the item name is set — the original SecurityGroups
            # stanza logged "Starting Audit" with the previous item's name.
            self._logger.debug("Starting Audit of %s.%s", ConfigurationItemClass, ConfigurationItemName, extra=self._d)
            items = fetch()
            key = ConfigurationItemClass + "." + ConfigurationItemName
            changes[key] = self.CheckCIBaseline(self._account, items, key, ConfigurationItemName, BaselineUnixDate)
        return changes
UTF-8
Python
false
false
2,013
18,408,229,860,347
8625b977bf5737293098102de29433ec3f4beb89
1b11cb65d3eee295d3f41d608b0e5dcdf4122d69
/application/pages/validators.py
c1cf95a56629599288232d629bacecbdf1896888
[]
no_license
mneudert/highball
https://github.com/mneudert/highball
582735da4747851b202bd37ba9c995eeef610c53
db9e832227c46c75b9e51c75ca72c2cee13fa00b
refs/heads/master
2016-09-05T21:02:55.412142
2013-03-10T21:25:50
2013-03-10T21:25:50
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import re from django.core.validators import RegexValidator slashslug_re = re.compile(r'^[-a-zA-Z0-9_/]+$') validate_slashslug = RegexValidator(slashslug_re, ("Enter a valid 'slug' consisting of" " letters, numbers, underscores, hyphens" " or slashes."), 'invalid')
UTF-8
Python
false
false
2,013
850,403,560,285
edac32b6f695135ec2ca9a8fd4a3448b054d663e
80327c03358eea8905805df2158707eaf4454a78
/dialogs/monitoringAdministration/fMenuAuthority_intr.py
03c8cc72fa7824652f0fe900bfb8a716b01d8ab9
[]
no_license
wisnu/BMMEnterprise
https://github.com/wisnu/BMMEnterprise
97a3389279ba7831057cb083848215fecd6034d9
e424dd756d21de3a8f7591ee200fca0133aaaf55
refs/heads/master
2016-03-24T00:42:06.952722
2011-06-15T03:33:42
2011-06-15T03:33:42
1,898,198
1
0
null
null
null
null
null
null
null
null
null
null
null
null
null
class fMenuAuthority: def __init__(self, formObj, parentForm): self.app = formObj.ClientApplication self.Lokasi_Source = '' #self.menulist.First() def Show(self, Lokasi_Source): self.Lokasi_Source = Lokasi_Source self.FormContainer.Show() def bSelect_Click (self, button): formname = 'monitoringAdministration/fMenuAuthority_Edit' Nama_Menu = self.menulist.menu_name ph = self.app.CreateValues(['Nama_Menu', Nama_Menu], ['Lokasi_Source', self.Lokasi_Source]) editor = self.app.CreateForm(formname, formname, 0, ph, None) editor.FormContainer.Show()
UTF-8
Python
false
false
2,011
15,006,615,774,119
29a387772cd32a032d2b1c7cd7ad14cb81e1f937
8aae4e3ee7943f9293085fd57b4867e06ab836b2
/ProgrammingLanguage/Python/PP4E/Chapter2/teststreams.py
e52c5c40d39046b3248e7843e6bc1a59d4f9cd61
[]
no_license
eboladev/Study
https://github.com/eboladev/Study
42213e73384788671deacd8d37004a571c95b7bb
388f3629e7651e41d2569be92d4f65c4c94cd512
refs/heads/master
2020-12-25T22:47:03.624272
2014-10-24T12:10:13
2014-10-24T12:10:13
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#! /usr/bin/env python # -*- coding:utf-8 -*- def interact(): """docstring for interact""" print 'Hello stream world' while True: try: reply = raw_input('Enter a number> ') except EOFError as e: break else: num = int(reply) print "%d squared is %d" %(num, num**2) print 'Bye' if __name__ == "__main__": interact()
UTF-8
Python
false
false
2,014
1,013,612,284,214
3f0129f885dd20a365438f37d622985e3a0b0865
37df673e3501b3ee11bc5427f294855eaf0bd0dd
/oxidizr/crawler.py
a85a94c8ce77525e18cea1e007f55d4e41047364
[ "GPL-2.0-only" ]
non_permissive
techunits/oxidizr
https://github.com/techunits/oxidizr
62250d441b841e3e1f25ffb02a4093f1e7f8e5c3
c7d9bd26ef4f6dbb8efbf175389930b6993b1160
refs/heads/master
2020-12-26T05:02:21.484117
2014-10-30T19:59:27
2014-10-30T19:59:27
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/env python # -*- coding: utf-8 -*- from twisted.web.client import getPage from twisted.python.util import println from BeautifulSoup import BeautifulSoup from twisted.python import log from twisted.internet import defer, task, reactor import re from urlparse import urlparse # Needs : PyOpenSSL and Twisted 12.3+ def sleep(secs): d = defer.Deferred() reactor.callLater(secs, d.callback, None) return d def parallel(iterable, count, callable, *args, **named): coop = task.Cooperator() work = (callable(elem, *args, **named) for elem in iterable) return defer.DeferredList([coop.coiterate(work) for i in xrange(count)]) def union(p, q): for url in p: parsed = urlparse(url) if parsed.netloc and parsed.netloc != 'www.webhostingtalk.com': url = 'http://%s/' % parsed.netloc if url not in q: print url # q.append(url) def extractLinks(html, url): print "URL in extractLinks: ", url soup = BeautifulSoup(html) soup.prettify() return [str(anchor['href']) for anchor in soup.findAll('a', attrs={'href': re.compile("^http://")}) if anchor['href']] def crawlPage(url, urlList): sleep(10) d = getPage(url) d.addCallback(extractLinks, url) d.addCallback(union, urlList) d.addErrback(log.err) return d # def crawler(urls): # urls = list(urls) def main(reactor, *args): urls = list(args) return parallel(urls, len(urls), crawlPage, urls) if __name__ == '__main__': import sys task.react(main, ["http://www.webhostingtalk.com"]) # Can pass a list of urls
UTF-8
Python
false
false
2,014
10,299,331,591,064
8ec9db3e0695ac68cee5dff6616f0318a0e5832f
a24215dd8c072bb55890e8e232100b20d88b0583
/deli/artist/base_artist.py
8ae4efd8cf4e3ed71d644dbb1f5085047be50c0c
[ "LicenseRef-scancode-unknown-license-reference", "BSD-3-Clause" ]
non_permissive
tonysyu/deli
https://github.com/tonysyu/deli
d98f05b3900229e47f0c0185ba40216beb43de83
ea71d49318e40dc7752d435db0ce31b133994c4f
refs/heads/master
2021-01-10T20:19:22.104751
2014-10-20T04:17:45
2014-10-20T04:17:45
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
""" Defines the base class for artists. """
from contextlib import contextmanager

from traits.api import Instance, Property, Tuple

from ..core.component import Component
from ..layout.bounding_box import BoundingBox
from ..layout.bbox_transform import BboxTransform


class BaseArtist(Component):
    """ Base class for all artists.

    Unlike styluses, artists contain the data that they render. Artists are
    simply specific types of plots: For example, line artists, marker artists,
    and bar artists, all operate on the same type of data, but those artist
    will render the data differently.

    Artists may use a few different styluses to compose a plot; for example,
    a box-and-whisker artist might have separate styluses to draw rectangles,
    error-bars (whiskers), and points (outliers).
    """

    # -----------------------------------------------------------------------
    #  Data-related traits
    # -----------------------------------------------------------------------

    #: The extents of the data (x_min, y_min, x_max, y_max)
    data_extents = Property(Tuple)

    #: Styluses associated with this artist.
    styluses = Property(Tuple)

    #: Bounding box for data in the graph. Note that this bounding box
    #: does not just describe the data in this artist; it's the currently
    #: displayed limits of the plot in data space.
    data_bbox = Instance(BoundingBox)

    #: Transform from data space to screen space.
    data_to_screen = Instance(BboxTransform)

    #: Transform from screen space to data space (the inverse of
    #: ``data_to_screen``; recomputed when that transform changes).
    screen_to_data = Property(Instance(BboxTransform),
                              depends_on='data_to_screen')

    def _data_to_screen_default(self):
        # Default transform maps the data bounding box onto the screen bbox.
        return BboxTransform(self.data_bbox, self.screen_bbox)

    def _get_screen_to_data(self):
        # Property getter: simply invert the forward transform.
        return self.data_to_screen.inverted()

    # -------------------------------------------------------------------------
    #  BaseArtist interface
    # -------------------------------------------------------------------------

    def _get_data_extents(self):
        # Abstract: subclasses must report their data extents.
        msg = "`BaseArtist` subclasses must implement `_get_data_extents`"
        raise NotImplementedError(msg)

    def _container_changed(self):
        # When (re)parented, adopt the container's data and screen bboxes so
        # this artist shares the plot's displayed limits.
        if self.container is not None:
            self.data_bbox = self.container.data_bbox
            self.screen_bbox = self.container.local_bbox

    @contextmanager
    def _clipped_context(self, gc):
        # Context manager: save the graphics-context state, clip drawing to
        # this artist's screen rectangle, and restore state on exit.
        with gc:
            gc.clip_to_rect(*self.screen_bbox.rect)
            yield
UTF-8
Python
false
false
2,014
18,743,237,301,253
d123429e7149cd1115f41c5989b82d3caa5bffd3
20dfad54026fb3d14630af9d45ffd20043de64f5
/treasure/collection/forms.py
879e183b1d040ab6a01514f104c1814e6f327b8e
[]
no_license
nuty/treasure
https://github.com/nuty/treasure
7ad538bd340bb68f96f8efcfc41f87a1b71875d3
65b39f62bd0182afa88d5a7c2d787955a65ff3f0
refs/heads/master
2016-04-03T01:49:03.340319
2014-03-21T06:40:38
2014-03-21T06:40:38
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# -*- coding: utf-8 -*-
import re

from models import Category, Treasure
from wtforms.fields import HiddenField, TextField,\
    IntegerField, SelectField, TextAreaField, BooleanField
from wtforms.form import Form
from wtforms.ext.sqlalchemy.fields import QuerySelectField
from wtforms import validators
from flask.ext.uploads import UploadSet, IMAGES
from treasure.utils.tools import CKTextAreaField
from flask_wtf.file import FileAllowed, FileRequired, FileField

# Upload set for image files (extensions given by flask-uploads' IMAGES).
images = UploadSet('images', IMAGES)


class GenFormSelect(object):
    """Factories supplying select-field choices/queries for the forms below."""

    # Status code -> single-letter display label.
    # NOTE(review): "STSTUS" looks like a typo of "STATUS", but the name is
    # kept as-is for compatibility -- confirm no external users before renaming.
    STSTUS_DICTS = {
        '0': u"N",
        '1': u"B",
        '2': u"M",
        '3': u"P",
        '4': u"S",
        '5': u"D",
    }

    @classmethod
    def categories(cls):
        # Query factory for QuerySelectField: all categories.
        return Category.query.all()

    @classmethod
    def treasures(cls):
        # Query factory for QuerySelectField: all treasures.
        return Treasure.query.all()

    @classmethod
    def status(cls):
        # (code, label) pairs for the status SelectField choices.
        return iter(cls.STSTUS_DICTS.items())


class CategoryForm(Form):
    """Create/edit form for a Category."""
    parent = QuerySelectField(
        query_factory=GenFormSelect.categories,
        label=u"parent", allow_blank=True)
    name = TextField(label=u"name")
    pic = HiddenField(label=u'pic')
    image_file = FileField(label=u"image")
    description = CKTextAreaField(label=u"dic")

    def validate_pic(form, field):
        # Sanitize the stored file name: any character outside [a-z0-9_.-]
        # becomes '_'.
        # Fix: this validator was named `validate_cover`, but CategoryForm has
        # no `cover` field, so WTForms never invoked it. Renaming it to match
        # the `pic` field activates the sanitization (mirrors
        # TreasureForm.cover / validate_cover).
        if field.data:
            field.data = re.sub(r'[^a-z0-9_.-]', '_', field.data)


class TreasureForm(Form):
    """Create/edit form for a Treasure item."""
    category = QuerySelectField(
        query_factory=GenFormSelect.categories, label=u"category")
    name = TextField(label=u"title")
    status = SelectField(label=u"status", default="0")
    brand = TextField(label=u"brand")
    code = TextField(label=u"code")
    origin = TextField(label=u"origin")
    period = TextField(label=u"period")
    viewpoint = TextAreaField(label=u"viewpoint")
    description = CKTextAreaField(label=u"desc")
    price = TextField(label=u"price")
    online = BooleanField(label=u"line")
    pick = BooleanField(label=u"pick")
    cover = HiddenField(label=u"cover")
    image_file = FileField(label=u"img", validators=[
        # FileRequired(),
        FileAllowed(['jpg', 'png'], 'Images only!')
    ])
    position = IntegerField(
        label=u"position", validators=[validators.optional()])
    detail = TextField(label=u"detail")
    story = CKTextAreaField(label=u"story")
    style = CKTextAreaField(label=u"style")

    def __init__(self, *args, **kwargs):
        super(TreasureForm, self).__init__(*args, **kwargs)
        # Choices must be set per-instance; the factory yields a fresh
        # iterator each call.
        self.status.choices = GenFormSelect.status()

    def validate_cover(form, field):
        # Sanitize the stored cover file name (same rule as CategoryForm.pic).
        if field.data:
            field.data = re.sub(r'[^a-z0-9_.-]', '_', field.data)


class PhotoForm(Form):
    """Form for attaching a photo to a treasure."""
    treasure = QuerySelectField(
        query_factory=GenFormSelect.treasures,
        allow_blank=True, label=u"treasure")
    pic = HiddenField(label=u"photo")
    image_file = FileField(label=u"image")
    description = CKTextAreaField(label=u"desc")
UTF-8
Python
false
false
2,014
14,645,838,521,534
fe67040be8cc086b6bf3df928c5cef4e9f6e5702
068f358de35f7e57ad969c635aff95a4b1d3b670
/core/baseHeaders.py
feb128576b056367e16c078de9a60490bc9c5ecf
[ "GPL-1.0-or-later", "GPL-2.0-only" ]
non_permissive
BackupTheBerlios/pyhttpd-svn
https://github.com/BackupTheBerlios/pyhttpd-svn
39de2ac95ab22ff9c97a0ffdb879879c4ce213fb
7e9b81b4ceae67fc653f4c7083c3657ac295a841
refs/heads/master
2021-01-22T19:13:51.006813
2006-03-22T08:56:44
2006-03-22T08:56:44
40,803,887
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# -*- coding: utf-8 -*- ################################################################## # pyHTTPd # $Id$ # (c) 2006 by Tim Taubert ################################################################## ''' def parseHeaders(httpd): parseCookies(httpd) def parseCookies(httpd): cookies = httpd.headers.getheader("cookie") if cookies: cookies = cookies.split(";") for cookie in cookies: name, value = cookie.strip().split("=") httpd.cookies[name] = value def parsePOSTData(httpd): clen = httpd.headers.getheader("content-length") httpd.posttype = httpd.headers.getheader("content-type") if clen: httpd.postdata = httpd.rfile.read(int(clen)) def parseGETData(httpd, data): parseValues(httpd, data) def parseValues(data): postdata = {} fields = data.split("&") for field in fields: print field name, value = field.split("=") postdata[name] = value return postdata '''
UTF-8
Python
false
false
2,006
8,203,387,545,178
ab0e0fdeab16ac4a2bcf67ae1535744e842b4fc3
e867517068ade1572691ac86c6f2ad6596c0d559
/film20/messages/models.py
b80ccf10f14b6308f8439074778b17320221aedc
[]
no_license
manlan2/filmaster
https://github.com/manlan2/filmaster
044ec124d91da0b6dcf2eb5b8af5aec6f0fffd53
90b2bb72c2bab9dfea0c0837971a625bc6880630
refs/heads/master
2021-05-26T22:24:55.012908
2012-05-27T09:30:37
2012-05-27T09:30:37
107,661,541
1
0
null
true
2017-10-20T09:51:53
2017-10-20T09:51:53
2017-10-20T09:51:53
2012-05-27T09:38:13
9,548
0
0
0
null
null
null
#-------------------------------------------------------------------------------
# Filmaster - a social web network and recommendation engine
# Copyright (c) 2009 Filmaster (Borys Musielak, Adam Zielinski).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#-------------------------------------------------------------------------------
# Private user-to-user messaging: Message rows grouped into Conversation
# rows, with denormalized per-side message/unread counters maintained by the
# methods below.
import datetime
from django.db import models
from django.conf import settings
from django.db.models import signals, get_app
from django.core.exceptions import ImproperlyConfigured
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _
from django.core.urlresolvers import reverse

from film20.utils import cache_helper as cache
from film20.utils.db import QuerySet

import logging
logger = logging.getLogger(__name__)


class MessageManager(models.Manager):
    """Manager exposing the standard inbox/outbox/trash querysets."""

    def inbox_for(self, user):
        """
        Returns all messages that were received by the given user and are not
        marked as deleted.
        """
        return self.filter(
            recipient=user,
            recipient_deleted_at__isnull=True,
        )

    def outbox_for(self, user):
        """
        Returns all messages that were sent by the given user and are not
        marked as deleted.
        """
        return self.filter(
            sender=user,
            sender_deleted_at__isnull=True,
        )

    def trash_for(self, user):
        """
        Returns all messages that were either received or sent by the given
        user and are marked as deleted.
        """
        return self.filter(
            recipient=user,
            recipient_deleted_at__isnull=False,
        ) | self.filter(
            sender=user,
            sender_deleted_at__isnull=False,
        )


class Message(models.Model):
    """
    A private message from user to user
    """
    subject = models.CharField(_("Subject"), max_length=120)
    body = models.TextField(_("Body"))
    sender = models.ForeignKey(User, related_name='sent_messages', verbose_name=_("Sender"))
    recipient = models.ForeignKey(User, related_name='received_messages', null=True, blank=True, verbose_name=_("Recipient"))
    # Reply chain: the message this one answers, if any.
    parent_msg = models.ForeignKey('self', related_name='next_messages', null=True, blank=True, verbose_name=_("Parent message"))
    # Thread container; assigned automatically in save() for new messages.
    conversation = models.ForeignKey('Conversation', related_name='messages', null=True, blank=True, verbose_name=_("Conversation"))
    sent_at = models.DateTimeField(_("sent at"), null=True, blank=True, auto_now_add=True)
    read_at = models.DateTimeField(_("read at"), null=True, blank=True)
    replied_at = models.DateTimeField(_("replied at"), null=True, blank=True)
    # Per-side soft-delete timestamps; a NULL value means "not deleted".
    sender_deleted_at = models.DateTimeField(_("Sender deleted at"), null=True, blank=True)
    recipient_deleted_at = models.DateTimeField(_("Recipient deleted at"), null=True, blank=True)

    objects = MessageManager()

    def mark_as_read(self):
        # Only the first call has any effect; also keeps the conversation's
        # denormalized unread counter for the recipient in sync.
        if self.read_at is None:
            self.read_at = datetime.datetime.now()
            self.save()
            if self.conversation:
                # decrement conversation unread cnt for message recipient
                if self.recipient_id == self.conversation.recipient_id:
                    self.conversation.recipient_unread_cnt = max(self.conversation.recipient_unread_cnt - 1, 0)
                if self.recipient_id == self.conversation.sender_id:
                    self.conversation.sender_unread_cnt = max(self.conversation.sender_unread_cnt - 1, 0)
                self.conversation.save()

    def new(self):
        """returns whether the recipient has read the message or not"""
        return self.read_at is None

    def replied(self):
        """returns whether the recipient has written a reply to this message"""
        if self.replied_at is not None:
            return True
        return False

    def __unicode__(self):
        return self.subject

    def get_subject(self):
        # Fall back to a translated placeholder for blank/whitespace subjects.
        return (self.subject or '').strip() or _('no subject')

    def get_absolute_url(self):
        # Messages are viewed within their conversation, anchored by id.
        return reverse('messages_view_conversation', args=[self.conversation_id]) + '#message_%s' % self.id

    def delete_by(self, user, update_conversation=True):
        """Soft-delete this message for `user` (sender and/or recipient side).

        Returns True if anything changed; returns None otherwise.  When
        `update_conversation` is set, the conversation's denormalized
        message/unread counters are decremented too.
        """
        now = datetime.datetime.now()
        c1 = user == self.sender and not self.sender_deleted_at
        c2 = user == self.recipient and not self.recipient_deleted_at
        if c1 or c2:
            if c1:
                self.sender_deleted_at = now
            if c2:
                self.recipient_deleted_at = now
            self.save()
            if update_conversation and self.conversation:
                self.conversation.inc_msg_cnt(user, -1)
                if not self.read_at:
                    self.conversation.inc_unread_cnt(user, -1)
                self.conversation.save()
            return True

    def undelete_by(self, user, update_conversation=True):
        """Reverse of delete_by(): clears the per-side deletion timestamp."""
        c1 = user == self.sender and self.sender_deleted_at
        c2 = user == self.recipient and self.recipient_deleted_at
        if c1 or c2:
            if c1:
                self.sender_deleted_at = None
            if c2:
                self.recipient_deleted_at = None
            self.save()
            if update_conversation and self.conversation:
                self.conversation.inc_msg_cnt(user, 1)
                if not self.read_at:
                    self.conversation.inc_unread_cnt(user, 1)
                self.conversation.save()
            return True

    def fix(self, level=0):
        # One-off data migration helper: retroactively attaches messages that
        # predate the Conversation model to a (possibly new) conversation.
        # `level` is only used to indent the progress printout.
        if not self.conversation_id:
            if self.parent_msg:
                # make sure parent is fixed
                self.parent_msg.fix(level+1)
                conversation = self.parent_msg.conversation
                conversation.sender_cnt += 1
                conversation.recipient_cnt += 1
            else:
                conversation = Conversation()
                conversation.sender_cnt = 1
                conversation.recipient_cnt = 1
                conversation.sender = self.sender
                conversation.recipient = self.recipient
            if self.read_at is None and self.recipient_deleted_at is None:
                conversation.inc_unread_cnt(self.recipient, 1)
            if self.replied_at is not None:
                conversation.is_replied = True
            conversation.subject = self.subject
            conversation.body = self.body
            conversation.last_sender = self.sender
            conversation.updated_at = self.sent_at
            conversation.save()
            self.conversation=conversation
            self.save()
            print ' '*level, "FIXED"

    @classmethod
    def fix_all(cls):
        # Migration driver: fix() every orphaned message, then recompute each
        # conversation's updated_at / is_replied from its messages.
        while True:
            q=cls.objects.filter(conversation__isnull=True).order_by('sent_at')
            item = list(q[0:1])
            item = item and item[0]
            if not item:
                break
            item.fix()
        total = Conversation.objects.count()
        cnt = 0
        for c in Conversation.objects.all():
            last_msg = c.messages.order_by('-sent_at')
            last_msg = last_msg and last_msg[0]
            if last_msg:
                c.updated_at = last_msg.sent_at
            c.is_replied = bool(c.messages.filter(replied_at__isnull=False))
            c.save()
            cnt += 1
            print cnt, '/', total

    def save(self, force_insert=False, force_update=False):
        # On first save of a message, create or update its conversation:
        # bump counters, mark the parent as replied when the recipient
        # answers, and refresh the conversation's summary fields.
        if not self.id:
            if self.parent_msg:
                conversation = self.parent_msg.conversation
                # parent_msg.conversation is not None only for new conversations
                if conversation:
                    conversation.sender_cnt += 1
                    conversation.recipient_cnt += 1
                if self.recipient_id == self.parent_msg.sender_id:
                    self.parent_msg.replied_at = datetime.datetime.now()
                    self.parent_msg.save()
                    conversation.is_replied = True
            else:
                conversation = Conversation()
                conversation.sender_cnt = 1
                conversation.recipient_cnt = 1
                conversation.sender = self.sender
                conversation.recipient = self.recipient
            if conversation:
                conversation.inc_unread_cnt(self.recipient, 1)
                conversation.subject = self.subject
                conversation.body = self.body
                conversation.last_sender = self.sender
                conversation.updated_at = self.sent_at or datetime.datetime.now()
                conversation.save()
                self.conversation = conversation
        super(Message, self).save(force_insert, force_update)

    @classmethod
    def send(cls, sender, recipients, subject, body, parent_msg=None):
        """Create one Message per recipient and fire the matching
        django-notification events (if the notification app is installed).
        Returns the list of created messages.
        """
        message_list = []
        for r in recipients:
            msg = cls(
                sender = sender,
                recipient = r,
                subject = subject,
                body = body,
                parent_msg = parent_msg,
                sent_at = datetime.datetime.now()
            )
            msg.save()
            message_list.append(msg)
            if notification:
                replied = msg.parent_msg and (msg.recipient_id == msg.parent_msg.sender_id)
                if replied:
                    notification.send([sender], "messages_replied", {'message': msg,})
                    notification.send([r], "messages_reply_received", {'message': msg,}, priority=notification.PRIORITY_REALTIME)
                else:
                    notification.send([sender], "messages_sent", {'message': msg,})
                    notification.send([r], "messages_received", {'message': msg,}, priority=notification.PRIORITY_REALTIME)
        return message_list

    class Meta:
        ordering = ['-sent_at']
        verbose_name = _("Message")
        verbose_name_plural = _("Messages")


class ConversationQuerySet(QuerySet):
    """Queryset that can carry a `user` attribute through clones/iteration so
    Conversation.is_read()/mark_read()/cnt() know whose side to look at."""

    def default_filter(self):
        return self.order_by('-updated_at')

    def user_conversations(self, user, replied=False):
        # Conversations visible to `user` (non-zero counter on their side);
        # `replied` additionally restricts the sender side to answered ones.
        extra = replied and {'is_replied':True} or {}
        query = self.filter(models.Q(sender=user, sender_cnt__gt=0, **extra) | \
                models.Q(recipient=user, recipient_cnt__gt=0))
        return query._clone(user=user)

    def unread_counter(self, user):
        # Cached count of conversations with unread messages for `user`.
        key = cache.Key("conversation_unread_counter", user.id)
        cnt = cache.get(key)
        if cnt is None:
            query = self.filter(models.Q(sender=user, sender_cnt__gt=0, sender_unread_cnt__gt=0) | \
                    models.Q(recipient=user, recipient_cnt__gt=0, recipient_unread_cnt__gt=0))
            cnt = query.distinct().count()
            cache.set(key, cnt)
        return cnt

    def iterator(self):
        # Stamp each yielded conversation with the queryset's user (if set).
        items = super(ConversationQuerySet, self).iterator()
        def _fix(self, item):
            if hasattr(self, 'user'):
                item.user = self.user
            return item
        return (_fix(self, i) for i in items)

    def _clone(self, *args, **kw):
        # Propagate the `user` attribute to cloned querysets.
        ret = super(ConversationQuerySet, self)._clone(*args, **kw)
        if not hasattr(ret, 'user') and hasattr(self, 'user'):
            ret.user = self.user
        return ret


class Conversation(models.Model):
    """A two-party message thread with denormalized per-side counters."""
    sender = models.ForeignKey(User, related_name="sent_conversations")
    recipient = models.ForeignKey(User, related_name="received_conversations")
    last_sender = models.ForeignKey(User)
    subject = models.CharField(_("Subject"), max_length=120)
    body = models.TextField(null=True, blank=True)
    created_at = models.DateTimeField(_("created at"), auto_now_add=True)
    # NOTE(review): verbose name "sent at" on updated_at looks copy-pasted
    # from Message -- confirm before changing the translated label.
    updated_at = models.DateTimeField(_("sent at"), auto_now_add=True)
    # Visible-message count per side; 0 hides the conversation for that user.
    sender_cnt = models.IntegerField(default=0, null=False, blank=False)
    recipient_cnt =models.IntegerField(default=0, null=False, blank=False)
    # Unread-message count per side.
    sender_unread_cnt = models.IntegerField(default=0, null=False, blank=False)
    recipient_unread_cnt =models.IntegerField(default=0, null=False, blank=False)
    is_replied = models.BooleanField(default=False)

    objects = ConversationQuerySet.as_manager()

    class Meta:
        ordering = ('-updated_at',)
        verbose_name = _("Conversation")
        verbose_name_plural = _("Conversations")

    def user_messages(self, user):
        # Messages of this thread still visible to `user`, oldest first.
        return self.messages.exclude(sender_deleted_at__isnull=False, sender=user)\
                .exclude(recipient_deleted_at__isnull=False, recipient=user)\
                .order_by('sent_at')

    def delete_by(self, user):
        # Soft-delete the whole thread for `user` and zero their counters.
        for msg in self.messages.all():
            msg.delete_by(user, update_conversation=False)
        if user == self.sender:
            self.sender_cnt = 0
            self.sender_unread_cnt = 0
        if user == self.recipient:
            self.recipient_cnt = 0
            self.recipient_unread_cnt = 0
        self.save()

    def undelete_by(self, user):
        # Restore the whole thread for `user`, recomputing their counters.
        cnt = 0
        unread_cnt = 0
        for msg in self.messages.all():
            undeleted = msg.undelete_by(user, update_conversation=False)
            cnt += 1
            unread_cnt += bool(undeleted and not msg.read_at)
        if user == self.sender:
            self.sender_cnt = cnt
            self.sender_unread_cnt += unread_cnt
        if user == self.recipient:
            self.recipient_cnt = cnt
            self.recipient_unread_cnt += unread_cnt
        self.save()

    def inc_msg_cnt(self, user, delta):
        # Adjust the visible-message counter on `user`'s side, floored at 0.
        if user == self.sender:
            self.sender_cnt = max(0, self.sender_cnt + delta)
        if user == self.recipient:
            self.recipient_cnt = max(0, self.recipient_cnt + delta)

    def inc_unread_cnt(self, user, delta):
        # Adjust the unread counter on `user`'s side, floored at 0.
        if user == self.sender:
            self.sender_unread_cnt = max(0, self.sender_unread_cnt + delta)
        if user == self.recipient:
            self.recipient_unread_cnt = max(0, self.recipient_unread_cnt + delta)

    def threaded_messages(self, user):
        # Build (and cache) a depth-first flattening of the reply tree,
        # annotating each message with `level` (indent depth) and `children`,
        # then drop messages deleted by `user`.
        cache_key = "conversation_thread_%s" % self.pk
        thread = cache.get(cache_key)
        if thread is not None:
            return thread
        query = self.messages.order_by('id')
        messages = {}
        root = None
        for msg in query:
            msg.children = []
            messages[msg.id] = msg
            if msg.parent_msg_id:
                parent = messages.get(msg.parent_msg_id)
                if parent:
                    msg.level = parent.level + 1
                    parent.children.append(msg)
            else:
                msg.level = 0
                root = msg
        def traverse(root):
            yield root
            for c in root.children:
                for i in traverse(c):
                    yield i
        def not_deleted(msg):
            return msg.sender_id == user.id and msg.sender_deleted_at is None or \
                   msg.recipient_id == user.id and msg.recipient_deleted_at is None
        thread = root and list(traverse(root)) or ()
        thread = filter(not_deleted, thread)
        cache.set(cache_key, thread)
        return thread

    @classmethod
    def invalidate_cache(cls, sender, instance, created, *args, **kw):
        # post_save hook: drop the cached thread and both users' unread
        # counters whenever a conversation row changes.
        cache.delete("conversation_thread_%s" % instance.pk)
        key1 = cache.Key("conversation_unread_counter", instance.sender_id)
        key2 = cache.Key("conversation_unread_counter", instance.recipient_id)
        cache.delete(key1)
        cache.delete(key2)

    def is_read(self):
        # Requires `self.user` to have been stamped by ConversationQuerySet.
        assert self.user
        if self.user == self.sender:
            return not bool(self.sender_unread_cnt)
        else:
            return not bool(self.recipient_unread_cnt)

    def mark_read(self):
        # Requires `self.user`; clears the unread counter on their side only.
        assert self.user
        if self.user == self.sender:
            self.sender_unread_cnt = 0
        if self.user == self.recipient:
            self.recipient_unread_cnt = 0

    def cnt(self):
        # Visible-message count for the stamped `self.user`.
        assert self.user
        return self.sender_cnt if self.user == self.sender else self.recipient_cnt

    @models.permalink
    def get_absolute_url(self):
        return ('messages_view_conversation', [self.id])

signals.post_save.connect(Conversation.invalidate_cache, sender=Conversation)


class LazyUnreadCnt(object):
    """Descriptor caching the unread-conversation count on the user object."""
    def __get__(self, user, obj_type=None):
        if user.is_authenticated():
            if not hasattr(user, '_unread_cnt'):
                user._unread_cnt = Conversation.objects.unread_counter(user)
            return user._unread_cnt

User.add_to_class('unread_conversation_counter', LazyUnreadCnt())

# fallback for email notification if django-notification could not be found
try:
    notification = get_app('notification')
except ImproperlyConfigured:
    notification = None

from messages.utils import new_message_email
signals.post_save.connect(new_message_email, sender=Message)


def inbox_count_for(user):
    """
    returns the number of unread messages for the given user but does not
    mark them seen
    """
    return Message.objects.filter(recipient=user, read_at__isnull=True,
                                  recipient_deleted_at__isnull=True).count()
UTF-8
Python
false
false
2,012
4,423,816,337,459
821c67341cf90aefffd5b690519e11cd8364eb50
5499d1dab4d2f0594384ce1a19213dbb8d0d7d43
/sickbeard/providers/fanzub.py
ee43e96651414d3fb07d14be2e5c5480ca3a8c32
[ "GPL-1.0-or-later", "LicenseRef-scancode-warranty-disclaimer", "GPL-3.0-only", "GPL-3.0-or-later", "LGPL-2.0-or-later", "LGPL-2.1-or-later" ]
non_permissive
keithbarrett/Sick-Beard-Animes
https://github.com/keithbarrett/Sick-Beard-Animes
7d912fd72c03f7d6464725ff2b11be50ff8ac337
732fe4f33121e8f04deaede9a4ac7d99547b1915
refs/heads/master
2021-01-16T22:31:30.050481
2014-12-23T18:00:32
2014-12-23T18:00:32
36,258,020
1
0
null
true
2015-05-25T22:32:29
2015-05-25T22:32:29
2014-12-23T18:00:44
2015-05-22T17:07:27
12,220
0
0
0
null
null
null
# Author: Nic Wolfe <[email protected]> # URL: http://code.google.com/p/sickbeard/ # # This file is part of Sick Beard. # # Sick Beard is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Sick Beard is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Sick Beard. If not, see <http://www.gnu.org/licenses/>. import urllib import datetime import time from xml.dom.minidom import parseString import sickbeard import generic from sickbeard import classes, show_name_helpers, helpers from sickbeard import exceptions, logger, db from sickbeard.common import * from sickbeard import tvcache from lib.dateutil.parser import parse as parseDate class Fanzub(generic.NZBProvider): def __init__(self): generic.NZBProvider.__init__(self, "Fanzub") self.supportsBacklog = False self.description = u"Only useful for anime.<br>Pseudo backlog support." 
self.supportsAbsoluteNumbering = True self.cache = FanzubCache(self) self.url = 'http://fanzub.com/' def isEnabled(self): return sickbeard.FANZUB def _checkAuth(self): return True def _get_season_search_strings(self, show, season, scene=False): names = [] if season is -1: names = [show.name.encode('utf-8')] names.extend(show_name_helpers.makeSceneSeasonSearchString(show, season, scene=scene)) return names def _get_episode_search_strings(self, ep_obj): # names = [(ep_obj.show.name + " " + str(ep_obj.absolute_number)).encode('utf-8')] names = show_name_helpers.makeSceneSearchString(ep_obj) return names def _doSearch(self, search_string, show=None): if show and not show.is_anime: logger.log(u"" + str(show.name) + " is not an anime skiping " + str(self.name)) return [] params = { "cat": "anime", "q": search_string.encode('utf-8'), "max": "100" } searchURL = self.url + "rss?" + urllib.urlencode(params) logger.log(u"Search string: " + searchURL, logger.DEBUG) searchResult = self.getURL(searchURL) # Pause to avoid 503's time.sleep(5) if searchResult == None: return [] try: parsedXML = parseString(searchResult) items = parsedXML.getElementsByTagName('item') except Exception, e: logger.log(u"Error trying to load FANZUB RSS feed: " + str(e).decode('utf-8'), logger.ERROR) return [] results = [] for curItem in items: (title, url) = self._get_title_and_url(curItem) if not title or not url: logger.log(u"The XML returned from the FANZUB RSS feed is incomplete, this result is unusable: " + searchResult, logger.ERROR) continue url = url.replace('&amp;', '&') results.append(curItem) return results class FanzubCache(tvcache.TVCache): def __init__(self, provider): tvcache.TVCache.__init__(self, provider) # only poll Fanzub every 20 minutes max # we get 100 post each call ! self.minTime = 20 def _getRSSData(self): url = self.provider.url + 'rss?' 
urlArgs = {"cat": "anime".encode('utf-8'), "max": "100".encode('utf-8') } url += urllib.urlencode(urlArgs) logger.log(u"FANZUB cache update URL: " + url, logger.DEBUG) data = self.provider.getURL(url) return data def _checkItemAuth(self, title, url): return True provider = Fanzub()
UTF-8
Python
false
false
2,014
10,995,116,302,941
d262d18ab6c3d1fc02cf4e76edb666edb399fc3f
4683fba0f2f6a2d5065a7de1b11a8a53ae6c46a0
/velo-monitor/tests/utils.py
6232b049df5c6698e7cee1362381943edeb77cf1
[]
no_license
suvayu/LHCbVeloView
https://github.com/suvayu/LHCbVeloView
bc42d1a76f56fe4f395a61880e206596bec2dfa8
c7fb34a7a6c81baa32cb33589fd6eb3931f0dd85
refs/heads/master
2020-04-06T07:09:30.173842
2014-12-02T15:10:49
2014-12-02T15:10:49
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
"""Utilities for testing the VELO monitor.""" from veloview.core import config as veloview_config RUNS = sorted(range(123987, 123995) + range(123960, 123975), reverse=True) def set_up_run_list(): """Create a dummy run list file filled with RUNS.""" prlf = '/tmp/runList.txt' veloview_config.old_prlf = veloview_config.processed_run_list_file veloview_config.processed_run_list_file = prlf with open(prlf, "w") as f: for r in RUNS: f.write("{0}\n".format(r)) def tear_down_run_list(): veloview_config.processed_run_list_file = veloview_config.old_prlf
UTF-8
Python
false
false
2,014
19,559,281,089,660
8aae06e175b2a013b3852e13374aaed4b1ef5d88
f0199a19b273238d73080282d80654f0e57a9e3e
/ovobot/production.py
62e93c971aee1b86187bc3904e6a49d22480b134
[]
no_license
mordaha/ovobot
https://github.com/mordaha/ovobot
38c29e8c8fd043bed229f9a9429a475d9cfe00dd
e197c6b523666b56770be731a689ad8a15b6b182
refs/heads/master
2020-05-16T13:42:23.731142
2011-12-04T16:41:50
2011-12-04T16:41:50
1,776,536
2
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# Production settings module: re-exports the base ovobot settings unchanged.
# Any production-specific overrides would be added after this import.
from ovobot.settings import *
UTF-8
Python
false
false
2,011
5,549,097,753,820
4d5eaebfb51ec08e39faad3bc95cb0b4b78169b7
bfc874767de27c84f3b61b7b5d0b6a4ee1fefb7f
/core/data/SpiderPendingResponsesDataModel.py
ee88914eb6e087b42033ac300f2743446c2758c4
[ "GPL-3.0-only" ]
non_permissive
pombreda/raft
https://github.com/pombreda/raft
294774b70d07fb4b7d57fac3ddb92e2681fb6a7f
c81c5778a8113e3c7095334ed91dc68352e5da5d
refs/heads/master
2021-01-01T19:07:04.417738
2014-08-12T21:17:50
2014-08-12T21:17:50
32,209,251
1
1
null
null
null
null
null
null
null
null
null
null
null
null
null
#
# This module supports the data model for the spider results
#
# Author: Gregory Fleischer ([email protected])
#
# Copyright (c) 2011 RAFT Team
#
# This file is part of RAFT.
#
# RAFT is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# RAFT is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with RAFT. If not, see <http://www.gnu.org/licenses/>.
#
from core.data.DataTableDataModel import DataTableDataModel
from core.database.constants import SpiderPendingResponsesTable


class SpiderPendingResponsesDataModel(DataTableDataModel):
    """Table model listing spider responses that are pending processing."""

    # (column header, backing table column) pairs, in display order.
    ITEM_DEFINITION = (
        ('#', SpiderPendingResponsesTable.RESPONSE_ID),
        ('Type', SpiderPendingResponsesTable.REQUEST_TYPE),
        ('Depth', SpiderPendingResponsesTable.DEPTH),
        ('Status', SpiderPendingResponsesTable.STATUS),
    )

    def __init__(self, framework, parent=None):
        # All table behaviour lives in the generic base model; this class
        # only supplies its column definition.
        DataTableDataModel.__init__(
            self,
            framework,
            SpiderPendingResponsesDataModel.ITEM_DEFINITION,
            parent,
        )
UTF-8
Python
false
false
2,014
8,203,387,540,094
bc994962d9c36a73a3f3a400ec54a3f72e8036c9
0d2c482f24e6770626f071871adb416231160670
/myapp/mp4_parser.py
e1b47a90bbcb681fe11b3dbb02db0f7b3a5ecdc5
[]
no_license
tomerf8/AdInsertionPy
https://github.com/tomerf8/AdInsertionPy
c0c956cc48b6cd505138d9648d5c72e4f7247984
617cb13c3abd06d7ea97052510e2f852819b1910
refs/heads/master
2021-01-10T02:44:39.639851
2014-01-17T23:11:22
2014-01-17T23:11:22
50,796,127
3
1
null
null
null
null
null
null
null
null
null
null
null
null
null
'''
Created on Apr 26, 2013

@author: mrklin
'''
import os
import re
# Fix: `import commands` previously sat at the bottom of the file, after the
# definition of scan_mp4_files() which uses commands.getoutput(); it only
# worked because module import runs before __main__ calls the function.
import commands

from myutils import utils_class
from mytools import tools_class

tools = tools_class()
utils = utils_class()

# NOTE(review): 'mrklsin' here vs 'mrklin' in media_path -- one of the two is
# probably a typo; confirm the real directory names before changing.
ad_path = '/home/mrklsin/Temp/mayo/'
media_path = '/home/mrklin/Temp/car/'


def scan_mp4_files(path):
    """Scan `path` for .m4s segment files; return {segment_number: info_dict}
    where info_dict is tools.get_mp4_data() output plus full_path/file_name.
    """
    # check for list in path
    output = commands.getoutput('ls '+path)
    file_list = output.split('\n')
    # Scan files
    file_dict = {}
    for file_name in file_list:
        # make sure files are m4s
        if '.m4s' not in file_name:
            continue
        res = tools.get_mp4_data(path + file_name)
        # check file search of TFDT ok
        if res != {}:
            # Try to get segment number
            match = re.search(".*?([0-9]+).m4s",file_name)
            if match:
                segmnet_num = match.group(1)
                res['full_path'] = path + file_name
                res['file_name'] = file_name
                file_dict[int(segmnet_num)] = res
    return file_dict


def copy_tfdt_src_to_dst(src_data,dst_data):
    """Copy both TFDT headers from the source segment into the destination
    segment's file on disk."""
    utils.debug_print('Updating TFDT from '+src_data['full_path']+' to '+dst_data['full_path'],'log')
    tools.change_mp4_data(src_data['full_path'],src_data['tfdt1'],dst_data['tfdt1'])
    tools.change_mp4_data(src_data['full_path'],src_data['tfdt2'],dst_data['tfdt2'])
    utils.debug_print('Done updating TFDT','log')


def insert_comercial_to(ad_dict, all_media_dict, segmnet_num):
    """Splice the ad segment in at `segmnet_num` and shift the TFDT
    timestamps of every following media segment by the ad's offsets."""
    # Insert ad
    utils.debug_print('Updating TFDT of ad','log')
    if segmnet_num in all_media_dict:
        copy_tfdt_src_to_dst(ad_dict,all_media_dict[segmnet_num])
    while (segmnet_num in all_media_dict) :
        curr_file = all_media_dict[segmnet_num]
        # Calculate new offset of TFDT1/2
        new_tfdt1 = tools.make_tfdt_header(curr_file['tfdt1'],ad_dict['tfdt1_offset'])
        new_tfdt2 = tools.make_tfdt_header(curr_file['tfdt2'],ad_dict['tfdt2_offset'])
        # Update the files
        tools.change_mp4_data(curr_file['full_path'],curr_file['tfdt1'],new_tfdt1)
        tools.change_mp4_data(curr_file['full_path'],curr_file['tfdt2'],new_tfdt2)
        segmnet_num += 1


'''# update all following
utils.debug_print('Updating TFDT of files after ad')
while (segmnet_num in all_media_dict) and ((segmnet_num+1) in all_media_dict):
    copy_tfdt_src_to_dst(all_media_dict[segmnet_num], all_media_dict[segmnet_num+1])
    segmnet_num += 1

utils.debug_print('Updating TFDT of last file')
# Fix last segmnet'''


if __name__ == '__main__':
    # Scan both dirs
    ad_dict = scan_mp4_files(ad_path)
    ad_dict[1]['tfdt1_offset'] = '192512'
    ad_dict[1]['tfdt2_offset'] = '109000'
    media_dict = scan_mp4_files(media_path)
    # Fix: removed a leftover `import pdb;pdb.set_trace()` debug breakpoint
    # that halted every run here.
    # Update the rest of the files
    insert_comercial_to(ad_dict[1], media_dict, 4)
UTF-8
Python
false
false
2,014
14,104,672,625,066
b59902498c1f08fbb4d548886e569e6e590466fe
099c4154c0d54c9687c4c8de38d63755568c2451
/exercises/ex30.py
2de8270de4b0d3bd19153f382173ab2cc5f2be49
[]
no_license
suptaphilip/python_tutorials
https://github.com/suptaphilip/python_tutorials
c7cd57f575d76b0d5d56c10014241b628ff2b70e
2582a1eb52a48641f0ecf259d6e42680b591b776
refs/heads/master
2020-04-05T19:04:08.626860
2013-11-14T23:54:25
2013-11-14T23:54:25
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/env python
# exercise 30: else and if
# Fixes: shebang was "#! usr/bin/env python" (missing the leading slash);
# print statements converted to function-call form, which prints identically
# under Python 2 for a single argument and also runs under Python 3.

people = 30
cars = 30
buses = 5

# if the number of cars are greater than the number of people than print ...
if cars > people:
    print("We should take the cars.")
# other wise if the number of cars are less than people print...
elif cars < people:
    print("We should not take the cars.")
# other wise print ...
# *notice the else statement is immediately followed by a colon.
else:
    print("We can't decide.")

if buses > cars:
    print("That's too many buses.")
elif buses < cars:
    print("Maybe we could take the buses.")
else:
    print("We still can't decide.")

if people > buses:
    print("Alright, let's just take the buses.")
else:
    print("Fine, let's stay home then.")

# extra credit:
if cars > buses and people < cars:
    print("There's no reason to drive.")
elif cars < buses and people > buses:
    print("So maybe driving would be good?")
elif people <= cars and people > buses:
    print("Let's take the bus!")
else:
    print("Driving is a last resort!")
UTF-8
Python
false
false
2,013
15,599,321,260,629
c63a15192d520e223c58d7a706f735504bf23429
06283ebd66870627aecc230c4aa8b9606ff7c4f5
/hitranlbl/vss_query.py
ec5ac9005d52cfed607539fdee6fd3f88983fe91
[]
no_license
xnx/www_hitran
https://github.com/xnx/www_hitran
c0e26a87134b7e2aac435008dde39ea4713ef633
023eb6aa75541b530330d13601be27aceed926de
refs/heads/master
2015-08-01T23:56:22
2013-01-18T12:39:20
2013-01-18T12:39:20
5,718,406
1
1
null
null
null
null
null
null
null
null
null
null
null
null
null
# -*- coding: utf-8 -*- # vss_query.py # Defines the class VSSQuery, representing a query made of the database # in the VSS query language, a subset of SQL. from caseless_dict import CaselessDict from string import lower from datetime import date import sqlparse import logging log = logging.getLogger('vamdc.hitran_node') from tap_utils import get_base_URL, dquote from vamdc_standards import REQUESTABLES from dictionaries import restrictable_types from hitranmeta.models import Iso from xsams_queries import get_xsams_src_query, get_xsams_states_query,\ get_xsams_trans_query, get_xsams_trans_count_query,\ get_xsams_isos_count_query class VSSQuery(object): """ A class representing the VSS query, with methods to parse and validate it. """ def __init__(self, request): self.is_valid = True self.error_message = '' try: self.request = CaselessDict(dict(request.REQUEST)) except Exception, e: self.is_valid = False self.error_message = 'Failed to read argument dictionary: %s' % e log.error(self.error_message) if self.is_valid: self.parse_query() self.full_url = '%ssync?%s' % (get_base_URL(request), request.META.get('QUERY_STRING')) def parse_query(self): """ Parse and validate the query as VSS2. 
""" error_list = [] # check LANG=VSS2 try: self.lang = lower(self.request['LANG']) except: error_list.append('Couldn\'t find LANG in request') else: if self.lang != 'vss2': error_list.append('Only LANG=VSS2 is supported') # get the QUERY string try: self.query = self.request['QUERY'] except: error_list.append('Couldn\'t find QUERY in request') # get the FORMAT try: self.format = lower(self.request['FORMAT']) except: error_list.append('Couldn\'t find FORMAT in request') else: if self.format not in ('xsams', 'par'): error_list.append('Only XSAMS and PAR formats are supported') # parse the query try: self.parsed_sql = sqlparse.SQL.parseString(self.query, parseAll=True) except: # we failed to parse the query: bail with extreme prejudice error_list.append('Couldn\'t parse the QUERY string: %s' % self.query) self.error_message = '\n'.join(error_list) self.is_valid = False return self.requestables = set() self.where = self.parsed_sql.where if self.parsed_sql.columns not in ('*', 'ALL'): for requested in self.parsed_sql.columns: requested = lower(requested) if requested not in REQUESTABLES: self.error_list.append( 'Unsupported or unknown REQUESTABLE: %s' % requested) else: self.requestables.add(requested) if 'processes' in self.requestables: self.requestables.add('radiativetransitions') # always return sources self.requestables.add('sources') if error_list: # validation failed self.error_message = '\n'.join(error_list) self.is_valid = False def __str__(self): """ Return a string representation of the query. """ return self.query def make_sql_queries(self): """ Turn the VSS query into a series of SQL queries on the database. The returned queries are in a dictionary, keyed by 'src_query', 'st_query', 't_query' for the sources query, the states query, and the transitions query respectively. """ if not self.where: return {} # parse the where clause into restrictions, joined by logic: logic, restrictions, count = sqlparse.splitWhere(self.where) # logic is e.g. 
['r0', 'and', 'r1', 'and', '(', 'r2', 'or', 'r3', ')'] # restrictions is a dictionary, keyed by '0', '1', ..., e.g. # {'1': ['RadTransWavenumber', '<', '6100.'], # '0': ['RadTransWavenumber', '>', '5000.'], # '2': ['MoleculeChemicalName', 'in', '(', "'H2O'", "'Ammonia'", ')'] # ... } node_restrictions = {} for ri in restrictions: restrictable, op, s_rvals = (restrictions[ri][0], restrictions[ri][1], restrictions[ri][2:]) # refer to all restrictables in lower case from here restrictable = restrictable.lower() if op not in sqlparse.OPTRANS.keys(): raise Exception('Illegal or unsupported operator in' ' restriction: %s' % op) try: restrictable_type = restrictable_types[restrictable] except KeyError: raise Exception('Unknown RESTRICTABLE: %s' % restrictable) try: self.check_rvals_type(s_rvals, restrictable_type) except: raise Exception('Invalid value for restrictable %s: %s' % (restrictable, s_rvals)) # translate the VAMDC restrictable keywords into the # appropriate of the hitranlbl_trans table in the HITRAN database # the hitranlbl_trans table must be aliased to 't'. 
Note that # node_restrictions[2] is *always a list*, unlike # restrictions[ri][2] if restrictable == 'radtranswavenumber': node_restrictions['r%s' % ri] = ['t.nu', op] + [s_rvals,] elif restrictable == 'radtranswavelength': op, s_nus = self.lambda_to_nu(op, s_rvals) node_restrictions['r%s' % ri] = ['t.nu', op] + [s_nus,] elif restrictable == 'radtransprobability': node_restrictions['r%s' % ri] = ['t.A', op] + [s_rvals,] elif restrictable in ('inchikey', 'moleculeinchikey'): op, s_iso_ids = self.get_isos_from_other(op, s_rvals, self.iso_from_inchikey) node_restrictions['r%s' % ri] = 't.iso_id', op, s_iso_ids elif restrictable == 'moleculestoichiometricformula': op, s_iso_ids = self.get_isos_from_other(op, s_rvals, self.iso_from_molec_stoich) node_restrictions['r%s' % ri] = 't.iso_id', op, s_iso_ids elif restrictable == 'moleculechemicalname': op, s_iso_ids = self.get_isos_from_other(op, s_rvals, self.iso_from_molec_name) node_restrictions['r%s' % ri] = 't.iso_id', op, s_iso_ids else: raise Exception('Unsupported or invalid restrictable keyword:' ' %s' % restrictable) # add restrictions on valid_to, valid_from dates: # XXX Hard-code these to the current date, because there's currently # no keyword (is there?) for valid_on date in the VAMDC standards. 
today = date.today().strftime('%Y-%m-%d') logic.extend(['and', 'r_valid_from', 'and', 'r_valid_to']) node_restrictions['r_valid_from'] = ['t.valid_from','<=', [dquote(today),]] node_restrictions['r_valid_to'] = ['t.valid_to','>', [dquote(today),]] q_where = [] for x in logic: if x in node_restrictions.keys(): q_where.append(self.make_sql_restriction(node_restrictions[x])) else: q_where.append(x) q_where = ' '.join(q_where) queries = {'src_query': get_xsams_src_query(q_where), 'st_query': get_xsams_states_query(q_where), 't_query': get_xsams_trans_query(q_where), 'tc_query': get_xsams_trans_count_query(q_where), 'ic_query': get_xsams_isos_count_query(q_where), } return queries def make_sql_restriction(self, node_restriction): """ Turn the node_restriction, a tuple of (field, operator, values) into the string representation of a valid SQL restriction. """ name, op, args = node_restriction if len(args) > 1: s_val = '(%s)' % ', '.join(args) else: s_val = args[0] return '%s %s %s' % (name, op, s_val) def lambda_to_nu(self, op, lambdas): """ Convert the arguments of a selection of wavelength (in Å) to the corresponding selection on wavenumber (in cm-1). Adjust the operator in the query fragment accordingly (e.g. '<' to '>'). Arguments: op: the operator in the query fragment (e.g. '<', 'in', '>=', etc.) lambdas: a list of arguments (wavelengths) to the query fragment. Typically, just one value (for operators '<', '>', '=', '>=', '<=', etc.), but could be a list (e.g. for 'in' operator). Returns: a tuple (op, ret_list) of op, the new operator applying to the query fragment on wavenumber and ret_list, a list of wavenumber values corresponding to the query. 
""" nu_list = [] has_parentheses = False if lambdas[0] == '(' and lambdas[-1] == ')': has_parentheses = True for lamda in lambdas: if lamda in ('(', ')'): continue try: # lambda in Å to nu in cm-1 nu = 1.e8/float(lamda) print '%f A = %f cm-1' % (float(lamda), nu) except ZeroDivisionError: # set nu to something huge if lambda = 0 nu = 1.e20 nu_list.append(str(nu)) op = sqlparse.reverse_op(op) if not has_parentheses: if len(nu_list) > 1: raise Exception('Invalid argument to RadTransWavelength: %s' % lambdas) else: return op, nu_list #return op, '(%s)' % (', '.join(nu_list),) ret_list = ['(',] ret_list.extend(nu_list) ret_list.append(')') return op, ret_list def iso_from_inchikey(self, inchikey): """ Return a list of isotopologue IDs matching the provided InChIKey. """ return Iso.objects.filter(InChIKey= inchikey).values_list('id', flat=True) def iso_from_molec_stoich(self, stoichiometric_formula): """ Return a list of isotopologue IDs matching the provided molecular (ie isotope-independent) stoichiometric formula. """ return Iso.objects.filter(molecule__stoichiometric_formula= stoichiometric_formula).values_list('id', flat=True) def iso_from_molec_name(self, name): """ Return a list of isotopologue IDs matching the provided molecule name (ie common chemical name). """ return Iso.objects.filter(molecule__moleculename__name= name).values_list('id', flat=True) def get_isos_from_other(self, op, s_rvals, isos_get_method): """ Return a string of requested isotopologue IDs corresponding to the requested list of s_rvals, using the method specified by isos_get_method. 
""" iso_ids = [] has_parentheses = False if s_rvals[0] == '(' and s_rvals[-1] == ')': has_parentheses = True for s_rval in s_rvals: s_rval = s_rval.strip('"\'') # strip all outside quotes, " and ' if s_rval in ('(', ')'): continue iso_id_list = isos_get_method(s_rval) iso_ids.extend([str(iso_id) for iso_id in iso_id_list]) if not iso_ids: # we didn't find any isotopologues matching the requested InChIKeys return op, ['(-1)',] if not has_parentheses: if len(iso_ids) > 1: # a single e.g. molecular stoichiometric formula maps to more # than one isotopologue, so generalise the operator if op == '=': op = 'in' elif op == '<>': op = 'not in' else: return op, iso_ids return op, iso_ids def check_rvals_type(self, s_rvals, rtype): """ Check that s_rvals corresponds to a list of strings which can be legitimately cast into their correct types. """ if rtype == str: # s_rvals is already a list of strings! return for s_rval in s_rvals: if s_rval in ('(', ')'): # skip the parentheses continue try: rval = rtype(s_rval) except: raise
UTF-8
Python
false
false
2,013
14,645,838,518,007
efbfaaba1496f55df82627f0d32d7954501a2c5f
cf991f5b59a1ac384bf98f599a4e0e836ee58a26
/fabfile.py
2a205afabfbddbd7a2707d1416395ab4f3b30aac
[ "MIT" ]
permissive
DjangoLover/gitality
https://github.com/DjangoLover/gitality
ba932c2966dda6cf8352059c3ce3de11197420d8
8cb063582c113c147ad04f83899c9ee3686a93ab
refs/heads/master
2020-04-09T01:55:46.515025
2013-09-29T22:12:10
2013-09-29T22:12:10
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# Fabric deployment script: reads host/path settings from deploy/config.ini
# and exposes bootstrap/deploy tasks for the project.
from ConfigParser import RawConfigParser
from fabric.api import cd, env, run, shell_env
from fabric.colors import green
from fabric.contrib.files import exists
from fabric.utils import puts

# All settings live in the [fabric_env] section of the INI file; they are
# read once, at import time.
config = RawConfigParser()
with open('deploy/config.ini') as f:
    config.readfp(f)

# Populate fabric's global env.  Order matters: the derived paths below
# are formatted from env values assigned just above them.
env.host_string = config.get('fabric_env', 'host_string')
env.project_name = config.get('fabric_env', 'project_name')
env.project_db_name = env.project_name  # database is named after the project
env.project_home = config.get('fabric_env', 'project_home')
env.project_root_dirname = config.get('fabric_env', 'project_root_dirname')
env.project_root = '{0.project_home}/{0.project_root_dirname}'.format(env)
env.virtualenv_home = config.get('fabric_env', 'virtualenv_home')
env.virtualenv_root = '{0.virtualenv_home}/{0.project_name}'.format(env)
env.virtualenv_activate_command = 'source {.virtualenv_root}/bin/activate'.format(env)
# Marker files in the project root: .down switches the site to maintenance
# mode; touching .reload makes uWSGI reload the application.
env.site_down_file = '.down'
env.touch_reload_file = '.reload'
env.git_repository = config.get('fabric_env', 'git_repository')


def prun(command):
    """
    Runs command from the project root directory, with WORKON_HOME set so
    virtualenvwrapper-aware tools find the right environment.
    """
    with cd(env.project_root), shell_env(WORKON_HOME=env.virtualenv_home):
        run(command)


def make(target):
    """
    Invokes Makefile target.
    """
    prun('make {}'.format(target))


def supervisorctl(action, program='', options=''):
    # thin wrapper over the remote supervisorctl binary
    run('supervisorctl {0} {1} {2}'.format(action, options, program))


def site_down():
    # create the maintenance marker file in the project root
    prun('touch {}'.format(env.site_down_file))
    puts(green('Site is down for maintenance'))


def site_up():
    # remove the maintenance marker file (fails loudly if it is missing)
    prun('rm {}'.format(env.site_down_file))
    puts(green('Site is up and running'))


def touch_reload():
    """
    uWSGI touch reload
    """
    prun('touch {}'.format(env.touch_reload_file))


def git_clone():
    # fresh clone: any existing checkout is removed first
    with cd(env.project_home):
        if exists(env.project_root_dirname):
            run('rm -rf {.project_root_dirname}'.format(env))
        run('git clone -q {0.git_repository} {0.project_root_dirname}'.format(env))


def git_pull():
    prun('git pull -q')


def bootstrap():
    """
    Bootstraps project for the first time.
    """
    git_clone()
    make('bootstrap')
    make('settings_production')
    make('requirements')
    make('db_production')
    make('collectstatic')


def deploy():
    """
    Deploys updated project: puts the site into maintenance mode, updates
    code and dependencies, migrates the database, then reloads and brings
    the site back up.
    """
    site_down()
    git_pull()
    make('requirements')
    make('syncdb')
    make('migrate')
    make('seed_production')
    make('collectstatic')
    touch_reload()
    supervisorctl('restart', 'celery')
    site_up()
UTF-8
Python
true
false
2,013
6,897,717,515,379
265294fb22dc321c31b2862bf4a4f737a4db2417
ec7f01918700ea39115446a323cd76ad23708a29
/src/Server/controllers/basecontroller.py
cd1af7a3a2b46bdbbd542bc734c28261ff9a9344
[ "MIT" ]
permissive
SkyLapse/DMS
https://github.com/SkyLapse/DMS
f218f591059c13596e34b2e9f45c81f4f51e5741
a8906498e2801f7f022d7d2d72d90ba2be6f0d21
refs/heads/master
2021-01-04T14:06:48.640996
2014-05-26T16:46:17
2014-05-26T16:46:17
16,044,209
0
1
null
null
null
null
null
null
null
null
null
null
null
null
null
__author__ = 'SkyLapse'

from abc import abstractmethod

from flask import current_app
from flask.ext import restful


class BaseController(restful.Resource):
    """Common base class for the API controllers, wrapping flask-restful's
    Resource and exposing the current Flask application as self.app."""

    def __init__(self):
        # current_app is Flask's application proxy; stored for convenient
        # access from subclasses
        self.app = current_app
        pass

    # NOTE(review): @abstractmethod only blocks instantiation when the
    # class's metaclass is (a subclass of) ABCMeta; restful.Resource's
    # metaclass is not visible from this file, so this decorator may be
    # documentation-only here — confirm against flask-restful.
    @abstractmethod
    def get(self, id=None):
        # subclasses implement the HTTP GET handler; id is the optional
        # resource identifier from the route
        pass
UTF-8
Python
false
false
2,014
2,671,469,663,261
f767e7f6546ae461e58f43c48d5bf0438b8e6dd7
e147f9ff39e19d4d3bb035ce0b466fea3e247477
/src/edge.py
eb80fc6d597df5db7f580b11553faa862d0e9fd1
[]
no_license
yatsek/mindmaps-qt
https://github.com/yatsek/mindmaps-qt
e737e417ca88b48df763e2cd345e983eeab05c6d
bf93dc2731c26bab2782aba62ab2ec4ff2b927c0
refs/heads/master
2021-01-01T16:39:47.162525
2011-02-06T23:51:41
2011-02-06T23:51:41
37,589,605
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from math import sin,cos,pi,acos

TwoPi = pi*2

class Edge(QGraphicsItem):
    """Override of QGraphicsItem to handle drawing of edge connecting
    nodes."""

    def __init__(self, sourceNode, destNode, visible=True):
        """Constructor which connects nodes together: registers this edge
        with both end nodes and computes its initial end points."""
        super(Edge, self).__init__()
        self.sourcePoint = None
        self.destPoint = None
        self.setFlags(self.ItemIsSelectable)
        # the edge itself never takes mouse clicks
        self.setAcceptedMouseButtons(Qt.NoButton)
        self.source = sourceNode
        self.dest = destNode
        self.source.addEdge(self)
        self.dest.addEdge(self)
        self.visible = visible
        self.adjust()
        # draw edges behind the nodes
        self.setZValue(-1)

    def sourceNode(self):
        """returns the source node"""
        return self.source

    def setSourceNode(self, node):
        """sets the source node and recomputes the end points"""
        self.source = node
        # BUG FIX: was a bare adjust(), which raised NameError at runtime.
        self.adjust()

    def destNode(self):
        """returns destination node"""
        return self.dest

    def setDestNode(self, node):
        """sets destination node and recomputes the end points"""
        self.dest = node
        # BUG FIX: was a bare adjust(), which raised NameError at runtime.
        self.adjust()

    def adjust(self):
        """Calculates new position of end points based on node positions,
        pulling each end 10 px back from the node centre when the nodes
        are more than 20 px apart."""
        if not self.source or not self.dest:
            return
        srcCenter = self.source.ellipsisCenter()
        dstCenter = self.dest.ellipsisCenter()
        line = QLineF(
            self.mapFromItem(self.source, srcCenter.x(), srcCenter.y()),
            self.mapFromItem(self.dest, dstCenter.x(), dstCenter.y()))
        length = line.length()
        self.prepareGeometryChange()
        if length > 20.0:
            edgeOffset = QPointF((line.dx()*10)/length,
                                 (line.dy()*10)/length)
            self.sourcePoint = line.p1() + edgeOffset
            self.destPoint = line.p2() - edgeOffset
        else:
            # nodes overlap: collapse the edge to a single point
            self.sourcePoint = self.destPoint = line.p1()

    def boundingRect(self):
        """Returns the bounding rectangle of the edge (the normalized
        rectangle spanned by its two end points)."""
        if not self.source or not self.dest:
            return QRectF()
        return QRectF(self.sourcePoint,
                      QSizeF(self.destPoint.x() - self.sourcePoint.x(),
                             self.destPoint.y() - self.sourcePoint.y())
                      ).normalized()

    def paint(self, painter, option=None, widget=None):
        """Paint edge on a scene; edges flagged not-visible are drawn in
        green rather than black."""
        if not self.source or not self.dest:
            return
        line = QLineF(self.sourcePoint, self.destPoint)
        if line.length() == 0.0:
            return
        # draw the line itself
        color = QColor(Qt.black)
        if not self.visible:
            color = QColor(Qt.green)
        painter.setPen(QPen(color, 1, Qt.SolidLine, Qt.RoundCap,
                            Qt.RoundJoin))
        painter.drawLine(line)
UTF-8
Python
false
false
2,011
5,858,335,434,780
9b7c2abad406df41fd7926bc94e32aec97fbdc1b
dc13636c35adefbf1579c93705a155781c071d5c
/app/managers.py
a02d12c7f9098f01a351e5dc812c4a6234c5c8df
[ "GPL-3.0-only", "GPL-3.0-or-later" ]
non_permissive
rosix-ru/barbaris
https://github.com/rosix-ru/barbaris
289047d19a6712d54210190498958425f5de94f0
1d300a65ef62285c54e748a8fec8cef32a5848ba
refs/heads/master
2021-01-10T04:26:21.554920
2014-06-07T01:46:31
2014-06-07T01:46:31
44,802,688
1
2
null
null
null
null
null
null
null
null
null
null
null
null
null
# -*- coding: utf-8 -*-
"""
###############################################################################
# Copyright 2012 Grigoriy Kramarenko.
###############################################################################
# This file is part of Barbaris.
#
#    Barbaris is free software: you can redistribute it and/or modify
#    it under the terms of the GNU General Public License as published by
#    the Free Software Foundation, either version 3 of the License, or
#    (at your option) any later version.
#
#    Barbaris is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#    GNU General Public License for more details.
#
#    You should have received a copy of the GNU General Public License
#    along with Barbaris. If not, see <http://www.gnu.org/licenses/>.
#
#    Этот файл — часть Barbaris.
#
#    Barbaris - свободная программа: вы можете перераспространять ее и/или
#    изменять ее на условиях Стандартной общественной лицензии GNU в том виде,
#    в каком она была опубликована Фондом свободного программного обеспечения;
#    либо версии 3 лицензии, либо (по вашему выбору) любой более поздней
#    версии.
#
#    Barbaris распространяется в надежде, что она будет полезной,
#    но БЕЗО ВСЯКИХ ГАРАНТИЙ; даже без неявной гарантии ТОВАРНОГО ВИДА
#    или ПРИГОДНОСТИ ДЛЯ ОПРЕДЕЛЕННЫХ ЦЕЛЕЙ. Подробнее см. в Стандартной
#    общественной лицензии GNU.
#
#    Вы должны были получить копию Стандартной общественной лицензии GNU
#    вместе с этой программой. Если это не так, см.
#    <http://www.gnu.org/licenses/>.
###############################################################################
"""
from django.db import models
from django.conf import settings
import datetime

# Each manager below narrows the default queryset via the (pre-Django-1.6)
# Manager.get_query_set() hook; use_for_related_fields = True makes the
# same filtering apply when the model is reached through a relation.


class OrgManager(models.Manager):
    # organisations that are attached to a client record
    use_for_related_fields = True
    def get_query_set(self):
        return super(OrgManager, self).get_query_set().filter(
            client__isnull=False,
        )

class PersonManager(models.Manager):
    # persons that are attached to a client record
    use_for_related_fields = True
    def get_query_set(self):
        return super(PersonManager, self).get_query_set().filter(
            client__isnull=False,
        )

class ActivePriceManager(models.Manager):
    # active price lists whose start date has already been reached
    use_for_related_fields = True
    def get_query_set(self):
        return super(ActivePriceManager, self).get_query_set().filter(
            is_active=True, start_date__lte=datetime.date.today()
        )

class CreateOrderManager(models.Manager):
    # orders in the "created" state (state codes come from settings)
    use_for_related_fields = True
    def get_query_set(self):
        return super(CreateOrderManager, self).get_query_set().filter(
            state=settings.STATE_ORDER_CREATE
        )

class AcceptOrderManager(models.Manager):
    # orders in the "accepted" state
    use_for_related_fields = True
    def get_query_set(self):
        return super(AcceptOrderManager, self).get_query_set().filter(
            state=settings.STATE_ORDER_ACCEPT
        )

class CloseOrderManager(models.Manager):
    # orders in the "closed" state
    use_for_related_fields = True
    def get_query_set(self):
        return super(CloseOrderManager, self).get_query_set().filter(
            state=settings.STATE_ORDER_CLOSE
        )

class CancelOrderManager(models.Manager):
    # orders in the "cancelled" state
    use_for_related_fields = True
    def get_query_set(self):
        return super(CancelOrderManager, self).get_query_set().filter(
            state=settings.STATE_ORDER_CANCEL
        )

class ListOrderManager(models.Manager):
    # orders in any of the states configured as "listable"
    use_for_related_fields = True
    def get_query_set(self):
        return super(ListOrderManager, self).get_query_set().filter(
            state__in=settings.SELECT_LIST_ORDERS
        )

class WorkOrderManager(models.Manager):
    # orders in any of the states configured as "in work"
    use_for_related_fields = True
    def get_query_set(self):
        return super(WorkOrderManager, self).get_query_set().filter(
            state__in=settings.SELECT_WORK_ORDERS
        )

class WorkSpecificationManager(models.Manager):
    # specifications whose parent order is in a "work" state
    use_for_related_fields = True
    def get_query_set(self):
        return super(WorkSpecificationManager, self).get_query_set().filter(
            order__state__in=settings.SELECT_WORK_ORDERS
        )

class CreateInvoiceManager(models.Manager):
    # invoices in the "created" state
    use_for_related_fields = True
    def get_query_set(self):
        return super(CreateInvoiceManager, self).get_query_set().filter(
            state=settings.STATE_INVOICE_CREATE
        )

class PaymentInvoiceManager(models.Manager):
    # invoices in the "payment" state
    use_for_related_fields = True
    def get_query_set(self):
        return super(PaymentInvoiceManager, self).get_query_set().filter(
            state=settings.STATE_INVOICE_PAYMENT
        )

class AvanceInvoiceManager(models.Manager):
    # invoices in the "advance payment" state
    use_for_related_fields = True
    def get_query_set(self):
        return super(AvanceInvoiceManager, self).get_query_set().filter(
            state=settings.STATE_INVOICE_AVANCE
        )

class CashInvoiceManager(models.Manager):
    # invoices in any of the states configured as "cash"
    use_for_related_fields = True
    def get_query_set(self):
        return super(CashInvoiceManager, self).get_query_set().filter(
            state__in=settings.SELECT_CASH_INVOICES
        )

class WorkInvoiceManager(models.Manager):
    # invoices in any of the states configured as "in work"
    use_for_related_fields = True
    def get_query_set(self):
        return super(WorkInvoiceManager, self).get_query_set().filter(
            state__in=settings.SELECT_WORK_INVOICES
        )

class PrivatePersonManager(models.Manager):
    # client persons that are not attached to any organisation
    use_for_related_fields = True
    def get_query_set(self):
        return super(PrivatePersonManager, self).get_query_set().filter(
            org=None, client__isnull=False,
        )
UTF-8
Python
false
false
2,014
8,031,588,869,741
68d8ead2f38f13777545ea79960de5996fc3b7b5
14f85485b3115c3cc86de3fe86a6735eb8fbddea
/playgroundApp/urls.py
0208931a9bcae335e736e5d17fa50a0e66ae4ac8
[]
no_license
josieh/playground-finder
https://github.com/josieh/playground-finder
3edac25a798ad834c68abd7b646521fa1e4cc48f
5685fa8fec4aecbf7df025f4b5907f4e0f888843
refs/heads/master
2020-04-01T17:32:48.337324
2014-04-23T02:53:20
2014-04-23T02:53:20
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# URL routes for the playground app (old-style django patterns() syntax).
from django.conf.urls import patterns, url
from playgroundApp import views

urlpatterns=patterns('',
    url(r'^$', views.Playground_List, name='playgroundapp_home'),
    url(r'^list$', views.testFilter),
    url(r'^playgroundapp/playground_info/(?P<pk>\d+)$', views.playgroundDetail,
        name='playground_info'),
    # NOTE(review): name='userSuggest' is also used for the user_suggest
    # route below — duplicate URL names make reverse()/{% url %} resolve to
    # whichever pattern django picks; confirm which one templates expect.
    url(r'^playgroundapp/playground_suggest$', views.suggestPlayground,
        name='userSuggest'),
    url(r'^playgroundapp/user_profile$', views.userProfile, name='userProfile'),
    url(r'^playgroundapp/user_suggest$', views.userSuggest, name='userSuggest'),
    url(r'^playgroundapp/user_signup$', views.userSignUp, name='userSignUp'),
    url(r'^playgroundapp/user_login$', views.userLogin, name='userLogin'),
    #urls for the suggest a playground page
    url(r'^playgroundapp/map$', views.map, name='map'),
    url(r'^playgroundapp/add-playground$', views.formSuggest,
        name='form_suggest'),
    #url to test form created by following youTube video
    url(r'^playgroundapp/testForm', views.testCreate, name='testForm'),
)
UTF-8
Python
false
false
2,014
1,228,360,663,344
1de2e50e7e1aa9bc574c1fc3730f42916dddaf61
1256539b405370d21c8f11cab73eb2d6f39a2c23
/src/git-got
4ee672ab27a6879c9c027a90683c985b4c979d8d
[]
no_license
jhj125/git-got
https://github.com/jhj125/git-got
3387dc5ac80ba5e68e099ffea3c239e2cb9b8c34
1a6d6f526243c5b99d708dde8198c1bed499a7ce
refs/heads/master
2021-01-14T11:19:59.891515
2014-12-17T16:19:29
2014-12-17T16:19:29
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/python import sys import subprocess import os.path import hashlib import json import fnmatch import hashlib # For SRR support import httplib import re DEFAULT_LOG_LEVELS=['INFO', 'WARN', 'ERROR'] LOG_LEVELS = DEFAULT_LOG_LEVELS def load_scp(filename, checksum): remote = get_remote() subprocess.check_call(['scp', '%s/%s.got' % (remote, checksum), filename]) def store_scp(filename, checksum): remote = get_remote() subprocess.check_call(['scp', filename, '%s/%s.got' % (remote, checksum)]) def store_srr(filename, checksum): (server_name, parent_id) = get_location_info_srr() local_path = filename target_id = '' remote_path = '%s' % checksum description = 'Got storage for %s/%s @ TBD hashtag' % (get_root(), filename) # "body" includes everything up to (but not including) the file boundary = "-------------iRobot-Multipart-Boundary-------------" body = '--' + boundary + '\r\n' body += 'Content-Disposition: form-data; name="parent_id"\r\n\r\n' body += str(parent_id) + '\r\n' + '--' + boundary + '\r\n' body += 'Content-Disposition: form-data; name="target_id"\r\n\r\n' body += str(target_id) + '\r\n' + '--' + boundary + '\r\n' body += 'Content-Disposition: form-data; name="description"\r\n\r\n' body += description + '\r\n' + '--' + boundary + '\r\n' body += 'Content-Disposition: form-data; name="file"; filename="%s"\r\n'\ % remote_path body += 'Content-Transfer-Encoding: binary\r\n' body += 'MIME-Version: 1.0\r\n\r\n' tail = '\r\n--' + boundary + '--\r\n\r\n' content_length = len(body) + os.path.getsize(local_path) + len(tail) # Upload form and file to server # @todo Exception Handling http_c = httplib.HTTPConnection(server_name) http_c.putrequest('POST', '/srr/api/add_file') http_c.putheader('Content-Type', 'multipart/form-data; boundary=%s' % boundary) http_c.putheader('Content-Length', content_length) http_c.endheaders() http_c.send(body) # Send file in reasonably-sized blocks fp = open(local_path, 'rb') data_block = fp.read(4096) bytes_sent = 0 while 
data_block: http_c.send(data_block) bytes_sent += len(data_block) data_block = fp.read(4096) fp.close() http_c.send(tail) response = http_c.getresponse() result = response.read() http_c.close() if response.status != 200: raise SRRError("%s: %s" % (response.reason, result)) new_id_re = re.compile(r' file_id=(\d+)\s*$') m = new_id_re.search(result) if m: return int(m.group(1)) else: raise SRRError("Unexpected result from SRR: %s" % result) def parse_path_srr(path): result_re = re.compile('http://(.*)/.*/(\d+)$') matches = result_re.match(path) return (matches.group(1), matches.group(2)) def get_location_info_srr(): return parse_path_srr(get_remote()) def load_srr(filename, checksum): (server, parent_id) = get_location_info_srr() path = get_remote_path_srr(server, parent_id) subprocess.check_call( ['curl', '-o' , filename , '-#', '%s/%s' % (path, checksum)]) remote_store_file = store_srr remote_load_file = load_srr def init_backing_store(): configuration = load_configuration() if configuration['remote_type'] == 'srr': remote_load_file = load_srr remote_store_file = store_srr else: remote_load_file = load_scp remote_store_file = store_scp def usage(): print 'git got <command> [<args>]' print print 'The most commonly used git got commands are:' print ' init Initialize the remote to be used with the repository' print ' <type> <parameters>' print ' get Retrieve all remote files to the local working area' print ' add Add a file to the remote repository' print ' status Request the status of a got tracked file' print ' reset Ovewrite a gotted file with the remote copy' root_valid = False root_path = "" def get_root(): global root_valid global root_path if not root_valid: root_path = subprocess.check_output(['git', 'rev-parse', '--show-toplevel']).rstrip() root_valid = True return root_path configuration_loaded = False configuration = [] def file_hash(filename): hasher = hashlib.sha1() file = open(filename, 'rb') try: while True: data = file.read(8192) if not data: break 
hasher.update(data) except Exception as e: raise finally: file.close() return hasher.hexdigest() def load_configuration(): global configuration_loaded global configuration if not configuration_loaded: file = open('%s/.got/storage' % get_root(), 'r') configuration = json.load(file) file.close() configuration_loaded = True return configuration def get_remote(): configuration = load_configuration() return configuration['remote'] def get_local_got_filename(fully_qualified_filename): (base, filename) = os.path.split(fully_qualified_filename) return os.path.join(base, '.%s.got' % filename) def get_real_filename(fully_qualified_filename): (root, filename) = os.path.split(fully_qualified_filename) return os.path.join(root, filename[1:-4]) def get_cb(filename): try: real_filename = get_real_filename(filename) log_debug('get_cb: Using %s for local file' % real_filename) sum = open(filename).read().rstrip() remote_load_file(rel_filename, sum) except Exception as e: log_error('Failed to retrieve file %s' % filename, e) def reset_cb(filename): try: log_debug('reset_cb: Reseting %s' % filename) got_filename = get_local_got_filename(filename) log_debug('reset_cb: Using %s for local got file' % got_filename) sum = open(got_filename).read().rstrip() remote_load_file(filename, sum) except Exception as e: log_error('Failed to reset %s' % filename, e) def add_cb(filename): try: log_debug('add_cb: Adding %s' % filename) sum = file_hash(filename) remote_store_file(filename, sum) got_filename = get_local_got_filename(filename) hash_file = open(got_filename, 'w') hash_file.write('%s' % sum) hash_file.close() subprocess.check_call(['git', 'add', got_filename]) file = open('%s/.gitignore' % get_root(), 'w') file.write('%s\n' % filename) file.close() except Exception as e: print sys.exc_traceback.tb_lineno log_error('Failed to add %s' % filename, e) def status_cb(filename): try: actual_filename = get_real_filename(filename) log_debug('Actual %s' % actual_filename) if not 
os.path.exists(actual_filename): return 'Remote: %s' % actual_filename sum1 = file_hash(actual_filename) sum2 = open(filename).read().rstrip() if sum1 != sum2: return 'Modified: %s' % actual_filename except Exception as e: log_error('Failed to status %s' % filename, e) def start_transaction(): pass def end_transaction(): pass def log(level, message, exception): if None is exception: print '%s : %s' % (level, message) else: print '%s:%s:%s' % (level, message, exception) def log_error(message, exception = None): if 'ERROR' in LOG_LEVELS: log('ERROR', message, exception) def log_warn(message, exception = None): if 'WARN' in LOG_LEVELS: log('WARN', message, exception) def log_info(message, exception = None): if 'INFO' in LOG_LEVELS: log('INFO', message, exception) def log_debug(message, exception = None): if 'DEBUG' in LOG_LEVELS: log('DEBUG', message, exception) def upgrade_cb(new): pass def walker(function, args): output = [] for arg in args: log_debug('walker: processing argument %s' % arg) if os.path.isfile(arg): log_debug('walker: processing file %s' % arg) output.append(function(arg)) else: for base, dirs, filenames in os.walk(arg): if '.git' in dirs: dirs.remove('.git') if '.got' in dirs: dirs.remove('.got') for filename in fnmatch.filter(filenames, '.*.got'): log_debug('walker: processing file %s/%s' % (base, filename)) output.append(function('%s/%s' % (base, filename))) return output def check_initialized(): if os.path.isfile('%s/.got/storage' % get_root()): init_backing_store() return True return False def check_version(version): configuration = load_configuration() if VERSION != configuration['version']: return False return True VERSION = 1 num_args = len(sys.argv) if num_args < 2: usage() exit() command = sys.argv[1] if command == 'init': start_transaction() try: os.mkdir('%s/.got' % get_root()) except OSError: # This means the directory already existed according to # the python documentation pass print sys.argv type = sys.argv[2] remote = sys.argv[3] 
configuration = { 'remote' : remote , 'remote_type' : type , 'version' : VERSION } file = open('%s/.got/storage' % get_root(), 'w') json.dump(configuration, file) file.close() subprocess.check_call(['git', 'add', '%s/.got' % get_root()]) file = open('%s/.gitignore' % get_root(), 'w') file.close() subprocess.check_call(['git', 'add', '%s/.gitignore' % get_root()]) end_transaction() elif not check_initialized(): print 'Got not initialized\n' usage() exit() elif command == 'upgrade': upgrade_cb(VERSION) elif not check_version(VERSION): print 'Version of got repository requires upgrading, run upgrade command' usage() exit() elif command == 'add': start_transaction() log_debug('main: Add command %s' % sys.argv[2:]) walker(add_cb, sys.argv[2:]) end_transaction() elif command == 'reset': walker(reset_cb, sys.argv[2:]) elif command == 'get': walker(get_cb, [get_root()]) elif command == 'status': changes = walker(status_cb, [get_root()]) print '# Changes', for change in changes: if None != change: print '\n# %s' % change, print '\n', else: usage() # vim: set filetype=python :
UTF-8
Python
false
false
2,014
7,095,286,006,136
94607dc10ece7936d9bd0d00c295e0e64f1d6a47
7bafa3568fc321abfe8fc5a4a0c3982116cfa65f
/paypal/signatures.py
ed4d5f64bdb731eeb3ba3bc13570edaf3dc80d9e
[]
no_license
softak/webfaction_demo
https://github.com/softak/webfaction_demo
f8ea504e16609d7198f98333ff1472e0f23528d0
cc4b308ce964b04907f4c23777178ff900e0ad8b
refs/heads/master
2016-09-06T03:19:04.919531
2013-01-23T17:33:52
2013-01-23T17:33:52
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import os
import re
import urlparse
import urllib
import httplib
import hmac
import base64
import hashlib

# Characters that pass through ppencode() unchanged.
ppencode_re = re.compile(r'([A-Za-z0-9_]+)')


def ppencode(string):
    """Percent-encode `string` for use in the OAuth signature base string.

    NOTE(review): the `elif char == ' '` branch is unreachable -- a space
    does not match ppencode_re, so the first branch percent-encodes it as
    '%20' and the '+' conversion never runs.  Confirm which form the remote
    endpoint expects before touching this.
    NOTE(review): hex(ord(char))[2:] produces lowercase, non-zero-padded
    hex (e.g. '\\n' -> '%a', not '%0A'), which differs from RFC 3986
    percent-encoding for code points below 0x10 -- verify against the
    PayPal signing spec.
    """
    global ppencode_re
    result = ''
    for char in string:
        if re.match(ppencode_re, char) is None:
            result += '%' + hex(ord(char))[2:]
        elif char == ' ':
            result += '+'
        else:
            result += char
    return result


class Uri(object):
    """Minimal parsed-URL value object: scheme, host, port, path, query."""

    scheme = None
    host = None
    port = None
    path = None

    def __init__(self, scheme=None, host=None, port=None, path=None,
                 query=None):
        # `query` is a dict of decoded key -> value (value may be None).
        self.query = query or {}
        if scheme is not None:
            self.scheme = scheme
        if host is not None:
            self.host = host
        if port is not None:
            self.port = port
        if path:
            self.path = path

    @staticmethod
    def parse_uri(uri_string):
        """Build a Uri from a string; query params are unquoted into a dict."""
        parts = urlparse.urlparse(uri_string)
        uri = Uri()
        if parts[0]:
            uri.scheme = parts[0]
        if parts[1]:
            # netloc may carry an explicit port ("host:port").
            host_parts = parts[1].split(':')
            if host_parts[0]:
                uri.host = host_parts[0]
            if len(host_parts) > 1:
                uri.port = int(host_parts[1])
        if parts[2]:
            uri.path = parts[2]
        if parts[4]:
            # parts[4] is the raw query string; decode each k=v pair.
            param_pairs = parts[4].split('&')
            for pair in param_pairs:
                pair_parts = pair.split('=')
                if len(pair_parts) > 1:
                    uri.query[urllib.unquote_plus(pair_parts[0])] = \
                        urllib.unquote_plus(pair_parts[1])
                elif len(pair_parts) == 1:
                    # Bare key with no '=' -- store with a None value.
                    uri.query[urllib.unquote_plus(pair_parts[0])] = None
        return uri


class HttpRequest(object):
    """Lightweight HTTP request description (method + Uri + headers)."""

    method = None
    uri = None

    def __init__(self, uri=None, method=None, headers=None):
        self.headers = headers or {}
        self._body_parts = []
        if method is not None:
            self.method = method
        # Accept either a ready Uri or a string to be parsed.
        if isinstance(uri, (str, unicode)):
            uri = Uri.parse_uri(uri)
        self.uri = uri or Uri()


def build_oauth_base_string(http_request, consumer_key, signature_type,
                            timestamp, version, token):
    """Assemble the OAuth signature base string for `http_request`.

    NOTE(review): only the six oauth_* parameters are signed; request query
    parameters and oauth_nonce are not folded in.  Confirm this matches the
    remote API's signing rules (RFC 5849 normally requires all request
    parameters).
    """
    params = {}
    params['oauth_consumer_key'] = consumer_key
    params['oauth_signature_method'] = signature_type
    params['oauth_timestamp'] = str(timestamp)
    params['oauth_token'] = token
    params['oauth_version'] = version
    # Parameters must be sorted by key before joining.
    sorted_keys = sorted(params.keys())
    pairs = []
    for key in sorted_keys:
        pairs.append('%s=%s' % (key, params[key]))
    all_parameters = '&'.join(pairs)
    normalized_host = http_request.uri.host.lower()
    normalized_scheme = (http_request.uri.scheme or 'http').lower()
    # Include the port only when it is not the scheme's default.
    non_default_port = None
    if (http_request.uri.port is not None
            and ((normalized_scheme == 'https' and http_request.uri.port != 443)
                 or (normalized_scheme == 'http' and http_request.uri.port != 80))):
        non_default_port = http_request.uri.port
    path = http_request.uri.path or '/'
    request_path = None
    if not path.startswith('/'):
        path = '/%s' % path
    if non_default_port is not None:
        request_path = '%s://%s:%s%s' % (normalized_scheme, normalized_host,
                                         non_default_port, path)
    else:
        request_path = '%s://%s%s' % (normalized_scheme, normalized_host, path)
    # base string = METHOD & encoded-url & encoded-params
    base_string = '&'.join((http_request.method.upper(),
                            ppencode(request_path),
                            ppencode(all_parameters)))
    return base_string


def generate_hmac_signature(http_request, consumer_key, consumer_secret,
                            timestamp, version, token, token_secret):
    """Return the base64 HMAC-SHA1 OAuth signature for `http_request`.

    The HMAC key is "encoded-consumer-secret&encoded-token-secret".
    """
    base_string = build_oauth_base_string(
        http_request, consumer_key, 'HMAC-SHA1', timestamp, version, token)
    hash_key = '%s&%s' % (ppencode(consumer_secret), ppencode(token_secret))
    hashed = hmac.new(hash_key, base_string, hashlib.sha1)
    return base64.b64encode(hashed.digest())
UTF-8
Python
false
false
2,013
10,462,540,356,015
88c06a907a9815582e4a199f2aa6a53b7b4e6c25
364249d5c7e9af7a7fd5d6122d4489fa5250919c
/salest/cart/middleware.py
0042d91c147c8c26d01dad8692571e154b189799
[ "GPL-3.0-or-later" ]
non_permissive
anvil8/salest
https://github.com/anvil8/salest
b67cbaee6edf4cdfae77bd31191f5bd05ace213b
a25b9ab5ff2fab309b5d8b85b4c46d0e60f71410
refs/heads/master
2020-05-30T14:23:47.923820
2012-08-07T11:29:27
2012-08-07T11:29:27
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from django.utils.functional import SimpleLazyObject
from salest.cart.models import Cart


def get_cart(request):
    """Return the shopping cart for ``request``.

    The Cart lookup/creation runs at most once per request: the result is
    memoized on the request object itself.
    """
    try:
        return request._cached_cart
    except AttributeError:
        request._cached_cart = Cart.objects.get_or_create_from_request(request)
        return request._cached_cart


class ShopingCartMiddleware(object):
    """Exposes a lazily-resolved ``request.cart`` on every request."""

    def process_request(self, request):
        # Carts are keyed off the session, so session middleware must run first.
        assert hasattr(request, 'session'), "The ShopingCartMiddleware requires session middleware to be installed. Edit your MIDDLEWARE_CLASSES setting to insert 'django.contrib.sessions.middleware.SessionMiddleware'."

        def resolve_cart():
            # Deferred until request.cart is first touched.
            return get_cart(request)

        request.cart = SimpleLazyObject(resolve_cart)
UTF-8
Python
false
false
2,012
764,504,187,249
1f1a04f6321918c0eb82f2d26790028b1a92b3b2
60130678bae6eaa3abb3b0336720d506173aeba1
/routes.py
cf9b04fb3a7357a0fd6f61b147a66ef809cd8553
[]
no_license
zrenx/pp4gae
https://github.com/zrenx/pp4gae
b2ca8340bbfd170cc0b65661e29ddb32700db18f
9da14062ce449e999167faa313adc25df82816cd
refs/heads/master
2021-01-13T01:49:10.739165
2010-09-06T04:29:08
2010-09-06T04:29:08
394,886
7
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/python # -*- coding: utf-8 -*- # routes_in is a tuple of tuples. The first item in each is a regexp that will # be used to match the incoming request URL. The second item in the tuple is # what it will be replaced with. This mechanism allows you to redirect incoming # routes to different web2py locations # # Example: If you wish for your entire website to use init's static directory: # # routes_in=( ('/static/(?P<file>[\w\./_-]+)','/init/static/\g<file>') ) # routes_in = ( #('.*:/','/pp4gae/'), # rewrite to pp4gae ('/static/(?P<file>[\w\./_-]+)','/pp4gae/static/\g<file>'), ('/(?P<any>.*)', '/pp4gae/default/\g<any>'), #'.*:/robots.txt', '/pp4gae/static/robots.txt')) ) # routes_out, like routes_in translates URL paths created with the web2py URL() # function in the same manner that route_in translates inbound URL paths. # routes_out = ( ('/pp4gae/default/(?P<any>.*)', '/\g<any>'), ) # Error-handling redirects all HTTP errors (status codes >= 400) to a specified # path. If you wish to use error-handling redirects, uncomment the tuple # below. You can customize responses by adding a tuple entry with the first # value in 'appName/HTTPstatusCode' format. ( Only HTTP codes >= 400 are # routed. ) and the value as a path to redirect the user to. You may also use # '*' as a wildcard. # # The error handling page is also passed the error code and ticket as # variables. Traceback information will be stored in the ticket. # # routes_onerror = [ # ('init/400', '/init/default/login') # ,('init/*', '/init/static/fail.html') # ,('*/404', '/init/static/cantfind.html') # ,('*/*', '/init/error/index') # ] # specify action in charge of error handling # # error_handler = dict(application='error', # controller='default', # function='index') # In the event that the error-handling page itself returns an error, web2py will # fall back to its old static responses. You can customize them here. 
# ErrorMessageTicket takes a string format dictionary containing (only) the # "ticket" key. # error_message = '<html><body><h1>Invalid request</h1></body></html>' # error_message_ticket = '<html><body><h1>Internal error</h1>Ticket issued: <a href="/admin/default/ticket/%(ticket)s" target="_blank">%(ticket)s</a></body></html>'
UTF-8
Python
false
false
2,010
4,217,657,905,551
43dd4b75c9396ed4d52ab512d29adbdfa6a3818c
c5747e63f7bfef9bee2010ac85c7db7abd2423ef
/instasend.py
b9a450b105e55a86956ce97237a8bdd4d65bbde5
[]
no_license
mountaindude/instasend
https://github.com/mountaindude/instasend
016abc281233dd4bb9097a1793418e8d597a3c04
1a1dae83cde3e96970e5e90300277a5453108f34
refs/heads/master
2021-01-19T08:16:33.082799
2011-09-09T22:23:52
2011-09-09T22:23:52
2,358,461
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# Sending commands to Insta RX/TX radio module
# Goran Sander <[email protected]>
# Based on miniterm.py by Chris Liechti <[email protected]>
#
# Usage example:
#   python instasend.py --port /dev/ttyUSB2 -c a4on

import sys, os, serial, threading, array, time

EXITCHARCTER = '\x1d'   # GS/CTRL+]
MENUCHARACTER = '\x14'  # Menu: CTRL+T


def key_description(character):
    """generate a readable description for a key"""
    ascii_code = ord(character)
    if ascii_code < 32:
        # Control characters print as Ctrl+<letter>.
        return 'Ctrl+%c' % (ord('@') + ascii_code)
    else:
        return repr(character)


# help text, starts with blank line! it's a function so that the current values
# for the shortcut keys is used and not the value at program start
def get_help_text():
    return """
--- pySerial - miniterm - help
---
--- %(exit)-8s Exit program
--- %(menu)-8s Menu escape key, followed by:
--- Menu keys:
---    %(itself)-8s Send the menu character itself to remote
---    %(exchar)-8s Send the exit character to remote
---    %(info)-8s Show info
---    %(upload)-8s Upload file (prompt will be shown)
--- Toggles:
---    %(rts)s RTS          %(echo)s local echo
---    %(dtr)s DTR          %(break)s BREAK
---    %(lfm)s line feed    %(repr)s Cycle repr mode
---
--- Port settings (%(menu)s followed by the following):
--- 7 8           set data bits
--- n e o s m     change parity (None, Even, Odd, Space, Mark)
--- 1 2 3         set stop bits (1, 2, 1.5)
--- b             change baud rate
--- x X           disable/enable software flow control
--- r R           disable/enable hardware flow control
""" % {
        'exit': key_description(EXITCHARCTER),
        'menu': key_description(MENUCHARACTER),
        'rts': key_description('\x12'),
        'repr': key_description('\x01'),
        'dtr': key_description('\x04'),
        'lfm': key_description('\x0c'),
        'break': key_description('\x02'),
        'echo': key_description('\x05'),
        'info': key_description('\x09'),
        'upload': key_description('\x15'),
        'itself': key_description(MENUCHARACTER),
        'exchar': key_description(EXITCHARCTER),
    }


# first choose a platform dependant way to read single characters from the console
global console

if os.name == 'nt':
    import msvcrt

    class Console:
        # Windows console: msvcrt provides unbuffered single-key reads,
        # so setup/cleanup have nothing to do.
        def __init__(self):
            pass

        def setup(self):
            pass    # Do nothing for 'nt'

        def cleanup(self):
            pass    # Do nothing for 'nt'

        def getkey(self):
            while 1:
                z = msvcrt.getch()
                if z == '\0' or z == '\xe0':    # functions keys
                    # Function/arrow keys arrive as a two-byte sequence;
                    # discard the second byte.
                    msvcrt.getch()
                else:
                    if z == '\r':
                        return '\n'
                    return z

    console = Console()

elif os.name == 'posix':
    import termios, sys, os

    class Console:
        # POSIX console: put the tty into raw-ish mode (no canonical
        # buffering, no echo, no signals) for single-key reads.
        def __init__(self):
            self.fd = sys.stdin.fileno()

        def setup(self):
            self.old = termios.tcgetattr(self.fd)
            new = termios.tcgetattr(self.fd)
            new[3] = new[3] & ~termios.ICANON & ~termios.ECHO & ~termios.ISIG
            new[6][termios.VMIN] = 1
            new[6][termios.VTIME] = 0
            termios.tcsetattr(self.fd, termios.TCSANOW, new)

        #s = ''  # We'll save the characters typed and add them to the pool.

        def getkey(self):
            c = os.read(self.fd, 1)
            return c

        def cleanup(self):
            termios.tcsetattr(self.fd, termios.TCSAFLUSH, self.old)

    console = Console()

    def cleanup_console():
        console.cleanup()

    console.setup()
    sys.exitfunc = cleanup_console  # terminal modes have to be restored on exit...

else:
    # NOTE(review): string exceptions are invalid in modern Python; this
    # should be `raise NotImplementedError(...)`.
    raise "Sorry no implementation for your platform (%s) available." % sys.platform


# Newline conversion modes for outgoing data.
CONVERT_CRLF = 2
CONVERT_CR = 1
CONVERT_LF = 0
NEWLINE_CONVERISON_MAP = ('\n', '\r', '\r\n')
LF_MODES = ('LF', 'CR', 'CR/LF')

REPR_MODES = ('raw', 'some control', 'all control', 'hex')

# Insta protocol handshake bytes.
INQ = 0xfa
ACK = 0x05

CRLF = '\r\n'


class Miniterm:
    """Opens the serial port, sends one Insta telegram, and echoes keys."""

    def __init__(self, port, baudrate, cmd, echo=False,
                 convert_outgoing=CONVERT_CRLF, repr_mode=0):
        # timeout=0.7 keeps reads from blocking forever on a silent module.
        self.serial = serial.Serial(port, baudrate, parity='N', rtscts=False,
                                    xonxoff=False, timeout=0.7)
        self.echo = echo
        self.repr_mode = repr_mode
        self.convert_outgoing = convert_outgoing
        self.newline = NEWLINE_CONVERISON_MAP[self.convert_outgoing]
        self.cmd = cmd
        self.break_state = False

    def start(self):
        """Build the telegram from self.cmd (e.g. 'a4on') and send it after
        an INQ/ACK handshake.  cmd = <group a/b/c><channel digit><on|off>."""
        self.alive = True
        # enter keyboard handling loop
        self.keyboard_thread = threading.Thread(target=self.keyb)
        self.keyboard_thread.setDaemon(1)
        self.keyboard_thread.start()

        # Send INSTA command
        cmd = self.cmd.lower()
        if self.echo:
            sys.stdout.write("cmd:%s\n" % cmd)
            sys.stdout.write("cmd len:%d\n" % len(cmd))

        telegram = '\x55\x16\x00'
        device = 0;
        # add group
        #
        if cmd[0] == 'a':
            device = 0x00
        elif cmd[0] == 'b':
            device = 0x08
        elif cmd[0] == 'c':
            device = 0x10
        # Add channel
        #
        channel = int(cmd[1])
        # sys.stdout.write("channel:%d\n" % channel)
        device = device | (channel - 1)
        # Add on/off
        if cmd[2:4] == "on":
            device = device | 0x40
        elif cmd [2:5] == "off":
            device = device | 0x80
        # sys.stdout.write("device:%x\n" % device)
        telegram = telegram + chr(device) + '\x01\x00\x00\x00\x00\x00'

        # Checksum: two's-complement of the byte sum, truncated to 8 bits.
        # NOTE(review): if (sum & 0xff) == 0 then crc == 256 and chr(crc)
        # raises ValueError -- confirm whether that byte pattern can occur.
        sum = 0
        for i in range(0, len(telegram)):
            sum = sum + ord(telegram[i])
            # sys.stdout.write("i=%d, %x\n" % (i, ord(telegram[i])))
        # sys.stdout.write("sum is:%x\n" % sum)
        sum = sum & 0xff
        # sys.stdout.write("sum2 is:%x\n" % sum)
        crc = 2**8 - sum
        # sys.stdout.write("CRC is:%x\n" % crc)
        telegram = telegram + chr(crc) + '\xaa'

        if self.echo:
            sys.stdout.write("INQ\n")
        # Handshake: send INQ, busy-wait for a reply byte, expect ACK.
        self.serial.write(chr(INQ))
        while self.serial.inWaiting() == 0:
            pass
        if self.serial.inWaiting() > 0:
            data = self.serial.read(1)
            # sys.stdout.write("\\x%s " % data.encode('hex'))
            if data == chr(ACK):
                try:
                    if self.echo:
                        sys.stdout.write("ACK\n")
                    # telegram = '\x55\x16\x00\x42\x01\x00\x00\x00\x00\x00\x52\xaa'
                    # version query
                    # telegram = "\x55\x32\xcd\xf1\xfa\x00\x00\x00\x00\x00\xc1\xaa"
                    self.serial.write(telegram)
                    self.serial.flush()
                    if self.echo:
                        for i in range(0, len(telegram)):
                            sys.stdout.write("i=%d, \\x%x\n" % (i, ord(telegram[i])))
                        sys.stdout.flush()
                    # while self.serial.inWaiting() == 0:
                    #     pass
                    # sys.stdout.write("Response:")
                    # while self.serial.inWaiting() > 0:
                    #     data = self.serial.read(1)
                    #     sys.stdout.write("\\x%s " % data.encode('hex'))
                    # sys.stdout.write("\n\n")
                    # if data == chr(INQ):
                    #     sys.stdout.write("Received INQ\n")
                    #     self.serial.write(chr(ACK))
                    #     self.serial.flush()
                    #     sys.stdout.write("Sending ACK\n")
                    #     i = 1
                    #     while self.serial.inWaiting() == 0:
                    #         pass
                    #     while self.serial.inWaiting() > 0:
                    #         data = self.serial.read(1)
                    #         sys.stdout.write("%d:\\x%s " % (i, data.encode('hex')))
                    #         i=i+1
                    #     sys.stdout.write("\n")
                    #     sys.stdout.write("\n")
                except:
                    print "Serial write exception"
                    raise

    def stop(self):
        self.alive = False

    def join(self, transmit_only=False):
        # No receiver thread is started in this tool, so joining is a no-op.
        pass
        # self.transmitter_thread.join()
        # if not transmit_only:
        #     self.receiver_thread.join()
        # self.keyboard_thread.join()

    def keyb(self):
        """loop and copy console->serial until EXITCHARCTER character is found.
        when MENUCHARACTER is found, interpret the next key locally.
        """
        try:
            while self.alive:
                # print "bbb"
                try:
                    c = console.getkey()
                except KeyboardInterrupt:
                    c = '\x03'
                if c == EXITCHARCTER:
                    self.stop()
                    break                                   # exit app
                elif c == 's':
                    # if self.echo:
                    sys.stdout.write("Sending INQ\r\n")
                elif c == '\n':
                    self.serial.write(self.newline)         # send newline character(s)
                    if self.echo:
                        sys.stdout.write(c)                 # local echo is a real newline in any case
                        # sys.stdout.flush()
                else:
                    self.serial.write(c)                    # send character
                    if self.echo:
                        sys.stdout.write(c)
                        sys.stdout.flush()
        except:
            self.alive = False
            raise


def main():
    """Parse command-line options, open the port, and send the command."""
    import optparse

    parser = optparse.OptionParser(
        usage = "%prog [options] [port [baudrate]]",
        description = "Miniterm - A simple terminal program for the serial port."
    )

    parser.add_option("-c", "--cmd",
        dest = "cmd",
        help = "command to send to INSTA transciever",
        default = ""
    )

    parser.add_option("-p", "--port",
        dest = "port",
        help = "port, a number (default 0) or a device name (deprecated option)",
        default = "COM2"
    )

    parser.add_option("-b", "--baud",
        dest = "baudrate",
        action = "store",
        type = 'int',
        help = "set baud rate, default %default",
        default = 9600
    )

    parser.add_option("-e", "--echo",
        dest = "echo",
        action = "store_true",
        help = "enable local echo (default off)",
        default = False
    )

    parser.add_option("--cr",
        dest = "cr",
        action = "store_true",
        help = "do not send CR+LF, send CR only",
        default = False
    )

    parser.add_option("--lf",
        dest = "lf",
        action = "store_true",
        help = "do not send CR+LF, send LF only",
        default = False
    )

    parser.add_option("-D", "--debug",
        dest = "repr_mode",
        action = "count",
        help = """debug received data (escape non-printable chars)
--debug can be given multiple times:
0: just print what is received
1: escape non-printable characters, do newlines as unusual
2: escape non-printable characters, newlines too
3: hex dump everything""",
        default = 0
    )

    parser.add_option("-q", "--quiet",
        dest = "quiet",
        action = "store_true",
        help = "suppress non error messages",
        default = False
    )

    parser.add_option("--exit-char",
        dest = "exit_char",
        action = "store",
        type = 'int',
        help = "ASCII code of special character that is used to exit the application",
        default = 0x20  # Default 0x1d
    )

    parser.add_option("--menu-char",
        dest = "menu_char",
        action = "store",
        type = 'int',
        help = "ASCII code of special character that is used to control miniterm (menu)",
        default = 0x14
    )

    (options, args) = parser.parse_args()

    if options.cr and options.lf:
        parser.error("only one of --cr or --lf can be specified")

    # NOTE(review): identity comparison `is ""` relies on string interning;
    # an equality test (==) is the correct check here.
    if options.cmd is "":
        parser.error('Must provide command')

    global EXITCHARCTER, MENUCHARACTER
    EXITCHARCTER = chr(options.exit_char)
    MENUCHARACTER = chr(options.menu_char)

    port = options.port
    baudrate = options.baudrate
    if args:
        # NOTE(review): --port defaults to "COM2" (never None), so any
        # positional argument always triggers this parser.error.
        if options.port is not None:
            parser.error("no arguments are allowed, options only when --port is given")
        port = args.pop(0)
        if args:
            try:
                baudrate = int(args[0])
            except ValueError:
                parser.error("baud rate must be a number, not %r" % args[0])
            args.pop(0)
        if args:
            parser.error("too many arguments")
    else:
        if port is None:
            port = 0

    convert_outgoing = CONVERT_CRLF
    if options.cr:
        convert_outgoing = CONVERT_CR
    elif options.lf:
        convert_outgoing = CONVERT_LF

    try:
        miniterm = Miniterm(
            port,
            baudrate,
            cmd=options.cmd,
            echo=options.echo,
            convert_outgoing=convert_outgoing,
            repr_mode=options.repr_mode
        )
    except serial.SerialException:
        sys.stderr.write("could not open port %r\n" % port)
        sys.exit(1)

    if not options.quiet:
        sys.stderr.write('--- InstaSend on %s: %d,%s,%s,%s ---\n' % (
            miniterm.serial.portstr,
            miniterm.serial.baudrate,
            miniterm.serial.bytesize,
            miniterm.serial.parity,
            miniterm.serial.stopbits,
        ))
        # sys.stderr.write('--- Quit: %s  |  Menu: %s | Help: %s followed by %s ---\n' % (
        #     key_description(EXITCHARCTER),
        #     key_description(MENUCHARACTER),
        #     key_description(MENUCHARACTER),
        #     key_description('\x08'),
        # ))

    miniterm.start()
    miniterm.join(True)
    if not options.quiet:
        sys.stderr.write("\n--- exit ---\n")
    miniterm.join()


if __name__ == '__main__':
    main()
UTF-8
Python
false
false
2,011
3,238,405,386,727
ce328ab4c6642f5225135a7425e3290fb08ac920
7a488c1f0657c8fe8856e8ca9c34acd2b8c74ce3
/presentation/synthetic/invert.py
81d5cd0384eae77395085e1de2bff6f06d60b4f3
[]
no_license
whigg/seg2012
https://github.com/whigg/seg2012
3aeaa9c9d265aade8fe16ce8c5653fe275509d75
33a09bdad1531231deb5fffc2928c7032fc8f277
refs/heads/master
2021-05-28T10:49:09.682200
2014-01-15T16:30:28
2014-01-15T16:30:28
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# Gravity-gradient inversion script (fatiando "harvester" method):
# reads data.txt + model.pickle, grows an estimate from seed prisms and
# saves 3D renderings, a fit plot, and the pickled result.
import sys
import cPickle as pickle
import fatiando as ft
import numpy as np

log = ft.log.get()
log.info(ft.log.header())


def setview1(scene):
    # Fixed camera preset #1 (values captured interactively from Mayavi).
    scene.scene.camera.position = [-2267.5718325185544, 516.89047192363171, 325.41328402454576]
    scene.scene.camera.focal_point = [486.1565293791673, 491.11737744276104, 577.28350756789393]
    scene.scene.camera.view_angle = 30.0
    scene.scene.camera.view_up = [0.091053683141375921, -0.0033155163338156627, -0.99584046620823252]
    scene.scene.camera.clipping_range = [1653.5666619996091, 4186.5783724965167]
    scene.scene.camera.compute_view_plane_normal()
    scene.scene.render()


def setview2(scene):
    # Fixed camera preset #2.
    scene.scene.camera.position = [-2083.5891203179367, 2196.562816405461, -698.77837411337339]
    scene.scene.camera.focal_point = [467.08916568702983, 471.73128900160287, 610.85856263017729]
    scene.scene.camera.view_angle = 30.0
    scene.scene.camera.view_up = [0.33814580712856857, -0.1984415469462483, -0.91993389195471487]
    scene.scene.camera.clipping_range = [1619.2366959256301, 5453.5227454505075]
    scene.scene.camera.compute_view_plane_normal()
    scene.scene.render()


# CLI: invert.py <seedfile> <mu> <delta> <classic|shape>
seedfile = sys.argv[1]
mu = float(sys.argv[2])
delta = float(sys.argv[3])
if sys.argv[4] == 'classic':
    useshape = False
elif sys.argv[4] == 'shape':
    useshape = True
else:
    print "invalid argument"
    sys.exit()

# Observation points and the six gravity-gradient tensor components.
xp, yp, zp, gxx, gxy, gxz, gyy, gyz, gzz = np.loadtxt('data.txt', unpack=True)
with open('model.pickle') as f:
    model = pickle.load(f)

# 1 km cube discretized into a 30x30x30 prism mesh.
bounds = [0, 1000, 0, 1000, 0, 1000]
mesh = ft.msh.ddd.PrismMesh(bounds, (30, 30, 30))
dms = ft.pot.harvester.wrapdata(mesh, xp, yp, zp, gxx=gxx, gxy=gxy, gxz=gxz,
                                gyy=gyy, gyz=gyz, gzz=gzz)
seeds = ft.pot.harvester.sow(ft.pot.harvester.loadseeds(seedfile), mesh,
                             mu=mu, delta=delta, useshape=useshape)

# Render the seeds inside the wireframe true model (two camera angles).
scene = ft.vis.figure3d(size=(1000, 1000))
ft.vis.prisms(model, 'density', style='wireframe', linewidth=5)
ft.vis.prisms([s.get_prism() for s in seeds], 'density', vmin=0, vmax=1000)
ft.vis.axes3d(ft.vis.outline3d(bounds), ranges=[b*0.001 for b in bounds],
              fmt='%0.1f', nlabels=3)
ft.vis.wall_bottom(bounds)
ft.vis.wall_north(bounds)
setview1(scene)
ft.vis.savefig3d('seeds-%s-%s1.png' % (seedfile, sys.argv[4]))
setview2(scene)
ft.vis.savefig3d('seeds-%s-%s2.png' % (seedfile, sys.argv[4]))
#ft.vis.show3d()

# Run the inversion and keep only non-zero-density cells of the estimate.
estimate, goals, misfits = ft.pot.harvester.harvest(dms, seeds)
mesh.addprop('density', estimate['density'])
result = ft.msh.ddd.vremove(0, 'density', mesh)
with open('results-%s-%s.pickle' % (seedfile, sys.argv[4]), 'w') as f:
    pickle.dump({'estimate':result, 'predicted':dms[-1].predicted,
                 'seeds':[s.get_prism() for s in seeds]}, f)

# 2D map: observed gzz (filled contours) vs predicted (black contours).
shape = [51, 51]
ft.vis.figure(figsize=(3.33,4))
ft.vis.axis('scaled')
levels = ft.vis.contourf(yp, xp, gzz, shape, 6)
ft.vis.colorbar(orientation='horizontal', shrink=0.8)
ft.vis.contour(yp, xp, dms[-1].predicted, shape, levels, color='k',
               linewidth=1.5)
ft.vis.xlabel('y (km)')
ft.vis.ylabel('x (km)')
ft.vis.m2km()
ft.vis.savefig('fit-%s-%s.png' % (seedfile, sys.argv[4]), dpi=300)
#ft.vis.show()

# Render the recovered body inside the wireframe true model.
scene = ft.vis.figure3d(size=(1000, 1000))
ft.vis.prisms(model, 'density', style='wireframe', linewidth=8)
ft.vis.prisms(result, 'density', vmin=0, vmax=1000)
ft.vis.axes3d(ft.vis.outline3d(bounds), ranges=[b*0.001 for b in bounds],
              fmt='%0.1f', nlabels=3)
ft.vis.wall_bottom(bounds)
ft.vis.wall_north(bounds)
setview1(scene)
ft.vis.savefig3d('result-%s-%s1.png' % (seedfile, sys.argv[4]))
setview2(scene)
ft.vis.savefig3d('result-%s-%s2.png' % (seedfile, sys.argv[4]))
ft.vis.show3d()
UTF-8
Python
false
false
2,014
11,596,411,742,801
dc241d2323075856a8a0138e1052b1593aaf51ae
7fefbf96f1fb509ad2213d59ec33fdf1ef4ab9fe
/examples/basic_usages.py
e060dd064dda98af9c5775c21637d6a8f12bf1c9
[ "MIT" ]
permissive
kaixiang-li/askme
https://github.com/kaixiang-li/askme
1a6157296b222113f7e1e23e49f2d6039d42768e
a53cf7affa6f6d16e47e5d05482ff046be384da0
refs/heads/master
2022-11-05T20:01:18.150746
2013-04-05T14:02:07
2013-04-06T16:06:34
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# Interactive demo of the Askme terminal-prompt helper: shows templated
# prompts, defaults, validation, typed answers and hidden input.
from datetime import date
from askme import Askme

terminal = Askme()

# Prompt text may embed template code (here: colorized output via cprint).
name = terminal.ask("<% cprint('Hello, World!', 'green', 'on_red') %>")
# default + uppercase transform + regex validation of the final answer.
condition = terminal.ask("hi?where are you from: ", default = "so nice", uppercase = True ,validate="^[A-Z]{2}$")
# Passing a type (date) asks Askme to coerce the answer.
birthday = terminal.ask("birthday?(year,month,day): ", date)
# echo=False hides the typed characters (password-style input).
password = terminal.ask("password: ", echo = False)
print birthday
print password
print "the man %(name)s is from %(condition)s" % locals()
UTF-8
Python
false
false
2,013
6,957,847,050,812
9e1890b9e4e70becb07f301bb69cae48eaf54927
a15ed3d4e5351e174c8afde0bf0f7cc9c592e2d2
/googleappengine/electionsurvey/bin/load_freshdata.py
5a1b59e1c2cb288775d4e67a01cff5f7244f33de
[ "BSD-3-Clause", "LicenseRef-scancode-proprietary-license" ]
non_permissive
sebbacon/theyworkforyou
https://github.com/sebbacon/theyworkforyou
9d2c0a57711fb43b5ec84fefa6576f037b656803
5a95c73c0be78f9a677bd5861d855942bd0475d4
refs/heads/master
2021-01-15T20:17:59.071755
2010-04-24T21:05:54
2010-04-24T21:05:54
628,088
1
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/python2.5
# coding=utf-8
#
# load_freshdata.py:
# Loads data from YourNextMP and DemocracyClub into GAE. Call this script as
# main.
#
# Copyright (c) 2010 UK Citizens Online Democracy. All rights reserved.
# Email: [email protected]; WWW: http://www.mysociety.org/

import sys
import csv
import os
import getpass
import datetime
import optparse
import re
import urllib2
import gzip

sys.path = ["../", "../google_appengine/"] + sys.path
import django.utils.simplejson as json
from google.appengine.ext import db
from google.appengine.ext.remote_api import remote_api_stub
from google.appengine.api.datastore_types import Key

import settings
from models import Party, Candidate, Seat, Candidacy, RefinedIssue

# Parameters
DEMOCLUB_URL="http://www.democracyclub.org.uk/issues/refined_csv"
YOURNEXTMP_URL="http://www.yournextmp.com/data/%s/latest/json_main"

parser = optparse.OptionParser()

parser.set_usage('''Load or update data in TheyWorkForYou election, from YourNextMP and Democracy Club. Arguments are JSON files from YourNextMP or CSV files from Democracy Club to load. You must specify *all* the files, as other entries in the database will be marked as deleted.''')
parser.add_option('--host', type='string', dest="host",
                  help='domain:port of application, e.g. localhost:8080, election.theyworkforyou.com',
                  default="localhost:8080")
parser.add_option('--email', type='string', dest="email",
                  help='email address for authentication to application',
                  default="[email protected]")
parser.add_option('--fetch', action='store_true', dest='fetch',
                  help='as well as command line arguments, also retrieve latest full dumps from YourNextMP and DemocracyClub and use this',
                  default=False)
(options, args) = parser.parse_args()

# Only YourNextMP JSON dumps and Democracy Club CSVs are accepted.
for arg in args:
    if not re.search("(\.json|\.csv)$", arg):
        raise Exception("Please only .json or .csv files: " + arg)

######################################################################
# Helpers

def convdate(d):
    # Dump timestamps are ISO-8601 without timezone.
    return datetime.datetime.strptime(d, "%Y-%m-%dT%H:%M:%S")

def int_or_null(i):
    # Preserve None; otherwise coerce to int.
    if i is None:
        return i
    return int(i)

# Name -> Seat lookup, populated while loading seats in load_from_ynmp.
seats_by_name = {}
def find_seat(seat_name):
    """Look up a Seat by name, retrying with '&' spelled out as 'and'."""
    if seat_name not in seats_by_name and '&' in seat_name:
        seat_name = seat_name.replace("&", "and")
    if seat_name not in seats_by_name:
        raise Exception("Could not find seat " + seat_name)
    return seats_by_name[seat_name]

def log(msg):
    # Timestamped progress logging to stdout.
    print datetime.datetime.now(), msg

def put_in_batches(models, limit = 250):
    """db.put() the models in chunks to stay under GAE's per-call limits."""
    tot = len(models)
    c = 0
    while len(models) > 0:
        put_models = models[0:limit]
        log("  db.put batch " + str(c) + ", size " + str(len(put_models)) + ", total " + str(tot))
        db.put(put_models)
        models = models[limit:]
        c += 1

######################################################################
# Load from YourNextMP

# Find out which constituencies (seats) do not allow updates to local issues
# any more (i.e. because a survey has already been sent out)
def get_frozen_local_issues_seats():
    log("Getting seats which are frozen to local issues changes")
    frozen_seats = {}
    # Page through results 100 at a time using key ordering
    # (remote_api caps fetches at 1000 entities).
    fs = Seat.all().filter("frozen_local_issues =", True).fetch(100)
    while fs:
        for f in fs:
            log(" Seat is frozen to local issues changes: " + f.name)
            frozen_seats[f.key().name()] = f
        fs = Seat.all().filter("frozen_local_issues =",True).filter('__key__ >', fs[-1].key()).fetch(100)
    return frozen_seats

def load_from_ynmp(ynmp, frozen_seats):
    """Store parties, candidates, seats and candidacies from the merged
    YourNextMP dump; candidacies missing from the dump are marked deleted."""
    # Put parties in datastore - don't worry about deleted ones, they just
    # won't be referenced by other tables.
    parties_by_key = {}
    for party_id, party_data in ynmp["Party"].iteritems():
        key_name = party_id
        party = Party(
            ynmp_id = int(party_id),
            name = party_data["name"],
            code = party_data["code"],
            image_id = int_or_null(party_data["image_id"]),
            created = convdate(party_data["created"]),
            updated = convdate(party_data["updated"]),
            key_name = key_name
        )
        log("  Storing party " + party.name)
        parties_by_key[key_name] = party
    log("Putting all parties")
    put_in_batches(parties_by_key.values())

    # Put candidates in datastore - don't worry about deleted ones, they
    # just won't be referenced by a candidacy
    candidates_by_key = {}
    for candidate_id, candidate_data in ynmp["Candidate"].iteritems():
        if "status" not in candidate_data:
            raise Exception("No status entry for " + str(candidate_data))
        key_name = candidate_id
        candidate = Candidate(
            ynmp_id = int(candidate_id),
            name = candidate_data["name"],
            code = candidate_data["code"],
            status = candidate_data["status"],
            email = candidate_data["email"],
            party = parties_by_key[candidate_data["party_id"]],
            image_id = int_or_null(candidate_data["image_id"]),
            created = convdate(candidate_data["created"]),
            updated = convdate(candidate_data["updated"]),
            key_name = key_name
        )
        log("  Storing candidate " + candidate.name)
        candidates_by_key[key_name] = candidate
    log("Putting all candidates")
    put_in_batches(candidates_by_key.values())

    # Put seats in datastore - don't worry about deleted ones, they
    # just won't be referenced by a candidacy
    seats_by_key = {}
    for seat_id, seat_data in ynmp["Seat"].iteritems():
        key_name = seat_id
        seat = Seat(
            ynmp_id = int(seat_id),
            name = seat_data["name"],
            code = seat_data["code"],
            created = convdate(seat_data["created"]),
            updated = convdate(seat_data["updated"]),
            key_name = key_name
        )
        # Preserve the frozen flag for seats whose surveys already went out.
        if key_name in frozen_seats:
            seat.frozen_local_issues = True
        log("  Storing seat " + seat.name)
        seats_by_key[key_name] = seat
        seats_by_name[seat.name] = seat
    log("Putting all seats")
    put_in_batches(seats_by_key.values())

    # Get list of existing candiacies in remote datastore
    # in batches due to 1000 entity at a time limit, as per http://code.google.com/appengine/articles/remote_api.html
    log("Getting list of Candidacies")
    candidacies = Candidacy.all().filter("deleted =", False).fetch(100)
    to_be_marked_deleted = {}
    while candidacies:
        for candidacy in candidacies:
            key_name = candidacy.key().name()
            log("Marking before have candidacy key " + key_name)
            to_be_marked_deleted[key_name] = candidacy
        candidacies = Candidacy.all().filter("deleted =", False).filter('__key__ >', candidacies[-1].key()).fetch(100)

    # Loop through new dump of candidacies from YourNextMP, adding new ones
    candidacies_by_key = {}
    for candidacy_id, candidacy_data in ynmp["Candidacy"].iteritems():
        candidate = candidates_by_key[candidacy_data["candidate_id"]]
        assert candidate.status in ['standing', 'standing_down', 'not-standing']
        # Only currently-standing candidates get a candidacy record.
        if candidate.status == 'standing_down' or candidate.status == 'not-standing':
            continue
        key_name = candidacy_data["seat_id"] + "-" + candidacy_data["candidate_id"]
        # find existing entry if there is one, or else make new one
        if key_name in to_be_marked_deleted:
            candidacy = to_be_marked_deleted[key_name]
        else:
            candidacy = Candidacy(key_name = key_name)
        # fill in values
        candidacy.ynmp_id = int(candidacy_id)
        candidacy.seat = seats_by_key[candidacy_data["seat_id"]]
        candidacy.candidate = candidate
        candidacy.created = convdate(candidacy_data["created"])
        candidacy.updated = convdate(candidacy_data["updated"])
        candidacy.deleted = False
        # make sure it has a survey token
        if not candidacy.survey_token:
            log("Generating survey token for " + candidacy.seat.name + " " + candidacy.candidate.name)
            candidacy.generate_survey_token() # this does save too, since it logs
        log("Storing candidacy " + candidacy.seat.name + " " + candidacy.candidate.name)
        candidacies_by_key[key_name] = candidacy
        # record we still have this candidacy
        if key_name in to_be_marked_deleted:
            del to_be_marked_deleted[key_name]
    log("Putting all candidacies")
    put_in_batches(candidacies_by_key.values())

    # See which candidacies are left, i.e. are deleted
    for key_name, candidacy in to_be_marked_deleted.iteritems():
        log("Marking deleted " + candidacy.seat.name + " " + candidacy.candidate.name)
        candidacy.deleted = True
    log("Putting marked deleted candidacies")
    put_in_batches(to_be_marked_deleted.values())

######################################################################
# Load from DemocracyClub

# NOTE(review): the following loop duplicates the body of
# get_frozen_local_issues_seats() at module level.  It executes at import
# time -- before remote_api is configured (see "Main" below) -- and
# references `frozen_seats`, which is not defined at this point, so it
# looks like an accidental paste that would raise NameError / query the
# datastore prematurely.  Confirm and remove.
fs = Seat.all().filter("frozen_local_issues =", True).fetch(100)
while fs:
    for f in fs:
        log(" Seat is frozen to local issues changes: " + f.name)
        frozen_seats[f.key().name()] = f
    fs = Seat.all().filter("frozen_local_issues =",True).filter('__key__ >', fs[-1].key()).fetch(100)

def load_from_democlub(csv_files, frozen_seats):
    """Store refined local issues from Democracy Club CSVs; issues missing
    from the files are marked deleted.  Frozen seats are skipped."""
    # Get list of existing refined issues in remote datastore, so can track what to delete
    log("Getting list of refined issues")
    refined_issues = RefinedIssue.all().filter("deleted =", False).fetch(100)
    to_be_marked_deleted = {}
    while refined_issues:
        for refined_issue in refined_issues:
            key_name = refined_issue.key().name()
            log("  Marking before have refined issue key " + key_name)
            to_be_marked_deleted[key_name] = refined_issue
        refined_issues = RefinedIssue.all().filter("deleted =", False).filter('__key__ >', refined_issues[-1].key()).fetch(100)

    # Load in CSV file and create/update all the issues
    refined_issues_by_key = {}
    for csv_file in csv_files:
        log("Reading CSV file " + csv_file)
        reader = csv.reader(open(csv_file, "rb"))
        for row in reader:
            # Older rows lack the trailing short_name column; pad with None.
            if len(row) == 6:
                row.append(None)
            (democlub_id, question, reference_url, seat_name, created, updated, short_name) = row
            key_name = democlub_id
            # DemocracyClub has this constituency without its accent, YourNextMP has it with it.
            seat_name = seat_name.replace("Ynys Mon", "Ynys Môn")
            seat = find_seat(seat_name.decode('utf-8'))
            if seat.key().name() in frozen_seats:
                log("  Frozen seat " + seat_name + ", not storing issue: " + question)
            else:
                refined_issue = RefinedIssue(
                    democlub_id = int(democlub_id),
                    question = question.decode('utf-8'),
                    reference_url = reference_url.decode('utf-8'),
                    short_name = short_name and short_name.decode('utf-8') or None,
                    national = (seat.name == 'National'),
                    seat = seat,
                    created = convdate(created),
                    updated = convdate(updated),
                    key_name = key_name
                )
                log("  Storing local issue for " + seat_name + ": " + question)
                refined_issues_by_key[key_name] = refined_issue
                # record we still have this issue
                if key_name in to_be_marked_deleted:
                    del to_be_marked_deleted[key_name]
    log("Putting all refined issues")
    put_in_batches(refined_issues_by_key.values())

    # See which refined issues are left, i.e. are deleted
    for key_name, refined_issue in to_be_marked_deleted.iteritems():
        log("  Marking deleted issue for " + refined_issue.seat.name + ":" + refined_issue.question)
        refined_issue.deleted = True
    log("Putting marked deleted refined issues")
    put_in_batches(to_be_marked_deleted.values())

######################################################################
# Main

# Configure connection via remote_api to datastore - after this
# data store calls are remote
log("Connecting to " + options.host)
def auth_func():
    # Prompt for the password each run; email comes from --email.
    return (options.email, getpass.getpass('Password:'))
remote_api_stub.ConfigureRemoteDatastore('theyworkforyouelection', '/remote_api', auth_func, servername=options.host)

# Load in extra files
if options.fetch:
    log("Fetching latest Democracy Club CSV file")
    democlub_file = "/tmp/load_freshdata_democracy_club.csv"
    democlub_h = open(democlub_file, 'w')
    democlub_h.write(urllib2.urlopen(DEMOCLUB_URL).read())
    democlub_h.close()
    args.append(democlub_file)

    log("Fetching latest YourNextMP JSON file")
    ynmp_url = YOURNEXTMP_URL % (settings.YOURNEXTMP_API_TOKEN)
    ynmp_file = "/tmp/load_freshdata_yournextmp.json"
    # The dump is served gzip-compressed: save, then decompress.
    ynmp_h = open(ynmp_file + ".gz", 'w')
    ynmp_h.write(urllib2.urlopen(ynmp_url).read())
    ynmp_h.close()
    ynmp_h = open(ynmp_file, 'w')
    ynmp_h.write(gzip.GzipFile(ynmp_file + ".gz").read())
    ynmp_h.close()
    args.append(ynmp_file)
log("File list: " + str(args))

# Which seats are frozen to changes in local issues?
frozen_seats = get_frozen_local_issues_seats()

# Load in JSON files, merging as we go
ynmp = {}
for arg in args:
    if re.search("(\.json)$", arg):
        content = open(arg).read()
        json_load = json.loads(content)
        for k, v in json_load.iteritems():
            if k in ynmp:
                ynmp[k].update(json_load[k])
            else:
                ynmp[k] = json_load[k]
load_from_ynmp(ynmp, frozen_seats)

# Get list of CSV files
csv_files = []
for arg in args:
    if re.search("(\.csv)$", arg):
        csv_files.append(arg)
load_from_democlub(csv_files, frozen_seats)
UTF-8
Python
false
false
2,010
3,865,470,577,782
ed36eff765d5260da2c27481e4fcbb010d38defd
0d5b73ef3f531fb8a5cacf71548309b30835c1aa
/lib/prompt.py
06fcaf73d18514be0f06b975af1a5e2291d57252
[ "GPL-3.0-only", "GPL-3.0-or-later", "GPL-1.0-or-later" ]
non_permissive
nsubiron/nscmd
https://github.com/nsubiron/nscmd
82dacb779d58bbf16d773f118be67881f275e22a
a3775f53d748638dd61b57c1d242124debf13694
refs/heads/master
2016-09-05T15:53:43.668841
2013-06-04T18:57:25
2013-06-04T18:57:25
9,841,521
1
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import platform


def yes_no(question, default='yes'):
    """Ask a yes/no question via raw_input() and return the answer.

    'question' is the string presented to the user.  'default' is the
    presumed answer when the user just hits <Enter>: 'yes' (the default),
    'no', or None (meaning an explicit answer is required).  Returns True
    for yes and False for no; raises ValueError for an unrecognised
    default.
    """
    answers = {'yes': True, 'y': True, 'no': False, 'n': False}
    # The prompt hints at the default by capitalising the default letter.
    hints = {True: '[Y/n]', False: '[y/N]', None: '[y/n]'}
    try:
        if default is not None:
            default = answers[default.lower()]
        hint = hints[default]
    except KeyError:
        raise ValueError('Invalid default answer \'%s\'.' % default)
    while True:
        choice = raw_input('%s %s: ' % (question, hint)).lower()
        if choice == '' and default is not None:
            return default
        if choice in answers:
            return answers[choice]
        print('Please answer \'%s\'.' % '\' or \''.join(answers.keys()))


if platform.system() == 'Windows':
    # The Windows console does not understand ANSI escapes; pass through.
    def color_string(string, *attributes):
        return string
else:
    def color_string(string, *attributes):
        """ If 'bold', it must be last attribute."""
        codes = {
            'bold': '1',
            'black': '0;30',
            'blue': '0;34',
            'brown': '0;33',
            'cyan': '0;36',
            'dark gray': '1;30',
            'green': '0;32',
            'light blue': '1;34',
            'light cyan': '1;36',
            'light gray': '0;37',
            'light green': '1;32',
            'light purple': '1;35',
            'light red': '1;31',
            'purple': '0;35',
            'red': '0;31',
            'white': '1;37',
            'yellow': '1;33',
        }
        # Unknown attribute names are silently dropped, as before.
        selected = [codes[a] for a in attributes if a in codes]
        return '\x1b[%sm%s\x1b[0m' % (';'.join(selected), string)
UTF-8
Python
false
false
2,013
5,566,277,653,166
9f287a8ae5f99998c273670d6229e0ca4f14db44
a40897e5221f837a7c270ceb3a625197b64402e2
/plugins/plugin_help.py
e8063ee903614db9d5d400e995d86e65a1f09734
[]
no_license
rogovvladimir/xmpp-bot
https://github.com/rogovvladimir/xmpp-bot
02a677bee6aba77d6e021c4e893b078d37193224
bd0bda7985295cfa5fcbef92256dfbd07c6a1b59
refs/heads/master
2021-01-21T12:39:29.256985
2012-07-19T15:23:46
2012-07-19T15:23:46
2,190,618
2
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import re

from . import BaseCommand, commands
from twilix.base.myelement import BreakStanza


class helpCommand(BaseCommand):
    # Bot command that lists every registered command with its HELP text.
    # The regex makes the "help" keyword optional, so unrecognised input
    # can also land here with the unknown text in capture group 1.
    COMMAND = u'help'  # keyword that triggers this command
    HELP = u'get help'  # description shown in the generated listing
    COMMAND_REGEX = re.compile(ur'^(?:help)?(.*)$')

    def chatHandler(self):
        # Handle a private-chat message: build a listing of all commands.
        # NOTE(review): self.cmdpars looks like the regex match object and
        # self.get_reply() like a stanza factory, both presumably set up by
        # BaseCommand -- confirm against the base class.
        res = u''
        cmnd = self.cmdpars.group(1)
        if cmnd:
            # Extra text after "help" means the user typed something we
            # don't recognise; prefix the listing with a notice.
            res = u'[%s] is a bad command for me' % self.cmdpars.group()
        helpdict = {}
        for cmd in commands:
            # Commands without a HELP attribute get a placeholder.
            helpdict[cmd.COMMAND] = getattr(cmd, 'HELP', u"(haven't help for this command)")
        res += u'\nThere are :\n\t%s\nlist of \
commands, supported by this bot' % \
            '\n\t'.join(['[%s] -- %s;' % \
                (cmd, helpdict[cmd]) \
                for cmd in sorted(helpdict)])
        reply = self.get_reply()
        reply.body = res
        # BreakStanza presumably stops further handlers from running --
        # TODO confirm against twilix dispatch semantics.
        return (reply, BreakStanza())

    def groupchatHandler(self):
        # Handle a group-chat message.  Unlike chatHandler, any extra text
        # after "help" aborts silently (no "bad command" notice in rooms).
        res = u''
        cmnd = self.cmdpars.group(1)
        if cmnd:
            return BreakStanza()
        helpdict = {}
        for cmd in commands:
            helpdict[cmd.COMMAND] = getattr(cmd, 'HELP', u"(haven't help for this command)")
        res += u'\nThere are :\n\t%s\nlist of \
commands, supported by this bot' % \
            '\n\t'.join(['[%s] -- %s;' % \
                (cmd, helpdict[cmd]) \
                for cmd in sorted(helpdict)])
        reply = self.get_reply()
        # Address the sender by their room nickname, then send the reply
        # to the bare room JID so the whole room sees it.
        reply.body = u'%s: %s' % (reply.to.resource, res)
        reply.to = reply.to.bare()
        return (reply, BreakStanza())
UTF-8
Python
false
false
2,012
16,939,351,019,605
b502ae178207d72543ed7894228158204793ce74
a1101dd9d2a37c38254e365e0723bfb06101f3c0
/CSSE1001Assignment3/circuit_editor_csse1k/circuit_gui/objectwidgets.py
c7b1944bf383c1d4be338bce6d8ee4acf5927d0e
[ "MIT" ]
permissive
joeandersen/CSSE7030
https://github.com/joeandersen/CSSE7030
a7712c749838a9ed9a19ed59fa955fd086360af5
bd6a49a754c28de14cdcb8de20e33bf50b5331c0
refs/heads/master
2020-04-23T08:18:19.165696
2014-10-14T07:35:39
2014-10-14T07:35:39
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
""" This file contains the main classes of the GUI system, comprising the actual circuit editing widget and its components. It also contains a set of 'Sleepy' classes, serialization and analysis adapter classes used in tandem with the corresponding GUI widgets for saving/loading of layouts, and for performing circuit analysis. """ # import that barge, tote that bale! from PyQt4 import * from PyQt4.QtCore import * from PyQt4.QtGui import * #from py.magic import greenlet import circuit as analysis from dialogs import AttributeDialog class ObjectAttributeModel(QAbstractItemModel): """A Qt item model representing the attributes of a circuit component. C'tor: ObjectAttributeModel(ObjectWidget) """ def __init__(self, object): QAbstractItemModel.__init__(self) self._object = object # all these methods' signatures and function are documented extensively in the Qt documentation # so there is no real need to duplicate that effort here in a subclass def index(self, row, column, parent): return self.createIndex(row, column) def flags(self, index): # only make the values column editable if index.column() == 0: return Qt.ItemIsEnabled | Qt.ItemIsSelectable elif index.column() == 1: return Qt.ItemIsEnabled | Qt.ItemIsSelectable | Qt.ItemIsEditable def headerData(self, section, orientation, role): if orientation == Qt.Horizontal: if role == Qt.DisplayRole: if section == 0: return QVariant("Attribute") elif section == 1: return QVariant("Value") return QVariant() def data(self, index, role): if role in [Qt.DisplayRole, Qt.EditRole]: if index.column() == 0: return QVariant(self._object.attributes()[index.row()]) elif index.column() == 1: return QVariant(self._object[self._object.attributes()[index.row()]]) else: return QVariant() def rowCount(self, index): return len(self._object.attributes()) def columnCount(self, index): return 2 def setData(self, index, value, role): if role == Qt.EditRole: if index.column() == 1: # set the attribute on the object 
self._object[self._object.attributes()[index.row()]] = value.toString() #self.emit(SIGNAL("dataChanged(QModelIndex,QModelIndex)"), index, index) return True return False class ObjectWidget(QWidget): """The base class for all circuit object widgets. It implements a lot of basic functionality, including selecting, dragging, connection/disconnection and attribute storage. Each widget has a number of anchor points where connections can be made. Subclasses must set these up in their constructors, by changing the properties self._anchorPoints, self._connections (set it to empty lists initially) and self._maxConnections. See NodeWidget and friends below for examples of subclasses. SIGNALS: selected() - emitted when this object is selected unselected() connectionsChanged() clicked(int, QPoint) - emitted when the object is clicked, with distance from nearest anchor and which anchor it is. moved(QPoint) - emitted when the object moves. flipped() - emitted when the object flips orientation """ def __init__(self, parent=None): QWidget.__init__(self, parent) self._orientation = 'h' self._isSelected = False self._isDragging = False self._anchorPoints = [(0,0)] self._connections = {self._anchorPoints[0]: []} self._maxConnections = {self._anchorPoints[0]: 0} self._attributes = {'nickname': ''} # attribute model self._attrmodel = ObjectAttributeModel(self) # slot connections QObject.connect(self, SIGNAL("selected()"), self, SLOT("update()")) QObject.connect(self, SIGNAL("unselected()"), self, SLOT("update()")) def __getitem__(self, attr): """Get an attribute of the object.""" return self._attributes[attr] def __setitem__(self, attr, value): """Set an attribute of the object.""" self._attributes[attr] = value def attributes(self): """Returns the list of available attributes. attributes() -> list<string> """ return self._attributes.keys() def anchorPoints(self): """Returns the list of available anchorpoints. 
anchorPoints() -> list<tup> """ return self._anchorPoints def connections(self, ancPoint=None): """If given an anchor point, this functionr returns a list of connections associated with that anchor point. Otherwise, it will return all connections associated with this object. connections are of the form: list<tuple<object, anchor>, tuple<object,anchor>> connections([tup]) -> list<connections> """ if ancPoint: return [[(self, ancPoint), (other, other.anchorOf(self))] for other in self._connections[ancPoint]] else: cons = [] for ap in self._anchorPoints: cons = cons + self.connections(ap) return cons def anchorOf(self, other): """Gets the anchor point that connects this object to other. anchorOf(object) -> QPoint """ for x in self._connections: if other in self._connections[x]: return x return None def canConnect(self, ancPoint): """Is there space for another connection on the given anchor point? canConnect(QPoint) -> bool """ return (len(self._connections[ancPoint]) < self._maxConnections[ancPoint] or self._maxConnections[ancPoint] == -1) def connectTo(self, ancPoint, object): """Connects this object to another via the given anchor point, if possible, in a one-way fashion. connectTo(QPoint, object) -> void """ if self.canConnect(ancPoint): self._connections[ancPoint].append(object) self.emit(SIGNAL("connectionsChanged()")) def connect(self, ancPoint, object, otherAncPoint): """Connects this object to another via the two given anchors, if possible, in a two-way fashion. 
connect(QPoint, object ,QPoint) -> void """ if self.canConnect(ancPoint) and object.canConnect(otherAncPoint): self.connectTo(ancPoint, object) object.connectTo(otherAncPoint, self) def disconnectFrom(self, ancPoint, object): """Destroys one end of a connection towards object via anchor point.""" if object in self._connections[ancPoint]: self._connections[ancPoint].remove(object) self.emit(SIGNAL("connectionsChanged()")) def disconnect(self, ancPoint, object, otherAncPoint): """Destroys an entire connection towards object's otherAncPoint via this object's ancPoint.""" self.disconnectFrom(ancPoint, object) object.disconnectFrom(otherAncPoint, self) def mouseDoubleClickEvent(self, event): """Handles a mouse double click by opening an attribute dialog""" self._attrdlg = AttributeDialog(self._attrmodel) self._attrdlg.show() def mousePressEvent(self, event): """Handles a mouse press event on the object widget. Prepares to start the drag-movement process, and attempts to select the widget.""" self._isDragging = True self._dragLastPos = QPoint(event.globalX(), event.globalY()) self._dragStartPos = QPoint(self.x(), self.y()) # find nearest anchor point to where the user clicked anchorPoints = [QPoint(x,y) for x,y in self._anchorPoints] dragPoint = QPoint(event.x(), event.y()) leastDist = (dragPoint - anchorPoints[0]).manhattanLength() leastPoint = anchorPoints[0] for p in anchorPoints: dist = (dragPoint - p).manhattanLength() if dist < leastDist: leastDist = dist leastPoint = p self._dragClosestAnchor = leastPoint # emit the Qt signal self.emit(SIGNAL("clicked(int,QPoint)"), leastDist, leastPoint) #self.parent().selectMe(self) def mouseMoveEvent(self, event): """Handles mouse movement on the widget. 
If we're in drag mode, ie a button is held down, the widget will move by snapped intervals.""" if self._isDragging: curPos = QPoint(self.x(), self.y()) newPos = self._dragStartPos + (event.globalPos() - self._dragLastPos) snapPos = self.parent().snap(newPos) self.move(snapPos) self.emit(SIGNAL("moved(QPoint)"), snapPos) #self.lastPos = event.globalPos() def mouseReleaseEvent(self, event): self._isDragging = False def flip(self): """Flips the object's orientation over.""" self._orientation = {'h': 'v', 'v': 'h'}[self._orientation] self.update() self.emit(SIGNAL("flipped()")) def orientation(self): """Returns the object's present orientation. orientation() -> char (one of 'h' or 'v') """ return self._orientation def selected(self): """Returns True if the object is selected, otherwise False.""" return self._isSelected def select(self): """Sets this object as selected.""" self._isSelected = True self.emit(SIGNAL("selected()")) def unselect(self): """Sets this object as unselected.""" self._isSelected = False self.emit(SIGNAL("unselected()")) def drawSelectedOutline(self, painter): """A convenience function for subclasses to paint a red box around the widget when it is selected. 
drawSelectedOutline(QPainter) -> void """ if self._isSelected: painter.setPen(Qt.red) painter.drawRect(QRect(0,0,self.width(), self.height())) class CircuitWidget(QWidget): """The parent grid widget that holds circuit component widgets.""" def __init__(self, parent=None): QWidget.__init__(self, parent) self.resize(600,600) self._grid = 30 self.setAcceptDrops(True) def addWidget(self, widget): """Connects a child widget's signals appropriately to make sure everything works as expected (redrawing of connections etc)""" # connect signals on the child QObject.connect(widget, SIGNAL("flipped()"), self, SLOT("update()")) QObject.connect(widget, SIGNAL("clicked(int,QPoint)"), self._childClickedLambda(widget)) QObject.connect(widget, SIGNAL("connectionsChanged()"), self, SLOT("update()")) QObject.connect(widget, SIGNAL("moved(QPoint)"), self, SLOT("update()")) def _childClickedLambda(self, child): """Generates a lambda for calling the _childClicked function for a given child. _childClickedLambda(object) -> lambda """ return (lambda dist, point: self._childClicked(child, dist, (point.x(),point.y()))) def _childClicked(self, child, dist, point): """Handles a child click event.""" for c in self.children(): c.unselect() child.select() self.emit(SIGNAL("childClicked"), child, dist, point) def dragEnterEvent(self, event): event.acceptProposedAction() def dropEvent(self, event): """Circuit widget accepts drag-drops from a list widget with items carrying particular texts.""" try: wtype = event.source().selectedItems()[0].text(0) except: print "Not from a source list, can't drop this." 
return if wtype == 'Normal node': n = NodeWidget(self) n.move(self.snap(event.pos())) n.show() self.addWidget(n) elif wtype == 'Voltage source': vs = VoltageSourceWidget(self) vs.move(self.snap(event.pos())) vs.show() self.addWidget(vs) elif wtype == 'Current source': cs = CurrentSourceWidget(self) cs.move(self.snap(event.pos())) cs.show() self.addWidget(cs) elif wtype == 'Shockley diode': d = ShockleyWidget(self) d.move(self.snap(event.pos())) d.show() self.addWidget(d) elif wtype == 'Resistor': r = ResistorWidget(self) r.move(self.snap(event.pos())) r.show() self.addWidget(r) else: print "dropped mimedata:", wtype return event.setDropAction(Qt.CopyAction) event.accept() self.update() def snap(self, point): """Takes a given point, and snaps it based on the grid configuration for this widget. snap(QPoint) -> QPoint """ x = round(float(point.x()) / self._grid) * self._grid y = round(float(point.y()) / self._grid) * self._grid return QPoint(x,y) def grid(self): """Returns the grid unit size. grid() -> int """ return self._grid def setGrid(self, size): """Sets the grid unit size. setGrid(int) -> void """ self._grid = size def mousePressEvent(self, event): # when we see a mouse click event it's not hitting one of our children # so deselect anything selected for c in self.children(): c.unselect() c.update() self.update() def findConnections(self): """Finds all unique connections between all child widgets of this circuit widget. Used in painting to determine which links should be drawn. 
findConnections() -> list<connection> (for the signature of a connection, see ObjectWidget.connections() """ found = [] for c in self.children(): for cn in c.connections(): if len(cn)>0: if [cn[0],cn[1]] not in found and [cn[1],cn[0]] not in found: found.append(cn) return found def makeSleepy(self): sleepies,cons = self.makeSleepyWithHash() return (sleepies.values(), cons) def makeSleepyWithHash(self): sleepies = {} for c in self.children(): sleepies[c] = c.makeSleepy() cons = [] for c in self.findConnections(): nc = [(sleepies[c[0][0]], c[0][1]), \ (sleepies[c[1][0]], c[1][1])] cons.append(nc) return (sleepies, cons) def killChildren(self): for c in self.children(): c.deleteLater() self.update() def wakeUp(self, sleeptup): sleepies,cons = sleeptup wakies = {} for s in sleepies: wakies[s] = s.create(self) for c in cons: firstwidget = wakies[c[0][0]] firstpoint = c[0][1] secondwidget = wakies[c[1][0]] secondpoint = c[1][1] firstwidget.connect(firstpoint, secondwidget, secondpoint) self.update() # now we are awake def paintEvent(self, event): painter = QPainter(self) painter.setRenderHint(QPainter.Antialiasing) # draw grid painter.setPen(Qt.gray) for y in range(0, self.height(), self._grid): painter.drawLine(QPoint(0, y), QPoint(self.width(), y)) for x in range(0, self.width(), self._grid): painter.drawLine(QPoint(x, 0), QPoint(x, self.height())) # draw connections painter.setPen(Qt.black) cnx = self.findConnections() for c in cnx: point_a = QPoint(c[0][1][0],c[0][1][1]) + c[0][0].pos() point_b = QPoint(c[1][1][0],c[1][1][1])+c[1][0].pos() diff = point_a - point_b if abs(diff.x()) <= abs(diff.y()): painter.drawLine(point_a, QPoint(point_b.x(), point_a.y())) painter.drawLine(QPoint(point_b.x(), point_a.y()), point_b) else: painter.drawLine(point_a, QPoint(point_a.x(), point_b.y())) painter.drawLine(QPoint(point_a.x(), point_b.y()), point_b) class SleepyNodeWidget(): """A 'sleepy' partner to the node widget. 
This partner contains all of the necessary information needed to recreate the node widget, minus connections, it is serializable, and also knows how to turn itself into an analysis object. C'tor: SleepyNodeWidget(QPoint, dict) """ def __init__(self, location, attributes): self._location = (location.x(), location.y()) self._attributes = attributes def create(self, parent): n = NodeWidget(parent) parent.addWidget(n) n.show() n.move(QPoint(self._location[0], self._location[1])) for a in self._attributes: n[a] = self._attributes[a] return n def analyse(self, circuit): if not self._attributes['reference']: ref = None else: ref = float(self._attributes['reference']) n = analysis.Node(nickname=self._attributes['nickname'], reference=ref) circuit.nodes.append(n) return n class NodeWidget(ObjectWidget): """ObjectWidget subclass representing a simple circuit node.""" def __init__(self, parent=None): ObjectWidget.__init__(self, parent) self._midPoint = (15,15) self._anchorPoints = [self._midPoint] self._connections = {self._midPoint: []} self._maxConnections = {self._midPoint: -1} self['nickname'] = 'n' self['reference'] = '' self.resize(30,30) def makeSleepy(self): return SleepyNodeWidget(self.pos(), self._attributes) def paintEvent(self, pevent): painter = QPainter(self) painter.setRenderHint(QPainter.Antialiasing) painter.setPen(Qt.black) if self._orientation == 'v': painter.rotate(90.0) painter.translate(0.0, -30.0) painter.drawEllipse(QRect(self.width()/2 - 10,self.height()/2 - 10,20,20)) path = QPainterPath() path.addEllipse(QRectF(self.width()/2 - 10,self.height()/2 - 10,20,20)) painter.fillPath(path, QColor(40,40,40)) painter.setPen(Qt.white) painter.drawText(QRect(0,0,self.width(),self.height()), Qt.AlignCenter | Qt.AlignTop, self['nickname']) self.drawSelectedOutline(painter) class SleepyVoltageSourceWidget(): def __init__(self, location, attributes, orientation): self._location = (location.x(), location.y()) self._attributes = attributes self._orientation = 
orientation def create(self, parent): vs = VoltageSourceWidget(parent) parent.addWidget(vs) vs.move(QPoint(self._location[0], self._location[1])) for a in self._attributes: vs[a] = self._attributes[a] if vs.orientation() != self._orientation: vs.flip() vs.show() return vs def analyse(self, circuit): n = analysis.IdealVoltageSource(nickname=self._attributes['nickname'], voltage=float(self._attributes['voltage'])) circuit.components.append(n) return n class VoltageSourceWidget(ObjectWidget): """ObjectWidget subclass representing a simple independent voltage source. Attributes: voltage *Widget changes size based on orientation. """ def __init__(self, parent=None): ObjectWidget.__init__(self, parent) self.resize(90,30) self._posTerminal = (90,15) self._negTerminal = (0,15) self['nickname'] = 'vs' self['voltage'] = '0' self._anchorPoints = [self._posTerminal, self._negTerminal] self._connections = {self._posTerminal: [], self._negTerminal: []} self._maxConnections = {self._posTerminal: 1, self._negTerminal: 1} # event handlers QObject.connect(self, SIGNAL("flipped()"), self._flipped) def makeSleepy(self): return SleepyVoltageSourceWidget(self.pos(), self._attributes, self._orientation) def paintEvent(self, pevent): """Paints the widget.""" painter = QPainter(self) painter.setRenderHint(QPainter.Antialiasing) painter.setPen(Qt.black) if self._orientation == 'v': painter.rotate(90.0) painter.translate(0.0, -30.0) painter.drawEllipse(QRect(34, 4, 22, 22)) painter.setFont(QFont('Sans serif', 7)) painter.drawText(QRect(0,0,90,30), Qt.AlignCenter | Qt.AlignTop, "%s\n%sV" % (self['nickname'], self['voltage'])) painter.drawLine(QPoint(0,15), QPoint(34,15)) painter.drawLine(QPoint(56, 15), QPoint(90,15)) painter.setFont(QFont('Sans serif', 11)) painter.drawText(QRect(3,15,30,15), Qt.AlignLeft | Qt.AlignTop, "-") painter.drawText(QRect(56,15,30,15), Qt.AlignRight | Qt.AlignTop, "+") painter.resetMatrix() self.drawSelectedOutline(painter) def _flipped(self): """Called after the 
object is flipped, to resize and reorient its terminals.""" if self._orientation == 'h': self.resize(90,30) newpos = (90,15) newneg = (0,15) self._anchorPoints = [newpos, newneg] self._connections = {newpos: self._connections[self._posTerminal], \ newneg: self._connections[self._negTerminal]} self._maxConnections = {newpos: 1, newneg: 1} self._posTerminal = newpos self._negTerminal = newneg elif self._orientation == 'v': self.resize(30,90) newneg = (15,0) newpos = (15,90) self._anchorPoints = [newpos, newneg] self._connections = {newpos: self._connections[self._posTerminal], \ newneg: self._connections[self._negTerminal]} self._maxConnections = {newpos: 1, newneg: 1} self._posTerminal = newpos self._negTerminal = newneg # emit the moved signal, this forces our parent to redraw connections # kind of a hack, but it works. self.emit(SIGNAL("moved(QPoint)"), self.pos()) self.update() class SleepyCurrentSourceWidget(): """A 'sleepy' partner to the current source widget. This partner contains all of the necessary information needed to recreate the current source widget, minus connections, and it is serializable, and also knows how to turn itself into an analysis object. C'tor: SleepyCurrentSourceWidget(QPoint, dict, char) """ def __init__(self, location, attributes, orientation): self._location = (location.x(), location.y()) self._attributes = attributes self._orientation = orientation def create(self, parent): vs = CurrentSourceWidget(parent) parent.addWidget(vs) vs.move(QPoint(self._location[0], self._location[1])) for a in self._attributes: vs[a] = self._attributes[a] if vs.orientation() != self._orientation: vs.flip() vs.show() return vs def analyse(self, circuit): n = analysis.IdealCurrentSource(nickname=self._attributes['nickname'], current=float(self._attributes['current'])) circuit.components.append(n) return n class CurrentSourceWidget(ObjectWidget): """ObjectWidget subclass representing a simple independent current source. 
Attributes: current *Widget changes size based on orientation. """ def __init__(self, parent=None): ObjectWidget.__init__(self, parent) self.resize(90,30) self._posTerminal = (90,15) self._negTerminal = (0,15) self['nickname'] = 'cs' self['current'] = '0' self._anchorPoints = [self._posTerminal, self._negTerminal] self._connections = {self._posTerminal: [], self._negTerminal: []} self._maxConnections = {self._posTerminal: 1, self._negTerminal: 1} # event handlers QObject.connect(self, SIGNAL("flipped()"), self._flipped) def makeSleepy(self): return SleepyCurrentSourceWidget(self.pos(), self._attributes, self._orientation) def paintEvent(self, pevent): """Paints the widget.""" painter = QPainter(self) painter.setRenderHint(QPainter.Antialiasing) painter.setPen(Qt.black) if self._orientation == 'v': painter.rotate(90.0) painter.translate(0.0, -30.0) painter.drawEllipse(QRect(34, 4, 22, 22)) painter.setFont(QFont('Sans serif', 7)) painter.drawText(QRect(0,0,90,30), Qt.AlignCenter | Qt.AlignTop, "%s\n%sA" % (self['nickname'], self['current'])) painter.drawLine(QPoint(0,15), QPoint(34,15)) painter.drawLine(QPoint(56, 15), QPoint(90,15)) path = QPainterPath() path.moveTo(38, 15) path.lineTo(53, 15) path.lineTo(48, 10) path.moveTo(53, 15) path.lineTo(48, 20) painter.drawPath(path) painter.resetMatrix() self.drawSelectedOutline(painter) def _flipped(self): """Called after the object is flipped, to resize and reorient its terminals.""" if self._orientation == 'h': self.resize(90,30) newpos = (90,15) newneg = (0,15) self._anchorPoints = [newpos, newneg] self._connections = {newpos: self._connections[self._posTerminal], \ newneg: self._connections[self._negTerminal]} self._maxConnections = {newpos: 1, newneg: 1} self._posTerminal = newpos self._negTerminal = newneg elif self._orientation == 'v': self.resize(30,90) newneg = (15,0) newpos = (15,90) self._anchorPoints = [newpos, newneg] self._connections = {newpos: self._connections[self._posTerminal], \ newneg: 
self._connections[self._negTerminal]} self._maxConnections = {newpos: 1, newneg: 1} self._posTerminal = newpos self._negTerminal = newneg # emit the moved signal, this forces our parent to redraw connections # kind of a hack, but it works. self.emit(SIGNAL("moved(QPoint)"), self.pos()) self.update() class SleepyResistorWidget(): def __init__(self, location, attributes, orientation): self._location = (location.x(), location.y()) self._attributes = attributes self._orientation = orientation def create(self, parent): r = ResistorWidget(parent) parent.addWidget(r) r.move(QPoint(self._location[0], self._location[1])) for a in self._attributes: r[a] = self._attributes[a] if r.orientation() != self._orientation: r.flip() r.show() return r def analyse(self, circuit): n = analysis.IdealResistor(nickname=self._attributes['nickname'], resistance=int(self._attributes['resistance'])) circuit.components.append(n) return n class ResistorWidget(ObjectWidget): def __init__(self, parent=None): ObjectWidget.__init__(self, parent) self.resize(90,30) self._firstTerminal = (90,15) self._secondTerminal = (0,15) self['nickname'] = 'r' self['resistance'] = '0' self._anchorPoints = [self._firstTerminal, self._secondTerminal] self._connections = {self._firstTerminal: [], self._secondTerminal: []} self._maxConnections = {self._firstTerminal: 1, self._secondTerminal: 1} # event handlers QObject.connect(self, SIGNAL("flipped()"), self._flipped) def makeSleepy(self): return SleepyResistorWidget(self.pos(), self._attributes, self._orientation) def paintEvent(self, pevent): """Paints the widget.""" painter = QPainter(self) painter.setRenderHint(QPainter.Antialiasing) painter.setPen(Qt.black) if self._orientation == 'v': painter.rotate(90.0) painter.translate(0.0, -30.0) path = QPainterPath() path.moveTo(0,15) path.lineTo(30,15) path.lineTo(34,5) path.lineTo(38,20) path.lineTo(42,5) path.lineTo(46,20) path.lineTo(50,5) path.lineTo(54,20) path.lineTo(58,5) path.lineTo(60,15) path.lineTo(90,15) 
painter.drawPath(path) painter.setFont(QFont('Sans serif', 7)) painter.drawText(QRect(0,0,90,30), Qt.AlignCenter | Qt.AlignBottom, "%s: %s" % (self['nickname'], self['resistance'])) painter.resetMatrix() self.drawSelectedOutline(painter) def _flipped(self): """Called after the object is flipped, to resize and reorient its terminals.""" if self._orientation == 'h': self.resize(90,30) newfirst = (90,15) newsecond = (0,15) self._anchorPoints = [newsecond, newfirst] self._connections = {newsecond: self._connections[self._secondTerminal], \ newfirst: self._connections[self._firstTerminal]} self._maxConnections = {newsecond: 1, newfirst: 1} self._secondTerminal = newsecond self._firstTerminal = newfirst elif self._orientation == 'v': self.resize(30,90) newsecond = (15,0) newfirst = (15,90) self._anchorPoints = [newsecond, newfirst] self._connections = {newsecond: self._connections[self._secondTerminal], \ newfirst: self._connections[self._firstTerminal]} self._maxConnections = {newsecond: 1, newfirst: 1} self._secondTerminal = newsecond self._firstTerminal = newfirst # emit the moved signal, this forces our parent to redraw connections # kind of a hack, but it works. 
self.emit(SIGNAL("moved(QPoint)"), self.pos()) self.update() class SleepyShockleyWidget(): def __init__(self, location, attributes, orientation): self._location = (location.x(), location.y()) self._attributes = attributes self._orientation = orientation def create(self, parent): vs = ShockleyWidget(parent) parent.addWidget(vs) vs.move(QPoint(self._location[0], self._location[1])) for a in self._attributes: vs[a] = self._attributes[a] if vs.orientation() != self._orientation: vs.flip() vs.show() return vs def analyse(self, circuit): n = analysis.ShockleyDiode(nickname=self._attributes['nickname'], \ saturation_current=float(self._attributes['saturation_current']), \ thermal_voltage=float(self._attributes['thermal_voltage']), \ e_coeff=float(self._attributes['e_coeff'])) circuit.components.append(n) return n class ShockleyWidget(ObjectWidget): """ObjectWidget subclass representing a shockley diode. Attributes: saturation_current=1e-12, thermal_voltage=25.85e-03, e_coeff=1.00 *Widget changes size based on orientation. 
""" def __init__(self, parent=None): ObjectWidget.__init__(self, parent) self.resize(90,30) self._posTerminal = (90,15) self._negTerminal = (0,15) self['nickname'] = 'n' self['saturation_current'] = '1e-12' self['thermal_voltage'] = '25.85e-3' self['e_coeff'] = '1.00' self._anchorPoints = [self._posTerminal, self._negTerminal] self._connections = {self._posTerminal: [], self._negTerminal: []} self._maxConnections = {self._posTerminal: 1, self._negTerminal: 1} # event handlers QObject.connect(self, SIGNAL("flipped()"), self._flipped) def makeSleepy(self): return SleepyShockleyWidget(self.pos(), self._attributes, self._orientation) def paintEvent(self, pevent): """Paints the widget.""" painter = QPainter(self) painter.setRenderHint(QPainter.Antialiasing) painter.setPen(Qt.black) if self._orientation == 'v': painter.rotate(90.0) painter.translate(0.0, -30.0) painter.setFont(QFont('Sans serif', 7)) painter.drawText(QRect(0,0,90,30), Qt.AlignCenter | Qt.AlignTop, "%s" % (self['nickname'])) path = QPainterPath() path.moveTo(0,15) path.lineTo(33,15) path.moveTo(33,5) path.lineTo(33,25) path.lineTo(55,15) path.lineTo(33,5) path.moveTo(55,5) path.lineTo(55,25) path.moveTo(55,15) path.lineTo(90,15) painter.drawPath(path) painter.resetMatrix() self.drawSelectedOutline(painter) def _flipped(self): """Called after the object is flipped, to resize and reorient its terminals.""" if self._orientation == 'h': self.resize(90,30) newpos = (90,15) newneg = (0,15) self._anchorPoints = [newpos, newneg] self._connections = {newpos: self._connections[self._posTerminal], \ newneg: self._connections[self._negTerminal]} self._maxConnections = {newpos: 1, newneg: 1} self._posTerminal = newpos self._negTerminal = newneg elif self._orientation == 'v': self.resize(30,90) newneg = (15,0) newpos = (15,90) self._anchorPoints = [newpos, newneg] self._connections = {newpos: self._connections[self._posTerminal], \ newneg: self._connections[self._negTerminal]} self._maxConnections = {newpos: 1, newneg: 
1} self._posTerminal = newpos self._negTerminal = newneg # emit the moved signal, this forces our parent to redraw connections # kind of a hack, but it works. self.emit(SIGNAL("moved(QPoint)"), self.pos()) self.update()
UTF-8
Python
false
false
2,014
8,211,977,505,630
1fbf8b284d585bd9582ee9b001f8b76e9cdf80cd
ebf7306bba2bdac746454a4b5099ab54c0aeba41
/server/app.py
e35eadf2f17e625982c1f4576231f3991cf4782d
[]
no_license
vladimir-myskov/web-music-switcher
https://github.com/vladimir-myskov/web-music-switcher
d84877a55b90d2ea4a4284911dfc5ba2e999b3ad
83326f6e4d44d7e544a77b707faeeac52c83e0e1
refs/heads/master
2020-05-07T08:53:24.170317
2012-10-01T09:27:15
2012-10-01T09:27:15
6,031,462
0
1
null
null
null
null
null
null
null
null
null
null
null
null
null
import os import json import tornado.ioloop import tornado.web import tornado.websocket from sockjs.tornado import SockJSRouter, SockJSConnection pages = {} def register_audio_page(key, page, handler): if not key in pages: page["handler"] = handler pages[key] = page return page def unregister_audio_page(key): del pages[key] class PagesHandler(tornado.web.RequestHandler): def get(self): self.write(str(pages)) class SeniorHandler(tornado.web.RequestHandler): def get(self, key): self.render("senior.html", key=key) class AudioPageHandler(SockJSConnection): def open(self): print "OPEN" pass def on_message(self, message): print message message = json.loads(message) method, data = "on_"+ message["event"], message["data"] try: getattr(self, method)(data) except: pass def on_close(self): unregister_audio_page(self.page["key"]) def on_register(self, data): print data key = data["key"] self.page = register_audio_page(key, data, self) def on_page_settings(self, data): for key,value in data.iteritems(): self.page[key] = value def on_senior_prev(self, data): pages[data["key"]]["handler"].send({ "event":"audio_prev", "data": {} }) def on_senior_next(self, data): pages[data["key"]]["handler"].send({ "event":"audio_next", "data": {} }) settings = { "static_path": os.path.join(os.path.dirname(__file__), "static") } AudioPageRouter = SockJSRouter(AudioPageHandler, '/websocket') application = tornado.web.Application([ (r"/pages", PagesHandler), #(r"/websocket",AudioPageHandler), (r"/senior/(.*)",SeniorHandler), ]+AudioPageRouter.urls, debug=True, **settings) if __name__ == "__main__": application.listen(8888) tornado.ioloop.IOLoop.instance().start()
UTF-8
Python
false
false
2,012
4,483,945,889,638
3a587b6716f7bb074dd428a52960dbe9a1303d5a
e59db7b595e3797ed212868f38d6d0a77395b776
/decommission.py
52580c59ce272c7e03f37ca6a563a7782c2c52d3
[ "GPL-3.0-only", "GPL-1.0-or-later", "GPL-3.0-or-later" ]
non_permissive
9apps/ReDiS
https://github.com/9apps/ReDiS
a157a7fba02fe58c0c431c010a471103e3ca4fb1
770b3f2bff71ab09a4ecb766235efbc853b11051
refs/heads/master
2016-09-09T12:22:11.585879
2012-07-26T14:35:09
2012-07-26T14:35:09
3,152,308
2
2
null
false
2012-07-25T07:40:20
2012-01-11T08:49:33
2012-07-25T07:40:19
2012-07-25T07:40:18
164
null
null
null
Python
null
null
# Copyright (C) 2011, 2012 9apps B.V. # # This file is part of Redis for AWS. # # Redis for AWS is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Redis for AWS is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Redis for AWS. If not, see <http://www.gnu.org/licenses/>. import os, sys import json, urllib2 from boto.ec2.connection import EC2Connection from boto.ec2.regioninfo import RegionInfo import administration, backup from host import Host from events import Events try: url = "http://169.254.169.254/latest/" userdata = json.load(urllib2.urlopen(url + "user-data")) except Exception as e: print e exit( "We couldn't get user-data or other meta-data...") # we are going to work with local files, we need our path path = os.path.dirname(os.path.abspath(__file__)) def delete_monitor(): os.system( "rm {0}/etc/monit/data".format(path)) def decommission(key, access, cluster, persistence="no"): events = Events(key, access, cluster) node = Host(cluster, events).get_node() def log(message, logging='warning'): events.log(node, 'Decommission', message, logging) log('start dommissioning', 'info') # make a last backup if "no" != persistence: log('make last backups, first RDB', 'info') # take the latest RDB and move it to S3 rdb = backup.put_RDB(key, access, cluster, 'monthly') administration.set_RDB(key, access, cluster, rdb) # make a last snapshot log('and now a snapshot', 'info') snapshot = backup.make_snapshot(key, access, cluster, 'monthly') administration.add_snapshot(key, access, cluster, snapshot) delete_monitor() # we don't have to get 
rid any the volume, it is deleted on termination # change to the default (no persistence) log('remove redis.conf', 'info') os.system("/bin/rm -f /etc/redis/redis.conf") # and empty the cron as well #log('empty the cron', 'info') #os.system("/bin/echo | /usr/bin/crontab") # make sure we make a clean AMI, with all monit checks monitored log("finally, monitor all (monit), but 'redis' and slave", 'info') os.system("/usr/bin/monit unmonitor redis") os.system("/usr/bin/monit unmonitor slave") if __name__ == '__main__': import os, sys try: persistence = userdata['persistence'] except: persistence = None # what is the domain to work with name = os.environ['REDIS_NAME'].strip() zone = os.environ['HOSTED_ZONE_NAME'].rstrip('.') # the name (and identity) of the cluster (the master) cluster = "{0}.{1}".format(name, zone) decommission(sys.argv[1], sys.argv[2], cluster, persistence=persistence)
UTF-8
Python
false
false
2,012
10,780,367,945,729
a10736dba74f60f3a7a23a9ede547f40de6a2a5e
648893482140747100f9efc5067fb7d9d72d1c11
/mtget.py
86f56bd297fd4b2f94e8ec497f2e28c248f173c8
[]
no_license
4poc/mtget
https://github.com/4poc/mtget
349bbf77d9d2987d3f6e7071e1b427814587325a
625409aeb58bc20f28410f8ea12dfc42be52555c
refs/heads/master
2021-01-01T19:01:05.594489
2012-11-07T22:13:22
2012-11-07T22:13:22
499,285
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/env python # -*- coding: utf-8 -*- # ZDF Mediathek Download/Streaming Skript # v0.5.3 <[email protected]> http://apoc.sixserv.org/ # Stand: 2009-12-22 # Artikel: http://sixserv.org/2009/12/21/mtgetzdf-mediathek-downloadstream/ # Sollte auf jeder standard Python installation laufen, wenn nicht mailt mir bitte :) # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. ## # "Pseudo" constants for download and streaming commands # # %URL% and %OUTFILE% will be replaced. If you notice buffering lags in # streaming mode increase the cache size. 
# # alternativly: CMD_DOWNLOAD='mmsrip "--output=%OUTFILE%" "%URL%"' CMD_DOWNLOAD = 'mplayer -prefer-ipv4 -noframedrop -dumpfile "%OUTFILE%" -dumpstream -playlist "%URL%"' CMD_STREAM = 'mplayer -fs -zoom -display :0.0 -prefer-ipv4 -cache 2000 -playlist "%URL%"' # used for url constructing URL_BASE = "http://www.zdf.de" # fixxed enums for mode DOWNLOAD = 0 STREAM = 1 # default settings that can change via the options quality = 2 # DSL X000 (1k or 2k is currently supported) mode = STREAM # streaming the videos per default search = None maxr = 10 # maximum results to proceed interactive = False # interactive video and channel selection verbose = False directory = "./" title_filename = False ignore_channel = False # ignoriert kanaele in suchergebnissen colors = True # aktiviert kursiv und fettschrift in select_entries() import getopt import sys import string import re import urllib import os import htmlentitydefs ## # thanks to Fredrik Lundh for this function: # http://effbot.org/zone/re-sub.htm#unescape-html ## # Removes HTML or XML character references and entities from a text string. # # @param text The HTML (or XML) source text. # @return The plain text, as a Unicode string, if necessary. def unescape(text): def fixup(m): text = m.group(0) if text[:2] == "&#": # character reference try: if text[:3] == "&#x": return unichr(int(text[3:-1], 16)) else: return unichr(int(text[2:-1])) except ValueError: pass else: # named entity try: text = unichr(htmlentitydefs.name2codepoint[text[1:-1]]) except KeyError: pass return text # leave as is return re.sub("&#?\w+;", fixup, text) ## # Gather all entries in url # # Load url, parses for videos or channels including metadata. Proceed with # next page if necessary until <maxr> entries are found or end is reeched. 
# # The returning list includes dictionary entries for each found video or channel, # in the following format: # {'id': ID, 'type': TYPE, 'url': URL, 'info': list(infoA, infoB, ...)} # # @param string mediathek url # @return list def gather_entries(url): global verbose, URL_BASE, maxr, ignore_channel entry_count = maxr entries = [] while True: # laden der url inhalte => contents if verbose: print " [+] Gathering url: "+url try: url = re.sub('&amp;', '&', url) site = urllib.urlopen(url) contents = site.read() except: print "Error in retriving url(%s)" % url sys.exit(2) if verbose: print " [+] Searching page for videos/kanaele" # die beiden regex's matchen auf Videos _und_ Kanaele # 1ter ist url und titel 2ter ist url und untertitel url sollte gleich sein found=[] matches = re.findall('<p><b><a href="([^"]+)">([^<]+)<br', contents) matches += re.findall('class="grey"><a href="([^"]+)".?>([^<]+)<\/a><\/p>', contents) for match in matches: found_url = match[0] found_type = '' found_id = None # je nach format der gefundenen url wird type gesetzt zu video... 
video_match = re.match('/ZDFmediathek/beitrag/video/([0-9]+)/', found_url) if video_match: found_type = 'video' found_id = video_match.group(1) # oder 'kanaluebersicht' if not ignore_channel: kanaluebersicht_match = re.match('/ZDFmediathek/kanaluebersicht/aktuellste/([0-9]+)', found_url) if kanaluebersicht_match: found_type = 'kanal' found_id = kanaluebersicht_match.group(1) # nur videos oder kanal urls werden berücksichtigt, bilderstrecken # und interaktive inhalte werden ignoriert if found_id: try: found_info = [ match[1] ] # search for duplicate entry in found for item in found: # if found just add found data to str data if item['id'] == found_id: item['info'] += found_info break else: # yeah finally, first time using this python feature :) # wurde die id _nicht_ gefunden baue neues dict found += [{'id': found_id, 'type': found_type, 'url': found_url, 'info': found_info}] except IndexError: # sollte beim debugging gut helfen if verbose: print " [+] IndexError in parsing! Payload: "+match+"/"+id_match if verbose: print " [+] FOUND: %d entries" % len(found) # verschiebt "max result" gefundene einträge nach entries for item in found: entries += [item] entry_count -= 1 if entry_count <= 0: break # break if no next pages if not 'Nutzen Sie unsere Suchfilter' in contents: break # # proceed with next page # next_match = re.findall('<a href="([^"]+)" class="forward">Weiter</a>', contents) if not next_match: next_match = re.findall('<a href="([^"]+)" class="weitereBeitraege">Weitere Beitr&auml;ge laden.<\/a>', contents) if entry_count > 0 and len(next_match) > 0: if verbose: print " [+] Found Next Link!" 
url = next_match[0] if not 'http://' in url: url = URL_BASE+url else: entry_count = 0 if entry_count <= 0: break if verbose: print " [+] entry_count: %d" % entry_count return entries ## # Print and makes user selection, return url list # # The parameter entries format is the same returned from gather_entries(), # the function prints the entries and if the interactive setting is True the # the user can enter a selection of entries, the method generates a list # of all selected entries and returns a url list. ["<URL A>", "<URL B>", ...] # # @see gather_entries # @param list including dictionaries in gather_entries format # @return list def select_entries(entries): global URL_BASE, verbose, interactive, colors # print numeric list, create selected list with urls selected = [] i = 1 for item in entries: if len(item) != 4: if verbose: print " [+] Video Item Error! (Wrong List structure!)" next url = item['url'] if verbose: print " == > %s" % url print "%d : (%s)" % (i, string.capitalize(item['type'])) # vertausche "kategorie" mit titel, das alles mit info is ein wenig # unstrukturiert vll mal neu schreiben if len(item['info']) >= 2: (item['info'][0], item['info'][1]) = (item['info'][1], item['info'][0]) for idx, info in enumerate(item['info']): info = unescape(info) if colors and idx == 0: print "\t\x1B[3m%s\x1B[0m" % info elif colors and idx == 1: print "\t\x1B[1m%s\x1B[0m" % info elif colors and idx == 2: print "\t\x1B[3m(%s)\x1B[0m" % info else: # no colors: print "\t%s" % info print if not item['info'][0]: title = None else: title = item['info'][0] selected += [URL_BASE+url] i+=1 if interactive: print "Select Videos to play(space seperated list):" print " ===> ", sel = sys.stdin.readline()[:-1] sel_idx = sel.split(' ') new_selected=[] for idx, t in enumerate(selected): if str(idx+1) in sel_idx: new_selected += [t] selected = new_selected print print "+----------------------------------------------------------+" print return selected ## # Gather video link, parses 
for asx and execute cmd # # The function loads the given link, parses for a asx link in the given quality # setting(DSL 1000 / DSL 2000) and execute stream or download command according # to the mode setting. # # @param string def proceed_video(url): global mode, verbose, directory if verbose: print " [+] Proceed Video URL: Gathering video url: "+url try: url = re.sub('&amp;', '&', url) site = urllib.urlopen(url) contents = site.read() except: print "Error in retriving url(%s)" % url sys.exit(2) asx_match = re.findall('DSL %d000 <a href="(.*asx)"' % quality, contents) if len(asx_match) <= 0: return False asx = asx_match[0] if mode == STREAM: cmd = re.sub('%URL%', asx, CMD_STREAM) if verbose: print " [+] Execute Shell Command: "+cmd os.system(cmd) else: filename = re.findall("/([^\/]+)\.asx", asx)[0] + ".wmv" if title_filename: # nur wenn der cli parameter gesetzt ist title_match = re.findall('<h1 class="beitragHeadline">([^<]+)</h1>', contents) if title_match: title = title_match[0] # convert space title = re.sub(' ', "-", title) # strip all not alpha title = re.sub('[^a-zA-Z0-9-]', '', title) title = re.sub('[-]+', '_', title) filename = title + '.wmv' cmd = re.sub('%OUTFILE%', directory+filename, CMD_DOWNLOAD) cmd = re.sub('%URL%', asx, cmd) if verbose: print " [+] Execute Shell Command: "+cmd os.system(cmd) ## # print usage screen and exit def usage(): print """ZDF Mediathek Download/Streaming Skript v0.5 <[email protected]> http://apoc.sixserv.org/ Stand: 2009-12-20 Syntax: %s <URL/ID> [OPTIONS] <URL/ID> mediathek video/kanal url oder id -1 qualitaet DSL 1000 -2 qualitaet DSL 2000 (Standard) -m, --mode <d/s> download(d) oder streaming(s) -d, --dir <directory> das verzeichnis wohin gespeichert werden soll(.) 
-t, --title benutzt nicht den stream dateinamen sondern titel -s, --search <topic> suche in der mediathek -l, --maxr <max> wieviele ergebnisse verarbeiten(suche/kategorie) -c, --ignore-channel ignoriert kanaele --no-colors deaktiviert die kursiv und fettschrift -i interaktiv, auswahl der zu spielenden videos -v erweiterte ausgabe, zu debugging zwecken -h, --help zeigt diese hilfe """ % sys.argv[0] # # Parsing command line arguments # try: opts, args = getopt.getopt(sys.argv[1:], "12m:d:ts:l:civh", ["mode=", "dir=", "title", "search=", "maxr=", "ignore-channel", "no-colors", "help"]) except getopt.GetoptError, err: print str(err) usage() sys.exit(2) # # Change default settings according to the parameters # try: for o, a in opts: if o in ("-h", "--help"): usage() sys.exit() elif o in ("-1"): quality = 1 elif o in ("-2"): quality = 2 elif o in ("-m", "--mode"): if a != "d" and a != "s": print "mode d or s!" sys.exit() if a == "d": mode = DOWNLOAD else: mode = STREAM elif o in ("-d", "--dir"): directory = a if not os.path.isdir(directory): print "Error: No Directory!" sys.exit() # missing / ? if directory[-1:] != '/': directory += '/' elif o in ("-t", "--title"): title_filename = True elif o in ("-s", "--search"): search = a elif o in ("-l", "--maxr"): maxr = int(a) elif o in ("-c", "--ignore-channel"): ignore_channel = True elif o in ("--no-colors"): colors = False elif o in ("-i"): interactive = True elif o in ("-v"): verbose = True else: assert False, "unhandled option" except ValueError: print "Error in parsing parameter types." sys.exit(2) # # Print usage screen if url is missing # if len(sys.argv) <= 1: usage() exit # # Assign url or id variable # url_id = sys.argv[-1] # # Replace url_id with search url if seach option is given # if search: print "Searching... 
"+search url_id = "http://www.zdf.de/ZDFmediathek/suche?sucheText=%s&offset=0&flash=off" % urllib.quote_plus(search) if verbose: print " [+] Search URL: %s" % url_id # # Handling video ID # if re.match("^[0-9]+$", url_id): if verbose: print " [+] Proceed with Id: %s" % url_id proceed_video(URL_BASE+"/ZDFmediathek/beitrag/video/%s/?flash=off" % url_id) # # Handling video or channel url # elif re.match("^http:", url_id): if verbose: print " [+] Proceed with URL: %s" % url_id if "#" in url_id: url_id = url_id.replace('#', '') # make sure flash is off: if "flash=" in url_id: url_id = re.sub('flash=on', 'flash=off', url_id) else: if "?" in url_id: url_id += "&flash=off" else: url_id += "?flash=off" # # Handle Video URL: # if re.findall("/video/", url_id): proceed_video(url_id) # # Handling Channel or Search URL: # else: # kategorie url z.B. zeige liste/auswahl und abspielen url = url_id proceed_urls = [] while True: entries = gather_entries(url) selection = select_entries(entries) for select_url in selection: if 'kanal' in select_url: if verbose: print " [+] Follow Kanal entry!" url = select_url break else: if verbose: print " [+] Proceed with Video:" proceed_video(select_url) else: break #EOF
UTF-8
Python
false
false
2,012
7,172,595,385,757
dd8e72b32576eb50eaf589eef705cbac0601b3b6
e263d74a2ada7b9bdd9f3adcb8953418e1ee21bf
/gui.py
817e0b89299337213537eaa18ede39a6ee3ca563
[]
no_license
Sebelino/kexjobb
https://github.com/Sebelino/kexjobb
5aa47849c05c4a52fe27205d43c5bdef4ba6f829
7c06cda0143b292780194006bc60303b33b9a6f9
refs/heads/master
2021-01-16T19:33:25.918792
2013-04-12T16:53:08
2013-04-12T16:53:08
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/env python # -*- coding: utf-8 -*- # # namnlös.py # # Copyright 2013 Jonatan Åkesson <jonatan@cbbb> # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, # MA 02110-1301, USA. # # import time import os import subprocess import re from threading import Thread def kod(): s = '' bl = '3000' s = get_active_window_title("") print("" + s) if s.find('Zenia') > 0: bl= '3500' elif s.find('Hej') > 0: print("Hittade Chrome!") bl= '100' elif s.find('Kate') > 0: bl='5' elif s.find('Chrom') > 0: bl='2000' elif s.find('no') > 0: bl='500' #sätter brightness p1 = subprocess.Popen(['echo', bl], stdout=subprocess.PIPE) #p2 = subprocess.Popen(['tee', '/sys/class/backlight/intel_backlight/brightness'], stdin=p1.stdout) p2 = subprocess.Popen(['tee', '/sys/class/backlight/acpi_video0/brightness'], stdin=p1.stdout) p1.stdout.close() output = p2.communicate() #läser actual brightness actual_bright = '' p3 = subprocess.Popen(['cat', '/sys/class/backlight/acpi_video0/brightness'], stdout=subprocess.PIPE) #p3 = subprocess.Popen(['cat', '/sys/class/backlight/intel_backlight/brightness'], stdout=subprocess.PIPE) for line in p3.stdout: actual_bright = line.rstrip() print("actual brightness: " + line) #read keypresses # key_presses = "33" # # skriver till fil klassifieringsfil skriv = "" + key_presses + ",?," + actual_bright + ",?,?,lower." 
f = open('power.test','w') f.write("") f.write(skriv) f.close() #Klassifierar med adaboost lista = boost() print(lista) #höjer eller sänker #sätter brightness #p1 = subprocess.Popen(['echo', bl], stdout=subprocess.PIPE) #p2 = subprocess.Popen(['tee', '/sys/class/backlight/intel_backlight/brightness'], stdin=p1.stdout) #p2 = subprocess.Popen(['tee', '/sys/class/backlight/acpi_video0/brightness'], stdin=p1.stdout) #p1.stdout.close() #output = p2.communicate() time.sleep(0.5) def get_active_window_title(self): root = subprocess.Popen(['xprop', '-root', '_NET_ACTIVE_WINDOW'], stdout=subprocess.PIPE) for line in root.stdout: m = re.search('^_NET_ACTIVE_WINDOW.* ([\w]+)$', line) if m != None: id_ = m.group(1) id_w = subprocess.Popen(['xprop', '-id', id_, 'WM_NAME'], stdout=subprocess.PIPE) break if id_w != None: for line in id_w.stdout: match = re.match("WM_NAME\(\w+\) = (?P<name>.+)$", line) if match != None: return match.group("name") return "active win no" def boost(): lista = [] read = os.popen("icsiboost -S power -C < power.test; echo $?") rad = read.readline() lista = rad.split(" ") return lista while 1==1: kod() root.mainloop()
UTF-8
Python
false
false
2,013
7,490,423,011,246
b5635ba0b0397820b43a45f549a027a9d4586c38
525821586d35422fadad2f4460dd227235884dbb
/djangomako.py
4162846fac7d7b08895b50889d4a308d9f4e68cf
[]
no_license
kk71/djangomako
https://github.com/kk71/djangomako
30eb0495a760a79149c1762730582ce075db1f7b
90f59bdc32a316e62e8e32c948dc0aa630f7558e
refs/heads/master
2016-08-05T04:27:09.877746
2013-11-27T13:54:41
2013-11-27T13:54:41
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# -*- coding: utf-8 -*- ''' django-mako template connection module version 0.1, for django1.5+ ''' #python import from glob import glob #django import from django.http import HttpResponse from django.core.context_processors import csrf from django.conf import settings #mako import from mako.lookup import TemplateLookup djlookup=TemplateLookup(directories=settings.TEMPLATE_DIRS,input_encoding="utf-8") def render_to_string(template_name, dictionary=None, request=None): ''' render a template to a string(like render_to_string django.template.loader) ''' t=djlookup.get_template(template_name) if request!=None: dictionary.update(csrf(request)) page=t.render(**dictionary) return page def render_to_response(template_name, dictionary={}, content_type="text/html", request=None, status=200): ''' a simple http response method just like django's for easy alternativity ''' page=render_to_string(template_name,dictionary,request) return HttpResponse(content=page,content_type=content_type,status=status) def tmpldebug(request,tmpl=""): ''' argument: tmpl:specific template file name. ''' if tmpl=="": t=''' <!DOCTYPE html> <html> <head> <title>djangomako template design mode</title> </head> <body> ''' for tmpldir in settings.TEMPLATE_DIRS: if tmpldir[-1]!="/":tmpldir+="/" t+="<h2>"+tmpldir+"</h2>" for s in glob(tmpldir+"*"): if s[-1:]=="~":continue s=s[len(tmpldir):] t+='<p><a href=\"'+s+'\">'+s+"</a></p>" s+="<br>" t+=''' </body> </html> ''' return HttpResponse(t) else: return render_to_response(tmpl,{})
UTF-8
Python
false
false
2,013
3,444,563,793,200
d79983c0e1a7bbc4b4e3aaffc3f940c6aac7ba17
731f30b6c3a012618b5d5b57dd4af3d65fdd601c
/ROOT/Old Code/Early Testing - Testing of Various Modules/random test code/curve.py
23d2527a8e837faa946b458d0ab925b57518d878
[ "Apache-2.0", "LicenseRef-scancode-unknown-license-reference" ]
non_permissive
PhilipToddCoppola/Honours-Project
https://github.com/PhilipToddCoppola/Honours-Project
4d921653de47a0d623e590654f347f573441ceb0
fe226a197eb2d3448b6ecebb70597cbe097f0f26
refs/heads/master
2020-06-26T08:15:53.487096
2014-12-16T13:55:17
2014-12-16T13:55:17
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from visual import* def R(x): y = -(1.0/4.0)*x**2 + 4 return y dx = 0.5 a = 0.0 b = 3.0 x_axis = curve(pos=[(-10,0,0),(10,0,0)]) y_axis = curve(pos=[(0,-10,0),(0,10,0)]) z_axis = curve(pos=[(0,0,-10),(0,0,10)]) line = curve(x=arange(0,3,.1)) line.color=color.cyan line.radius = .1 line.y = -(1.0/4.0) * (line.x**2) + 4 #scene.background = color.white for i in range(-10, 11): curve(pos=[(-0.5,i),(0.5,i)]) curve(pos=[(i,-0.5),(i,0.5)]) VT = 0 for x in arange(a + dx,b + dx,dx): V = pi * R(x)**2 * dx disk = cylinder(pos=(x,0,0),radius=R(x),axis=(-dx,0,0), color = color.yellow) VT = V + VT print V print "Volume =", VT
UTF-8
Python
false
false
2,014
11,940,009,112,460
c9fc48d1c33612692ed61dbea6561c5995c7a0a0
1d4669fc5788aaf98b1db585687c9365d4c7a3ac
/analyze/imgmix/imgmix.py
f2914c8d63f7fb85ac1fdaf688cd6bf9c0cc0059
[]
no_license
dmpots/hobbes
https://github.com/dmpots/hobbes
fa6787834b261565e14c6b9b34a62f277ae9e10b
cde59d45f2779ed2d363e5f5014b8ccaac1e0121
refs/heads/master
2016-09-05T17:39:54.405454
2012-02-28T22:56:11
2012-02-28T22:56:11
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/env python3 import os import re import sys def find_logs(root): logs = [] for (dirpath, _, filenames) in os.walk(root): for filename in filenames: if re.match(r".*[.]imgmix[.]\d+[.]LOG", filename): logs.append(os.path.join(dirpath, filename)) return logs def read_logs(logs): def clean(l): return os.path.basename(l.strip()) libs = set() for log in logs: with open(log) as f: libs.update(map(clean, f.readlines())) return libs def filter_libs(libs): for lib in libs: # Get rid of anything not a lib if not lib.startswith("lib"): continue # Get rid of Haskell runtime if lib.startswith("libHSrts"): continue # Allow libquantum for SPEC if lib.startswith("libquantum"): yield lib # Get rid of non-Haskell libraries if not lib.startswith("libHS"): continue yield lib def normalize_libs(libs): def normalize(lib): dot = lib.find(".") dash= lib.find("-") if dot == -1 and dash == -1: return lib elif dot == -1: return lib[:dash] elif dash == -1: return lib[:dot] else: return lib[:min(dot,dash)] return map(normalize, libs) def main(argv): if len(argv) != 1: print("usage: imgmix.py <rootdir>") sys.exit(1) root = argv[0] logs = find_logs(root) libs = read_logs(logs) libs = filter_libs(libs) libs = normalize_libs(libs) final_set = sorted(set(libs)) print(" const char* AllowedNamesList[] = {", end="\n ") print(",\n ".join(map(lambda l: '"'+l+'"', final_set))) print(" };") if __name__ == "__main__": main(sys.argv[1:])
UTF-8
Python
false
false
2,012
18,657,337,935,490
721fa3d476c9d062eacf1552e1f849855c4d30f7
d0646ba7b8deef191bd89469836d78e6f65b1180
/sfa/util/threadmanager.py
b47b8186e12d1e9b86a1b0bd892650c73d52977c
[ "LicenseRef-scancode-unknown-license-reference" ]
non_permissive
planetlab/sfa
https://github.com/planetlab/sfa
5b6ca773bf0bbb66d0b0b1f9288a511844abea56
d0f743e245e0bb24d7ed1016bcc6e61d1e558a95
refs/heads/master
2021-01-23T20:13:24.722855
2011-10-17T19:48:52
2011-10-17T19:48:52
1,352,164
1
1
null
null
null
null
null
null
null
null
null
null
null
null
null
# Thread pool helper for SFA: run callables in background threads and collect
# their results/errors through thread-safe queues.
# NOTE(review): this is Python 2 code (old-style `except Exception, e`,
# `print` statements, `from Queue import Queue`).
import threading
import traceback
import time
from Queue import Queue
from sfa.util.sfalogging import logger

def ThreadedMethod(callable, results, errors):
    """
    A function decorator that returns a running thread. The thread
    runs the specified callable and stores the result in the specified
    results queue.

    On exception the traceback is logged and pushed onto the errors
    queue instead; the exception is not re-raised.
    """
    def wrapper(args, kwds):
        class ThreadInstance(threading.Thread):
            def run(self):
                try:
                    results.put(callable(*args, **kwds))
                except Exception, e:
                    logger.log_exc('ThreadManager: Error in thread: ')
                    errors.put(traceback.format_exc())
        thread = ThreadInstance()
        # Thread is started immediately; caller gets the running Thread object.
        thread.start()
        return thread
    return wrapper

class ThreadManager:
    """
    ThreadManager executes a callable in a thread and stores the result in
    a thread safe queue.
    """

    def __init__(self):
        # results/errors are Queue instances, so producer threads and the
        # consumer (get_results/get_errors) need no extra locking.
        self.results = Queue()
        self.errors = Queue()
        self.threads = []

    def run (self, method, *args, **kwds):
        """
        Execute a callable in a separate thread.
        """
        method = ThreadedMethod(method, self.results, self.errors)
        thread = method(args, kwds)
        self.threads.append(thread)

    # Alias: callers may use start() interchangeably with run().
    start = run

    def join(self):
        """
        Wait for all threads to complete.
        """
        for thread in self.threads:
            thread.join()

    def get_results(self, lenient=True):
        """
        Return a list of all the results so far. Blocks until all
        threads are finished.
        If lenient is set to false the error queue will be checked before
        the response is returned. If there are errors in the queue an
        exception (built from the first error's traceback) will be raised.
        """
        self.join()
        results = []
        if not lenient:
            errors = self.get_errors()
            if errors:
                raise Exception(errors[0])

        # Drain the queue; safe since all producer threads have been joined.
        while not self.results.empty():
            results.append(self.results.get())
        return results

    def get_errors(self):
        """
        Return a list of all errors. Blocks until all threads are finished.
        """
        self.join()
        errors = []
        while not self.errors.empty():
            errors.append(self.errors.get())
        return errors

    def get_return_value(self):
        """
        Get the value that should be returned to the client. If there are
        errors then the first error is returned.
        If there are no errors, then the first result is returned.
        """
        # NOTE(review): the implementation appears truncated in this copy —
        # the method currently falls through and returns None, despite the
        # contract described in the docstring. Verify against upstream.

if __name__ == '__main__':
    # Quick manual demo: two well-behaved workers and one (Thread3) that
    # raises by adding strings to ints.
    def f(name, n, sleep=1):
        nums = []
        for i in range(n, n+5):
            print "%s: %s" % (name, i)
            nums.append(i)
            time.sleep(sleep)
        return nums

    def e(name, n, sleep=1):
        nums = []
        # 'n' and 'b' are appended deliberately to trigger a TypeError.
        for i in range(n, n+3) + ['n', 'b']:
            print "%s: 1 + %s:" % (name, i)
            nums.append(i + 1)
            time.sleep(sleep)
        return nums

    threads = ThreadManager()
    threads.run(f, "Thread1", 10, 2)
    threads.run(f, "Thread2", -10, 1)
    threads.run(e, "Thread3", 19, 1)

    #results = threads.get_results()
    #errors = threads.get_errors()
    #print "Results:", results
    #print "Errors:", errors
    # lenient=False makes get_results raise because Thread3 errored.
    results_xlenient = threads.get_results(lenient=False)
UTF-8
Python
false
false
2,011
7,361,573,977,300
e2df286d83369a6994fcc6842aa04ff304e5249b
905a226b397698b528f867ce945a4605195dd81e
/CMGTools/H2TauTau/python/proto/analyzers/WHMMTAnalyzer.py
65b367aa97cd6ec04dcd6c152244707d634617f0
[]
no_license
gitytakahas/cmg-cmssw
https://github.com/gitytakahas/cmg-cmssw
f6dfff8827427676e08e1d6d098d72cc9bc01022
34b51fff47a11ad8a51b3949aaa433ac1ea88b55
refs/heads/CMG_PAT_V5_18_from-CMSSW_5_3_14
2021-07-07T05:30:24.122663
2014-12-16T21:22:34
2014-12-16T21:22:34
16,833,702
0
1
null
true
2015-03-10T17:07:19
2014-02-14T10:05:16
2014-12-16T21:23:00
2015-03-10T17:07:19
594,741
0
1
0
C++
null
null
import operator import math from ROOT import TLorentzVector, Double from CMGTools.RootTools.fwlite.Analyzer import Analyzer from CMGTools.RootTools.analyzers.DiLeptonAnalyzer import DiLeptonAnalyzer from CMGTools.RootTools.fwlite.AutoHandle import AutoHandle from CMGTools.RootTools.statistics.Counter import Counter, Counters from CMGTools.RootTools.physicsobjects.PhysicsObjects import Muon, Tau, GenParticle, Jet from CMGTools.RootTools.physicsobjects.HTauTauElectron import HTauTauElectron as Electron from CMGTools.RootTools.utils.DeltaR import cleanObjectCollection, matchObjectCollection, bestMatch from CMGTools.RootTools.utils.TriggerMatching import triggerMatched ####################################################################3 # # 11 Nov 2013 Y.Takahashi # This analyzer is for WH, EMuTau-channel # ####################################################################3 class WHMMTAnalyzer(Analyzer): # Class needed for the object selections LeptonClass = Muon OtherLeptonClass = Electron TauClass = Tau # Init def __init__(self, cfg_ana, cfg_comp, looperName): # print 'Init for the WHMMTAnalyzer' super(WHMMTAnalyzer,self).__init__(cfg_ana, cfg_comp, looperName) # beginLoop def beginLoop(self): # print 'Init for the beginLoop' super(WHMMTAnalyzer, self).beginLoop() self.counters.addCounter('MMT') count = self.counters.counter('MMT') count.register('all events') count.register('step1') count.register('step2') count.register('step3') def declareHandles(self): super(WHMMTAnalyzer, self).declareHandles() self.handles['electrons'] = AutoHandle( ('cmgElectronSel','','PAT'), 'std::vector<cmg::Electron>') self.handles['muons'] = AutoHandle( ('cmgMuonSel','','PAT'), 'std::vector<cmg::Muon>') self.handles['jets'] = AutoHandle( 'cmgPFJetSel', 'std::vector<cmg::PFJet>' ) self.handles['taus'] = AutoHandle( # ('cmgTauSel','','PAT'), 'std::vector<cmg::Tau>') # ('cmgTauSel','','MUTAUTAU'), 'std::vector<cmg::Tau>') # ('cmgTauSel','','DIMUTAU'), 'std::vector<cmg::Tau>') 
('cmgTauSel','','DIMUTAU'), 'std::vector<cmg::Tau>', fallbackLabel=('cmgTauSel','','MUTAUTAU')) # Muon ################################################# def buildLooseLeptons(self, cmgLeptons, event): '''Build loose muons''' leptons = [] for index, lep in enumerate(cmgLeptons): pyl = self.__class__.LeptonClass(lep) pyl.associatedVertex = event.goodVertices[0] pyl.flag_id = False pyl.flag_iso = False pyl.trig_match = False if pyl.pt() > 10. and abs(pyl.eta()) < 2.4 and \ pyl.looseId() and abs(pyl.dz()) < 0.2 and \ pyl.sourcePtr().innerTrack().hitPattern().numberOfValidPixelHits()>0: leptons.append( pyl ) return leptons def muid(self, pyl): '''check muon ID''' return pyl.tightId() def muiso(self, pyl): '''check muon isolation''' relIso = False if abs(pyl.eta()) < 1.479 and self.testLeg2Iso(pyl, 0.15): relIso = True if abs(pyl.eta()) > 1.479 and self.testLeg2Iso(pyl, 0.1): relIso = True return relIso def buildVetoLeptons(self, cmgLeptons, event): '''Build muons''' leptons = [] for index, lep in enumerate(cmgLeptons): pyl = self.__class__.LeptonClass(lep) pyl.associatedVertex = event.goodVertices[0] if pyl.pt() > 5. and abs(pyl.eta()) < 2.3 and \ self.muid(pyl) and abs(pyl.dz()) < 0.2 and self.testLeg2Iso(pyl, 0.15) and abs(pyl.dB3D()) < 0.2: leptons.append( pyl ) return leptons # Electron ################################################# def buildLooseOtherLeptons(self, cmgOtherLeptons, event): '''Build loose electrons''' otherLeptons = [] for index, lep in enumerate(cmgOtherLeptons): pyl = self.__class__.OtherLeptonClass(lep) pyl.associatedVertex = event.goodVertices[0] pyl.flag_id = False pyl.flag_iso = False pyl.trig_match = False if pyl.pt() > 10. 
and abs(pyl.eta()) < 2.5 and \ pyl.loosestIdForTriLeptonVeto() and abs(pyl.dz()) < 0.2 and pyl.sourcePtr().isGsfCtfScPixChargeConsistent(): otherLeptons.append( pyl ) return otherLeptons def eid(self, pyl): '''check electron ID''' return pyl.mvaForLeptonVeto() def eiso(self, pyl): '''check electron ID''' relIso = False if abs(pyl.eta()) < 1.479 and self.testLeg2Iso(pyl, 0.15): relIso = True if abs(pyl.eta()) > 1.479 and self.testLeg2Iso(pyl, 0.1): relIso = True return relIso def buildVetoOtherLeptons(self, cmgOtherLeptons, event): '''Build electrons for third lepton veto, associate best vertex. ''' otherLeptons = [] for index, lep in enumerate(cmgOtherLeptons): pyl = self.__class__.OtherLeptonClass(lep) pyl.associatedVertex = event.goodVertices[0] if pyl.pt() > 10. and abs(pyl.eta()) < 2.5 and \ pyl.mvaForLeptonVeto() and abs(pyl.dz()) < 0.2 and self.testLeg2Iso(pyl, 0.3): otherLeptons.append( pyl ) return otherLeptons # Tau ################################################# def buildLooseTau(self, cmgLeptons, event): '''Build taus.''' leptons = [] for index, lep in enumerate(cmgLeptons): pyl = self.__class__.TauClass(lep) pyl.associatedVertex = event.goodVertices[0] pyl.flag_id = False pyl.flag_iso = False pyl.decaymode = -999 pyl.ep = -999 pyl.againstELooseArmin = False pyl.againstETight = False pyl.againstELoose = False pyl.againstEMedium = False pyl.againstE2Loose = False pyl.againstE2Medium = False # pyl.againstE0Loose = False # pyl.againstE0Medium = False pyl.againstERaw = -999 pyl.againstE2Raw = -999 pyl.againstE0Raw = -999 pyl.againstECat = -999 pyl.againstE2Cat = -999 # pyl.againstE0Cat = -999 pyl.againstMuLoose = False pyl.againstMuTight = False pyl.mvaisolation = -999 pyl.mvaisolation_loose = False pyl.dBisolation = -999 ### new tau ID ### pyl.byLooseCombinedIsolationDeltaBetaCorr3Hits = False pyl.byMediumCombinedIsolationDeltaBetaCorr3Hits = False pyl.byTightCombinedIsolationDeltaBetaCorr3Hits = False pyl.byCombinedIsolationDeltaBetaCorrRaw3Hits = -999 
pyl.againstMuonLoose2 = False pyl.againstMuonMedium2 = False pyl.againstMuonTight2 = False pyl.againstElectronMVA5category = False pyl.againstElectronLooseMVA5 = False pyl.againstElectronMediumMVA5 = False pyl.againstElectronTightMVA5 = False pyl.againstElectronVTightMVA5 = False pyl.againstMuonLoose3 = False pyl.againstMuonTight3 = False pyl.againstMuonMVALoose = False pyl.againstMuonMVAMedium = False pyl.againstMuonMVATight = False pyl.againstMuonMVARaw = -999 pyl.byIsolationMVA3oldDMwoLTraw = -999 pyl.byLooseIsolationMVA3oldDMwoLT = False pyl.byMediumIsolationMVA3oldDMwoLT = False pyl.byTightIsolationMVA3oldDMwoLT = False pyl.byVTightIsolationMVA3oldDMwoLT = False pyl.byVVTightIsolationMVA3oldDMwoLT = False pyl.byIsolationMVA3oldDMwLTraw = -999 pyl.byLooseIsolationMVA3oldDMwLT = False pyl.byMediumIsolationMVA3oldDMwLT = False pyl.byTightIsolationMVA3oldDMwLT = False pyl.byVTightIsolationMVA3oldDMwLT = False pyl.byVVTightIsolationMVA3oldDMwLT = False pyl.byIsolationMVA3newDMwoLTraw = -999 pyl.byLooseIsolationMVA3newDMwoLT = False pyl.byMediumIsolationMVA3newDMwoLT = False pyl.byTightIsolationMVA3newDMwoLT = False pyl.byVTightIsolationMVA3newDMwoLT = False pyl.byVVTightIsolationMVA3newDMwoLT = False pyl.byIsolationMVA3newDMwLTraw = -999 pyl.byLooseIsolationMVA3newDMwLT = False pyl.byMediumIsolationMVA3newDMwLT = False pyl.byTightIsolationMVA3newDMwLT = False pyl.byVTightIsolationMVA3newDMwLT = False pyl.byVVTightIsolationMVA3newDMwLT = False # old tau ID pyl.decayModeFinding = False pyl.byVLooseCombinedIsolationDeltaBetaCorr = False pyl.byLooseCombinedIsolationDeltaBetaCorr = False pyl.byMediumCombinedIsolationDeltaBetaCorr = False pyl.byTightCombinedIsolationDeltaBetaCorr = False pyl.againstElectronLoose = False pyl.againstElectronMedium = False pyl.againstElectronTight = False pyl.againstElectronDeadECAL = False pyl.againstMuonLoose = False pyl.againstMuonMedium = False pyl.againstMuonTight = False if pyl.pt() > 20 and abs(pyl.eta()) < 2.3 and \ 
pyl.tauID("decayModeFinding") and abs(pyl.dz()) < 0.2: leptons.append( pyl ) return leptons def tauid(self, pyl): '''check tau ID.''' # print 'inside_check', pyl.tauID("againstMuonLoose"), pyl.tauID("againstElectronLooseMVA3") if pyl.tauID("againstMuonLoose") > 0.5 and pyl.tauID("againstElectronLooseMVA3"): # print 'This becomes true !!' return True else: return False def tauiso(self, pyl): '''check tau isolation.''' return self.testLeg1Iso(pyl, None) def buildVetoTau(self, cmgLeptons, event): '''Build taus.''' leptons = [] for index, lep in enumerate(cmgLeptons): pyl = self.__class__.TauClass(lep) pyl.associatedVertex = event.goodVertices[0] if pyl.pt() > 20 and abs(pyl.eta()) < 2.5 and \ pyl.tauID("decayModeFinding") and self.testLeg1Iso(pyl, None) and abs(pyl.dz()) < 0.2: leptons.append( pyl ) return leptons # process ##################################################### def process(self, iEvent, event): # print 'process ongoing!' # import pdb; pdb.set_trace() # import pdb; pdb.set_trace() self.readCollections(iEvent) self.counters.counter('MMT').inc('all events') event.muoncand = self.buildLooseLeptons(self.handles['muons'].product(), event) event.electroncand = self.buildLooseOtherLeptons(self.handles['electrons'].product(), event) event.taucand = self.buildLooseTau(self.handles['taus'].product(), event) cmgJets = self.handles['jets'].product() event.CSVjet = [] for cmgJet in cmgJets: jet = Jet( cmgJet ) if self.testVetoBJet(jet): event.CSVjet.append(jet) event.electroncand, dummpy = cleanObjectCollection(event.electroncand, masks = event.muoncand, deltaRMin = 0.5) # CSV veto electroncand_removebjet = [] muoncand_removebjet = [] for ielectron in event.electroncand: bm, dr2min = bestMatch(ielectron, event.CSVjet) if dr2min > 0.25: electroncand_removebjet.append(ielectron) for imuon in event.muoncand: bm, dr2min = bestMatch(imuon, event.CSVjet) if dr2min > 0.25: muoncand_removebjet.append(imuon) event.electroncand = electroncand_removebjet event.muoncand = 
muoncand_removebjet # event.flag_trigmatched = False # # if not event.flag_trigmatched: # return False # event.cleanelectron = [] # event.cleanmuon = [] for ii in event.electroncand: ii.flag_id = self.eid(ii) ii.flag_iso = self.eiso(ii) # ii.trig_match = True if hasattr(event, 'hltPath'): if self.triggerCheck(event, event.hltPath, ii): ii.trig_match = True # if hasattr(event, 'hltPaths'): # if self.triggerCheck(event, event.hltPaths, ii): # ii.trig_match = True # # for jj in event.muoncand: # if self.returnMass(jj, ii) > 20. and \ # ii.charge()*jj.charge()==1. and \ # self.returnDR(ii, jj) > 0.5: # # flag_add = True # # if flag_add: # ii.flag_id = self.eid(ii) # ii.flag_iso = self.eiso(ii) # event.cleanelectron.append(ii) # # # for ii in event.muoncand: ii.flag_id = self.muid(ii) ii.flag_iso = self.muiso(ii) # ii.trig_match = True # if hasattr(event, 'hltPaths'): # if self.triggerCheck(event, event.hltPaths, ii): # ii.trig_match = True if hasattr(event, 'hltPath'): if self.triggerCheck(event, event.hltPath, ii): ii.trig_match = True # continue # # for jj in event.electroncand: # if self.returnMass(jj, ii) > 20. and \ # ii.charge()*jj.charge()==1. 
and \ # self.returnDR(ii, jj) > 0.5: # # flag_add = True # # if flag_add: # ii.flag_id = self.muid(ii) # ii.flag_iso = self.muiso(ii) # event.cleanmuon.append(ii) # event.electroncand = event.cleanelectron # event.muoncand = event.cleanmuon # idiso_electron = [ie for ie in event.electroncand if self.eid(ie) and self.eiso(ie)] # idiso_muon = [im for im in event.muoncand if self.muid(im) and self.muiso(im)] # if idiso_electron[0].pt() > idiso_muon[0].pt(): # if not (len(event.muoncand)>=1 and len(event.electroncand)>=1 and len(event.taucand)>=1): # print 'YCheck : (m,e,t) = ', len(event.muoncand), len(event.electroncand), len(event.taucand) # return False # lepton1 = [] # Leading lepton # lepton2 = [] # 2nd leading lepton # if not (len(id_electron)>=1 and len(id_muon)>=1): # return False # lepton_type = '' # # if id_electron[0].pt() > id_muon[0].pt(): #e-mu # lepton1 = [ie for ie in id_electron if ie.pt() > 20.] # lepton2 = [im for im in id_muon if im.pt() > 10.] # lepton_type = 'electron' # elif id_electron[0].pt() < id_muon[0].pt(): # lepton1 = [im for im in id_muon if im.pt() > 20.] # lepton2 = [ie for ie in id_electron if ie.pt() > 10.] 
# lepton_type = 'muon' # import pdb; pdb.set_trace() # if not (len(lepton1)==1 and len(lepton2)==1): # return False # self.counters.counter('MMT').inc('1mu + 1e') # # # event.muon = '' # event.electron = '' # # if lepton_type=='muon': # event.muon = lepton1[0] # event.electron = lepton2[0] # elif lepton_type=='electron': # event.electron = lepton1[0] # event.muon = lepton2[0] event.loosetau = [] for itau in event.taucand: itau.decaymode = itau.decayMode() itau.ep = itau.calcEOverP() itau.flag_iso = self.tauiso(itau) itau.flag_id = self.tauid(itau) itau.againstERaw = itau.tauID('againstElectronMVA3raw') itau.againstE2Raw = itau.tauID('againstElectronMVA2raw') itau.againstE0Raw = itau.tauID('againstElectronMVA') itau.againstECat = int(round(itau.tauID('againstElectronMVA3category'))) itau.againstE2Cat = int(round(itau.tauID('againstElectronMVA2category'))) # itau.againstE0Cat = int(round(itau.tauID('againstElectronMVAcategory'))) itau.againstELooseArmin = itau.tauID("againstElectronLoose") itau.againstETight = itau.tauID("againstElectronTightMVA3") itau.againstELoose = itau.tauID("againstElectronLooseMVA3") itau.againstEMedium = itau.tauID("againstElectronMediumMVA3") itau.againstE2Loose = itau.tauID("againstElectronLooseMVA2") itau.againstE2Medium = itau.tauID("againstElectronMediumMVA2") # itau.againstE0Loose = itau.tauID("againstElectronLooseMVA") # itau.againstE0Medium = itau.tauID("againstElectronMediumMVA") itau.againstMuLoose = itau.tauID("againstMuonLoose") itau.againstMuTight = itau.tauID("againstMuonTight") itau.dBisolation = itau.tauID("byCombinedIsolationDeltaBetaCorrRaw3Hits") itau.mvaisolation = itau.tauID("byRawIsoMVA") itau.mvaisolation_loose = itau.tauID('byLooseIsoMVA') # new tau ID itau.byLooseCombinedIsolationDeltaBetaCorr3Hits = itau.tauID("byLooseCombinedIsolationDeltaBetaCorr3Hits") itau.byMediumCombinedIsolationDeltaBetaCorr3Hits = itau.tauID("byMediumCombinedIsolationDeltaBetaCorr3Hits") itau.byTightCombinedIsolationDeltaBetaCorr3Hits = 
itau.tauID("byTightCombinedIsolationDeltaBetaCorr3Hits") itau.byCombinedIsolationDeltaBetaCorrRaw3Hits = itau.tauID("byCombinedIsolationDeltaBetaCorrRaw3Hits") itau.againstMuonLoose2 = itau.tauID("againstMuonLoose2") itau.againstMuonMedium2 = itau.tauID("againstMuonMedium2") itau.againstMuonTight2 = itau.tauID("againstMuonTight2") itau.againstElectronMVA5category = itau.tauID("againstElectronMVA5category") itau.againstElectronLooseMVA5 = itau.tauID("againstElectronLooseMVA5") itau.againstElectronMediumMVA5 = itau.tauID("againstElectronMediumMVA5") itau.againstElectronTightMVA5 = itau.tauID("againstElectronTightMVA5") itau.againstElectronVTightMVA5 = itau.tauID("againstElectronVTightMVA5") itau.againstMuonLoose3 = itau.tauID("againstMuonLoose3") itau.againstMuonTight3 = itau.tauID("againstMuonTight3") itau.againstMuonMVALoose = itau.tauID("againstMuonMVALoose") itau.againstMuonMVAMedium = itau.tauID("againstMuonMVAMedium") itau.againstMuonMVATight = itau.tauID("againstMuonMVATight") itau.againstMuonMVARaw = itau.tauID("againstMuonMVARaw") itau.byIsolationMVA3oldDMwoLTraw = itau.tauID("byIsolationMVA3oldDMwoLTraw") itau.byLooseIsolationMVA3oldDMwoLT = itau.tauID("byLooseIsolationMVA3oldDMwoLT") itau.byMediumIsolationMVA3oldDMwoLT = itau.tauID("byMediumIsolationMVA3oldDMwoLT") itau.byTightIsolationMVA3oldDMwoLT = itau.tauID("byTightIsolationMVA3oldDMwoLT") itau.byVTightIsolationMVA3oldDMwoLT = itau.tauID("byVTightIsolationMVA3oldDMwoLT") itau.byVVTightIsolationMVA3oldDMwoLT = itau.tauID("byVVTightIsolationMVA3oldDMwoLT") itau.byIsolationMVA3oldDMwLTraw = itau.tauID("byIsolationMVA3oldDMwLTraw") itau.byLooseIsolationMVA3oldDMwLT = itau.tauID("byLooseIsolationMVA3oldDMwLT") itau.byMediumIsolationMVA3oldDMwLT = itau.tauID("byMediumIsolationMVA3oldDMwLT") itau.byTightIsolationMVA3oldDMwLT = itau.tauID("byTightIsolationMVA3oldDMwLT") itau.byVTightIsolationMVA3oldDMwLT = itau.tauID("byVTightIsolationMVA3oldDMwLT") itau.byVVTightIsolationMVA3oldDMwLT = 
itau.tauID("byVVTightIsolationMVA3oldDMwLT") itau.byIsolationMVA3newDMwoLTraw = itau.tauID("byIsolationMVA3newDMwoLTraw") itau.byLooseIsolationMVA3newDMwoLT = itau.tauID("byLooseIsolationMVA3newDMwoLT") itau.byMediumIsolationMVA3newDMwoLT = itau.tauID("byMediumIsolationMVA3newDMwoLT") itau.byTightIsolationMVA3newDMwoLT = itau.tauID("byTightIsolationMVA3newDMwoLT") itau.byVTightIsolationMVA3newDMwoLT = itau.tauID("byVTightIsolationMVA3newDMwoLT") itau.byVVTightIsolationMVA3newDMwoLT = itau.tauID("byVVTightIsolationMVA3newDMwoLT") itau.byIsolationMVA3newDMwLTraw = itau.tauID("byIsolationMVA3newDMwLTraw") itau.byLooseIsolationMVA3newDMwLT = itau.tauID("byLooseIsolationMVA3newDMwLT") itau.byMediumIsolationMVA3newDMwLT = itau.tauID("byMediumIsolationMVA3newDMwLT") itau.byTightIsolationMVA3newDMwLT = itau.tauID("byTightIsolationMVA3newDMwLT") itau.byVTightIsolationMVA3newDMwLT = itau.tauID("byVTightIsolationMVA3newDMwLT") itau.byVVTightIsolationMVA3newDMwLT = itau.tauID("byVVTightIsolationMVA3newDMwLT") # old tau ID itau.decayModeFinding = itau.tauID("decayModeFinding") itau.byVLooseCombinedIsolationDeltaBetaCorr = itau.tauID("byVLooseCombinedIsolationDeltaBetaCorr") itau.byLooseCombinedIsolationDeltaBetaCorr = itau.tauID("byLooseCombinedIsolationDeltaBetaCorr") itau.byMediumCombinedIsolationDeltaBetaCorr = itau.tauID("byMediumCombinedIsolationDeltaBetaCorr") itau.byTightCombinedIsolationDeltaBetaCorr = itau.tauID("byTightCombinedIsolationDeltaBetaCorr") itau.againstElectronLoose = itau.tauID("againstElectronLoose") itau.againstElectronMedium = itau.tauID("againstElectronMedium") itau.againstElectronTight = itau.tauID("againstElectronTight") itau.againstElectronDeadECAL = itau.tauID("againstElectronDeadECAL") itau.againstMuonLoose = itau.tauID("againstMuonLoose") itau.againstMuonMedium = itau.tauID("againstMuonMedium") itau.againstMuonTight = itau.tauID("againstMuonTight") # print 'dB, raw, loose', itau.tauID("byCombinedIsolationDeltaBetaCorrRaw3Hits"), 
itau.tauID("byRawIsoMVA"), itau.tauID('byLooseIsoMVA') # print 'ID_check', itau.tauID("againstMuonLoose"), itau.tauID("againstElectronLooseMVA3") # print 'mu_loose, e_loose, e_medium', itau.tauID("againstMuonLoose"), itau.tauID("againstElectronLooseMVA3"), itau.tauID("againstElectronMediumMVA3"), itau.flag_id # if flag_mu_mass and and \ # ((itau.decayMode()==0 and itau.calcEOverP() > 0.2) or (itau.decayMode()!=0)): # itau.flag_id = True # # # if flag_e_mass==False and flag_mu_mass==False and self.tauid(itau): # itau.flag_id = True # flag_e_overlap = False # flag_e_mass = False # # for ii in idiso_electron: # mass_et = self.returnMass(ii, itau) # if mass_et > 71.2 and mass_et < 111.2: # flag_e_mass = True # # if self.returnDR(itau, ii) < 0.5: # flag_e_overlap = True # # if flag_e_overlap: # continue # # # flag_mu_overlap = False # flag_mu_mass = False # # for ii in idiso_muon: # mass_mt = self.returnMass(ii, itau) # if mass_mt > 71.2 and mass_mt < 111.2: # flag_mu_mass = True # # if self.returnDR(itau, ii) < 0.5: # flag_mu_overlap = True # # if flag_mu_overlap: # continue # if self.tauiso(itau): # itau.flag_iso = True # # # if flag_e_mass and itau.tauID("againstElectronMediumMVA3"): # itau.flag_id = True # # # if flag_mu_mass and itau.tauID("againstMuonTight") and \ # ((itau.decayMode()==0 and itau.calcEOverP() > 0.2) or (itau.decayMode()!=0)): # itau.flag_id = True # # # if flag_e_mass==False and flag_mu_mass==False and self.tauid(itau): # itau.flag_id = True event.loosetau.append(itau) event.taucand = event.loosetau # Additional tau veto event.vetotaucand = self.buildVetoTau(self.handles['taus'].product(), event) event.vetomuoncand = self.buildVetoLeptons(self.handles['muons'].product(), event) event.vetoelectroncand = self.buildVetoOtherLeptons(self.handles['electrons'].product(), event) flag_plus = 0 flag_minus = 0 for im in event.muoncand: if im.charge()==1: flag_plus +=1 else: flag_minus +=1 self.counters.counter('MMT').inc('step1') if not (flag_plus >= 2 or 
flag_minus >= 2): return False self.counters.counter('MMT').inc('step2') if not (len(event.muoncand)>=2 and len(event.taucand)>=1): # if not (len(event.taucand)>=1 and len(event.muoncand)>=1 and len(event.electroncand)>=1): return False self.counters.counter('MMT').inc('step3') # idiso_tau = [it for it in event.taucand if (it.flag_id and it.flag_iso)] # if not len(idiso_tau)>=1 : # return False # if not len(lepton3) == 1: # return False # self.counters.counter('MMT').inc('1 e/mu/tau') # event.tau = lepton3[0] # event.M_l2t = self.returnMass(lepton2[0], event.tau) # if self.returnMass(event.muon, event.electron) < 20.: # return False # if self.returnMass(lepton2[0], event.tau) < 20.: # return False # charge requirement # SS for two light leptons # if event.electron.charge()*event.muon.charge()==-1.: # return False # if event.tau.charge()*event.muon.charge()!=-1.: # return False # dR separation # if self.returnDR(event.tau, event.muon) < 0.5: # return False # if self.returnDR(event.tau, event.electron) < 0.5: # return False # if self.returnDR(event.electron, event.muon) < 0.5: # return False # event.loosetaucand, dummpy = cleanObjectCollection(event.loosetaucand, # masks = [event.muon], ## masks = event.muoncand, # deltaRMin = 0.4) # # event.loosetaucand, dummpy = cleanObjectCollection(event.loosetaucand, # masks = [event.electron], ## masks = event.electroncand, # deltaRMin = 0.4) # # event.loosetaucand, dummpy = cleanObjectCollection(event.loosetaucand, # masks = [event.tau], # deltaRMin = 0.4) # # # event.loosemuoncand, dummpy = cleanObjectCollection(event.loosemuoncand, # masks = [event.muon], # deltaRMin = 0.4) # # event.loosemuoncand, dummpy = cleanObjectCollection(event.loosemuoncand, # masks = [event.electron], # deltaRMin = 0.4) # # event.loosemuoncand, dummpy = cleanObjectCollection(event.loosemuoncand, # masks = [event.tau], # deltaRMin = 0.4) # # event.looseelectroncand, dummpy = cleanObjectCollection(event.looseelectroncand, # masks = [event.muon], # 
deltaRMin = 0.4) # # event.looseelectroncand, dummpy = cleanObjectCollection(event.looseelectroncand, # masks = [event.electron], # deltaRMin = 0.4) # # event.looseelectroncand, dummpy = cleanObjectCollection(event.looseelectroncand, # masks = [event.tau], # deltaRMin = 0.4) # NadditionalLepton = len(event.loosetaucand) + len(event.loosemuoncand) + len(event.looseelectroncand) # if NadditionalLepton>=1: # return False # print 'All events passed : ', event.run, event.lumi, event.eventId return True def returnMass(self, obj1, obj2): e4 = TLorentzVector() t4 = TLorentzVector() e4.SetPtEtaPhiM(Double(obj1.pt()), Double(obj1.eta()), Double(obj1.phi()), Double(obj1.mass())) t4.SetPtEtaPhiM(Double(obj2.pt()), Double(obj2.eta()), Double(obj2.phi()), Double(obj2.mass())) return (e4 + t4).M() def returnDR(self, obj1, obj2): deta = obj1.eta() - obj2.eta() dphi = obj1.phi() - obj2.phi() dr2 = deta*deta + dphi*dphi return math.sqrt(dr2) def triggerCheck(self, event, hltPath, leg): flag_pass = False # for itrig in hltPaths: # if self.trigMatched(event, itrig, leg): # flag_pass = True if self.trigMatched(event, hltPath, leg): flag_pass = True return flag_pass def testLeg1Iso(self, tau, isocut): '''if isocut is None, returns true if three-hit iso cut is passed. 
Otherwise, returns true if iso MVA > isocut.''' if isocut is None: # print 'check tau ID ', tau.tauID('byCombinedIsolationDeltaBetaCorrRaw3Hits') # return tau.tauID("byCombinedIsolationDeltaBetaCorrRaw3Hits") < 1.5 # return tau.tauID("byMediumCombinedIsolationDeltaBetaCorr3Hits") return tau.tauID("byLooseCombinedIsolationDeltaBetaCorr3Hits") else: return tau.tauID("byRawIsoMVA")>isocut def testVertex(self, lepton): '''Tests vertex constraints, for mu and tau''' return abs(lepton.dxy()) < 0.045 and \ abs(lepton.dz()) < 0.2 def testLeg2ID(self, muon): '''Tight muon selection, no isolation requirement''' return muon.tightId() and \ self.testVertex( muon ) def testLeg2Iso(self, muon, isocut): '''Tight muon selection, with isolation requirement''' if isocut is None: isocut = self.cfg_ana.iso2 # print muon.relIsoAllChargedDB05, isocut # return muon.relIsoAllChargedDB05()<isocut # print 'relative_isolation = ',muon.relIso(0.5), 'cut = ', isocut return muon.relIso(0.5, 1)<isocut def thirdLeptonVeto(self, leptons, otherLeptons, ptcut = 10, isocut = 0.3) : '''The tri-lepton veto. returns False if > 2 leptons (e or mu).''' vleptons = [lep for lep in leptons if self.testLegKine(lep, ptcut=ptcut, etacut=2.4) and self.testLeg2ID(lep) and self.testLeg2Iso(lep, isocut) ] # count electrons votherLeptons = [olep for olep in otherLeptons if self.testLegKine(olep, ptcut=ptcut, etacut=2.5) and \ olep.looseIdForTriLeptonVeto() and \ self.testVertex( olep ) and \ olep.relIsoAllChargedDB05() < isocut ] if len(vleptons) + len(votherLeptons)> 1: return False else: return True def leptonAccept(self, leptons): '''The di-lepton veto, returns false if > one lepton. e.g. > 1 mu in the mu tau channel''' looseLeptons = [muon for muon in leptons if self.testLegKine(muon, ptcut=15, etacut=2.4) and muon.isGlobalMuon() and muon.isTrackerMuon() and muon.sourcePtr().userFloat('isPFMuon') and #COLIN Not sure this vertex cut is ok... 
check emu overlap #self.testVertex(muon) and # JAN: no dxy cut abs(muon.dz()) < 0.2 and self.testLeg2Iso(muon, 0.3) ] isPlus = False isMinus = False # import pdb; pdb.set_trace() for lepton in looseLeptons: if lepton.charge()<0: isMinus=True elif lepton.charge()>0: isPlus=True else: raise ValueError('Impossible!') veto = isMinus and isPlus return not veto def testVetoBJet(self, jet): # medium csv working point # https://twiki.cern.ch/twiki/bin/viewauth/CMS/BTagPerformanceOP#B_tagging_Operating_Points_for_3 jet.btagMVA = jet.btag("combinedSecondaryVertexBJetTags") return jet.pt()>12. and \ abs( jet.eta() ) < 2.4 and \ jet.btagMVA > 0.8 # def testBJet(self, jet): # # medium csv working point # # https://twiki.cern.ch/twiki/bin/viewauth/CMS/BTagPerformanceOP#B_tagging_Operating_Points_for_3 # jet.btagMVA = jet.btag("combinedSecondaryVertexBJetTags") # # return jet.pt()>20. and \ # abs( jet.eta() ) < 2.4 and \ # jet.btagMVA > 0.898 and \ # self.testJetID(jet) # # # def testJetID(self, jet): # jet.puJetIdPassed = jet.puJetId(wp53x=True) # jet.pfJetIdPassed = jet.looseJetId() # # return jet.puJetIdPassed and jet.pfJetIdPassed ### def trigMatched(self, event, leg, legName): ### '''Returns true if the leg is matched to a trigger object as defined in the ### triggerMap parameter''' ### if not hasattr( self.cfg_ana, 'triggerMap'): ### return True #### else: #### print 'Trigger OK' ### ### ### path = event.hltPath ### print 'path = ', path ### ### triggerObjects = event.triggerObjects ### print 'triggerObjects = ', triggerObjects ### ### filters = self.cfg_ana.triggerMap[ path ] ### print 'filters = ', filters ### ### filter = None ### print 'filter = ', filter ### ### #### import pdb; pdb.set_trace() ### ### if legName == 'leg1': ### filter = filters[0] ### elif legName == 'leg2': ### filter = filters[1] ### else: ### raise ValueError( 'legName should be leg1 or leg2, not {leg}'.format( ### leg=legName ) ) ### ### # JAN: Need a hack for the embedded samples: No trigger 
matching in that case ### if filter == '': #### print 'Jan filter' ### return True ### ### for it in triggerObjects: ### print '(path, filter, obj, hasPath, hasSelection = ', path, filter, it, it.hasPath(path), it.hasSelection(filter) ### ### ### # the dR2Max value is 0.3^2 ### pdgIds = None ### if len(filter) == 2: ### filter, pdgIds = filter[0], filter[1] ### return triggerMatched(leg, triggerObjects, path, filter, ### dR2Max=0.089999, #### dR2Max=0.25, ### pdgIds=pdgIds ) # def trigMatched(self, event, trigpath, leg1, leg2): # '''Returns true if the leg is matched to a trigger object as defined in the # triggerMap parameter''' # if not hasattr( self.cfg_ana, 'triggerMap'): # return True # # # # triggerObjects = event.triggerObjects # filters = self.cfg_ana.triggerMap[ trigpath ] # filter = filters[0] # pdgIds = None # # ## print 'trigger path = ', trigpath ## for it in triggerObjects: ## print '(filter, obj, hasPath, hasSelection = ', filter, it.hasPath(path), it.hasSelection(filter), it # # # # triggerMatched1 = triggerMatched(leg1, triggerObjects, trigpath, filter, # dR2Max=0.089999, # pdgIds=pdgIds ) # # # triggerMatched2 = triggerMatched(leg2, triggerObjects, trigpath, filter, # dR2Max=0.089999, # pdgIds=pdgIds ) # # ## import pdb; pdb.set_trace(); # # # if filter.find('Mu8_Ele17')!=-1: # return triggerMatched1 and triggerMatched2 and leg1.pt() > 10. and leg2.pt() > 20. # elif filter.find('Mu17_Ele8')!=-1: # return triggerMatched1 and triggerMatched2 and leg1.pt() > 20. and leg2.pt() > 10. 
# else: # print 'Unexpected Trigger !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!1' # return False def trigMatched(self, event, trigpath, leg): '''Returns true if the leg is matched to a trigger object''' if not hasattr( self.cfg_ana, 'triggerMap'): return True # print trigpath triggerObjects = event.triggerObjects filters = self.cfg_ana.triggerMap[ trigpath ] filter = filters[0] pdgIds = None flag = triggerMatched(leg, triggerObjects, trigpath, filter, dR2Max=0.089999, pdgIds=pdgIds ) if filter.find('Mu17_Mu8')!=-1 or filter.find('Mu17_TkMu8')!=-1: return flag else: return False
UTF-8
Python
false
false
2,014
7,258,494,777,165
cba1f546d7978d2d7a1f7285789027c60acb45fa
73e02ee1e3537247f51781ce6d2bd4b0aa8f2e93
/dynamodb/batch.py
5da32e8b025af9df052a48699624ff8c3833b4ba
[]
no_license
mulka/boto_mock
https://github.com/mulka/boto_mock
0f946b0844e4e4e81428abd5b2bf3b43cb7680d5
1c22b58fc52485c56a941764b0e1c460e35eb2f5
refs/heads/master
2020-06-06T11:29:56.164071
2012-06-07T03:31:29
2012-06-07T03:31:29
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
class BatchWrite(object): def __init__(self, table, puts=None): self.table = table self.puts = puts or [] class BatchWriteList(list): def __init__(self, layer2): list.__init__(self) self.layer2 = layer2 def add_batch(self, table, puts=None): self.append(BatchWrite(table, puts)) def submit(self): return self.layer2.batch_write_item(self)
UTF-8
Python
false
false
2,012
4,767,413,732,841
4111e3cb7176ac354cf914c24cd0a13fb4cfdf85
ba0ddfca9381ce1a55dabb248128c0f0342c85f1
/scripts/webui/config_tab_tests.py
a676d79e266a64a2e479158538550b1c4a195ec2
[ "LicenseRef-scancode-warranty-disclaimer", "Apache-2.0" ]
non_permissive
mloni/contrail-test
https://github.com/mloni/contrail-test
0237a247d9de41ab838763fde3c4da89e1a409ed
db27c1abae084a22ba5a0f5a6a3212f5480d68e4
refs/heads/master
2020-12-26T01:37:25.284010
2014-11-06T04:20:18
2014-11-06T04:20:18
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# Need to import path to test/fixtures and test/scripts/ # Ex : export PYTHONPATH='$PATH:/root/test/fixtures/:/root/test/scripts/' # # To run tests, you can do 'python -m testtools.run tests'. To run specific tests, # You can do 'python -m testtools.run -l tests' # Set the env variable PARAMS_FILE to point to your ini file. Else it will try to pick params.ini in PWD # import os import fixtures import testtools from contrail_test_init import * from vn_test import * from vm_test import * from connections import ContrailConnections from floating_ip import * from policy_test import * from contrail_fixtures import * from tcutils.wrappers import preposttest_wrapper from testresources import ResourcedTestCase from .webui_sanity_resource import SolnSetupResource from selenium import webdriver from pyvirtualdisplay import Display from selenium.webdriver.common.keys import Keys import time import random from webui_test import * from selenium.webdriver.support.ui import WebDriverWait class ConfigTab( testtools.TestCase, ResourcedTestCase, fixtures.TestWithFixtures): resources = [('base_setup', SolnSetupResource)] def __init__(self, *args, **kwargs): testtools.TestCase.__init__(self, *args, **kwargs) self.res = SolnSetupResource.getResource() self.inputs = self.res.inputs self.connections = self.res.connections self.logger = self.res.logger self.nova_fixture = self.res.nova_fixture self.analytics_obj = self.connections.analytics_obj self.vnc_lib = self.connections.vnc_lib self.quantum_fixture = self.connections.quantum_fixture self.cn_inspect = self.connections.cn_inspect if self.inputs.webui_verification_flag: self.browser = self.connections.browser self.browser_openstack = self.connections.browser_openstack self.delay = 10 self.webui = WebuiTest(self.connections, self.inputs) self.webui_common = WebuiCommon(self.webui) def __del__(self): print "Deleting test_with_setup now" SolnSetupResource.finishedWith(self.res) def setUp(self): super(ConfigTab, self).setUp() if 
'PARAMS_FILE' in os.environ: self.ini_file = os.environ.get('PARAMS_FILE') else: self.ini_file = 'params.ini' def tearDown(self): print "Tearing down test" super(ConfigTab, self).tearDown() SolnSetupResource.finishedWith(self.res) def runTest(self): pass # end runTest @preposttest_wrapper def test_floating_ips(self): '''Test floating ips on config->Networking->Manage Floating IPs page ''' assert self.webui.verify_floating_ip_api_data() return True # end test_floating_ips @preposttest_wrapper def test_networks(self): '''Test networks on config->Networking->Networks page ''' assert self.webui.verify_vn_api_data() return True # end test_networks @preposttest_wrapper def test_ipams(self): '''Test ipams on config->Networking->IP Adress Management page ''' assert self.webui.verify_ipam_api_data() return True # end test_ipams @preposttest_wrapper def test_policies(self): '''Test polcies on config->Networking->Policies page ''' assert self.webui.verify_policy_api_data() return True # end test_policies @preposttest_wrapper def test_service_templates(self): '''Test svc templates on config->Services->Service Templates page ''' assert self.webui.verify_service_template_api_basic_data() return True # end test_service_templates @preposttest_wrapper def test_service_instances(self): '''Test svc instances on config->Services->Service Instances page ''' assert self.webui.verify_service_instance_api_basic_data() return True # end test_service_instances @preposttest_wrapper def test_project_quotas(self): '''Test project quotas on config->Networking->Project Quotas page ''' assert self.webui.verify_project_quotas() return True # end test_project_quotas # end ConfigTab
UTF-8
Python
false
false
2,014
16,990,890,624,808
7e3f7e8007abca5301f65d6fd9150c6178366e6d
098463adcfcc4ab71f09f7b389377dbad6884e8e
/src/base/loader/code/user_code_range.py
ec9321448ad830d2e2c49231e65c990fd505558a
[ "BSD-3-Clause", "LicenseRef-scancode-proprietary-license", "LGPL-2.0-or-later", "MIT" ]
non_permissive
jcai19/ncc_gem5
https://github.com/jcai19/ncc_gem5
46c58172106adb01fdb9f863174b337ca6305352
3ac9d7f9e9eb89faf44e52cce3bb2bc321885f6f
refs/heads/master
2016-09-06T12:03:57.794510
2014-08-09T01:48:58
2014-08-09T01:48:58
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import subprocess import sys import getopt def getUserCodeRange(argv): inputfile = '' outputfile = '' try: opts, args = getopt.getopt(argv,"hi:o:",["ifile=","ofile="]) except getopt.GetoptError: print 'user_code_range.py -i <inputfile> -o <outputfile>' sys.exit(2) for opt, arg in opts: if opt == '-h': print 'user_code_range.py -i <inputfile> -o <outputfile>' sys.exit() elif opt in ("-i", "--ifile"): inputfile = arg elif opt in ("-o", "--ofile"): outputfile = arg command = "llvm-nm --print-size " + inputfile +" > " + inputfile + ".syms" subprocess.call(command, shell=True) fin = open(inputfile + ".syms", 'r') addrRanges = [] for line in fin: lineSplit = line.split() if len(lineSplit) == 4 and (lineSplit[2] == "T" or lineSplit[2] == "t") and (lineSplit[3][0:2] == "_Z" or lineSplit[3] == "main"): start = int(lineSplit[0], 16) end = start + int(lineSplit[1], 16) #addrRanges.append((start, end, int(lineSplit[1], 16), lineSplit[3])) addrRanges.append((start, end)) addrRanges = sorted(addrRanges, key=lambda addrRange: addrRange[0]) retList = [] retList.append(addrRanges[0][0]) retList.append(addrRanges[len(addrRanges)-1][1]) fout = open(outputfile, 'w') fout.write(str(addrRanges[0][0]) + " " + str(addrRanges[len(addrRanges)-1][1])) fin.close() fout.close() command = "rm " + inputfile + ".syms" subprocess.call(command, shell=True) return retList if __name__ == "__main__": getUserCodeRange(sys.argv[1:])
UTF-8
Python
false
false
2,014
6,004,364,300,235
09544a13989d7e9fd230a1a0d88c4bcad5d38cc3
0302d051b460c3b803d0c609b8dd04c76aed1841
/nerd/configuration.py
c058b0799ac7610ee779dc506cc71f5347761c9c
[]
no_license
GendoIkari/nerdcommander
https://github.com/GendoIkari/nerdcommander
204a09e66d610407d7412afcfb29ed4dc9957583
b9f606aeae2e6404f658f9965ab67102ed0c7bb8
refs/heads/master
2015-08-05T09:43:16.971907
2013-02-05T23:27:33
2013-02-05T23:27:33
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import os def preferiteFolders(): return [("Home", os.path.expanduser("~"), "HOME")]
UTF-8
Python
false
false
2,013
2,267,742,752,352
df7aa56b4b5cfdb6c58e009e17100d2c803c096d
c7d2acaf53b11cb552036d994bf409279d5831d7
/example/tclient.py
101ba21a52d7c40820bd75bed65a95ddbff20707
[ "BSD-3-Clause" ]
permissive
jamiesun/pyrad
https://github.com/jamiesun/pyrad
ff665b3464efc8c9926f1dabd9cc27fd5bccc2c8
b97843ec5314ef4cbd408dc13bf0a113f6206337
refs/heads/master
2020-12-25T10:35:46.802080
2012-09-05T09:26:10
2012-09-05T09:26:10
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/env python #coding:utf-8 from twisted.internet import protocol from twisted.internet import reactor from twisted.python import log import sys,socket from pyrad import dictionary from pyrad import host import pyrad import time class RadiusTestClient(host.Host, protocol.DatagramProtocol): def __init__(self, server, authport=1812, acctport=1813, secret="secret", dict=dictionary.Dictionary("dictionary")): host.Host.__init__(self, dict=dict) self.server = server self.authport = authport self.acctport = acctport self.secret = secret self.reply = 0 def startProtocol(self): self.transport.socket.setsockopt(socket.SOL_SOCKET,socket.SO_RCVBUF,1024*10*20) self.transport.connect(self.server, self.authport) self.sendAuth() reactor.callLater(60,self.done) def done(self): times = self.lasttime - self.starttime percount = self.reply /times log.msg("reply:%s"%self.reply) log.msg("reply per second:%s"%percount) reactor.stop() def sendAuth(self): self.starttime = time.time() for i in xrange(1000): req=self.CreateAuthPacket(code=pyrad.packet.AccessRequest, User_Name="test01",secret=self.secret) req["User-Password"] = req.PwCrypt("888888") req["NAS-IP-Address"] = "198.168.8.139" self.transport.write(req.RequestPacket()) sendtimes = time.time() - self.starttime log.msg("sends per second:%s"%(1000/sendtimes)) def datagramReceived(self, datagram, (host, port)): self.reply += 1 self.lasttime = time.time() def main(): log.startLogging(sys.stdout, 0) protocol = RadiusTestClient("198.168.8.8",secret="secret") reactor.listenUDP(0, protocol) reactor.run() if __name__ == '__main__': main()
UTF-8
Python
false
false
2,012
7,206,955,123,144
4405531269689d0cb302f594693699e346826da8
42f97265c1e384860c5ea2e3c36172c3695cf069
/CardData.py
62fa91e37184cf19cc92e38a89315074554f2e66
[]
no_license
pmwheatley/HollywoodStudios
https://github.com/pmwheatley/HollywoodStudios
1a04abf81be939393818345847b766d88a904663
7f21203f3e703b62fa9cfc57c040c7e892471daa
refs/heads/master
2021-01-25T10:34:27.340443
2013-11-02T17:09:04
2013-11-02T17:09:04
13,828,142
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from Cards import * from Constants import * ACTORSDECK = Deck([ ActorCard(0, 'Charlie Chaplin', C, [3000, 6000], [[0, 0], [4, 4]]), ActorCard(1, 'Buster Keaton', C, [2000, 4000], [[0, -1], [2, 0]]), ActorCard(2, 'Mack Sennett', C, [3000, 4000], [[0, 0], [2, 1]]), ActorCard(3, 'Marx Brothers', C, [2000, 4000], [[0, -1], [2, 0]]), ActorCard(4, 'Laurel & Hardy', C, [2000, 5000], [[0, 0], [2, 3]]), ActorCard(5, 'Harold Lloyd', C, [1000, 3000], [[0, 0], [1, 1]]), ActorCard(6, 'Will Rogers', C, [1000, 3000], [[0, 0], [1, 2]]), ActorCard(7, 'Karl Dane', C, [1000, 4000], [[0, -1], [2, 0]]), ActorCard(8, 'Boris Karloff', C, [2000, 5000], [[0, 0], [2, 4]]), ActorCard(9, 'Bela Lugosi', C, [2000, 5000], [[0, -1], [4, 1]]), ActorCard(11, 'Gene Kelly', BD, [2000, 4000], [[0, 0], [1, 3]]), ActorCard(11, 'Fred Astaire', BD, [2000, 5000], [[0, 0], [3, 4]]), ActorCard(12, 'Rex Harrison', BD, [1000, 4000], [[0, 0], [2, 3]]), ActorCard(13, 'Colin Clive', BD, [1000, 3000], [[0, -1], [2, 0]]), ActorCard(14, 'Tyrone Power', BD, [1000, 3000], [[0, 0], [1, 1]]), ActorCard(15, 'John Gilbert', BD, [3000, 5000], [[0, -1], [3, 1]]), ActorCard(16, 'Rudolph Valentino', BD, [3000, 5000], [[0, -1], [3, 0]]), ActorCard(17, 'Cary Grant', BD, [3000, 6000], [[0, 0], [4, 4]]), ActorCard(18, 'Milton Sills', BD, [1000, 3000], [[0, -1], [1, 0]]), ActorCard(19, 'James Stewart', BD, [2000, 5000], [[0, 0], [3, 3]]), ActorCard(20, 'Humphrey Bogart', DM, [3000, 6000], [[0, 0], [3, 5]]), ActorCard(21, 'James Cagney', DM, [2000, 5000], [[0, 0], [3, 4]]), ActorCard(22, 'Edward G. 
Robinson', DM, [1000, 3000], [[0, -1], [1, 0]]), ActorCard(23, 'Gary Cooper', DM, [2000, 4000], [[0, 0], [2, 1]]), ActorCard(24, 'Charles Laughton', DM, [3000, 5000], [[0, 0], [3, 3]]), ActorCard(25, 'Peter Lorre', DM, [1000, 4000], [[0, 0], [2, 2]]), ActorCard(26, 'Charles Boyer', DM, [3000, 4000], [[0, -1], [2, 0]]), ActorCard(27, 'Sterling Hayden', DM, [2000, 4000], [[0, 0], [2, 3]]), ActorCard(28, 'Fred MacMurray', DM, [1000, 3000], [[0, -1], [2, 0]]), ActorCard(29, 'Lon Chaney', DM, [2000, 5000], [[0, -1], [4, 1]]), ActorCard(30, 'Charlton Heston', FH, [3000, 5000], [[0, 0], [2, 4]]), ActorCard(31, 'John Gielgud', FH, [1000, 3000], [[0, 0], [1, 1]]), ActorCard(32, 'Henry Fonda', FH, [1000, 4000], [[0, 0], [2, 3]]), ActorCard(33, 'Spencer Tracy', FH, [1000, 3000], [[0, -1], [1, 1]]), ActorCard(34, 'Errol Flynn', FH, [2000, 5000], [[0, -1], [4, 0]]), ActorCard(35, 'Tom Mix', FH, [2000, 4000], [[0, -1], [2, 0]]), ActorCard(36, 'Douglas Fairbanks', FH, [3000, 6000], [[0, -1], [5, 2]]), ActorCard(37, 'Laurence Olivier', FH, [2000, 4000], [[0, 0], [2, 3]]), ActorCard(38, 'William S. 
Hart', FH, [1000, 3000], [[0, 0], [1, 0]]), ActorCard(39, 'John Barrymore', FH, [3000, 5000], [[0, 0], [3, 3]]), ActorCard(40, 'Mary Pickford', S, [3000, 6000], [[0, -1], [5, 2]]), ActorCard(41, 'Alice Faye', S, [2000, 4000], [[0, 0], [2, 1]]), ActorCard(42, 'Ava Gardner', S, [3000, 5000], [[0, -1], [3, 3]]), ActorCard(43, 'Judy Garland', S, [3000, 5000], [[0, 0], [2, 4]]), ActorCard(44, 'Shirley Temple', S, [1000, 5000], [[0, 0], [4, 1]]), ActorCard(45, 'Lillian Gish', S, [2000, 3000], [[0, 0], [1, 1]]), ActorCard(46, 'Janet Gaynor', S, [2000, 4000], [[0, -1], [2, 0]]), ActorCard(47, 'Ginger Rogers', S, [2000, 4000], [[0, 0], [2, 3]]), ActorCard(48, 'Claudette Colbert', S, [1000, 3000], [[0, 0], [1, 2]]), ActorCard(49, 'Jeanette MacDonald', S, [1000, 3000], [[0, -1], [1, 0]]), ActorCard(50, 'Mae West', SW, [3000, 5000], [[0, -1], [3, 1]]), ActorCard(51, 'Marlene Dietrich', SW, [3000, 5000], [[0, 0], [3, 2]]), ActorCard(52, 'Carole Lombard', SW, [2000, 5000], [[0, 0], [4, 1]]), ActorCard(53, 'Bette Davis', SW, [2000, 4000], [[0, 0], [2, 3]]), ActorCard(54, 'Jean Harlow', SW, [3000, 6000], [[0, 0], [3, 5]]), ActorCard(55, 'Rita Hayworth', SW, [2000, 4000], [[0, 0], [2, 2]]), ActorCard(56, 'Theda Bara', SW, [2000, 4000], [[0, -1], [2, 0]]), ActorCard(57, 'Mary Astor', SW, [1000, 3000], [[0, -1], [1, 2]]), ActorCard(58, 'Nita Naldi', SW, [1000, 3000], [[0, -1], [1, 0]]), ActorCard(59, 'Barbara Stanwyck', SW, [1000, 3000], [[0, 0], [1, 1]]), ActorCard(60, 'Greta Garbo', QL, [3000, 6000], [[1, 0], [5, 2]]), ActorCard(61, 'Ingrid Bergman', QL, [3000, 6000], [[1, 1], [4, 4]]), ActorCard(62, 'Mabel Normand', QL, [2000, 4000], [[1, 0], [2, 0]]), ActorCard(63, 'Irene Dunne', QL, [2000, 4000], [[1, 0], [2, 1]]), ActorCard(64, 'Joan Crawford', QL, [2000, 4000], [[1, 1], [2, 3]]), ActorCard(65, 'Vivien Leigh', QL, [3000, 5000], [[1, 1], [2, 4]]), ActorCard(66, 'Katherine Hepburn', QL, [3000, 6000], [[1, 1], [4, 4]]), ActorCard(67, 'Norma Shearer', QL, [3000, 5000], [[1, 0], [3, 
1]]), ActorCard(68, 'Gloria Swanson', QL, [3000, 5000], [[1, 1], [3, 0]]), ActorCard(69, 'Lauren Bacall', QL, [3000, 6000], [[1, 1], [4, 5]])]) DIRECTORSDECK = Deck([ DirectorCard(0, 'Allan Smithee', 1000, {FILMNOIR: 0, ROMANCE: 0, HORROR: 0, COMEDY: 0, EPIC: 0, SWORDS: 0}, {}), DirectorCard(1, 'Georges Cochrane', 1000, {FILMNOIR: 0, ROMANCE: 1, HORROR: 0, COMEDY: 0, EPIC: 0, SWORDS: 0}, {}), DirectorCard(2, 'Beaumont Smith', 1000, {FILMNOIR: 0, ROMANCE: 0, HORROR: 0, COMEDY: 0, EPIC: 0, SWORDS: 1}, {}), DirectorCard(3, 'Cecil M. Hepworth', 1000, {FILMNOIR: 1, ROMANCE: 0, HORROR: 0, COMEDY: 0, EPIC: 0, SWORDS: 0}, {}), DirectorCard(4, 'Sinclair Hill', 1000, {FILMNOIR: 0, ROMANCE: 0, HORROR: 1, COMEDY: 0, EPIC: 0, SWORDS: 0}, {}), DirectorCard(5, 'Harry Edwards', 1000, {FILMNOIR: 0, ROMANCE: 0, HORROR: 0, COMEDY: 1, EPIC: 0, SWORDS: 0}, {}), DirectorCard(6, 'Raoul Walsh', 2000, {FILMNOIR: 1, ROMANCE: 0, HORROR: 0, COMEDY: 0, EPIC: 0, SWORDS: 1}, {}), DirectorCard(7, 'Frank Wilson', 2000, {FILMNOIR: 1, ROMANCE: 0, HORROR: 0, COMEDY: 0, EPIC: 1, SWORDS: 0}, {}), DirectorCard(8, 'John K. Wells', 2000, {FILMNOIR: 0, ROMANCE: 0, HORROR: 1, COMEDY: 0, EPIC: 0, SWORDS: 1}, {}), DirectorCard(9, 'Joseph Henabery', 2000, {FILMNOIR: 0, ROMANCE: 1, HORROR: 0, COMEDY: 1, EPIC: 0, SWORDS: 0}, {}), DirectorCard(10, 'Edgar Lewis', 2000, {FILMNOIR: 0, ROMANCE: 1, HORROR: 1, COMEDY: 1, EPIC: 0, SWORDS: 0}, {}), DirectorCard(11, 'Clarence Brown', 2000, {FILMNOIR: 0, ROMANCE: 1, HORROR: 0, COMEDY: 0, EPIC: 1, SWORDS: 0}, {}), DirectorCard(12, 'Franklyn Barrett', 2000, {FILMNOIR: 1, ROMANCE: 0, HORROR: 1, COMEDY: 0, EPIC: 0, SWORDS: 0}, {}), DirectorCard(13, 'Busby Berkeley', 3000, {FILMNOIR: 0, ROMANCE: 1, HORROR: 1, COMEDY: 1, EPIC: 0, SWORDS: 0}, {}), DirectorCard(14, 'D.W. 
Griffith', 3000, {FILMNOIR: 1, ROMANCE: 0, HORROR: 0, COMEDY: 0, EPIC: 1, SWORDS: 1}, {}), DirectorCard(15, 'Rouben Mamoulian', 3000, {FILMNOIR: 0, ROMANCE: 1, HORROR: 1, COMEDY: 1, EPIC: 0, SWORDS: 0}, {}), DirectorCard(16, 'Erich Von Stroheim', 3000, {FILMNOIR: 0, ROMANCE: 0, HORROR: 0, COMEDY: 0, EPIC: 3, SWORDS: 0}, {}), DirectorCard(17, 'Tod Browning', 3000, {FILMNOIR: 0, ROMANCE: 0, HORROR: 0, COMEDY: 1, EPIC: 0, SWORDS: 2}, {}), DirectorCard(18, 'Edgar Jones', 3000, {FILMNOIR: 1, ROMANCE: 0, HORROR: 0, COMEDY: 0, EPIC: 0, SWORDS: 1}, {}), DirectorCard(19, 'King Vidor', 4000, {FILMNOIR: 1, ROMANCE: 0, HORROR: 1, COMEDY: 0, EPIC: 1, SWORDS: 1}, {}), DirectorCard(20, 'F.W. Murneau', 4000, {FILMNOIR: 1, ROMANCE: 1, HORROR: 1, COMEDY: 0, EPIC: 1, SWORDS: 0}, {}), DirectorCard(21, 'Elia Kazan', 4000, {FILMNOIR: 1, ROMANCE: 1, HORROR: 0, COMEDY: 0, EPIC: 1, SWORDS: 1}, {}), DirectorCard(22, 'Leslie Goodwins', 4000, {FILMNOIR: 0, ROMANCE: 1, HORROR: 0, COMEDY: 1, EPIC: 0, SWORDS: 1}, {BMOVIEBONUS: 1}), DirectorCard(23, 'Ernst Lubitsch', 4000, {FILMNOIR: 0, ROMANCE: 1, HORROR: 0, COMEDY: 2, EPIC: 0, SWORDS: 0}, {ALSOWRITER: True}), DirectorCard(24, 'Alfred Hitchcock', 4000, {FILMNOIR: 2, ROMANCE: 0, HORROR: 2, COMEDY: 0, EPIC: 0, SWORDS: 0}, {AMOVIEEXTRACOST: -2000}), DirectorCard(25, 'Cecil B. 
DeMille', 5000, {FILMNOIR: 0, ROMANCE: 1, HORROR: 0, COMEDY: 0, EPIC: 3, SWORDS: 0}, {AMOVIEBONUS: 1}), DirectorCard(26, 'Frank Capra', 5000, {FILMNOIR: 0, ROMANCE: 1, HORROR: 0, COMEDY: 2, EPIC: 1, SWORDS: 0}, {ROLLOSCAR: 2}), DirectorCard(27, 'James Whale', 5000, {FILMNOIR: 1, ROMANCE: 0, HORROR: 3, COMEDY: 0, EPIC: 0, SWORDS: 0}, {FILMNOIRROLLCLASSIC: 2, HORRORROLLCLASSIC: 2}), DirectorCard(28, 'John Ford', 5000, {FILMNOIR: 0, ROMANCE: 1, HORROR: 0, COMEDY: 0, EPIC: 0, SWORDS: 3}, {BMOVIEBONUS: 1}), DirectorCard(29, 'Victor Fleming', 5000, {FILMNOIR: 2, ROMANCE: 0, HORROR: 1, COMEDY: 2, EPIC: 0, SWORDS: 1}, {AMOVIEEXTRACOST: 1000, BMOVIEEXTRACOST:1000})]) FILMNOIRDECK = Deck([ScriptCard(0, 'The Big Sleep', FILMNOIR, [DM, SW, C], 4000, 1000, 2, 0, False, False), ScriptCard(1, 'Rififi', FILMNOIR, [BD, DM], 4000, 1000, 2, 0, False, False), ScriptCard(2, 'The Asphalt Jungle', FILMNOIR, [DM, S], 5000, 2000, 3, 1, False, False), ScriptCard(3, 'Touch of Evil', FILMNOIR, [BD, DM], 5000, 2000, 3, 1, False, False), ScriptCard(4, 'The Killing', FILMNOIR, [DM, C], 6000, 3000, 3, 1, False, False), ScriptCard(5, 'The Third Man', FILMNOIR, [DM, SW], 5000, 1000, 3, 0, False, False), ScriptCard(6, 'Double Indemnity', FILMNOIR, [DM, SW], 5000, 2000, 3, 1, False, True), ScriptCard(7, 'Sunset Boulevard', FILMNOIR, [BD, SW, S], 7000, 4000, 4, 2, True, False), ScriptCard(8, 'Strangers on a Train', FILMNOIR, [BD, C], 4000, 1000, 2, 0, False, False), ScriptCard(9, 'Casablanca', FILMNOIR, [DM, BD, S], 7000, 4000, 5, 2, True, True), ScriptCard(10, 'Scarface', FILMNOIR, [DM, SW], 5000, 2000, 2, 0, False, True), ScriptCard(11, 'The Maltese Falcon', FILMNOIR, [DM, SW, DM], 6000, 3000, 4, 2, True, False), ScriptCard(12, 'The Blue Angel', FILMNOIR, [SW, DM], 6000, 3000, 4, 2, False, False), ScriptCard(13, 'Notorious', FILMNOIR, [DM, SW], 5000, 2000, 3, 1, False, False), ScriptCard(14, 'The Public Enemy', FILMNOIR, [DM], 5000, 2000, 3, 1, False, False), ScriptCard(15, 'Key Largo', FILMNOIR, 
[BD, SW, DM], 5000, 2000, 3, 1, False, False)]) ROMANCEDECK = Deck([ ScriptCard(16, 'Golden Earrings', ROMANCE, [SW, BD], 6000, 2000, 3, 0, False, False), ScriptCard(17, 'Romeo and Juliet', ROMANCE, [S, BD], 7000, 3000, 4, 2, False, False), ScriptCard(18, 'The Love Trap', ROMANCE, [S, FH], 5000, 3000, 2, 2, False, False), ScriptCard(19, "It's a Wonderful Life", ROMANCE, [BD, C, S], 9000, 5000, 4, 2, True, False), ScriptCard(20, 'Now, Voyager', ROMANCE, [SW, FH], 5000, 2000, 3, 1, False, False), ScriptCard(21, "Heart o' the Hills", ROMANCE, [QL, BD], 4000, 2000, 2, 1, False, False), ScriptCard(22, 'My Best Girl', ROMANCE, [BD, QL], 6000, 3000, 3, 2, False, False), ScriptCard(23, 'Grand Hotel', ROMANCE, [SW, BD, S], 7000, 3000, 4, 2, True, False), ScriptCard(24, 'Ninotchka', ROMANCE, [QL, BD], 5000, 2000, 3, 1, True, False), ScriptCard(25, 'Small Town Girl', ROMANCE, [S, C], 4000, 1000, 2, 0, False, False), ScriptCard(26, 'The Eagle', ROMANCE, [BD, SW], 4000, 2000, 2, 1, False, False), ScriptCard(27, 'Morocco', ROMANCE, [SW, BD], 5000, 2000, 3, 1, False, True), ScriptCard(28, 'The Shop Around the Corner', ROMANCE, [S, BD], 6000, 3000, 3, 2, False, True), ScriptCard(29, 'The Gilded Lily', ROMANCE, [S, BD, BD], 4000, 1000, 2, 0, False, False), ScriptCard(30, 'No Time for Love', ROMANCE, [S, BD], 6000, 2000, 3, 1, False, True), ScriptCard(31, 'The Philadelphia Story', ROMANCE, [FH, S, BD], 4000, 2000, 2, 1, False, False)]) HORRORDECK = Deck([ ScriptCard(32, 'Bride of Frankenstein', HORROR, [C, SW], 7000, 3000, 3, 2, False, True), ScriptCard(33, 'Faust', HORROR, [DM], 5000, 3000, 2, 2, False, False), ScriptCard(34, 'Freaks', HORROR, [DM, C], 5000, 2000, 2, 0, False, False), ScriptCard(35, 'The Mummy', HORROR, [C], 7000, 4000, 3, 2, False, False), ScriptCard(36, 'The Night of the Hunter', HORROR, [DM, S], 5000, 3000, 2, 2, False, False), ScriptCard(37, 'Dead of Night', HORROR, [C], 8000, 4000, 3, 1, False, False), ScriptCard(38, 'Nosferatu', HORROR, [C, FH], 7000, 4000, 
3, 1, False, False), ScriptCard(39, 'The Unknown', HORROR, [C, DM], 5000, 3000, 2, 2, False, False), ScriptCard(40, 'The Hunchback of Notre Dame', HORROR, [S, C], 7000, 4000, 3, 2, False, True), ScriptCard(41, 'The Invisible Man', HORROR, [DM, S], 5000, 2000, 3, 1, False, False), ScriptCard(42, 'The Raven', HORROR, [DM, C], 6000, 3000, 3, 2, False, False), ScriptCard(43, 'Dracula', HORROR, [C, FH, SW], 6000, 4000, 4, 2, True, False), ScriptCard(44, 'Frankenstein', HORROR, [C, FH, S], 6000, 3000, 3, 2, True, False), ScriptCard(45, 'King Kong', HORROR, [S, FH], 8000, 4000, 4, 2, True, False), ScriptCard(46, 'The Invisible Ray', HORROR, [FH, DM], 5000, 2000, 3, 1, False, False), ScriptCard(47, 'The Phantom of the Opera', HORROR, [S, DM, FH], 6000, 3000, 3, 2, False, True)]) COMEDYDECK = Deck([ ScriptCard(48, 'The Electric House', COMEDY, [S, C], 6000, 3000, 2, 2, False, False), ScriptCard(49, 'The Music Box', COMEDY, [C, C], 5000, 2000, 2, 0, False, False), ScriptCard(50, 'Sherlock, Jr.', COMEDY, [C], 6000, 3000, 3, 2, False, False), ScriptCard(51, 'Limelight', COMEDY, [C, S], 7000, 3000, 4, 2, False, True), ScriptCard(52, 'Arsenic and Old Lace', COMEDY, [C, SW], 6000, 4000, 3, 3, True, False), ScriptCard(53, 'Modern Times', COMEDY, [C, S], 6000, 3000, 3, 2, False, True), ScriptCard(54, "Singin' in the Rain", COMEDY, [C, BD, S], 8000, 4000, 4, 1, True, False), ScriptCard(55, 'The General', COMEDY, [C, S], 6000, 4000, 3, 3, False, False), ScriptCard(56, 'The Circus', COMEDY, [C, C], 5000, 2000, 3, 1, False, False), ScriptCard(57, 'The Great Dictator', COMEDY, [C, C, S], 6000, 3000, 3, 2, False, True), ScriptCard(58, 'Mr. 
Smith Goes to Washington', COMEDY, [BD, S], 5000, 2000, 3, 1, True, False), ScriptCard(59, 'City Lights', COMEDY, [C, SW, BD], 6000, 2000, 3, 0, False, False), ScriptCard(60, "I'm No Angel", COMEDY, [SW, C], 6000, 3000, 3, 2, False, False), ScriptCard(61, 'Cops', COMEDY, [C, S], 7000, 3000, 4, 2, False, False), ScriptCard(62, 'The Tramp', COMEDY, [C], 5000, 2000, 3, 1, False, False), ScriptCard(63, 'The Playhouse', COMEDY, [S, C], 6000, 3000, 2, 2, False, False)]) EPICDECK = Deck([ ScriptCard(64, 'Ben-Hur: A Tale of the Christ',EPIC, [FH, DM, SW, QL], 8000, 3000, 4, 2, True, True), ScriptCard(65, 'Cleopatra', EPIC, [QL, BD, SW], 7000, 3000, 4, 2, True, False), ScriptCard(66, "Hell's Angels", EPIC, [FH, SW, BD], 12000, 7000, 4, 3, True, False), ScriptCard(67, 'David Copperfield', EPIC, [BD, SW, S, FH], 7000, 4000, 3, 1, True, False), ScriptCard(68, 'Paths of Glory', EPIC, [BD, DM, FH], 6000, 3000, 3, 2, True, False), ScriptCard(69, 'The Ten Commandments', EPIC, [FH, S, QL, DM], 9000, 5000, 5, 3, True, True), ScriptCard(70, 'The Wizard of Oz', EPIC, [S, C, QL, C], 8000, 4000, 4, 2, True, True), ScriptCard(71, 'Citizen Kane', EPIC, [BD, SW, FH], 7000, 4000, 4, 2, True, False), ScriptCard(72, 'The King of Kings', EPIC, [FH, S, DM], 6000, 3000, 3, 2, True, False), ScriptCard(73, 'The Great Bank Robbery', EPIC, [DM, BD, FH], 5000, 3000, 2, 2, True, False), ScriptCard(74, 'Birth of a Nation', EPIC, [BD, S, DM], 6000, 3000, 3, 2, True, False), ScriptCard(75, 'The Last Days of Pompei', EPIC, [FH, DM, SW, S], 8000, 5000, 3, 3, True, False), ScriptCard(76, 'The Grapes of Wrath', EPIC, [FH, FH, S], 6000, 3000, 3, 2, True, True), ScriptCard(77, 'The Crusades', EPIC, [FH, DM, SW], 7000, 4000, 3, 3, True, False), ScriptCard(78, 'The African Queen', EPIC, [DM, QL, BD], 6000, 3000, 3, 2, True, True), ScriptCard(79, 'Gone with the Wind', EPIC, [BD, QL, SW, FH], 10000, 6000, 5, 3, True, True)]) SWORDSDECK = Deck([ ScriptCard(80, 'Red Dusk', SWORDS, [SW, DM], 6000, 3000, 3, 2, False, 
False), ScriptCard(81, 'Fighting Caravans', SWORDS, [FH], 5000, 2000, 3, 1, False, False), ScriptCard(82, 'The Four Horsemen of the Apocalypse', SWORDS, [FH, DM, S], 6000, 4000, 3, 3, False, True), ScriptCard(83, 'The Black Pirate', SWORDS, [DM, SW, FH], 4000, 2000, 2, 1, False, False), ScriptCard(84, 'The Sheik', SWORDS, [FH, S], 8000, 3000, 4, 2, True, False), ScriptCard(85, 'The Prince and the Pauper', SWORDS, [FH, SW], 5000, 2000, 3, 1, False, True), ScriptCard(86, 'The Quest of Life', SWORDS, [FH, BD], 4000, 1000, 2, 0, False, False), ScriptCard(87, 'The Adventures of Robin Hood', SWORDS, [FH], 7000, 4000, 4, 3, False, False), ScriptCard(88, 'The Falcon', SWORDS, [BD, FH], 6000, 3000, 3, 2, False, False), ScriptCard(89, 'This Gun for Hire', SWORDS, [SW, FH, DM], 6000, 2000, 3, 0, False, False), ScriptCard(90, 'Jesse James', SWORDS, [FH, BD], 6000, 3000, 3, 2, False, False), ScriptCard(91, "A Rogue's Romance", SWORDS, [FH, SW], 4000, 1000, 2, 0, False, False), ScriptCard(92, 'The Avenging Sword', SWORDS, [FH, SW], 9000, 5000, 4, 3, False, False), ScriptCard(93, 'The Soldier and the Lady', SWORDS, [FH, S], 7000, 4000, 3, 2, True, False), ScriptCard(94, 'The Count of Monte Cristo', SWORDS, [FH, BD, C], 5000, 2000, 3, 1, False, True), ScriptCard(95, 'Adventures of Don Juan', SWORDS, [FH, S, SW], 4000, 2000, 2, 1, False, False), ScriptCard(96, 'The Thief of Bagdad', SWORDS, [FH, C, DM], 5000, 3000, 2, 2, True, False)]) CREWDECK = Deck([ CrewCard(ORDINARYCREW)] * 5 + [CrewCard(GOODCREW)] * 7 + [CrewCard(EXCELLENTCREW)] * 5) WRITERSDECK = Deck([ WriterCard(ORDINARYWRITER)] * 10 + [WriterCard(EXCELLENTWRITER)] * 8)
UTF-8
Python
false
false
2,013
8,993,661,568,168
98b06fff79cbc39dde40f6c9dd5cc0a8b928b9bb
ee53c87481baea8d3184230a59b9ff3317622b20
/my/parse.py
59a4fa63db3c14672f29d49ccdd822a6a7edd4bf
[]
no_license
zbhknight/pscripts
https://github.com/zbhknight/pscripts
b549ad670cf4732fdf9dea427ffa916f9df96cee
2f2ea0c77b0338746bbc21453375cb454a225860
refs/heads/master
2016-08-06T03:48:27.132204
2013-08-12T03:17:43
2013-08-12T03:17:43
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/python import sys import re import datetime import shlex import subprocess import MySQLdb from os import path exIP = ['222.200.191.253', '1.3.6.1', '172.18.41.178'] def getData(filename): try: comm = 'grep -n ========== ' + filename args = shlex.split(comm) lineNum = int(subprocess.check_output(args).split('\n')[-2].split(':')[0]) wcNum = int(subprocess.check_output(['wc', filename]).split()[0]) number = wcNum - lineNum comm = 'tail -n ' + str(number) + ' ' + filename args = shlex.split(comm) data = subprocess.check_output(args).split('\n') except: f = open(filename, 'rb') data = [ line for line in f ] f.close() f = open(filename, 'ab') f.write('='*80+'\n') f.close() result = [] for line in data: unit = parseLine(line) if unit: result.append(unit) return result def parseLine(line): timeP = r'([a-zA-Z]{3}\s\d{1,2}\s(\d{2}:){2}\d{2})' ipP = r'((\d{1,3}\.){3}\d{1,3})' pattern = timeP + r'\s' + ipP + r'.*?' + ipP + r'(.*)' m = re.match(pattern, line) if m: return (m.group(1), m.group(5), m.group(7)) def getTime(string): year = datetime.datetime.today().year time = datetime.datetime.strptime(string+' '+str(year), "%b %d %H:%M:%S %Y") return time def packup(data): final = {} for item in data: if not item[1] in exIP: if final.has_key(item[1]): final[item[1]][1] = getTime(item[0]) final[item[1]][2].append(item[2]) else: final[item[1]] = [getTime(item[0]), 0, []] return final def insertDB(final, filename): db = MySQLdb.connect('localhost', 'root', '8817793', 'payroll') c = db.cursor() for key, argv in final.items(): destIP = path.basename(filename) sourceIP = key startTime = argv[0] endTime = argv[1] comms = argv[2] c.execute('insert into watch_login (sourceIP, destIP, startTime, endTime) values (%s,%s,%s,%s)', (sourceIP, destIP, startTime, endTime)) lastId = c.lastrowid commList = [ (lastId, comm) for comm in comms ] c.executemany('insert into watch_command (ip_id, comm) values (%s, %s)', commList) if __name__ == '__main__': argv = sys.argv[1:] for a in argv: 
data = getData(a) final = packup(data) insertDB(final, a)
UTF-8
Python
false
false
2,013
15,212,774,165,963
42760d3111b43066192f01cf6f2640be2ced904b
d210fa2dfc4ac8a917219e6e9e3632ebce4b2763
/blaze/compute/tests/test_bcolz_compute.py
acd084b3be5cb0652a56d1455b86b131723fbcbe
[ "BSD-3-Clause", "LicenseRef-scancode-unknown-license-reference" ]
non_permissive
chdoig/blaze
https://github.com/chdoig/blaze
53e74cbb31378185ad8385d4ca33c7c772033e22
caa5a497e1ca1ceb1cf585483312ff4cd74d0bda
refs/heads/master
2020-12-24T17:09:00.218435
2014-08-28T18:54:01
2014-08-28T18:54:01
21,960,748
1
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from __future__ import absolute_import, division, print_function import pytest bcolz = pytest.importorskip('bcolz') import numpy as np from pandas import DataFrame from blaze.bcolz import into, chunks from blaze.expr import * from blaze.compute.core import compute b = bcolz.ctable([[1, 2, 3], [1., 2., 3.]], names=['a', 'b']) t = TableSymbol('t', '{a: int32, b: float64}') def test_chunks(): assert len(list(chunks(b, chunksize=2))) == 2 assert (next(chunks(b, chunksize=2)) == into(np.array(0), b)[:2]).all() def test_reductions(): assert compute(t.a.sum(), b) == 6 assert compute(t.a.min(), b) == 1 assert compute(t.a.max(), b) == 3 assert compute(t.a.mean(), b) == 2. assert abs(compute(t.a.std(), b) - np.std([1, 2, 3])) < 1e-5 assert abs(compute(t.a.var(), b) - np.var([1, 2, 3])) < 1e-5 assert compute(t.a.nunique(), b) == 3 assert compute(t.nunique(), b) == 3 assert len(list(compute(t.distinct(), b))) == 3 assert len(list(compute(t.a.distinct(), b))) == 3 def test_selection_head(): b = into(bcolz.ctable, ((i, i + 1, float(i)**2) for i in range(10000)), names=['a', 'b', 'c']) t = TableSymbol('t', '{a: int32, b: int32, c: float64}') assert compute((t.a < t.b).all(), b) == True assert list(compute(t[t.a < t.b].a.head(10), b)) == list(range(10)) assert list(compute(t[t.a > t.b].a.head(10), b)) == [] assert into([], compute(t[t.a + t.b > t.c], b)) == [(0, 1, 0), (1, 2, 1), (2, 3, 4)] assert len(compute(t[t.a + t.b > t.c].head(10), b)) # non-empty assert len(compute(t[t.a + t.b < t.c].head(10), b)) # non-empty def test_selection_isnan(): assert compute(t[t.a.isnan()].count(), b) == 0 assert compute(t[~(t.a.isnan())].count(), b) == 3
UTF-8
Python
false
false
2,014
11,544,872,137,282
e760a22e92b2061a57f28550b4d96630ca71dee4
cfd547b2cf7812d2534a1992e633fcf4a54d5fa6
/TriblerCode/Tribler/Core/API.py
4f75dde9ee35e2d203ffcd37ec7aaa8beb4e62a3
[ "LicenseRef-scancode-unknown-license-reference", "OpenSSL", "LGPL-2.1-only", "LGPL-2.0-or-later", "Python-2.0", "MIT", "LicenseRef-scancode-python-cwi", "LicenseRef-scancode-other-copyleft", "WxWindows-exception-3.1", "LGPL-2.1-or-later", "LicenseRef-scancode-openssl", "LicenseRef-scancode-warranty-disclaimer", "GPL-1.0-or-later", "LicenseRef-scancode-free-unknown", "LicenseRef-scancode-ssleay-windows", "LicenseRef-scancode-mit-old-style", "BitTorrent-1.1", "GPL-2.0-only" ]
non_permissive
thejosh223/cs198mojo
https://github.com/thejosh223/cs198mojo
14f359a8d55a24904aed7381a485f79774bb32dc
4d8d698f28e265ac91c0b1467ef3766cb33a854a
refs/heads/master
2021-01-22T05:28:11.558733
2014-04-02T11:44:00
2014-04-02T11:44:00
10,900,361
1
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# Written by Arno Bakker # see LICENSE.txt for license information # # To use the Tribler Core just do: # from Tribler.Core.API import * # """ Tribler Core API v1.0.0rc5, July 14 2008. Import this to use the API """ # History: # 1.0.0rc5 : Added option to define auxiliary seeding servers for live stream # (=these servers are always unchoked at the source server). # # 1.0.0rc4 : Changed DownloadConfig.set_vod_start_callback() to a generic # event-driven interface. from Tribler.Core.simpledefs import * from Tribler.Core.Base import * from Tribler.Core.Session import * from Tribler.Core.SessionConfig import * from Tribler.Core.Download import * from Tribler.Core.DownloadConfig import * from Tribler.Core.DownloadState import * from Tribler.Core.exceptions import * from Tribler.Core.RequestPolicy import * from Tribler.Core.TorrentDef import * from Tribler.Core.LiveSourceAuthConfig import *
UTF-8
Python
false
false
2,014
7,799,660,630,506
6c9c62c808cb5a52773619403105ddc37b2c6fd6
682f5783b2b00ecb7973220f7163f3b55517fdcd
/application/screenlymanager/urls.py
0b8283b4b85aa877a1946f01cbeff1a416ee4300
[]
no_license
wandercampos/screenly-manager
https://github.com/wandercampos/screenly-manager
205f223f8e56c59d3aecaab04087b3a2000ab221
17b8fa70f474aeb82f68298ff52f2fbc8105e37b
refs/heads/master
2021-01-20T16:41:59.029541
2014-01-16T15:06:21
2014-01-16T15:06:21
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from django.conf.urls import patterns, include, url from django.contrib import admin from django.conf.urls.static import static admin.autodiscover() urlpatterns = patterns('', url(r'^$', 'screenlymanager.views.index', name='index'), url(r'^clients/?$', 'screenlymanager.views.clients', name='clients'), url(r'^client/(?P<pk>[0-9]+)/?$', 'screenlymanager.views.client', name='client-detail'), url(r'^admin/', include(admin.site.urls)), )
UTF-8
Python
false
false
2,014
6,399,501,281,723
78f5d02998de0554f7a0ad16bab3b20643e3152b
d2915c3783bfd4720b56d503b07e9f73d67087eb
/JariSandbox/Tokenization/ExtractBioInferSentences.py
4b530f169e5ed64be2a76f3ff7dae622579a5a32
[]
no_license
arururu/Tdevel
https://github.com/arururu/Tdevel
36f0b3eaea9f689d31aeaf0f572b0f5eb433253d
362a0f3923b2dfdc5ed78a4f18c2478f67d81629
refs/heads/master
2021-05-28T01:12:41.319428
2012-06-26T12:25:48
2012-06-26T12:25:48
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import cElementTree as ElementTree import cElementTreeUtils as ETUtils sentenceFile = None tokenizationFile = None def processDocument(documentElement): global sentenceFile, tokenizationFile sentenceElement = documentElement.find("sentence") sentenceFile.write(sentenceElement.get("text")+"\n") tokenElements = sentenceElement.getiterator("token") isFirst = True for tokenElement in tokenElements: if not isFirst: tokenizationFile.write(" ") tokenizationFile.write( tokenElement.get("text") ) isFirst = False tokenizationFile.write("\n") if __name__=="__main__": # Import Psyco if available try: import psyco psyco.full() print "Found Psyco, using" except ImportError: print "Psyco not installed" sentenceFile = open("BioInferSentences.txt", "wt") tokenizationFile = open("BioInferMedpostTokenization.txt", "wt") filename = "/usr/share/biotext/Tampere_project/PPI_Learning/Data/BioInferAnalysis.xml/BioInferAnalysis.xml" print "Processing documents" ETUtils.iterparse(filename, "document", processDocument) sentenceFile.close() tokenizationFile.close()
UTF-8
Python
false
false
2,012
11,768,210,411,937
64ecd020a41b639fa7ff21d9609b605dc500adb0
424fd2e60f747ba908c87449b0fbf2206f08c4e3
/tools/wordlist.py
a61de815dc7f29bc4bf60918b055eb615bfa7986
[]
no_license
Vishwanath17/android-target
https://github.com/Vishwanath17/android-target
7a5ea1bdb8e54136afeab02fb9e27b8a39d1b90f
a4e7eeed36931ae37b24bfbf9c21390f02619022
refs/heads/master
2016-08-12T10:05:33.214220
2009-09-01T08:25:01
2009-09-01T08:25:01
45,437,932
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/python2.4 """Representation of a dictionary (list) of words.""" import re import string class Dict(list): """A dictionary. Attributes: filename: Original filename of dictionary name: str, Arbitrary name of the list """ def __init__(self, filename=None, name=''): super(Dict, self).__init__(self) self.filename = filename self.name = name if filename is not None: words = file(filename).readlines() # words = map(string.upper, words) words = map(string.strip, words) self.extend(words) def append(self, item): """ Append only non-empty values.""" if len(item): super(Dict, self).append(item) def extend(self, newlist): clean_list = [] for item in newlist: if len(item): clean_list.append(item) super(Dict, self).extend(clean_list) def replace(self, wordlist): del self[:] self.extend(wordlist) def getWordsByLetter(self): word_dict = {} for word in self: first_letter = word[0] if first_letter not in word_dict: word_dict[first_letter] = [] word_dict[first_letter].append(word) return word_dict def getWordsByLength(self, length): wordlist = [] for word in self: if len(word) == length: wordlist.append(word) return wordlist def wordsNotInList(self, wordlist): """Fetch words from 'self' that arent also in 'wordlist'.""" final_list = [] for word in self: if word not in wordlist: final_list.append(word) return final_list def wordsAlsoInList(self, wordlist): """Fetch words from 'self' that are also in 'wordlist'.""" final_list = [] for word in self: if word in wordlist: final_list.append(word) return final_list def filterByRegex(self, regex_str): """Filter list by the supplied regex string.""" new_list = [] regex = re.compile(regex_str, re.I) for word in self: if regex.match(word): new_list.append(word) del self[:] self.extend(new_list) def toUpper(self): newlist = [] for word in self: newlist.append(word.upper()) self.replace(newlist) def filterByWordsNotInList(self, wordlist): """Filter list to words not in the given list.""" newlist = [] for word in self: if word not in 
wordlist: newlist.append(word) self.replace(newlist); def writeToFile(self, filename=None): """Write all words to 'filename'.""" if filename is None: filename = self.name fd = file(filename, 'w') for word in self: fd.write(word + '\n') fd.close()
UTF-8
Python
false
false
2,009
19,043,884,999,184
a83c53720b9bbf54eb742848a6da92697731ac41
88e03e66109adb6325ccace96f37b31e15c5e86c
/docopt/required.py
a64ff10588f2dd1f58d91125fa4e6001486217c6
[]
no_license
abevieiramota/learning-python
https://github.com/abevieiramota/learning-python
53ee5d158af33f627c65a7d3960083a1242713ed
c9dfa37e5dd547ab03d1ff67932ff28be70bfbeb
refs/heads/master
2021-01-10T21:05:48.021976
2014-07-11T20:28:03
2014-07-11T20:28:03
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
"""Required. Usage: required.py (--parametro1=<valor1>) [--parametro2=<valor2>] """ from docopt import docopt args = docopt(__doc__, version="oi") print args
UTF-8
Python
false
false
2,014
5,059,471,493,131
dd9cb1d68fae68dd62c376b0bcff46f3f2b2ca68
6da35308c55fa8192b6c88a2c50f932647ee4229
/py/signal_protocol.py
2b32a01127d4cf5a0ae02abc5ddd4501f760ce45
[]
no_license
billghad/p2p
https://github.com/billghad/p2p
326781048267861237bf0c7f441557fcce72e19c
4808966552dfe2ae668ebc84c9a7ed42492ac9b3
refs/heads/master
2021-01-10T14:09:43.542212
2011-03-17T22:08:50
2011-03-17T22:08:50
47,732,874
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
""" Network protocol module. Converts python messages from/to JSON objects""" import json from collections import namedtuple from config import logs # pylint: disable=E0611 class ProtocolError(Exception): # pylint: disable=C0111 pass class Messages(object): """ Provides common messages for communication and a message to/from JSON converter.""" TERMINATOR = '\0' errors = [ProtocolError.__name__] messages = ['chat', 'error'] Message = namedtuple('Message', 'name args') @staticmethod def chat(message): """ converts chat message to an JSON object. JSON.name = "chat" json.args = message """ return Messages.serialize("chat", message) @staticmethod def error(spec, reason=None): """ converts an error to an JSON object JSON.name = "error" JSON.args = "spec, reason(s)" list """ logs.logger.debug("Message error: %s, reason: %s" % (spec, reason)) try: if spec.__name__ in Messages.errors: return Messages.serialize("error", spec.__name__, reason) else: raise ProtocolError(spec) except AttributeError, error: logs.logger.critical( "Messages.error exception, reason: %s" % error) raise ProtocolError(error) @staticmethod def serialize(name, *args): """ converts input to JSON objects: JSON.name = name JSON.args = args """ logs.logger.debug("serialize: %s, %s, %s", name, args) try: return json.dumps({'name': name, 'args': args}) except (ValueError, TypeError), reason: logs.logger.critical("serializer exception, reason %s" % reason) raise ProtocolError(name) @staticmethod def deserialize(data): """ converts from JSON objects to python dictionary """ logs.logger.debug("deserialize: %s" % data) try: result = json.loads(data) if result['name'] in Messages.messages: return Messages.Message(result['name'], result['args']) else: raise ProtocolError(result['name']) except (ValueError, TypeError, KeyError): raise ProtocolError(data)
UTF-8
Python
false
false
2,011
77,309,438,121
504dd9c166d52d80e046f8de198c6eca809b2efa
a6e571edef5fdda9adedbd2abcbcd2dd5e4c7c9c
/Prototype/Miscellaneous/forms.py
b70b31735a5ad091fa2cd54811ad95dab939cecf
[]
no_license
1101811b/DIM3-Team-Q-Fish
https://github.com/1101811b/DIM3-Team-Q-Fish
cd5ce885db3513873ce412e219aebe62c53e2561
b9791c649599a0069024f37ab6dac64e97d209a2
refs/heads/master
2016-09-05T22:42:22.553807
2014-03-21T09:40:44
2014-03-21T09:40:44
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from django import forms from Miscellaneous.models import * class EmailForm(forms.ModelForm): class Meta: model = Contact class ComplaintForm(forms.ModelForm): class Meta: model = Complaint
UTF-8
Python
false
false
2,014
17,145,509,477,237
db6cb41e9e9bd4c537972be3b756a1de88f7191a
7359acef1cbed3e94d992979ebcb87544feafc1d
/qikify/controllers/GaussianProcess.py
2cac8f65eda07fe98c60f53dae90d60f03009606
[ "MIT" ]
permissive
abhishek-basu-git/qikify
https://github.com/abhishek-basu-git/qikify
de803b7313f94508d0b384fccd298b87a015ad9f
ed1384a1cbaf57fe4d570937e8a5859ab0858fde
refs/heads/master
2022-11-13T19:52:35.696803
2012-05-23T18:53:47
2012-05-23T18:53:47
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import numpy as np from sklearn.gaussian_process import GaussianProcess class GaussianProcess(object): def __init__(self, nugget=0.1): self.nugget = nugget def fit(self, chips): X = pandas.DataFrame([[chip.X, chip.Y] for chip in chips]) y = [chip.gnd for chip in chips] self.gp = GaussianProcess(nugget=self.nugget) self.gp.fit(X, y) def predict(self, chip): return self.gp.predict([chip.X, chip.Y])
UTF-8
Python
false
false
2,012
18,880,676,242,140
fe7dddcc7282559894ea93db48e912aad04242d7
5df550c720cd63cd1bae8b7cddccac7f1e0d420d
/sorbic/stor/mpack.py
940bac8e463537da373685c71ce51b8433ceb991
[ "Apache-2.0" ]
permissive
SmithSamuelM/sorbic
https://github.com/SmithSamuelM/sorbic
936a88936478938893f597bf5d7f7998c06597d9
bf0b8d28f75c35c15f81a6f16b47755202be3fff
refs/heads/master
2021-01-18T05:32:07.731795
2014-12-07T07:13:47
2014-12-07T07:13:47
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# -*- coding: utf-8 -*- ''' Storage using msgpack for serialization ''' # Import third party libs import msgpack class Mpack(object): ''' msgpack! ''' def __init__(self, root): self.root = root def dump(self, data): ''' prep the data for storage ''' return msgpack.dumps(data) def load(self, raw_data): ''' load data into serialized form ''' return msgpack.loads(raw_data)
UTF-8
Python
false
false
2,014
4,432,406,272,138
659e4aaf0ef2cdc4f04a3b5fcf238ad76f5a0d76
d8e79e8fe894c64a1a1e6e89ff869e88d5f722c1
/core/model.py
43db96f2a3baf207905428601522313994aeba57
[ "Apache-2.0" ]
permissive
justasabc/double_ball
https://github.com/justasabc/double_ball
ffb2e1244c603945c1bacdf997a35b5aaaf6685d
955231b477c2be6f86afbb7e3707396fc4bc0f1f
refs/heads/master
2021-01-10T19:46:58.104500
2014-12-06T06:36:31
2014-12-06T06:36:31
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!usr/bin/python # encoding: utf-8 __all__ = ['Record','RecordCollection','FileReader','Stats'] from constants import * # 03001 10 11 12 13 26 28 11 10307806 0 0 898744 1 2003-2-20 2003-2-23 class Record: def __init__(self,parts): self.__init(parts) self.__stats() #print self.str() #print self.stats_str() def __init(self,parts): self.id = parts[0] self.n1 = int(parts[1]) self.n2 = int(parts[2]) self.n3 = int(parts[3]) self.n4 = int(parts[4]) self.n5 = int(parts[5]) self.n6 = int(parts[6]) self.n7 = int(parts[7]) self.total_money = int(parts[8]) self.one_money = int(parts[9]) self.one_count = int(parts[10]) self.two_money = int(parts[11]) self.two_count = int(parts[12]) self.start_date = parts[13] self.end_date = parts[14] def __stats(self): self.__stats_red_blue() self.__stats_red() def __stats_red_blue(self): self.red_sum = 0 self.blue_sum = 0 self.red_01_str = "" self.blue_01_str = "" self.red_01_count = (0,0) self.blue_01_count = (0,0) self.red_prim_count = 0 self.blue_prim_count = 0 # 1-11, 12-22,23-33 self.red_3zone_count = (0,0,0) # 1-8, 9-16 self.blue_2zone_count = (0,0) # get stats # red red_list = [self.n1,self.n2,self.n3,self.n4,self.n5,self.n6] c0 = 0 c1 = 0 prim = 0 for n in red_list: self.red_sum += n if n%2==0: self.red_01_str += '0' c0 += 1 else: self.red_01_str += '1' c1 += 1 # red prim if n in RED_PRIM_LIST: prim += 1 self.red_01_count = (c0,c1) self.red_prim_count = prim # blue blue_list = [self.n7] c0 = 0 c1 = 0 prim = 0 for n in blue_list: self.blue_sum += n if n%2==0: self.blue_01_str += '0' c0 += 1 else: self.blue_01_str += '1' c1 += 1 # blue prim if n in BLUE_PRIM_LIST: prim += 1 self.blue_01_count = (c0,c1) self.blue_prim_count = prim # zone stats # red zone1 = 0 zone2 = 0 zone3 = 0 for n in red_list: if n>=RED_ZONE1[0] and n<=RED_ZONE1[1]: zone1 +=1 elif n>=RED_ZONE2[0] and n<=RED_ZONE2[1]: zone2 +=1 elif n>=RED_ZONE3[0] and n<=RED_ZONE3[1]: zone3 +=1 self.red_3zone_count = (zone1,zone2,zone3) # blue zone1 = 0 zone2 = 0 for n in blue_list: 
if n>=BLUE_ZONE1[0] and n<=BLUE_ZONE1[1]: zone1 +=1 elif n>=BLUE_ZONE2[0] and n<=BLUE_ZONE2[1]: zone2 +=1 self.blue_2zone_count = (zone1,zone2) def __stats_red(self): red_list = [self.n1,self.n2,self.n3,self.n4,self.n5,self.n6] # (1) red shift to base # 2,13,17,20,25,33===>11,15,18,23,31 self.red_shift_to_base = [] for n in red_list[1:]: self.red_shift_to_base.append(n-red_list[0]) # (2) red head-tail width self.red_width = self.n6-self.n1 # (3) red delta # 2,13,17,20,25,33===> 11,4,3,5,8 def stats_str(self): return "<stats>\n [red] sum={0} 01_str={1} 01_count={2} prim={3}\n [blue] sum={4} 01_str={5} 01_count={6} prim={7}\n 3zone = {8}".format(self.red_sum,self.red_01_str,self.red_01_count,self.red_prim_count, self.blue_sum,self.blue_01_str,self.blue_01_count,self.blue_prim_count, self.red_3zone_count) def long_str(self): return "{0} [{1} {2} {3} {4} {5} {6} {7}] {8} {9} {10} {11} {12} {13} {14}".format(self.id,self.n1,self.n2,self.n3,self.n4,self.n5,self.n6,self.n7,self.total_money,self.one_money,self.one_count,self.two_money,self.two_count,self.start_date,self.end_date) def short_str(self): return "{0} [{1} {2} {3} {4} {5} {6} {7}] {8}/{9}".format(self.id,self.n1,self.n2,self.n3,self.n4,self.n5,self.n6,self.n7,self.start_date,self.end_date) def str(self): return "%s [%02d %02d %02d %02d %02d %02d %02d] %s/%s" % (self.id,self.n1,self.n2,self.n3,self.n4,self.n5,self.n6,self.n7,self.start_date,self.end_date) def __str__(self): return self.long_str() class RecordCollection: def __init__(self): self.records = [] def get_records(self): return self.records def get_record_count(self): return len(self.records) def add_record(self,record): self.records.append(record) def get_record_by_id(self,id): # 03088 if(len(id)!=5): print "Error. invalid id %s".format(id) return None for record in self.records: if id == record.id: return record print "Warning. 
can not find record id =%s".format(id) return None """ querying methods: return a list of record """ def query_by_year(self,year): # 2003,2009--->03,09 # 2010,2011--->10,11 if(year<START_YEAR or len(str(year))!=4): print "Error. invalid year {0}".format(year) return None str_year = str(year) year = str_year[-2:] result = [] for record in self.records: if (year == record.id[:2]): result.append(record) return result def query_by_year_month(self,year,month): # 2003,2 # '2003-2-23' '2003-2-27' if(year<START_YEAR or len(str(year))!=4): print "Error. invalid year {0}".format(year) return None if(month<1 or month>MONTH): print "Error. invalid month {0}".format(month) return None result = [] for record in self.records: parts = record.end_date.split("-") if year==int(parts[0]) and month == int(parts[1]) : result.append(record) return result def __x_query_by_date(self,date): # '2003-9-4' for record in self.records: if (date == record.end_date): return [record] print "Warning. can not find record id =%s".format(id) return None def query_by_year_month_day(self,year,month,day): # 2003,2,23 # '2003-2-23' if(year<START_YEAR or len(str(year))!=4): print "Error. invalid year {0}".format(year) return None if(month<1 or month>MONTH): print "Error. invalid month {0}".format(month) return None if(day<1 or day>31): print "Error. 
invalid day {0}".format(day) return None date = '{0}-{1}-{2}'.format(year,month,day) return self.__x_query_by_date(date) def __x_query_by_number_pos_1(self,n): result = [] for record in self.records: if n == record.n1 : result.append(record) return result def __x_query_by_number_pos_2(self,n): result = [] for record in self.records: if n == record.n2 : result.append(record) return result def __x_query_by_number_pos_3(self,n): result = [] for record in self.records: if n == record.n3 : result.append(record) return result def __x_query_by_number_pos_4(self,n): result = [] for record in self.records: if n == record.n4 : result.append(record) return result def __x_query_by_number_pos_5(self,n): result = [] for record in self.records: if n == record.n5 : result.append(record) return result def __x_query_by_number_pos_6(self,n): result = [] for record in self.records: if n == record.n6 : result.append(record) return result def __x_query_by_number_pos_7(self,n): result = [] for record in self.records: if n == record.n7 : result.append(record) return result def query_by_number_pos(self,n,pos): # pos = 1,2,3,4,5,6,7 if(pos<1 or pos>7): print 'Error. valid pos is 1-7.' return None if(pos==7): if (n>BLUE_MAX_NUMBER): print "Error. blue number >=%s" % BLUE_MAX_NUMBER return None else: if (n>RED_MAX_NUMBER): print "Error. red number >=%s" % RED_MAX_NUMBER return None methods = { 1:self.__x_query_by_number_pos_1, 2:self.__x_query_by_number_pos_2, 3:self.__x_query_by_number_pos_3, 4:self.__x_query_by_number_pos_4, 5:self.__x_query_by_number_pos_5, 6:self.__x_query_by_number_pos_6, 7:self.__x_query_by_number_pos_7 } return methods[pos](n) def save(self,filepath): with open(filepath,'w') as f: for record in self.records: line = "%02d %02d %02d %02d %02d %02d %02d\n" % (record.n1,record.n2,record.n3,record.n4,record.n5,record.n6,record.n7) f.write(line) print "generated {0}.".format(filepath) def query_by_number_list(self,number_list): if(len(number_list)>7): print "Error. 
number list count>7" return None result = [] for record in self.records: list7 = [record.n1,record.n2,record.n3,record.n4,record.n5,record.n6,record.n7] base_set = set(list7) query_set = set(number_list) if query_set.issubset(base_set): result.append(record) return result def test_number(self,n1,n2,n3,n4,n5,n6,n7): #03056 08 17 21 26 28 29 07 32664536 5000000 1 557563 3 2003-8-31 2003-9-4 #result = query_by_number_list([n1,n2,n3,n4,n5,n6,n7]) for record in self.records: if (n1==record.n1 and n2==record.n2 and n3==record.n3 and n4==record.n4 and n5==record.n5 and n6==record.n6 and n7==record.n7): print 'Hit. [{0} {1} {2} {3} {4} {5} {6}] at {7} on {8}'.format(n1,n2,n3,n4,n5,n6,n7,record.id,record.end_date) return True print 'NO Hit. [{0} {1} {2} {3} {4} {5} {6}]'.format(n1,n2,n3,n4,n5,n6,n7) return False class FileReader: def __init__(self): self.sep = ' ' def process(self,filepath): rc = RecordCollection() for line in open(filepath,'r'): parts = line.strip('\n').split(self.sep) if(len(parts)!=RECORD_FIELD): print "ERROR. record field %d!" 
% len(parts) return None record = Record(parts) rc.add_record(record) return rc class Stats: def __init__(self,rc): self.__init(rc) def __init(self,rc): self.rc = rc # red stats self.red_sum_list = [] self.red_01_str_list = [] self.red_01_count_list = [] self.red_prim_count_list = [] self.red_3zone_count_list = [] self.__get_red_xxx_list() # blue stats self.blue_sum_list = [] self.blue_01_str_list = [] self.blue_01_count_list = [] self.blue_prim_count_list = [] self.blue_2zone_count_list = [] self.__get_blue_xxx_list() # avg avg_e self.red_sum_avg = self.__avg_list(self.red_sum_list) self.blue_sum_avg = self.__avg_list(self.blue_sum_list) self.red_sum_avg_e = (RED_MIN_NUMBER + RED_MAX_NUMBER)*RED_COUNT/2.0 self.blue_sum_avg_e = (BLUE_MIN_NUMBER + BLUE_MAX_NUMBER)*BLUE_COUNT/2.0 # red only # red shift base self.red_shift_to_base_list = self.__get_red_shift_to_base_list() # red width self.red_width_list = self.__get_red_width_list() # red/blue prim pair self.prim_count_list = zip(self.red_prim_count_list,self.blue_prim_count_list) def __avg_list(self,list): count = len(list) if(count==0): return 0.0 total_sum = 0 for n in list: total_sum += n return total_sum*1.0/count def __inf(self,filename): return "{0}{1}".format(INPUT_FOLDER,filename) def __outf(self,filename): return "{0}{1}".format(OUTPUT_FOLDER,filename) def __save_list(self,filename,list): filepath = self.__outf(filename) with open(filepath,'w') as f: for item in list: line = str(item)+"\n" f.write(line) print "saved {0}.".format(filepath) def save(self): self.__save_list('red_sum_list',self.red_sum_list) self.__save_list('blue_sum_list',self.blue_sum_list) self.__save_list('red_01_str_list',self.red_01_str_list) self.__save_list('blue_01_str_list',self.blue_01_str_list) self.__save_list('red_01_count_list',self.red_01_count_list) self.__save_list('blue_01_count_list',self.blue_01_count_list) self.__save_list('red_prim_count_list',self.red_prim_count_list) 
self.__save_list('blue_prim_count_list',self.blue_prim_count_list) self.__save_list('prim_count_list',self.prim_count_list) self.__save_list('red_3zone_count_list',self.red_3zone_count_list) self.__save_list('blue_2zone_count_list',self.blue_2zone_count_list) # red only self.__save_list('red_shift_to_base_list',self.red_shift_to_base_list) self.__save_list('red_width_list',self.red_width_list) """ get red xxx list """ def __get_red_xxx_list(self): for record in self.rc.get_records(): self.red_sum_list.append(record.red_sum) self.red_01_str_list.append(record.red_01_str) self.red_01_count_list.append(record.red_01_count) self.red_prim_count_list.append(record.red_prim_count) self.red_3zone_count_list.append(record.red_3zone_count) """ get blue xxx list """ def __get_blue_xxx_list(self): for record in self.rc.get_records(): self.blue_sum_list.append(record.blue_sum) self.blue_01_str_list.append(record.blue_01_str) self.blue_01_count_list.append(record.blue_01_count) self.blue_prim_count_list.append(record.blue_prim_count) self.blue_2zone_count_list.append(record.blue_2zone_count) def get_red_sum_avg_e(self): return self.red_sum_avg_e def get_red_sum_avg(self): return self.red_sum_avg def get_blue_sum_avg_e(self): return self.blue_sum_avg_e def get_blue_sum_avg(self): return self.blue_sum_avg """ red related methods """ # red shift to base def __get_red_shift_to_base_list(self): self.red_shift_to_base_list = [] for record in self.rc.get_records(): self.red_shift_to_base_list.append(record.red_shift_to_base) return self.red_shift_to_base_list # red width def __get_red_width_list(self): self.red_width_list = [] for record in self.rc.get_records(): self.red_width_list.append(record.red_width) return self.red_width_list
UTF-8
Python
false
false
2,014
1,357,209,695,915
f6f4e75c439d9144a71a7f61e7e73c94d5c2a55a
82d3b8250984c97e2bde35bf48637a9a8ed15a2c
/taiga/domains/models.py
13e5effe3910ef3c9ae3039b5f31638f856575bf
[ "LicenseRef-scancode-unknown-license-reference", "AGPL-3.0-only" ]
non_permissive
anler/taiga-back
https://github.com/anler/taiga-back
75ab8578826e8d5d4b965916caaa60bfb43df9e3
552a9d83f88bca5b113b5255bc636243671aecd2
refs/heads/master
2021-01-18T11:30:46.822563
2014-05-13T15:33:31
2014-05-14T08:32:14
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# Copyright (C) 2014 Andrey Antukh <[email protected]> # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import string from django.db import models from django.db.models.signals import pre_save, pre_delete from django.dispatch import receiver from django.utils.translation import ugettext_lazy as _ from django.core.exceptions import ValidationError from .base import clear_domain_cache def _simple_domain_name_validator(value): """ Validates that the given value contains no whitespaces to prevent common typos. 
""" if not value: return checks = ((s in value) for s in string.whitespace) if any(checks): raise ValidationError( _("The domain name cannot contain any spaces or tabs."), code='invalid', ) class Domain(models.Model): domain = models.CharField(_('domain name'), max_length=255, unique=True, validators=[_simple_domain_name_validator]) name = models.CharField(_('display name'), max_length=255) scheme = models.CharField(_('scheme'), max_length=60, null=True, default=None) # Site Metadata public_register = models.BooleanField(default=False) default_language = models.CharField(max_length=20, null=False, blank=True, default="", verbose_name=_("default language")) alias_of = models.ForeignKey("self", null=True, default=None, blank=True, verbose_name=_("Mark as alias of"), related_name="+") class Meta: verbose_name = _('domain') verbose_name_plural = _('domain') ordering = ('domain',) def __str__(self): return self.domain def user_is_owner(self, user): return self.members.filter(user_id=user.id, is_owner=True).exists() def user_is_staff(self, user): return self.members.filter(user_id=user.id, is_staff=True).exists() def user_is_normal_user(self, user): return self.members.filter(user_id=user.id, is_owner=False, is_staff=False).exists() class DomainMember(models.Model): domain = models.ForeignKey("Domain", related_name="members", null=True) user = models.ForeignKey("users.User", related_name="+", null=True) email = models.EmailField(max_length=255) is_owner = models.BooleanField(default=False) is_staff = models.BooleanField(default=False) class Meta: ordering = ["email"] verbose_name = "Domain Member" verbose_name_plural = "Domain Members" unique_together = ("domain", "user") def __str__(self): return "DomainMember: {0}:{1}".format(self.domain, self.user) pre_save.connect(clear_domain_cache, sender=Domain) pre_delete.connect(clear_domain_cache, sender=Domain) @receiver(pre_delete, sender=DomainMember, dispatch_uid="domain_member_pre_delete") def 
domain_member_pre_delete(sender, instance, *args, **kwargs): for domain_project in instance.domain.projects.all(): domain_project.memberships.filter(user=instance.user).delete()
UTF-8
Python
false
false
2,014
17,557,826,310,194
2344515785ec67a1838719e726e814a4a43b4ff4
ce7e01e55644b423d9dca279bfe41762cdc6462a
/studio/config/installer.py
b664d9bcd00c6fa85ce053e12ac76430d698b12b
[ "GPL-3.0-or-later", "LGPL-2.0-or-later", "LicenseRef-scancode-warranty-disclaimer", "GPL-1.0-or-later", "LGPL-2.1-or-later", "GPL-3.0-only", "AGPL-3.0-or-later", "LicenseRef-scancode-other-copyleft", "LicenseRef-scancode-unknown-license-reference", "LicenseRef-scancode-free-unknown" ]
non_permissive
Chirag19/Studio
https://github.com/Chirag19/Studio
830256697b745466c5ae4ec91d53bd2506fe8845
43cb7298434fb606b15136801b79b03571a2f27e
refs/heads/master
2020-05-09T13:18:16.518211
2011-03-17T08:51:32
2011-03-17T08:51:32
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# # Copyright (C) 2010 Camptocamp # # This file is part of Studio # # Studio is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Studio is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Studio. If not, see <http://www.gnu.org/licenses/>. # import getpass from pylons.util import PylonsInstaller from paste.script.templates import var from paste.script.util import secret class StudioInstaller(PylonsInstaller): def config_content(self, command, vars): """ Called by ``self.write_config``, this returns the text content for the config file, given the provided variables. """ settable_vars = [ var('db_url', 'Database url for sqlite, postgres or mysql', default='sqlite:///%(here)s/studio.db'), var('ms_url','Url to the mapserv CGI', default='http://localhost/cgi-bin/mapserv'), var('admin_password','Password for default admin user', default=secret.secret_string(length=8)) ] for svar in settable_vars: if command.interactive: prompt = 'Enter %s' % svar.full_description() response = command.challenge(prompt, svar.default, svar.should_echo) vars[svar.name] = response else: if not vars.has_key(svar.name): vars[svar.name] = svar.default vars['cookie_secret'] = secret.secret_string() # call default pylons install return super(StudioInstaller, self).config_content(command, vars)
UTF-8
Python
false
false
2,011
8,306,466,767,290
0024a68ed4e25e52011fba01735bf617f71a6cf5
5773af03f49d4950f8b1d07f941591a5f9813f0f
/aprs2tracker.py
6ee85a9a3ddd9e42f31f5434991a7e8923c26bd5
[]
no_license
chuckhacker/APRS2Tracker
https://github.com/chuckhacker/APRS2Tracker
416e88e2213c645c5ab500f8f3ad2ec211db60c0
53348be31fd0665b0b683ad26837d2c337feb92c
refs/heads/master
2021-05-27T02:44:39.266613
2011-08-13T18:55:59
2011-08-13T18:55:59
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# APRS servers list: http://www.aprs-is.net/APRSServers.aspx # Tier 2 servers: http://www.aprs2.net/serverstats.php from aprs_client import APRSClient from aprs_handler import APRSPacket from tracker import Tracker from optparse import OptionParser, Option class Main: def __init__(self, trackerUrl, trackerPass): self.tracker = Tracker(trackerUrl, trackerPass) def packetHandler(self, aprsString): print 'APRS String: %s' % aprsString packet = APRSPacket() if packet.parse(aprsString): print '%s -> %s' % (packet.source, packet.dest) print 'Report type: %s' % packet.reportType if packet.hasLocation: print 'Time: %sZ' % packet.time print 'Coordinates: %f, %f, Altitude: %d ft' % (packet.latitude, packet.longitude, packet.altitude) print 'Course: %d, Speed: %d kn, Bearing: %d' % (packet.course, packet.speed, packet.bearing) print 'Comment: %s' % packet.comment print 'Uploading to tracker' self.tracker.track(packet) print '' class ExtendOption(Option): ACTIONS = Option.ACTIONS + ("extend",) STORE_ACTIONS = Option.STORE_ACTIONS + ("extend",) TYPED_ACTIONS = Option.TYPED_ACTIONS + ("extend",) ALWAYS_TYPED_ACTIONS = Option.ALWAYS_TYPED_ACTIONS + ("extend",) def take_action(self, action, dest, opt, value, values, parser): if action == "extend": lvalue = value.split(",") values.ensure_value(dest, []).extend(lvalue) else: Option.take_action( self, action, dest, opt, value, values, parser) def defaultOpt(value, default): if value: return value else: return default def run(): parser = OptionParser(option_class=ExtendOption) parser.add_option("-u", "--url", dest="url", help="Tracker URL including track.php") parser.add_option("-w", "--password", dest="password", help="Tracker password") parser.add_option("-a", "--host", dest="host", help="APRS server host name") parser.add_option("-p", "--port", dest="port", type="int", help="APRS server port") parser.add_option("-c", "--callsigns", dest="callsigns", action="extend", help="Comma delimeted callsigns to monitor (you can use *)") 
parser.add_option("-j", "--adjunct", dest="adjunct", help="APRS adjunct string") (options, args) = parser.parse_args() if options.callsigns: adjunct = 'filter b/' + '/'.join(options.callsigns) if options.adjunct: adjunct += ' ' + options.adjunct else: adjunct = defaultOpt(options.adjunct, '') main = Main(defaultOpt(options.url, 'http://spacenear.us/tracker/track.php'), defaultOpt(options.password, 'aurora')) client = APRSClient(main.packetHandler, defaultOpt(options.host, 'ontario.aprs2.net'), adjunct, defaultOpt(options.port, 14580)) client.start() #main.packetHandler('KE7MK-9>APOTC1,WIDE1-1,WIDE2-1,qAR,WT7T-6:/280229z4448.85N/10656.63Wv195/018/A=003888KE7MK Mobile Monitoring 146.820') if __name__=='__main__': run()
UTF-8
Python
false
false
2,011
6,399,501,279,523
153f2d6b3f1f40f7bdb8419780a5ac29b9dd1a75
08867249c328e6da73a8b52bb01954050e8c360a
/controllers/test_util.py
f63e13e3608508412400d51c0f4d99aa557ae7d8
[]
no_license
pombredanne/tinyclassified
https://github.com/pombredanne/tinyclassified
b054537a04210a6d6646906029c966d4ca300f6d
c8f11fc61b89a40c686c4783220b95476c76bd86
refs/heads/master
2017-12-04T16:06:23.908925
2014-09-02T00:50:22
2014-09-02T00:50:22
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
"""Utility functions / classes for controller testing. @author: Rory Olsen (rolsen, Gleap LLC 2014) @license: GNU GPLv3 """ class TestCursor: def __init__(self, results): self.results = results self.index = 0 self.distinct_param = '' def count(self): return len(self.results) def __iter__(self): return self def next(self): if not self.index < len(self.results): raise StopIteration ret = self.results[self.index] self.index += 1 return ret def distinct(self, distinct): self.distinct_param = distinct ret = [] for result in self.results: ret.append(result[distinct]) return ret def __getitem__(self, trash): return self.results[0] class TestCollection(): find_hash = None find_result = None deleted = [] def find_one(self, find_hash): self.find_hash = find_hash return self.find_result def remove(self, remove): self.deleted.append(remove) class TestDBAdapter(): collection = None def get_listings_collection(self): return self.collection def check_dict(expected_dict, test_dict): """Check that two dictionaries are the same for each key in the first dict. Check that each key value pair in expected_dict is also in test dictionary but do not check that the relationship is bidirectional (test_dict may contain keys / values not in expected_dict). @return: True if everything in expected_dict is in test_dict. False otherwise. @rtype: bool """ for (key, value) in expected_dict.items(): if test_dict[key] != value: return False return True
UTF-8
Python
false
false
2,014
1,803,886,300,718
8411157785a8fa8d6dd13f776a7bdf6a454f71dc
0fa6012b852d78c849e1385d9a926e35a07516df
/nengo/tests/test_node.py
2b39c76634550ee65186b77b0e3d23ab0b14d886
[ "MIT" ]
permissive
jaberg/nengo
https://github.com/jaberg/nengo
b87a5408f77c19cd3b44c40aa0f4af55cc08c148
1e882c8ee684aac757a6f181d8b8498aad9d330d
refs/heads/master
2020-04-01T19:10:26.717474
2013-08-29T14:27:24
2013-08-29T14:27:24
10,359,867
0
1
null
true
2013-11-19T00:03:38
2013-05-29T13:10:28
2013-11-19T00:03:38
2013-11-19T00:03:38
5,422
0
0
0
Python
null
null
import numpy as np

import nengo
import nengo.old_api as nef
from nengo.tests.helpers import SimulatorTestCase, unittest


class TestNode(SimulatorTestCase):
    """Checks that a node driven by np.sin yields the expected sampled
    output through both the old (nef.Network) and new (nengo.Model)
    APIs."""

    def test_simple(self):
        # Common simulator configuration shared by both API variants so
        # their runs are directly comparable.
        params = dict(simulator=self.Simulator, seed=123, dt=0.001)

        # Old API
        net = nef.Network('test_simple', **params)
        net.make_input('in', value=np.sin)
        # Filtered probe; pstc=0.0, but make_probe still introduces the
        # one-step readout delay asserted below.
        p = net.make_probe('in', dt_sample=0.001, pstc=0.0)
        # Raw (unfiltered) probes on the input signal and on simulator
        # time itself. NOTE(review): probes are registered before run(),
        # which presumably is required for them to record — confirm.
        rawp = net._raw_probe(net.inputs['in'], dt_sample=.001)
        st_probe = net._raw_probe(net.model.simtime, dt_sample=.001)
        net.run(0.01)

        data = p.get_data()
        raw_data = rawp.get_data()
        st_data = st_probe.get_data()
        # Simulator time is expected at 0.001, 0.002, ..., 0.010
        # (arange end 0.0105 includes the final 0.010 sample).
        self.assertTrue(np.allclose(st_data.ravel(),
                                    np.arange(0.001, 0.0105, .001)))
        # Raw probe is expected to see sin(t) for t = 0, 0.001, ..., 0.009.
        self.assertTrue(np.allclose(raw_data.ravel(),
                                    np.sin(np.arange(0, 0.0095, .001))))
        # -- the make_probe call induces a one-step delay
        # on readout even when the pstc is really small.
        self.assertTrue(np.allclose(data.ravel()[1:],
                                    np.sin(np.arange(0, 0.0085, .001))))

        # New API: same input and expectations via nengo.Model.
        m = nengo.Model('test_simple', **params)
        node = m.make_node('in', output=np.sin)
        m.probe('in')
        m.run(0.01)
        self.assertTrue(np.allclose(m.data[m.simtime].ravel(),
                                    np.arange(0.001, 0.0105, .001)))
        self.assertTrue(np.allclose(m.data['in'].ravel(),
                                    np.sin(np.arange(0, 0.0095, .001))))


if __name__ == "__main__":
    nengo.log_to_file('log.txt', debug=True)
    unittest.main()
UTF-8
Python
false
false
2,013