 \n \n '''\n\n return outstring\n #@+node:2015.20150330144929.1713: *3* twoDgear\n @cherrypy.expose\n # N 為齒數, M 為模數, P 為壓力角\n def twoDgear(self, N=20, M=5, P=15):\n outstring = '''\n <!DOCTYPE html>\n <html>\n <head>\n <meta http-equiv=\"content-type\" content=\"text/html;charset=utf-8\">\n <!-- 載入 brython.js -->\n <script type=\"text/javascript\" src=\"/static/Brython3.1.1-20150328-091302/brython.js\"></script>\n <script src=\"/static/Cango2D.js\" type=\"text/javascript\"></script>\n <script src=\"/static/gearUtils-04.js\" type=\"text/javascript\"></script>\n </head>\n <!-- 啟動 brython() -->\n <body onload=\"brython()\">\n <form method=POST action=do2Dgear>\n 齒數:<input type=text name=N><br />\n 模數:<input type=text name=M><br />\n 壓力角:<input type=text name=P><br />\n <input type=submit value=send>\n </form>\n </body>\n </html>\n '''\n\n return outstring\n
 #@+node:2015.20150331094055.1733: *3* threeDgear\n @cherrypy.expose\n # N 為齒數, M 為模數, P 為壓力角\n def threeDgear(self, N=20, M=5, P=15):\n outstring = '''\n <!DOCTYPE html>\n <html>\n <head>\n <meta http-equiv=\"content-type\" content=\"text/html;charset=utf-8\">\n <!-- 載入 brython.js -->\n <script type=\"text/javascript\" src=\"/static/Brython3.1.1-20150328-091302/brython.js\"></script>\n <script src=\"/static/Cango2D.js\" type=\"text/javascript\"></script>\n <script src=\"/static/gearUtils-04.js\" type=\"text/javascript\"></script>\n </head>\n <!-- 啟動 brython() -->\n <body onload=\"brython()\">\n <form method=POST action=do3Dgear>\n 齒數:<input type=text name=N><br />\n 模數:<input type=text name=M><br />\n 壓力角:<input type=text name=P><br />\n <input type=submit value=send>\n </form>
</body>\n </html>\n '''\n\n return outstring\n #@+node:2015.20150330144929.1762: *3* do2Dgear\n @cherrypy.expose\n # N 為齒數, M 為模數, P 為壓力角\n def do2Dgear(self, N=20, M=5, P=15):\n outstring = '''\n <!DOCTYPE html>\n <html>\n <head>\n <meta http-equiv=\"content-type\" content=\"text/html;charset=utf-8\">\n <!-- 載入 brython.js -->\n <script type=\"text/javascript\" src=\"/static/Brython3.1.1-20150328-091302/brython.js\"></script>\n <script src=\"/static/Cango2D.js\" type=\"text/javascript\"></script>\n <script src=\"/static/gearUtils-04.js\" type=\"text/javascript\"></script>\n </head>\n <!-- 啟動 brython() -->\n <body onload=\"brython()\">\n <!-- 以下為 canvas 畫圖程式 -->\n <script type=\"text/python\">\n # 從 browser 導入 document\n from browser import document\n import math\n # 畫布指定在名稱為 plotarea 的 canvas 上\n canvas = document[\"plotarea\"]\n ctx = canvas.getContext(\"2d\")\n # 用紅色畫一條直線\n ctx.beginPath()\n ctx.lineWidth = 3\n '''\n outstring += '''\n ctx.moveTo('''+str(N)+\",\"+str(M)+\")\"\n outstring += '''\n ctx.lineTo(0, 500)\n ctx.strokeStyle = \"red\"\n ctx.stroke()\n # 用藍色再畫一條直線\n ctx.beginPath()\n ctx.lineWidth = 3\n ctx.moveTo(0, 0)\n ctx.lineTo(500, 0)\n ctx.strokeStyle = \"blue\"\n ctx.stroke()\n # 用綠色再畫一條直線\n ctx.beginPath()\n ctx.lineWidth = 3\n ctx.moveTo(0, 0)\n ctx.lineTo(500, 500)\n ctx.strokeStyle = \"green\"\n ctx.stroke()\n # 用黑色畫一個圓\n ctx.beginPath()\n ctx.lineWidth = 3\n ctx.strokeStyle = \"black\"\n ctx.arc(250,250,50,0,2*math.pi)\n ctx.stroke()\n </script>\n <canvas id=\"plotarea\" width=\"800\" height=\"600\"></canvas>\n </body>\n </html>\n '''\n\n return outstring\n
 #@+node:2015.20150331094055.1735: *3* do3Dgear\n @cherrypy.expose\n # N 為齒數, M 為模數, P 為壓力角\n def do3Dgear(self, N=20, M=5, P=15):\n outstring = '''\n <!DOCTYPE html>\n <html>\n <head>\n <meta http-equiv=\"content-type\" content=\"text/html;charset=utf-8\">\n <!-- 載入 brython.js -->\n <script type=\"text/javascript\" src=\"/static/Brython3.1.1-20150328-091302/brython.js\"></script>\n <script src=\"/static/Cango2D.js\" type=\"text/javascript\"></script>\n <script src=\"/static/gearUtils-04.js\" type=\"text/javascript\"></script>\n </head>\n <!-- 啟動 brython() -->\n <body onload=\"brython()\">\n <!-- 以下為 canvas 畫圖程式 -->\n <script type=\"text/python\">\n # 從 browser 導入 document\n from browser import document\n import math\n # 畫布指定在名稱為 plotarea 的 canvas 上\n canvas = document[\"plotarea\"]\n ctx = canvas.getContext(\"2d\")\n # 用紅色畫一條直線\n ctx.beginPath()\n ctx.lineWidth = 3\n '''\n outstring += '''\n ctx.moveTo('''+str(N)+\",\"+str(M)+\")\"\n outstring += '''\n \n \n '''\n\n return outstring\n
 #@+node:2015.20150330144929.1765: *3* mygeartest\n @cherrypy.expose\n # N 為齒數, M 為模數, P 為壓力角\n def mygeartest(self, N=20, M=5, P=15):\n outstring = '''\n \n \n \n \n \n \n \n \n \n \n \n\n \n \n \n \n \n '''\n\n return outstring\n
 #@+node:amd.20150415215023.1: *3* mygeartest2\n @cherrypy.expose\n # N 為齒數, M 為模數, P 為壓力角\n def mygeartest2(self, N=20, M=5, P=15):\n outstring = '''\n \n \n \n \n \n \n \n \n \n \n \n\n \n \n \n \n \n '''\n\n return outstring\n
 #@+node:2015.20150331094055.1737: *3* my3Dgeartest\n @cherrypy.expose\n # N 為齒數, M 為模數, P 為壓力角\n def my3Dgeartest(self, N=20, M=5, P=15):\n outstring = '''\n \n \n \n \n \n \n \n \n \n \n \n\n \n \n \n \n \n '''\n\n return outstring\n
 #@+node:2014fall.20141215194146.1793: *3* doCheck\n @cherrypy.expose\n def doCheck(self, guess=None):\n # 假如使用者直接執行 doCheck, 則設法轉回根方法\n if guess is None:\n raise cherrypy.HTTPRedirect(\"/\")\n # 從 session 取出 answer 對應資料, 且處理直接執行 doCheck 時無法取 session 值情況\n try:\n theanswer = int(cherrypy.session.get('answer'))\n except:\n raise cherrypy.HTTPRedirect(\"/\")\n # 經由表單所取得的 guess 資料型別為 string\n try:\n theguess = int(guess)\n except:\n return \"error \" + self.guessform()\n # 每執行 doCheck 一次,次數增量一次\n cherrypy.session['count'] += 1\n # 答案與所猜數字進行比對\n if theanswer < theguess:\n return \"big \" + self.guessform()\n elif theanswer > theguess:\n return \"small \" + self.guessform()\n else:\n # 已經猜對, 從 session 取出累計猜測次數\n thecount = cherrypy.session.get('count')\n return \"exact: 再猜\"\n
 #@+node:2014fall.20141215194146.1789: *3* guessform\n def guessform(self):\n # 印出讓使用者輸入的超文件表單\n outstring = str(cherrypy.session.get('answer')) + \"/\" + str(cherrypy.session.get('count')) + '''\n <form method=POST action=doCheck>\n 請輸入您所猜的整數:<input type=text name=guess><br />\n <input type=submit value=send>\n </form>
'''\n return outstring\n #@-others\n#@-others\n################# (4) 程式啟動區\n# 配合程式檔案所在目錄設定靜態目錄或靜態檔案\napplication_conf = {'/static':{\n 'tools.staticdir.on': True,\n # 程式執行目錄下, 必須自行建立 static 目錄\n 'tools.staticdir.dir': _curdir+\"/static\"},\n '/downloads':{\n 'tools.staticdir.on': True,\n 'tools.staticdir.dir': data_dir+\"/downloads\"},\n '/images':{\n 'tools.staticdir.on': True,\n 'tools.staticdir.dir': data_dir+\"/images\"}\n }\n \nroot = Hello()\nroot.gear = gear.Gear()\n\nif 'OPENSHIFT_REPO_DIR' in os.environ.keys():\n # 表示在 OpenSfhit 執行\n application = cherrypy.Application(root, config=application_conf)\nelse:\n # 表示在近端執行\n cherrypy.quickstart(root, config=application_conf)\n#@-leo\n"},"license":{"kind":"string","value":"gpl-3.0"},"hash":{"kind":"number","value":421274124816849400,"string":"421,274,124,816,849,400"},"line_mean":{"kind":"number","value":29.5359042553,"string":"29.535904"},"line_max":{"kind":"number","value":137,"string":"137"},"alpha_frac":{"kind":"number","value":0.554152332,"string":"0.554152"},"autogenerated":{"kind":"bool","value":false,"string":"false"},"ratio":{"kind":"number","value":2.0499018032494196,"string":"2.049902"},"config_test":{"kind":"bool","value":false,"string":"false"},"has_no_keywords":{"kind":"bool","value":false,"string":"false"},"few_assignments":{"kind":"bool","value":false,"string":"false"}}},{"rowIdx":43662,"cells":{"repo_name":{"kind":"string","value":"NeerajM999/recap-python"},"path":{"kind":"string","value":"LearnPython/data_structures/binary_tree.py"},"copies":{"kind":"string","value":"1"},"size":{"kind":"string","value":"1761"},"content":{"kind":"string","value":"class Node:\n def __init__(self, value):\n self.value = value\n self.left = None\n self.right = None\n\n\nclass BinaryTree(object):\n def __init__(self, root_val):\n self.root = Node(root_val)\n\n def preorder_traversal(self, start, traversal):\n \"\"\" Root -> left -> right \"\"\"\n\n if start:\n traversal += (str(start.value) + \"-\")\n traversal = self.preorder_traversal(start.left, traversal)\n traversal = self.preorder_traversal(start.right, traversal)\n\n return traversal\n\n def inorder_traversal(self, start, traversal):\n \"\"\" left -> root -> right \"\"\"\n if start:\n traversal = self.inorder_traversal(start.left, traversal)\n traversal += (str(start.value) + \"-\")\n traversal = self.inorder_traversal(start.right, traversal)\n\n return traversal\n\n def postorder_traversal(self, start, traversal):\n \"\"\" left -> right -> root \"\"\"\n if start:\n traversal = self.postorder_traversal(start.left, traversal)\n traversal = self.postorder_traversal(start.right, traversal)\n traversal += (str(start.value) + \"-\")\n\n return traversal\n\n\nif __name__ == \"__main__\":\n \"\"\"\n 1\n / \\\n 2 3\n / \\ / \\\n 4 5 6 7\n \"\"\"\n tree = BinaryTree(1)\n tree.root.left = Node(2)\n tree.root.right = Node(3)\n tree.root.left.left = Node(4)\n tree.root.left.right = Node(5)\n tree.root.right.left = Node(6)\n tree.root.right.right = Node(7)\n\n print(\"preorder-traversal: \", tree.preorder_traversal(tree.root, \"\"))\n\n print(\"inorder-traversal: \", tree.inorder_traversal(tree.root, \"\"))\n\n print(\"postorder-traversal: \", tree.postorder_traversal(tree.root, 
\"\"))"},"license":{"kind":"string","value":"gpl-3.0"},"hash":{"kind":"number","value":1749836215305637600,"string":"1,749,836,215,305,637,600"},"line_mean":{"kind":"number","value":27.8852459016,"string":"27.885246"},"line_max":{"kind":"number","value":75,"string":"75"},"alpha_frac":{"kind":"number","value":0.571266326,"string":"0.571266"},"autogenerated":{"kind":"bool","value":false,"string":"false"},"ratio":{"kind":"number","value":3.6841004184100417,"string":"3.6841"},"config_test":{"kind":"bool","value":false,"string":"false"},"has_no_keywords":{"kind":"bool","value":false,"string":"false"},"few_assignments":{"kind":"bool","value":false,"string":"false"}}},{"rowIdx":43663,"cells":{"repo_name":{"kind":"string","value":"kret0s/gnuhealth-live"},"path":{"kind":"string","value":"tryton/server/trytond-3.8.3/trytond/model/fields/one2one.py"},"copies":{"kind":"string","value":"1"},"size":{"kind":"string","value":"2080"},"content":{"kind":"string","value":"# This file is part of Tryton. The COPYRIGHT file at the top level of\n# this repository contains the full copyright notices and license terms.\nfrom types import NoneType\n\nfrom trytond.model.fields.field import Field\nfrom trytond.model.fields.many2many import Many2Many\nfrom trytond.pool import Pool\n\n\nclass One2One(Many2Many):\n '''\n Define one2one field (``int``).\n '''\n _type = 'one2one'\n\n def get(self, ids, model, name, values=None):\n '''\n Return target record.\n\n :param ids: a list of ids\n :param model: a string with the name of the model\n :param name: a string with the name of the field\n :param values: a dictionary with the read values\n :return: a dictionary with ids as key and target id as value\n '''\n res = super(One2One, self).get(ids, model, name, values=values)\n for i, vals in res.iteritems():\n res[i] = vals[0] if vals else None\n return res\n\n def set(self, Model, name, ids, value, *args):\n '''\n Set the values.\n '''\n pool = Pool()\n Relation = pool.get(self.relation_name)\n to_delete = []\n to_create = []\n args = iter((ids, value) + args)\n for ids, value in zip(args, args):\n relations = Relation.search([\n (self.origin, 'in', ids),\n ])\n to_delete.extend(relations)\n if value:\n for record_id in ids:\n to_create.append({\n self.origin: record_id,\n self.target: value,\n })\n if to_delete:\n Relation.delete(to_delete)\n if to_create:\n Relation.create(to_create)\n\n def __set__(self, inst, value):\n Target = self.get_target()\n if isinstance(value, dict):\n value = Target(*value)\n elif isinstance(value, (int, long)):\n value = Target(value)\n assert isinstance(value, (Target, NoneType))\n Field.__set__(self, inst, 
value)\n"},"license":{"kind":"string","value":"gpl-3.0"},"hash":{"kind":"number","value":4749240343853297000,"string":"4,749,240,343,853,297,000"},"line_mean":{"kind":"number","value":32.0158730159,"string":"32.015873"},"line_max":{"kind":"number","value":72,"string":"72"},"alpha_frac":{"kind":"number","value":0.5466346154,"string":"0.546635"},"autogenerated":{"kind":"bool","value":false,"string":"false"},"ratio":{"kind":"number","value":4.16,"string":"4.16"},"config_test":{"kind":"bool","value":false,"string":"false"},"has_no_keywords":{"kind":"bool","value":false,"string":"false"},"few_assignments":{"kind":"bool","value":false,"string":"false"}}},{"rowIdx":43664,"cells":{"repo_name":{"kind":"string","value":"Metonimie/Beaglebone"},"path":{"kind":"string","value":"programs/server.py"},"copies":{"kind":"string","value":"1"},"size":{"kind":"string","value":"3147"},"content":{"kind":"string","value":"#!/usr/bin/env python\n\"\"\"\nA very simple server in python\nused to control gpio pins on the beaglebone black.\n\nThe server listens for POST requests on port\n6410. It has no security at all, which means\nthat it accepts post-data from everyone.\n\nSend a GET request::\n curl http://localhost\nSend a POST request::\n curl -d \"foo=bar&bin=baz\" http://localhost\n\nUsage:\n nohup python3 server.py &\n\"\"\"\n\n# TODO: Add basic security\n# TODO: Use dictionary for gpio name : file\n\nimport http.server\nimport urllib\n\nPORT = 6410\ngpio_path = \"/sys/class/gpio/\"\n\n# If the param name is in here then we handle the value.\nauthorized_gpio = [\"gpio60\"]\n\nclass Server(http.server.BaseHTTPRequestHandler):\n\n def prepare_response(self, code):\n \"\"\"\n Prepares the response that will be send back to the requester,\n along with the code.\n \"\"\"\n self.send_response(code)\n self.send_header(\"Content-type\", \"text/html\")\n self.send_header(\"Access-Control-Allow-Origin\", \"*\")\n self.end_headers()\n\n def handle_gpio(self, key, value):\n \"\"\"\n Very basic gpio handling, converts the value into \n an int and then it writes it to the file.\n \"\"\"\n try:\n clean_value = int(value)\n with open(\"{}{}/value\".format(gpio_path, key), mode=\"w\") as file:\n file.write(str(clean_value))\n return False\n except ValueError as e:\n print(e)\n except Exception as e:\n print(\"Exception: {}\".format(e))\n return True\n\n def unsupported(self):\n self.wfile.write(\"Go Away!\\n\".encode())\n\n def do_GET(self):\n self.unsupported()\n\n def do_HEAD(self):\n self.unsupported()\n\n def do_POST(self):\n \"\"\"\n Handles the post request.\n If error is True then the handling has failed or the request is \n invalid\n \"\"\"\n error = False\n try:\n # The length of the request, in bytes.\n length = int(self.headers['content-length'])\n # Dictionary containing keys and values from the request.\n postvars = urllib.parse.parse_qs(self.rfile.read(length))\n for key, value in postvars.items():\n clean_key = key.decode()\n clean_value = value[0].decode()\n print(\"Received: \" + clean_key + \" : \" + clean_value)\n if clean_key in authorized_gpio:\n error = self.handle_gpio(clean_key, clean_value)\n else:\n error = True\n except Exception as e:\n print(e)\n error = True\n\n response = None\n if not error:\n self.prepare_response(200)\n response = \"Operation authorized.\\n\"\n else:\n self.prepare_response(403)\n response = \"Go away!\\n\"\n # Write response to the client.\n self.wfile.write(response.encode())\n\nif __name__ == \"__main__\":\n server_address = ('', PORT)\n httpd = 
http.server.HTTPServer(server_address, Server)\n print('Starting server')\n httpd.serve_forever()\n"},"license":{"kind":"string","value":"gpl-3.0"},"hash":{"kind":"number","value":3281151376845631500,"string":"3,281,151,376,845,631,500"},"line_mean":{"kind":"number","value":28.6886792453,"string":"28.688679"},"line_max":{"kind":"number","value":77,"string":"77"},"alpha_frac":{"kind":"number","value":0.5719733079,"string":"0.571973"},"autogenerated":{"kind":"bool","value":false,"string":"false"},"ratio":{"kind":"number","value":4.157199471598415,"string":"4.157199"},"config_test":{"kind":"bool","value":false,"string":"false"},"has_no_keywords":{"kind":"bool","value":false,"string":"false"},"few_assignments":{"kind":"bool","value":false,"string":"false"}}},{"rowIdx":43665,"cells":{"repo_name":{"kind":"string","value":"arbrandes/edx-configuration"},"path":{"kind":"string","value":"playbooks/roles/supervisor/files/pre_supervisor_checks.py"},"copies":{"kind":"string","value":"1"},"size":{"kind":"string","value":"12593"},"content":{"kind":"string","value":"import argparse\nimport boto.ec2\nfrom boto.utils import get_instance_metadata, get_instance_identity\nfrom boto.exception import AWSConnectionError\nimport hipchat\nimport os\nimport subprocess\nimport traceback\nimport socket\nimport time\n\n# Services that should be checked for migrations.\nMIGRATION_COMMANDS = {\n 'lms': \"/edx/bin/edxapp-migrate-lms --noinput --list\",\n 'cms': \"/edx/bin/edxapp-migrate-cms --noinput --list\",\n 'xqueue': \". {env_file}; sudo -E -u xqueue {python} {code_dir}/manage.py showmigrations\",\n 'ecommerce': \". {env_file}; sudo -E -u ecommerce {python} {code_dir}/manage.py showmigrations\",\n 'insights': \". {env_file}; sudo -E -u insights {python} {code_dir}/manage.py showmigrations\",\n 'analytics_api': \". {env_file}; sudo -E -u analytics_api {python} {code_dir}/manage.py showmigrations\",\n 'credentials': \". {env_file}; sudo -E -u credentials {python} {code_dir}/manage.py showmigrations\",\n 'discovery': \". 
{env_file}; sudo -E -u discovery {python} {code_dir}/manage.py showmigrations\",\n }\nHIPCHAT_USER = \"PreSupervisor\"\n\n# Max amount of time to wait for tags to be applied.\nMAX_BACKOFF = 120\nINITIAL_BACKOFF = 1\n\nREGION = get_instance_identity()['document']['region']\n\ndef services_for_instance(instance_id):\n \"\"\"\n Get the list of all services named by the services tag in this\n instance's tags.\n \"\"\"\n ec2 = boto.ec2.connect_to_region(REGION)\n reservations = ec2.get_all_instances(instance_ids=[instance_id])\n for reservation in reservations:\n for instance in reservation.instances:\n if instance.id == instance_id:\n try:\n services = instance.tags['services'].split(',')\n except KeyError as ke:\n msg = \"Tag named 'services' not found on this instance({})\".format(instance_id)\n raise Exception(msg)\n\n for service in services:\n yield service\n\ndef edp_for_instance(instance_id):\n ec2 = boto.ec2.connect_to_region(REGION)\n reservations = ec2.get_all_instances(instance_ids=[instance_id])\n for reservation in reservations:\n for instance in reservation.instances:\n if instance.id == instance_id:\n try:\n environment = instance.tags['environment']\n deployment = instance.tags['deployment']\n play = instance.tags['play']\n except KeyError as ke:\n msg = \"{} tag not found on this instance({})\".format(ke.message, instance_id)\n raise Exception(msg)\n return (environment, deployment, play)\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser(\n description=\"Enable all services that are in the services tag of this ec2 instance.\")\n parser.add_argument(\"-a\",\"--available\",\n help=\"The location of the available services.\")\n parser.add_argument(\"-e\",\"--enabled\",\n help=\"The location of the enabled services.\")\n\n migration_args = parser.add_argument_group(\"edxapp_migrations\",\n \"Args for running edxapp migration checks.\")\n migration_args.add_argument(\"--edxapp-code-dir\",\n help=\"Location of the edx-platform code.\")\n migration_args.add_argument(\"--edxapp-python\",\n help=\"Path to python to use for executing migration check.\")\n migration_args.add_argument(\"--edxapp-env\",\n help=\"Location of the edxapp environment file.\")\n\n xq_migration_args = parser.add_argument_group(\"xqueue_migrations\",\n \"Args for running xqueue migration checks.\")\n xq_migration_args.add_argument(\"--xqueue-code-dir\",\n help=\"Location of the xqueue code.\")\n xq_migration_args.add_argument(\"--xqueue-python\",\n help=\"Path to python to use for executing migration check.\")\n migration_args.add_argument(\"--xqueue-env\",\n help=\"Location of the xqueue environment file.\")\n\n ecom_migration_args = parser.add_argument_group(\"ecommerce_migrations\",\n \"Args for running ecommerce migration checks.\")\n ecom_migration_args.add_argument(\"--ecommerce-python\",\n help=\"Path to python to use for executing migration check.\")\n ecom_migration_args.add_argument(\"--ecommerce-env\",\n help=\"Location of the ecommerce environment file.\")\n ecom_migration_args.add_argument(\"--ecommerce-code-dir\",\n help=\"Location of the ecommerce code.\")\n\n credentials_migration_args = parser.add_argument_group(\"credentials_migrations\",\n \"Args for running credentials migration checks.\")\n credentials_migration_args.add_argument(\"--credentials-python\",\n help=\"Path to python to use for executing migration check.\")\n credentials_migration_args.add_argument(\"--credentials-env\",\n help=\"Location of the credentials environment file.\")\n 
credentials_migration_args.add_argument(\"--credentials-code-dir\",\n help=\"Location of the credentials code.\")\n\n discovery_migration_args = parser.add_argument_group(\"discovery_migrations\",\n \"Args for running discovery migration checks.\")\n discovery_migration_args.add_argument(\"--discovery-python\",\n help=\"Path to python to use for executing migration check.\")\n discovery_migration_args.add_argument(\"--discovery-env\",\n help=\"Location of the discovery environment file.\")\n discovery_migration_args.add_argument(\"--discovery-code-dir\",\n help=\"Location of the discovery code.\")\n\n insights_migration_args = parser.add_argument_group(\"insights_migrations\",\n \"Args for running insights migration checks.\")\n insights_migration_args.add_argument(\"--insights-python\",\n help=\"Path to python to use for executing migration check.\")\n insights_migration_args.add_argument(\"--insights-env\",\n help=\"Location of the insights environment file.\")\n insights_migration_args.add_argument(\"--insights-code-dir\",\n help=\"Location of the insights code.\")\n\n analyticsapi_migration_args = parser.add_argument_group(\"analytics_api_migrations\",\n \"Args for running analytics_api migration checks.\")\n analyticsapi_migration_args.add_argument(\"--analytics-api-python\",\n help=\"Path to python to use for executing migration check.\")\n analyticsapi_migration_args.add_argument(\"--analytics-api-env\",\n help=\"Location of the analytics_api environment file.\")\n analyticsapi_migration_args.add_argument(\"--analytics-api-code-dir\",\n help=\"Location of the analytics_api code.\")\n\n hipchat_args = parser.add_argument_group(\"hipchat\",\n \"Args for hipchat notification.\")\n hipchat_args.add_argument(\"-c\",\"--hipchat-api-key\",\n help=\"Hipchat token if you want to receive notifications via hipchat.\")\n hipchat_args.add_argument(\"-r\",\"--hipchat-room\",\n help=\"Room to send messages to.\")\n\n args = parser.parse_args()\n\n report = []\n prefix = None\n notify = None\n\n try:\n if args.hipchat_api_key:\n hc = hipchat.HipChat(token=args.hipchat_api_key)\n notify = lambda message: hc.message_room(room_id=args.hipchat_room,\n message_from=HIPCHAT_USER, message=message)\n except Exception as e:\n print(\"Failed to initialize hipchat, {}\".format(e))\n traceback.print_exc()\n\n instance_id = get_instance_metadata()['instance-id']\n prefix = instance_id\n\n ec2 = boto.ec2.connect_to_region(REGION)\n reservations = ec2.get_all_instances(instance_ids=[instance_id])\n instance = reservations[0].instances[0]\n if instance.instance_profile['arn'].endswith('/abbey'):\n print(\"Running an abbey build. 
Not starting any services.\")\n # Needs to exit with 1 instead of 0 to prevent\n # services from starting.\n exit(1)\n time_left = MAX_BACKOFF\n backoff = INITIAL_BACKOFF\n\n environment = None\n deployment = None\n play = None\n while time_left > 0:\n try:\n environment, deployment, play = edp_for_instance(instance_id)\n prefix = \"{environment}-{deployment}-{play}-{instance_id}\".format(\n environment=environment,\n deployment=deployment,\n play=play,\n instance_id=instance_id)\n break\n except Exception as e:\n print(\"Failed to get EDP for {}: {}\".format(instance_id, str(e)))\n # With the time limit being 2 minutes we will\n # try 5 times before giving up.\n time.sleep(backoff)\n time_left -= backoff\n backoff = backoff * 2\n\n if environment is None or deployment is None or play is None:\n msg = \"Unable to retrieve environment, deployment, or play tag.\"\n print(msg)\n if notify:\n notify(\"{} : {}\".format(prefix, msg))\n exit(0)\n\n #get the hostname of the sandbox\n hostname = socket.gethostname()\n\n try:\n #get the list of the volumes, that are attached to the instance\n volumes = ec2.get_all_volumes(filters={'attachment.instance-id': instance_id})\n\n for volume in volumes:\n volume.add_tags({\"hostname\": hostname,\n \"environment\": environment,\n \"deployment\": deployment,\n \"cluster\": play,\n \"instance-id\": instance_id,\n \"created\": volume.create_time })\n except Exception as e:\n msg = \"Failed to tag volumes associated with {}: {}\".format(instance_id, str(e))\n print(msg)\n if notify:\n notify(msg)\n\n try:\n for service in services_for_instance(instance_id):\n if service in MIGRATION_COMMANDS:\n services = {\n \"lms\": {'python': args.edxapp_python, 'env_file': args.edxapp_env, 'code_dir': args.edxapp_code_dir},\n \"cms\": {'python': args.edxapp_python, 'env_file': args.edxapp_env, 'code_dir': args.edxapp_code_dir},\n \"ecommerce\": {'python': args.ecommerce_python, 'env_file': args.ecommerce_env, 'code_dir': args.ecommerce_code_dir},\n \"credentials\": {'python': args.credentials_python, 'env_file': args.credentials_env, 'code_dir': args.credentials_code_dir},\n \"discovery\": {'python': args.discovery_python, 'env_file': args.discovery_env, 'code_dir': args.discovery_code_dir},\n \"insights\": {'python': args.insights_python, 'env_file': args.insights_env, 'code_dir': args.insights_code_dir},\n \"analytics_api\": {'python': args.analytics_api_python, 'env_file': args.analytics_api_env, 'code_dir': args.analytics_api_code_dir},\n \"xqueue\": {'python': args.xqueue_python, 'env_file': args.xqueue_env, 'code_dir': args.xqueue_code_dir},\n }\n\n if service in services and all(arg!=None for arg in services[service].values()) and service in MIGRATION_COMMANDS:\n serv_vars = services[service]\n\n cmd = MIGRATION_COMMANDS[service].format(**serv_vars)\n if os.path.exists(serv_vars['code_dir']):\n os.chdir(serv_vars['code_dir'])\n # Run migration check command.\n output = subprocess.check_output(cmd, shell=True, )\n if '[ ]' in output:\n raise Exception(\"Migrations have not been run for {}\".format(service))\n\n\n # Link to available service.\n available_file = os.path.join(args.available, \"{}.conf\".format(service))\n link_location = os.path.join(args.enabled, \"{}.conf\".format(service))\n if os.path.exists(available_file):\n subprocess.call(\"sudo -u supervisor ln -sf {} {}\".format(available_file, link_location), shell=True)\n report.append(\"Enabling service: {}\".format(service))\n else:\n raise Exception(\"No conf available for service: 
{}\".format(link_location))\n except AWSConnectionError as ae:\n msg = \"{}: ERROR : {}\".format(prefix, ae)\n if notify:\n notify(msg)\n notify(traceback.format_exc())\n raise ae\n except Exception as e:\n msg = \"{}: ERROR : {}\".format(prefix, e)\n print(msg)\n if notify:\n notify(msg)\n traceback.print_exc()\n raise e\n else:\n msg = \"{}: {}\".format(prefix, \" | \".join(report))\n print(msg)\n if notify:\n notify(msg)\n"},"license":{"kind":"string","value":"agpl-3.0"},"hash":{"kind":"number","value":-2843304026179480000,"string":"-2,843,304,026,179,480,000"},"line_mean":{"kind":"number","value":45.6407407407,"string":"45.640741"},"line_max":{"kind":"number","value":152,"string":"152"},"alpha_frac":{"kind":"number","value":0.6152624474,"string":"0.615262"},"autogenerated":{"kind":"bool","value":false,"string":"false"},"ratio":{"kind":"number","value":4.1823314513450685,"string":"4.182331"},"config_test":{"kind":"bool","value":false,"string":"false"},"has_no_keywords":{"kind":"bool","value":false,"string":"false"},"few_assignments":{"kind":"bool","value":false,"string":"false"}}},{"rowIdx":43666,"cells":{"repo_name":{"kind":"string","value":"cyanogen/uchroma"},"path":{"kind":"string","value":"uchroma/traits.py"},"copies":{"kind":"string","value":"1"},"size":{"kind":"string","value":"11759"},"content":{"kind":"string","value":"#\n# uchroma - Copyright (C) 2021 Stefanie Kondik\n#\n# This program is free software: you can redistribute it and/or modify it\n# under the terms of the GNU Lesser General Public License as published\n# by the Free Software Foundation, version 3.\n#\n# This program is distributed in the hope that it will be useful, but\n# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY\n# or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU Lesser General Public\n# License for more details.\n#\n\n# pylint: disable=protected-access, invalid-name, no-member\n\nimport enum\nimport importlib\nimport sys\n\nfrom argparse import ArgumentParser\nfrom typing import Iterable\n\nfrom traitlets import CaselessStrEnum, Container, Dict, Enum, Int, HasTraits, \\\n List, TraitType, Undefined, UseEnum\nfrom frozendict import frozendict\n\nfrom uchroma.color import to_color\nfrom uchroma.util import ArgsDict\n\n\n\nclass ColorTrait(TraitType):\n \"\"\"\n A traitlet which encapsulates a grapefruit.Color and performs\n type coercion as needed.\n \"\"\"\n info_text = \"a color\"\n allow_none = True\n default_value = 'black'\n\n def __init__(self, *args, **kwargs):\n super(ColorTrait, self).__init__(*args, **kwargs)\n\n def validate(self, obj, value):\n try:\n if value is not None:\n value = to_color(value)\n except:\n self.error(obj, value)\n return value\n\n\n\nclass ColorSchemeTrait(List):\n \"\"\"\n A list of ColorTraits which comprise a scheme\n \"\"\"\n info_text = 'a list of colors'\n\n def __init__(self, trait=ColorTrait(), default_value=(),\n minlen=0, maxlen=sys.maxsize, **kwargs):\n super(ColorSchemeTrait, self).__init__(trait=trait, default_value=default_value,\n minlen=minlen, maxlen=maxlen, **kwargs)\n\n\nclass ColorPresetTrait(UseEnum):\n \"\"\"\n A trait which represents a group of color schemes defined\n as a Python Enum.\n \"\"\"\n info_text = 'a predefined color scheme'\n\n def __init__(self, enum_class, default_value=None, **kwargs):\n super(ColorPresetTrait, self).__init__(enum_class, default_value=default_value, **kwargs)\n\n\n\nclass WriteOnceMixin(object):\n \"\"\"\n Mixin for traits which cannot be changed after an initial\n value has been set.\n \"\"\"\n write_once = True\n\n def validate(self, obj, value):\n if self.name not in obj._trait_values or \\\n obj._trait_values[self.name] == self.default_value:\n return super().validate(obj, value)\n\n self.error(obj, value)\n\n\nclass WriteOnceInt(WriteOnceMixin, Int):\n \"\"\"\n Subclass of Int which may only be written once\n \"\"\"\n pass\n\n\nclass FrozenDict(WriteOnceMixin, Dict):\n \"\"\"\n Subclass of Dict which converts the value to a frozendict on\n the first setting.\n \"\"\"\n def validate(self, obj, value):\n return frozendict(super().validate(obj, value))\n\n\nclass UseEnumCaseless(UseEnum):\n \"\"\"\n Subclass of UseEnum which allows selection of values using\n case insensitive strings\n \"\"\"\n\n def select_by_name(self, value, default=Undefined):\n if value.startswith(self.name_prefix):\n # -- SUPPORT SCOPED-NAMES, like: \"Color.red\" => \"red\"\n value = value.replace(self.name_prefix, \"\", 1)\n\n keys = [x.lower() for x in self.enum_class.__members__.keys()]\n idx = keys.index(value.lower())\n if idx < 0:\n return Undefined\n return self.enum_class[list(self.enum_class.__members__.keys())[idx]]\n\n\nclass WriteOnceUseEnumCaseless(WriteOnceMixin, UseEnumCaseless):\n \"\"\"\n Subclass of UseEnumCaseless which may only be written once.\n \"\"\"\n pass\n\n\nclass DefaultCaselessStrEnum(CaselessStrEnum):\n \"\"\"\n Extension of CaselessStrEnum which handles default values better\n \"\"\"\n def validate(self, obj, value):\n if self.default_value and (value is None or value == ''):\n value = self.default_value\n\n return super().validate(obj, value)\n\n\ndef is_trait_writable(trait: TraitType) -> bool:\n \"\"\"\n Test if a trait is writable\n\n :param trait: the trait to be tested\n :return: True if the trait is writable\n \"\"\"\n if 
trait.read_only:\n return False\n\n if hasattr(trait, 'write_once') and trait.write_once:\n return False\n\n return True\n\n\ndef trait_as_dict(trait: TraitType) -> dict:\n \"\"\"\n Convert a trait to a dict for sending over D-Bus or the like\n\n :param trait: the trait to be converted\n :return: dict representing this trait\n \"\"\"\n cls = trait.__class__\n tdict = {}\n\n for k, v in vars(trait).items():\n if k.startswith('__') or k == 'this_class':\n continue\n if hasattr(cls, k) and getattr(cls, k) == v:\n continue\n if isinstance(v, Iterable) and len(v) == 0:\n continue\n\n if k.startswith('_'):\n tdict[k[1:]] = v\n else:\n tdict[k] = v\n\n if isinstance(trait, UseEnum):\n cls = CaselessStrEnum\n tdict['values'] = tuple(trait.enum_class.__members__.keys())\n if 'enum_class' in tdict:\n del tdict['enum_class']\n\n for k, v in tdict.items():\n if isinstance(v, TraitType):\n tdict[k] = trait_as_dict(v)\n if isinstance(v, enum.Enum):\n tdict[k] = v.name\n if isinstance(v, type):\n tdict[k] = '%s.%s' % (v.__module__, v.__name__)\n\n tdict['__class__'] = (cls.__module__, cls.__name__)\n return tdict\n\n\ndef class_traits_as_dict(obj: HasTraits, values: dict=None) -> dict:\n \"\"\"\n Create a dict which represents all traits of the given object.\n This dict itself can be inspected in a generic API, or it\n may be converted back to a (stub) instance of HasTraits. This\n facilitates the sending of configurable object properties over\n an interface such as D-Bus.\n\n :param obj: an instance of HasTraits\n :param value: optional dict of trait values (pulled from obj by default)\n :return: dict representing all traits in obj\n \"\"\"\n cls_dt = {}\n if isinstance(obj, type) and hasattr(obj, 'class_traits'):\n traits = obj.class_traits()\n elif isinstance(obj, dict):\n traits = obj\n elif isinstance(obj, HasTraits):\n traits = obj.traits()\n values = obj._trait_values\n else:\n raise TypeError(\"Object does not support traits\")\n\n for k, v in traits.items():\n dt = trait_as_dict(v)\n if dt is None:\n continue\n if values is not None and k in values:\n dt['__value__'] = values[k]\n cls_dt[k] = dt\n return cls_dt\n\n\ndef dict_as_trait(obj: dict) -> TraitType:\n \"\"\"\n Create a trait from a dict (trait_as_dict).\n \"\"\"\n if '__class__' not in obj:\n raise ValueError(\"No module and class attribute present\")\n\n tobj = obj.copy()\n\n module_name, trait_class = tobj.pop('__class__')\n\n module = importlib.import_module(module_name)\n if not hasattr(module, trait_class):\n raise TypeError(\"Unknown class: %s\" % trait_class)\n cls = getattr(module, trait_class)\n\n if 'trait' in tobj:\n tobj['trait'] = dict_as_trait(tobj.pop('trait'))\n\n metadata = {}\n if 'metadata' in tobj:\n metadata.update(tobj.pop('metadata'))\n\n if issubclass(cls, Enum):\n trait = cls(tobj.pop('values'), **tobj)\n else:\n trait = cls(**tobj)\n\n for k in list(metadata.keys()):\n if k in ('name', 'default_args', 'default_kwargs'):\n setattr(trait, k, metadata.pop(k))\n\n trait.metadata = metadata\n\n return trait\n\n\ndef dict_as_class_traits(obj: dict) -> HasTraits:\n \"\"\"\n Convert a dict of unpacked traits to a HasTraits instance.\n Useful for remote parameter inspection and validation.\n\n :param obj: dict of unpacked traits\n :return: the stub HasTraits instance\n \"\"\"\n if not isinstance(obj, dict):\n raise TypeError(\"Object must be a dict (was: %s)\" % obj)\n\n traits = {}\n values = {}\n for k, v in obj.items():\n if '__value__' in v:\n values[k] = v.pop('__value__')\n\n trait = dict_as_trait(v)\n if trait is 
None:\n continue\n traits[k] = trait\n\n cls = HasTraits()\n cls.add_traits(**traits)\n\n for k, v in values.items():\n setattr(cls, k, v)\n\n return cls\n\n\ndef get_args_dict(obj: HasTraits, incl_all=False):\n \"\"\"\n Return a dict of user-configurable traits for an object\n\n :param obj: an instance of HasTraits\n :param incl_all: If all items should be included, regardless of RO status\n :return: dict of arguments\n \"\"\"\n argsdict = ArgsDict()\n for k in sorted(obj._trait_values.keys()):\n v = obj._trait_values[k]\n trait = obj.traits()[k]\n if incl_all or (not trait.get_metadata('hidden') and is_trait_writable(trait)):\n argsdict[k] = v\n return argsdict\n\n\ndef add_traits_to_argparse(obj: HasTraits, parser: ArgumentParser,\n prefix: str=None):\n \"\"\"\n Add all traits from the given object to the argparse context.\n\n :param obj: an instance of HasTraits\n :param parser: argparse parser\n :param prefix: string to prefix keys with\n \"\"\"\n for key, trait in obj.traits().items():\n if trait.get_metadata('config') is not True:\n continue\n\n argname = '--%s' % key\n if prefix is not None:\n argname = '--%s.%s' % (prefix, key)\n\n if isinstance(trait, Container):\n parser.add_argument(argname, nargs='+', help=trait.info_text)\n elif isinstance(trait, Enum):\n parser.add_argument(argname, type=str.lower,\n choices=[x.lower() for x in trait.values],\n help=trait.info_text)\n else:\n argtype = str\n if hasattr(trait, 'default_value'):\n argtype = type(trait.default_value)\n parser.add_argument(argname, type=argtype, help=trait.info_text)\n\n\ndef apply_from_argparse(args, traits=None, target: HasTraits=None) -> dict:\n \"\"\"\n Applies arguments added via add_traits_to_argparse to\n a target object which implements HasTraits. If a target\n is not known, a dict of traits may be passed instead.\n Will throw TraitError if validation fails.\n\n :param args: Parsed args from argparse\n :param traits: Dictionary of traits (optional)\n :param target: Target object (optional)\n :return: Dict of the arguments which actually changed\n \"\"\"\n # apply the traits to an empty object, which will run\n # the validators on the client\n if isinstance(traits, HasTraits):\n traits = traits.traits()\n\n traits = traits.copy()\n for k, v in traits.items():\n if not isinstance(v, TraitType):\n if isinstance(v, dict):\n k[v] = dict_as_trait(v)\n else:\n raise TypeError(\"A dict or trait object must be supplied\")\n\n if target is None:\n if traits is None:\n raise ValueError(\"Either traits or target must be specified\")\n target = HasTraits()\n target.add_traits(**traits)\n\n # determine what should actually be changed\n argkeys = [k for k, v in vars(args).items() if v is not None]\n intersect = set(target.traits().keys()).intersection(set(argkeys))\n\n # apply the argparse flags to the target object\n for key in intersect:\n if target.traits()[key].get_metadata('config') is not True:\n raise ValueError(\"Trait is not marked as configurable: %s\" % key)\n\n setattr(target, key, getattr(args, key))\n\n # if all validators passed, return a dict of the changed args\n changed = {}\n for key in intersect:\n changed[key] = target._trait_values[key]\n\n return 
changed\n"},"license":{"kind":"string","value":"lgpl-3.0"},"hash":{"kind":"number","value":468926082809482900,"string":"468,926,082,809,482,900"},"line_mean":{"kind":"number","value":28.6196473552,"string":"28.619647"},"line_max":{"kind":"number","value":97,"string":"97"},"alpha_frac":{"kind":"number","value":0.6151883664,"string":"0.615188"},"autogenerated":{"kind":"bool","value":false,"string":"false"},"ratio":{"kind":"number","value":3.944649446494465,"string":"3.944649"},"config_test":{"kind":"bool","value":false,"string":"false"},"has_no_keywords":{"kind":"bool","value":false,"string":"false"},"few_assignments":{"kind":"bool","value":false,"string":"false"}}},{"rowIdx":43667,"cells":{"repo_name":{"kind":"string","value":"cardmagic/PyAMF"},"path":{"kind":"string","value":"pyamf/adapters/_django_db_models_base.py"},"copies":{"kind":"string","value":"1"},"size":{"kind":"string","value":"8476"},"content":{"kind":"string","value":"# Copyright (c) 2007-2009 The PyAMF Project.\n# See LICENSE.txt for details.\n\n\"\"\"\n`django.db.models` adapter module.\n\n:see: `Django Project `_\n\n:since: 0.4.1\n\"\"\"\n\nfrom django.db.models.base import Model\nfrom django.db.models import fields\nfrom django.db.models.fields import related, files\n\nimport datetime\n\nimport pyamf\nfrom pyamf.util import imports\n\n\nclass DjangoReferenceCollection(dict):\n \"\"\"\n This helper class holds a dict of klass to pk/objects loaded from the\n underlying db.\n\n :since: 0.5\n \"\"\"\n\n def _getClass(self, klass):\n if klass not in self.keys():\n self[klass] = {}\n\n return self[klass]\n\n def getClassKey(self, klass, key):\n \"\"\"\n Return an instance based on klass/key.\n\n If an instance cannot be found then `KeyError` is raised.\n\n :param klass: The class of the instance.\n :param key: The primary_key of the instance.\n :return: The instance linked to the `klass`/`key`.\n :rtype: Instance of `klass`.\n \"\"\"\n d = self._getClass(klass)\n\n return d[key]\n\n def addClassKey(self, klass, key, obj):\n \"\"\"\n Adds an object to the collection, based on klass and key.\n\n :param klass: The class of the object.\n :param key: The datastore key of the object.\n :param obj: The loaded instance from the datastore.\n \"\"\"\n d = self._getClass(klass)\n\n d[key] = obj\n\n\nclass DjangoClassAlias(pyamf.ClassAlias):\n\n def getCustomProperties(self):\n self.fields = {}\n self.relations = {}\n self.columns = []\n\n self.meta = self.klass._meta\n\n for name in self.meta.get_all_field_names():\n x = self.meta.get_field_by_name(name)[0]\n\n if isinstance(x, files.FileField):\n self.readonly_attrs.update([name])\n\n if isinstance(x, related.RelatedObject):\n continue\n\n if not isinstance(x, related.ForeignKey):\n self.fields[name] = x\n else:\n self.relations[name] = x\n\n for k, v in self.klass.__dict__.iteritems():\n if isinstance(v, related.ReverseManyRelatedObjectsDescriptor):\n self.fields[k] = v.field\n\n parent_fields = []\n\n for field in self.meta.parents.values():\n parent_fields.append(field.attname)\n del self.relations[field.name]\n\n self.exclude_attrs.update(parent_fields)\n\n props = self.fields.keys()\n\n self.encodable_properties.update(props)\n self.decodable_properties.update(props)\n\n def _compile_base_class(self, klass):\n if klass is Model:\n return\n\n pyamf.ClassAlias._compile_base_class(self, klass)\n\n def _encodeValue(self, field, value):\n if value is fields.NOT_PROVIDED:\n return pyamf.Undefined\n\n if value is None:\n return value\n\n # deal with dates ..\n if isinstance(field, 
fields.DateTimeField):\n return value\n elif isinstance(field, fields.DateField):\n return datetime.datetime(value.year, value.month, value.day, 0, 0, 0)\n elif isinstance(field, fields.TimeField):\n return datetime.datetime(1970, 1, 1,\n value.hour, value.minute, value.second, value.microsecond)\n elif isinstance(value, files.FieldFile):\n return value.name\n\n return value\n\n def _decodeValue(self, field, value):\n if value is pyamf.Undefined:\n return fields.NOT_PROVIDED\n\n if isinstance(field, fields.AutoField) and value == 0:\n return None\n elif isinstance(field, fields.DateTimeField):\n # deal with dates\n return value\n elif isinstance(field, fields.DateField):\n if not value:\n return None\n\n return datetime.date(value.year, value.month, value.day)\n elif isinstance(field, fields.TimeField):\n if not value:\n return None\n\n return datetime.time(value.hour, value.minute, value.second, value.microsecond)\n\n return value\n\n def getEncodableAttributes(self, obj, **kwargs):\n attrs = pyamf.ClassAlias.getEncodableAttributes(self, obj, **kwargs)\n\n if not attrs:\n attrs = {}\n\n for name, prop in self.fields.iteritems():\n if name not in attrs.keys():\n continue\n\n if isinstance(prop, related.ManyToManyField):\n attrs[name] = [x for x in getattr(obj, name).all()]\n else:\n attrs[name] = self._encodeValue(prop, getattr(obj, name))\n\n keys = attrs.keys()\n\n for key in keys:\n if key.startswith('_'):\n del attrs[key]\n\n for name, relation in self.relations.iteritems():\n if '_%s_cache' % name in obj.__dict__:\n attrs[name] = getattr(obj, name)\n\n del attrs[relation.column]\n\n if not attrs:\n attrs = None\n\n return attrs\n\n def getDecodableAttributes(self, obj, attrs, **kwargs):\n attrs = pyamf.ClassAlias.getDecodableAttributes(self, obj, attrs, **kwargs)\n\n for n in self.decodable_properties:\n if n in self.relations:\n continue\n\n f = self.fields[n]\n\n attrs[f.attname] = self._decodeValue(f, attrs[n])\n\n # primary key of django object must always be set first for\n # relationships with other model objects to work properly\n # and dict.iteritems() does not guarantee order\n #\n # django also forces the use only one attribute as primary key, so\n # our obj._meta.pk.attname check is sufficient)\n try:\n setattr(obj, obj._meta.pk.attname, attrs[obj._meta.pk.attname])\n del attrs[obj._meta.pk.attname]\n except KeyError:\n pass\n\n return attrs\n\n\ndef getDjangoObjects(context):\n \"\"\"\n Returns a reference to the `django_objects` on the context. If it doesn't\n exist then it is created.\n\n :param context: The context to load the `django_objects` index from.\n :type context: Instance of :class:`pyamf.BaseContext`\n :return: The `django_objects` index reference.\n :rtype: Instance of :class:`DjangoReferenceCollection`\n :since: 0.5\n \"\"\"\n if not hasattr(context, 'django_objects'):\n context.django_objects = DjangoReferenceCollection()\n\n return context.django_objects\n\n\ndef writeDjangoObject(self, obj, *args, **kwargs):\n \"\"\"\n The Django ORM creates new instances of objects for each db request.\n This is a problem for PyAMF as it uses the id(obj) of the object to do\n reference checking.\n\n We could just ignore the problem, but the objects are conceptually the\n same so the effort should be made to attempt to resolve references for a\n given object graph.\n\n We create a new map on the encoder context object which contains a dict of\n C{object.__class__: {key1: object1, key2: object2, .., keyn: objectn}}. 
<script type="text/javascript" src="/static/Brython3.1.1-20150328-091302/brython.js"></script>
<script src="/static/Cango2D.js" type="text/javascript"></script>
<script src="/static/gearUtils-04.js" type="text/javascript"></script>
</head>
<!-- start brython() -->
<body onload="brython()">
<form method=POST action=do2Dgear>
Number of teeth: <input type=text name=N><br />
Module: <input type=text name=M><br />
Pressure angle: <input type=text name=P><br />
<input type=submit value=send>
</form>
</body>
</html>
'''
        return outstring
    #@+node:2015.20150331094055.1733: *3* threeDgear
    @cherrypy.expose
    # N is the number of teeth, M the module, P the pressure angle
    def threeDgear(self, N=20, M=5, P=15):
        outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<!-- load brython.js -->
<script type="text/javascript" src="/static/Brython3.1.1-20150328-091302/brython.js"></script>
<script src="/static/Cango2D.js" type="text/javascript"></script>
<script src="/static/gearUtils-04.js" type="text/javascript"></script>
</head>
<!-- start brython() -->
<body onload="brython()">
<form method=POST action=do3Dgear>
Number of teeth: <input type=text name=N><br />
Module: <input type=text name=M><br />
Pressure angle: <input type=text name=P><br />
<input type=submit value=send>
</form>
</body>
</html>
'''
        return outstring
    #@+node:2015.20150330144929.1762: *3* do2Dgear
    @cherrypy.expose
    # N is the number of teeth, M the module, P the pressure angle
    def do2Dgear(self, N=20, M=5, P=15):
        outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<!-- load brython.js -->
<script type="text/javascript" src="/static/Brython3.1.1-20150328-091302/brython.js"></script>
<script src="/static/Cango2D.js" type="text/javascript"></script>
<script src="/static/gearUtils-04.js" type="text/javascript"></script>
</head>
<!-- start brython() -->
<body onload="brython()">
<!-- canvas drawing program below -->
<script type="text/python">
# import document from browser
from browser import document
import math

# draw on the canvas named plotarea
canvas = document["plotarea"]
ctx = canvas.getContext("2d")

# draw a straight line in red
ctx.beginPath()
ctx.lineWidth = 3
'''
        outstring += '''
ctx.moveTo(''' + str(N) + "," + str(M) + ")"
        outstring += '''
ctx.lineTo(0, 500)
ctx.strokeStyle = "red"
ctx.stroke()

# draw another straight line in blue
ctx.beginPath()
ctx.lineWidth = 3
ctx.moveTo(0, 0)
ctx.lineTo(500, 0)
ctx.strokeStyle = "blue"
ctx.stroke()

# draw another straight line in green
ctx.beginPath()
ctx.lineWidth = 3
ctx.moveTo(0, 0)
ctx.lineTo(500, 500)
ctx.strokeStyle = "green"
ctx.stroke()

# draw a circle in black
ctx.beginPath()
ctx.lineWidth = 3
ctx.strokeStyle = "black"
ctx.arc(250,250,50,0,2*math.pi)
ctx.stroke()
</script>
<canvas id="plotarea" width="800" height="600"></canvas>
</body>
</html>
'''
        return outstring
    #@+node:2015.20150331094055.1735: *3* do3Dgear
    @cherrypy.expose
    # N is the number of teeth, M the module, P the pressure angle
    def do3Dgear(self, N=20, M=5, P=15):
        outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<!-- load brython.js -->
<script type="text/javascript" src="/static/Brython3.1.1-20150328-091302/brython.js"></script>
<script src="/static/Cango2D.js" type="text/javascript"></script>
<script src="/static/gearUtils-04.js" type="text/javascript"></script>
</head>
<!-- start brython() -->
<body onload="brython()">
<!-- canvas drawing program below -->
<script type="text/python">
# import document from browser
from browser import document
import math

# draw on the canvas named plotarea
canvas = document["plotarea"]
ctx = canvas.getContext("2d")

# draw a straight line in red
ctx.beginPath()
ctx.lineWidth = 3
'''
        outstring += '''
ctx.moveTo(''' + str(N) + "," + str(M) + ")"
        outstring += '''
ctx.lineTo(0, 500)
ctx.strokeStyle = "red"
ctx.stroke()

# draw another straight line in blue
ctx.beginPath()
ctx.lineWidth = 3
ctx.moveTo(0, 0)
ctx.lineTo(500, 0)
ctx.strokeStyle = "blue"
ctx.stroke()

# draw another straight line in green
ctx.beginPath()
ctx.lineWidth = 3
ctx.moveTo(0, 0)
ctx.lineTo(500, 500)
ctx.strokeStyle = "green"
ctx.stroke()

# draw a circle in black
ctx.beginPath()
ctx.lineWidth = 3
ctx.strokeStyle = "black"
ctx.arc(250,250,50,0,2*math.pi)
ctx.stroke()
</script>
<canvas id="plotarea" width="800" height="600"></canvas>
</body>
</html>
'''
        return outstring
    #@+node:2015.20150330144929.1765: *3* mygeartest
    @cherrypy.expose
    # N is the number of teeth, M the module, P the pressure angle
    def mygeartest(self, N=20, M=5, P=15):
        outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<!-- load brython.js -->
<script type="text/javascript" src="/static/Brython3.1.1-20150328-091302/brython.js"></script>
<script src="/static/Cango2D.js" type="text/javascript"></script>
<script src="/static/gearUtils-04.js" type="text/javascript"></script>
</head>
<!-- start brython() -->
<body onload="brython()">
<!-- canvas drawing program below -->
<script type="text/python">
# import document from browser
from browser import document
from math import *

# prepare to draw on the canvas with id="plotarea"
canvas = document["plotarea"]
ctx = canvas.getContext("2d")

def create_line(x1, y1, x2, y2, width=3, fill="red"):
    ctx.beginPath()
    ctx.lineWidth = width
    ctx.moveTo(x1, y1)
    ctx.lineTo(x2, y2)
    ctx.strokeStyle = fill
    ctx.stroke()

# after importing the math functions, pi is the circle constant
# deg is the degrees-to-radians conversion factor
deg = pi/180.
#
# below: the spur-gear drawing, followed by the main canvas drawing
#
# define a drawing function for a spur gear
# midx is the x coordinate of the gear center
# midy is the y coordinate of the gear center
# rp is the pitch-circle radius, n the number of teeth
def 齒輪(midx, midy, rp, n, 顏色):
    # make the angle conversion factor global
    global deg
    # the gear involute is drawn as 15 line segments
    imax = 15
    # on the given canvas, draw a line from the center to the top of the pitch circle on the y axis
    create_line(midx, midy, midx, midy-rp)
    # draw the rp circle (the circle-drawing helper is not defined yet)
    #create_oval(midx-rp, midy-rp, midx+rp, midy+rp, width=2)
    # a is the module (metric tooth size): pitch diameter divided by the number of teeth
    # the module also equals the addendum
    a=2*rp/n
    # d is the dedendum, 1.157 or 1.25 times the module; 1.25 is used here
    d=2.5*rp/n
    # ra is the outside radius of the gear
    ra=rp+a
    print("ra:", ra)
    # draw the ra circle (helper not defined yet)
    #create_oval(midx-ra, midy-ra, midx+ra, midy+ra, width=1)
    # rb is the base-circle radius
    # the base circle is the reference circle from which the involute teeth grow
    rb=rp*cos(20*deg)
    print("rp:", rp)
    print("rb:", rb)
    # draw the rb (base) circle (helper not defined yet)
    #create_oval(midx-rb, midy-rb, midx+rb, midy+rb, width=1)
    # rd is the dedendum-circle radius
    rd=rp-d
    # when rd is greater than rb
    print("rd:", rd)
    # draw the rd (dedendum) circle (helper not defined yet)
    #create_oval(midx-rd, midy-rd, midx+rd, midy+rd, width=1)
    # dr is the radius increment per segment when the span from base circle
    # to addendum circle is split into imax segments
    # the arc is split into imax segments to draw the involute
    dr=(ra-rb)/imax
    # tan(20*deg)-20*deg is the involute function
    sigma=pi/(2*n)+tan(20*deg)-20*deg
    for j in range(n):
        ang=-2.*j*pi/n+sigma
        ang2=2.*j*pi/n+sigma
        lxd=midx+rd*sin(ang2-2.*pi/n)
        lyd=midy-rd*cos(ang2-2.*pi/n)
        #for(i=0;i<=imax;i++):
        for i in range(imax+1):
            r=rb+i*dr
            theta=sqrt((r*r)/(rb*rb)-1.)
            alpha=theta-atan(theta)
            xpt=r*sin(alpha-ang)
            ypt=r*cos(alpha-ang)
            xd=rd*sin(-ang)
            yd=rd*cos(-ang)
            # when i == 0, the line starts from the point on the dedendum circle
            if(i==0):
                last_x = midx+xd
                last_y = midy-yd
            # starting from the left dedendum circle: except for the first point (xd,yd)
            # on the dedendum circle, the (xpt,ypt) points are the segment points on the involute
            create_line((midx+xpt),(midy-ypt),(last_x),(last_y),fill=顏色)
            # the last point lies on the addendum circle
            if(i==imax):
                lfx=midx+xpt
                lfy=midy-ypt
            last_x = midx+xpt
            last_y = midy-ypt
        # the line from last end of dedendum point to the recent
        # end of dedendum point
        # lxd is the left-side x coordinate on the dedendum circle, lyd the y coordinate
        # the following straight line approximates the arc on the dedendum circle
        create_line((lxd),(lyd),(midx+xd),(midy-yd),fill=顏色)
        #for(i=0;i<=imax;i++):
        for i in range(imax+1):
            r=rb+i*dr
            theta=sqrt((r*r)/(rb*rb)-1.)
            alpha=theta-atan(theta)
            xpt=r*sin(ang2-alpha)
            ypt=r*cos(ang2-alpha)
            xd=rd*sin(ang2)
            yd=rd*cos(ang2)
            # when i == 0, the line starts from the point on the dedendum circle
            if(i==0):
                last_x = midx+xd
                last_y = midy-yd
            # starting from the right dedendum circle: except for the first point (xd,yd)
            # on the dedendum circle, the (xpt,ypt) points are the segment points on the involute
            create_line((midx+xpt),(midy-ypt),(last_x),(last_y),fill=顏色)
            # the last point lies on the addendum circle
            if(i==imax):
                rfx=midx+xpt
                rfy=midy-ypt
            last_x = midx+xpt
            last_y = midy-ypt
        # lfx is the left-side x coordinate on the addendum circle, lfy the y coordinate
        # the following straight line approximates the arc on the addendum circle
        create_line(lfx,lfy,rfx,rfy,fill=顏色)

齒輪(400,400,300,41,"blue")
</script>
<canvas id="plotarea" width="800" height="800"></canvas>
</body>
</html>
'''
        return outstring
    #@+node:amd.20150415215023.1: *3* mygeartest2
    @cherrypy.expose
    # N is the number of teeth, M the module, P the pressure angle
    def mygeartest2(self, N=20, M=5, P=15):
        outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<!-- load brython.js -->
<script type="text/javascript" src="/static/Brython3.1.1-20150328-091302/brython.js"></script>
<script src="/static/Cango2D.js" type="text/javascript"></script>
<script src="/static/gearUtils-04.js" type="text/javascript"></script>
</head>
<!-- start brython() -->
<body onload="brython()">
<!-- canvas drawing program below -->
<script type="text/python">
# import document from browser
from browser import document
from math import *
# note: this imports the spur.py file located under Lib/site-packages
import spur

# prepare to draw on the canvas with id="plotarea"
canvas = document["plotarea"]
ctx = canvas.getContext("2d")

# the drawing below uses spur.py; the collaborative-design computation that follows
# must carry out the design calculation and drawing according to user requirements,
# with the work split among team members who build part-drawing modules similar to spur.py
# midx, midy: gear center; rp: pitch radius; n: tooth count; pa: pressure angle; color: line color
# Gear(midx, midy, rp, n=20, pa=20, color="black"):
# the module sets tooth size; meshing gears must share the same module and pressure angle
# pressure angle pa in degrees
pa = 20
# m is the module
m = 20
# number of teeth of gear 1
n_g1 = 17
# number of teeth of gear 2
n_g2 = 11
# number of teeth of gear 3
n_g3 = 13
# compute the pitch radii of the gears
rp_g1 = m*n_g1/2
rp_g2 = m*n_g2/2
rp_g3 = m*n_g3/2
# center coordinates for drawing gear 1
x_g1 = 400
y_g1 = 400
# gear 2 center; the gears are laid out horizontally, so all centers share the same y
x_g2 = x_g1 + rp_g1 + rp_g2
y_g2 = y_g1
# gear 3 center
x_g3 = x_g1 + rp_g1 + 2*rp_g2 + rp_g3
y_g3 = y_g1
# rotate gear 1 clockwise by 90 degrees
# use ctx.save() and ctx.restore() so each gear is rotated and drawn in its own relative coordinates
ctx.save()
# translate to the origin of second gear
ctx.translate(x_g1, y_g1)
# rotate to engage
ctx.rotate(pi/2)
# put it back
ctx.translate(-x_g1, -y_g1)
spur.Spur(ctx).Gear(x_g1, y_g1, rp_g1, n_g1, pa, "blue")
ctx.restore()
# rotate gear 2 counterclockwise by 90 degrees, then one extra tooth, so it meshes with gear 1
ctx.save()
# translate to the origin of second gear
ctx.translate(x_g2, y_g2)
# rotate to engage
ctx.rotate(-pi/2-pi/n_g2)
# put it back
ctx.translate(-x_g2, -y_g2)
spur.Spur(ctx).Gear(x_g2, y_g2, rp_g2, n_g2, pa, "black")
ctx.restore()
# rotate gear 3 counterclockwise by 90 degrees, wind back by the positioning angle
# driven by gear 2, then turn one more tooth counterclockwise so it meshes with gear 2
ctx.save()
# translate to the origin of second gear
ctx.translate(x_g3, y_g3)
# rotate to engage
# pi+pi/n_g2 is the angle gear 2 has turned away from its datum line after the clockwise
# 90-degree setup; converting it to gear 3's rotation requires scaling by the tooth-count
# ratio: if gear 2 is larger, gear 3 turns faster
# the first -pi/2 turns gear 3's originally vertical datum line 90 degrees counterclockwise
# -pi/n_g3 is the extra one-tooth counterclockwise turn needed after gear 3's datum line
# coincides with gear 2's, so that the teeth mesh
# (pi+pi/n_g2)*n_g2/n_g3: gear 2's datum line was set up by a clockwise 90-degree turn,
# but to mesh with gear 1 it has moved a further 180 degrees plus one gear-2 tooth away
# from that line; to position gear 3 this correction must be converted into gear 3's
# angle via the speed ratio of gears 2 and 3, hence the factor n_g2/n_g3
ctx.rotate(-pi/2-pi/n_g3+(pi+pi/n_g2)*n_g2/n_g3)
# put it back
ctx.translate(-x_g3, -y_g3)
spur.Spur(ctx).Gear(x_g3, y_g3, rp_g3, n_g3, pa, "red")
ctx.restore()
# following the meshing-angle computation for these three spur gears, the angles of the
# remaining drive gears follow analogously, completing a six-gear meshing drawing
</script>
<canvas id="plotarea" width="1200" height="1200"></canvas>
</body>
</html>
'''
        return outstring
    #@+node:2015.20150331094055.1737: *3* my3Dgeartest
    @cherrypy.expose
    # N is the number of teeth, M the module, P the pressure angle
    def my3Dgeartest(self, N=20, M=5, P=15):
        outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<!-- load brython.js -->
<script type="text/javascript" src="/static/Brython3.1.1-20150328-091302/brython.js"></script>
<script src="/static/Cango2D.js" type="text/javascript"></script>
<script src="/static/gearUtils-04.js" type="text/javascript"></script>
</head>
<!-- start brython() -->
<body onload="brython()">
<!-- canvas drawing program below -->
<script type="text/python">
# import document from browser
from browser import document
from math import *

# prepare to draw on the canvas with id="plotarea"
canvas = document["plotarea"]
ctx = canvas.getContext("2d")

def create_line(x1, y1, x2, y2, width=3, fill="red"):
    ctx.beginPath()
    ctx.lineWidth = width
    ctx.moveTo(x1, y1)
    ctx.lineTo(x2, y2)
    ctx.strokeStyle = fill
    ctx.stroke()

# after importing the math functions, pi is the circle constant
# deg is the degrees-to-radians conversion factor
deg = pi/180.
#
# below: the spur-gear drawing, followed by the main canvas drawing
#
# define a drawing function for a spur gear
# midx is the x coordinate of the gear center
# midy is the y coordinate of the gear center
# rp is the pitch-circle radius, n the number of teeth
def gear(midx, midy, rp, n, 顏色):
    # make the angle conversion factor global
    global deg
    # the gear involute is drawn as 15 line segments
    imax = 15
    # on the given canvas, draw a line from the center to the top of the pitch circle on the y axis
    create_line(midx, midy, midx, midy-rp)
    # draw the rp circle (the circle-drawing helper is not defined yet)
    #create_oval(midx-rp, midy-rp, midx+rp, midy+rp, width=2)
    # a is the module (metric tooth size): pitch diameter divided by the number of teeth
    # the module also equals the addendum
    a=2*rp/n
    # d is the dedendum, 1.157 or 1.25 times the module; 1.25 is used here
    d=2.5*rp/n
    # ra is the outside radius of the gear
    ra=rp+a
    print("ra:", ra)
    # draw the ra circle (helper not defined yet)
    #create_oval(midx-ra, midy-ra, midx+ra, midy+ra, width=1)
    # rb is the base-circle radius
    # the base circle is the reference circle from which the involute teeth grow
    rb=rp*cos(20*deg)
    print("rp:", rp)
    print("rb:", rb)
    # draw the rb (base) circle (helper not defined yet)
    #create_oval(midx-rb, midy-rb, midx+rb, midy+rb, width=1)
    # rd is the dedendum-circle radius
    rd=rp-d
    # when rd is greater than rb
    print("rd:", rd)
    # draw the rd (dedendum) circle (helper not defined yet)
    #create_oval(midx-rd, midy-rd, midx+rd, midy+rd, width=1)
    # dr is the radius increment per segment when the span from base circle
    # to addendum circle is split into imax segments
    # the arc is split into imax segments to draw the involute
    dr=(ra-rb)/imax
    # tan(20*deg)-20*deg is the involute function
    sigma=pi/(2*n)+tan(20*deg)-20*deg
    for j in range(n):
        ang=-2.*j*pi/n+sigma
        ang2=2.*j*pi/n+sigma
        lxd=midx+rd*sin(ang2-2.*pi/n)
        lyd=midy-rd*cos(ang2-2.*pi/n)
        #for(i=0;i<=imax;i++):
        for i in range(imax+1):
            r=rb+i*dr
            theta=sqrt((r*r)/(rb*rb)-1.)
            alpha=theta-atan(theta)
            xpt=r*sin(alpha-ang)
            ypt=r*cos(alpha-ang)
            xd=rd*sin(-ang)
            yd=rd*cos(-ang)
            # when i == 0, the line starts from the point on the dedendum circle
            if(i==0):
                last_x = midx+xd
                last_y = midy-yd
            # starting from the left dedendum circle: except for the first point (xd,yd)
            # on the dedendum circle, the (xpt,ypt) points are the segment points on the involute
            create_line((midx+xpt),(midy-ypt),(last_x),(last_y),fill=顏色)
            # the last point lies on the addendum circle
            if(i==imax):
                lfx=midx+xpt
                lfy=midy-ypt
            last_x = midx+xpt
            last_y = midy-ypt
        # the line from last end of dedendum point to the recent
        # end of dedendum point
        # lxd is the left-side x coordinate on the dedendum circle, lyd the y coordinate
        # the following straight line approximates the arc on the dedendum circle
        create_line((lxd),(lyd),(midx+xd),(midy-yd),fill=顏色)
        #for(i=0;i<=imax;i++):
        for i in range(imax+1):
            r=rb+i*dr
            theta=sqrt((r*r)/(rb*rb)-1.)
            alpha=theta-atan(theta)
            xpt=r*sin(ang2-alpha)
            ypt=r*cos(ang2-alpha)
            xd=rd*sin(ang2)
            yd=rd*cos(ang2)
            # when i == 0, the line starts from the point on the dedendum circle
            if(i==0):
                last_x = midx+xd
                last_y = midy-yd
            # starting from the right dedendum circle: except for the first point (xd,yd)
            # on the dedendum circle, the (xpt,ypt) points are the segment points on the involute
            create_line((midx+xpt),(midy-ypt),(last_x),(last_y),fill=顏色)
            # the last point lies on the addendum circle
            if(i==imax):
                rfx=midx+xpt
                rfy=midy-ypt
            last_x = midx+xpt
            last_y = midy-ypt
        # lfx is the left-side x coordinate on the addendum circle, lfy the y coordinate
        # the following straight line approximates the arc on the addendum circle
        create_line(lfx,lfy,rfx,rfy,fill=顏色)

gear(400,400,300,41,"blue")
</script>
<canvas id="plotarea" width="800" height="800"></canvas>
</body>
</html>
'''
        return outstring
    #@+node:2014fall.20141215194146.1793: *3* doCheck
    @cherrypy.expose
    def doCheck(self, guess=None):
        # if the user invokes doCheck directly, redirect back to the root method
        if guess is None:
            raise cherrypy.HTTPRedirect("/")
        # fetch the stored answer from the session, handling the case where doCheck
        # is executed directly and no session value can be read
        try:
            theanswer = int(cherrypy.session.get('answer'))
        except:
            raise cherrypy.HTTPRedirect("/")
        # the guess obtained from the form arrives as a string
        try:
            theguess = int(guess)
        except:
            return "error " + self.guessform()
        # each time doCheck runs, increment the count once
        cherrypy.session['count'] += 1
        # compare the answer with the guessed number
        if theanswer < theguess:
            return "big " + self.guessform()
        elif theanswer > theguess:
            return "small " + self.guessform()
        else:
            # guessed right; fetch the accumulated number of guesses from the session
            thecount = cherrypy.session.get('count')
            return "exact: <a href=''>guess again</a>"
    #@+node:2014fall.20141215194146.1789: *3* guessform
    def guessform(self):
        # print the HTML form for the user to fill in
        outstring = str(cherrypy.session.get('answer')) + "/" + str(cherrypy.session.get('count')) + '''<form method=POST action=doCheck>
Please enter the integer you are guessing: <input type=text name=guess><br />
<input type=submit value=send>
</form>'''
        return outstring
    #@-others
#@-others

################# (4) program startup section

# configure static directories and files relative to the program's directory
application_conf = {'/static':{
        'tools.staticdir.on': True,
        # a static directory must be created manually under the run directory
        'tools.staticdir.dir': _curdir+"/static"},
    '/downloads':{
        'tools.staticdir.on': True,
        'tools.staticdir.dir': data_dir+"/downloads"},
    '/images':{
        'tools.staticdir.on': True,
        'tools.staticdir.dir': data_dir+"/images"}
    }

root = Hello()
root.gear = gear.Gear()

if 'OPENSHIFT_REPO_DIR' in os.environ.keys():
    # running on OpenShift
    application = cherrypy.Application(root, config=application_conf)
else:
    # running locally
    cherrypy.quickstart(root, config=application_conf)
#@-leo
license: gpl-3.0
hash: 421274124816849400
line_mean: 29.535904
line_max: 137
alpha_frac: 0.554152
autogenerated: false
ratio: 2.049902
config_test: false
has_no_keywords: false
few_assignments: false
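The drawing handlers in the record above all reduce to the same involute construction: sample the radius r from the base circle rb out to the addendum circle ra and unroll the involute function theta - atan(theta). A standalone sketch of just that math, with no canvas (the helper name is mine, not part of the original app):

from math import atan, cos, pi, sin, sqrt

def involute_points(rp, n, pa_deg=20.0, imax=15):
    """Sample one involute flank of a spur-gear tooth.
    rp: pitch radius, n: tooth count, pa_deg: pressure angle in degrees."""
    deg = pi/180.0
    a = 2.0*rp/n                  # addendum equals the module
    ra = rp + a                   # addendum-circle radius
    rb = rp*cos(pa_deg*deg)       # base-circle radius
    dr = (ra - rb)/imax           # radius step per segment
    pts = []
    for i in range(imax + 1):
        r = rb + i*dr
        theta = sqrt(r*r/(rb*rb) - 1.0)
        alpha = theta - atan(theta)    # involute function inv(theta)
        pts.append((r*sin(alpha), r*cos(alpha)))
    return pts

print(involute_points(300, 41)[:3])   # first points, near the base circle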
repo_name: NeerajM999/recap-python
path: LearnPython/data_structures/binary_tree.py
copies: 1
size: 1761
content:
class Node:
    def __init__(self, value):
        self.value = value
        self.left = None
        self.right = None


class BinaryTree(object):
    def __init__(self, root_val):
        self.root = Node(root_val)

    def preorder_traversal(self, start, traversal):
        """ Root -> left -> right """
        if start:
            traversal += (str(start.value) + "-")
            traversal = self.preorder_traversal(start.left, traversal)
            traversal = self.preorder_traversal(start.right, traversal)
        return traversal

    def inorder_traversal(self, start, traversal):
        """ left -> root -> right """
        if start:
            traversal = self.inorder_traversal(start.left, traversal)
            traversal += (str(start.value) + "-")
            traversal = self.inorder_traversal(start.right, traversal)
        return traversal

    def postorder_traversal(self, start, traversal):
        """ left -> right -> root """
        if start:
            traversal = self.postorder_traversal(start.left, traversal)
            traversal = self.postorder_traversal(start.right, traversal)
            traversal += (str(start.value) + "-")
        return traversal


if __name__ == "__main__":
    """
            1
           / \
          2   3
         / \ / \
        4  5 6  7
    """
    tree = BinaryTree(1)
    tree.root.left = Node(2)
    tree.root.right = Node(3)
    tree.root.left.left = Node(4)
    tree.root.left.right = Node(5)
    tree.root.right.left = Node(6)
    tree.root.right.right = Node(7)

    print("preorder-traversal: ", tree.preorder_traversal(tree.root, ""))
    print("inorder-traversal: ", tree.inorder_traversal(tree.root, ""))
    print("postorder-traversal: ", tree.postorder_traversal(tree.root, ""))
license: gpl-3.0
hash: 1749836215305637600
line_mean: 27.885246
line_max: 75
alpha_frac: 0.571266
autogenerated: false
ratio: 3.6841
config_test: false
has_no_keywords: false
few_assignments: false
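For the seven-node tree built in the __main__ block above, the three traversals produce the following strings (a worked example; the trailing dash comes from how the code concatenates values):

# preorder  (root, left, right):  "1-2-4-5-3-6-7-"
# inorder   (left, root, right):  "4-2-5-1-6-3-7-"
# postorder (left, right, root):  "4-5-2-6-7-3-1-"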
repo_name: kret0s/gnuhealth-live
path: tryton/server/trytond-3.8.3/trytond/model/fields/one2one.py
copies: 1
size: 2080
content:
# This file is part of Tryton.  The COPYRIGHT file at the top level of
# this repository contains the full copyright notices and license terms.
from types import NoneType

from trytond.model.fields.field import Field
from trytond.model.fields.many2many import Many2Many
from trytond.pool import Pool


class One2One(Many2Many):
    '''
    Define one2one field (``int``).
    '''
    _type = 'one2one'

    def get(self, ids, model, name, values=None):
        '''
        Return target record.

        :param ids: a list of ids
        :param model: a string with the name of the model
        :param name: a string with the name of the field
        :param values: a dictionary with the read values
        :return: a dictionary with ids as key and target id as value
        '''
        res = super(One2One, self).get(ids, model, name, values=values)
        for i, vals in res.iteritems():
            res[i] = vals[0] if vals else None
        return res

    def set(self, Model, name, ids, value, *args):
        '''
        Set the values.
        '''
        pool = Pool()
        Relation = pool.get(self.relation_name)
        to_delete = []
        to_create = []
        args = iter((ids, value) + args)
        for ids, value in zip(args, args):
            relations = Relation.search([
                    (self.origin, 'in', ids),
                    ])
            to_delete.extend(relations)
            if value:
                for record_id in ids:
                    to_create.append({
                            self.origin: record_id,
                            self.target: value,
                            })
        if to_delete:
            Relation.delete(to_delete)
        if to_create:
            Relation.create(to_create)

    def __set__(self, inst, value):
        Target = self.get_target()
        if isinstance(value, dict):
            value = Target(**value)
        elif isinstance(value, (int, long)):
            value = Target(value)
        assert isinstance(value, (Target, NoneType))
        Field.__set__(self, inst, value)
license: gpl-3.0
hash: 4749240343853297000
line_mean: 32.015873
line_max: 72
alpha_frac: 0.546635
autogenerated: false
ratio: 4.16
config_test: false
has_no_keywords: false
few_assignments: false
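A minimal sketch of how such a field is declared in a Tryton model (model and relation names are hypothetical). One2One inherits Many2Many's constructor, so it takes the relation model name, the origin and target field names on that relation, and a label:

from trytond.model import ModelSQL, fields

class Person(ModelSQL):
    "Person"
    __name__ = 'test.person'
    # backed by a relation model that carries one Many2One per side
    passport = fields.One2One('test.person-passport', 'person', 'passport',
        'Passport')

class PersonPassport(ModelSQL):
    "Person - Passport"
    __name__ = 'test.person-passport'
    person = fields.Many2One('test.person', 'Person')
    passport = fields.Many2One('test.passport', 'Passport')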
repo_name: Metonimie/Beaglebone
path: programs/server.py
copies: 1
size: 3147
content:
#!/usr/bin/env python
"""
    A very simple server in python used to control gpio pins
    on the beaglebone black.
    The server listens for POST requests on port 6410.
    It has no security at all, which means that it accepts
    post-data from everyone.

    Send a GET request::
        curl http://localhost

    Send a POST request::
        curl -d "foo=bar&bin=baz" http://localhost

    Usage:
        nohup python3 server.py &
"""
# TODO: Add basic security
# TODO: Use dictionary for gpio name : file

import http.server
import urllib

PORT = 6410
gpio_path = "/sys/class/gpio/"

# If the param name is in here then we handle the value.
authorized_gpio = ["gpio60"]


class Server(http.server.BaseHTTPRequestHandler):

    def prepare_response(self, code):
        """ Prepares the response that will be send back to the
            requester, along with the code.
        """
        self.send_response(code)
        self.send_header("Content-type", "text/html")
        self.send_header("Access-Control-Allow-Origin", "*")
        self.end_headers()

    def handle_gpio(self, key, value):
        """ Very basic gpio handling, converts the value into
            an int and then it writes it to the file.
        """
        try:
            clean_value = int(value)
            with open("{}{}/value".format(gpio_path, key), mode="w") as file:
                file.write(str(clean_value))
            return False
        except ValueError as e:
            print(e)
        except Exception as e:
            print("Exception: {}".format(e))
        return True

    def unsupported(self):
        self.wfile.write("Go Away!\n".encode())

    def do_GET(self):
        self.unsupported()

    def do_HEAD(self):
        self.unsupported()

    def do_POST(self):
        """ Handles the post request.
            If error is True then the handling has failed or the request
            is invalid
        """
        error = False
        try:
            # The length of the request, in bytes.
            length = int(self.headers['content-length'])
            # Dictionary containing keys and values from the request.
            postvars = urllib.parse.parse_qs(self.rfile.read(length))

            for key, value in postvars.items():
                clean_key = key.decode()
                clean_value = value[0].decode()
                print("Received: " + clean_key + " : " + clean_value)

                if clean_key in authorized_gpio:
                    error = self.handle_gpio(clean_key, clean_value)
                else:
                    error = True
        except Exception as e:
            print(e)
            error = True

        response = None
        if not error:
            self.prepare_response(200)
            response = "Operation authorized.\n"
        else:
            self.prepare_response(403)
            response = "Go away!\n"

        # Write response to the client.
        self.wfile.write(response.encode())


if __name__ == "__main__":
    server_address = ('', PORT)
    httpd = http.server.HTTPServer(server_address, Server)
    print('Starting server')
    httpd.serve_forever()
license: gpl-3.0
hash: 3281151376845631500
line_mean: 28.688679
line_max: 77
alpha_frac: 0.571973
autogenerated: false
ratio: 4.157199
config_test: false
has_no_keywords: false
few_assignments: false
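A client-side sketch matching the handler above (the hostname is an assumption, and gpio60 must already be exported and set to out under /sys/class/gpio for the write in handle_gpio to succeed):

import urllib.parse
import urllib.request

# POST gpio60=1 to set the pin high; the server only accepts names
# listed in authorized_gpio.
data = urllib.parse.urlencode({"gpio60": 1}).encode()
req = urllib.request.Request("http://beaglebone.local:6410", data=data)
with urllib.request.urlopen(req) as resp:
    print(resp.status, resp.read().decode())  # expect: 200 Operation authorized.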
repo_name: arbrandes/edx-configuration
path: playbooks/roles/supervisor/files/pre_supervisor_checks.py
copies: 1
size: 12593
content:
import argparse
import boto.ec2
from boto.utils import get_instance_metadata, get_instance_identity
from boto.exception import AWSConnectionError
import hipchat
import os
import subprocess
import traceback
import socket
import time

# Services that should be checked for migrations.
MIGRATION_COMMANDS = {
        'lms': "/edx/bin/edxapp-migrate-lms --noinput --list",
        'cms': "/edx/bin/edxapp-migrate-cms --noinput --list",
        'xqueue': ". {env_file}; sudo -E -u xqueue {python} {code_dir}/manage.py showmigrations",
        'ecommerce': ". {env_file}; sudo -E -u ecommerce {python} {code_dir}/manage.py showmigrations",
        'insights': ". {env_file}; sudo -E -u insights {python} {code_dir}/manage.py showmigrations",
        'analytics_api': ". {env_file}; sudo -E -u analytics_api {python} {code_dir}/manage.py showmigrations",
        'credentials': ". {env_file}; sudo -E -u credentials {python} {code_dir}/manage.py showmigrations",
        'discovery': ". {env_file}; sudo -E -u discovery {python} {code_dir}/manage.py showmigrations",
    }
HIPCHAT_USER = "PreSupervisor"

# Max amount of time to wait for tags to be applied.
MAX_BACKOFF = 120
INITIAL_BACKOFF = 1

REGION = get_instance_identity()['document']['region']


def services_for_instance(instance_id):
    """
    Get the list of all services named by the services tag in this
    instance's tags.
    """
    ec2 = boto.ec2.connect_to_region(REGION)
    reservations = ec2.get_all_instances(instance_ids=[instance_id])
    for reservation in reservations:
        for instance in reservation.instances:
            if instance.id == instance_id:
                try:
                    services = instance.tags['services'].split(',')
                except KeyError as ke:
                    msg = "Tag named 'services' not found on this instance({})".format(instance_id)
                    raise Exception(msg)

                for service in services:
                    yield service


def edp_for_instance(instance_id):
    ec2 = boto.ec2.connect_to_region(REGION)
    reservations = ec2.get_all_instances(instance_ids=[instance_id])
    for reservation in reservations:
        for instance in reservation.instances:
            if instance.id == instance_id:
                try:
                    environment = instance.tags['environment']
                    deployment = instance.tags['deployment']
                    play = instance.tags['play']
                except KeyError as ke:
                    msg = "{} tag not found on this instance({})".format(ke.message, instance_id)
                    raise Exception(msg)
                return (environment, deployment, play)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description="Enable all services that are in the services tag of this ec2 instance.")
    parser.add_argument("-a", "--available",
        help="The location of the available services.")
    parser.add_argument("-e", "--enabled",
        help="The location of the enabled services.")

    migration_args = parser.add_argument_group("edxapp_migrations",
            "Args for running edxapp migration checks.")
    migration_args.add_argument("--edxapp-code-dir",
            help="Location of the edx-platform code.")
    migration_args.add_argument("--edxapp-python",
            help="Path to python to use for executing migration check.")
    migration_args.add_argument("--edxapp-env",
            help="Location of the edxapp environment file.")

    xq_migration_args = parser.add_argument_group("xqueue_migrations",
            "Args for running xqueue migration checks.")
    xq_migration_args.add_argument("--xqueue-code-dir",
            help="Location of the xqueue code.")
    xq_migration_args.add_argument("--xqueue-python",
            help="Path to python to use for executing migration check.")
    migration_args.add_argument("--xqueue-env",
            help="Location of the xqueue environment file.")

    ecom_migration_args = parser.add_argument_group("ecommerce_migrations",
            "Args for running ecommerce migration checks.")
    ecom_migration_args.add_argument("--ecommerce-python",
        help="Path to python to use for executing migration check.")
    ecom_migration_args.add_argument("--ecommerce-env",
        help="Location of the ecommerce environment file.")
    ecom_migration_args.add_argument("--ecommerce-code-dir",
        help="Location of the ecommerce code.")

    credentials_migration_args = parser.add_argument_group("credentials_migrations",
            "Args for running credentials migration checks.")
    credentials_migration_args.add_argument("--credentials-python",
        help="Path to python to use for executing migration check.")
    credentials_migration_args.add_argument("--credentials-env",
        help="Location of the credentials environment file.")
    credentials_migration_args.add_argument("--credentials-code-dir",
        help="Location of the credentials code.")

    discovery_migration_args = parser.add_argument_group("discovery_migrations",
            "Args for running discovery migration checks.")
    discovery_migration_args.add_argument("--discovery-python",
        help="Path to python to use for executing migration check.")
    discovery_migration_args.add_argument("--discovery-env",
        help="Location of the discovery environment file.")
    discovery_migration_args.add_argument("--discovery-code-dir",
        help="Location of the discovery code.")

    insights_migration_args = parser.add_argument_group("insights_migrations",
            "Args for running insights migration checks.")
    insights_migration_args.add_argument("--insights-python",
        help="Path to python to use for executing migration check.")
    insights_migration_args.add_argument("--insights-env",
        help="Location of the insights environment file.")
    insights_migration_args.add_argument("--insights-code-dir",
        help="Location of the insights code.")

    analyticsapi_migration_args = parser.add_argument_group("analytics_api_migrations",
            "Args for running analytics_api migration checks.")
    analyticsapi_migration_args.add_argument("--analytics-api-python",
        help="Path to python to use for executing migration check.")
    analyticsapi_migration_args.add_argument("--analytics-api-env",
        help="Location of the analytics_api environment file.")
    analyticsapi_migration_args.add_argument("--analytics-api-code-dir",
        help="Location of the analytics_api code.")

    hipchat_args = parser.add_argument_group("hipchat",
            "Args for hipchat notification.")
    hipchat_args.add_argument("-c", "--hipchat-api-key",
        help="Hipchat token if you want to receive notifications via hipchat.")
    hipchat_args.add_argument("-r", "--hipchat-room",
        help="Room to send messages to.")

    args = parser.parse_args()

    report = []
    prefix = None
    notify = None

    try:
        if args.hipchat_api_key:
            hc = hipchat.HipChat(token=args.hipchat_api_key)
            notify = lambda message: hc.message_room(room_id=args.hipchat_room,
                message_from=HIPCHAT_USER, message=message)
    except Exception as e:
        print("Failed to initialize hipchat, {}".format(e))
        traceback.print_exc()

    instance_id = get_instance_metadata()['instance-id']
    prefix = instance_id

    ec2 = boto.ec2.connect_to_region(REGION)
    reservations = ec2.get_all_instances(instance_ids=[instance_id])
    instance = reservations[0].instances[0]
    if instance.instance_profile['arn'].endswith('/abbey'):
        print("Running an abbey build. Not starting any services.")
        # Needs to exit with 1 instead of 0 to prevent
        # services from starting.
        exit(1)

    time_left = MAX_BACKOFF
    backoff = INITIAL_BACKOFF

    environment = None
    deployment = None
    play = None

    while time_left > 0:
        try:
            environment, deployment, play = edp_for_instance(instance_id)
            prefix = "{environment}-{deployment}-{play}-{instance_id}".format(
                environment=environment,
                deployment=deployment,
                play=play,
                instance_id=instance_id)
            break
        except Exception as e:
            print("Failed to get EDP for {}: {}".format(instance_id, str(e)))
            # With the time limit being 2 minutes we will
            # try 5 times before giving up.
            time.sleep(backoff)
            time_left -= backoff
            backoff = backoff * 2

    if environment is None or deployment is None or play is None:
        msg = "Unable to retrieve environment, deployment, or play tag."
        print(msg)
        if notify:
            notify("{} : {}".format(prefix, msg))
        exit(0)

    # get the hostname of the sandbox
    hostname = socket.gethostname()

    try:
        # get the list of the volumes, that are attached to the instance
        volumes = ec2.get_all_volumes(filters={'attachment.instance-id': instance_id})

        for volume in volumes:
            volume.add_tags({"hostname": hostname,
                             "environment": environment,
                             "deployment": deployment,
                             "cluster": play,
                             "instance-id": instance_id,
                             "created": volume.create_time})
    except Exception as e:
        msg = "Failed to tag volumes associated with {}: {}".format(instance_id, str(e))
        print(msg)
        if notify:
            notify(msg)

    try:
        for service in services_for_instance(instance_id):
            if service in MIGRATION_COMMANDS:
                services = {
                    "lms": {'python': args.edxapp_python, 'env_file': args.edxapp_env, 'code_dir': args.edxapp_code_dir},
                    "cms": {'python': args.edxapp_python, 'env_file': args.edxapp_env, 'code_dir': args.edxapp_code_dir},
                    "ecommerce": {'python': args.ecommerce_python, 'env_file': args.ecommerce_env, 'code_dir': args.ecommerce_code_dir},
                    "credentials": {'python': args.credentials_python, 'env_file': args.credentials_env, 'code_dir': args.credentials_code_dir},
                    "discovery": {'python': args.discovery_python, 'env_file': args.discovery_env, 'code_dir': args.discovery_code_dir},
                    "insights": {'python': args.insights_python, 'env_file': args.insights_env, 'code_dir': args.insights_code_dir},
                    "analytics_api": {'python': args.analytics_api_python, 'env_file': args.analytics_api_env, 'code_dir': args.analytics_api_code_dir},
                    "xqueue": {'python': args.xqueue_python, 'env_file': args.xqueue_env, 'code_dir': args.xqueue_code_dir},
                }

                if service in services and all(arg != None for arg in services[service].values()) and service in MIGRATION_COMMANDS:
                    serv_vars = services[service]

                    cmd = MIGRATION_COMMANDS[service].format(**serv_vars)
                    if os.path.exists(serv_vars['code_dir']):
                        os.chdir(serv_vars['code_dir'])
                        # Run migration check command.
                        output = subprocess.check_output(cmd, shell=True, )
                        if '[ ]' in output:
                            raise Exception("Migrations have not been run for {}".format(service))

            # Link to available service.
            available_file = os.path.join(args.available, "{}.conf".format(service))
            link_location = os.path.join(args.enabled, "{}.conf".format(service))
            if os.path.exists(available_file):
                subprocess.call("sudo -u supervisor ln -sf {} {}".format(available_file, link_location), shell=True)
                report.append("Enabling service: {}".format(service))
            else:
                raise Exception("No conf available for service: {}".format(link_location))

    except AWSConnectionError as ae:
        msg = "{}: ERROR : {}".format(prefix, ae)
        if notify:
            notify(msg)
            notify(traceback.format_exc())
        raise ae
    except Exception as e:
        msg = "{}: ERROR : {}".format(prefix, e)
        print(msg)
        if notify:
            notify(msg)
        traceback.print_exc()
        raise e
    else:
        msg = "{}: {}".format(prefix, " | ".join(report))
        print(msg)
        if notify:
            notify(msg)
license: agpl-3.0
hash: -2843304026179480000
line_mean: 45.640741
line_max: 152
alpha_frac: 0.615262
autogenerated: false
ratio: 4.182331
config_test: false
has_no_keywords: false
few_assignments: false
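The EDP lookup in the script above waits for EC2 tags with a capped exponential backoff: sleeps of 1, 2, 4, ... seconds against a 120-second budget, which gives roughly five attempts before giving up. The same pattern in isolation (function and variable names are mine):

import time

def retry_with_backoff(fn, budget=120, initial=1):
    """Call fn() until it succeeds or the time budget is spent.
    Sleeps 1, 2, 4, ... seconds between attempts."""
    time_left, backoff = budget, initial
    while time_left > 0:
        try:
            return fn()
        except Exception as e:
            print("retrying after {}s: {}".format(backoff, e))
            time.sleep(backoff)
            time_left -= backoff
            backoff *= 2
    return None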
repo_name: cyanogen/uchroma
path: uchroma/traits.py
copies: 1
size: 11759
content:
#
# uchroma - Copyright (C) 2021 Stefanie Kondik
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, version 3.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public
# License for more details.
#

# pylint: disable=protected-access, invalid-name, no-member

import enum
import importlib
import sys

from argparse import ArgumentParser
from typing import Iterable

from traitlets import CaselessStrEnum, Container, Dict, Enum, Int, HasTraits, \
        List, TraitType, Undefined, UseEnum
from frozendict import frozendict

from uchroma.color import to_color
from uchroma.util import ArgsDict


class ColorTrait(TraitType):
    """
    A traitlet which encapsulates a grapefruit.Color and performs
    type coercion as needed.
    """

    info_text = "a color"
    allow_none = True
    default_value = 'black'

    def __init__(self, *args, **kwargs):
        super(ColorTrait, self).__init__(*args, **kwargs)

    def validate(self, obj, value):
        try:
            if value is not None:
                value = to_color(value)
        except:
            self.error(obj, value)
        return value


class ColorSchemeTrait(List):
    """
    A list of ColorTraits which comprise a scheme
    """
    info_text = 'a list of colors'

    def __init__(self, trait=ColorTrait(), default_value=(),
                 minlen=0, maxlen=sys.maxsize, **kwargs):
        super(ColorSchemeTrait, self).__init__(trait=trait, default_value=default_value,
                                               minlen=minlen, maxlen=maxlen, **kwargs)


class ColorPresetTrait(UseEnum):
    """
    A trait which represents a group of color schemes defined
    as a Python Enum.
    """
    info_text = 'a predefined color scheme'

    def __init__(self, enum_class, default_value=None, **kwargs):
        super(ColorPresetTrait, self).__init__(enum_class, default_value=default_value, **kwargs)


class WriteOnceMixin(object):
    """
    Mixin for traits which cannot be changed after an initial
    value has been set.
    """
    write_once = True

    def validate(self, obj, value):
        if self.name not in obj._trait_values or \
                obj._trait_values[self.name] == self.default_value:
            return super().validate(obj, value)

        self.error(obj, value)


class WriteOnceInt(WriteOnceMixin, Int):
    """
    Subclass of Int which may only be written once
    """
    pass


class FrozenDict(WriteOnceMixin, Dict):
    """
    Subclass of Dict which converts the value to a frozendict on
    the first setting.
    """

    def validate(self, obj, value):
        return frozendict(super().validate(obj, value))


class UseEnumCaseless(UseEnum):
    """
    Subclass of UseEnum which allows selection of values using
    case insensitive strings
    """

    def select_by_name(self, value, default=Undefined):
        if value.startswith(self.name_prefix):
            # -- SUPPORT SCOPED-NAMES, like: "Color.red" => "red"
            value = value.replace(self.name_prefix, "", 1)

        keys = [x.lower() for x in self.enum_class.__members__.keys()]
        idx = keys.index(value.lower())
        if idx < 0:
            return Undefined
        return self.enum_class[list(self.enum_class.__members__.keys())[idx]]


class WriteOnceUseEnumCaseless(WriteOnceMixin, UseEnumCaseless):
    """
    Subclass of UseEnumCaseless which may only be written once.
    """
    pass


class DefaultCaselessStrEnum(CaselessStrEnum):
    """
    Extension of CaselessStrEnum which handles default values better
    """

    def validate(self, obj, value):
        if self.default_value and (value is None or value == ''):
            value = self.default_value

        return super().validate(obj, value)


def is_trait_writable(trait: TraitType) -> bool:
    """
    Test if a trait is writable

    :param trait: the trait to be tested
    :return: True if the trait is writable
    """
    if trait.read_only:
        return False

    if hasattr(trait, 'write_once') and trait.write_once:
        return False

    return True


def trait_as_dict(trait: TraitType) -> dict:
    """
    Convert a trait to a dict for sending over D-Bus or the like

    :param trait: the trait to be converted
    :return: dict representing this trait
    """
    cls = trait.__class__
    tdict = {}

    for k, v in vars(trait).items():
        if k.startswith('__') or k == 'this_class':
            continue
        if hasattr(cls, k) and getattr(cls, k) == v:
            continue
        if isinstance(v, Iterable) and len(v) == 0:
            continue

        if k.startswith('_'):
            tdict[k[1:]] = v
        else:
            tdict[k] = v

    if isinstance(trait, UseEnum):
        cls = CaselessStrEnum
        tdict['values'] = tuple(trait.enum_class.__members__.keys())
        if 'enum_class' in tdict:
            del tdict['enum_class']

    for k, v in tdict.items():
        if isinstance(v, TraitType):
            tdict[k] = trait_as_dict(v)
        if isinstance(v, enum.Enum):
            tdict[k] = v.name
        if isinstance(v, type):
            tdict[k] = '%s.%s' % (v.__module__, v.__name__)

    tdict['__class__'] = (cls.__module__, cls.__name__)
    return tdict


def class_traits_as_dict(obj: HasTraits, values: dict=None) -> dict:
    """
    Create a dict which represents all traits of the given object.
    This dict itself can be inspected in a generic API, or it
    may be converted back to a (stub) instance of HasTraits. This
    facilitates the sending of configurable object properties over
    an interface such as D-Bus.

    :param obj: an instance of HasTraits
    :param value: optional dict of trait values (pulled from obj by default)
    :return: dict representing all traits in obj
    """
    cls_dt = {}
    if isinstance(obj, type) and hasattr(obj, 'class_traits'):
        traits = obj.class_traits()
    elif isinstance(obj, dict):
        traits = obj
    elif isinstance(obj, HasTraits):
        traits = obj.traits()
        values = obj._trait_values
    else:
        raise TypeError("Object does not support traits")

    for k, v in traits.items():
        dt = trait_as_dict(v)
        if dt is None:
            continue
        if values is not None and k in values:
            dt['__value__'] = values[k]
        cls_dt[k] = dt
    return cls_dt


def dict_as_trait(obj: dict) -> TraitType:
    """
    Create a trait from a dict (trait_as_dict).
    """
    if '__class__' not in obj:
        raise ValueError("No module and class attribute present")

    tobj = obj.copy()

    module_name, trait_class = tobj.pop('__class__')

    module = importlib.import_module(module_name)
    if not hasattr(module, trait_class):
        raise TypeError("Unknown class: %s" % trait_class)
    cls = getattr(module, trait_class)

    if 'trait' in tobj:
        tobj['trait'] = dict_as_trait(tobj.pop('trait'))

    metadata = {}
    if 'metadata' in tobj:
        metadata.update(tobj.pop('metadata'))

    if issubclass(cls, Enum):
        trait = cls(tobj.pop('values'), **tobj)
    else:
        trait = cls(**tobj)

    for k in list(metadata.keys()):
        if k in ('name', 'default_args', 'default_kwargs'):
            setattr(trait, k, metadata.pop(k))

    trait.metadata = metadata

    return trait


def dict_as_class_traits(obj: dict) -> HasTraits:
    """
    Convert a dict of unpacked traits to a HasTraits instance.
    Useful for remote parameter inspection and validation.

    :param obj: dict of unpacked traits
    :return: the stub HasTraits instance
    """
    if not isinstance(obj, dict):
        raise TypeError("Object must be a dict (was: %s)" % obj)

    traits = {}
    values = {}
    for k, v in obj.items():
        if '__value__' in v:
            values[k] = v.pop('__value__')

        trait = dict_as_trait(v)
        if trait is None:
            continue
        traits[k] = trait

    cls = HasTraits()
    cls.add_traits(**traits)

    for k, v in values.items():
        setattr(cls, k, v)

    return cls


def get_args_dict(obj: HasTraits, incl_all=False):
    """
    Return a dict of user-configurable traits for an object

    :param obj: an instance of HasTraits
    :param incl_all: If all items should be included, regardless of RO status
    :return: dict of arguments
    """
    argsdict = ArgsDict()
    for k in sorted(obj._trait_values.keys()):
        v = obj._trait_values[k]
        trait = obj.traits()[k]
        if incl_all or (not trait.get_metadata('hidden') and is_trait_writable(trait)):
            argsdict[k] = v
    return argsdict


def add_traits_to_argparse(obj: HasTraits, parser: ArgumentParser,
                           prefix: str=None):
    """
    Add all traits from the given object to the argparse context.

    :param obj: an instance of HasTraits
    :param parser: argparse parser
    :param prefix: string to prefix keys with
    """
    for key, trait in obj.traits().items():
        if trait.get_metadata('config') is not True:
            continue

        argname = '--%s' % key
        if prefix is not None:
            argname = '--%s.%s' % (prefix, key)

        if isinstance(trait, Container):
            parser.add_argument(argname, nargs='+', help=trait.info_text)
        elif isinstance(trait, Enum):
            parser.add_argument(argname, type=str.lower,
                                choices=[x.lower() for x in trait.values],
                                help=trait.info_text)
        else:
            argtype = str
            if hasattr(trait, 'default_value'):
                argtype = type(trait.default_value)
            parser.add_argument(argname, type=argtype, help=trait.info_text)


def apply_from_argparse(args, traits=None, target: HasTraits=None) -> dict:
    """
    Applies arguments added via add_traits_to_argparse to
    a target object which implements HasTraits. If a target
    is not known, a dict of traits may be passed instead.
    Will throw TraitError if validation fails.

    :param args: Parsed args from argparse
    :param traits: Dictionary of traits (optional)
    :param target: Target object (optional)
    :return: Dict of the arguments which actually changed
    """
    # apply the traits to an empty object, which will run
    # the validators on the client
    if isinstance(traits, HasTraits):
        traits = traits.traits()

    traits = traits.copy()
    for k, v in traits.items():
        if not isinstance(v, TraitType):
            if isinstance(v, dict):
                traits[k] = dict_as_trait(v)
            else:
                raise TypeError("A dict or trait object must be supplied")

    if target is None:
        if traits is None:
            raise ValueError("Either traits or target must be specified")
        target = HasTraits()
        target.add_traits(**traits)

    # determine what should actually be changed
    argkeys = [k for k, v in vars(args).items() if v is not None]
    intersect = set(target.traits().keys()).intersection(set(argkeys))

    # apply the argparse flags to the target object
    for key in intersect:
        if target.traits()[key].get_metadata('config') is not True:
            raise ValueError("Trait is not marked as configurable: %s" % key)

        setattr(target, key, getattr(args, key))

    # if all validators passed, return a dict of the changed args
    changed = {}
    for key in intersect:
        changed[key] = target._trait_values[key]

    return changed
license: lgpl-3.0
hash: 468926082809482900
line_mean: 28.619647
line_max: 97
alpha_frac: 0.615188
autogenerated: false
ratio: 3.944649
config_test: false
has_no_keywords: false
few_assignments: false
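A minimal sketch of the round trip these helpers are built for (the class and trait names are hypothetical; in uchroma the packed dict travels over D-Bus):

from traitlets import HasTraits, Int, Unicode

class Blinker(HasTraits):                      # hypothetical configurable object
    speed = Int(5).tag(config=True)
    label = Unicode('hello').tag(config=True)

packed = class_traits_as_dict(Blinker())       # plain dict, safe to serialize
stub = dict_as_class_traits(packed)            # rebuilt stub on the far side
stub.speed = 9                                 # validated just like the original
print(get_args_dict(stub))                     # e.g. {'label': 'hello', 'speed': 9}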
repo_name: cardmagic/PyAMF
path: pyamf/adapters/_django_db_models_base.py
copies: 1
size: 8476
content:
# Copyright (c) 2007-2009 The PyAMF Project.
# See LICENSE.txt for details.

"""
`django.db.models` adapter module.

:see: `Django Project <http://www.djangoproject.com>`_

:since: 0.4.1
"""

from django.db.models.base import Model
from django.db.models import fields
from django.db.models.fields import related, files

import datetime

import pyamf
from pyamf.util import imports


class DjangoReferenceCollection(dict):
    """
    This helper class holds a dict of klass to pk/objects loaded from
    the underlying db.

    :since: 0.5
    """

    def _getClass(self, klass):
        if klass not in self.keys():
            self[klass] = {}

        return self[klass]

    def getClassKey(self, klass, key):
        """
        Return an instance based on klass/key.

        If an instance cannot be found then `KeyError` is raised.

        :param klass: The class of the instance.
        :param key: The primary_key of the instance.
        :return: The instance linked to the `klass`/`key`.
        :rtype: Instance of `klass`.
        """
        d = self._getClass(klass)

        return d[key]

    def addClassKey(self, klass, key, obj):
        """
        Adds an object to the collection, based on klass and key.

        :param klass: The class of the object.
        :param key: The datastore key of the object.
        :param obj: The loaded instance from the datastore.
        """
        d = self._getClass(klass)

        d[key] = obj


class DjangoClassAlias(pyamf.ClassAlias):
    def getCustomProperties(self):
        self.fields = {}
        self.relations = {}
        self.columns = []

        self.meta = self.klass._meta

        for name in self.meta.get_all_field_names():
            x = self.meta.get_field_by_name(name)[0]

            if isinstance(x, files.FileField):
                self.readonly_attrs.update([name])

            if isinstance(x, related.RelatedObject):
                continue

            if not isinstance(x, related.ForeignKey):
                self.fields[name] = x
            else:
                self.relations[name] = x

        for k, v in self.klass.__dict__.iteritems():
            if isinstance(v, related.ReverseManyRelatedObjectsDescriptor):
                self.fields[k] = v.field

        parent_fields = []

        for field in self.meta.parents.values():
            parent_fields.append(field.attname)
            del self.relations[field.name]

        self.exclude_attrs.update(parent_fields)

        props = self.fields.keys()

        self.encodable_properties.update(props)
        self.decodable_properties.update(props)

    def _compile_base_class(self, klass):
        if klass is Model:
            return

        pyamf.ClassAlias._compile_base_class(self, klass)

    def _encodeValue(self, field, value):
        if value is fields.NOT_PROVIDED:
            return pyamf.Undefined

        if value is None:
            return value

        # deal with dates ..
        if isinstance(field, fields.DateTimeField):
            return value
        elif isinstance(field, fields.DateField):
            return datetime.datetime(value.year, value.month, value.day, 0, 0, 0)
        elif isinstance(field, fields.TimeField):
            return datetime.datetime(1970, 1, 1,
                value.hour, value.minute, value.second, value.microsecond)
        elif isinstance(value, files.FieldFile):
            return value.name

        return value

    def _decodeValue(self, field, value):
        if value is pyamf.Undefined:
            return fields.NOT_PROVIDED

        if isinstance(field, fields.AutoField) and value == 0:
            return None
        elif isinstance(field, fields.DateTimeField):
            # deal with dates
            return value
        elif isinstance(field, fields.DateField):
            if not value:
                return None

            return datetime.date(value.year, value.month, value.day)
        elif isinstance(field, fields.TimeField):
            if not value:
                return None

            return datetime.time(value.hour, value.minute, value.second, value.microsecond)

        return value

    def getEncodableAttributes(self, obj, **kwargs):
        attrs = pyamf.ClassAlias.getEncodableAttributes(self, obj, **kwargs)

        if not attrs:
            attrs = {}

        for name, prop in self.fields.iteritems():
            if name not in attrs.keys():
                continue

            if isinstance(prop, related.ManyToManyField):
                attrs[name] = [x for x in getattr(obj, name).all()]
            else:
                attrs[name] = self._encodeValue(prop, getattr(obj, name))

        keys = attrs.keys()

        for key in keys:
            if key.startswith('_'):
                del attrs[key]

        for name, relation in self.relations.iteritems():
            if '_%s_cache' % name in obj.__dict__:
                attrs[name] = getattr(obj, name)

            del attrs[relation.column]

        if not attrs:
            attrs = None

        return attrs

    def getDecodableAttributes(self, obj, attrs, **kwargs):
        attrs = pyamf.ClassAlias.getDecodableAttributes(self, obj, attrs, **kwargs)

        for n in self.decodable_properties:
            if n in self.relations:
                continue

            f = self.fields[n]

            attrs[f.attname] = self._decodeValue(f, attrs[n])

        # primary key of django object must always be set first for
        # relationships with other model objects to work properly
        # and dict.iteritems() does not guarantee order
        #
        # django also forces the use only one attribute as primary key, so
        # our obj._meta.pk.attname check is sufficient)
        try:
            setattr(obj, obj._meta.pk.attname, attrs[obj._meta.pk.attname])
            del attrs[obj._meta.pk.attname]
        except KeyError:
            pass

        return attrs


def getDjangoObjects(context):
    """
    Returns a reference to the `django_objects` on the context. If it doesn't
    exist then it is created.

    :param context: The context to load the `django_objects` index from.
    :type context: Instance of :class:`pyamf.BaseContext`
    :return: The `django_objects` index reference.
    :rtype: Instance of :class:`DjangoReferenceCollection`
    :since: 0.5
    """
    if not hasattr(context, 'django_objects'):
        context.django_objects = DjangoReferenceCollection()

    return context.django_objects


def writeDjangoObject(self, obj, *args, **kwargs):
    """
    The Django ORM creates new instances of objects for each db request.
    This is a problem for PyAMF as it uses the id(obj) of the object to do
    reference checking.

    We could just ignore the problem, but the objects are conceptually the
    same so the effort should be made to attempt to resolve references for a
    given object graph.

    We create a new map on the encoder context object which contains a dict
    of C{object.__class__: {key1: object1, key2: object2, .., keyn: objectn}}.
    We use the primary key to do the reference checking.

    :since: 0.5
    """
    if not isinstance(obj, Model):
        self.writeNonDjangoObject(obj, *args, **kwargs)

        return

    context = self.context
    kls = obj.__class__

    s = obj.pk

    if s is None:
        self.writeNonDjangoObject(obj, *args, **kwargs)

        return

    django_objects = getDjangoObjects(context)

    try:
        referenced_object = django_objects.getClassKey(kls, s)
    except KeyError:
        referenced_object = obj
        django_objects.addClassKey(kls, s, obj)

    self.writeNonDjangoObject(referenced_object, *args, **kwargs)


def install_django_reference_model_hook(mod):
    """
    Called when :module:`pyamf.amf0` or :module:`pyamf.amf3` are imported.
    Attaches the :func:`writeDjangoObject` method to the `Encoder` class in
    that module.

    :param mod: The module imported.
    :since: 0.4.1
    """
    if not hasattr(mod.Encoder, 'writeNonDjangoObject'):
        mod.Encoder.writeNonDjangoObject = mod.Encoder.writeObject
        mod.Encoder.writeObject = writeDjangoObject


# initialise the module here: hook into pyamf

pyamf.register_alias_type(DjangoClassAlias, Model)

# hook the L{writeDjangobject} method to the Encoder class on import
imports.when_imported('pyamf.amf0', install_django_reference_model_hook)
imports.when_imported('pyamf.amf3', install_django_reference_model_hook)
license: mit
hash: -6217738496913844000
line_mean: 28.430556
line_max: 91
alpha_frac: 0.61975
autogenerated: false
ratio: 4.080886
config_test: false
has_no_keywords: false
few_assignments: false
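What the hook buys you, sketched with a hypothetical model (the adapter installs itself automatically once pyamf's amf0/amf3 encoders are imported):

import pyamf
from myapp.models import Author           # hypothetical Django model

# Two ORM queries return distinct Python objects for the same row.
a1 = Author.objects.get(pk=1)
a2 = Author.objects.get(pk=1)

# With writeDjangoObject in place, reference checking uses the primary
# key, so the second occurrence encodes as an AMF reference rather than
# a full copy of the object.
encoded = pyamf.encode([a1, a2]).getvalue()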
repo_name: ICOS-Carbon-Portal/data
path: src/main/python/update-restheart/Restheart.py
copies: 1
size: 2242
content:
import requests


class Restheart(object):
    def __init__(self):
        # self._baseUrl = 'http://127.0.0.1:8088/db/'        # localhost
        self._baseUrl = 'https://restheart.icos-cp.eu/db/'   # production
        self._verfify = True if self._baseUrl.__contains__('restheart') else False

    def get_records_to_update(self, op, pagesize, collection):
        resp = None

        try:
            url = self.get_url(op, pagesize, collection)
            resp = requests.get(url, timeout=10, verify=self._verfify)

            if resp.status_code != 200:
                print(resp.status_code, resp.reason, resp.json())

            return resp.json()
        except:
            print(resp)

    def update_record(self, id, record, collection):
        url = self._baseUrl + collection + '/' + id
        headers = {"Content-Type": "application/json"}
        resp = None

        try:
            resp = requests.patch(url, headers=headers, json=record, timeout=5, verify=self._verfify)

            if resp.status_code != 200:
                print(resp.status_code, resp.reason)
        except:
            print(resp)

    def get_url(self, op, pagesize, collection):
        if op == 'geo':
            if collection == 'portaluse':
                return self._baseUrl + collection + '?filter={"city":{"$exists":0}}&np&pagesize=' + str(pagesize)

            elif collection == 'dobjdls':
                return self._baseUrl + collection + '?filter={"$and":[{"ip":{"$exists":1}},{"city":{"$exists":0}}]}&np&pagesize=' + str(pagesize)

            else:
                raise ValueError("Unknown collection: " + collection)

        elif op == 'label':
            if collection == 'portaluse':
                return self._baseUrl + collection + '?np&pagesize=' + str(pagesize)
                # return self._baseUrl + collection + '?filter={"_id":{"$oid":"5bb21519f17df4d065e9c53c"}}&np&pagesize=' + str(pagesize)
                # return self._baseUrl + collection + '?filter={"filterChange":{"$exists":1}}&np&pagesize=' + str(pagesize)
                # return self._baseUrl + collection + '?filter={"previewNetCDF":{"$exists":1}}&np&pagesize=' + str(pagesize)
                # return self._baseUrl + collection + '?filter={"previewTimeserie":{"$exists":1}}&np&pagesize=' + str(pagesize)
                # return self._baseUrl + collection + '?filter={"$and":[{"filterChange":{"$exists":0}},{"previewNetCDF":{"$exists":0}},{"previewTimeserie":{"$exists":0}}]}&np&pagesize=' + str(pagesize)

            else:
                raise ValueError("Unknown collection: " + collection)
license: gpl-3.0
hash: 5153336162370622000
line_mean: 37.655172
line_max: 189
alpha_frac: 0.650758
autogenerated: false
ratio: 3.058663
config_test: false
has_no_keywords: false
few_assignments: false
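A short usage sketch (the exact JSON shape RESTHeart returns depends on its version, so the iteration is indicative only; the collection and op names come from get_url above):

rh = Restheart()
# fetch documents that have an ip but no city yet
docs = rh.get_records_to_update('geo', 100, 'dobjdls')
# inspect `docs`, resolve each ip to a location, then patch one record:
# rh.update_record(doc_id, {'city': '...', 'country_code': '...'}, 'dobjdls')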
repo_name: domecraft/Games
path: RPG/classes.py
copies: 1
size: 3530
content:
class character:
    def __init__(self, name, gender, health, race, role, status, strength, defense, magic, bounty, income, reputation):
        self.name = name
        self.health = health
        self.status = status
        self.strength = strength
        self.defense = defense
        self.race = race
        self.role = role
        self.bounty = bounty
        self.magic = magic
        self.gender = gender
        self.income = income
        self.reputation = reputation
        self.inventory = []

    def modify_health(self, amount):
        self.health += amount

    def set_health(self, amount):
        self.health = amount

    def set_status(self, status):
        self.status = status

    def modify_str(self, amount):
        self.strength += amount

    def modify_def(self, amount):
        self.defense += amount

    def add_item(self, item):
        self.inventory.append(item)

    def remove_item(self, item):
        if item in self.inventory:
            self.inventory.remove(item)
        else:
            print item + " is not in your inventory!"

    def set_race(self, race):
        self.race = race

    def modify_bounty(self, amount):
        self.bounty += amount

    def checkDead(self, health):
        if self.health <= 0:
            self.status = "dead"
            return "dead"
        else:
            self.status = "alive"
            return "alive"

    def modify_income(self, amount):
        self.income += amount

    def modify_reputation(self, amount):
        self.reputation += amount


# The following class is used for random npcs that I don't really develop in the storyline.
class basicCharacter:
    def __init__(self, name, gender, income, status):
        self.name = name
        self.gender = gender
        self.income = income
        self.status = status

    def set_status(self, status):
        self.status = status


class store:
    def __init__(self, name="General Store", owner="Store Owner", alliance="Rebellion"):
        self.name = name
        self.store_owner = owner
        self.alliance = alliance
        self.stock = {
            'longsword': {'cost': 10, 'speed': 3, 'strength': 7, 'defense': 2},
            'shortsword': {'cost': 8, 'speed': 5, 'strength': 4, 'defense': 2},
            'bronze_armor': {'cost': 10, 'speed': -2, 'strength': 1, 'defense': 6},
            'silver_armor': {'cost': 20, 'speed': -5, 'strength': 2, 'defense': 12},
            'platinum_armor': {'cost': 35, 'speed': -8, 'strength': 4, 'defense': 20}
        }


class town:
    def __init__(self, name, ruler, alliance, income, population):
        self.name = name
        self.ruler = ruler
        self.alliance = alliance
        self.income = income
        self.population = population

    def set_ruler(self, ruler):
        self.ruler = ruler

    def set_name(self, name):
        self.name = name

    def set_alliance(self, alliance):
        self.alliance = alliance

    def modify_income(self, amount):
        self.income += amount

    def modify_pop(self, population):
        self.population += population


class bar:
    def __init__(self, name, owner, income):
        self.name = name
        self.owner = owner
        self.income = income

    def set_owner(self, owner):
        self.owner = owner

    def modify_income(self, amount):
        self.income += amount
license: gpl-2.0
hash: 3359393522367290000
line_mean: 29.964912
line_max: 119
alpha_frac: 0.545326
autogenerated: false
ratio: 3.904867
config_test: false
has_no_keywords: false
few_assignments: false
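A short usage sketch of the character class above (the values are examples; note the file is Python 2, hence the print statement):

hero = character("Arden", "m", 100, "human", "knight", "alive",
                 strength=8, defense=5, magic=2, bounty=0, income=10, reputation=0)
hero.add_item("longsword")
hero.modify_health(-120)            # take fatal damage
print hero.checkDead(hero.health)   # prints "dead" and sets hero.status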
repo_name: fbergmann/libSEDML
path: examples/python/create_sedml.py
copies: 1
size: 5521
content:
#!/usr/bin/env python
##
## @file    create_sedml.py
## @brief   creates a SED-ML document.
## @author  Frank T. Bergmann
##
## <!--------------------------------------------------------------------------
## This file is part of libSEDML.  Please visit http://sed-ml.org for more
## information about SEDML, and the latest version of libSEDML.
##
## Copyright (c) 2013, Frank T. Bergmann
## All rights reserved.
##
## Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are met:
##
## 1. Redistributions of source code must retain the above copyright notice, this
##    list of conditions and the following disclaimer.
## 2. Redistributions in binary form must reproduce the above copyright notice,
##    this list of conditions and the following disclaimer in the documentation
##    and/or other materials provided with the distribution.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
## ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
## WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
## DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
## ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
## (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
## LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
## ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
## (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
## SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
## ------------------------------------------------------------------------ -->
##

import sys
import os.path
import libsedml


def main(args):
    """Usage: create_sedml output-filename
    """
    if len(args) != 2:
        print(main.__doc__)
        sys.exit(1)

    # create the document
    doc = libsedml.SedDocument()
    doc.setLevel(1)
    doc.setVersion(1)

    # create a first model referencing an sbml file
    model = doc.createModel()
    model.setId("model1")
    model.setSource("file.xml")
    model.setLanguage("urn:sedml:language:sbml")

    # create a second model modifying a variable of that other sbml file
    model = doc.createModel()
    model.setId("model2")
    model.setSource("model1")
    model.setLanguage("urn:sedml:sbml")

    # change a parameter 'k' to 0.1
    change = model.createChangeAttribute()
    change.setTarget("/sbml:sbml/sbml:model/sbml:listOfParameters/sbml:parameter[@id='k']/@value")
    change.setNewValue("0.1")

    # remove species 's1'
    remove = model.createRemoveXML()
    remove.setTarget("/sbml:sbml/sbml:model/sbml:listOfSpecies/sbml:species[@id='S1']")

    # now for something tricky we want to update the initialConcentration of 'S2' to be
    # half what it was in the original model
    compute = model.createComputeChange()
    compute.setTarget("/sbml:sbml/sbml:model/sbml:listOfSpecies/sbml:species[@id=&quot;S2&quot;]/@initialConcentration")
    variable = compute.createVariable()
    variable.setId("S2")
    variable.setModelReference("model1")
    variable.setTarget("/sbml:sbml/sbml:model/sbml:listOfSpecies/sbml:species[@id='S2']")
    compute.setMath(libsedml.parseFormula("S2 / 2"))

    # create simulation
    tc = doc.createUniformTimeCourse()
    tc.setId("sim1")
    tc.setInitialTime(0.0)
    tc.setOutputStartTime(0.0)
    tc.setOutputEndTime(10.0)
    tc.setNumberOfPoints(1000)
    # need to set the correct KISAO Term
    alg = tc.createAlgorithm()
    alg.setKisaoID("KISAO:0000019")

    # create a task that uses the simulation and the model above
    task = doc.createTask()
    task.setId("task1")
    task.setModelReference("model1")
    task.setSimulationReference("sim1")

    # add a DataGenerator to hold the output for time
    dg = doc.createDataGenerator()
    dg.setId("time")
    dg.setName("time")
    var = dg.createVariable()
    var.setId("v0")
    var.setName("time")
    var.setTaskReference("task1")
    var.setSymbol("urn:sedml:symbol:time")
    dg.setMath(libsedml.parseFormula("v0"))

    # and one for S1
    dg = doc.createDataGenerator()
    dg.setId("S1")
    dg.setName("S1")
    var = dg.createVariable()
    var.setId("v1")
    var.setName("S1")
    var.setTaskReference("task1")
    var.setTarget("/sbml:sbml/sbml:model/sbml:listOfSpecies/sbml:species[@id='S1']")
    dg.setMath(libsedml.parseFormula("v1"))

    # add a report
    report = doc.createReport()
    report.setId("r1")
    report.setName("report 1")
    set = report.createDataSet()
    set.setId("ds1")
    set.setLabel("time")
    set.setDataReference("time")
    set = report.createDataSet()
    set.setId("ds2")
    set.setLabel("S1")
    set.setDataReference("S1")

    # add a 2d plot
    plot = doc.createPlot2D()
    plot.setId("p1")
    plot.setName("S1 Timecourse")
    curve = plot.createCurve()
    curve.setId("c1")
    curve.setName("S1")
    curve.setLogX(False)
    curve.setLogY(False)
    curve.setXDataReference("time")
    curve.setYDataReference("S1")

    # add a 3D Plot
    plot2 = doc.createPlot3D()
    plot2.setId("p2")
    plot2.setName("dunno")
    surf = plot2.createSurface()
    surf.setId("surf1")
    surf.setName("S1")
    surf.setLogX(False)
    surf.setLogY(False)
    surf.setLogZ(False)
    surf.setXDataReference("time")
    surf.setYDataReference("S1")
    surf.setZDataReference("S1")

    # write the document
    libsedml.writeSedML(doc, args[1])


if __name__ == '__main__':
    main(sys.argv)
bsd-2-clause
3,096,274,935,878,346,000
32.05988
119
0.685926
false
3.393362
false
false
false
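A quick way to sanity-check the file written by the script above is to read it back with the same bindings. This is a minimal sketch; it assumes the libsedml Python bindings expose readSedML, getNumErrors, getNumModels and getNumTasks (mirroring the libSBML naming), which should be treated as an assumption rather than a verified API.

import libsedml

doc = libsedml.readSedML("out.sedml")  # file produced by: python create_sedml.py out.sedml
print(doc.getNumErrors())              # expected 0 for a clean round-trip
print(doc.getNumModels(), doc.getNumTasks())  # assumed accessor names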
molmod/yaff
yaff/pes/colvar.py
1
13249
# -*- coding: utf-8 -*-
# YAFF is yet another force-field code.
# Copyright (C) 2011 Toon Verstraelen <[email protected]>,
# Louis Vanduyfhuys <[email protected]>, Center for Molecular Modeling
# (CMM), Ghent University, Ghent, Belgium; all rights reserved unless otherwise
# stated.
#
# This file is part of YAFF.
#
# YAFF is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# YAFF is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>
#
# --
'''Collective variables

   This module implements the computation of collective variables and their
   derivatives, typically used in advanced sampling methods such as umbrella
   sampling or metadynamics. The ``CollectiveVariable`` class is the main item
   in this module, which is normally used in conjunction with an instance of
   the ``Bias`` class. Note that many collective variables such as bond
   lengths, bending angles, improper angles, ... are already implemented by
   the :mod:`yaff.pes.iclist` module, so no separate implementation needs to
   be provided here.
'''

from __future__ import division

import numpy as np

from yaff.log import log
from yaff.pes.dlist import DeltaList
from yaff.pes.iclist import InternalCoordinateList
from yaff.sampling.utils import cell_lower

__all__ = [
    'CollectiveVariable', 'CVVolume', 'CVCOMProjection',
    'CVInternalCoordinate', 'CVLinCombIC',
]


class CollectiveVariable(object):
    '''Base class for collective variables.'''
    def __init__(self, name, system):
        """
           **Arguments:**

           name
                A name for the collective variable.

           system
                The system for the collective variable.
        """
        self.name = name
        self.system = system
        self.value = np.nan
        self.gpos = np.zeros((system.natom, 3), float)
        self.vtens = np.zeros((3, 3), float)

    def get_conversion(self):
        '''Auxiliary routine that allows subclasses to specify the unit
           conversion associated with the internal coordinate.
        '''
        raise NotImplementedError

    def get_log(self):
        '''Describe the internal coordinate in a format that is suitable for
           screen logging.
        '''
        return '%s' % (self.__class__.__name__)

    def compute(self, gpos=None, vtens=None):
        """Compute the collective variable and optionally some derivatives

           The only variable inputs for the compute routine are the atomic
           positions and the cell vectors.

           **Optional arguments:**

           gpos
                The derivatives of the collective variable towards the
                Cartesian coordinates of the atoms. ('g' stands for gradient
                and 'pos' for positions.) This must be a writeable numpy array
                with shape (N, 3) where N is the number of atoms.

           vtens
                The force contribution to the pressure tensor. This is also
                known as the virial tensor. It represents the derivative of
                the energy towards uniform deformations, including changes in
                the shape of the unit cell. (v stands for virial and 'tens'
                stands for tensor.) This must be a writeable numpy array with
                shape (3, 3).

           The collective variable value is returned. The optional arguments
           are Fortran-style output arguments. When they are present, the
           corresponding results are computed and **stored** to the current
           contents of the array.
        """
        # Subclasses implement their compute code here.
        raise NotImplementedError

    def get_last_computed_value(self):
        """Return the last value that was computed.

           It is not assured that this value reflects the value for the
           current state of the system. This is merely a convenience method
           to obtain the value without performing an actual computation.
        """
        return self.value


class CVInternalCoordinate(CollectiveVariable):
    '''
       An InternalCoordinate disguised as a CollectiveVariable so that it can
       be used together with a BiasPotential.

       This is less efficient than using the InternalCoordinate with a
       ValenceTerm, so the latter is preferred if it is possible.
    '''
    def __init__(self, system, ic, comlist=None):
        self.system = system
        self.ic = ic
        self.comlist = comlist
        self.dlist = DeltaList(system if comlist is None else comlist)
        self.iclist = InternalCoordinateList(self.dlist)
        self.iclist.add_ic(ic)

    def get_conversion(self):
        return self.ic.get_conversion()

    def compute(self, gpos=None, vtens=None):
        if self.comlist is not None:
            self.comlist.forward()
        self.dlist.forward()
        self.iclist.forward()
        self.value = self.iclist.ictab[0]['value']
        if gpos is not None:
            gpos[:] = 0.0
        if vtens is not None:
            vtens[:] = 0.0
        if not ((gpos is None) and (vtens is None)):
            self.iclist.ictab[0]['grad'] = 1.0
            self.iclist.back()
            if self.comlist is None:
                self.dlist.back(gpos, vtens)
            else:
                self.comlist.gpos[:] = 0.0
                self.dlist.back(self.comlist.gpos, vtens)
                self.comlist.back(gpos)
        return self.value


class CVVolume(CollectiveVariable):
    '''The volume of the simulation cell.'''
    def __init__(self, system):
        '''
           **Arguments:**

           system
                An instance of the ``System`` class.
        '''
        if system.cell.nvec == 0:
            raise TypeError('Can not compute volume of a non-periodic system.')
        CollectiveVariable.__init__(self, 'CVVolume', system)

    def get_conversion(self):
        return np.power(log.length.conversion, self.system.cell.nvec)

    def compute(self, gpos=None, vtens=None):
        self.value = self.system.cell.volume
        if gpos is not None:
            # No dependence on atomic positions
            gpos[:] = 0.0
        if vtens is not None:
            vtens[:] = np.identity(3)*self.value
        return self.value


class CVCOMProjection(CollectiveVariable):
    '''Compute the vector connecting two centers of masses and return the
       projection along a selected vector:

            cv = (r_{COM}^{B} - r_{COM}^{A})[index]

       where r_{COM} is a vector with centers of mass of groups A and B:

            * first component: projected onto ``a`` vector of cell
            * second component: projected onto vector perpendicular to ``a``
              and in the plane spanned by ``a`` and ``b``
            * third component: projected onto vector perpendicular to ``a``
              and ``b``

       Note that periodic boundary conditions are NOT taken into account:

            * the centers of mass are computed using absolute positions;
              this is most likely the desired behavior
            * the center of mass difference can in principle be periodic,
              but the periodicity is not the same as the periodicity of the
              system, because of the projection on a selected vector
    '''
    def __init__(self, system, groups, index):
        '''
           **Arguments:**

           system
                An instance of the ``System`` class

           groups
                List of 2 arrays, each array containing atomic indexes used
                to compute one of the centers of mass

           index
                Selected projection vector:

                * if index==0, projection onto ``a`` vector of cell
                * if index==1, projection onto vector perpendicular to ``a``
                  and in the plane spanned by ``a`` and ``b``
                * if index==2, projection onto vector perpendicular to ``a``
                  and ``b``
        '''
        CollectiveVariable.__init__(self, 'CVCOMProjection', system)
        self.index = index
        # Safety checks
        assert len(groups) == 2, "Exactly 2 groups need to be defined"
        assert system.cell.nvec == 3, "Only 3D periodic systems are supported"
        assert self.index in [0, 1, 2], "Index should be one of 0,1,2"
        # Masses need to be defined in order to compute centers of mass
        if self.system.masses is None:
            self.system.set_standard_masses()
        # Define weights w_i such that difference of centers of mass can be
        # computed as sum_i w_i r_i
        self.weights = np.zeros((system.natom))
        self.weights[groups[0]] = -self.system.masses[groups[0]]/np.sum(self.system.masses[groups[0]])
        self.weights[groups[1]] = self.system.masses[groups[1]]/np.sum(self.system.masses[groups[1]])

    def get_conversion(self):
        return log.length.conversion

    def compute(self, gpos=None, vtens=None):
        '''
           Consider a rotation of the entire system such that the ``a``
           vector is aligned with the X-axis, the ``b`` vector is in the
           XY-plane, and the ``c`` vector chosen such that a right-handed
           basis is formed. The rotated cell is lower-diagonal in the Yaff
           notation.

           In this rotated system, it is fairly simple to compute the
           required projections and derivatives, because the projections are
           simply the Cartesian components. Values obtained in the rotated
           system are then transformed back to the original system.
        '''
        # Compute rotation that makes cell lower diagonal
        _, R = cell_lower(self.system.cell.rvecs)
        # The projected vector of centers of mass difference (aka the
        # collective variable) in the rotated system
        cv_orig = np.sum(self.weights.reshape((-1, 1))*self.system.pos, axis=0)
        # Transform back to the original system
        cv = np.dot(R, cv_orig)
        self.value = cv[self.index]
        if gpos is not None:
            gpos[:] = 0.0
            gpos[:, self.index] = self.weights
            # Forces (vector) need to be rotated back to original system
            gpos[:] = np.einsum('ij,kj', gpos, R.T)
        if vtens is not None:
            vtens[:] = 0.0
            vtens[self.index, self.index:] = cv[self.index:]
            vtens[self.index:, self.index] = cv[self.index:]
            # Virial (tensor) needs to be rotated back to original system
            vtens[:] = np.dot(R.T, np.dot(vtens[:], R))
        return self.value


class CVLinCombIC(CollectiveVariable):
    '''
       A linear combination of InternalCoordinates:

            cv = w0*ic0 + w1*ic1 + ...
    '''
    def __init__(self, system, ics, weights, comlist=None):
        '''
           **Arguments:**

           system
                An instance of the ``System`` class.

           ics
                A list of InternalCoordinate instances.

           weights
                A list defining the weight of each InternalCoordinate that
                is used when computing the linear combination.

           **Optional arguments:**

           comlist
                An instance COMList; if provided, this is used instead of the
                normal DeltaList to compute the InternalCoordinates
        '''
        assert len(weights) == len(ics)
        self.system = system
        self.ics = ics
        self.comlist = comlist
        self.dlist = DeltaList(system if comlist is None else comlist)
        self.iclist = InternalCoordinateList(self.dlist)
        for ic in self.ics:
            self.iclist.add_ic(ic)
        self.weights = weights

    def get_conversion(self):
        # Units depend on the particular linear combination of internal
        # coordinates
        return 1.0

    def compute(self, gpos=None, vtens=None):
        if self.comlist is not None:
            self.comlist.forward()
        self.dlist.forward()
        self.iclist.forward()
        self.value = 0.0
        for iic in range(len(self.ics)):
            self.value += self.weights[iic]*self.iclist.ictab[iic]['value']
        if gpos is not None:
            gpos[:] = 0.0
        if vtens is not None:
            vtens[:] = 0.0
        if not ((gpos is None) and (vtens is None)):
            for iic in range(len(self.ics)):
                # Derivative of the linear combination to this particular
                # internal coordinate
                self.iclist.ictab[iic]['grad'] = self.weights[iic]
            self.iclist.back()
            if self.comlist is None:
                self.dlist.back(gpos, vtens)
            else:
                self.comlist.gpos[:] = 0.0
                self.dlist.back(self.comlist.gpos, vtens)
                self.comlist.back(gpos)
        return self.value
gpl-3.0
-3,113,228,627,627,625,000
37.853372
102
0.611669
false
4.174228
false
false
false
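A minimal sketch of how the CollectiveVariable contract above can be implemented: a CV returning the z coordinate of a single atom. Only the base-class API visible in colvar.py is used; the class name and the iatom parameter are hypothetical, and the virial contribution is left out of this sketch.

class CVAtomZ(CollectiveVariable):
    def __init__(self, system, iatom):
        CollectiveVariable.__init__(self, 'CVAtomZ', system)
        self.iatom = iatom

    def get_conversion(self):
        return log.length.conversion

    def compute(self, gpos=None, vtens=None):
        self.value = self.system.pos[self.iatom, 2]
        if gpos is not None:
            gpos[:] = 0.0
            gpos[self.iatom, 2] = 1.0  # d(cv)/dz_i = 1
        if vtens is not None:
            vtens[:] = 0.0  # virial contribution omitted in this sketch
        return self.value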
russorat/savage-leads
api/models/lead.py
1
2649
from elasticsearch import Elasticsearch, RequestsHttpConnection, NotFoundError
from flask import url_for
import config
import json


class Lead(object):
    es = Elasticsearch(config.ES_HOSTS, connection_class=RequestsHttpConnection)

    @staticmethod
    def create_lead(lead_data):
        try:
            results = Lead.es.create(index='leads', doc_type='leads', body=lead_data)
            if results['created']:
                return {'status': 'success', 'message': '', 'created_id': results['_id']}
            else:
                return {'status': 'failure', 'message': 'failed to create new lead.', 'created_id': ''}
        except Exception as e:
            print e
            return {'status': 'failure', 'message': 'unknown error', 'created_id': ''}

    @staticmethod
    def delete_lead(lead_id):
        try:
            Lead.es.delete(index='leads', doc_type='leads', id=lead_id)
            return {'status': 'success', 'message': ''}
        except NotFoundError as e:
            return {'status': 'failure', 'message': 'id not found'}
        except Exception as e:
            print e
            return {'status': 'failure', 'message': 'unknown error'}

    @staticmethod
    def get_lead(lead_id):
        try:
            results = Lead.es.get(
                index='leads',
                doc_type='leads',
                id='%s' % (lead_id),
                ignore=404
            )
            if results and results['found']:
                return {'status': 'success', 'message': '', 'results': [Lead.from_es_hit(results)]}
            return {'status': 'success', 'message': '', 'results': []}
        except NotFoundError as e:
            return {'status': 'failure', 'message': 'id not found', 'results': []}
        except Exception as e:
            print e
            return {'status': 'failure', 'message': 'unknown exception', 'results': []}

    @staticmethod
    def get_leads(size, page, search):
        try:
            results = Lead.es.search(
                index='leads',
                doc_type='leads',
                size=size,
                q=search or "*",
                sort='last_name:ASC,first_name:ASC'
            )
            retVal = []
            if results and results['hits']['total'] > 0:
                for hit in results['hits']['hits']:
                    retVal.append(Lead.from_es_hit(hit))
            return {'status': 'success', 'message': '', 'results': retVal}
        except Exception as e:
            print e
            return {'status': 'failure', 'message': 'unknown error', 'results': []}

    @staticmethod
    def from_es_hit(hit):
        lead = {}
        lead['id'] = hit['_id']
        for key, val in hit['_source'].items():
            lead[key] = val
        lead['uri'] = url_for('get_lead', lead_id=lead['id'], _external=True)
        return lead
apache-2.0
-1,660,902,958,443,782,400
29.102273
86
0.559079
false
3.800574
false
false
false
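A usage sketch for the Lead model above. It assumes an Elasticsearch instance is reachable through config.ES_HOSTS and that the code runs inside a Flask request context (from_es_hit calls url_for); the field names are illustrative.

result = Lead.create_lead({'first_name': 'Ada', 'last_name': 'Lovelace'})
if result['status'] == 'success':
    fetched = Lead.get_lead(result['created_id'])
    print fetched['results']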
jaantollander/CrowdDynamics
crowddynamics/core/tests/test_interactions_benchmark.py
1
1239
import numpy as np
import pytest

from crowddynamics.core.interactions import agent_agent_block_list
from crowddynamics.core.vector2D import unit_vector
from crowddynamics.simulation.agents import Agents, Circular, ThreeCircle, \
    AgentGroup


def attributes():
    orientation = np.random.uniform(-np.pi, np.pi)
    return dict(body_type='adult',
                orientation=orientation,
                velocity=np.random.uniform(0.0, 1.3, 2),
                angular_velocity=np.random.uniform(-1.0, 1.0),
                target_direction=unit_vector(orientation),
                target_orientation=orientation)


@pytest.mark.parametrize('size', (200, 500, 1000))
@pytest.mark.parametrize('agent_type', (Circular, ThreeCircle))
def test_agent_agent_block_list(benchmark, size, agent_type):
    # Grow the area with size. Keeps agent density constant.
    area_size = np.sqrt(2 * size)
    agents = Agents(agent_type=agent_type)
    group = AgentGroup(agent_type=agent_type, size=size, attributes=attributes)
    agents.add_non_overlapping_group(
        group,
        position_gen=lambda: np.random.uniform(-area_size, area_size, 2))
    benchmark(agent_agent_block_list, agents.array)
    assert True
gpl-3.0
4,276,068,190,012,276,700
36.545455
80
0.684423
false
3.580925
false
false
false
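Outside pytest-benchmark, the same call can be timed directly. A sketch under the assumption that crowddynamics is installed; the area size mirrors the density-preserving formula used in the test.

import timeit

size = 200
area_size = np.sqrt(2 * size)
agents = Agents(agent_type=Circular)
group = AgentGroup(agent_type=Circular, size=size, attributes=attributes)
agents.add_non_overlapping_group(
    group, position_gen=lambda: np.random.uniform(-area_size, area_size, 2))
print(timeit.timeit(lambda: agent_agent_block_list(agents.array), number=10))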
myshkov/bnn-analysis
models/bbb_sampler.py
1
4851
""" This module implements Bayes By Backprop -based sampler for NNs. http://jmlr.org/proceedings/papers/v37/blundell15.pdf """ import numpy as np from keras.models import Sequential from keras.layers.core import Activation from keras import backend as K from keras.engine.topology import Layer from sampler import Sampler, SampleStats class BBBSampler(Sampler): """ BBB sampler for NNs. """ def __init__(self, model=None, batch_size=None, n_epochs=None, **kwargs): """ Creates a new BBBSampler object. """ super().__init__(**kwargs) self.sampler_type = 'BBB' self.model = model self.batch_size = batch_size if batch_size is not None else self.train_set_size self.n_epochs = n_epochs def __repr__(self): s = super().__repr__() return s def _fit(self, n_epochs=None, verbose=0, **kwargs): """ Fits the model before sampling. """ n_epochs = n_epochs if n_epochs is not None else self.n_epochs self.model.fit(self.train_x, self.train_y, batch_size=self.batch_size, nb_epoch=n_epochs, verbose=verbose) def _sample_predictive(self, test_x=None, return_stats=False, **kwargs): """ Draws a new sample from the model. """ sample = self.model.predict(test_x, batch_size=self.batch_size) stats = None if return_stats: stats = SampleStats(time=self._running_time()) return [sample], [stats] @classmethod def model_from_description(cls, layers, noise_std, weights_std, batch_size, train_size): """ Creates a BBB model from the specified parameters. """ n_batches = int(train_size / batch_size) step = .01 class BBBLayer(Layer): def __init__(self, output_dim, **kwargs): self.output_dim = output_dim super().__init__(**kwargs) def build(self, input_shape): input_dim = input_shape[1] shape = [input_dim, self.output_dim] eps_std = step # weights self.eps_w = K.random_normal([input_shape[0]] + shape, std=eps_std) self.mu_w = K.variable(np.random.normal(0., 10. * step, size=shape), name='mu_w') self.rho_w = K.variable(np.random.normal(0., 10. * step, size=shape), name='rho_w') self.W = self.mu_w + self.eps_w * K.log(1.0 + K.exp(self.rho_w)) self.eps_b = K.random_normal([self.output_dim], std=eps_std) self.mu_b = K.variable(np.random.normal(0., 10. * step, size=[self.output_dim]), name='mu_b') self.rho_b = K.variable(np.random.normal(0., 10. * step, size=[self.output_dim]), name='rho_b') self.b = self.mu_b + self.eps_b * K.log(1.0 + K.exp(self.rho_b)) self.trainable_weights = [self.mu_w, self.rho_w, self.mu_b, self.rho_b] def call(self, x, mask=None): return K.squeeze(K.batch_dot(K.expand_dims(x, dim=1), self.W), axis=1) + self.b def get_output_shape_for(self, input_shape): return (input_shape[0], self.output_dim) def log_gaussian(x, mean, std): return -K.log(std) - (x - mean) ** 2 / (2. * std ** 2) def sigma_from_rho(rho): return K.log(1. + K.exp(rho)) / step def variational_objective(model, noise_std, weights_std, batch_size, nb_batches): def loss(y, fx): log_pw = K.variable(0.) log_qw = K.variable(0.) 
for layer in model.layers: if type(layer) is BBBLayer: log_pw += K.sum(log_gaussian(layer.W, 0., weights_std)) log_pw += K.sum(log_gaussian(layer.b, 0., weights_std)) log_qw += K.sum(log_gaussian(layer.W, layer.mu_w, sigma_from_rho(layer.rho_w))) log_qw += K.sum(log_gaussian(layer.b, layer.mu_b, sigma_from_rho(layer.rho_b))) log_likelihood = K.sum(log_gaussian(y, fx, noise_std)) return K.sum((log_qw - log_pw) / nb_batches - log_likelihood) / batch_size return loss model = Sequential() in_shape = [batch_size, layers[0][0]] # input model.add(BBBLayer(layers[1][0], batch_input_shape=in_shape)) model.add(Activation('relu')) # hidden layers for l in range(2, len(layers) - 1): model.add(BBBLayer(layers[l - 1][0])) model.add(Activation('relu')) # output layer model.add(BBBLayer(1)) loss = variational_objective(model, noise_std, weights_std, batch_size, n_batches) model.compile(loss=loss, optimizer='adam', metrics=['accuracy']) return model
mit
-8,254,993,371,662,850,000
35.201493
111
0.556999
false
3.433121
false
false
false
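A usage sketch for BBBSampler.model_from_description. The layers format, a list of (width,) tuples with the input first, is inferred from how the code indexes layers; the hyperparameter values are illustrative, not taken from the repository.

layers = [(1,), (50,), (50,), (1,)]  # input, two hidden layers, output
model = BBBSampler.model_from_description(
    layers, noise_std=0.1, weights_std=1.0, batch_size=32, train_size=320)
model.summary()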
felixbr/nosql-rest-preprocessor
nosql_rest_preprocessor/models.py
1
5131
from __future__ import absolute_import, unicode_literals, print_function, division

from nosql_rest_preprocessor import exceptions
from nosql_rest_preprocessor.utils import non_mutating


class BaseModel(object):
    required_attributes = set()
    optional_attributes = None
    immutable_attributes = set()
    private_attributes = set()
    sub_models = {}
    resolved_attributes = {}

    @classmethod
    def validate(cls, obj):
        cls._check_required_attributes(obj)
        cls._check_allowed_attributes(obj)

        # recurse for sub models
        for attr, sub_model in cls.sub_models.items():
            if attr in obj.keys():
                sub_model.validate(obj[attr])

        return obj

    @classmethod
    @non_mutating
    def prepare_response(cls, obj):
        # remove non-public attrs
        for attr in cls.private_attributes:
            obj.pop(attr, None)

        # recurse for sub models
        for attr, sub_model in cls.sub_models.items():
            if attr in obj.keys():
                obj[attr] = sub_model.prepare_response(obj[attr])

        return obj

    @classmethod
    def merge_updated(cls, db_obj, new_obj):
        cls.validate(new_obj)

        merged_obj = {}

        # check if previously present immutable attributes should be deleted
        for key in cls.immutable_attributes:
            if key in db_obj and key not in new_obj:
                raise exceptions.ChangingImmutableAttributeError()

        # copy attributes into merged_obj
        for key, value in new_obj.items():
            cls._check_immutable_attrs_on_update(key, value, db_obj)

            if key in cls.resolved_attributes and isinstance(value, dict):
                # ignore resolved attributes in update
                merged_obj[key] = db_obj[key]
            else:
                merged_obj[key] = value

        # recurse for sub models
        for attr, sub_model in cls.sub_models.items():
            merged_obj[attr] = sub_model.merge_updated(db_obj[attr], new_obj[attr])

        return merged_obj

    @classmethod
    def _check_immutable_attrs_on_update(cls, key, value, db_obj):
        # check if immutable attributes should be changed
        if key in cls.immutable_attributes:
            if db_obj[key] != value:
                raise exceptions.ChangingImmutableAttributeError()

    @classmethod
    def _check_required_attributes(cls, obj):
        for attr in cls.required_attributes:
            if isinstance(attr, tuple):
                set_wanted = set(attr[1])
                set_contained = set(obj.keys())

                if attr[0] == 'one_of':
                    if len(set_wanted & set_contained) < 1:
                        raise exceptions.ValidationError()
                elif attr[0] == 'either_of':
                    if len(set_wanted & set_contained) != 1:
                        raise exceptions.ValidationError()
                else:
                    raise exceptions.ConfigurationError()
            else:
                if attr not in obj.keys():
                    raise exceptions.ValidationError()

    @classmethod
    def _check_allowed_attributes(cls, obj):
        if cls.optional_attributes is not None:
            required = cls._required_attributes()

            for attr in obj.keys():
                if attr in required:
                    continue

                allowed = False
                for opt_attr in cls.optional_attributes:
                    if attr == opt_attr:
                        allowed = True
                        break
                    elif isinstance(opt_attr, tuple):
                        if opt_attr[0] == 'all_of':
                            if attr in opt_attr[1]:  # if one of these is in obj.keys()...
                                if not set(opt_attr[1]).issubset(obj.keys()):  # ...all of them have to be there
                                    raise exceptions.ValidationError()
                                else:
                                    allowed = True
                                    break
                        elif opt_attr[0] == 'either_of':
                            if attr in opt_attr[1]:  # if one of these is in obj.keys()...
                                if next((key for key in opt_attr[1] if key != attr and key in obj.keys()), None):  # ...no other key may be present in obj.keys()
                                    raise exceptions.ValidationError()
                                else:
                                    allowed = True
                                    break
                        else:
                            raise exceptions.ConfigurationError()

                if not allowed:  # if we haven't found attr anywhere in cls.optional_attributes
                    raise exceptions.ValidationError()

    @classmethod
    def _required_attributes(cls):
        required = set()
        for attr in cls.required_attributes:
            if isinstance(attr, tuple):
                required = required | set(attr[1])
            else:
                required.add(attr)

        return required
mit
7,771,158,724,342,860,000
33.213333
161
0.524069
false
4.80881
false
false
false
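A minimal sketch of declaring and validating a concrete model on top of BaseModel above; the model name and field names are hypothetical.

class UserModel(BaseModel):
    required_attributes = {'name', ('either_of', ('email', 'phone'))}
    optional_attributes = ('address',)
    private_attributes = {'password_hash'}

UserModel.validate({'name': 'ada', 'email': 'ada@example.com'})  # passes
# UserModel.validate({'name': 'ada'})  # raises ValidationError: neither email nor phone given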
math-a3k/django-ai
tests/test_models/migrations/0011_add_is_inferred_and_minor_tweaks.py
1
2196
# -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-12-20 15:34
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('test_models', '0010_myunsupervisedlearningtechnique'),
    ]

    operations = [
        migrations.AddField(
            model_name='mystatisticalmodel',
            name='is_inferred',
            field=models.BooleanField(
                default=False, verbose_name='Is Inferred?'),
        ),
        migrations.AddField(
            model_name='mysupervisedlearningtechnique',
            name='is_inferred',
            field=models.BooleanField(
                default=False, verbose_name='Is Inferred?'),
        ),
        migrations.AddField(
            model_name='myunsupervisedlearningtechnique',
            name='is_inferred',
            field=models.BooleanField(
                default=False, verbose_name='Is Inferred?'),
        ),
        migrations.AlterField(
            model_name='mystatisticalmodel',
            name='sm_type',
            field=models.SmallIntegerField(blank=True, choices=[
                (0, 'General / System'),
                (1, 'Classification'),
                (2, 'Regression')],
                default=0, null=True,
                verbose_name='Statistical Technique Type'),
        ),
        migrations.AlterField(
            model_name='mysupervisedlearningtechnique',
            name='sm_type',
            field=models.SmallIntegerField(blank=True, choices=[
                (0, 'General / System'),
                (1, 'Classification'),
                (2, 'Regression')],
                default=0, null=True,
                verbose_name='Statistical Technique Type'),
        ),
        migrations.AlterField(
            model_name='myunsupervisedlearningtechnique',
            name='sm_type',
            field=models.SmallIntegerField(blank=True, choices=[
                (0, 'General / System'),
                (1, 'Classification'),
                (2, 'Regression')],
                default=0, null=True,
                verbose_name='Statistical Technique Type'),
        ),
    ]
lgpl-3.0
-7,274,611,177,119,370
33.857143
64
0.536885
false
4.565489
false
false
false
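The migration above is applied with Django's standard management command; the app label comes from the dependencies list.

# python manage.py migrate test_models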
Makeystreet/makeystreet
woot/apps/catalog/views/review.py
1
5983
from django.conf import settings
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect, Http404
from django.shortcuts import render
from django.utils import timezone

from woot.apps.catalog.forms import CreateProductReviewForm,\
    CreateShopReviewForm, CreateSpaceReviewForm
from woot.apps.catalog.models.core import Product, Shop, Space, NewProduct
from woot.apps.catalog.models.review import ProductReview, ShopReview,\
    SpaceReview
from .helper import get_user_details_json

static_blob = settings.STATIC_BLOB


def all_reviews(request):
    product_reviews = ProductReview.objects.all()
    shop_reviews = ShopReview.objects.all()
    space_reviews = SpaceReview.objects.all()

    context = {
        'static_blob': static_blob,
        'user_details': get_user_details_json(request),
        'product_reviews': product_reviews,
        'shop_reviews': shop_reviews,
        'space_reviews': space_reviews,
    }
    return render(request, 'catalog/all_reviews.html', context)


def store_review(request, review_id):
    try:
        user_details = get_user_details_json(request)
        review = ShopReview.objects.get(id=review_id)
        review.upvotes = review.voteshopreview_set.filter(vote=True)

        context = {
            'static_blob': static_blob,
            'user_details': user_details,
            'review': review,
        }
        return render(request, 'catalog/store_review.html', context)
    except ShopReview.DoesNotExist:
        raise Http404


def product_review(request, review_id):
    try:
        user_details = get_user_details_json(request)
        review = ProductReview.objects.get(id=review_id)
        review.upvotes = review.voteproductreview_set.filter(vote=True)

        context = {
            'static_blob': static_blob,
            'user_details': user_details,
            'review': review,
        }
        return render(request, 'catalog/product_review.html', context)
    except ProductReview.DoesNotExist:
        raise Http404


def space_review(request, review_id):
    try:
        user_details = get_user_details_json(request)
        review = SpaceReview.objects.get(id=review_id)
        review.upvotes = review.votespacereview_set.filter(vote=True)

        context = {
            'static_blob': static_blob,
            'user_details': user_details,
            'review': review,
        }
        return render(request, 'catalog/space_review.html', context)
    except SpaceReview.DoesNotExist:
        raise Http404


def create_review(request):
    if request.method == "POST":
        if request.POST.get('val_type', '') == 'PART':
            form = CreateProductReviewForm(request.POST)
            if form.is_valid():
                r = ProductReview()
                r.title = form.cleaned_data['val_title']
                r.review = form.cleaned_data['val_review']
                r.user = request.user
                r.rating = form.cleaned_data['val_rating']
                r.added_time = timezone.now()

                product_data_split = form.cleaned_data['val_part'].split('_')
                product_type = product_data_split[0]
                product_id = int(product_data_split[1])
                if product_type == 'old':
                    product = Product.objects.get(id=product_id)
                    r.product = product
                elif product_type == 'new':
                    product = NewProduct.objects.get(id=product_id)
                    r.product = product
                r.save()

                return HttpResponseRedirect(reverse('catalog:all_reviews'))
            else:
                print(form.errors)
        elif request.POST.get('val_type', '') == 'SHOP':
            form = CreateShopReviewForm(request.POST)
            if form.is_valid():
                r = ShopReview()
                r.title = form.cleaned_data['val_title']
                r.review = form.cleaned_data['val_review']
                r.user = request.user
                r.rating = form.cleaned_data['val_rating']
                r.added_time = timezone.now()

                shop_data_split = form.cleaned_data['val_shop'].split('_')
                shop_type = shop_data_split[0]
                shop_id = int(shop_data_split[1])
                if shop_type == 'old':
                    shop = Shop.objects.get(id=shop_id)
                    r.shop = shop
                elif shop_type == 'new':
                    shop = NewProduct.objects.get(id=shop_id)
                    r.shop = shop
                r.save()

                return HttpResponseRedirect(reverse('catalog:all_reviews'))
            else:
                print(form.errors)
        elif request.POST.get('val_type', '') == 'SPACE':
            form = CreateSpaceReviewForm(request.POST)
            if form.is_valid():
                r = SpaceReview()
                r.title = form.cleaned_data['val_title']
                r.review = form.cleaned_data['val_review']
                r.user = request.user
                r.rating = form.cleaned_data['val_rating']
                r.added_time = timezone.now()

                space_data_split = form.cleaned_data['val_space'].split('_')
                space_type = space_data_split[0]
                space_id = int(space_data_split[1])
                if space_type == 'old':
                    space = Space.objects.get(id=space_id)
                    r.space = space
                elif space_type == 'new':
                    space = NewProduct.objects.get(id=space_id)
                    r.space = space
                r.save()

                return HttpResponseRedirect(reverse('catalog:all_reviews'))
            else:
                print(form.errors)
        else:
            pass

    context = {
        'static_blob': static_blob,
        'user_details': get_user_details_json(request),
    }
    return render(request, 'catalog/create_product_review.html', context)
apache-2.0
661,690,713,215,756,200
33.188571
77
0.563095
false
4.131906
false
false
false
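A sketch of exercising create_review above with Django's test client. The URL is hypothetical (the real route lives in the project's urlconf), the form fields mirror the view code, and an authenticated session is assumed since the view assigns request.user.

from django.test import Client

client = Client()
# client.force_login(some_user)  # the view stores request.user on the review
response = client.post('/reviews/create/', {
    'val_type': 'PART',
    'val_title': 'Great part',
    'val_review': 'Worked as expected.',
    'val_rating': 5,
    'val_part': 'old_42',  # '<type>_<id>', as parsed by the view
})
print(response.status_code)  # 302 redirect to all_reviews on success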
semplea/characters-meta
python/alchemy/examples/alchemy_vision_v1.py
1
1466
import json
from os.path import join, dirname
from watson_developer_cloud import AlchemyVisionV1

alchemy_vision = AlchemyVisionV1(api_key='c851400276c1acbd020210847f8677e6d1577c26')

# Face recognition
with open(join(dirname(__file__), '../resources/face.jpg'), 'rb') as image_file:
    print(json.dumps(alchemy_vision.recognize_faces(image_file, knowledge_graph=True), indent=2))

face_url = 'https://upload.wikimedia.org/wikipedia/commons/9/9d/Barack_Obama.jpg'
print(json.dumps(alchemy_vision.recognize_faces(image_url=face_url, knowledge_graph=True), indent=2))

# Image tagging
with open(join(dirname(__file__), '../resources/test.jpg'), 'rb') as image_file:
    print(json.dumps(alchemy_vision.get_image_keywords(image_file, knowledge_graph=True, force_show_all=True), indent=2))

# Text recognition
with open(join(dirname(__file__), '../resources/text.png'), 'rb') as image_file:
    print(json.dumps(alchemy_vision.get_image_scene_text(image_file), indent=2))

print(json.dumps(alchemy_vision.get_image_keywords(
    image_url='https://upload.wikimedia.org/wikipedia/commons/8/81/Morris-Chair-Ironwood.jpg'), indent=2))

# Image link extraction
print(json.dumps(alchemy_vision.get_image_links(url='http://www.zillow.com/'), indent=2))

with open(join(dirname(__file__), '../resources/example.html'), 'r') as webpage:
    print(json.dumps(alchemy_vision.get_image_links(html=webpage.read()), indent=2))
mit
1,583,828,267,179,628,300
47.866667
106
0.71487
false
3.073375
false
true
false
mmclenna/engine
sky/tools/create_ios_sdk.py
1
1820
#!/usr/bin/env python
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import argparse
import subprocess
import shutil
import sys
import os


def main():
  parser = argparse.ArgumentParser(description='Creates the Flutter iOS SDK')
  parser.add_argument('--dst', type=str, required=True)
  parser.add_argument('--device-out-dir', type=str, required=True)
  parser.add_argument('--simulator-out-dir', type=str, required=True)
  args = parser.parse_args()

  device_sdk = os.path.join(args.device_out_dir, 'Flutter')
  simulator_sdk = os.path.join(args.simulator_out_dir, 'Flutter')
  flutter_framework_binary = 'Flutter.framework/Flutter'
  device_dylib = os.path.join(args.device_out_dir, flutter_framework_binary)
  simulator_dylib = os.path.join(args.simulator_out_dir,
      flutter_framework_binary)

  if not os.path.isdir(device_sdk):
    print 'Cannot find iOS device SDK at', device_sdk
    return 1
  if not os.path.isdir(simulator_sdk):
    print 'Cannot find iOS simulator SDK at', simulator_sdk
    return 1
  if not os.path.isfile(device_dylib):
    print 'Cannot find iOS device dylib at', device_dylib
    return 1
  if not os.path.isfile(simulator_dylib):
    print 'Cannot find iOS simulator dylib at', simulator_dylib
    return 1

  shutil.rmtree(args.dst, True)
  shutil.copytree(device_sdk, args.dst)

  sim_tools = 'Tools/iphonesimulator'
  shutil.copytree(os.path.join(simulator_sdk, sim_tools),
      os.path.join(args.dst, sim_tools))

  subprocess.call([
    'lipo',
    device_dylib,
    simulator_dylib,
    '-create',
    '-output',
    os.path.join(args.dst, 'Tools/common/Flutter.framework/Flutter')
  ])


if __name__ == '__main__':
  sys.exit(main())
bsd-3-clause
4,522,966,620,628,158,000
26.575758
77
0.697802
false
3.427495
false
false
false
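An invocation sketch for the script above; the output directories are hypothetical build locations.

# python create_ios_sdk.py \
#     --dst out/ios-sdk \
#     --device-out-dir out/ios_debug_arm \
#     --simulator-out-dir out/ios_debug_sim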
calinerd/AWS
LAMBDA/Lambda_AutoUpdate_SecurityGroup_to_Allow_inbound_All_CloudFront_IPs_443.py
1
6268
'''
Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except
in compliance with the License. A copy of the License is located at

    http://aws.amazon.com/apache2.0/

or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
'''

import boto3
import hashlib
import json
import urllib2

# Name of the service, as seen in the ip-groups.json file, to extract information for
SERVICE = "CLOUDFRONT"
# Ports your application uses that need inbound permissions from the service for
INGRESS_PORTS = [443]
# Tags which identify the security groups you want to update
SECURITY_GROUP_TAGS = {'Name': 'SG_Allow_CF_IPs_443', 'AutoUpdate': 'true'}


def lambda_handler(event, context):
    print("Received event: " + json.dumps(event, indent=2))
    message = json.loads(event['Records'][0]['Sns']['Message'])

    # Load the ip ranges from the url
    ip_ranges = json.loads(get_ip_groups_json(message['url'], message['md5']))

    # extract the service ranges
    cf_ranges = get_ranges_for_service(ip_ranges, SERVICE)

    # update the security groups
    result = update_security_groups(cf_ranges)

    return result


def get_ip_groups_json(url, expected_hash):
    print("Updating from " + url)

    response = urllib2.urlopen(url)
    ip_json = response.read()

    m = hashlib.md5()
    m.update(ip_json)
    hash = m.hexdigest()

    if hash != expected_hash:
        raise Exception('MD5 Mismatch: got ' + hash + ' expected ' + expected_hash)

    return ip_json


def get_ranges_for_service(ranges, service):
    service_ranges = list()
    for prefix in ranges['prefixes']:
        if prefix['service'] == service:
            print('Found ' + service + ' range: ' + prefix['ip_prefix'])
            service_ranges.append(prefix['ip_prefix'])
    return service_ranges


def update_security_groups(new_ranges):
    client = boto3.client('ec2')

    groups = get_security_groups_for_update(client)
    print('Found ' + str(len(groups)) + ' SecurityGroups to update')

    result = list()
    updated = 0

    for group in groups:
        if update_security_group(client, group, new_ranges):
            updated += 1
            result.append('Updated ' + group['GroupId'])

    result.append('Updated ' + str(updated) + ' of ' + str(len(groups)) + ' SecurityGroups')

    return result


def update_security_group(client, group, new_ranges):
    added = 0
    removed = 0

    if len(group['IpPermissions']) > 0:
        for permission in group['IpPermissions']:
            if INGRESS_PORTS.count(permission['ToPort']) > 0:
                old_prefixes = list()
                to_revoke = list()
                to_add = list()
                for range in permission['IpRanges']:
                    cidr = range['CidrIp']
                    old_prefixes.append(cidr)
                    if new_ranges.count(cidr) == 0:
                        to_revoke.append(range)
                        print(group['GroupId'] + ": Revoking " + cidr + ":" + str(permission['ToPort']))

                for range in new_ranges:
                    if old_prefixes.count(range) == 0:
                        to_add.append({'CidrIp': range})
                        print(group['GroupId'] + ": Adding " + range + ":" + str(permission['ToPort']))

                removed += revoke_permissions(client, group, permission, to_revoke)
                added += add_permissions(client, group, permission, to_add)
    else:
        for port in INGRESS_PORTS:
            to_add = list()
            for range in new_ranges:
                to_add.append({'CidrIp': range})
                print(group['GroupId'] + ": Adding " + range + ":" + str(port))
            permission = {'ToPort': port, 'FromPort': port, 'IpProtocol': 'tcp'}
            added += add_permissions(client, group, permission, to_add)

    print(group['GroupId'] + ": Added " + str(added) + ", Revoked " + str(removed))
    return (added > 0 or removed > 0)


def revoke_permissions(client, group, permission, to_revoke):
    if len(to_revoke) > 0:
        revoke_params = {
            'ToPort': permission['ToPort'],
            'FromPort': permission['FromPort'],
            'IpRanges': to_revoke,
            'IpProtocol': permission['IpProtocol']
        }

        client.revoke_security_group_ingress(GroupId=group['GroupId'], IpPermissions=[revoke_params])

    return len(to_revoke)


def add_permissions(client, group, permission, to_add):
    if len(to_add) > 0:
        add_params = {
            'ToPort': permission['ToPort'],
            'FromPort': permission['FromPort'],
            'IpRanges': to_add,
            'IpProtocol': permission['IpProtocol']
        }

        client.authorize_security_group_ingress(GroupId=group['GroupId'], IpPermissions=[add_params])

    return len(to_add)


def get_security_groups_for_update(client):
    filters = list()
    for key, value in SECURITY_GROUP_TAGS.iteritems():
        filters.extend([
            {'Name': "tag-key", 'Values': [key]},
            {'Name': "tag-value", 'Values': [value]}
        ])

    response = client.describe_security_groups(Filters=filters)

    return response['SecurityGroups']


'''
Sample Event From SNS:

{
  "Records": [
    {
      "EventVersion": "1.0",
      "EventSubscriptionArn": "arn:aws:sns:EXAMPLE",
      "EventSource": "aws:sns",
      "Sns": {
        "SignatureVersion": "1",
        "Timestamp": "1970-01-01T00:00:00.000Z",
        "Signature": "EXAMPLE",
        "SigningCertUrl": "EXAMPLE",
        "MessageId": "95df01b4-ee98-5cb9-9903-4c221d41eb5e",
        "Message": "{\"create-time\": \"yyyy-mm-ddThh:mm:ss+00:00\", \"synctoken\": \"0123456789\", \"md5\": \"03a8199d0c03ddfec0e542f8bf650ee7\", \"url\": \"https://ip-ranges.amazonaws.com/ip-ranges.json\"}",
        "Type": "Notification",
        "UnsubscribeUrl": "EXAMPLE",
        "TopicArn": "arn:aws:sns:EXAMPLE",
        "Subject": "TestInvoke"
      }
    }
  ]
}
'''
unlicense
4,925,142,042,631,168,000
34.619318
266
0.596522
false
3.755542
false
false
false
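The handler can be exercised locally by feeding it the sample SNS event documented in the module; this sketch assumes AWS credentials with EC2 permissions and network access, and the md5 placeholder must be replaced with the value from a real notification.

event = {'Records': [{'Sns': {'Message': json.dumps({
    'url': 'https://ip-ranges.amazonaws.com/ip-ranges.json',
    'md5': '<md5 of the current ip-ranges.json>'})}}]}
print(lambda_handler(event, None))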
Xdynix/PixivPixie
bundle_cli.py
1
2691
import os
import subprocess
import sys

from pixiv_pixie.cli import main as cli_main, NAME

BINARY_PATH = 'lib'
DATA_PATH = 'data'


def is_packaged():
    # Return true if executing from packaged file
    return hasattr(sys, 'frozen')


def get_path(path, package_prefix=DATA_PATH):
    if os.path.isabs(path) or not is_packaged():
        return path
    else:
        return os.path.join(
            sys.prefix,
            os.path.join(package_prefix, path)
        )


def build(
        script,
        name=None,
        one_file=False,
        no_console=False,
        icon=None,
        binary_path=BINARY_PATH,
        addition_binary=None,
        data_path=DATA_PATH,
        addition_data=None,
        hidden_import=None,
        distpath=None,
        workpath=None,
        specpath=None,
        addition_args=None,
):
    args = []

    if name is not None:
        args.extend(('-n', name))
    if one_file:
        args.append('-F')
    if no_console:
        args.append('-w')
    if icon is not None:
        args.extend(('-i', icon))
    if addition_args is None:
        addition_args = []

    def add_resource(add_type, path, resources):
        for resource in resources:
            args.append('--add-{}'.format(add_type))
            if isinstance(resource, tuple) or isinstance(resource, list):
                src = resource[0]
                dest = resource[1]
                args.append(src + os.path.pathsep + os.path.join(path, dest))
            else:
                args.append(
                    resource + os.path.pathsep + os.path.join(path, resource),
                )

    if addition_binary is not None:
        add_resource(
            add_type='binary',
            path=binary_path,
            resources=addition_binary,
        )
    if addition_data is not None:
        add_resource(
            add_type='data',
            path=data_path,
            resources=addition_data,
        )

    if hidden_import is not None:
        for m in hidden_import:
            args.extend(('--hidden-import', m))

    if distpath is not None:
        args.extend(('--distpath', distpath))
    if workpath is not None:
        args.extend(('--workpath', workpath))
    if specpath is not None:
        args.extend(('--specpath', specpath))

    subprocess.call(['pyinstaller'] + args + addition_args + [script])


def main():
    if not is_packaged():
        build(
            __file__,
            name=NAME,
            one_file=True,
            addition_binary=[
                ('freeimage-3.15.1-win64.dll', '')
            ],
            addition_args=[
                '-y',
                '--clean',
            ],
        )
    else:
        cli_main()


if __name__ == '__main__':
    main()
apache-2.0
-7,565,490,886,109,477,000
24.628571
78
0.531401
false
3.860832
false
false
false
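A sketch of how get_path above behaves at runtime: inside a PyInstaller one-file executable it resolves relative paths under sys.prefix, otherwise it returns them unchanged. The dll name mirrors the binary bundled in main().

dll = get_path('freeimage-3.15.1-win64.dll', package_prefix=BINARY_PATH)
print(dll)  # '<sys.prefix>/lib/freeimage-3.15.1-win64.dll' when frozen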
tortugueta/multilayers
examples/radcenter_distribution.py
1
8087
# -*- coding: utf-8 -*-
"""
Name          : radcenter_distribution
Author        : Joan Juvert <[email protected]>
Version       : 1.0
Description   : This script calculates the influence of the distribution of
              : radiative centers in the active layer on the observed
              : spectrum.

Copyright 2012 Joan Juvert

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program.  If not, see <http://www.gnu.org/licenses/>.
"""

import multilayers as ml
import numpy as np
import bphysics as bp
import scipy.integrate as integ
import argparse as ap
import sys
import pdb

# Argument parsing
parser = ap.ArgumentParser(
    description = "This script calculates the effect of the " + \
    "distribution of radiative centers in the active layer on " + \
    "the modificator to the spectrum. The observation angle is " + \
    "a fixed parameter. Optionally, the output can be plotted " + \
    "and output to the standard output or to a file. The matrix " + \
    "containing the values of F(z, lambda) can be saved to a file " + \
    "and recovered in a following run of the program to avoid " + \
    "recalculating it in case we want to calculate the effect of " + \
    "different distributions on the same system.")
parser.add_argument(
    "--graph",
    help = "Plot the results",
    action = "store_true")
parser.add_argument(
    "-o",
    "--output",
    help = "Dump the results to a file")
parser.add_argument(
    "-s",
    "--savematrix",
    help = "Save the matrix with the F(z, lambda) values to a file")
parser.add_argument(
    "-l",
    "--loadmatrix",
    help = "Load the matrix with the F(z, lambda) values from a file")
args = parser.parse_args()

# Load the depth distribution of radiative centers. Note that the origin
# and units of z must be the same as in the multilayer. The distribution
# should be normalized to 1.
print("Loading the distribution...")
path = "/home/joan/Dropbox/CNM/projectes/simulations_report/figures/" + \
    "rcdistributions/"
distribution = bp.rdfile(path + "gaussian_m25_s07.dat", usecols = [0, 1])[1]
print("Done")

print("Checking the distribution...")
integral = integ.simps(distribution[:, 1], distribution[:, 0], 0)
np.testing.assert_almost_equal(integral, 1, 2)
print("Done")

# If we load the values of F(z, lambda) calculated in a previous
# execution we do not need to build the multilayer and repeat the
# calculation of the F function. Notice that the values of z at which
# the new distribution is sampled should be the same as the previous
# one.
if args.loadmatrix:
    print("Loading matrix...")
    fmatrix = np.load(args.loadmatrix)
    zlist = fmatrix['zlist']
    np.testing.assert_array_equal(zlist, distribution[:, 0])
    wlist = fmatrix['wlist']
    angle = fmatrix['angle']
    fte = fmatrix['fte']
    ftm = fmatrix['ftm']
    print("Done")
else:
    # Create the materials
    print("Loading materials... ")
    silicon = ml.Medium("silicon.dat")
    air = ml.Medium("air.dat")
    sio2 = ml.Medium("sio2.dat")
    poly = ml.Medium("polysilicon.dat")
    print("Done")

    # Set the fixed parameters.
    angle = np.deg2rad(0)

    # Create the multilayer
    print("Building multilayer and allocating memory... ")
    thicknesses = [300, 50]
    multilayer = ml.Multilayer([
        air,
        [poly, thicknesses[0]],
        [sio2, thicknesses[1]],
        silicon])

    # Define the wavelengths and z coordinates at which F will be calculated
    # and allocate memory for the results. We will use a structured array to
    # store the values of F(z, lambda).
    wstep = 1
    wmin = multilayer.getMinMaxWlength()[0]
    wmax = multilayer.getMinMaxWlength()[1]
    wlist = np.arange(wmin, wmax, wstep)
    zlist = distribution[:, 0]

    ftype = np.dtype([
        ('fx', np.complex128),
        ('fy', np.complex128),
        ('fz', np.complex128)])
    resmatrix = np.empty((zlist.size, wlist.size), dtype = ftype)
    print("Done")

    # I(wavelength, theta) = s(wavelength) * F'(wavelength, theta), where
    # F'(wav, theta) = integral[z](|F|^2 * rcdist(z)). Therefore, we
    # calculate the new spectrum as a modification to the original spectrum.
    # The modification factor F'(wav, theta) is an integral over z.
    # First calculate |Fy|^2 for te and |Fx*cos^2 + Fz*sin^2|^2 for tm. We
    # do fx and fz in one loop and fy in another independent loop to avoid
    # recalculating the characteristic matrix at every iteration due to the
    # change of polarization.
    print("Calculating F...")
    for (widx, wlength) in enumerate(wlist):
        percent = (float(widx) / wlist.size) * 100
        print("%.2f%%" % percent)
        for (zidx, z) in enumerate(zlist):
            resmatrix[zidx][widx]['fx'] = multilayer.calculateFx(z, wlength, angle)
            resmatrix[zidx][widx]['fz'] = multilayer.calculateFz(z, wlength, angle)
        for (zidx, z) in enumerate(zlist):
            resmatrix[zidx][widx]['fy'] = multilayer.calculateFy(z, wlength, angle)

    # We are probably more interested in the effect of the multilayer on the
    # energy rather than the electric field. What we want is |Fy(z)|^2 for
    # TE waves and |Fx(z) cosA^2 + Fz(z) sinA^2|^2 for TM waves.
    ftm = np.absolute(
        resmatrix['fx'] * np.cos(angle) ** 2 + \
        resmatrix['fz'] * np.sin(angle) ** 2) ** 2
    fte = np.absolute(resmatrix['fy']) ** 2
    print("Done")

    # Notice that until now we have not used the distribution of the
    # radiative centers, but the calculation of ftm and fte is costly.
    # If requested, we can save fte and ftm to a file. In a following
    # execution of the script, the matrix can be loaded from the file
    # instead of recalculated.
    if args.savematrix:
        print("Saving matrix...")
        np.savez(args.savematrix, fte = fte, ftm = ftm, zlist = zlist,
                wlist = wlist, angle = angle)
        print("Done")

# Build or load the original spectrum. It should be sampled at the same
# wavelengths defined in wlist. If we are interested only in the
# modificator to the spectrum, not in the modified spectrum, we can
# leave it at 1.
original_spec = 1

# Multiply each F(z, lambda) by the distribution.
print("Integrating...")
distval = distribution[:, 1].reshape(distribution[:, 1].size, 1)
fte_mplied = fte * distval
ftm_mplied = ftm * distval
fte_int = integ.simps(fte_mplied, zlist, axis = 0)
ftm_int = integ.simps(ftm_mplied, zlist, axis = 0)
spectrum_modte = original_spec * fte_int
spectrum_modtm = original_spec * ftm_int
print("Done")

# Dump data to file or stdout
comments = "# F_TE = |Fy^2|^2\n" + \
    "# F_TM = |Fx * cosA^2 + Fz * sinA^2|^2\n" + \
    "# Modified spectrum for TE and TM waves for a\n" + \
    "# distributions of the radiative centers.\n" + \
    "# wlength\tF_TE\tF_TM"
if args.output:
    bp.wdfile(args.output, comments,
            np.array([wlist, spectrum_modte, spectrum_modtm]).T, '%.6e')
else:
    print(comments)
    for i in xrange(wlist.size):
        print("%.6e\t%.6e\t%.6e" % (wlist[i], spectrum_modte[i],
            spectrum_modtm[i]))

# Plot data if requested
if args.graph:
    import matplotlib.pyplot as plt
    plt.plot(wlist, spectrum_modte, label='TE', color = 'r')
    plt.plot(wlist, spectrum_modtm, label='TM', color = 'b')
    plt.xlabel('Wavelength (nm)')
    plt.ylabel('Energy ratio')
    plt.grid()
    plt.legend(loc=2)
    plt.title('%.1f rad' % angle)
    plt.show()
    plt.close()
gpl-3.0
-2,529,649,230,264,011,300
36.967136
83
0.649808
false
3.451558
false
false
false
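Typical invocations of the script above, based on its argparse options; the file names are illustrative.

# python radcenter_distribution.py -s fmatrix.npz -o spectrum.dat
# python radcenter_distribution.py -l fmatrix.npz --graph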
linaro-technologies/jobserv
jobserv/storage/local_storage.py
1
3989
# Copyright (C) 2017 Linaro Limited
# Author: Andy Doan <[email protected]>

import hmac
import os
import mimetypes
import shutil

from flask import Blueprint, request, send_file, url_for

from jobserv.jsend import get_or_404
from jobserv.models import Build, Project, Run
from jobserv.settings import INTERNAL_API_KEY, LOCAL_ARTIFACTS_DIR
from jobserv.storage.base import BaseStorage

blueprint = Blueprint('local_storage', __name__, url_prefix='/local-storage')


class Storage(BaseStorage):
    blueprint = blueprint

    def __init__(self):
        super().__init__()
        self.artifacts = LOCAL_ARTIFACTS_DIR

    def _get_local(self, storage_path):
        assert storage_path[0] != '/'
        path = os.path.join(self.artifacts, storage_path)
        dirname = os.path.dirname(path)
        if not os.path.exists(dirname):
            os.makedirs(dirname)
        return path

    def _create_from_string(self, storage_path, contents):
        path = self._get_local(storage_path)
        with open(path, 'w') as f:
            f.write(contents)

    def _create_from_file(self, storage_path, filename, content_type):
        path = self._get_local(storage_path)
        with open(filename, 'rb') as fin, open(path, 'wb') as fout:
            shutil.copyfileobj(fin, fout)

    def _get_as_string(self, storage_path):
        assert storage_path[0] != '/'
        path = os.path.join(self.artifacts, storage_path)
        with open(path, 'r') as f:
            return f.read()

    def list_artifacts(self, run):
        path = '%s/%s/%s/' % (
            run.build.project.name, run.build.build_id, run.name)
        path = os.path.join(self.artifacts, path)
        for base, _, names in os.walk(path):
            for name in names:
                if name != '.rundef.json':
                    yield os.path.join(base, name)[len(path):]

    def get_download_response(self, request, run, path):
        try:
            p = os.path.join(self.artifacts, self._get_run_path(run), path)
            mt = mimetypes.guess_type(p)[0]
            return send_file(open(p, 'rb'), mimetype=mt)
        except FileNotFoundError:
            return 'File not found', 404

    def _generate_put_url(self, run, path, expiration, content_type):
        p = os.path.join(self.artifacts, self._get_run_path(run), path)
        msg = '%s,%s,%s' % ('PUT', p, content_type)
        sig = hmac.new(INTERNAL_API_KEY, msg.encode(), 'sha1').hexdigest()
        return url_for(
            'local_storage.run_upload_artifact', sig=sig,
            proj=run.build.project.name, build_id=run.build.build_id,
            run=run.name, path=path, _external=True)


def _get_run(proj, build_id, run):
    p = get_or_404(Project.query.filter_by(name=proj))
    b = get_or_404(Build.query.filter_by(project=p, build_id=build_id))
    return Run.query.filter_by(
        name=run
    ).filter(
        Run.build.has(Build.id == b.id)
    ).first_or_404()


@blueprint.route('/<sig>/<proj>/builds/<int:build_id>/runs/<run>/<path:path>',
                 methods=('PUT',))
def run_upload_artifact(sig, proj, build_id, run, path):
    run = _get_run(proj, build_id, run)

    # validate the signature
    ls = Storage()
    p = os.path.join(ls.artifacts, ls._get_run_path(run), path)
    msg = '%s,%s,%s' % (request.method, p, request.headers.get('Content-Type'))
    computed = hmac.new(INTERNAL_API_KEY, msg.encode(), 'sha1').hexdigest()
    if not hmac.compare_digest(sig, computed):
        return 'Invalid signature', 401

    dirname = os.path.dirname(p)
    try:
        # we could have 2 uploads trying this, so just do it this way to avoid
        # race conditions
        os.makedirs(dirname)
    except FileExistsError:
        pass

    # stream the contents to disk
    with open(p, 'wb') as f:
        chunk_size = 4096
        while True:
            chunk = request.stream.read(chunk_size)
            if len(chunk) == 0:
                break
            f.write(chunk)

    return 'ok'
agpl-3.0
-3,963,570,515,246,286,300
33.094017
79
0.603159
false
3.426976
false
false
false
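A client-side sketch of the signature scheme enforced by run_upload_artifact above: the client signs the same 'METHOD,path,content-type' string with the shared INTERNAL_API_KEY. The key and paths here are hypothetical.

import hmac

key = b'internal-api-key'  # must equal the server's INTERNAL_API_KEY (bytes)
p = '/srv/artifacts/proj/1/run/results.tgz'  # server-side absolute path
msg = '%s,%s,%s' % ('PUT', p, 'application/gzip')
sig = hmac.new(key, msg.encode(), 'sha1').hexdigest()
# PUT the file to /local-storage/<sig>/proj/builds/1/runs/run/results.tgz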
jadref/buffer_bci
python/echoClient/eventForwarder.py
1
2911
#!/usr/bin/env python3

bufferpath = "../../python/signalProc"
fieldtripPath = "../../dataAcq/buffer/python"

import os, sys, random, math, time, socket, struct
sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), bufferpath))
import bufhelp
sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), fieldtripPath))
import FieldTrip

# Configuration of buffer
buffer1_hostname = 'localhost'
buffer1_port = 1972

# Configuration of forwarding buffer
buffer2_hostname = None
buffer2_port = None

# holder for the buffer2 connection
ftc2 = None
# flag to stop running when used from another function
running = True


def connectBuffers(buffer1_hostname, buffer1_port, buffer2_hostname, buffer2_port):
    if buffer1_hostname == buffer2_hostname and buffer1_port == buffer2_port:
        print("WARNING:: forwarding to the same port may result in infinite loops!!!!")

    # Connect to Buffer2 -- do this first so the global state is for ftc1
    print("Connecting to " + buffer2_hostname + ":" + str(buffer2_port))
    (ftc2, hdr2) = bufhelp.connect(buffer2_hostname, buffer2_port)
    print("Connected")
    print(hdr2)

    # Connect to Buffer1
    print("Connecting to " + buffer1_hostname + ":" + str(buffer1_port))
    (ftc1, hdr1) = bufhelp.connect(buffer1_hostname, buffer1_port)
    print("Connected!")
    print(hdr1)
    return (ftc1, ftc2)


# Receive events from the buffer1 and send them to buffer2
def forwardBufferEvents(ftc1, ftc2):
    global running
    global ftc
    ftc = ftc1
    while (running):
        events = bufhelp.buffer_newevents()
        for evt in events:
            print(str(evt.sample) + ": " + str(evt))
            evt.sample = -1
            ftc2.putEvents(evt)


def guiGetBuffer2():
    print("GUI info not supported yet!!")
    return

    import tkinter as tk
    master = tk.Tk()
    tk.Label(master, text="HostName").grid(row=0)
    tk.Label(master, text="Port").grid(row=1)
    e1 = tk.Entry(master)
    e2 = tk.Entry(master)
    e1.grid(row=0, column=1)
    e2.grid(row=1, column=1)
    master.mainloop()


if __name__ == "__main__":
    if len(sys.argv) > 1:  # called with options, i.e. commandline
        buffer2_hostname = sys.argv[1]
    if len(sys.argv) > 2:
        try:
            buffer2_port = int(sys.argv[2])
        except:
            print('Error: second argument (%s) must be a valid (=integer) port number' % sys.argv[2])
            sys.exit(1)
    if buffer2_hostname is None:
        (buffer2_hostname, buffer2_port) = guiGetBuffer2()
    (ftc1, ftc2) = connectBuffers(buffer1_hostname, buffer1_port, buffer2_hostname, buffer2_port)
    forwardBufferEvents(ftc1, ftc2)
gpl-3.0
-8,554,221,728,104,355,000
34.938272
103
0.605634
false
3.602723
false
false
false
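Command-line usage implied by the argv handling above: forward events from the local buffer (localhost:1972) to a second buffer; the host and port are examples.

# python3 eventForwarder.py otherhost 1972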
ozgurakgun/minion
mini-scripts/testallconstraints.py
1
3983
#!/usr/bin/python
# Generate two minion input files, run them then compare dumptree outputs to
# detect bugs in constraint propagators.

import sys, os, getopt
from constraint_test_common import *
from multiprocessing import Pool, Manager
import random
#from sendemail import *
import time

(optargs, other) = getopt.gnu_getopt(sys.argv, "", ["minion=", "numtests=", "email", "fullprop", "64bit", "procs=", "seed=", "conslist="])

if len(other) > 1:
    print("Usage: testallconstraints.py [--minion=<location of minion binary>] [--numtests=...] [--email] [--procs=...] [--seed=...] [--conslist=...]")
    sys.exit(1)

# This one tests all the constraints in the following list.
conslist = []

# equality constraints
conslist += ["diseq", "eq", "gaceq"]

# alldiffs
conslist += ["alldiff", "gacalldiff", "alldiffmatrix"]

# capacity constraints
conslist += ["gcc", "gccweak", "occurrence", "occurrenceleq", "occurrencegeq"]

# element constraints
conslist += ["element", "element_undefzero", "watchelement", "watchelement_undefzero"]
conslist += ["watchelement_one", "element_one"]

# arithmetic constraints
conslist += ["modulo", "modulo_undefzero", "pow", "minuseq", "product", "div", "div_undefzero", "abs"]
conslist += ["watchsumleq", "watchsumgeq", "watchvecneq", "hamming", "not-hamming"]
conslist += ["weightedsumleq", "weightedsumgeq"]
conslist += ["litsumgeq"]
# should test table to test reifytable? and reifyimplytable
conslist += ["sumgeq", "sumleq", "weightedsumleq", "weightedsumgeq"]
conslist += ["ineq"]
conslist += ["difference"]
conslist += ["negativetable", "lighttable"]

# symmetry-breaking constraints
conslist += ["lexleq", "lexless", "lexleq_rv", "lexleq_quick", "lexless_quick"]

conslist += ["max", "min"]
conslist += ["watchneq", "watchless"]
conslist += ["w-inset", "w-inintervalset", "w-notinset", "w-inrange", "w-notinrange", "w-literal", "w-notliteral"]
conslist += ["watchsumgeq", "litsumgeq", "watchneq", "watchless", "not-hamming"]
conslist += ["not-hamming"]
conslist += ["gacschema", "haggisgac", "haggisgac-stable", "str2plus", "shortstr2", "shortctuplestr2", "mddc"]
conslist += ["nvalueleq", "nvaluegeq"]

# add reifyimply variant of all constraints,
# and reify variant of all except those in reifyexceptions
it = conslist[:]
for c in it:
    conslist += ["reifyimply" + c]
    conslist += ["reify" + c]

numtests = 100
minionbin = "bin/minion"
email = False
fullprop = False  # compare the constraint against itself with fullprop. Needs DEBUG=1.
bit64 = False
procs = 1
seed = 12345

for i in optargs:
    (a1, a2) = i
    if a1 == "--minion":
        minionbin = a2
    elif a1 == "--numtests":
        numtests = int(a2)
    elif a1 == "--email":
        email = True
    elif a1 == "--fullprop":
        fullprop = True
    elif a1 == "--64bit":
        bit64 = True
    elif a1 == "--procs":
        procs = int(a2)
    elif a1 == "--seed":
        seed = int(a2)
    elif a1 == "--conslist":
        conslist = a2.split(",")


def runtest(consname):
    cachename = consname
    starttime = time.time()
    sys.stdout.flush()
    random.seed(seed)

    reify = False
    reifyimply = False
    if consname[0:10] == "reifyimply":
        reifyimply = True
        consname = consname[10:]
    if consname[0:5] == "reify":
        reify = True
        consname = consname[5:]

    consname = consname.replace("-", "__minus__")
    testobj = eval("test" + consname + "()")
    testobj.solver = minionbin

    for testnum in range(numtests):
        options = {'reify': reify, 'reifyimply': reifyimply,
                   'fullprop': fullprop, 'printcmd': False,
                   'fixlength': False, 'getsatisfyingassignment': True}
        if not testobj.runtest(options):
            print("Failed when testing %s" % cachename)
            sys.stdout.flush()
            return False

    print("Completed testing %s, duration: %d" % (cachename, time.time() - starttime))
    return True


if __name__ == '__main__':
    p = Pool(procs)
    retval = p.map(runtest, conslist)
    if all(retval):
        print("Success")
        exit(0)
    else:
        print("Failure")
        exit(1)
gpl-2.0
-5,649,680,209,550,508,000
27.654676
152
0.651017
false
3.061491
true
false
false
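The harness above builds each test object with `eval("test" + consname + "()")`, so `constraint_test_common` must export one class per constraint following that naming contract. A hedged sketch of the shape such a class takes (the body is illustrative only, not the real test generator):

class testeq:
    def __init__(self):
        self.solver = None  # path to the minion binary, set by the harness

    def runtest(self, options):
        # would generate two minion input files, run self.solver on both
        # and compare their dumptree output; here we just pretend it passed
        return True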
rabramley/telomere
app/model/batch.py
1
2972
from app import db
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.sql import select, func
from app.model.outstandingError import OutstandingError
import numpy
import decimal


class Batch(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    robot = db.Column(db.String(20))
    temperature = db.Column(db.Numeric(precision=3, scale=1))
    datetime = db.Column(db.DateTime())
    userId = db.Column(db.Integer, db.ForeignKey('user.id'))
    version_id = db.Column(db.Integer, nullable=False)
    plateName = db.Column(db.String(50))
    halfPlate = db.Column(db.String(1))
    humidity = db.Column(db.Integer())
    primerBatch = db.Column(db.Integer())
    enzymeBatch = db.Column(db.Integer())
    rotorGene = db.Column(db.Integer())
    operatorUserId = db.Column(db.Integer, db.ForeignKey('user.id'))
    batchFailureReason = db.Column(db.Integer())
    processType = db.Column(db.String(20))

    __mapper_args__ = {
        "version_id_col": version_id
    }

    def __init__(self, *args, **kwargs):
        self.id = kwargs.get('id')
        self.robot = kwargs.get('robot')
        self.temperature = kwargs.get('temperature')
        self.datetime = kwargs.get('datetime')
        self.userId = kwargs.get('userId')
        self.plateName = kwargs.get('plateName')
        self.halfPlate = kwargs.get('halfPlate')
        self.humidity = kwargs.get('humidity')
        self.primerBatch = kwargs.get('primerBatch')
        self.enzymeBatch = kwargs.get('enzymeBatch')
        self.rotorGene = kwargs.get('rotorGene')
        self.operatorUserId = kwargs.get('operatorUserId')
        self.batchFailureReason = kwargs.get('batchFailureReason')
        self.processType = kwargs.get('processType')

    @hybrid_property
    def outstandingErrorCount(self):
        return len(self.outstandingErrors)

    @outstandingErrorCount.expression
    def outstandingErrorCount(cls):
        return (select([func.count(OutstandingError.id)]).
                where(OutstandingError.batchId == cls.id).
                label("outstandingErrorCount")
                )

    def get_measurements_for_sample_code(self, sampleCode):
        return [m for m in self.measurements if m.sample.sampleCode == sampleCode]

    def has_no_pool_samples(self):
        return not any(m.sample.is_pool_sample() for m in self.measurements)

    def has_no_non_pool_samples(self):
        return not any(not m.sample.is_pool_sample() for m in self.measurements)

    def has_invalid_pool_ts_average(self):
        poolTsValues = [decimal.Decimal(m.ts)
                        for m in self.measurements
                        if m.ts is not None and m.sample.is_pool_sample()]
        averagePoolTs = numpy.mean(poolTsValues)
        return averagePoolTs < 0.99 or averagePoolTs > 1.01

    def is_duplicate(self):
        return self.processType == "Duplicate"

    def is_replate(self):
        return self.processType == "Re-Plate"

    def is_initial(self):
        return self.processType == "Initial"
mit
7,113,725,852,751,646,000
37.102564
124
0.664536
false
3.580723
false
false
false
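The hybrid property above works on both sides of the ORM; a short hedged usage sketch (the Flask-SQLAlchemy session and seeded data are assumed):

# instance side: plain Python, len(self.outstandingErrors)
b = Batch.query.get(1)
print(b.outstandingErrorCount)

# class side: expands to a correlated SELECT count(...) subquery in SQL
failing = Batch.query.filter(Batch.outstandingErrorCount > 0).all()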
DarioGT/OMS-PluginXML
org.modelsphere.sms/lib/jython-2.2.1/Lib/uu.py
1
6092
#! /usr/bin/env python

# Copyright 1994 by Lance Ellinghouse
# Cathedral City, California Republic, United States of America.
#                        All Rights Reserved
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose and without fee is hereby granted,
# provided that the above copyright notice appear in all copies and that
# both that copyright notice and this permission notice appear in
# supporting documentation, and that the name of Lance Ellinghouse
# not be used in advertising or publicity pertaining to distribution
# of the software without specific, written prior permission.
# LANCE ELLINGHOUSE DISCLAIMS ALL WARRANTIES WITH REGARD TO
# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
# FITNESS, IN NO EVENT SHALL LANCE ELLINGHOUSE CENTRUM BE LIABLE
# FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
#
# Modified by Jack Jansen, CWI, July 1995:
# - Use binascii module to do the actual line-by-line conversion
#   between ascii and binary. This results in a 1000-fold speedup. The C
#   version is still 5 times faster, though.
# - Arguments more compliant with python standard

"""Implementation of the UUencode and UUdecode functions.

encode(in_file, out_file [,name, mode])
decode(in_file [, out_file, mode])
"""

import binascii
import os
import sys
from types import StringType

__all__ = ["Error", "encode", "decode"]


class Error(Exception):
    pass


def encode(in_file, out_file, name=None, mode=None):
    """Uuencode file"""
    #
    # If in_file is a pathname open it and change defaults
    #
    if in_file == '-':
        in_file = sys.stdin
    elif isinstance(in_file, StringType):
        if name is None:
            name = os.path.basename(in_file)
        if mode is None:
            try:
                mode = os.stat(in_file)[0]
            except AttributeError:
                pass
        in_file = open(in_file, 'rb')
    #
    # Open out_file if it is a pathname
    #
    if out_file == '-':
        out_file = sys.stdout
    elif isinstance(out_file, StringType):
        out_file = open(out_file, 'w')
    #
    # Set defaults for name and mode
    #
    if name is None:
        name = '-'
    if mode is None:
        mode = 0666
    #
    # Write the data
    #
    out_file.write('begin %o %s\n' % ((mode & 0777), name))
    str = in_file.read(45)  # NB: shadows the builtin str() for the rest of this function
    while len(str) > 0:
        out_file.write(binascii.b2a_uu(str))
        str = in_file.read(45)
    out_file.write(' \nend\n')


def decode(in_file, out_file=None, mode=None, quiet=0):
    """Decode uuencoded file"""
    #
    # Open the input file, if needed.
    #
    if in_file == '-':
        in_file = sys.stdin
    elif isinstance(in_file, StringType):
        in_file = open(in_file)
    #
    # Read until a begin is encountered or we've exhausted the file
    #
    while 1:
        hdr = in_file.readline()
        if not hdr:
            raise Error, 'No valid begin line found in input file'
        if hdr[:5] != 'begin':
            continue
        hdrfields = hdr.split(" ", 2)
        if len(hdrfields) == 3 and hdrfields[0] == 'begin':
            try:
                int(hdrfields[1], 8)
                break
            except ValueError:
                pass
    if out_file is None:
        out_file = hdrfields[2].rstrip()
        if os.path.exists(out_file):
            raise Error, 'Cannot overwrite existing file: %s' % out_file
    if mode is None:
        mode = int(hdrfields[1], 8)
    #
    # Open the output file
    #
    opened = False
    if out_file == '-':
        out_file = sys.stdout
    elif isinstance(out_file, StringType):
        fp = open(out_file, 'wb')
        try:
            # NB: os.path.chmod does not exist (os.chmod is presumably meant);
            # the AttributeError raised here is silently swallowed, so the
            # decoded file never actually gets its mode set.
            os.path.chmod(out_file, mode)
        except AttributeError:
            pass
        out_file = fp
        opened = True
    #
    # Main decoding loop
    #
    s = in_file.readline()
    while s and s.strip() != 'end':
        try:
            data = binascii.a2b_uu(s)
        except binascii.Error, v:
            # Workaround for broken uuencoders by /Fredrik Lundh
            nbytes = (((ord(s[0]) - 32) & 63) * 4 + 5) / 3
            data = binascii.a2b_uu(s[:nbytes])
            if not quiet:
                sys.stderr.write("Warning: %s\n" % str(v))
        out_file.write(data)
        s = in_file.readline()
    if not s:
        raise Error, 'Truncated input file'
    if opened:
        out_file.close()


def test():
    """uuencode/uudecode main program"""
    import getopt

    dopt = 0
    topt = 0
    input = sys.stdin
    output = sys.stdout
    ok = 1
    try:
        optlist, args = getopt.getopt(sys.argv[1:], 'dt')
    except getopt.error:
        ok = 0
    if not ok or len(args) > 2:
        print 'Usage:', sys.argv[0], '[-d] [-t] [input [output]]'
        print ' -d: Decode (in stead of encode)'
        print ' -t: data is text, encoded format unix-compatible text'
        sys.exit(1)

    for o, a in optlist:
        if o == '-d':
            dopt = 1
        if o == '-t':
            topt = 1

    if len(args) > 0:
        input = args[0]
    if len(args) > 1:
        output = args[1]

    if dopt:
        if topt:
            if isinstance(output, StringType):
                output = open(output, 'w')
            else:
                print sys.argv[0], ': cannot do -t to stdout'
                sys.exit(1)
        decode(input, output)
    else:
        if topt:
            if isinstance(input, StringType):
                input = open(input, 'r')
            else:
                print sys.argv[0], ': cannot do -t from stdin'
                sys.exit(1)
        encode(input, output)


if __name__ == '__main__':
    test()
gpl-3.0
8,905,765,275,347,266,000
29.241026
72
0.559094
false
3.732843
false
false
false
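A minimal round-trip sketch for the module above (Python 2, like the file itself): encode bytes into uu format and decode them back using in-memory file objects. The import name `uu` is assumed from the file's stdlib origin.

from StringIO import StringIO
import uu  # the module above; name assumed

src = StringIO('hello world\n')
encoded = StringIO()
uu.encode(src, encoded, name='hello.txt', mode=0644)

encoded.seek(0)
decoded = StringIO()
uu.decode(encoded, decoded)
assert decoded.getvalue() == 'hello world\n'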
jhogg41/gm-o-matic
gom_server/gom_server/urls.py
1
1187
"""gom_server URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.8/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Add an import: from blog import urls as blog_urls 2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls)) """ from django.conf.urls import include, url from django.contrib import admin from rest_framework import routers import core.router import char_attr.router router = routers.DefaultRouter() core.router.addRoutes(router) char_attr.router.addRoutes(router) urlpatterns = [ url(r'^admin/', include(admin.site.urls)), url(r'^api-auth/', include('rest_framework.urls', namespace='rest-framework')), url(r'^', include(router.urls)), url(r'^rest-auth/', include('rest_auth.urls')), url(r'^rest-auth/registration', include('rest_auth.registration.urls')), ]
bsd-2-clause
-3,051,245,291,478,614,500
36.09375
83
0.708509
false
3.420749
false
false
false
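The `addRoutes(router)` convention above lets each app register its own ViewSets on the shared DefaultRouter. A hedged sketch of what `core/router.py` might look like; the ViewSet and prefix names are assumptions, not code from this repository:

def addRoutes(router):
    # each app registers its own endpoints on the shared DefaultRouter
    from core.views import GameViewSet  # hypothetical ViewSet
    router.register(r'games', GameViewSet)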
kevin-coder/tensorflow-fork
tensorflow/python/keras/layers/normalization_test.py
1
22900
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for normalization layers."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from absl.testing import parameterized
import numpy as np

from tensorflow.python import keras
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import test_util as tf_test_util
from tensorflow.python.keras import keras_parameterized
from tensorflow.python.keras import testing_utils
from tensorflow.python.keras.layers import normalization
from tensorflow.python.keras.layers import normalization_v2
from tensorflow.python.keras.mixed_precision.experimental import policy
from tensorflow.python.platform import test
from tensorflow.python.training import gradient_descent


class BatchNormalizationTest(keras_parameterized.TestCase):

  @keras_parameterized.run_all_keras_modes
  def test_basic_batchnorm(self):
    testing_utils.layer_test(
        keras.layers.BatchNormalization,
        kwargs={
            'momentum': 0.9,
            'epsilon': 0.1,
            'gamma_regularizer': keras.regularizers.l2(0.01),
            'beta_regularizer': keras.regularizers.l2(0.01)
        },
        input_shape=(3, 4, 2))
    testing_utils.layer_test(
        keras.layers.BatchNormalization,
        kwargs={
            'gamma_initializer': 'ones',
            'beta_initializer': 'ones',
            'moving_mean_initializer': 'zeros',
            'moving_variance_initializer': 'ones'
        },
        input_shape=(3, 4, 2))
    testing_utils.layer_test(
        keras.layers.BatchNormalization,
        kwargs={'scale': False, 'center': False},
        input_shape=(3, 3))

  @tf_test_util.run_in_graph_and_eager_modes
  def test_batchnorm_weights(self):
    layer = keras.layers.BatchNormalization(scale=False, center=False)
    layer.build((None, 3, 4))
    self.assertEqual(len(layer.trainable_weights), 0)
    self.assertEqual(len(layer.weights), 2)

    layer = keras.layers.BatchNormalization()
    layer.build((None, 3, 4))
    self.assertEqual(len(layer.trainable_weights), 2)
    self.assertEqual(len(layer.weights), 4)

  @tf_test_util.run_in_graph_and_eager_modes
  def test_batchnorm_regularization(self):
    layer = keras.layers.BatchNormalization(
        gamma_regularizer='l1', beta_regularizer='l1')
    layer.build((None, 3, 4))
    self.assertEqual(len(layer.losses), 2)
    max_norm = keras.constraints.max_norm
    layer = keras.layers.BatchNormalization(
        gamma_constraint=max_norm, beta_constraint=max_norm)
    layer.build((None, 3, 4))
    self.assertEqual(layer.gamma.constraint, max_norm)
    self.assertEqual(layer.beta.constraint, max_norm)

  @keras_parameterized.run_all_keras_modes
  def test_batchnorm_convnet(self):
    if test.is_gpu_available(cuda_only=True):
      with self.session(use_gpu=True):
        model = keras.models.Sequential()
        norm = keras.layers.BatchNormalization(
            axis=1, input_shape=(3, 4, 4), momentum=0.8)
        model.add(norm)
        model.compile(loss='mse',
                      optimizer=gradient_descent.GradientDescentOptimizer(0.01),
                      run_eagerly=testing_utils.should_run_eagerly())

        # centered on 5.0, variance 10.0
        x = np.random.normal(loc=5.0, scale=10.0, size=(1000, 3, 4, 4))
        model.fit(x, x, epochs=4, verbose=0)
        out = model.predict(x)
        out -= np.reshape(keras.backend.eval(norm.beta), (1, 3, 1, 1))
        out /= np.reshape(keras.backend.eval(norm.gamma), (1, 3, 1, 1))

        np.testing.assert_allclose(np.mean(out, axis=(0, 2, 3)), 0.0, atol=1e-1)
        np.testing.assert_allclose(np.std(out, axis=(0, 2, 3)), 1.0, atol=1e-1)

  @keras_parameterized.run_all_keras_modes
  def test_batchnorm_convnet_channel_last(self):
    model = keras.models.Sequential()
    norm = keras.layers.BatchNormalization(
        axis=-1, input_shape=(4, 4, 3), momentum=0.8)
    model.add(norm)
    model.compile(loss='mse',
                  optimizer=gradient_descent.GradientDescentOptimizer(0.01),
                  run_eagerly=testing_utils.should_run_eagerly())

    # centered on 5.0, variance 10.0
    x = np.random.normal(loc=5.0, scale=10.0, size=(1000, 4, 4, 3))
    model.fit(x, x, epochs=4, verbose=0)
    out = model.predict(x)
    out -= np.reshape(keras.backend.eval(norm.beta), (1, 1, 1, 3))
    out /= np.reshape(keras.backend.eval(norm.gamma), (1, 1, 1, 3))

    np.testing.assert_allclose(np.mean(out, axis=(0, 1, 2)), 0.0, atol=1e-1)
    np.testing.assert_allclose(np.std(out, axis=(0, 1, 2)), 1.0, atol=1e-1)

  @keras_parameterized.run_all_keras_modes
  def test_batchnorm_correctness(self):
    _run_batchnorm_correctness_test(
        normalization.BatchNormalization, dtype='float32')
    _run_batchnorm_correctness_test(
        normalization_v2.BatchNormalization, dtype='float32')

  @keras_parameterized.run_all_keras_modes
  def test_batchnorm_mixed_precision(self):
    _run_batchnorm_correctness_test(
        normalization.BatchNormalization, dtype='float16')
    _run_batchnorm_correctness_test(
        normalization_v2.BatchNormalization, dtype='float16')

  @tf_test_util.run_in_graph_and_eager_modes
  def test_batchnorm_policy(self):
    norm = keras.layers.BatchNormalization(
        axis=-1,
        input_shape=(4, 4, 3),
        momentum=0.8,
        dtype=policy.Policy('infer_float32_vars'))
    x = np.random.normal(size=(10, 4, 4, 3)).astype('float16')
    y = norm(x)
    self.assertEqual(y.dtype, 'float16')
    self.assertEqual(norm.beta.dtype.base_dtype, 'float32')
    self.assertEqual(norm.gamma.dtype.base_dtype, 'float32')


class BatchNormalizationV1Test(test.TestCase):

  @tf_test_util.run_in_graph_and_eager_modes
  def test_v1_fused_attribute(self):
    norm = normalization.BatchNormalization()
    inp = keras.layers.Input((4, 4, 4))
    norm(inp)
    self.assertEqual(norm.fused, True)

    norm = normalization.BatchNormalization(fused=False)
    self.assertEqual(norm.fused, False)
    inp = keras.layers.Input(shape=(4, 4, 4))
    norm(inp)
    self.assertEqual(norm.fused, False)

    norm = normalization.BatchNormalization(virtual_batch_size=2)
    self.assertEqual(norm.fused, True)
    inp = keras.layers.Input(shape=(2, 2, 2))
    norm(inp)
    self.assertEqual(norm.fused, False)


class BatchNormalizationV2Test(keras_parameterized.TestCase):

  @keras_parameterized.run_all_keras_modes
  def test_basic_batchnorm_v2(self):
    testing_utils.layer_test(
        normalization_v2.BatchNormalization,
        kwargs={'fused': True},
        input_shape=(3, 3, 3, 3))
    testing_utils.layer_test(
        normalization_v2.BatchNormalization,
        kwargs={'fused': None},
        input_shape=(3, 3, 3))

  @tf_test_util.run_in_graph_and_eager_modes
  def test_v2_fused_attribute(self):
    norm = normalization_v2.BatchNormalization()
    self.assertEqual(norm.fused, None)
    inp = keras.layers.Input(shape=(4, 4, 4))
    norm(inp)
    self.assertEqual(norm.fused, True)

    norm = normalization_v2.BatchNormalization()
    self.assertEqual(norm.fused, None)
    inp = keras.layers.Input(shape=(4, 4))
    norm(inp)
    self.assertEqual(norm.fused, False)

    norm = normalization_v2.BatchNormalization(virtual_batch_size=2)
    self.assertEqual(norm.fused, False)
    inp = keras.layers.Input(shape=(4, 4, 4))
    norm(inp)
    self.assertEqual(norm.fused, False)

    norm = normalization_v2.BatchNormalization(fused=False)
    self.assertEqual(norm.fused, False)
    inp = keras.layers.Input(shape=(4, 4, 4))
    norm(inp)
    self.assertEqual(norm.fused, False)

    norm = normalization_v2.BatchNormalization(fused=True, axis=[3])
    self.assertEqual(norm.fused, True)
    inp = keras.layers.Input(shape=(4, 4, 4))
    norm(inp)
    self.assertEqual(norm.fused, True)

    with self.assertRaisesRegexp(ValueError, 'fused.*renorm'):
      normalization_v2.BatchNormalization(fused=True, renorm=True)

    with self.assertRaisesRegexp(ValueError, 'fused.*when axis is 1 or 3'):
      normalization_v2.BatchNormalization(fused=True, axis=2)

    with self.assertRaisesRegexp(ValueError, 'fused.*when axis is 1 or 3'):
      normalization_v2.BatchNormalization(fused=True, axis=[1, 3])

    with self.assertRaisesRegexp(ValueError, 'fused.*virtual_batch_size'):
      normalization_v2.BatchNormalization(fused=True, virtual_batch_size=2)

    with self.assertRaisesRegexp(ValueError, 'fused.*adjustment'):
      normalization_v2.BatchNormalization(fused=True,
                                          adjustment=lambda _: (1, 0))

    norm = normalization_v2.BatchNormalization(fused=True)
    self.assertEqual(norm.fused, True)
    inp = keras.layers.Input(shape=(4, 4))
    with self.assertRaisesRegexp(ValueError, '4D input tensors'):
      norm(inp)


def _run_batchnorm_correctness_test(layer, dtype='float32', fused=False):
  model = keras.models.Sequential()
  model.add(keras.Input(shape=(2, 2, 2), dtype=dtype))
  norm = layer(momentum=0.8, fused=fused)
  model.add(norm)
  if dtype == 'float16':
    # Keras models require float32 losses.
    model.add(keras.layers.Lambda(lambda x: keras.backend.cast(x, 'float32')))
  model.compile(loss='mse',
                optimizer=gradient_descent.GradientDescentOptimizer(0.01),
                run_eagerly=testing_utils.should_run_eagerly())

  # centered on 5.0, variance 10.0
  x = (np.random.normal(loc=5.0, scale=10.0, size=(1000, 2, 2, 2))
       .astype(dtype))
  model.fit(x, x, epochs=4, verbose=0)
  out = model.predict(x)
  out -= keras.backend.eval(norm.beta)
  out /= keras.backend.eval(norm.gamma)

  np.testing.assert_allclose(out.mean(), 0.0, atol=1e-1)
  np.testing.assert_allclose(out.std(), 1.0, atol=1e-1)


@parameterized.parameters(
    [normalization.BatchNormalization, normalization_v2.BatchNormalization])
class NormalizationLayersGraphModeOnlyTest(
    test.TestCase, parameterized.TestCase):

  def test_shared_batchnorm(self, layer):
    """Test that a BN layer can be shared across different data streams."""
    with self.cached_session():
      # Test single layer reuse
      bn = layer()
      x1 = keras.layers.Input(shape=(10,))
      _ = bn(x1)

      x2 = keras.layers.Input(shape=(10,))
      y2 = bn(x2)

      x = np.random.normal(loc=5.0, scale=10.0, size=(2, 10))
      model = keras.models.Model(x2, y2)

      model.compile(gradient_descent.GradientDescentOptimizer(0.01), 'mse')
      model.train_on_batch(x, x)

      self.assertEqual(len(bn.updates), 4)
      self.assertEqual(len(model.updates), 2)
      self.assertEqual(len(model.get_updates_for(x2)), 2)

      # Test model-level reuse
      x3 = keras.layers.Input(shape=(10,))
      y3 = model(x3)
      new_model = keras.models.Model(x3, y3, name='new_model')

      self.assertEqual(len(new_model.updates), 2)
      self.assertEqual(len(model.updates), 4)
      self.assertEqual(len(new_model.get_updates_for(x3)), 2)
      new_model.compile(gradient_descent.GradientDescentOptimizer(0.01), 'mse')
      new_model.train_on_batch(x, x)

  def test_that_trainable_disables_updates(self, layer):
    with self.cached_session():
      val_a = np.random.random((10, 4))
      val_out = np.random.random((10, 4))

      a = keras.layers.Input(shape=(4,))
      layer = layer(input_shape=(4,))
      b = layer(a)
      model = keras.models.Model(a, b)

      model.trainable = False
      assert not model.updates

      model.compile(gradient_descent.GradientDescentOptimizer(0.01), 'mse')
      assert not model.updates

      x1 = model.predict(val_a)
      model.train_on_batch(val_a, val_out)
      x2 = model.predict(val_a)
      self.assertAllClose(x1, x2, atol=1e-7)

      model.trainable = True
      model.compile(gradient_descent.GradientDescentOptimizer(0.01), 'mse')
      assert model.updates

      model.train_on_batch(val_a, val_out)
      x2 = model.predict(val_a)
      assert np.abs(np.sum(x1 - x2)) > 1e-5

      layer.trainable = False
      model.compile(gradient_descent.GradientDescentOptimizer(0.01), 'mse')
      assert not model.updates

      x1 = model.predict(val_a)
      model.train_on_batch(val_a, val_out)
      x2 = model.predict(val_a)
      self.assertAllClose(x1, x2, atol=1e-7)

  @tf_test_util.run_deprecated_v1
  def test_batchnorm_trainable(self, layer):
    """Tests that batchnorm layer is trainable when learning phase is enabled.

    Computes mean and std for current inputs then
    applies batch normalization using them.

    Args:
      layer: Either V1 or V2 of BatchNormalization layer.
    """
    # TODO(fchollet): enable in all execution modes when issue with
    # learning phase setting is resolved.
    with self.cached_session():
      bn_mean = 0.5
      bn_std = 10.
      val_a = np.expand_dims(np.arange(10.), axis=1)

      def get_model(bn_mean, bn_std):
        inp = keras.layers.Input(shape=(1,))
        x = layer()(inp)
        model1 = keras.models.Model(inp, x)
        model1.set_weights([
            np.array([1.]),
            np.array([0.]),
            np.array([bn_mean]),
            np.array([bn_std**2])
        ])
        return model1

      # Simulates training-mode with trainable layer.
      # Should use mini-batch statistics.
      with keras.backend.learning_phase_scope(1):
        model = get_model(bn_mean, bn_std)
        model.compile(loss='mse', optimizer='rmsprop')
        out = model.predict(val_a)
        self.assertAllClose(
            (val_a - np.mean(val_a)) / np.std(val_a), out, atol=1e-3)


def _run_layernorm_correctness_test(layer, dtype='float32'):
  model = keras.models.Sequential()
  norm = layer(input_shape=(2, 2, 2))
  model.add(norm)
  model.compile(loss='mse',
                optimizer=gradient_descent.GradientDescentOptimizer(0.01),
                run_eagerly=testing_utils.should_run_eagerly())

  # centered on 5.0, variance 10.0
  x = (np.random.normal(loc=5.0, scale=10.0, size=(1000, 2, 2, 2))
       .astype(dtype))
  model.fit(x, x, epochs=4, verbose=0)
  out = model.predict(x)
  out -= keras.backend.eval(norm.beta)
  out /= keras.backend.eval(norm.gamma)

  np.testing.assert_allclose(out.mean(), 0.0, atol=1e-1)
  np.testing.assert_allclose(out.std(), 1.0, atol=1e-1)


class LayerNormalizationTest(keras_parameterized.TestCase):

  @keras_parameterized.run_all_keras_modes
  def test_basic_layernorm(self):
    testing_utils.layer_test(
        keras.layers.LayerNormalization,
        kwargs={
            'gamma_regularizer': keras.regularizers.l2(0.01),
            'beta_regularizer': keras.regularizers.l2(0.01)
        },
        input_shape=(3, 4, 2))
    testing_utils.layer_test(
        keras.layers.LayerNormalization,
        kwargs={
            'gamma_initializer': 'ones',
            'beta_initializer': 'ones',
        },
        input_shape=(3, 4, 2))
    testing_utils.layer_test(
        keras.layers.LayerNormalization,
        kwargs={'scale': False, 'center': False},
        input_shape=(3, 3))

  @tf_test_util.run_in_graph_and_eager_modes
  def test_layernorm_weights(self):
    layer = keras.layers.LayerNormalization(scale=False, center=False)
    layer.build((None, 3, 4))
    self.assertEqual(len(layer.trainable_weights), 0)
    self.assertEqual(len(layer.weights), 0)

    layer = keras.layers.LayerNormalization()
    layer.build((None, 3, 4))
    self.assertEqual(len(layer.trainable_weights), 2)
    self.assertEqual(len(layer.weights), 2)

  @tf_test_util.run_in_graph_and_eager_modes
  def test_layernorm_regularization(self):
    layer = keras.layers.LayerNormalization(
        gamma_regularizer='l1', beta_regularizer='l1')
    layer.build((None, 3, 4))
    self.assertEqual(len(layer.losses), 2)
    max_norm = keras.constraints.max_norm
    layer = keras.layers.LayerNormalization(
        gamma_constraint=max_norm, beta_constraint=max_norm)
    layer.build((None, 3, 4))
    self.assertEqual(layer.gamma.constraint, max_norm)
    self.assertEqual(layer.beta.constraint, max_norm)

  @keras_parameterized.run_all_keras_modes
  def test_layernorm_convnet(self):
    if test.is_gpu_available(cuda_only=True):
      with self.session(use_gpu=True):
        model = keras.models.Sequential()
        norm = keras.layers.LayerNormalization(
            input_shape=(3, 4, 4), params_axis=1)
        model.add(norm)
        model.compile(loss='mse',
                      optimizer=gradient_descent.GradientDescentOptimizer(0.01),
                      run_eagerly=testing_utils.should_run_eagerly())

        # centered on 5.0, variance 10.0
        x = np.random.normal(loc=5.0, scale=10.0, size=(1000, 3, 4, 4))
        model.fit(x, x, epochs=4, verbose=0)
        out = model.predict(x)
        out -= np.reshape(keras.backend.eval(norm.beta), (1, 3, 1, 1))
        out /= np.reshape(keras.backend.eval(norm.gamma), (1, 3, 1, 1))

        np.testing.assert_allclose(np.mean(out, axis=(0, 2, 3)), 0.0, atol=1e-1)
        np.testing.assert_allclose(np.std(out, axis=(0, 2, 3)), 1.0, atol=1e-1)

  @keras_parameterized.run_all_keras_modes
  def test_layernorm_convnet_channel_last(self):
    model = keras.models.Sequential()
    norm = keras.layers.LayerNormalization(input_shape=(4, 4, 3))
    model.add(norm)
    model.compile(loss='mse',
                  optimizer=gradient_descent.GradientDescentOptimizer(0.01),
                  run_eagerly=testing_utils.should_run_eagerly())

    # centered on 5.0, variance 10.0
    x = np.random.normal(loc=5.0, scale=10.0, size=(1000, 4, 4, 3))
    model.fit(x, x, epochs=4, verbose=0)
    out = model.predict(x)
    out -= np.reshape(keras.backend.eval(norm.beta), (1, 1, 1, 3))
    out /= np.reshape(keras.backend.eval(norm.gamma), (1, 1, 1, 3))

    np.testing.assert_allclose(np.mean(out, axis=(0, 1, 2)), 0.0, atol=1e-1)
    np.testing.assert_allclose(np.std(out, axis=(0, 1, 2)), 1.0, atol=1e-1)

  @keras_parameterized.run_all_keras_modes
  def test_layernorm_correctness(self):
    _run_layernorm_correctness_test(
        normalization.LayerNormalization, dtype='float32')

  @keras_parameterized.run_all_keras_modes
  def test_layernorm_mixed_precision(self):
    _run_layernorm_correctness_test(
        normalization.LayerNormalization, dtype='float16')

  def doOutputTest(self,
                   input_shape,
                   tol=1e-5,
                   norm_axis=None,
                   params_axis=-1,
                   dtype=None):
    ndim = len(input_shape)
    if norm_axis is None:
      moments_axis = range(1, ndim)
    elif isinstance(norm_axis, int):
      if norm_axis < 0:
        moments_axis = [norm_axis + ndim]
      else:
        moments_axis = [norm_axis]
    else:
      moments_axis = []
      for dim in norm_axis:
        if dim < 0:
          dim = dim + ndim
        moments_axis.append(dim)

    moments_axis = tuple(moments_axis)
    expected_shape = []
    for i in range(ndim):
      if i not in moments_axis:
        expected_shape.append(input_shape[i])

    expected_mean = np.zeros(expected_shape)
    expected_var = np.ones(expected_shape)
    for mu in [0.0, 1e2]:
      for sigma in [1.0, 0.1]:
        inputs = np.random.randn(*input_shape) * sigma + mu
        inputs_t = constant_op.constant(inputs, shape=input_shape)
        layer = normalization.LayerNormalization(
            norm_axis=norm_axis, params_axis=params_axis, dtype=dtype)
        outputs = layer(inputs_t)
        beta = layer.beta
        gamma = layer.gamma
        for weight in layer.weights:
          self.evaluate(weight.initializer)
        outputs = self.evaluate(outputs)
        beta = self.evaluate(beta)
        gamma = self.evaluate(gamma)

        # The mean and variance of the output should be close to 0 and 1
        # respectively.

        # Make sure that there are no NaNs
        self.assertFalse(np.isnan(outputs).any())
        mean = np.mean(outputs, axis=moments_axis)
        var = np.var(outputs, axis=moments_axis)
        # Layer-norm implemented in numpy
        eps = 1e-12
        expected_out = (
            (gamma * (inputs - np.mean(
                inputs, axis=moments_axis, keepdims=True)) /
             np.sqrt(eps + np.var(
                 inputs, axis=moments_axis, keepdims=True))) + beta)
        self.assertAllClose(expected_mean, mean, atol=tol, rtol=tol)
        self.assertAllClose(expected_var, var, atol=tol)
        # The full computation gets a bigger tolerance
        self.assertAllClose(expected_out, outputs, atol=5 * tol)

  @tf_test_util.run_in_graph_and_eager_modes
  def testOutput2DInput(self):
    self.doOutputTest((10, 300))
    self.doOutputTest((10, 300), norm_axis=[0])
    self.doOutputTest((10, 300), params_axis=[0, 1])

  @tf_test_util.run_in_graph_and_eager_modes
  def testOutput2DInputDegenerateNormAxis(self):
    with self.assertRaisesRegexp(ValueError, r'Invalid axis: 2'):
      self.doOutputTest((10, 300), norm_axis=2)

  @tf_test_util.run_in_graph_and_eager_modes
  def testOutput4DInput(self):
    self.doOutputTest((100, 10, 10, 3))

  @tf_test_util.run_in_graph_and_eager_modes
  def testOutput4DInputNormOnInnermostAxis(self):
    # Equivalent tests
    shape = (100, 10, 10, 3)
    self.doOutputTest(
        shape, norm_axis=list(range(3, len(shape))), tol=1e-4, dtype='float64')
    self.doOutputTest(shape, norm_axis=-1, tol=1e-4, dtype='float64')

  @tf_test_util.run_in_graph_and_eager_modes
  def testOutputSmallInput(self):
    self.doOutputTest((10, 10, 10, 30))

  @tf_test_util.run_in_graph_and_eager_modes
  def testOutputSmallInputNormOnInnermostAxis(self):
    self.doOutputTest((10, 10, 10, 30), norm_axis=3)

  @tf_test_util.run_in_graph_and_eager_modes
  def testOutputSmallInputNormOnMixedAxes(self):
    self.doOutputTest((10, 10, 10, 30), norm_axis=[0, 3])
    self.doOutputTest((10, 10, 10, 30), params_axis=[-2, -1])
    self.doOutputTest((10, 10, 10, 30),
                      norm_axis=[0, 3],
                      params_axis=[-3, -2, -1])

  @tf_test_util.run_in_graph_and_eager_modes
  def testOutputBigInput(self):
    self.doOutputTest((1, 100, 100, 1))
    self.doOutputTest((1, 100, 100, 1), norm_axis=[1, 2])
    self.doOutputTest((1, 100, 100, 1),
                      norm_axis=[1, 2],
                      params_axis=[-2, -1])


if __name__ == '__main__':
  test.main()
apache-2.0
2,995,893,033,428,413,000
36.115073
80
0.650524
false
3.394101
true
false
false
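The correctness tests above undo beta and gamma and then check that the normalized activations have mean 0 and std 1. A hedged numpy sketch of that same inference-style normalization, for reference only (not TensorFlow code):

import numpy as np

def batchnorm_reference(x, gamma, beta, axis=(0, 1, 2), eps=1e-3):
    """Normalize x over `axis`, then scale and shift (numpy reference)."""
    mean = x.mean(axis=axis, keepdims=True)
    var = x.var(axis=axis, keepdims=True)
    return gamma * (x - mean) / np.sqrt(var + eps) + beta

x = np.random.normal(loc=5.0, scale=10.0, size=(1000, 4, 4, 3))
y = batchnorm_reference(x, gamma=np.ones(3), beta=np.zeros(3))
assert abs(y.mean()) < 1e-1 and abs(y.std() - 1.0) < 1e-1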
meisamhe/GPLshared
Programming/MPI — AMath 483 583, Spring 2013 1.0 documentation_files/s2.py
1
1744
import time
import threading


# @include
class SpellCheckService:
    w_last = closest_to_last_word = None
    lock = threading.Lock()

    @staticmethod
    def service(req, resp):
        w = req.extract_word_to_check_from_request()
        result = None
        with SpellCheckService.lock:
            if w == SpellCheckService.w_last:
                result = SpellCheckService.closest_to_last_word.copy()
        if result is None:
            result = closest_in_dictionary(w)
            with SpellCheckService.lock:
                SpellCheckService.w_last = w
                SpellCheckService.closest_to_last_word = result
        resp.encode_into_response(result)
# @exclude


class ServiceRequest:
    def __init__(self, s):
        self.request = s

    def extract_word_to_check_from_request(self):
        return self.request


class ServiceResponse:
    response = None

    def encode_into_response(self, s):
        self.response = s


def closest_in_dictionary(w):
    time.sleep(0.2)
    return [w + '_result']


class ServiceThread(threading.Thread):
    def __init__(self, data):
        super().__init__()
        self.data = data

    def run(self):
        start_time = time.time()
        req = ServiceRequest(self.data)
        resp = ServiceResponse()
        SpellCheckService.service(req, resp)
        print(self.data, '->', resp.response,
              '(%.3f sec)' % (time.time() - start_time))


def main():
    i = 0
    while True:
        ServiceThread('req:%d' % (i + 1)).start()
        if i > 0:
            # while req:i+1 is computed we could return req:i from the cache
            ServiceThread('req:%d' % i).start()
        time.sleep(0.5)
        i += 1


if __name__ == '__main__':
    main()
gpl-3.0
-6,428,526,257,127,913,000
22.567568
76
0.575115
false
3.62578
false
false
false
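A hedged single-shot usage sketch of the service above, without threads. Note the design choice in SpellCheckService.service: the lock is released while closest_in_dictionary() runs, so the slow lookup never blocks concurrent cache hits for the previous word.

req = ServiceRequest('hello')
resp = ServiceResponse()
SpellCheckService.service(req, resp)  # slow path, fills the one-entry cache
SpellCheckService.service(req, resp)  # fast path, served from the cache
print(resp.response)                  # ['hello_result']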
gypsymauro/gestione-cantiere
build/lib.linux-x86_64-2.7/cantiere/admin.py
1
1533
from django.contrib import admin

# Register your models here.
from .models import Squadra
from .models import StatoSegnalazione
from .models import Segnalazione
from .models import StatoIntervento
from .models import Intervento
from .models import Risorsa
from .models import InterventoRisorsa
from .models import Costo
from .models import CentroCosto
from .models import Allegato


class InterventoRisorsaInline(admin.TabularInline):
    model = InterventoRisorsa
    exclude = ['created', 'created_by', 'modified', 'modified_by', 'deleted', 'note']


class RisorsaAdmin(admin.ModelAdmin):
    inlines = (InterventoRisorsaInline,)
    exclude = ['created', 'created_by', 'modified', 'modified_by', 'deleted']


class InterventoAdmin(admin.ModelAdmin):
    inlines = (InterventoRisorsaInline,)
    list_display = ['oggetto', 'data_inizio', 'stato', 'stampa_intervento']
    list_editable = ['stato']
    ordering = ['created']
    exclude = ['created', 'created_by', 'modified', 'modified_by', 'deleted']
    list_filter = ('stato', 'data_inizio', 'centro_costo', 'responsabile')
    save_on_top = True
    search_fields = ('oggetto', 'data_inizio')


admin.site.register(Squadra)
admin.site.register(StatoSegnalazione)
admin.site.register(Segnalazione)
admin.site.register(StatoIntervento)
admin.site.register(Intervento, InterventoAdmin)
admin.site.register(Risorsa, RisorsaAdmin)
admin.site.register(Costo)
admin.site.register(CentroCosto)
admin.site.register(Allegato)
#admin.site.register(InterventoMezzo)
#admin.site.register(InterventoPersona)
gpl-2.0
-2,517,305,136,308,949,000
32.326087
80
0.763862
false
3.200418
false
false
false
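Django resolves the `stampa_intervento` entry in list_display above as a callable on the Intervento model. A hedged sketch of that contract (the real method lives in models.py, which is not part of this record; the URL is hypothetical):

from django.utils.html import format_html

def stampa_intervento(self):
    # hypothetical: render a print link for this record in the changelist
    return format_html('<a href="/cantiere/intervento/{}/stampa/">Stampa</a>', self.pk)
stampa_intervento.short_description = 'Stampa'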
SembeiNorimaki/Bioinformatics
EulerianCycle.py
1
1903
# Test passed :)
# TODO: split right here before the conditional.
import sys


def handle_input_output():
    # handle input
    graph = {}
    while True:
        try:
            line = sys.stdin.readline().rstrip('\n')
            left, right = line.split(' -> ')
            if left in graph.keys():
                # extend, not append: the right-hand side may list several
                # comma-separated neighbours (the original appended the raw
                # string, which broke lines like "0 -> 1,2" for repeated keys)
                graph[left].extend(right.split(','))
            else:
                graph[left] = right.split(',')
        except:
            break  # EOF
    # print(graph)

    # Execute main function
    r = EulerianCycle(graph)

    # handle output
    print('->'.join(r))


def EulerianCycle(graph):
    stack = []
    location = None
    circuit = []

    # since it's an Eulerian Cycle we can start at any vertex
    location = list(graph)[0]

    # Repeat until the current vertex has no more out-going edges (neighbors)
    # and the stack is empty.
    while len(graph[location]) > 0 or len(stack) > 0:
        if len(graph[location]) == 0:
            # If current vertex has no out-going edges
            circuit.append(location)  # add it to circuit
            location = stack.pop()    # remove the last vertex from the stack and set it as the current one
        else:
            # otherwise
            stack.append(location)            # add the vertex to the stack
            location = graph[location].pop()  # take any of its neighbors
            # remove the edge between that vertex and selected neighbor
            # and set that neighbor as the current vertex

    # Here we must append the first element at the end to close the cycle
    # but since circuit is reversed, we append the last element at the beginning
    circuit.insert(0, circuit[-1])
    return circuit[::-1]  # return the reversed circuit


if __name__ == '__main__':
    handle_input_output()
mit
-4,300,464,103,968,979,000
33
115
0.553337
false
4.541766
false
false
false
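A quick standalone check of EulerianCycle() on a small balanced graph, bypassing stdin; the adjacency lists use the same dict-of-lists shape the parser builds. Note the function consumes (mutates) the graph as it walks.

graph = {'0': ['3'], '1': ['0'], '2': ['1', '6'], '3': ['2'],
         '4': ['2'], '5': ['4'], '6': ['5', '8'], '7': ['9'],
         '8': ['7'], '9': ['6']}
cycle = EulerianCycle(graph)
# prints a closed walk that uses every edge exactly once,
# starting and ending at the same vertex
print('->'.join(cycle))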
kittiu/account-payment
account_payment_return/models/payment_return.py
1
15028
# Copyright 2011-2012 7 i TRIA <http://www.7itria.cat>
# Copyright 2011-2012 Avanzosc <http://www.avanzosc.com>
# Copyright 2013 Pedro M. Baeza <[email protected]>
# Copyright 2014 Markus Schneider <[email protected]>
# Copyright 2016 Carlos Dauden <[email protected]>
# Copyright 2017 Luis M. Ontalba <[email protected]>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).

from odoo import _, api, fields, models
from odoo.exceptions import Warning as UserError
import odoo.addons.decimal_precision as dp


class PaymentReturn(models.Model):
    _name = "payment.return"
    _inherit = ['mail.thread']
    _description = 'Payment return'
    _order = 'date DESC, id DESC'

    company_id = fields.Many2one(
        'res.company', string='Company', required=True,
        states={'done': [('readonly', True)],
                'cancelled': [('readonly', True)]},
        default=lambda self: self.env['res.company']._company_default_get(
            'account'))
    date = fields.Date(
        string='Return date',
        help="This date will be used as the account entry date.",
        states={'done': [('readonly', True)],
                'cancelled': [('readonly', True)]},
        default=lambda x: fields.Date.today())
    name = fields.Char(
        string="Reference", required=True,
        states={'done': [('readonly', True)],
                'cancelled': [('readonly', True)]},
        default=lambda self: self.env['ir.sequence'].next_by_code(
            'payment.return'))
    line_ids = fields.One2many(
        comodel_name='payment.return.line', inverse_name='return_id',
        states={'done': [('readonly', True)],
                'cancelled': [('readonly', True)]})
    journal_id = fields.Many2one(
        comodel_name='account.journal', string='Bank journal', required=True,
        states={'done': [('readonly', True)],
                'cancelled': [('readonly', True)]})
    move_id = fields.Many2one(
        comodel_name='account.move',
        string='Reference to the created journal entry',
        states={'done': [('readonly', True)],
                'cancelled': [('readonly', True)]})
    state = fields.Selection(
        selection=[('draft', 'Draft'),
                   ('imported', 'Imported'),
                   ('done', 'Done'),
                   ('cancelled', 'Cancelled')],
        string='State', readonly=True, default='draft',
        track_visibility='onchange')

    @api.multi
    @api.constrains('line_ids')
    def _check_duplicate_move_line(self):
        def append_error(error_line):
            error_list.append(
                _("Payment Line: %s (%s) in Payment Return: %s") % (
                    ', '.join(error_line.mapped('move_line_ids.name')),
                    error_line.partner_id.name,
                    error_line.return_id.name
                )
            )

        error_list = []
        all_move_lines = self.env['account.move.line']
        for line in self.mapped('line_ids'):
            for move_line in line.move_line_ids:
                if move_line in all_move_lines:
                    append_error(line)
                all_move_lines |= move_line
        if (not error_list) and all_move_lines:
            duplicate_lines = self.env['payment.return.line'].search([
                ('move_line_ids', 'in', all_move_lines.ids),
                ('return_id.state', '=', 'done'),
            ])
            if duplicate_lines:
                for line in duplicate_lines:
                    append_error(line)
        if error_list:
            raise UserError(
                _("Payment reference must be unique"
                  "\n%s") % '\n'.join(error_list)
            )

    def _get_move_amount(self, return_line):
        return return_line.amount

    def _prepare_invoice_returned_vals(self):
        return {'returned_payment': True}

    @api.multi
    def unlink(self):
        if self.filtered(lambda x: x.state == 'done'):
            raise UserError(_(
                "You can not remove a payment return if state is 'Done'"))
        return super(PaymentReturn, self).unlink()

    @api.multi
    def button_match(self):
        self.mapped('line_ids').filtered(lambda x: (
            (not x.move_line_ids) and x.reference))._find_match()
        self._check_duplicate_move_line()

    @api.multi
    def _prepare_return_move_vals(self):
        """Prepare the values for the journal entry created from the return.

        :return: Dictionary with the record values.
        """
        self.ensure_one()
        return {
            'name': '/',
            'ref': _('Return %s') % self.name,
            'journal_id': self.journal_id.id,
            'date': self.date,
            'company_id': self.company_id.id,
        }

    @api.multi
    def action_confirm(self):
        self.ensure_one()
        # Check for incomplete lines
        if self.line_ids.filtered(lambda x: not x.move_line_ids):
            raise UserError(
                _("You must input all moves references in the payment "
                  "return."))
        invoices = self.env['account.invoice']
        move_line_obj = self.env['account.move.line']
        move = self.env['account.move'].create(
            self._prepare_return_move_vals()
        )
        total_amount = 0.0
        for return_line in self.line_ids:
            move_amount = self._get_move_amount(return_line)
            move_line2 = self.env['account.move.line'].with_context(
                check_move_validity=False).create({
                    'name': move.ref,
                    'debit': move_amount,
                    'credit': 0.0,
                    'account_id': return_line.move_line_ids[0].account_id.id,
                    'move_id': move.id,
                    'partner_id': return_line.partner_id.id,
                    'journal_id': move.journal_id.id,
                })
            total_amount += move_amount
            for move_line in return_line.move_line_ids:
                returned_moves = move_line.matched_debit_ids.mapped(
                    'debit_move_id')
                invoices |= returned_moves.mapped('invoice_id')
                move_line.remove_move_reconcile()
                (move_line | move_line2).reconcile()
                return_line.move_line_ids.mapped('matched_debit_ids').write(
                    {'origin_returned_move_ids': [(6, 0, returned_moves.ids)]})
            if return_line.expense_amount:
                expense_lines_vals = []
                expense_lines_vals.append({
                    'name': move.ref,
                    'move_id': move.id,
                    'debit': 0.0,
                    'credit': return_line.expense_amount,
                    'partner_id': return_line.expense_partner_id.id,
                    'account_id': (return_line.return_id.journal_id.
                                   default_credit_account_id.id),
                })
                expense_lines_vals.append({
                    'move_id': move.id,
                    'debit': return_line.expense_amount,
                    'name': move.ref,
                    'credit': 0.0,
                    'partner_id': return_line.expense_partner_id.id,
                    'account_id': return_line.expense_account.id,
                })
                for expense_line_vals in expense_lines_vals:
                    move_line_obj.with_context(
                        check_move_validity=False).create(expense_line_vals)
            extra_lines_vals = return_line._prepare_extra_move_lines(move)
            for extra_line_vals in extra_lines_vals:
                move_line_obj.create(extra_line_vals)
        move_line_obj.create({
            'name': move.ref,
            'debit': 0.0,
            'credit': total_amount,
            'account_id': self.journal_id.default_credit_account_id.id,
            'move_id': move.id,
            'journal_id': move.journal_id.id,
        })
        # Write directly because we returned payments just now
        invoices.write(self._prepare_invoice_returned_vals())
        move.post()
        self.write({'state': 'done', 'move_id': move.id})
        return True

    @api.multi
    def action_cancel(self):
        invoices = self.env['account.invoice']
        for move_line in self.mapped('move_id.line_ids').filtered(
                lambda x: x.user_type_id.type == 'receivable'):
            for partial_line in move_line.matched_credit_ids:
                invoices |= partial_line.origin_returned_move_ids.mapped(
                    'invoice_id')
                lines2reconcile = (partial_line.origin_returned_move_ids |
                                   partial_line.credit_move_id)
                partial_line.credit_move_id.remove_move_reconcile()
                lines2reconcile.reconcile()
        self.move_id.button_cancel()
        self.move_id.unlink()
        self.write({'state': 'cancelled', 'move_id': False})
        invoices.check_payment_return()
        return True

    @api.multi
    def action_draft(self):
        self.write({'state': 'draft'})
        return True


class PaymentReturnLine(models.Model):
    _name = "payment.return.line"
    _description = 'Payment return lines'

    return_id = fields.Many2one(
        comodel_name='payment.return', string='Payment return',
        required=True, ondelete='cascade')
    concept = fields.Char(
        string='Concept',
        help="Read from imported file. Only for reference.")
    reason_id = fields.Many2one(
        comodel_name='payment.return.reason', oldname="reason",
        string='Return reason',
    )
    reference = fields.Char(
        string='Reference',
        help="Reference to match moves from related documents")
    move_line_ids = fields.Many2many(
        comodel_name='account.move.line', string='Payment Reference')
    date = fields.Date(
        string='Return date', help="Only for reference",
    )
    partner_name = fields.Char(
        string='Partner name', readonly=True,
        help="Read from imported file. Only for reference.")
    partner_id = fields.Many2one(
        comodel_name='res.partner', string='Customer',
        domain="[('customer', '=', True)]")
    amount = fields.Float(
        string='Amount',
        help="Returned amount. Can be different from the move amount",
        digits=dp.get_precision('Account'))
    expense_account = fields.Many2one(
        comodel_name='account.account', string='Charges Account')
    expense_amount = fields.Float(string='Charges Amount')
    expense_partner_id = fields.Many2one(
        comodel_name="res.partner", string="Charges Partner",
        domain=[('supplier', '=', True)],
    )

    @api.multi
    def _compute_amount(self):
        for line in self:
            line.amount = sum(line.move_line_ids.mapped('credit'))

    @api.multi
    def _get_partner_from_move(self):
        for line in self.filtered(lambda x: not x.partner_id):
            partners = line.move_line_ids.mapped('partner_id')
            if len(partners) > 1:
                raise UserError(
                    _("All payments must be owned by the same partner"))
            line.partner_id = partners[:1].id
            line.partner_name = partners[:1].name

    @api.onchange('move_line_ids')
    def _onchange_move_line(self):
        self._compute_amount()

    @api.onchange('expense_amount')
    def _onchange_expense_amount(self):
        if self.expense_amount:
            journal = self.return_id.journal_id
            self.expense_account = journal.default_expense_account_id
            self.expense_partner_id = journal.default_expense_partner_id

    @api.multi
    def match_invoice(self):
        for line in self:
            domain = line.partner_id and [
                ('partner_id', '=', line.partner_id.id)] or []
            domain.append(('number', '=', line.reference))
            invoice = self.env['account.invoice'].search(domain)
            if invoice:
                payments = invoice.payment_move_line_ids
                if payments:
                    line.move_line_ids = payments[0].ids
                    if not line.concept:
                        line.concept = _('Invoice: %s') % invoice.number

    @api.multi
    def match_move_lines(self):
        for line in self:
            domain = line.partner_id and [
                ('partner_id', '=', line.partner_id.id)] or []
            if line.return_id.journal_id:
                domain.append(('journal_id', '=',
                               line.return_id.journal_id.id))
            domain.extend([
                ('account_id.internal_type', '=', 'receivable'),
                ('reconciled', '=', True),
                '|',
                ('name', '=', line.reference),
                ('ref', '=', line.reference),
            ])
            move_lines = self.env['account.move.line'].search(domain)
            if move_lines:
                line.move_line_ids = move_lines.ids
                if not line.concept:
                    line.concept = (_('Move lines: %s') %
                                    ', '.join(move_lines.mapped('name')))

    @api.multi
    def match_move(self):
        for line in self:
            domain = line.partner_id and [
                ('partner_id', '=', line.partner_id.id)] or []
            domain.append(('name', '=', line.reference))
            move = self.env['account.move'].search(domain)
            if move:
                if len(move) > 1:
                    raise UserError(
                        _("More than one matches to move reference: %s") %
                        self.reference)
                line.move_line_ids = move.line_ids.filtered(lambda l: (
                    l.user_type_id.type == 'receivable' and l.reconciled
                )).ids
                if not line.concept:
                    line.concept = _('Move: %s') % move.ref

    @api.multi
    def _find_match(self):
        # we filter again to remove already matched lines in inheritance
        lines2match = self.filtered(lambda x: (
            (not x.move_line_ids) and x.reference))
        lines2match.match_invoice()

        lines2match = lines2match.filtered(lambda x: (
            (not x.move_line_ids) and x.reference))
        lines2match.match_move_lines()

        lines2match = lines2match.filtered(lambda x: (
            (not x.move_line_ids) and x.reference))
        lines2match.match_move()
        self._get_partner_from_move()
        self.filtered(lambda x: not x.amount)._compute_amount()

    @api.multi
    def _prepare_extra_move_lines(self, move):
        """Include possible extra lines in the return journal entry
        for other return concepts.

        :param self: Reference to the payment return line.
        :param move: Reference to the journal entry created for the return.
        :return: A list with dictionaries of the extra move lines to add
        """
        self.ensure_one()
        return []
agpl-3.0
5,260,136,324,633,751,000
39.506739
79
0.544118
false
4.058331
false
false
false
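A hedged sketch of the intended flow from an Odoo shell: build a return, auto-match its lines by reference, then confirm to generate the journal entry. The record values are placeholders, not data from this module.

ret = env['payment.return'].create({
    'journal_id': bank_journal.id,  # an account.journal record, assumed to exist
    'line_ids': [(0, 0, {'reference': 'INV/2017/0042', 'concept': 'returned'})],
})
ret.button_match()    # fills move_line_ids via match_invoice / match_move_lines / match_move
ret.action_confirm()  # posts the return move and sets state to 'done'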
dpshelio/sunpy
examples/units_and_coordinates/planet_locations.py
1
1252
""" =================================== Getting the location of the planets =================================== How to get the position of planetary bodies im the solar system using `astropy's solar system ephemeris <http://docs.astropy.org/en/stable/coordinates/solarsystem.html#solar-system-ephemerides>`__ information and SunPy. """ import matplotlib.pyplot as plt from astropy.time import Time from sunpy.coordinates import get_body_heliographic_stonyhurst ############################################################################## # Lets grab the positions of each of the planets in Heliographic Stonyhurst # coordinates. obstime = Time('2014-05-15T07:54:00.005') planet_list = ['earth', 'venus', 'mars', 'mercury', 'jupiter', 'neptune', 'uranus', 'sun'] planet_coord = [get_body_heliographic_stonyhurst(this_planet, time=obstime) for this_planet in planet_list] ############################################################################## # Let's plot the results. Remember the Sun is at the center of this coordinate # system. ax = plt.subplot(projection='polar') for this_planet, this_coord in zip(planet_list, planet_coord): plt.polar(this_coord.lon.to('rad'), this_coord.radius, 'o', label=this_planet) plt.legend() plt.show()
bsd-2-clause
-3,296,977,724,421,778,000
42.172414
149
0.615815
false
3.639535
false
false
false
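A small follow-on sketch: the same coordinates can also be inspected numerically, for instance printing each body's radial distance from the Sun as an astropy Quantity in astronomical units.

for this_planet, this_coord in zip(planet_list, planet_coord):
    print('{}: {:.3f}'.format(this_planet, this_coord.radius.to('AU')))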
Djimmer/obts
Fuzzer/function_scanner.py
1
6412
#!/usr/bin/python
# -*- coding: utf-8 -*-
import socket
import time
import binascii
import os
import sys
from libmich.formats import *
import gsm_um
import smarter_fuzzer_function_def as fuzzer
import itertools
from random import randint
from math import factorial
import logging
from pythonjsonlogger import jsonlogger

# Fill in current mobile device
if len(sys.argv) > 2:
    device = sys.argv[1]
    imsi = sys.argv[2]
else:
    print("ERROR: Device name not found.")
    print("Call the script with: ./smarter_fuzzer #DEVICE #IMSI")
    print("Where #DEVICE is the name and #IMSI is the IMSI of the mobile device.")
    sys.exit(0)

############################################### SETTINGS #############################################
# Default OpenBTS port
TESTCALL_PORT = 28670

# Log file location
date = str(time.strftime("%Y%m%d-%H%M%S"))
log_all_functions_JSON = "logs/functions/" + device + "_log_" + date + ".json"

# Create socket
tcsock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
tcsock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
tcsock.settimeout(2)

ocsock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
ocsock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
HOST = 'localhost'  # Symbolic name meaning all available interfaces
PORT = 21337        # Arbitrary non-privileged port
ocsock.bind((HOST, PORT))
ocsock.settimeout(20)

# Initialize JSON logger
logger = logging.getLogger()
logger.setLevel(logging.INFO)

# create a file handler
handler = logging.FileHandler(log_all_functions_JSON)
handler.setLevel(logging.INFO)

# create a logging format
formatter = jsonlogger.JsonFormatter()
handler.setFormatter(formatter)

# add the handlers to the logger
logger.addHandler(handler)

logger.info({
    "message": "Function Scanner; Device and SIM information",
    "device": device,
    "imsi": imsi})

################################################# LOG ################################################
def log_packets(run, maxRun, packet, parsed_packet, reply, parsed_reply):
    if "ERROR" in parsed_reply:
        parsed_reply = "libmich ERROR"
    logger.info({
        "message": run,
        "maxRun": maxRun,
        "packet": str(packet).encode("hex"),
        "parsed_packet": parsed_packet,
        "reply": str(reply).encode("hex"),
        "parsed_reply": parsed_reply
    })

############################################## CHANNEL ###############################################
# Send a restart to OpenBTS to establish a new channel
def establishNewChannel():
    restart = "RESTART"
    print("Channel restart: Establishing a new channel, this may take a second.")
    tcsock.sendto(restart, ('127.0.0.1', TESTCALL_PORT))

    # Wait for OpenBTS to confirm new channel.
    try:
        reply = ocsock.recv(20000)
    except:
        print "Could not establish a new channel."
        return False
    print "New channel established, fuzzing will continue."
    time.sleep(1)
    return True


def send(tcsock, packet):
    try:
        tcsock.sendto(packet, ('127.0.0.1', TESTCALL_PORT))
        reply = tcsock.recv(1024)
    except socket.timeout:
        print "socket.timeout: Mobile device is not responding"
        return False
    return packetImplemented(reply)


def packetImplemented(reply):
    parsed_reply = repr(L3Mobile.parse_L3(reply))
    print "Received packet: ", str(reply).encode("hex") + "\n"
    print "GSM_UM interpretation: " + '\n' + parsed_reply + "\n\n"

    if "RELEASE_COMPLETE" in parsed_reply:
        return "Restart"
    elif str(reply).encode("hex") == "786e430200":  # MDL_ERROR_INDICATION
        return "Restart"
    elif str(reply).encode("hex") == "789ea400":    # MDL_ERROR_INDICATION
        return "Restart"
    elif str(reply).encode("hex") == "06126100":
        return "Skip"
    elif "Message type non-existent or not implemented" in parsed_reply:
        return "Skip"
    else:
        return reply

############################################### UTILS ################################################
def printPacket(packet, currentRun, total_runs):
    print('------------------------------- INPUT -------------------------------' + '\n')
    print('Run ' + str(currentRun) + "/" + str(total_runs) + '\n')

    # Make the packet readable
    if len(packet) % 2 == 0:
        printable = str(packet).encode("hex")
        print "Current complete packet: " + printable + '\n'
        # Decode printable hex to make it usable for L3Mobile.
        # Adding the \x for the bytes.
        l3msg_input = repr(L3Mobile.parse_L3(str(packet)))
        print "GSM_UM interpretation: \n " + l3msg_input + '\n\n'
    print "------------------------------- OUTPUT -------------------------------" + '\n'

############################################ SMART FUZZER ############################################
# This fuzzer targets fields with variable length
# Tries all different bytes for length byte
# Tries random bytes for a range of lengths
######################################################################################################

# Fuzzer specific settings
maxPacketAttempt = 5
currentPacketAttempt = 1
protocols = [3]
currentRun = 1
total_runs = len(protocols) * 256

print "Total amount of runs: " + str(total_runs)
time.sleep(1)

for i in protocols:
    firstByte = "{0:0{1}x}".format(i, 2)
    n = 1
    while n < 256:
        secondByte = "{0:0{1}x}".format(n, 2)
        if i == 5 and n == 17:
            # Skip because the packet 0511 is an Authentication Reject
            # and disconnects the mobile device
            secondByte = "{0:0{1}x}".format(n + 1, 2)
        packet = "\\x" + str(firstByte) + "\\x" + str(secondByte)
        packet = packet.replace('\\x', '').decode('hex')
        print "Packet: " + str(packet).encode("hex")
        printPacket(packet, currentRun, total_runs)

        # Send packet to the mobile device.
        result = send(tcsock, packet)
        if result == "Restart" or result == False:
            currentPacketAttempt = currentPacketAttempt + 1
            establishNewChannel()
            if currentPacketAttempt >= maxPacketAttempt:
                parsed_packet = repr(L3Mobile.parse_L3(packet))
                log_packets(currentRun, total_runs, packet, parsed_packet, "None", "None")
                currentRun = currentRun + 1
                n = n + 1
        elif result == "Skip":
            currentRun = currentRun + 1
            currentPacketAttempt = 0
            n = n + 1
        else:
            parsed_result = repr(L3Mobile.parse_L3(result))
            parsed_packet = repr(L3Mobile.parse_L3(packet))
            log_packets(currentRun, total_runs, packet, parsed_packet, result, parsed_result)
            currentRun = currentRun + 1
            currentPacketAttempt = 0
            n = n + 1
agpl-3.0
7,928,618,940,592,154,000
29.980676
102
0.611822
false
3.385428
false
false
false
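The scanner above builds each two-byte L3 probe by formatting hex pairs, gluing them with "\x" markers and then stripping those markers before decoding. A hedged, equivalent two-liner in plain Python 2 for a protocol-discriminator/message-type pair (here 0x03, 0x01):

packet = ('%02x%02x' % (3, 1)).decode('hex')
print packet.encode('hex')  # -> "0301"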
gyurisc/stackjobs
clean_data.py
1
1758
# Ad-hoc fixing of mongo database
from datetime import datetime
import pymongo

client = pymongo.MongoClient('localhost', 27017)
db = client['stackoverflow']
jobs = db['jobs']

# total jobs
total_jobs = jobs.count()
print "Total jobs: %s" % total_jobs

print "=== Fixing Date Stamp ==="
date_stamp = datetime(2016, 6, 1, 7, 1, 1)
jobs.update_many({"date": {"$exists": False}}, {"$set": {"date": date_stamp}})

count = 0
for job in jobs.find({"date": {"$exists": False}}):
    count = count + 1
    # print(job)

print "=== Fixing Date Stamp ==="
print "Number of jobs with no date is %s." % count

count = 0
for job in jobs.find({"date": date_stamp}):
    count = count + 1
    # print(job)

print "Number of jobs with default date is %s." % count

# Week number
print "=== Fixing Week Number ==="
wkcount = jobs.find({"weeknum": {"$exists": True}}).count()
print "Week number exists with %s and missing for %s jobs." % (wkcount, total_jobs - wkcount)

for job in jobs.find({"weeknum": {"$exists": False}}):
    # NB: strptime assumes the original string-dated documents; documents
    # backfilled with date_stamp above store datetime objects instead
    d = datetime.strptime(job["date"], '%Y-%m-%d')
    wk = d.isocalendar()[1]
    jobs.update({"_id": job["_id"]}, {"$set": {"weeknum": wk}})

# Employee and Location Whitespace
print "=== Fixing Employee & Location ==="
print "Stripping strings from whitespace in employer and location strings"
for job in jobs.find():
    _emp = job["employer"].strip()
    _loc = job["location"].strip()
    jobs.update({"_id": job["_id"]}, {"$set": {"employer": _emp, "location": _loc}})

print "Stripping strings from whitespace where salary exists"
for job in jobs.find({"salary": {"$exists": True}}):
    _salary = job["salary"].strip()
    jobs.update({"_id": job["_id"]}, {"$set": {"salary": _salary}})
mit
6,723,297,113,947,829,000
31.555556
93
0.610353
false
3.106007
false
false
false
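The week-number backfill above relies on ISO calendar weeks; a minimal standalone check of that computation (Python 2, like the script):

from datetime import datetime
d = datetime.strptime('2016-06-01', '%Y-%m-%d')
print d.isocalendar()[1]  # ISO week number: 22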
rodo/ansible-tsung
ec2tool.py
1
5117
#!/usr/bin/env python
import boto.ec2
import jinja2
import sys
import json
import yaml


class Tsing(boto.ec2.instance.Instance):

    def shortname(self):
        return self.private_dns_name.split('.')[0]

    @property
    def private_short_name(self):
        return self.private_dns_name.split('.')[0]


def get_specs(instance, region, data):
    """
    instance (string) : the instance type
    region (string) : the region name
    data (dict) : pricing data loaded from linux-od.json
    """
    datas = get_data_region(region, data)
    instance_spec = get_instance(instance, datas)
    return instance_spec


def get_instance(instance, data):
    """
    instance (string)
    data (dict)
    """
    result = None
    for inst in data['instanceTypes']:
        for size in inst['sizes']:
            if instance == size['size']:
                result = size
                break
    return result


def get_data_region(region, data):
    """
    region (string) : the region name
    data (dict)
    """
    config = data['config']
    ec2_regions = {"us-east-1": "us-east",
                   "us-west-1": "us-west",
                   "us-west-2": "us-west-2",
                   "eu-west-1": "eu-ireland",
                   "ap-southeast-1": "apac-sin",
                   "ap-southeast-2": "apac-syd",
                   "ap-northeast-1": "apac-tokyo",
                   "sa-east-1": "sa-east-1"}
    for reg in config['regions']:
        if reg['region'] == ec2_regions[region]:
            return reg


def write_nodes(controller, injectors, data):
    """
    controller (Tsing instance)
    injectors (list of Tsing instances)
    data (dict)
    """
    hosts = open("playbooks/roles/tsung/vars/nodes.yml", 'w')
    hosts.write("---\n")
    contr_str = ("controller: { private_dns_name: '%s', "
                 "private_ip_address: '%s', private_short_name: '%s' }\n\n")
    hosts.write(contr_str % (controller.private_dns_name,
                             controller.private_ip_address,
                             controller.private_short_name))
    hosts.write("injectors:\n")
    for injec in injectors:
        print injec.__dict__  # debug output
        # note: relies on the module-level ``region`` set in __main__
        specs = get_specs(injec.instance_type, region, data)
        injector = {"private_dns_name": str(injec.private_dns_name),
                    "private_ip_address": str(injec.private_ip_address),
                    "private_short_name": str(injec.private_short_name),
                    "instance_type": str(injec.instance_type),
                    "cpu": int(specs['vCPU'])}
        hosts.write(" - {}".format(yaml.dump(injector, encoding='utf-8')))
    hosts.close()


def instance_weights(injectors, region, data):
    """
    Define instance weights: memory size normalized by the
    smallest injector, rounded to an integer
    """
    assw = {}
    weights = []
    for injec in injectors:
        specs = get_specs(injec['instance_type'], region, data)
        weights.append(float(specs['memoryGiB']))
    minweight = min(weights)
    for injec in injectors:
        specs = get_specs(injec['instance_type'], region, data)
        iid = injec['id']
        assw[iid] = int(round(float(specs['memoryGiB']) / minweight))
    return assw


def parse_instances(instances):
    """
    Sort running instances into one controller and a list of injectors
    """
    controller = None
    injectors = []
    for instance in instances:
        inst = instance.instances[0]
        inst.__class__ = Tsing
        if inst.state == 'running':
            tags = inst.tags
            if 'tsung_role' in tags:
                if tags['tsung_role'] == 'controller':
                    controller = inst
                else:
                    injectors.append(inst)
            else:
                injectors.append(inst)
    return controller, injectors


def cloud_connect(region):
    """
    Connect to the EC2 region
    """
    print "connect on {}...".format(region)
    conn = boto.ec2.connect_to_region(region)
    return conn


def write_ini(injectors, controller):
    """
    Write the ansible .ini inventory file
    """
    templateLoader = jinja2.FileSystemLoader(searchpath=".")
    templateEnv = jinja2.Environment(loader=templateLoader)
    templateVars = {"injectors": injectors, "controller": controller}
    # Configure the cluster
    template = templateEnv.get_template("cluster.j2")
    clients = open("cluster.ini", 'w')
    clients.write(template.render(templateVars))
    clients.close()


if __name__ == "__main__":
    try:
        region = sys.argv[1]
    except IndexError:
        print "usage: ec2tool.py REGION"
        sys.exit(1)
    conn = cloud_connect(region)
    print "connected"
    instances = conn.get_all_instances()
    controller, injectors = parse_instances(instances)
    print "found\n {} injectors".format(len(injectors))
    if controller is None:
        print "ERROR: didn't find any controller"
        sys.exit(1)
    else:
        print " controller : tsung@{} ".format(controller.ip_address)
    with open("linux-od.json") as data_file:
        data = json.load(data_file)
    write_nodes(controller, injectors, data)
    write_ini(injectors, controller)
    print 'ansible-playbook -i cluster.ini -u ubuntu playbooks/tsung.yml'
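A note on the weight logic: instance_weights() sizes each injector by memory, normalized by the smallest one. A minimal, self-contained sketch of that calculation, using made-up spec values rather than the real linux-od.json pricing data:

# Illustrative sketch only: the instance types and memoryGiB values
# below are assumptions, not taken from the AWS pricing file.
injectors = [
    {"id": "i-0aaa", "instance_type": "m3.large"},
    {"id": "i-0bbb", "instance_type": "m3.xlarge"},
]
fake_specs = {"m3.large": 7.5, "m3.xlarge": 15.0}  # memoryGiB

minweight = min(fake_specs[i["instance_type"]] for i in injectors)
assw = {i["id"]: int(round(fake_specs[i["instance_type"]] / minweight))
        for i in injectors}
print(assw)  # {'i-0aaa': 1, 'i-0bbb': 2}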
gpl-3.0
6,011,133,720,753,712,000
24.713568
112
0.560876
false
3.737765
false
false
false
nemesisdesign/openwisp2
openwisp_controller/config/controller/views.py
1
14788
import json
from ipaddress import ip_address

from django.core.exceptions import FieldDoesNotExist, ValidationError
from django.db import transaction
from django.db.models import Q
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import csrf_exempt
from django.views.generic.base import View
from django.views.generic.detail import SingleObjectMixin
from swapper import load_model

from .. import settings as app_settings
from ..signals import checksum_requested, config_download_requested, device_registered
from ..utils import (
    ControllerResponse,
    forbid_unallowed,
    get_object_or_404,
    invalid_response,
    send_device_config,
    send_vpn_config,
    update_last_ip,
)

Device = load_model('config', 'Device')
OrganizationConfigSettings = load_model('config', 'OrganizationConfigSettings')
Vpn = load_model('config', 'Vpn')


class BaseConfigView(SingleObjectMixin, View):
    """
    Base view that implements a ``get_object`` method
    Subclassed by all views dealing with existing objects
    """

    def get_object(self, *args, **kwargs):
        kwargs['config__isnull'] = False
        return get_object_or_404(self.model, *args, **kwargs)


class CsrfExtemptMixin(object):
    """
    Mixin that makes the view exempt from CSRF protection
    """

    @method_decorator(csrf_exempt)
    def dispatch(self, request, *args, **kwargs):
        return super().dispatch(request, *args, **kwargs)


class UpdateLastIpMixin(object):
    def update_last_ip(self, device, request):
        result = update_last_ip(device, request)
        if result:
            # avoid any other device in the same org keeping
            # the same management_ip. This can happen when management
            # interfaces are using DHCP and get a new address which was
            # previously used by another device that may now be offline;
            # without this fix, we would end up with two devices having
            # the same management_ip, which would confuse OpenWISP
            self.model.objects.filter(
                organization=device.organization,
                management_ip=device.management_ip,
            ).exclude(pk=device.pk).update(management_ip='')
            # in the case of last_ip, we take a different approach,
            # because it may be a public IP. If it's a public IP we
            # allow it to be duplicated
            if ip_address(device.last_ip).is_private:
                Device.objects.filter(
                    organization=device.organization, last_ip=device.last_ip
                ).exclude(pk=device.pk).update(last_ip='')
        return result


class ActiveOrgMixin(object):
    """
    adds check to organization.is_active to ``get_object`` method
    """

    def get_object(self, *args, **kwargs):
        kwargs['organization__is_active'] = True
        return super().get_object(*args, **kwargs)


class DeviceChecksumView(ActiveOrgMixin, UpdateLastIpMixin, BaseConfigView):
    """
    returns device's configuration checksum
    """

    model = Device

    def get(self, request, *args, **kwargs):
        device = self.get_object(*args, **kwargs)
        bad_request = forbid_unallowed(request, 'GET', 'key', device.key)
        if bad_request:
            return bad_request
        self.update_last_ip(device, request)
        checksum_requested.send(
            sender=device.__class__, instance=device, request=request
        )
        return ControllerResponse(device.config.checksum, content_type='text/plain')


class DeviceDownloadConfigView(ActiveOrgMixin, BaseConfigView):
    """
    returns configuration archive as attachment
    """

    model = Device

    def get(self, request, *args, **kwargs):
        device = self.get_object(*args, **kwargs)
        bad_request = forbid_unallowed(request, 'GET', 'key', device.key)
        if bad_request:
            return bad_request
        config_download_requested.send(
            sender=device.__class__, instance=device, request=request
        )
        return send_device_config(device.config, request)


class DeviceUpdateInfoView(ActiveOrgMixin, CsrfExtemptMixin, BaseConfigView):
    """
    updates general information about the device
    """

    model = Device
    UPDATABLE_FIELDS = ['os', 'model', 'system']

    def post(self, request, *args, **kwargs):
        device = self.get_object(*args, **kwargs)
        bad_request = forbid_unallowed(request, 'POST', 'key', device.key)
        if bad_request:
            return bad_request
        # update device information
        for attr in self.UPDATABLE_FIELDS:
            if attr in request.POST:
                setattr(device, attr, request.POST.get(attr))
        # validate and save everything or fail otherwise
        try:
            with transaction.atomic():
                device.full_clean()
                device.save()
        except ValidationError as e:
            # dump message_dict as JSON,
            # this should make it easy to debug
            return ControllerResponse(
                json.dumps(e.message_dict, indent=4, sort_keys=True),
                content_type='text/plain',
                status=400,
            )
        return ControllerResponse('update-info: success', content_type='text/plain')


class DeviceReportStatusView(ActiveOrgMixin, CsrfExtemptMixin, BaseConfigView):
    """
    updates status of config objects
    """

    model = Device

    def post(self, request, *args, **kwargs):
        device = self.get_object(*args, **kwargs)
        config = device.config
        # ensure request is well formed and authorized
        allowed_status = [choices[0] for choices in config.STATUS]
        allowed_status.append('running')  # backward compatibility
        required_params = [('key', device.key), ('status', allowed_status)]
        for key, value in required_params:
            bad_response = forbid_unallowed(request, 'POST', key, value)
            if bad_response:
                return bad_response
        status = request.POST.get('status')
        # maintain backward compatibility with old agents
        # ("running" was changed to "applied")
        status = status if status != 'running' else 'applied'
        # call set_status_{status} method on Config model
        method_name = f'set_status_{status}'
        getattr(config, method_name)()
        return ControllerResponse(
            f'report-result: success\ncurrent-status: {config.status}\n',
            content_type='text/plain',
        )


class DeviceRegisterView(UpdateLastIpMixin, CsrfExtemptMixin, View):
    """
    registers new Config objects
    """

    model = Device
    org_config_settings_model = OrganizationConfigSettings
    UPDATABLE_FIELDS = ['os', 'model', 'system']

    def init_object(self, **kwargs):
        """
        initializes Config object with incoming POST data
        """
        device_model = self.model
        config_model = device_model.get_config_model()
        options = {}
        for attr in kwargs.keys():
            # skip attributes that are not model fields
            try:
                device_model._meta.get_field(attr)
            except FieldDoesNotExist:
                continue
            options[attr] = kwargs.get(attr)
        # do not specify key if:
        #   app_settings.CONSISTENT_REGISTRATION is False
        #   or key is ``None`` (it would cause an exception)
        if 'key' in options and (
            app_settings.CONSISTENT_REGISTRATION is False or options['key'] is None
        ):
            del options['key']
        if 'hardware_id' in options and options['hardware_id'] == "":
            options['hardware_id'] = None
        config = config_model(device=device_model(**options), backend=kwargs['backend'])
        config.organization = self.organization
        config.device.organization = self.organization
        return config

    def get_template_queryset(self, config):
        """
        returns Template model queryset
        """
        queryset = config.get_template_model().objects.all()
        # filter templates of the same organization or shared templates
        return queryset.filter(Q(organization=self.organization) | Q(organization=None))

    def add_tagged_templates(self, config, request):
        """
        adds templates specified in incoming POST tag setting
        """
        tags = request.POST.get('tags')
        if not tags:
            return
        # retrieve tags and add them to current config
        tags = tags.split()
        queryset = self.get_template_queryset(config)
        templates = queryset.filter(tags__name__in=tags).only('id').distinct()
        for template in templates:
            config.templates.add(template)

    def invalid(self, request):
        """
        ensures request is well formed
        """
        allowed_backends = [path for path, name in app_settings.BACKENDS]
        required_params = [
            ('secret', None),
            ('name', None),
            ('mac_address', None),
            ('backend', allowed_backends),
        ]
        # validate required params or forbid
        for key, value in required_params:
            invalid_response = forbid_unallowed(request, 'POST', key, value)
            if invalid_response:
                return invalid_response

    def forbidden(self, request):
        """
        ensures request is authorized:
            - secret matches an organization's shared_secret
            - the organization has registration_enabled set to True
        """
        try:
            secret = request.POST.get('secret')
            org_settings = self.org_config_settings_model.objects.select_related(
                'organization'
            ).get(shared_secret=secret, organization__is_active=True)
        except self.org_config_settings_model.DoesNotExist:
            return invalid_response(request, 'error: unrecognized secret', status=403)
        if not org_settings.registration_enabled:
            return invalid_response(request, 'error: registration disabled', status=403)
        # set an organization attribute as a side effect
        # this attribute will be used in ``init_object``
        self.organization = org_settings.organization

    def post(self, request, *args, **kwargs):
        """
        POST logic
        """
        if not app_settings.REGISTRATION_ENABLED:
            return ControllerResponse('error: registration disabled', status=403)
        # ensure request is valid
        bad_response = self.invalid(request)
        if bad_response:
            return bad_response
        # ensure request is allowed
        forbidden = self.forbidden(request)
        if forbidden:
            return forbidden
        # prepare model attributes
        key = None
        if app_settings.CONSISTENT_REGISTRATION:
            key = request.POST.get('key')
        # try retrieving existing Device first
        # (key is not None only if CONSISTENT_REGISTRATION is enabled)
        new = False
        try:
            device = self.model.objects.get(key=key)
            # update hw info
            for attr in self.UPDATABLE_FIELDS:
                if attr in request.POST:
                    setattr(device, attr, request.POST.get(attr))
            config = device.config
        # if the lookup fails, instantiate a new Device and Config
        except self.model.DoesNotExist:
            if not app_settings.REGISTRATION_SELF_CREATION:
                return ControllerResponse(
                    'Device not found in the system, please create it first.',
                    status=404,
                )
            new = True
            config = self.init_object(**request.POST.dict())
            device = config.device
        # if the lookup succeeds but the device has no related config,
        # instantiate a new Config but reuse the existing device
        except self.model.config.RelatedObjectDoesNotExist:
            config = self.init_object(**request.POST.dict())
            config.device = device
        # update last_ip field of device
        device.last_ip = request.META.get('REMOTE_ADDR')
        # validate and save everything or fail otherwise
        try:
            with transaction.atomic():
                device.full_clean()
                device.save()
                config.full_clean()
                config.save()
        except ValidationError as e:
            # dump message_dict as JSON,
            # this should make it easy to debug
            return ControllerResponse(
                json.dumps(e.message_dict, indent=4, sort_keys=True),
                content_type='text/plain',
                status=400,
            )
        # add templates specified in tags
        self.add_tagged_templates(config, request)
        # emit device registered signal
        device_registered.send(sender=device.__class__, instance=device, is_new=new)
        # prepare response
        s = (
            'registration-result: success\n'
            'uuid: {id}\n'
            'key: {key}\n'
            'hostname: {name}\n'
            'is-new: {is_new}\n'
        )
        attributes = device.__dict__.copy()
        attributes.update({'id': device.pk.hex, 'key': device.key, 'is_new': int(new)})
        return ControllerResponse(
            s.format(**attributes), content_type='text/plain', status=201
        )


class VpnChecksumView(BaseConfigView):
    """
    returns vpn's configuration checksum
    """

    model = Vpn

    def get(self, request, *args, **kwargs):
        vpn = self.get_object(*args, **kwargs)
        bad_request = forbid_unallowed(request, 'GET', 'key', vpn.key)
        if bad_request:
            return bad_request
        checksum_requested.send(sender=vpn.__class__, instance=vpn, request=request)
        return ControllerResponse(vpn.checksum, content_type='text/plain')


class VpnDownloadConfigView(BaseConfigView):
    """
    returns configuration archive as attachment
    """

    model = Vpn

    def get(self, request, *args, **kwargs):
        vpn = self.get_object(*args, **kwargs)
        bad_request = forbid_unallowed(request, 'GET', 'key', vpn.key)
        if bad_request:
            return bad_request
        config_download_requested.send(
            sender=vpn.__class__, instance=vpn, request=request
        )
        return send_vpn_config(vpn, request)


device_checksum = DeviceChecksumView.as_view()
device_download_config = DeviceDownloadConfigView.as_view()
device_update_info = DeviceUpdateInfoView.as_view()
device_report_status = DeviceReportStatusView.as_view()
device_register = DeviceRegisterView.as_view()
vpn_checksum = VpnChecksumView.as_view()
vpn_download_config = VpnDownloadConfigView.as_view()
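A hedged sketch of the registration exchange as an agent would see it, based only on the parameters validated in DeviceRegisterView.invalid() and the plain-text response built in post(). The endpoint URL, secret, and backend value are illustrative assumptions; the actual URL routing lives elsewhere in the project:

# Hypothetical client-side call: only the parameter names and the
# response format come from the view code above.
import requests

response = requests.post(
    'https://controller.example.com/controller/register/',  # assumed route
    data={
        'secret': 'org-shared-secret',       # must match an org's shared_secret
        'name': 'test-device',
        'mac_address': '00:11:22:33:44:55',
        'backend': 'netjsonconfig.OpenWrt',  # must be listed in app_settings.BACKENDS
    },
)
# on success the view returns HTTP 201 with a plain-text body like:
#   registration-result: success
#   uuid: <hex uuid>
#   key: <device key>
#   hostname: test-device
#   is-new: 1
print(response.status_code, response.text)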
gpl-3.0
6,931,061,912,724,383,000
35.78607
88
0.61719
false
4.388131
true
false
false
pinax/pinax-blog
pinax/blog/admin.py
1
3056
from functools import partial as curry

from django.contrib import admin
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _

from pinax.images.admin import ImageInline
from pinax.images.models import ImageSet

from .conf import settings
from .forms import AdminPostForm
from .models import Blog, Post, ReviewComment, Section


class PostImageSet(ImageSet):
    class Meta:
        proxy = True


class ReviewInline(admin.TabularInline):
    model = ReviewComment


def make_published(modeladmin, request, queryset):
    queryset = queryset.exclude(state=Post.STATE_CHOICES[-1][0], published__isnull=False)
    queryset.update(state=Post.STATE_CHOICES[-1][0])
    queryset.filter(published__isnull=True).update(published=timezone.now())
make_published.short_description = _("Publish selected posts")


class PostAdmin(admin.ModelAdmin):
    list_display = ["title", "state", "section", "published", "show_secret_share_url"]
    list_filter = ["section", "state"]
    form = AdminPostForm
    actions = [make_published]
    fields = [
        "section",
        "title",
        "slug",
        "author",
        "markup",
        "teaser",
        "content",
        "description",
        "sharable_url",
        "state",
        "published",
        "image_set"  # maybe this: https://github.com/anziem/django_reverse_admin
    ]
    readonly_fields = ["sharable_url"]
    prepopulated_fields = {"slug": ("title",)}
    inlines = [
        ReviewInline,
    ]

    def show_secret_share_url(self, obj):
        return '<a href="{}">{}</a>'.format(obj.sharable_url, obj.sharable_url)
    show_secret_share_url.short_description = _("Share this url")
    show_secret_share_url.allow_tags = True

    def formfield_for_dbfield(self, db_field, **kwargs):
        request = kwargs.get("request")
        if db_field.name == "author":
            ff = super().formfield_for_dbfield(db_field, **kwargs)
            ff.initial = request.user.id
            return ff
        return super().formfield_for_dbfield(db_field, **kwargs)

    def get_form(self, request, obj=None, **kwargs):
        kwargs.update({
            "formfield_callback": curry(self.formfield_for_dbfield, request=request),
        })
        return super().get_form(request, obj, **kwargs)

    def save_form(self, request, form, change):
        # this is done for explicitness that we want form.save to commit
        # form.save doesn't take a commit kwarg for this reason
        return form.save(Blog.objects.first() if not settings.PINAX_BLOG_SCOPING_MODEL else None)


if settings.PINAX_BLOG_SCOPING_MODEL:
    PostAdmin.fields.insert(0, "blog")
    PostAdmin.list_filter.append("blog__scoper")


class SectionAdmin(admin.ModelAdmin):
    prepopulated_fields = {"slug": ("name",)}


admin.site.register(Post, PostAdmin)
admin.site.register(Section, SectionAdmin)
admin.site.register(
    PostImageSet,
    list_display=["blog_post", "primary_image", "created_by", "created_at"],
    raw_id_fields=["created_by"],
    inlines=[ImageInline],
)
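The get_form override above uses functools.partial (aliased as curry) to pre-bind the current request into formfield_for_dbfield, so Django's form machinery can keep calling the callback with only the field argument while the admin still sees the request. A minimal standalone sketch of that binding pattern; the function and variable names here are illustrative, not from pinax:

from functools import partial as curry

def formfield_for_dbfield(db_field, request=None):
    # stand-in for the admin hook: sees the pre-bound request
    return "field={} user={}".format(db_field, request["user"])

# bind the per-request context once, then hand the callback off
fake_request = {"user": "admin"}
callback = curry(formfield_for_dbfield, request=fake_request)

# the caller only supplies the field, as Django's form machinery does
print(callback("author"))  # field=author user=admin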
mit
7,357,990,425,241,163,000
29.56
97
0.659359
false
3.690821
false
false
false